diff --git a/CHANGELOG.md b/CHANGELOG.md index 8eea59a439b..d0a150898ac 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,34 @@ # Changelog +## v1.112.0 (12/01/2022) + +### Features: +- [#4645](https://github.com/telstra/open-kilda/pull/4645) Implement swap for YFlow paths +- [#4648](https://github.com/telstra/open-kilda/pull/4648) Y-Flow: Ping API design (Issue: [#4589](https://github.com/telstra/open-kilda/issues/4589)) [**docs**] +- [#4146](https://github.com/telstra/open-kilda/pull/4146) Adding feature to edit/delete/update isl bfd properties (Issue: [#3920](https://github.com/telstra/open-kilda/issues/3920)) [**gui**] + +### Bug Fixes: +- [#4658](https://github.com/telstra/open-kilda/pull/4658) Fix false negative stats cache missing warnings + +### Improvements: +- [#4643](https://github.com/telstra/open-kilda/pull/4643) ignore qinq tests in yflow spec for singleTable mode [**tests**] +- [#4646](https://github.com/telstra/open-kilda/pull/4646) [test] fix LagPortSpec [**tests**] +- [#4647](https://github.com/telstra/open-kilda/pull/4647) Of Group converter for Rule Manager [**floodlight**] +- [#4630](https://github.com/telstra/open-kilda/pull/4630) Upgrade Gradle to 7.3.3 and other plugins to the recent versions +- [#4151](https://github.com/telstra/open-kilda/pull/4151) Improvement in kilda gui to defend against brute force attacks (Issue: [#3564](https://github.com/telstra/open-kilda/issues/3564)) [**gui**] + +### Other changes: +- [#4640](https://github.com/telstra/open-kilda/pull/4640) [yflow] check that bandwidth is properly consumed [**tests**] +- [#4650](https://github.com/telstra/open-kilda/pull/4650) [yflow] add test for y-flow reroute [**tests**] +- [#4624](https://github.com/telstra/open-kilda/pull/4624) add tests for yFlow validation [**tests**] + +For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.111.0...v1.112.0). + +### Affected Components: +flow-hs, gui, fl + +--- + ## v1.111.0 (04/01/2022) ### Features: @@ -31,6 +60,7 @@ For the complete list of changes, check out [the commit log](https://github.com/ ### Affected Components: stats, flow-monitor, network, fl, flow-hs +--- ## v1.110.0 (21/12/2021) @@ -63,14 +93,12 @@ flow-hs, server42 - [#4611](https://github.com/telstra/open-kilda/pull/4611) Rule Manager: Rules post processing - [#4612](https://github.com/telstra/open-kilda/pull/4612) Add server42 service rules rule manager implementation - ### Improvements: - [#4608](https://github.com/telstra/open-kilda/pull/4608) [test] fix timeUnstable field in featureToggleV2Spec [**tests**] - [#4609](https://github.com/telstra/open-kilda/pull/4609) adjust test for #4607 (Issue: [#4607](https://github.com/telstra/open-kilda/issues/4607)) [**tests**] - [#4618](https://github.com/telstra/open-kilda/pull/4618) Use spock 2.1 release instead of snapshot [**tests**] - [#4573](https://github.com/telstra/open-kilda/pull/4573) Make all tests do automatic switch sync in cleanup on validation failures [**tests**] - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.109.0...v1.109.1). --- @@ -79,13 +107,12 @@ For the complete list of changes, check out [the commit log](https://github.com/ ### Improvements: - [#4626](https://github.com/telstra/open-kilda/pull/4626) Upgrade log4j dependency version to mitigate log4shell - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.109.0...v1.109.0.1).
### Affected Components: all ----- +--- ## v1.109.0 (08/12/2021) @@ -100,20 +127,18 @@ all - [#4603](https://github.com/telstra/open-kilda/pull/4603) Rule Manager: Flow mirror - [#4542](https://github.com/telstra/open-kilda/pull/4542) Adapt existing Flow API to support YFlow sub-flows (Issue: [#4534](https://github.com/telstra/open-kilda/issues/4534)) - ### Improvements: - [#4610](https://github.com/telstra/open-kilda/pull/4610) Introduce joint rule generator for y flows - [#4483](https://github.com/telstra/open-kilda/pull/4483) Add rule manager design details [**docs**] - [#4350](https://github.com/telstra/open-kilda/pull/4350) Remove unused OFExxxState classes (Issue: [#238](https://github.com/telstra/open-kilda/issues/238)) - [#4571](https://github.com/telstra/open-kilda/pull/4571) PoC for shared bandwidth group solution - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.108.0...v1.109.0). ### Affected Components: reroute, flow-hs ----- +--- ## v1.108.0 (29/11/2021) @@ -146,7 +171,7 @@ For the complete list of changes, check out [the commit log](https://github.com/ ### Affected Components: swmanager, flow-hs ----- +--- ## v1.107.4 (23/11/2021) @@ -158,7 +183,7 @@ For the complete list of changes, check out [the commit log](https://github.com/ ### Affected Components: fl ----- +--- ## v1.107.3 (17/11/2021) @@ -196,18 +221,19 @@ For the complete list of changes, check out [the commit log](https://github.com/ ### Affected Components: server42, fl +--- ## v1.107.2 (13/11/2021) ### Improvements: - [#4579](https://github.com/telstra/open-kilda/pull/4579) At most once guarantee strategy for flowhs input spout - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.107.1...v1.107.2). ### Affected Components: flow-hs +--- ## v1.107.1 (02/11/2021) @@ -322,7 +348,6 @@ nbworker, gui, flow-hs, flow-monitor, orientdb - [#4400](https://github.com/telstra/open-kilda/pull/4400) Improve persistence layer implementation [**storm-topologies**] - [#4476](https://github.com/telstra/open-kilda/pull/4476) delete duplicate text in server42-to-lab/README [**docs**][**tests**] - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.105.0...v1.105.1). ### Affected Components: @@ -350,7 +375,6 @@ ping, flow-hs, nbworker, reroute, swmanager, network, flow-monitor - [#4405](https://github.com/telstra/open-kilda/pull/4405) Remove flow group id field (Issue: [#4404](https://github.com/telstra/open-kilda/issues/4404)) - [#4472](https://github.com/telstra/open-kilda/pull/4472) Changed log level of unknown GRPC responces in Network topology. [**storm-topologies**] - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.104.0...v1.105.0). ### Affected Components: @@ -458,6 +482,8 @@ For the complete list of changes, check out [the commit log](https://github.com/ ### Affected Components: orientdb, grpc, flow, flow-hs, stats, network, fl +--- + ## v1.101.0 (09/08/2021) ### Features: @@ -539,7 +565,6 @@ OrientDB schema have been changed in this release. 
You need to apply schema migr - [#4349](https://github.com/telstra/open-kilda/pull/4349) unignore according to #4317 [**tests**] - [#4351](https://github.com/telstra/open-kilda/pull/4351) Add validation of OrientDB connections in the graph factory [**storm-topologies**] - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.98.0...v1.99.0). ### Affected Components: @@ -646,6 +671,8 @@ For the complete list of changes, check out [the commit log](https://github.com/ ### Affected Components: flow-hs, network, swmanager, fl +--- + ## v1.95.0 (07/06/2021) ### Features: @@ -1052,7 +1079,6 @@ gui, reroute, fl, stats-router, portstate, nbworker, ping, swmanager, nb, flow-h - [#4006](https://github.com/telstra/open-kilda/pull/4006) add test for RTL a-switcl link + antiflap [**tests**] - [#4031](https://github.com/telstra/open-kilda/pull/4031) Improve some waits in func tests [**tests**] - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.83.0...v1.84.0). ### Affected Components: @@ -1175,7 +1201,6 @@ fl - [#3898](https://github.com/telstra/open-kilda/pull/3898) Add flow-id for attendant traffic stat entries [**storm-topologies**] - [#3963](https://github.com/telstra/open-kilda/pull/3963) Create common ZK nodes by default - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.80.0...v1.81.0). ### Affected Components: @@ -1297,13 +1322,11 @@ nbworker, network, gui, fl, flow-hs - [#3838](https://github.com/telstra/open-kilda/pull/3838) ignore unstable test in swapEndpointSpec [**tests**] - [#3839](https://github.com/telstra/open-kilda/pull/3839) Wrap git commands for floodlight and loxigen in gradle - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.76.0...v1.77.0). ### Affected Components: stats, stats-router, ping, network, nb, reroute, nbworker, otsdb, router, connected, flow-hs, isllatency, portstate, fl, swmanager - --- ## v1.76.0 (29/10/2020) @@ -1346,7 +1369,6 @@ nb, nbworker, network - [#3796](https://github.com/telstra/open-kilda/pull/3796) add topology_dependent tag for grpc tests [**tests**] - [#3735](https://github.com/telstra/open-kilda/pull/3735) extend pathsSpec [**tests**] - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.74.1...v1.75.0). ### Affected Components: @@ -1457,7 +1479,6 @@ The migration procedure is defined in [neo4j-to-orientdb-migration-steps](https: - [#3738](https://github.com/telstra/open-kilda/pull/3738) Simplify BFD session management [**storm-topologies**] - [#3739](https://github.com/telstra/open-kilda/pull/3739) add tests for #3728 [**tests**] - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.72.1...v1.72.2). 
### Affected Components: @@ -1483,7 +1504,6 @@ gui - [#3728](https://github.com/telstra/open-kilda/pull/3728) update port/vlan should not cause reroute [**floodlight**][**tests**] - [#3730](https://github.com/telstra/open-kilda/pull/3730) Added encapsulation_type and path_computation_strategy to /v1/network/paths API [**api**][**northbound**][**storm-topologies**] - ### Improvements: - [#3709](https://github.com/telstra/open-kilda/pull/3709) Cleanup outdated kafka-topic references - [#3726](https://github.com/telstra/open-kilda/pull/3726) add template functional-tests.log4j2.toml [**tests**] @@ -1637,7 +1657,6 @@ ping, router, nb, nbworker, stats, flow-hs, fl, network - [#3672](https://github.com/telstra/open-kilda/pull/3672) Add catch of an exception when the ISL controller is not found [**storm-topologies**] - [#3676](https://github.com/telstra/open-kilda/pull/3676) Build outside of git repo - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.68.0...v1.68.1). ### Affected Components: @@ -1679,7 +1698,6 @@ nb, flow-hs, router, nbworker, network - [#3642](https://github.com/telstra/open-kilda/pull/3642) remove ignore annotation for "Traffic counters in ingress rule are reset on flow rerouting" (Issues: [#3641](https://github.com/telstra/open-kilda/issues/3641) [#3641](https://github.com/telstra/open-kilda/issues/3641)) [**tests**] - [#3644](https://github.com/telstra/open-kilda/pull/3644) minor fixes according to v1.66 [**tests**] - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.66.1...v1.67.0). ### Affected Components: @@ -1763,7 +1781,6 @@ Consider using the following migration scripts to update db: - [1.25 migration-script](https://github.com/telstra/open-kilda/blob/v1.65.0/services/src/neo4j/migrations/1.25-change-type-of-flow-event-timestamp/1-change-type-of-history-timestamps-form-iso-to-epoch.xml) - In case of issues these rollback scripts should be executed: - [1.25 rollback.cql](https://github.com/telstra/open-kilda/blob/v1.65.0/services/src/neo4j/migrations/1.25-change-type-of-flow-event-timestamp/rollback.cql) @@ -1895,7 +1912,6 @@ grpc, stats, network, router, flow, flow-hs, swmanager, fl, gui ### Features: - [#3438](https://github.com/telstra/open-kilda/pull/3438) Introducing flow with 2 levels of VLAN tagging on endpoints [**floodlight**][**storm-topologies**] - ### Improvements: - [#3494](https://github.com/telstra/open-kilda/pull/3494) Add tests for partial update v2 api [**tests**] - [#3499](https://github.com/telstra/open-kilda/pull/3499) Add proper history wait in tests after path swap [**tests**] @@ -1904,7 +1920,6 @@ grpc, stats, network, router, flow, flow-hs, swmanager, fl, gui - [#3510](https://github.com/telstra/open-kilda/pull/3510) fix tests according to 1.59.0 v [**tests**] - [#3484](https://github.com/telstra/open-kilda/pull/3484) Various test stability fixes [**tests**] - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.59.0...v1.60.0). 
### Affected Components: @@ -2022,7 +2037,6 @@ fl, gui, network ### Features: - [#3427](https://github.com/telstra/open-kilda/pull/3427) Feature/port discovery packets (Issue: [#3043](https://github.com/telstra/open-kilda/issues/3043)) [**gui**] - ### Improvements: - [#3428](https://github.com/telstra/open-kilda/pull/3428) group cookie classes in single package - [#3405](https://github.com/telstra/open-kilda/pull/3405) add tests for link-delete spec according to #3268 [**tests**] @@ -2068,6 +2082,8 @@ For the complete list of changes, check out [the commit log](https://github.com/ ### Affected Components: nbworker, stats, gui, flow, flow-hs, fl, connected, nb +--- + ## v1.55.1 (21/04/2020) ### Features: @@ -2081,7 +2097,6 @@ nbworker, stats, gui, flow, flow-hs, fl, connected, nb - [#3393](https://github.com/telstra/open-kilda/pull/3393) Expose discovery latency as separate log field [**floodlight**] - [#3234](https://github.com/telstra/open-kilda/pull/3234) Make parallelism level configurable - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.55.0...v1.55.1). ### Affected Components: @@ -2105,7 +2120,6 @@ neo4j, grpc, fl - [#3385](https://github.com/telstra/open-kilda/pull/3385) Add retries due to #3384. Refactor an autoreroute test [**tests**] - [#3387](https://github.com/telstra/open-kilda/pull/3387) add missing import and fix knockoutSwitch operation in AutoRerouteV2Spec [**tests**] - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.54.0...v1.55.0). ### Affected Components: @@ -2320,7 +2334,6 @@ In case of issues these rollback scripts should be executed: - [#3250](https://github.com/telstra/open-kilda/pull/3250) improve SwitchValidationSpec(wait for meter) [**tests**] - [#3251](https://github.com/telstra/open-kilda/pull/3251) Make packet loss test hw-only again [**tests**] - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.50.0...v1.51.0). ### Affected Components: @@ -2356,7 +2369,6 @@ In case of issues these rollback scripts should be executed: - [#3092](https://github.com/telstra/open-kilda/pull/3092) Minor tweaks in tests according to default meters validation feature [**tests**] - [#3223](https://github.com/telstra/open-kilda/pull/3223) improve protectedPath specs [**tests**] - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.49.0...v1.50.0). ### Affected Components: @@ -2453,7 +2465,6 @@ For the complete list of changes, check out [the commit log](https://github.com/ - [#3132](https://github.com/telstra/open-kilda/pull/3132) Add test for #3128 (Issue: [#3128](https://github.com/telstra/open-kilda/issues/3128)) [**tests**] - [#3167](https://github.com/telstra/open-kilda/pull/3167) Fix reroute request filling in Flow H&S topology. (Issue: [#3128](https://github.com/telstra/open-kilda/issues/3128)) - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.47.1...v1.47.2). 
### Affected Components: @@ -2568,7 +2579,6 @@ nb, reroute, flow-hs - [#3086](https://github.com/telstra/open-kilda/pull/3086) Decrease parallelism for reply kafka spouts in flr [**storm-topologies**] - [#3062](https://github.com/telstra/open-kilda/pull/3062) Fix rollback in flow reroute [**storm-topologies**] - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.45.0...v1.45.1). ### Affected Components: @@ -2599,7 +2609,6 @@ flow-hs, router, reroute - [#2997](https://github.com/telstra/open-kilda/pull/2997) ignore fucn tests according to bugs [**tests**] - [#3004](https://github.com/telstra/open-kilda/pull/3004) Revise flow priority reroute test with respect to h&s [**tests**] - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.44.1...v1.45.0). ### Affected Components: @@ -2800,7 +2809,6 @@ fl - [#2877](https://github.com/telstra/open-kilda/pull/2877) Fix minor sonar issues [**floodlight**][**storm-topologies**] - [#2846](https://github.com/telstra/open-kilda/pull/2846) add test System does not create a flow when bandwidth is not the same on the ISL [**tests**] - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.40.0...v1.41.0). ### Affected Components: @@ -2857,7 +2865,6 @@ In case of issues these rollback scripts should be executed: - [#2849](https://github.com/telstra/open-kilda/pull/2849) Improvement/controller filter default (Issues: [#2787](https://github.com/telstra/open-kilda/issues/2787) [#2803](https://github.com/telstra/open-kilda/issues/2803)) [**gui**] - [#2841](https://github.com/telstra/open-kilda/pull/2841) Allow to use traffexam on python-3.5.2 [**tests**] - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.39.0...v1.39.1). ### Affected Components: @@ -2893,7 +2900,6 @@ gui - [#2798](https://github.com/telstra/open-kilda/pull/2798) Extend Endurance test with a 'break isl' event [**tests**] - [#2799](https://github.com/telstra/open-kilda/pull/2799) Disable port discovery feature design (Issue: [#2794](https://github.com/telstra/open-kilda/issues/2794)) [**docs**] - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.38.0...v1.39.0). ### Affected Components: @@ -2928,7 +2934,6 @@ In case of issues these rollback scripts should be executed: - [#2832](https://github.com/telstra/open-kilda/pull/2832) Removed migration 1.13 artifact - [#2767](https://github.com/telstra/open-kilda/pull/2767) extend statistic test coverage for different type of flow [**tests**] - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.37.0...v1.38.0). ### Affected Components: @@ -2964,7 +2969,6 @@ swmanager, network, stats, fl, flow-hs, nbworker, flow, nb, connected - [#2807](https://github.com/telstra/open-kilda/pull/2807) Added event logging with getting statistics. (Issue: [#2801](https://github.com/telstra/open-kilda/issues/2801)) [**floodlight**][**storm-topologies**] - [#2815](https://github.com/telstra/open-kilda/pull/2815) Fixed 1.14 migration - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.36.1...v1.37.0). 
### Affected Components: @@ -2980,6 +2984,8 @@ Also please consider using following migration scripts to update db: In case of issues these rollback scripts should be executed: - [1.14 rollback.cql](https://github.com/telstra/open-kilda/blob/v1.37.0/services/neo4j/migrations/1.14-connected-device-indexes/rollback.cql) +--- + ## v1.36.1 (19/09/2019) ### Bug Fixes: @@ -3036,7 +3042,6 @@ fl, neo4j, flow-hs, nb, flow, network, nbworker, stats - [#2778](https://github.com/telstra/open-kilda/pull/2778) Change logic of verification duplicate isl in PCE - [#2775](https://github.com/telstra/open-kilda/pull/2775) add monitoring section in readme file for performance test [**tests**] - For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.35.0...v1.35.1). ### Affected Components: diff --git a/README.md b/README.md index 4e8cb59955c..44d5bcd20ab 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ Note that the build process will install additional packages. It is recommended ### Prerequisites The followings are required for building Kilda controller: - - Gradle 6.7+ + - Gradle 7.0+ - Maven 3.3.9+ - JDK8 - Python 3.6+ @@ -44,7 +44,7 @@ sudo pip3 install docker-compose You can either install Gradle, or use Gradle wrapper: - Option 1 - Use Gradle wrapper. The Kilda repository contains an instance of Gradle Wrapper which can be used straight from here without further installation. - - Option 2 - Install Gradle (ensure that you have gradle 6.7 or later) - https://gradle.org/install/ + - Option 2 - Install Gradle (ensure that you have gradle 7.0 or later) - https://gradle.org/install/ #### Docker diff --git a/docs/design/y-flow/y-flow-nb-api.md b/docs/design/y-flow/y-flow-nb-api.md index b39d4ada985..82a73221fea 100644 --- a/docs/design/y-flow/y-flow-nb-api.md +++ b/docs/design/y-flow/y-flow-nb-api.md @@ -317,6 +317,57 @@ Response body: } ``` +## Ping operation + +REST endpoint: `POST /v2/y-flows/{y_flow_id}/ping` + +request payload: + +```json +{ + "timeout": 0 +} +``` + +response payload: + +```json +{ + "y_flow_id": "yf-AAAAAAAAAAAAAAA", + "ping_success": true, + "error": "error", + "sub_flows": [ + { + "flow_id": "fAAAAAAAAAAAAAAA", + "forward": { + "ping_success": true, + "error": "error", + "latency": 0 + }, + "reverse": { + "ping_success": true, + "error": "error", + "latency": 0 + } + }, + { + "flow_id": "fAAAAAAAAAAAAAAB", + "forward": { + "ping_success": true, + "error": "error", + "latency": 0 + }, + "reverse": { + "ping_success": true, + "error": "error", + "latency": 0 + } + } + ] +} +``` + + # Existing API changes All flows CRUD operation responses will be extended with following fields: diff --git a/src-gui/build.gradle b/src-gui/build.gradle index 3f18d66926d..021ceb25be0 100644 --- a/src-gui/build.gradle +++ b/src-gui/build.gradle @@ -1,7 +1,3 @@ -/* - * This file was generated by the Gradle 'init' task. 
- */ - plugins { id 'java' id 'jacoco' @@ -11,13 +7,9 @@ plugins { } repositories { - jcenter() + mavenCentral() maven { - url = uri('https://repo.spring.io/libs-milestone') - } - - maven { - url = uri('http://repo.maven.apache.org/maven2') + url 'https://repo.spring.io/libs-milestone' } } @@ -60,13 +52,13 @@ dependencies { testImplementation 'org.mockito:mockito-all:2.0.2-beta' testImplementation 'org.junit.jupiter:junit-jupiter-api:5.0.0-M4' testImplementation 'org.junit.jupiter:junit-jupiter-engine:5.0.0-M4' - + compileOnly 'org.projectlombok:lombok:1.18.10' testCompileOnly 'org.projectlombok:lombok:1.18.10' annotationProcessor 'org.projectlombok:lombok:1.18.10' testAnnotationProcessor 'org.projectlombok:lombok:1.18.10' - - compile group: 'org.eclipse.jdt.core.compiler', name: 'ecj', version: '4.5.1' + + implementation group: 'org.eclipse.jdt.core.compiler', name: 'ecj', version: '4.5.1' } group = 'org.openkilda' @@ -78,14 +70,14 @@ tasks.withType(JavaCompile) { } jacoco { - toolVersion = '0.8.5' + toolVersion = '0.8.7' } jacocoTestReport { reports { - xml.enabled true - csv.enabled false - html.enabled true + xml.required = true + csv.required = false + html.required = true } } @@ -114,9 +106,10 @@ checkstyle { } } +/*TODO: not compatible with Gradle 7+, need another way [checkstyleMain, checkstyleTest].each { task -> task.logging.setLevel(LogLevel.LIFECYCLE) -} +}*/ jar { manifest { diff --git a/src-gui/gradle/wrapper/gradle-wrapper.properties b/src-gui/gradle/wrapper/gradle-wrapper.properties index da9702f9e70..2e6e5897b52 100644 --- a/src-gui/gradle/wrapper/gradle-wrapper.properties +++ b/src-gui/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-6.8-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.3-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/src-gui/src/main/java/org/openkilda/constants/IConstants.java b/src-gui/src/main/java/org/openkilda/constants/IConstants.java index 2b69b69bb7d..0a3d63dc590 100644 --- a/src-gui/src/main/java/org/openkilda/constants/IConstants.java +++ b/src-gui/src/main/java/org/openkilda/constants/IConstants.java @@ -74,10 +74,33 @@ private SessionTimeout() { public static Integer DEFAULT_TIME_IN_MINUTE = 45; } + public static final class InvalidLoginAttempt { + private InvalidLoginAttempt() { + + } + + public static Integer INVALID_LOGIN_ATTEMPTS_COUNT; + + public static Integer DEFAULT_INVALID_LOGIN_ATTEMPTS_COUNT = 5; + } + + public static final class UserAccUnlockTime { + private UserAccUnlockTime() { + + } + + public static Integer USER_ACCOUNT_UNLOCK_TIME; + + public static Integer DEFAULT_USER_ACCOUNT_UNLOCK_TIME = 60; + } + + public enum ApplicationSetting { SESSION_TIMEOUT(String.valueOf(SessionTimeout.DEFAULT_TIME_IN_MINUTE)), SWITCH_NAME_STORAGE_TYPE( - StorageType.FILE_STORAGE.name()); + StorageType.FILE_STORAGE.name()), INVALID_LOGIN_ATTEMPT(String.valueOf(InvalidLoginAttempt + .DEFAULT_INVALID_LOGIN_ATTEMPTS_COUNT)), USER_ACCOUNT_UNLOCK_TIME( + String.valueOf(UserAccUnlockTime.DEFAULT_USER_ACCOUNT_UNLOCK_TIME)); final String value; @@ -174,6 +197,8 @@ private NorthBoundUrl() { public static final String GET_SWITCH_PORT_PROPERTY = VERSION_TWO + "/switches/{switch_id}" + "/ports/{port}/properties"; public static final String UPDATE_SWITCH_LOCATION = VERSION_TWO + "/switches/{switch_id}"; + public static final String GET_LINK_BFD_PROPERTIES = 
VERSION_TWO + + "/links/{src-switch}_{src-port}/{dst-switch}_{dst-port}/bfd"; } public final class OpenTsDbUrl { @@ -299,11 +324,17 @@ private Permission() { public static final String SW_SWITCH_METERS = "sw_switch_meters"; - public static final String SW_SWITCH_LOCATION_UPDATE = "sw_switch_location_update"; + public static final String UM_USER_ACCOUNT_UNLOCK = "um_user_account_unlock"; public static final String SAML_SETTING = "saml_setting"; + public static final String SW_SWITCH_LOCATION_UPDATE = "sw_switch_location_update"; + public static final String TOPOLOGY_WORLD_MAP_VIEW = "topology_world_map_view"; + + public static final String ISL_UPDATE_BFD_PROPERTIES = "isl_update_bfd_properties"; + + public static final String ISL_DELETE_BFD = "isl_delete_bfd"; } public final class Settings { diff --git a/src-gui/src/main/java/org/openkilda/constants/Status.java b/src-gui/src/main/java/org/openkilda/constants/Status.java index 2b9bbc49bdf..dc0cf5a82d2 100644 --- a/src-gui/src/main/java/org/openkilda/constants/Status.java +++ b/src-gui/src/main/java/org/openkilda/constants/Status.java @@ -1,4 +1,4 @@ -/* Copyright 2018 Telstra Open Source +/* Copyright 2019 Telstra Open Source * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -22,8 +22,9 @@ public enum Status { /** The active. */ ACTIVE("ACT"), /** The inactive. */ - INACTIVE("INA"); - /** The expired. */ + INACTIVE("INA"), + /** The locked. */ + LOCK("LCK"); private String code; diff --git a/src-gui/src/main/java/org/openkilda/controller/LoginController.java b/src-gui/src/main/java/org/openkilda/controller/LoginController.java index 0ae4ea176cd..b6afe583413 100644 --- a/src-gui/src/main/java/org/openkilda/controller/LoginController.java +++ b/src-gui/src/main/java/org/openkilda/controller/LoginController.java @@ -1,4 +1,4 @@ -/* Copyright 2018 Telstra Open Source +/* Copyright 2019 Telstra Open Source * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -28,6 +28,7 @@ import org.springframework.beans.factory.annotation.Value; import org.springframework.security.authentication.AuthenticationManager; import org.springframework.security.authentication.BadCredentialsException; +import org.springframework.security.authentication.LockedException; import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; import org.springframework.security.core.Authentication; import org.springframework.security.core.context.SecurityContextHolder; @@ -120,7 +121,7 @@ public ModelAndView authenticate(@RequestParam("username") String username, SecurityContextHolder.getContext().setAuthentication(authenticate); userService.updateLoginDetail(username); } else { - error = "Invalid email or password"; + error = "Login failed; Invalid email or password."; LOGGER.warn("Authentication failure for user: '" + username + "'"); modelAndView.setViewName(IConstants.View.REDIRECT_LOGIN); } @@ -155,7 +156,10 @@ public ModelAndView authenticate(@RequestParam("username") String username, } } catch (BadCredentialsException e) { LOGGER.warn("Authentication failure", e); - error = "Invalid email or password"; + error = e.getMessage(); + modelAndView.setViewName(IConstants.View.REDIRECT_LOGIN); + } catch (LockedException e) { + error = e.getMessage(); modelAndView.setViewName(IConstants.View.REDIRECT_LOGIN); } catch (Exception e) { LOGGER.warn("Authentication failure", e); diff --git a/src-gui/src/main/java/org/openkilda/controller/SwitchController.java b/src-gui/src/main/java/org/openkilda/controller/SwitchController.java index 31c1b172d35..91eea66a31b 100644 --- a/src-gui/src/main/java/org/openkilda/controller/SwitchController.java +++ b/src-gui/src/main/java/org/openkilda/controller/SwitchController.java @@ -23,8 +23,10 @@ import org.openkilda.integration.model.response.ConfiguredPort; import org.openkilda.log.ActivityLogger; import org.openkilda.log.constants.ActivityType; +import org.openkilda.model.BfdProperties; import org.openkilda.model.FlowInfo; import org.openkilda.model.IslLinkInfo; +import org.openkilda.model.LinkBfdProperties; import org.openkilda.model.LinkMaxBandwidth; import org.openkilda.model.LinkParametersDto; import org.openkilda.model.LinkProps; @@ -410,4 +412,67 @@ public class SwitchController { activityLogger.log(ActivityType.UPDATE_SWITCH_LOCATION, switchId); return serviceSwitch.updateSwitchLocation(switchId, switchLocation); } + + /** + * Gets the link BFD properties. + * + * @param srcSwitch the src switch + * @param srcPort the src port + * @param dstSwitch the dst switch + * @param dstPort the dst port + * @return the link Bfd properties + */ + @RequestMapping(value = "/links/bfd", method = RequestMethod.GET) + @ResponseStatus(HttpStatus.OK) + public @ResponseBody LinkBfdProperties readBfdProperties(@RequestParam(value = "src_switch", + required = false) final String srcSwitch, @RequestParam(value = "src_port", + required = false) final String srcPort, @RequestParam(value = "dst_switch", + required = false) final String dstSwitch, @RequestParam(value = "dst_port", + required = false) final String dstPort) { + return serviceSwitch.getLinkBfdProperties(srcSwitch, srcPort, dstSwitch, dstPort); + } + + /** + * Updates the link BFD properties. 
+ * + * @param srcSwitch the src switch + * @param srcPort the src port + * @param dstSwitch the dst switch + * @param dstPort the dst port + * @return the link Bfd properties + */ + @RequestMapping(value = "/links/bfd", method = RequestMethod.PUT) + @ResponseStatus(HttpStatus.OK) + @Permissions(values = IConstants.Permission.ISL_UPDATE_BFD_PROPERTIES) + public @ResponseBody LinkBfdProperties updateBfdProperties(@RequestParam(value = "src_switch", + required = true) final String srcSwitch, @RequestParam(value = "src_port", + required = true) final String srcPort, @RequestParam(value = "dst_switch", + required = true) final String dstSwitch, @RequestParam(value = "dst_port", + required = true) final String dstPort, @RequestBody(required = true) BfdProperties properties) { + activityLogger.log(ActivityType.UPDATE_ISL_BFD_PROPERTIES, "Src_SW_" + srcSwitch + "\nSrc_PORT_" + + srcPort + "\nDst_SW_" + dstSwitch + "\nDst_PORT_" + + dstPort + "\nProperties_" + properties); + return serviceSwitch.updateLinkBfdProperties(srcSwitch, srcPort, dstSwitch, dstPort, properties); + } + + /** + * Delete link BFD. + * + * @param srcSwitch the src switch + * @param srcPort the src port + * @param dstSwitch the dst switch + * @param dstPort the dst port + */ + @RequestMapping(value = "/links/bfd", method = RequestMethod.DELETE) + @ResponseStatus(HttpStatus.OK) + @Permissions(values = IConstants.Permission.ISL_DELETE_BFD) + public @ResponseBody String deleteLinkBfd(@RequestParam(value = "src_switch", + required = true) final String srcSwitch, @RequestParam(value = "src_port", + required = true) final String srcPort, @RequestParam(value = "dst_switch", + required = true) final String dstSwitch, @RequestParam(value = "dst_port", + required = true) final String dstPort) { + activityLogger.log(ActivityType.DELETE_ISL_BFD, "Src_SW_" + srcSwitch + "\nSrc_PORT_" + + srcPort + "\nDst_SW_" + dstSwitch + "\nDst_PORT_" + dstPort); + return serviceSwitch.deleteLinkBfd(srcSwitch, srcPort, dstSwitch, dstPort); + } } diff --git a/src-gui/src/main/java/org/openkilda/helper/RestClientManager.java b/src-gui/src/main/java/org/openkilda/helper/RestClientManager.java index bc456d79b27..039a6e7c40c 100644 --- a/src-gui/src/main/java/org/openkilda/helper/RestClientManager.java +++ b/src-gui/src/main/java/org/openkilda/helper/RestClientManager.java @@ -393,4 +393,30 @@ public static boolean isValidResponse(final HttpResponse response) { } } } + + /** + * Checks if is delete link bfd valid response. + * + * @param response the response + * @return true, if is valid response + */ + public static boolean isDeleteBfdValidResponse(final HttpResponse response) { + LOGGER.debug("[isValidResponse] Response Code " + response.getStatusLine().getStatusCode()); + boolean isValid = response.getStatusLine().getStatusCode() >= HttpStatus.OK.value() + && response.getStatusLine().getStatusCode() < HttpStatus.MULTIPLE_CHOICES.value(); + if (isValid) { + return true; + } else { + try { + String content = IoUtil.toString(response.getEntity().getContent()); + LOGGER.warn("Found invalid Response. 
Status Code: " + response.getStatusLine().getStatusCode() + + ", content: " + content); + throw new InvalidResponseException(response.getStatusLine().getStatusCode(), content); + } catch (IOException exception) { + LOGGER.warn("Error occurred while vaildating response", exception); + throw new InvalidResponseException(HttpError.INTERNAL_ERROR.getCode(), + HttpError.INTERNAL_ERROR.getMessage()); + } + } + } } diff --git a/src-gui/src/main/java/org/openkilda/integration/service/SwitchIntegrationService.java b/src-gui/src/main/java/org/openkilda/integration/service/SwitchIntegrationService.java index 6a699ed12c4..ac80521a45a 100644 --- a/src-gui/src/main/java/org/openkilda/integration/service/SwitchIntegrationService.java +++ b/src-gui/src/main/java/org/openkilda/integration/service/SwitchIntegrationService.java @@ -30,8 +30,10 @@ import org.openkilda.integration.model.PortConfiguration; import org.openkilda.integration.model.response.ConfiguredPort; import org.openkilda.integration.model.response.IslLink; +import org.openkilda.model.BfdProperties; import org.openkilda.model.FlowInfo; import org.openkilda.model.IslLinkInfo; +import org.openkilda.model.LinkBfdProperties; import org.openkilda.model.LinkMaxBandwidth; import org.openkilda.model.LinkParametersDto; import org.openkilda.model.LinkProps; @@ -777,4 +779,86 @@ public SwitchInfo updateSwitchLocation(String switchId, SwitchLocation switchLoc } return null; } + + /** + * Gets the link Bfd properties. + * + * @param srcSwitch the src switch + * @param srcPort the src port + * @param dstSwitch the dst switch + * @param dstPort the dst port + * @return the link Bfd properties + */ + public LinkBfdProperties getLinkBfdProperties(String srcSwitch, String srcPort, String dstSwitch, String dstPort) { + try { + HttpResponse response = restClientManager.invoke( + applicationProperties.getNbBaseUrl() + IConstants.NorthBoundUrl.GET_LINK_BFD_PROPERTIES + .replace("{src-switch}", srcSwitch).replace("{src-port}", srcPort) + .replace("{dst-switch}", dstSwitch).replace("{dst-port}", dstPort), HttpMethod.GET, + "", "application/json", + applicationService.getAuthHeader()); + if (RestClientManager.isValidResponse(response)) { + return restClientManager.getResponse(response, LinkBfdProperties.class); + } + } catch (InvalidResponseException e) { + LOGGER.error("Error occurred while reading link bfd properties", e); + throw new InvalidResponseException(e.getCode(), e.getResponse()); + } + return null; + } + + /** + * Updates the link Bfd properties. 
 * + * @return the LinkBfdProperties + */ + public LinkBfdProperties updateLinkBfdProperties(String srcSwitch, String srcPort, String dstSwitch, + String dstPort, BfdProperties properties) { + try { + HttpResponse response = restClientManager.invoke( + applicationProperties.getNbBaseUrl() + IConstants.NorthBoundUrl.GET_LINK_BFD_PROPERTIES + .replace("{src-switch}", srcSwitch).replace("{src-port}", srcPort) + .replace("{dst-switch}", dstSwitch).replace("{dst-port}", dstPort), HttpMethod.PUT, + objectMapper.writeValueAsString(properties), "application/json", + applicationService.getAuthHeader()); + if (RestClientManager.isValidResponse(response)) { + return restClientManager.getResponse(response, LinkBfdProperties.class); + } + } catch (InvalidResponseException e) { + LOGGER.error("Error occurred while updating link bfd properties", e); + throw new InvalidResponseException(e.getCode(), e.getResponse()); + } catch (JsonProcessingException e) { + e.printStackTrace(); + } + return null; + } + + /** + * Deletes link bfd. + * @param srcSwitch the src switch + * @param srcPort the src port + * @param dstSwitch the dst switch + * @param dstPort the dst port + * + * @return the delete operation response message + */ + public String deleteLinkBfd(String srcSwitch, String srcPort, String dstSwitch, String dstPort) { + try { + HttpResponse response = restClientManager.invoke( + applicationProperties.getNbBaseUrl() + IConstants.NorthBoundUrl.GET_LINK_BFD_PROPERTIES + .replace("{src-switch}", srcSwitch).replace("{src-port}", srcPort) + .replace("{dst-switch}", dstSwitch).replace("{dst-port}", dstPort), HttpMethod.DELETE, + "", "application/json", + applicationService.getAuthHeader()); + if (RestClientManager.isDeleteBfdValidResponse(response)) { + return restClientManager.getResponse(response, String.class); + } + } catch (InvalidResponseException e) { + LOGGER.error("Error occurred while deleting link bfd", e); + throw new InvalidResponseException(e.getCode(), e.getResponse()); + } catch (UnsupportedOperationException e) { + e.printStackTrace(); + } + return null; + } } diff --git a/src-gui/src/main/java/org/openkilda/log/constants/ActivityType.java b/src-gui/src/main/java/org/openkilda/log/constants/ActivityType.java index 9d9dd512970..dff7a2db0aa 100644 --- a/src-gui/src/main/java/org/openkilda/log/constants/ActivityType.java +++ b/src-gui/src/main/java/org/openkilda/log/constants/ActivityType.java @@ -61,7 +61,12 @@ public enum ActivityType { DELETE_SWITCH(40L), UPDATE_ISL_BFD_FLAG(41L), UPDATE_SW_PORT_PROPERTIES(42L), - UPDATE_SWITCH_LOCATION(43L); + CONFIG_INVALID_LOGIN_ATTEMPT_COUNT(46L), + CONFIG_USER_ACCOUNT_UNLOCK_TIME(47L), + UNLOCK_USER_ACCOUNT(48L), + UPDATE_SWITCH_LOCATION(43L), + UPDATE_ISL_BFD_PROPERTIES(44L), + DELETE_ISL_BFD(45L); private Long id; private ActivityTypeEntity activityTypeEntity; diff --git a/src-gui/src/main/java/org/openkilda/model/BfdProperties.java b/src-gui/src/main/java/org/openkilda/model/BfdProperties.java new file mode 100644 index 00000000000..3edb79c321f --- /dev/null +++ b/src-gui/src/main/java/org/openkilda/model/BfdProperties.java @@ -0,0 +1,36 @@ +/* Copyright 2021 Telstra Open Source + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.openkilda.model; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; + +import lombok.Data; + +@JsonSerialize +@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonIgnoreProperties(ignoreUnknown = true) +@Data +public class BfdProperties { + + @JsonProperty("interval_ms") + private Long intervalMs; + + @JsonProperty("multiplier") + private Short multiplier; +} diff --git a/src-gui/src/main/java/org/openkilda/model/BfdPropertiesEndpoint.java b/src-gui/src/main/java/org/openkilda/model/BfdPropertiesEndpoint.java new file mode 100644 index 00000000000..5a8da680440 --- /dev/null +++ b/src-gui/src/main/java/org/openkilda/model/BfdPropertiesEndpoint.java @@ -0,0 +1,39 @@ +/* Copyright 2021 Telstra Open Source + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.openkilda.model; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; + +import lombok.Data; + +@JsonSerialize +@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonIgnoreProperties(ignoreUnknown = true) +@Data +public class BfdPropertiesEndpoint { + + @JsonProperty("endpoint") + private NetworkEndpoint endpoint; + + @JsonProperty("properties") + private BfdProperties properties; + + @JsonProperty("status") + private String status; +} diff --git a/src-gui/src/main/java/org/openkilda/model/LinkBfdProperties.java b/src-gui/src/main/java/org/openkilda/model/LinkBfdProperties.java new file mode 100644 index 00000000000..3de9e208c6d --- /dev/null +++ b/src-gui/src/main/java/org/openkilda/model/LinkBfdProperties.java @@ -0,0 +1,40 @@ +/* Copyright 2021 Telstra Open Source + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.openkilda.model; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; + +import lombok.Data; + +@JsonSerialize +@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonIgnoreProperties(ignoreUnknown = true) +@Data +public class LinkBfdProperties { + + @JsonProperty("properties") + private BfdProperties properties; + + @JsonProperty("effective_source") + private BfdPropertiesEndpoint effectiveSource; + + @JsonProperty("effective_destination") + private BfdPropertiesEndpoint effectiveDestination; + +} diff --git a/src-gui/src/main/java/org/openkilda/model/NetworkEndpoint.java b/src-gui/src/main/java/org/openkilda/model/NetworkEndpoint.java new file mode 100644 index 00000000000..52892ee11b3 --- /dev/null +++ b/src-gui/src/main/java/org/openkilda/model/NetworkEndpoint.java @@ -0,0 +1,37 @@ +/* Copyright 2021 Telstra Open Source + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.openkilda.model; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; + +import lombok.Data; + +@JsonSerialize +@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonIgnoreProperties(ignoreUnknown = true) +@Data +public class NetworkEndpoint { + + @JsonProperty("switch-id") + private String switchId; + + @JsonProperty("port-id") + private int port; + +} diff --git a/src-gui/src/main/java/org/openkilda/security/CustomAuthenticationProvider.java b/src-gui/src/main/java/org/openkilda/security/CustomAuthenticationProvider.java index 79cfa793ba1..80fd747d5e4 100644 --- a/src-gui/src/main/java/org/openkilda/security/CustomAuthenticationProvider.java +++ b/src-gui/src/main/java/org/openkilda/security/CustomAuthenticationProvider.java @@ -15,23 +15,48 @@ package org.openkilda.security; +import org.openkilda.constants.IConstants.ApplicationSetting; +import org.openkilda.constants.Status; import org.openkilda.exception.InvalidOtpException; import org.openkilda.exception.OtpRequiredException; import org.openkilda.exception.TwoFaKeyNotSetException; +import org.openkilda.service.ApplicationSettingService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.security.authentication.BadCredentialsException; +import org.springframework.security.authentication.LockedException; import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; import org.springframework.security.authentication.dao.DaoAuthenticationProvider; import org.springframework.security.core.Authentication; -import org.springframework.security.core.userdetails.UsernameNotFoundException; import org.usermanagement.dao.entity.UserEntity; import 
org.usermanagement.dao.repository.UserRepository; +import org.usermanagement.service.MailService; +import org.usermanagement.service.TemplateService; +import org.usermanagement.util.MailUtils; + +import java.sql.Timestamp; +import java.util.Calendar; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; public class CustomAuthenticationProvider extends DaoAuthenticationProvider { + private static final Logger LOGGER = LoggerFactory.getLogger(CustomAuthenticationProvider.class); + + @Autowired + private MailUtils mailUtils; + @Autowired private UserRepository userRepository; + + @Autowired + private ApplicationSettingService applicationSettingService; + + @Autowired + private MailService mailService; /* * (non-Javadoc) @@ -48,10 +73,20 @@ public Authentication authenticate(final Authentication auth) String verificationCode = customWebAuthenticationDetails.getVerificationCode(); UserEntity user = userRepository.findByUsernameIgnoreCase(auth.getName()); if (user == null || !user.getActiveFlag()) { - throw new UsernameNotFoundException("User '" + auth.getName() + "' does not exist"); + throw new BadCredentialsException("Login failed; Invalid email or password."); + } + String loginCount = null; + String unlockTime = null; + if (user.getUserId() != 1) { + loginCount = applicationSettingService.getApplicationSetting(ApplicationSetting.INVALID_LOGIN_ATTEMPT); + unlockTime = applicationSettingService.getApplicationSetting(ApplicationSetting + .USER_ACCOUNT_UNLOCK_TIME); + if (!user.getStatusEntity().getStatus().equalsIgnoreCase("ACTIVE")) { + checkUserLoginAttempts(user, loginCount, unlockTime); + } } try { - Authentication result = super.authenticate(auth); + final Authentication result = super.authenticate(auth); if (user.getIs2FaEnabled()) { if (!user.getIs2FaConfigured() && !customWebAuthenticationDetails.isConfigure2Fa()) { throw new TwoFaKeyNotSetException(); @@ -65,8 +100,70 @@ } return new UsernamePasswordAuthenticationToken(user, result.getCredentials(), result.getAuthorities()); } catch (BadCredentialsException e) { - throw new BadCredentialsException(e.getMessage()); + String error = null; + if (user.getUserId() != 1) { + error = updateInvalidLoginAttempts(user, loginCount, unlockTime); + } else { + error = "Login failed; Invalid email or password."; + } + throw new BadCredentialsException(error); + } + + } + + private void checkUserLoginAttempts(UserEntity user, String value, String accUnlockTime) { + if (user.getFailedLoginCount() != null) { + Date loginTime = user.getFailedLoginTime(); + Calendar cal = Calendar.getInstance(); + cal.setTime(loginTime); + cal.add(Calendar.MINUTE, Integer.valueOf(user.getUnlockTime())); + Date time = Calendar.getInstance().getTime(); + if (cal.getTime().after(time)) { + Date calTime = cal.getTime(); + long unlockTime = calTime.getTime() - time.getTime(); + long diffMinutes = unlockTime / (60 * 1000); + long minutes = diffMinutes + 1; + throw new LockedException("User account is locked, will be unlocked after " + + minutes + " minute(s)"); + } else { + user.setStatusEntity(Status.ACTIVE.getStatusEntity()); + user.setFailedLoginCount(null); + user.setUnlockTime(null); + user.setLoginTime(new Timestamp(System.currentTimeMillis())); + userRepository.save(user); + } + } + } + + private String updateInvalidLoginAttempts(UserEntity entity, String value, String accUnlockTime) { + Integer loginCount = entity.getFailedLoginCount(); + if (loginCount != null) { + if (loginCount + 1 >=
Integer.valueOf(value)) { + entity.setFailedLoginCount(loginCount + 1); + entity.setUnlockTime(Integer.valueOf(accUnlockTime)); + entity.setFailedLoginTime(new Timestamp(System.currentTimeMillis())); + entity.setStatusEntity(Status.getStatusByCode(Status.LOCK.getCode()).getStatusEntity()); + userRepository.save(entity); + try { + Map map = new HashMap(); + map.put("name", entity.getName()); + map.put("time", accUnlockTime); + mailService.send(entity.getEmail(), mailUtils.getSubjectAccountBlock(), + TemplateService.Template.ACCOUNT_BLOCK, map); + } catch (Exception e) { + LOGGER.warn("User account block email failed for username: '" + entity.getUsername() + "'"); + } + throw new LockedException("User account is locked for " + + Integer.valueOf(accUnlockTime) + " minute(s)"); + } + entity.setFailedLoginCount(loginCount + 1); + } else { + entity.setFailedLoginCount(1); } + int attempts = Integer.valueOf(value) - entity.getFailedLoginCount(); + String error = "Invalid email or password. You are left with " + attempts + " more attempts."; + userRepository.save(entity); + return error; } /* diff --git a/src-gui/src/main/java/org/openkilda/service/ApplicationSettingService.java b/src-gui/src/main/java/org/openkilda/service/ApplicationSettingService.java index a918869c498..3bfe84e45e7 100644 --- a/src-gui/src/main/java/org/openkilda/service/ApplicationSettingService.java +++ b/src-gui/src/main/java/org/openkilda/service/ApplicationSettingService.java @@ -88,6 +88,12 @@ public void saveOrUpdateApplicationSetting(final ApplicationSetting applicationS } else if (applicationSetting == ApplicationSetting.SWITCH_NAME_STORAGE_TYPE) { IConstants.STORAGE_TYPE_FOR_SWITCH_NAME = StorageType.get(value); activityLogger.log(ActivityType.CONFIG_SWITCH_NAME_STORAGE_TYPE, value); + } else if (applicationSetting == ApplicationSetting.INVALID_LOGIN_ATTEMPT) { + IConstants.InvalidLoginAttempt.INVALID_LOGIN_ATTEMPTS_COUNT = Integer.valueOf(value); + activityLogger.log(ActivityType.CONFIG_INVALID_LOGIN_ATTEMPT_COUNT, value); + } else if (applicationSetting == ApplicationSetting.USER_ACCOUNT_UNLOCK_TIME) { + activityLogger.log(ActivityType.CONFIG_USER_ACCOUNT_UNLOCK_TIME, value); + IConstants.UserAccUnlockTime.USER_ACCOUNT_UNLOCK_TIME = Integer.valueOf(value); } applicationSettingEntity.setValue(value); applicationSettingEntity.setUpdatedDate(new Date()); diff --git a/src-gui/src/main/java/org/openkilda/service/SwitchService.java b/src-gui/src/main/java/org/openkilda/service/SwitchService.java index 902a6552ca6..cd2c07f4f0a 100644 --- a/src-gui/src/main/java/org/openkilda/service/SwitchService.java +++ b/src-gui/src/main/java/org/openkilda/service/SwitchService.java @@ -28,8 +28,10 @@ import org.openkilda.integration.service.SwitchIntegrationService; import org.openkilda.integration.source.store.SwitchStoreService; import org.openkilda.integration.source.store.dto.InventorySwitch; +import org.openkilda.model.BfdProperties; import org.openkilda.model.FlowInfo; import org.openkilda.model.IslLinkInfo; +import org.openkilda.model.LinkBfdProperties; import org.openkilda.model.LinkMaxBandwidth; import org.openkilda.model.LinkParametersDto; import org.openkilda.model.LinkProps; @@ -597,4 +599,17 @@ public SwitchInfo updateSwitchLocation(String switchId, SwitchLocation switchLoc return switchIntegrationService.updateSwitchLocation(switchId, switchLocation); } + public LinkBfdProperties getLinkBfdProperties(String srcSwitch, String srcPort, String dstSwitch, String dstPort) { + return
switchIntegrationService.getLinkBfdProperties(srcSwitch, srcPort, dstSwitch, dstPort); + } + + public LinkBfdProperties updateLinkBfdProperties(String srcSwitch, String srcPort, String dstSwitch, + String dstPort, BfdProperties properties) { + return switchIntegrationService.updateLinkBfdProperties(srcSwitch, srcPort, dstSwitch, dstPort, properties); + } + + public String deleteLinkBfd(String srcSwitch, String srcPort, String dstSwitch, String dstPort) { + return switchIntegrationService.deleteLinkBfd(srcSwitch, srcPort, dstSwitch, dstPort); + } + } diff --git a/src-gui/src/main/java/org/openkilda/validator/ApplicationSettingsValidator.java b/src-gui/src/main/java/org/openkilda/validator/ApplicationSettingsValidator.java index 3b483b651b5..7d7b68f36c5 100644 --- a/src-gui/src/main/java/org/openkilda/validator/ApplicationSettingsValidator.java +++ b/src-gui/src/main/java/org/openkilda/validator/ApplicationSettingsValidator.java @@ -64,6 +64,26 @@ public void validate(final ApplicationSetting type, final String value) { throw new RequestValidationException(messageUtil .getAttributeNotvalid(ApplicationSetting.SWITCH_NAME_STORAGE_TYPE.name())); } + } else if (type == ApplicationSetting.INVALID_LOGIN_ATTEMPT) { + try { + if (Integer.valueOf(value) < 1) { + throw new RequestValidationException(messageUtil + .getAttributeNotvalid(ApplicationSetting.INVALID_LOGIN_ATTEMPT.name())); + } + } catch (NumberFormatException ex) { + throw new RequestValidationException(messageUtil + .getAttributeNotvalid(ApplicationSetting.INVALID_LOGIN_ATTEMPT.name())); + } + } else if (type == ApplicationSetting.USER_ACCOUNT_UNLOCK_TIME) { + try { + if (Integer.valueOf(value) < 1) { + throw new RequestValidationException(messageUtil + .getAttributeNotvalid(ApplicationSetting.USER_ACCOUNT_UNLOCK_TIME.name())); + } + } catch (NumberFormatException ex) { + throw new RequestValidationException(messageUtil + .getAttributeNotvalid(ApplicationSetting.USER_ACCOUNT_UNLOCK_TIME.name())); + } } } } diff --git a/src-gui/src/main/java/org/usermanagement/controller/UserController.java b/src-gui/src/main/java/org/usermanagement/controller/UserController.java index b8fd44251f5..5741114eaf7 100644 --- a/src-gui/src/main/java/org/usermanagement/controller/UserController.java +++ b/src-gui/src/main/java/org/usermanagement/controller/UserController.java @@ -18,6 +18,8 @@ import org.openkilda.auth.context.ServerContext; import org.openkilda.auth.model.Permissions; import org.openkilda.constants.IConstants; +import org.openkilda.log.ActivityLogger; +import org.openkilda.log.constants.ActivityType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -61,6 +63,9 @@ public class UserController { @Autowired private ServerContext serverContext; + + @Autowired + private ActivityLogger activityLogger; /** * Gets the users by role id. @@ -265,4 +270,20 @@ public boolean validateOtp(@RequestBody final UserInfo userInfo, final HttpServl public UserInfo getLoggedInUserInfo() throws AccessDeniedException { return userService.getLoggedInUserInfo(); } + + /** + * Unlock user account. + * + * @param userId the user id + * @return the userInfo + */ + @ResponseStatus(HttpStatus.OK) + @RequestMapping(value = "/unlock/account/{user_id}", method = RequestMethod.PUT) + @Permissions(values = { IConstants.Permission.UM_USER_ACCOUNT_UNLOCK }) + public Message unlockUserAccount(@PathVariable("user_id") final Long userId) { + activityLogger.log(ActivityType.UNLOCK_USER_ACCOUNT, String.valueOf(userId)); + LOGGER.info("Reset user account. 
(userId: " + userId + ")"); + userService.unlockUserAccount(userId); + return new Message("User account activated successfully"); + } } diff --git a/src-gui/src/main/java/org/usermanagement/dao/entity/UserEntity.java b/src-gui/src/main/java/org/usermanagement/dao/entity/UserEntity.java index 834e0d380f4..4170de1a659 100644 --- a/src-gui/src/main/java/org/usermanagement/dao/entity/UserEntity.java +++ b/src-gui/src/main/java/org/usermanagement/dao/entity/UserEntity.java @@ -68,6 +68,15 @@ public class UserEntity extends BaseEntity implements Serializable { @Column(name = "LOGOUT_TIME", nullable = true) private Date logoutTime; + + @Column(name = "FAILED_LOGIN_TIME") + private Date failedLoginTime; + + @Column(name = "FAILED_LOGIN_COUNT") + private Integer failedLoginCount; + + @Column(name = "UNLOCK_TIME") + private Integer unlockTime; @Column(name = "ACTIVE_FLAG") private String activeFlag; @@ -119,6 +128,40 @@ public void setRoles(final Set roles) { this.roles = roles; } + /** + * Gets the failed login count. + * + * @return the failed login count + */ + public Integer getFailedLoginCount() { + return failedLoginCount; + } + + /** + * Sets the failed login count. + * + * @param failedLoginCount the failed login count + */ + public void setFailedLoginCount(Integer failedLoginCount) { + this.failedLoginCount = failedLoginCount; + } + + public Date getFailedLoginTime() { + return failedLoginTime; + } + + public void setFailedLoginTime(Date failedLoginTime) { + this.failedLoginTime = failedLoginTime; + } + + public Integer getUnlockTime() { + return unlockTime; + } + + public void setUnlockTime(Integer unlockTime) { + this.unlockTime = unlockTime; + } + /** * Gets the user id. * diff --git a/src-gui/src/main/java/org/usermanagement/service/TemplateService.java b/src-gui/src/main/java/org/usermanagement/service/TemplateService.java index de6615f5f76..31526d86134 100644 --- a/src-gui/src/main/java/org/usermanagement/service/TemplateService.java +++ b/src-gui/src/main/java/org/usermanagement/service/TemplateService.java @@ -35,6 +35,6 @@ public interface TemplateService { * Enum of templates. */ enum Template { - RESET_ACCOUNT_PASSWORD, ACCOUNT_USERNAME, ACCOUNT_PASSWORD, RESET_2FA, CHANGE_PASSWORD; + RESET_ACCOUNT_PASSWORD, ACCOUNT_USERNAME, ACCOUNT_PASSWORD, RESET_2FA, CHANGE_PASSWORD, ACCOUNT_BLOCK; } } diff --git a/src-gui/src/main/java/org/usermanagement/service/UserService.java b/src-gui/src/main/java/org/usermanagement/service/UserService.java index f7e647eaa93..1e650906474 100644 --- a/src-gui/src/main/java/org/usermanagement/service/UserService.java +++ b/src-gui/src/main/java/org/usermanagement/service/UserService.java @@ -271,6 +271,8 @@ public void updateLoginDetail(final String userName) { throw new RequestValidationException(messageUtil.getAttributeInvalid("username", userName + "")); } userEntity.setLoginTime(Calendar.getInstance().getTime()); + userEntity.setFailedLoginCount(null); + userEntity.setUnlockTime(null); if (userEntity.getIs2FaEnabled()) { userEntity.setIs2FaConfigured(true); } @@ -582,6 +584,20 @@ public UserInfo getLoggedInUserInfo() throws AccessDeniedException { userInfo.setPermissions(requestContext.getPermissions()); return userInfo; } + + /** + * Unlock user account. 
+ * + * @param userId the user id + */ + public void unlockUserAccount(Long userId) { + UserEntity userEntity = userValidator.validateUserId(userId); + userEntity.setStatusEntity(Status.ACTIVE.getStatusEntity()); + userEntity.setFailedLoginCount(null); + userEntity.setUnlockTime(null); + userEntity.setLoginTime(new Timestamp(System.currentTimeMillis())); + userRepository.save(userEntity); + } /** * Adds user information in session. diff --git a/src-gui/src/main/java/org/usermanagement/service/impl/VelocityTemplateService.java b/src-gui/src/main/java/org/usermanagement/service/impl/VelocityTemplateService.java index f75c3fd7665..d0a040d53fd 100644 --- a/src-gui/src/main/java/org/usermanagement/service/impl/VelocityTemplateService.java +++ b/src-gui/src/main/java/org/usermanagement/service/impl/VelocityTemplateService.java @@ -51,6 +51,7 @@ private void init() { templates.put(Template.ACCOUNT_PASSWORD, "ui/templates/mail/accountPassword.vm"); templates.put(Template.RESET_2FA, "ui/templates/mail/reset2fa.vm"); templates.put(Template.CHANGE_PASSWORD, "ui/templates/mail/changePassword.vm"); + templates.put(Template.ACCOUNT_BLOCK, "ui/templates/mail/accountBlock.vm"); } diff --git a/src-gui/src/main/java/org/usermanagement/util/MailUtils.java b/src-gui/src/main/java/org/usermanagement/util/MailUtils.java index 9c0ae0fbe9e..976a0f47f70 100644 --- a/src-gui/src/main/java/org/usermanagement/util/MailUtils.java +++ b/src-gui/src/main/java/org/usermanagement/util/MailUtils.java @@ -39,6 +39,21 @@ public class MailUtils { @Value("${subject.change.password}") private String subjectChangePassword; + @Value("${subject.account.block}") + private String subjectAccountBlock; + + + public String getSubjectAccountBlock() { + return subjectAccountBlock; + } + + + + public void setSubjectAccountBlock(String subjectAccountBlock) { + this.subjectAccountBlock = subjectAccountBlock; + } + + public String getSubjectAccountUsername() { return subjectAccountUsername; diff --git a/src-gui/src/main/resources/db/import-script_31.sql b/src-gui/src/main/resources/db/import-script_31.sql new file mode 100644 index 00000000000..181e4cd85c5 --- /dev/null +++ b/src-gui/src/main/resources/db/import-script_31.sql @@ -0,0 +1,14 @@ +INSERT INTO VERSION_ENTITY (Version_ID, Version_Number, Version_Deployment_Date) +VALUES (31, 31, CURRENT_TIMESTAMP); + +INSERT INTO ACTIVITY_TYPE (activity_type_id, activity_name) VALUES + (44, 'UPDATE_ISL_BFD_PROPERTIES'), + (45, 'DELETE_ISL_BFD'); + +INSERT INTO KILDA_PERMISSION (PERMISSION_ID, PERMISSION, IS_EDITABLE, IS_ADMIN_PERMISSION, STATUS_ID, CREATED_BY, CREATED_DATE, UPDATED_BY, UPDATED_DATE,DESCRIPTION) VALUES + (360, 'isl_update_bfd_properties', false, false, 1, 1, CURRENT_TIMESTAMP, 1, CURRENT_TIMESTAMP, 'Permission to update isl bfd properties'), + (361, 'isl_delete_bfd', false, false, 1, 1, CURRENT_TIMESTAMP, 1, CURRENT_TIMESTAMP, 'Permission to delete isl bfd'); + +INSERT INTO ROLE_PERMISSION (ROLE_ID,PERMISSION_ID) VALUES + (2, 360), + (2, 361); \ No newline at end of file diff --git a/src-gui/src/main/resources/db/import-script_32.sql b/src-gui/src/main/resources/db/import-script_32.sql new file mode 100644 index 00000000000..95a8f9f9cf3 --- /dev/null +++ b/src-gui/src/main/resources/db/import-script_32.sql @@ -0,0 +1,23 @@ +INSERT INTO VERSION_ENTITY (Version_ID, Version_Number, Version_Deployment_Date) +VALUES (32, 32, CURRENT_TIMESTAMP); + +INSERT INTO KILDA_PERMISSION (PERMISSION_ID, PERMISSION, IS_EDITABLE, IS_ADMIN_PERMISSION, STATUS_ID, CREATED_BY, CREATED_DATE, UPDATED_BY, 
UPDATED_DATE,DESCRIPTION) VALUES + (362, 'um_user_account_unlock', false, false, 1, 1, CURRENT_TIMESTAMP, 1, CURRENT_TIMESTAMP, 'Permission to unlock user account'); + +INSERT INTO ROLE_PERMISSION (ROLE_ID,PERMISSION_ID) VALUES + (2, 362); + +INSERT INTO ACTIVITY_TYPE (activity_type_id, activity_name) VALUES + (46, 'CONFIG_INVALID_LOGIN_ATTEMPT_COUNT'), + (47, 'CONFIG_USER_ACCOUNT_UNLOCK_TIME'), + (48, 'UNLOCK_USER_ACCOUNT'); + +INSERT INTO APPLICATION_SETTING (id, setting_type, setting_value) VALUES (3, 'invalid_login_attempt', '5'); +INSERT INTO APPLICATION_SETTING (id, setting_type, setting_value) VALUES (4, 'user_account_unlock_time', '60'); + +INSERT INTO KILDA_STATUS (status_id, STATUS_CODE, STATUS) VALUES + (3, 'LCK', 'Lock'); + + + + diff --git a/src-gui/src/main/resources/mail.properties b/src-gui/src/main/resources/mail.properties index 01786f574c1..c6e34188942 100644 --- a/src-gui/src/main/resources/mail.properties +++ b/src-gui/src/main/resources/mail.properties @@ -3,3 +3,4 @@ subject.account.password=Account password subject.reset.password=Reset Password subject.reset.twofa=Reset 2FA subject.change.password=Change Password +subject.account.block=Account Blocked diff --git a/src-gui/src/main/webapp/ui/templates/mail/accountBlock.vm b/src-gui/src/main/webapp/ui/templates/mail/accountBlock.vm new file mode 100644 index 00000000000..6a2987a8286 --- /dev/null +++ b/src-gui/src/main/webapp/ui/templates/mail/accountBlock.vm @@ -0,0 +1,444 @@ + + + + + Set up a new password for [Product Name] + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src-gui/ui/src/app/common/constants/constants.ts b/src-gui/ui/src/app/common/constants/constants.ts index 0208fa1c8ae..3ae1c4deb79 100644 --- a/src-gui/ui/src/app/common/constants/constants.ts +++ b/src-gui/ui/src/app/common/constants/constants.ts @@ -70,6 +70,10 @@ export const MessageObj = { session_setting_saved:"Session setting saved", saving_switch_name_store_setting:"Saving switch name source setting", switch_name_store_setting_saved:"Switch name source saved", + user_unlock_time_setting_saved:"User unlock time setting saved", + saving_unlock_time_setting:"Saving user unlock time setting", + saving_login_attempt_setting:"Saving login attempt setting", + user_login_attempt_setting_saved:"User login attempt setting saved", loading_switch_store:"Loading switch store settings", saving_switch_store:"Saving switch store settings", saved_switch_store:"Switch store settings saved successfully", @@ -124,6 +128,8 @@ export const MessageObj = { user_deleted:"User removed successfully!", updating_user_status:"Updating user status", user_status_updated:"User status changed successfully!", + unblok_user:"Unblocking user", + user_unblocked:"User account is unblocked successfully", resetting_password:"Resetting Password", reset_pwd_mail_sent:"Reset password email sent successfully!", resetting_pwd_by_admin:"Resetting password by admin", @@ -144,5 +150,12 @@ export const MessageObj = { provider_deleted_success:"Provider deleted successfully", switch_updated_success:"Switch location updated successfully.", switch_updated_error:"Error in updating switch location.", + delete_bfd_properties:"Deleting BFD properties.", + updating_bfd_properties:'Updating BFD properties values.', + updating_bfd_properties_success:'BFD Properties updated successfully.', + updating_bfd_properties_error:"Error in updating BFD properties.", + BFD_properties_deleted:"BFD properties deleted successfully.", + error_BFD_properties_delete:"Error in deleting BFD 
properties.", + delete_isl_bfd_not_authorised:"You are not authorised to delete the ISL BFD Properties." } \ No newline at end of file diff --git a/src-gui/ui/src/app/common/services/common.service.ts b/src-gui/ui/src/app/common/services/common.service.ts index 84ae8eb979d..6f7e8247411 100644 --- a/src-gui/ui/src/app/common/services/common.service.ts +++ b/src-gui/ui/src/app/common/services/common.service.ts @@ -172,6 +172,12 @@ export class CommonService { saveSwitchNameSourceSettings(value){ return this.httpClient.patch(`${environment.apiEndPoint}/settings/SWITCH_NAME_STORAGE_TYPE`,value); } + saveInvalidLoginAttemptSettings(value){ + return this.httpClient.patch(`${environment.apiEndPoint}/settings/INVALID_LOGIN_ATTEMPT`,value); + } + saveUserAccountUnlockTimeSettings(value){ + return this.httpClient.patch(`${environment.apiEndPoint}/settings/USER_ACCOUNT_UNLOCK_TIME`,value); + } getAllSettings(){ return this.httpClient.get(`${environment.apiEndPoint}/settings`); } diff --git a/src-gui/ui/src/app/common/services/isl-detail.service.ts b/src-gui/ui/src/app/common/services/isl-detail.service.ts index 1f426c83a8a..2670611360e 100644 --- a/src-gui/ui/src/app/common/services/isl-detail.service.ts +++ b/src-gui/ui/src/app/common/services/isl-detail.service.ts @@ -20,6 +20,7 @@ export class IslDetailService { getISLFlowsList(query? : any) : Observable{ return this.httpClient.get(`${environment.apiEndPoint}/switch/links/flows`,{params:query}); } + getIslLatencyfromGraph(src_switch,src_port,dst_switch,dst_port,from,to,frequency){ return this.httpClient.get( `${ diff --git a/src-gui/ui/src/app/common/services/isl-list.service.ts b/src-gui/ui/src/app/common/services/isl-list.service.ts index 2c99c1582b2..22cd1f4bb43 100644 --- a/src-gui/ui/src/app/common/services/isl-list.service.ts +++ b/src-gui/ui/src/app/common/services/isl-list.service.ts @@ -80,4 +80,34 @@ export class IslListService { const url = `${environment.apiEndPoint}/switch/link/props`; return this.httpClient.put(url,requestPayload); } + + getLinkBFDProperties(src_switch, src_port, dst_switch, dst_port){ + let date = new Date().getTime(); + return this.httpClient.get(`${environment.apiEndPoint}/switch/links/bfd?src_switch=${src_switch}&src_port=${src_port}&dst_switch=${dst_switch}&dst_port=${dst_port}&_=${date}`); + + } + updateLinkBFDProperties(data,src_switch, src_port, dst_switch, dst_port){ + const url = `${environment.apiEndPoint}/switch/links/bfd?src_switch=${src_switch}&src_port=${src_port}&dst_switch=${dst_switch}&dst_port=${dst_port}`; + return this.httpClient.put(url,data); + } + deleteLinkBFDProperties(data,successRes,errorRes){ + const url = `${environment.apiEndPoint}/switch/links/bfd?src_switch=${data.src_switch}&src_port=${data.src_port}&dst_switch=${data.dst_switch}&dst_port=${data.dst_port}`; + let token = this.cookieManager.get('XSRF-TOKEN') as string; + var xhr = new XMLHttpRequest(); + xhr.withCredentials = false; + xhr.addEventListener("readystatechange", function () { + if (this.readyState == 4 && this.status == 200) { + successRes(JSON.parse(this.responseText)); + }else if(this.readyState == 4 && this.status >= 300){ + errorRes(JSON.parse(this.responseText)); + } + }); + + xhr.open("DELETE", url); + xhr.setRequestHeader("Content-Type", "application/json"); + if (token !== null) { + xhr.setRequestHeader( "X-XSRF-TOKEN" , token); + } + xhr.send(); + } } diff --git a/src-gui/ui/src/app/common/services/user.service.ts b/src-gui/ui/src/app/common/services/user.service.ts index 08eb9617a5e..69734e663af 100644 --- 
a/src-gui/ui/src/app/common/services/user.service.ts +++ b/src-gui/ui/src/app/common/services/user.service.ts @@ -99,4 +99,9 @@ const httpOptions = { return this.http.patch(`${this.configUrl}/user/settings`,payload); } + unblockUser(Id){ + const url = `${this.configUrl}/user/unlock/account/${Id}`; + return this.http.put(url,{}); + } + } \ No newline at end of file diff --git a/src-gui/ui/src/app/modules/isl/isl-detail/isl-detail.component.css b/src-gui/ui/src/app/modules/isl/isl-detail/isl-detail.component.css index a8379f2c385..db34f70dfb1 100644 --- a/src-gui/ui/src/app/modules/isl/isl-detail/isl-detail.component.css +++ b/src-gui/ui/src/app/modules/isl/isl-detail/isl-detail.component.css @@ -66,4 +66,11 @@ i.icon { .isl_details_div .col-form-label { line-height: 1; +} + +.update_isl_cost{ +padding-left: 0px; +} +.isl_delete_dropdown .dropdown-menu{ + min-width: 120px; } \ No newline at end of file diff --git a/src-gui/ui/src/app/modules/isl/isl-detail/isl-detail.component.html b/src-gui/ui/src/app/modules/isl/isl-detail/isl-detail.component.html index 73fbc8514c6..0fcc6c9c225 100644 --- a/src-gui/ui/src/app/modules/isl/isl-detail/isl-detail.component.html +++ b/src-gui/ui/src/app/modules/isl/isl-detail/isl-detail.component.html @@ -2,7 +2,7 @@
[Template diff for src-gui/ui/src/app/modules/isl/isl-detail/isl-detail.component.html: the markup was lost during extraction and only stray +/- markers and bindings survive, so the hunk is not reproducible here. The surviving fragments show the ISL detail view gaining BFD property fields (bfdPropertyData.properties and bfdPropertyData.effective_destination.properties for interval_ms and multiplier), an edit form bound to bfdPropForm with update/cancel and delete controls, and a rearranged layout of the existing source/destination switch, cost, max/available bandwidth, speed, latency, state and bfd_session_status fields, along with the Forward/Reverse graph legend, "Loading data....." indicator and auto reload time validation messages.]
Copy to Clipboard -
- - + \ No newline at end of file diff --git a/src-gui/ui/src/app/modules/isl/isl-detail/isl-detail.component.ts b/src-gui/ui/src/app/modules/isl/isl-detail/isl-detail.component.ts index 10548e53b27..93b4c104fbd 100644 --- a/src-gui/ui/src/app/modules/isl/isl-detail/isl-detail.component.ts +++ b/src-gui/ui/src/app/modules/isl/isl-detail/isl-detail.component.ts @@ -47,11 +47,13 @@ import { FlowsService } from 'src/app/common/services/flows.service'; evacuate:boolean=false; under_maintenance:boolean=false; loadingData = true; + isBFDEdit:any=false; dataSet:any; available_bandwidth:string = ''; default_max_bandwidth=''; max_bandwidth :any = ''; detailDataObservable : any; + bfdPropertyData : any; src_switch_name: string; dst_switch_name: string; graphDataForwardUrl: string; @@ -78,6 +80,7 @@ import { FlowsService } from 'src/app/common/services/flows.service'; graphMetrics = []; flowGraphMetrics=[]; autoReloadTimerId = null; + bfdPropForm:FormGroup; islForm: FormGroup; showCostEditing: boolean = false; showBandwidthEditing : boolean = false; @@ -90,6 +93,7 @@ import { FlowsService } from 'src/app/common/services/flows.service'; targetSwitch:"" } + @Output() hideToValue: EventEmitter = new EventEmitter(); newMessageDetail(){ @@ -156,10 +160,8 @@ import { FlowsService } from 'src/app/common/services/flows.service'; auto_reload_time: ["", Validators.compose([Validators.pattern("[0-9]*")])] }); this.graphMetrics = this.dygraphService.getPortMetricData(); - this.flowGraphMetrics = this.dygraphService.getFlowMetricData(); - - - } + this.flowGraphMetrics = this.dygraphService.getFlowMetricData(); +} getIslDetailData(src_switch,src_port,dst_switch,dst_port){ this.loaderService.show(MessageObj.loading_isl); @@ -214,6 +216,26 @@ import { FlowsService } from 'src/app/common/services/flows.service'; },error=>{ this.toastr.error(MessageObj.no_cost_data_returned,'Error'); }); + + this.islListService.getLinkBFDProperties(this.src_switch, this.src_port, this.dst_switch, this.dst_port).subscribe((data : any) =>{ + if(data!= null){ + this.bfdPropertyData = data; + this.bfdPropForm = this.formBuiler.group({ + interval_ms: [this.bfdPropertyData.properties['interval_ms'], Validators.min(0)], + multiplier: [this.bfdPropertyData.properties['multiplier'],Validators.min(0)] + }); + } + else{ + this.bfdPropertyData = {}; + this.bfdPropForm = this.formBuiler.group({ + interval_ms: [0, Validators.min(0)], + multiplier:[0,Validators.min(0)] + }); + } + + },error=>{ + this.toastr.error(MessageObj.no_cost_data_returned,'Error'); + }); }else{ this.loaderService.hide(); this.toastr.error(MessageObj.no_isl,'Error'); @@ -677,8 +699,7 @@ get f() { if (formdata.timezone == "UTC") { convertedStartDate = moment(new Date(formdata.fromDate)).add(-60, 'seconds').format("YYYY-MM-DD-HH:mm:ss"); - convertedEndDate = moment(new Date(formdata.toDate)).add(60, 'seconds').format("YYYY-MM-DD-HH:mm:ss"); - + convertedEndDate = moment(new Date(formdata.toDate)).add(60, 'seconds').format("YYYY-MM-DD-HH:mm:ss"); } @@ -789,10 +810,10 @@ get f() { this.loaderService.hide(); if(this.filterForm.value.graph_type === 'stackedgraph'){ this.islDataService.changeIslFlowStackedGraph(this.currentGraphData); - this.islDataService.IslFlowStackedGraph.subscribe(message => this.message = message); + this.islDataService.IslFlowStackedGraph.subscribe(message => this.message = message); }else{ - this.islDataService.changeIslFlowGraph(this.currentGraphData); - this.islDataService.IslFlowGraph.subscribe(message => this.message = message); + 
this.islDataService.changeIslFlowGraph(this.currentGraphData); + this.islDataService.IslFlowGraph.subscribe(message => this.message = message); } } @@ -829,7 +850,6 @@ get f() { if (formdata.timezone == "UTC") { convertedStartDate = moment(new Date(formdata.fromDate)).add(-60, 'seconds').format("YYYY-MM-DD-HH:mm:ss"); convertedEndDate = moment(new Date(formdata.toDate)).add(60, 'seconds').format("YYYY-MM-DD-HH:mm:ss"); - } @@ -966,11 +986,49 @@ get f() { }); } - deleteISL(){ + editBFDProperties(){ + this.isBFDEdit = true; + } + + updateBFDProperties(){ + if (this.bfdPropForm.invalid) { + this.toastr.error("Please enter valid value for Interval and multiplier."); + return; + } + + const modalRef = this.modalService.open(ModalconfirmationComponent); + modalRef.componentInstance.title = "Confirmation"; + modalRef.componentInstance.content = 'Are you sure you want to change the BFD Properties'; + + modalRef.result.then((response) => { + if(response && response == true){ + this.loaderService.show(MessageObj.updating_bfd_properties); + let interval_ms = this.bfdPropForm.value.interval_ms; + let multiplier = this.bfdPropForm.value.multiplier; + var data = {interval_ms:interval_ms,multiplier:multiplier}; + this.islListService.updateLinkBFDProperties(data,this.src_switch, this.src_port, this.dst_switch, this.dst_port).subscribe((response: any) => { + this.bfdPropertyData = response; + this.loaderService.hide(); + this.toastr.success(MessageObj.updating_bfd_properties_success,'Success'); + this.isBFDEdit = false; + },error => { + this.loaderService.hide(); + this.isBFDEdit = false; + if(error.status == '500'){ + this.toastr.error(error.error['error-message'],'Error! '); + }else{ + this.toastr.error(MessageObj.updating_bfd_properties_error,'Error'); + } + }) + } + }); + } + deleteBFDProperties(){ + let is2FaEnabled = localStorage.getItem('is2FaEnabled') var self = this; const modalReff = this.modalService.open(ModalconfirmationComponent); - modalReff.componentInstance.title = "Delete ISL"; + modalReff.componentInstance.title = "Delete BFD Properties"; modalReff.componentInstance.content = 'Are you sure you want to perform delete action ?'; modalReff.result.then((response) => { @@ -980,6 +1038,56 @@ get f() { modalRef.componentInstance.emitService.subscribe( otp => { + if (otp) { + this.loaderService.show(MessageObj.delete_bfd_properties); + var data = { + src_switch:this.src_switch, + src_port:this.src_port, + dst_switch:this.dst_switch, + dst_port:this.dst_port, + code:otp + } + this.modalService.dismissAll(); + this.islListService.deleteLinkBFDProperties(data,response => { + this.toastr.success(MessageObj.BFD_properties_deleted, "Success!"); + this.loaderService.hide(); + }, error => { + this.loaderService.hide(); + var message = (error && error['error-auxiliary-message']) ? 
error['error-auxiliary-message'] :MessageObj.error_BFD_properties_delete; + this.toastr.error(message, "Error!"); + }) + } else { + this.toastr.error(MessageObj.otp_not_detected, "Error!"); + } + }, + error => { + } + ); + }else{ + const modalRef2 = this.modalService.open(ModalComponent); + modalRef2.componentInstance.title = "Warning"; + modalRef2.componentInstance.content = MessageObj.delete_isl_bfd_not_authorised; + } + } + }); + } + + cancelEditBFDProperties(){ + this.isBFDEdit = false; + } + deleteISL(){ + let is2FaEnabled = localStorage.getItem('is2FaEnabled') + var self = this; + const modalReff = this.modalService.open(ModalconfirmationComponent); + modalReff.componentInstance.title = "Delete ISL"; + modalReff.componentInstance.content = 'Are you sure you want to perform delete action ?'; + + modalReff.result.then((response) => { + if(response && response == true){ + if(is2FaEnabled == 'true'){ + const modalRef = this.modalService.open(OtpComponent); + modalRef.componentInstance.emitService.subscribe( + otp => { if (otp) { this.loaderService.show(MessageObj.deleting_isl); var data = { diff --git a/src-gui/ui/src/app/modules/settings/session/session.component.html b/src-gui/ui/src/app/modules/settings/session/session.component.html index 1178d23ad75..f8ee1b853a2 100644 --- a/src-gui/ui/src/app/modules/settings/session/session.component.html +++ b/src-gui/ui/src/app/modules/settings/session/session.component.html @@ -1,7 +1,30 @@
[Template diff for src-gui/ui/src/app/modules/settings/session/session.component.html: the markup was lost during extraction and is not reproducible here. The surviving fragments show the settings page reordered into SWITCH NAME SETTINGS, USER SETTINGS and SESSION SETTINGS blocks, with new edit/save/cancel controls for the invalid login attempt and user account unlock time settings.]
\ No newline at end of file diff --git a/src-gui/ui/src/app/modules/settings/session/session.component.ts b/src-gui/ui/src/app/modules/settings/session/session.component.ts index d846feb7cfd..eca0477fbf7 100644 --- a/src-gui/ui/src/app/modules/settings/session/session.component.ts +++ b/src-gui/ui/src/app/modules/settings/session/session.component.ts @@ -16,11 +16,17 @@ import { MessageObj } from 'src/app/common/constants/constants'; export class SessionComponent implements OnInit, AfterViewInit, OnChanges,DoCheck { sessionForm: FormGroup; switchNameSourceForm:FormGroup; + userUnlockForm:FormGroup; + loginAttemptForm:FormGroup; switchNameSourceTypes:any; isEdit = false; isSwitchNameSourcEdit = false; + isloginAttemptEdit = false; + isUserUnlockEdit = false; initialVal = null; initialNameSource = null; + intialLoginAttemptValue = null; + initialUserUnlockValue = null; constructor( private formBuilder: FormBuilder, private commonService: CommonService, @@ -47,6 +53,15 @@ export class SessionComponent implements OnInit, AfterViewInit, OnChanges,DoChec switch_name_source: ["FILE_STORAGE"] }); this.switchNameSourceForm.disable(); + + this.loginAttemptForm = this.formBuilder.group({ + login_attempt: [""] + }); + this.loginAttemptForm.disable(); + this.userUnlockForm = this.formBuilder.group({ + user_unlock_time: [""] + }); + this.userUnlockForm.disable(); } @@ -59,6 +74,10 @@ export class SessionComponent implements OnInit, AfterViewInit, OnChanges,DoChec this.switchNameSourceTypes = responseList[1]; this.switchNameSourceForm.setValue({"switch_name_source":settings['SWITCH_NAME_STORAGE_TYPE'] || 'FILE_STORAGE'}); this.initialNameSource = settings['SWITCH_NAME_STORAGE_TYPE'] || 'FILE_STORAGE'; + this.intialLoginAttemptValue = settings['INVALID_LOGIN_ATTEMPT'] || 5; + this.loginAttemptForm.setValue({"login_attempt":settings['INVALID_LOGIN_ATTEMPT']}); + this.initialUserUnlockValue = settings['USER_ACCOUNT_UNLOCK_TIME'] || 60; + this.userUnlockForm.setValue({"user_unlock_time":settings['USER_ACCOUNT_UNLOCK_TIME']}); this.loaderService.hide(); },error=>{ var errorMsg = error && error.error && error.error['error-auxiliary-message'] ? error.error['error-auxiliary-message']:'Api response error'; @@ -165,4 +184,78 @@ export class SessionComponent implements OnInit, AfterViewInit, OnChanges,DoChec this.switchNameSourceForm.disable(); } + saveUserUnlockTimeSetting(){ + let unlock_time = this.userUnlockForm.controls['user_unlock_time'].value; + if(unlock_time == ''){ + return false; + } + const modalReff = this.modalService.open(ModalconfirmationComponent); + modalReff.componentInstance.title = "Confirmation"; + modalReff.componentInstance.content = 'Are you sure you want to save user unlock time settings ?'; + modalReff.result.then((response) => { + if(response && response == true){ + this.loaderService.show(MessageObj.saving_unlock_time_setting); + this.commonService.saveUserAccountUnlockTimeSettings(unlock_time).subscribe((response)=>{ + this.toastrService.success(MessageObj.user_unlock_time_setting_saved,'Success'); + this.loaderService.hide(); + this.initialUserUnlockValue = this.userUnlockForm.controls['user_unlock_time'].value; + this.isUserUnlockEdit = false; + this.userUnlockForm.disable(); + },error=>{ + var errorMsg = error && error.error && error.error['error-auxiliary-message'] ? 
error.error['error-auxiliary-message']:'Unable to save'; + this.toastrService.error(errorMsg,'Error'); + this.loaderService.hide(); + }); + } + }); + } + + editUserUnlockTimeSetting(){ + this.isUserUnlockEdit = true; + this.userUnlockForm.enable(); + } + + cancelUserUnlockTimeSetting(){ + this.userUnlockForm.setValue({"user_unlock_time":this.initialUserUnlockValue}); + this.isUserUnlockEdit = false; + this.userUnlockForm.disable(); + } + + + saveLoginAttemptSetting(){ + let login_attempt = this.loginAttemptForm.controls['login_attempt'].value; + if(login_attempt == ''){ + return false; + } + const modalReff = this.modalService.open(ModalconfirmationComponent); + modalReff.componentInstance.title = "Confirmation"; + modalReff.componentInstance.content = 'Are you sure you want to save user login attempt settings ?'; + modalReff.result.then((response) => { + if(response && response == true){ + this.loaderService.show(MessageObj.saving_login_attempt_setting); + this.commonService.saveInvalidLoginAttemptSettings(login_attempt).subscribe((response)=>{ + this.toastrService.success(MessageObj.user_login_attempt_setting_saved,'Success'); + this.loaderService.hide(); + this.intialLoginAttemptValue = this.loginAttemptForm.controls['login_attempt'].value; + this.isloginAttemptEdit = false; + this.loginAttemptForm.disable(); + },error=>{ + var errorMsg = error && error.error && error.error['error-auxiliary-message'] ? error.error['error-auxiliary-message']:'Unable to save'; + this.toastrService.error(errorMsg,'Error'); + this.loaderService.hide(); + }); + } + }); + } + + editLoginAttemptSetting(){ + this.isloginAttemptEdit = true; + this.loginAttemptForm.enable(); + } + + cancelLoginAttemptSetting(){ + this.loginAttemptForm.setValue({"login_attempt":this.intialLoginAttemptValue}); + this.isloginAttemptEdit = false; + this.loginAttemptForm.disable(); + } } diff --git a/src-gui/ui/src/app/modules/usermanagement/users/user-list/user-list.component.html b/src-gui/ui/src/app/modules/usermanagement/users/user-list/user-list.component.html index 1e03ecef1a5..7614a06db89 100644 --- a/src-gui/ui/src/app/modules/usermanagement/users/user-list/user-list.component.html +++ b/src-gui/ui/src/app/modules/usermanagement/users/user-list/user-list.component.html @@ -35,6 +35,7 @@ + diff --git a/src-gui/ui/src/app/modules/usermanagement/users/user-list/user-list.component.ts b/src-gui/ui/src/app/modules/usermanagement/users/user-list/user-list.component.ts index 17af78ef9a3..784dc0b4511 100644 --- a/src-gui/ui/src/app/modules/usermanagement/users/user-list/user-list.component.ts +++ b/src-gui/ui/src/app/modules/usermanagement/users/user-list/user-list.component.ts @@ -210,6 +210,27 @@ export class UserListComponent implements OnDestroy, OnInit, AfterViewInit{ }); } + /* + Method: unblockUser + Description: Unblock a user if blocked by failed login attempts + */ + unblockUser(id){ + const modalRef = this.modalService.open(ModalconfirmationComponent); + modalRef.componentInstance.title = "Confirmation"; + modalRef.componentInstance.content = 'Are you sure you want to unblock this user ?'; + + modalRef.result.then((response) => { + if(response && response == true){ + this.loaderService.show(MessageObj.unblok_user); + this.userService.unblockUser(id).subscribe(user => { + this.toastr.success(MessageObj.user_unblocked,'Success') + this.getUsers(); + + }); + } + }); + } + /* Method: resetpassword Description: Reset the user password and send an email with updated imformation. 
diff --git a/src-java/base-topology/base-storm-topology/build.gradle b/src-java/base-topology/base-storm-topology/build.gradle index d0ade8af6c4..da3b66eb805 100644 --- a/src-java/base-topology/base-storm-topology/build.gradle +++ b/src-java/base-topology/base-storm-topology/build.gradle @@ -1,6 +1,12 @@ buildscript { + repositories { + maven { url 'https://jitpack.io' } + } dependencies { - classpath 'org.anarres.jarjar:jarjar-gradle:1.0.1' + // Not compatible with Gradle 7. See https://github.com/shevek/jarjar/issues/22 + //classpath 'org.anarres.jarjar:jarjar-gradle:1.0.1' + // So build the plugin from the source at the version which supports Gradle 7. + classpath 'com.github.shevek.jarjar:jarjar-gradle:9a7eca72f9' } } @@ -40,11 +46,11 @@ dependencies { from 'org.apache.storm:flux-core:1.2.1' // Remove bundled libraries as they conflict with other dependencies. - classDelete "org.apache.commons.**" - classDelete "org.apache.http.**" - classDelete "org.apache.storm.flux.wrappers.**" - classDelete "org.apache.thrift.**" - classDelete "org.yaml.**" + classDelete 'org.apache.commons.**' + classDelete 'org.apache.http.**' + classDelete 'org.apache.storm.flux.wrappers.**' + classDelete 'org.apache.thrift.**' + classDelete 'org.yaml.**' } api('org.squirrelframework:squirrel-foundation') { @@ -65,7 +71,7 @@ dependencies { api 'org.hibernate.validator:hibernate-validator' runtimeOnly 'org.glassfish:javax.el' - implementation 'org.aspectj:aspectjrt' + api 'org.aspectj:aspectjrt' implementation 'org.mapstruct:mapstruct' implementation 'org.mapstruct:mapstruct-processor' annotationProcessor 'org.mapstruct:mapstruct-processor' @@ -100,7 +106,7 @@ dependencies { } sourceSets { - release { + releaseResources { resources { srcDir 'src/release/resources' } @@ -121,7 +127,7 @@ task testJar(type: Jar) { task releaseJar(type: Jar) { dependsOn processResources classifier 'release' - from sourceSets.release.output + from sourceSets.releaseResources.output } artifacts { @@ -130,5 +136,5 @@ artifacts { } buildAndCopyArtifacts { - from("${project.buildDir}/resources/release/topology.properties") { into "${project.name}/resources" } + from("${project.buildDir}/resources/releaseResources/topology.properties") { into "${project.name}/resources" } } diff --git a/src-java/build.gradle b/src-java/build.gradle index e233ed42e88..8338c786688 100644 --- a/src-java/build.gradle +++ b/src-java/build.gradle @@ -5,10 +5,10 @@ import java.text.DateFormat import java.util.jar.JarFile plugins { - id 'org.sonarqube' version '3.1.1' - id 'org.ajoberstar.grgit' version '4.0.2' apply false - id 'com.github.johnrengelman.shadow' version '6.1.0' apply false - id 'io.freefair.aspectj.post-compile-weaving' version '5.3.0' apply false + id 'org.sonarqube' version '3.3' + id 'org.ajoberstar.grgit' version '4.1.1' apply false + id 'com.github.johnrengelman.shadow' version '7.1.1' apply false + id 'io.freefair.aspectj.post-compile-weaving' version '6.3.0' apply false } allprojects { @@ -33,7 +33,7 @@ subprojects { repositories { mavenCentral() - maven { url "https://clojars.org/repo" } + maven { url 'https://clojars.org/repo' } } dependencies { @@ -178,7 +178,7 @@ subprojects { } checkstyle { - toolVersion '8.41' + toolVersion '9.2' configDirectory = rootProject.file('checkstyle') configProperties = [ 'checkstyle.suppression.file': project.file('src/checkstyle/checkstyle-suppressions.xml'), @@ -194,25 +194,26 @@ subprojects { } dependencies { - checkstyle 'com.puppycrawl.tools:checkstyle:8.41' + checkstyle 
'com.puppycrawl.tools:checkstyle:9.2' } } + /*TODO: not compatible with Gradle 7+, need another way [checkstyleMain, checkstyleTest].each { task -> task.logging.setLevel(LogLevel.LIFECYCLE) - } + }*/ jacoco { - toolVersion = '0.8.6' + toolVersion = '0.8.7' } jacocoTestReport { dependsOn test reports { - xml.enabled true - csv.enabled false - html.enabled true + xml.required = true + csv.required = false + html.required = true } } @@ -239,22 +240,25 @@ subprojects { doFirst { Map classes = new HashMap<>() getIncludedDependencies().each { jar -> - JarFile jf = new JarFile(jar) - jf.entries().each { file -> - // TODO: it's good to reuse shadowJar plugin's patterns - if (file.name.endsWith('.class') && file.name != 'module-info.class') { - if (classes.containsKey(file.name)) { - throw new GradleException("shadowJar failure: ${file.name} conflicts in jars:${classes.get(file.name)} and ${jar}") + if(jar.isFile()) { + JarFile jf = new JarFile(jar) + jf.entries().each { file -> + // TODO: it's good to reuse shadowJar plugin's patterns + if (file.name.endsWith('.class') && file.name != 'module-info.class') { + if (classes.containsKey(file.name)) { + throw new GradleException("shadowJar failure: ${file.name} conflicts in jars:${classes.get(file.name)} and ${jar}") + } + classes.put(file.name, jar) } - classes.put(file.name, jar) } + jf.close() } - jf.close() } } } } +//TODO: the task handles Floodlight build only, should we rename the task? task buildAndCopyArtifacts(type: Copy) { onlyIf { project.hasProperty('destPath') diff --git a/src-java/checkstyle/checkstyle.xml b/src-java/checkstyle/checkstyle.xml index 127de3a77a1..15f53c5f467 100644 --- a/src-java/checkstyle/checkstyle.xml +++ b/src-java/checkstyle/checkstyle.xml @@ -1,7 +1,7 @@ + "-//Checkstyle//DTD Checkstyle Configuration 1.3//EN" + "https://checkstyle.org/dtds/configuration_1_3.dtd"> - + - + @@ -144,13 +144,13 @@ - + - + @@ -253,6 +253,7 @@ PARAMETER_DEF, VARIABLE_DEF, METHOD_DEF, PATTERN_VARIABLE_DEF, RECORD_DEF, RECORD_COMPONENT_DEF"/> + @@ -269,7 +270,8 @@ + value="COMMA, SEMI, POST_INC, POST_DEC, DOT, + LABELED_STAT, METHOD_REF"/> @@ -284,12 +286,14 @@ + LT, MINUS, MOD, NOT_EQUAL, PLUS, QUESTION, SL, SR, STAR, METHOD_REF, + TYPE_EXTENSION_AND "/> + value="CLASS_DEF, INTERFACE_DEF, ENUM_DEF, METHOD_DEF, CTOR_DEF, + RECORD_DEF, COMPACT_CTOR_DEF"/> @@ -304,13 +308,14 @@ value="^@return the *|^This method returns |^A [{]@code [a-zA-Z0-9]+[}]( is a )"/> + - + + convertToGroupSpeakerData(OFGroupDescStatsReply statsReply) { List commandData = new ArrayList<>(); for (OFGroupDescStatsEntry entry : statsReply.getEntries()) { + GroupId groupId = new GroupId(entry.getGroup().getGroupNumber()); + GroupType type = fromOfGroupType(entry.getGroupType()); + List buckets = new ArrayList<>(); + List ofBuckets = entry.getBuckets(); + for (OFBucket bucket : ofBuckets) { + buckets.add(fromOfBucket(bucket)); + } + commandData.add(GroupSpeakerData.builder().groupId(groupId).type(type).buckets(buckets).build()); } return commandData; } @@ -87,6 +101,16 @@ private OFBucket toOfBucket(OFFactory ofFactory, Bucket bucket) { return builder.build(); } + private Bucket fromOfBucket(OFBucket ofBucket) { + WatchGroup watchGroup = fromOfGroup(ofBucket.getWatchGroup()); + WatchPort watchPort = fromOfPort(ofBucket.getWatchPort()); + Set actions = new HashSet<>(); + for (OFAction ofAction : ofBucket.getActions()) { + actions.add(OfInstructionsConverter.INSTANCE.convertToRuleManagerAction(ofAction)); + } + return 
Bucket.builder().watchGroup(watchGroup).watchPort(watchPort).writeActions(actions).build(); + } + private OFPort toOfWatchPort(WatchPort watchPort) { switch (watchPort) { case ANY: @@ -97,7 +121,7 @@ private OFPort toOfWatchPort(WatchPort watchPort) { } private WatchPort fromOfPort(OFPort watchPort) { - if (watchPort.equals(OFGroup.ANY)) { + if (watchPort.equals(OFPort.ANY)) { return WatchPort.ANY; } else { throw new IllegalArgumentException(String.format("Unknown watch port %s", watchPort)); diff --git a/src-java/floodlight-service/floodlight-modules/src/test/java/org/openkilda/floodlight/converter/rulemanager/OfGroupConverterTest.java b/src-java/floodlight-service/floodlight-modules/src/test/java/org/openkilda/floodlight/converter/rulemanager/OfGroupConverterTest.java new file mode 100644 index 00000000000..cd84e67f136 --- /dev/null +++ b/src-java/floodlight-service/floodlight-modules/src/test/java/org/openkilda/floodlight/converter/rulemanager/OfGroupConverterTest.java @@ -0,0 +1,159 @@ +/* Copyright 2022 Telstra Open Source + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.openkilda.floodlight.converter.rulemanager; + +import static org.junit.Assert.assertEquals; + +import org.openkilda.model.GroupId; +import org.openkilda.rulemanager.GroupSpeakerData; +import org.openkilda.rulemanager.ProtoConstants.PortNumber; +import org.openkilda.rulemanager.action.PortOutAction; +import org.openkilda.rulemanager.group.Bucket; +import org.openkilda.rulemanager.group.GroupType; +import org.openkilda.rulemanager.group.WatchGroup; +import org.openkilda.rulemanager.group.WatchPort; + +import com.google.common.collect.Sets; +import org.junit.Test; +import org.projectfloodlight.openflow.protocol.OFBucket; +import org.projectfloodlight.openflow.protocol.OFGroupAdd; +import org.projectfloodlight.openflow.protocol.OFGroupDelete; +import org.projectfloodlight.openflow.protocol.OFGroupDescStatsEntry; +import org.projectfloodlight.openflow.protocol.OFGroupDescStatsReply.Builder; +import org.projectfloodlight.openflow.protocol.OFGroupType; +import org.projectfloodlight.openflow.protocol.action.OFAction; +import org.projectfloodlight.openflow.protocol.ver13.OFFactoryVer13; +import org.projectfloodlight.openflow.types.OFGroup; +import org.projectfloodlight.openflow.types.OFPort; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +public class OfGroupConverterTest { + + private static final int GROUP_ID = 12; + + private List getActions(OFFactoryVer13 factory, int portNumber) { + List list = new ArrayList<>(); + list.add(factory.actions().buildOutput().setPort(OFPort.of(portNumber)).build()); + return list; + } + + private OFGroupDescStatsEntry getOfGroupEntry(OFFactoryVer13 factory) { + OFGroupDescStatsEntry.Builder entryBuilder = factory.buildGroupDescStatsEntry(); + List buckets = new ArrayList<>(); + buckets.add(factory.buildBucket().setWatchPort(OFPort.ANY) + .setWatchGroup(OFGroup.ALL) + .setActions(getActions(factory, 1)) + 
.build()); + + buckets.add(factory.buildBucket().setWatchPort(OFPort.ANY) + .setWatchGroup(OFGroup.ALL) + .setActions(getActions(factory, 2)) + .build()); + + return entryBuilder.setGroup(OFGroup.of(GROUP_ID)) + .setGroupType(OFGroupType.ALL) + .setBuckets(buckets).build(); + } + + @Test + public void testConvertToGroupSpeakerData() { + OFFactoryVer13 factory = new OFFactoryVer13(); + + Builder builder = factory.buildGroupDescStatsReply(); + List entries = new ArrayList<>(); + entries.add(getOfGroupEntry(factory)); + builder.setEntries(entries); + + List groupSpeakerDataList = OfGroupConverter.INSTANCE.convertToGroupSpeakerData( + builder.build()); + assertEquals(1, groupSpeakerDataList.size()); + GroupSpeakerData groupSpeakerData = groupSpeakerDataList.get(0); + assertEquals(new GroupId(GROUP_ID), groupSpeakerData.getGroupId()); + assertEquals(GroupType.ALL, groupSpeakerData.getType()); + List buckets = groupSpeakerData.getBuckets(); + + Set expectedBuckets = new HashSet<>(); + expectedBuckets.add(Bucket.builder().watchPort(WatchPort.ANY).watchGroup(WatchGroup.ALL) + .writeActions(Sets.newHashSet(new PortOutAction(new PortNumber(2, null)))).build()); + + expectedBuckets.add(Bucket.builder().watchPort(WatchPort.ANY).watchGroup(WatchGroup.ALL) + .writeActions(Sets.newHashSet(new PortOutAction(new PortNumber(1, null)))).build()); + assertEquals(expectedBuckets, new HashSet<>(buckets)); + } + + @Test + public void testConvertInstallGroupCommand() { + List buckets = new ArrayList<>(); + buckets.add(Bucket.builder().watchPort(WatchPort.ANY).watchGroup(WatchGroup.ALL) + .writeActions(Sets.newHashSet(new PortOutAction(new PortNumber(2, null)))).build()); + + buckets.add(Bucket.builder().watchPort(WatchPort.ANY).watchGroup(WatchGroup.ALL) + .writeActions(Sets.newHashSet(new PortOutAction(new PortNumber(1, null)))).build()); + + + GroupSpeakerData groupSpeakerData = GroupSpeakerData.builder().groupId(new GroupId(GROUP_ID)) + .type(GroupType.ALL) + .buckets(buckets) + .build(); + OFFactoryVer13 factory = new OFFactoryVer13(); + + OFGroupAdd ofGroupAdd = OfGroupConverter.INSTANCE.convertInstallGroupCommand(groupSpeakerData, factory); + + assertEquals(OFGroup.of(GROUP_ID), ofGroupAdd.getGroup()); + assertEquals(OFGroupType.ALL, ofGroupAdd.getGroupType()); + assertEquals(2, ofGroupAdd.getBuckets().size()); + + List expectedBuckets = new ArrayList<>(); + expectedBuckets.add(factory.buildBucket().setWatchPort(OFPort.ANY) + .setWatchGroup(OFGroup.ALL) + .setActions(getActions(factory, 2)) + .build()); + + expectedBuckets.add(factory.buildBucket().setWatchPort(OFPort.ANY) + .setWatchGroup(OFGroup.ALL) + .setActions(getActions(factory, 1)) + .build()); + assertEquals(expectedBuckets, ofGroupAdd.getBuckets()); + } + + @Test + public void testConvertDeleteGroupCommand() { + List buckets = new ArrayList<>(); + buckets.add(Bucket.builder().watchPort(WatchPort.ANY).watchGroup(WatchGroup.ALL) + .writeActions(Sets.newHashSet(new PortOutAction(new PortNumber(2, null)))).build()); + + buckets.add(Bucket.builder().watchPort(WatchPort.ANY).watchGroup(WatchGroup.ALL) + .writeActions(Sets.newHashSet(new PortOutAction(new PortNumber(1, null)))).build()); + + + GroupSpeakerData groupSpeakerData = GroupSpeakerData.builder().groupId(new GroupId(GROUP_ID)) + .type(GroupType.ALL) + .buckets(buckets) + .build(); + OFFactoryVer13 factory = new OFFactoryVer13(); + + OFGroupDelete ofGroupDelete = OfGroupConverter.INSTANCE.convertDeleteGroupCommand(groupSpeakerData, factory); + + + assertEquals(OFGroup.of(GROUP_ID), 
ofGroupDelete.getGroup()); + assertEquals(OFGroupType.ALL, ofGroupDelete.getGroupType()); + } + +} diff --git a/src-java/floodlightrouter-topology/floodlightrouter-storm-topology/build.gradle b/src-java/floodlightrouter-topology/floodlightrouter-storm-topology/build.gradle index 8fa3ef9b569..70b77e68b5c 100644 --- a/src-java/floodlightrouter-topology/floodlightrouter-storm-topology/build.gradle +++ b/src-java/floodlightrouter-topology/floodlightrouter-storm-topology/build.gradle @@ -9,13 +9,13 @@ compileJava.ajc.options.compilerArgs += '-verbose' description = 'Floodlight Router Storm Topology' dependencies { implementation project(':base-storm-topology') - runtimeClasspath project(path: ':base-storm-topology', configuration: 'releaseArtifacts') + runtimeOnly project(path: ':base-storm-topology', configuration: 'releaseArtifacts') testImplementation project(path: ':base-storm-topology', configuration: 'testArtifacts') testImplementation project(':kilda-utils:stubs') implementation project(':floodlight-api') implementation project(':blue-green') - runtimeClasspath project(':kilda-persistence-orientdb') - runtimeClasspath project(':kilda-persistence-hibernate') + runtimeOnly project(':kilda-persistence-orientdb') + runtimeOnly project(':kilda-persistence-hibernate') testImplementation project(path: ':kilda-persistence-tinkerpop', configuration: 'testArtifacts') implementation 'org.mapstruct:mapstruct' diff --git a/src-java/flowhs-topology/flowhs-messaging/src/main/java/org/openkilda/messaging/command/flow/FlowPathSwapRequest.java b/src-java/flowhs-topology/flowhs-messaging/src/main/java/org/openkilda/messaging/command/flow/FlowPathSwapRequest.java index 5c8a27fc7ee..386d6e1e91a 100644 --- a/src-java/flowhs-topology/flowhs-messaging/src/main/java/org/openkilda/messaging/command/flow/FlowPathSwapRequest.java +++ b/src-java/flowhs-topology/flowhs-messaging/src/main/java/org/openkilda/messaging/command/flow/FlowPathSwapRequest.java @@ -31,10 +31,13 @@ public class FlowPathSwapRequest extends CommandData { private static final long serialVersionUID = 1L; @JsonProperty("flow_id") - protected String flowId; + String flowId; + boolean manual; @JsonCreator - public FlowPathSwapRequest(@NonNull @JsonProperty("flow_id") String flowId) { + public FlowPathSwapRequest(@NonNull @JsonProperty("flow_id") String flowId, + @JsonProperty("manual") boolean manual) { this.flowId = flowId; + this.manual = manual; } } diff --git a/src-java/flowhs-topology/flowhs-storm-topology/build.gradle b/src-java/flowhs-topology/flowhs-storm-topology/build.gradle index 47963d63985..70f15816cce 100644 --- a/src-java/flowhs-topology/flowhs-storm-topology/build.gradle +++ b/src-java/flowhs-topology/flowhs-storm-topology/build.gradle @@ -17,15 +17,15 @@ dependencies { implementation project(':base-storm-topology') implementation project(':stats-messaging') aspect project(':base-storm-topology') - runtimeClasspath project(path: ':base-storm-topology', configuration: 'releaseArtifacts') + runtimeOnly project(path: ':base-storm-topology', configuration: 'releaseArtifacts') testImplementation project(path: ':base-storm-topology', configuration: 'testArtifacts') implementation project(':kilda-pce') implementation project(':rule-manager-api') implementation project(':rule-manager-implementation') implementation project(':kilda-reporting') implementation project(':floodlight-api') - runtimeClasspath project(':kilda-persistence-orientdb') - runtimeClasspath project(':kilda-persistence-hibernate') + runtimeOnly 
project(':kilda-persistence-orientdb') + runtimeOnly project(':kilda-persistence-hibernate') testImplementation project(path: ':kilda-persistence-api', configuration: 'testArtifacts') testImplementation project(path: ':kilda-persistence-tinkerpop', configuration: 'testArtifacts') diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/FlowHsTopology.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/FlowHsTopology.java index 065f7a7a2f3..3206852ac61 100644 --- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/FlowHsTopology.java +++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/FlowHsTopology.java @@ -295,7 +295,9 @@ private void flowSwapProtectedHub(TopologyBuilder topologyBuilder, PersistenceMa .build(); FlowResourcesConfig flowResourcesConfig = configurationProvider.getConfiguration(FlowResourcesConfig.class); - FlowPathSwapHubBolt hubBolt = new FlowPathSwapHubBolt(config, persistenceManager, flowResourcesConfig); + RuleManagerConfig ruleManagerConfig = configurationProvider.getConfiguration(RuleManagerConfig.class); + FlowPathSwapHubBolt hubBolt = new FlowPathSwapHubBolt(config, persistenceManager, flowResourcesConfig, + ruleManagerConfig); declareBolt(topologyBuilder, hubBolt, ComponentId.FLOW_PATH_SWAP_HUB.name()) .fieldsGrouping(ComponentId.FLOW_ROUTER_BOLT.name(), ROUTER_TO_FLOW_PATH_SWAP_HUB.name(), FLOW_FIELD) .directGrouping(ComponentId.FLOW_PATH_SWAP_SPEAKER_WORKER.name(), diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/bolts/FlowPathSwapHubBolt.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/bolts/FlowPathSwapHubBolt.java index 4f3ec35e032..eebafdc93c5 100644 --- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/bolts/FlowPathSwapHubBolt.java +++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/bolts/FlowPathSwapHubBolt.java @@ -26,7 +26,7 @@ import org.openkilda.bluegreen.LifecycleEvent; import org.openkilda.floodlight.api.request.SpeakerRequest; -import org.openkilda.floodlight.api.response.SpeakerFlowSegmentResponse; +import org.openkilda.floodlight.api.response.SpeakerResponse; import org.openkilda.messaging.Message; import org.openkilda.messaging.command.CommandData; import org.openkilda.messaging.command.CommandMessage; @@ -35,6 +35,9 @@ import org.openkilda.messaging.info.InfoMessage; import org.openkilda.messaging.info.reroute.PathSwapResult; import org.openkilda.persistence.PersistenceManager; +import org.openkilda.rulemanager.RuleManager; +import org.openkilda.rulemanager.RuleManagerConfig; +import org.openkilda.rulemanager.RuleManagerImpl; import org.openkilda.wfm.error.PipelineException; import org.openkilda.wfm.share.flow.resources.FlowResourcesConfig; import org.openkilda.wfm.share.flow.resources.FlowResourcesManager; @@ -51,6 +54,7 @@ import lombok.Builder; import lombok.Getter; +import lombok.NonNull; import org.apache.storm.topology.OutputFieldsDeclarer; import org.apache.storm.tuple.Fields; import org.apache.storm.tuple.Tuple; @@ -60,18 +64,21 @@ public class FlowPathSwapHubBolt extends HubBolt implements FlowPathSwapHubCarri private final FlowPathSwapConfig config; private final FlowResourcesConfig flowResourcesConfig; + private final RuleManagerConfig 
ruleManagerConfig; private transient FlowPathSwapService service; private String currentKey; private LifecycleEvent deferredShutdownEvent; - public FlowPathSwapHubBolt(FlowPathSwapConfig config, PersistenceManager persistenceManager, - FlowResourcesConfig flowResourcesConfig) { + public FlowPathSwapHubBolt(@NonNull FlowPathSwapConfig config, @NonNull PersistenceManager persistenceManager, + @NonNull FlowResourcesConfig flowResourcesConfig, + @NonNull RuleManagerConfig ruleManagerConfig) { super(persistenceManager, config); this.config = config; this.flowResourcesConfig = flowResourcesConfig; + this.ruleManagerConfig = ruleManagerConfig; enableMeterRegistry("kilda.flow_pathswap", HUB_TO_METRICS_BOLT.name()); } @@ -79,7 +86,8 @@ public FlowPathSwapHubBolt(FlowPathSwapConfig config, PersistenceManager persist @Override protected void init() { FlowResourcesManager resourcesManager = new FlowResourcesManager(persistenceManager, flowResourcesConfig); - service = new FlowPathSwapService(this, persistenceManager, + RuleManager ruleManager = new RuleManagerImpl(ruleManagerConfig); + service = new FlowPathSwapService(this, persistenceManager, ruleManager, config.getSpeakerCommandRetriesLimit(), resourcesManager); } @@ -108,12 +116,12 @@ protected void onRequest(Tuple input) throws PipelineException { protected void onWorkerResponse(Tuple input) { String operationKey = input.getStringByField(MessageKafkaTranslator.FIELD_ID_KEY); currentKey = KeyProvider.getParentKey(operationKey); - SpeakerFlowSegmentResponse flowResponse = (SpeakerFlowSegmentResponse) input.getValueByField(FIELD_ID_PAYLOAD); + SpeakerResponse flowResponse = (SpeakerResponse) input.getValueByField(FIELD_ID_PAYLOAD); service.handleAsyncResponse(currentKey, flowResponse); } @Override - public void onTimeout(String key, Tuple tuple) { + public void onTimeout(@NonNull String key, Tuple tuple) { currentKey = key; service.handleTimeout(key); } diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/common/FlowPathSwappingFsm.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/common/FlowPathSwappingFsm.java index 3f454ec3644..cc2fa021c4e 100644 --- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/common/FlowPathSwappingFsm.java +++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/common/FlowPathSwappingFsm.java @@ -18,8 +18,7 @@ import static java.util.Collections.emptyList; import org.openkilda.floodlight.api.request.factory.FlowSegmentRequestFactory; -import org.openkilda.floodlight.api.response.SpeakerFlowSegmentResponse; -import org.openkilda.floodlight.flow.response.FlowErrorResponse; +import org.openkilda.floodlight.api.response.SpeakerResponse; import org.openkilda.messaging.error.ErrorType; import org.openkilda.model.FlowPathStatus; import org.openkilda.model.PathId; @@ -76,8 +75,8 @@ public abstract class FlowPathSwappingFsm, S, protected final Map pendingCommands = new HashMap<>(); protected final Map retriedCommands = new HashMap<>(); - protected final Map failedCommands = new HashMap<>(); - protected final Map failedValidationResponses = new HashMap<>(); + protected final Map failedCommands = new HashMap<>(); + protected final Map failedValidationResponses = new HashMap<>(); protected final Map ingressCommands = new HashMap<>(); protected final Map nonIngressCommands = new HashMap<>(); @@ -105,6 +104,10 @@ public 
FlowSegmentRequestFactory getInstallCommand(UUID commandId) { return requestFactory; } + public FlowSegmentRequestFactory getRemoveCommand(UUID commandId) { + return removeCommands.get(commandId); + } + public abstract void fireNoPathFound(String errorReason); public void clearPendingCommands() { @@ -115,6 +118,10 @@ public Optional getPendingCommand(UUID key) { return Optional.ofNullable(pendingCommands.get(key)); } + public boolean hasPendingCommand(UUID key) { + return pendingCommands.containsKey(key); + } + public void addPendingCommand(UUID key, SwitchId switchId) { pendingCommands.put(key, switchId); } @@ -137,7 +144,7 @@ public void clearFailedCommands() { failedCommands.clear(); } - public void addFailedCommand(UUID key, FlowErrorResponse errorResponse) { + public void addFailedCommand(UUID key, SpeakerResponse errorResponse) { failedCommands.put(key, errorResponse); } diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/FlowPathSwapContext.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/FlowPathSwapContext.java index ed3440ba8cf..97e6f524a96 100644 --- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/FlowPathSwapContext.java +++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/FlowPathSwapContext.java @@ -15,19 +15,17 @@ package org.openkilda.wfm.topology.flowhs.fsm.pathswap; -import org.openkilda.floodlight.api.response.SpeakerFlowSegmentResponse; -import org.openkilda.wfm.topology.flowhs.fsm.common.FlowContext; +import org.openkilda.floodlight.api.response.SpeakerResponse; import lombok.Builder; import lombok.Data; -import lombok.EqualsAndHashCode; @Data -@EqualsAndHashCode(callSuper = true) -public class FlowPathSwapContext extends FlowContext { +public class FlowPathSwapContext { + private SpeakerResponse speakerResponse; @Builder - public FlowPathSwapContext(SpeakerFlowSegmentResponse speakerFlowResponse) { - super(speakerFlowResponse); + public FlowPathSwapContext(SpeakerResponse speakerResponse) { + this.speakerResponse = speakerResponse; } } diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/FlowPathSwapFsm.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/FlowPathSwapFsm.java index 933b54674ef..88457c73e9d 100644 --- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/FlowPathSwapFsm.java +++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/FlowPathSwapFsm.java @@ -15,7 +15,10 @@ package org.openkilda.wfm.topology.flowhs.fsm.pathswap; +import org.openkilda.floodlight.api.request.rulemanager.DeleteSpeakerCommandsRequest; +import org.openkilda.floodlight.api.request.rulemanager.InstallSpeakerCommandsRequest; import org.openkilda.persistence.PersistenceManager; +import org.openkilda.rulemanager.RuleManager; import org.openkilda.wfm.CommandContext; import org.openkilda.wfm.share.flow.resources.FlowResourcesManager; import org.openkilda.wfm.share.logger.FlowOperationsDashboardLogger; @@ -38,8 +41,10 @@ import org.openkilda.wfm.topology.flowhs.fsm.pathswap.actions.RemoveOldRulesAction; import org.openkilda.wfm.topology.flowhs.fsm.pathswap.actions.RevertNewRulesAction; import 
org.openkilda.wfm.topology.flowhs.fsm.pathswap.actions.RevertPathsSwapAction; +import org.openkilda.wfm.topology.flowhs.fsm.pathswap.actions.RevertYFlowRulesAction; import org.openkilda.wfm.topology.flowhs.fsm.pathswap.actions.UpdateFlowPathsAction; import org.openkilda.wfm.topology.flowhs.fsm.pathswap.actions.UpdateFlowStatusAction; +import org.openkilda.wfm.topology.flowhs.fsm.pathswap.actions.UpdateYFlowRulesAction; import org.openkilda.wfm.topology.flowhs.fsm.pathswap.actions.ValidateIngressRulesAction; import org.openkilda.wfm.topology.flowhs.service.FlowPathSwapHubCarrier; import org.openkilda.wfm.topology.flowhs.service.FlowProcessingEventListener; @@ -53,6 +58,10 @@ import org.squirrelframework.foundation.fsm.StateMachineBuilder; import org.squirrelframework.foundation.fsm.StateMachineBuilderFactory; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; +import java.util.UUID; import java.util.concurrent.TimeUnit; @Getter @@ -61,6 +70,9 @@ public final class FlowPathSwapFsm extends FlowPathSwappingFsm { + private final Map installSpeakerRequests = new HashMap<>(); + private final Map deleteSpeakerRequests = new HashMap<>(); + public FlowPathSwapFsm(@NonNull CommandContext commandContext, @NonNull FlowPathSwapHubCarrier carrier, @NonNull String flowId) { super(Event.NEXT, Event.ERROR, commandContext, carrier, flowId); @@ -71,18 +83,33 @@ public void fireNoPathFound(String errorReason) { } + public void addInstallSpeakerCommand(UUID key, InstallSpeakerCommandsRequest command) { + installSpeakerRequests.put(key, command); + } + + public Optional getInstallSpeakerCommand(UUID key) { + return Optional.ofNullable(installSpeakerRequests.get(key)); + } + + public void addDeleteSpeakerCommand(UUID key, DeleteSpeakerCommandsRequest command) { + deleteSpeakerRequests.put(key, command); + } + + public Optional getDeleteSpeakerCommand(UUID key) { + return Optional.ofNullable(deleteSpeakerRequests.get(key)); + } + @Override protected String getCrudActionName() { return "path swap"; } public static class Factory { - private final StateMachineBuilder builder; + private final StateMachineBuilder builder; private final FlowPathSwapHubCarrier carrier; public Factory(@NonNull FlowPathSwapHubCarrier carrier, @NonNull PersistenceManager persistenceManager, - @NonNull FlowResourcesManager resourcesManager, + @NonNull FlowResourcesManager resourcesManager, @NonNull RuleManager ruleManager, int speakerCommandRetriesLimit) { this.carrier = carrier; @@ -112,8 +139,6 @@ public Factory(@NonNull FlowPathSwapHubCarrier carrier, @NonNull PersistenceMana builder.internalTransition().within(State.INSTALLING_INGRESS_RULES).on(Event.RESPONSE_RECEIVED) .perform(new OnReceivedInstallResponseAction(speakerCommandRetriesLimit)); - builder.internalTransition().within(State.INSTALLING_INGRESS_RULES).on(Event.ERROR_RECEIVED) - .perform(new OnReceivedInstallResponseAction(speakerCommandRetriesLimit)); builder.transition().from(State.INSTALLING_INGRESS_RULES).to(State.INGRESS_RULES_INSTALLED) .on(Event.RULES_INSTALLED); builder.transitions().from(State.INSTALLING_INGRESS_RULES) @@ -129,21 +154,30 @@ public Factory(@NonNull FlowPathSwapHubCarrier carrier, @NonNull PersistenceMana builder.internalTransition().within(State.VALIDATING_INGRESS_RULES).on(Event.RESPONSE_RECEIVED) .perform(new ValidateIngressRulesAction(speakerCommandRetriesLimit)); - builder.internalTransition().within(State.VALIDATING_INGRESS_RULES).on(Event.ERROR_RECEIVED) - .perform(new 
ValidateIngressRulesAction(speakerCommandRetriesLimit)); builder.transition().from(State.VALIDATING_INGRESS_RULES).to(State.INGRESS_RULES_VALIDATED) .on(Event.RULES_VALIDATED); builder.transitions().from(State.VALIDATING_INGRESS_RULES) .toAmong(State.REVERTING_PATHS_SWAP, State.REVERTING_PATHS_SWAP, State.REVERTING_PATHS_SWAP) .onEach(Event.TIMEOUT, Event.MISSING_RULE_FOUND, Event.ERROR); - builder.transition().from(State.INGRESS_RULES_VALIDATED).to(State.REMOVING_OLD_RULES).on(Event.NEXT) + builder.transition().from(State.INGRESS_RULES_VALIDATED).to(State.UPDATING_YFLOW_RULES).on(Event.NEXT) + .perform(new UpdateYFlowRulesAction(persistenceManager, ruleManager)); + + builder.internalTransition().within(State.UPDATING_YFLOW_RULES).on(Event.RESPONSE_RECEIVED) + .perform(new OnReceivedInstallResponseAction(speakerCommandRetriesLimit)); + builder.transitions().from(State.UPDATING_YFLOW_RULES) + .toAmong(State.YFLOW_RULES_UPDATED, State.YFLOW_RULES_UPDATED) + .onEach(Event.RULES_INSTALLED, Event.SKIP_YFLOW_RULES_UPDATE); + builder.transitions().from(State.UPDATING_YFLOW_RULES) + .toAmong(State.REVERTING_PATHS_SWAP, State.REVERTING_PATHS_SWAP) + .onEach(Event.TIMEOUT, Event.ERROR) + .perform(new AbandonPendingCommandsAction()); + + builder.transition().from(State.YFLOW_RULES_UPDATED).to(State.REMOVING_OLD_RULES).on(Event.NEXT) .perform(new RemoveOldRulesAction(persistenceManager, resourcesManager)); builder.internalTransition().within(State.REMOVING_OLD_RULES).on(Event.RESPONSE_RECEIVED) .perform(new OnReceivedRemoveOrRevertResponseAction(speakerCommandRetriesLimit)); - builder.internalTransition().within(State.REMOVING_OLD_RULES).on(Event.ERROR_RECEIVED) - .perform(new OnReceivedRemoveOrRevertResponseAction(speakerCommandRetriesLimit)); builder.transitions().from(State.REMOVING_OLD_RULES) .toAmong(State.OLD_RULES_REMOVED, State.OLD_RULES_REMOVED) .onEach(Event.RULES_REMOVED, Event.ERROR); @@ -167,15 +201,26 @@ public Factory(@NonNull FlowPathSwapHubCarrier carrier, @NonNull PersistenceMana builder.internalTransition().within(State.REVERTING_NEW_RULES).on(Event.RESPONSE_RECEIVED) .perform(new OnReceivedRemoveOrRevertResponseAction(speakerCommandRetriesLimit)); - builder.internalTransition().within(State.REVERTING_NEW_RULES).on(Event.ERROR_RECEIVED) - .perform(new OnReceivedRemoveOrRevertResponseAction(speakerCommandRetriesLimit)); builder.transition().from(State.REVERTING_NEW_RULES).to(State.NEW_RULES_REVERTED) .on(Event.RULES_REMOVED); builder.transition().from(State.REVERTING_NEW_RULES).to(State.NEW_RULES_REVERTED) .on(Event.ERROR) .perform(new HandleNotCompletedCommandsAction()); - builder.transition().from(State.NEW_RULES_REVERTED) + builder.transition().from(State.NEW_RULES_REVERTED).to(State.REVERTING_YFLOW_RULES).on(Event.NEXT) + .perform(new RevertYFlowRulesAction(persistenceManager, ruleManager)); + + builder.internalTransition().within(State.REVERTING_YFLOW_RULES).on(Event.RESPONSE_RECEIVED) + .perform(new OnReceivedRemoveOrRevertResponseAction(speakerCommandRetriesLimit)); + builder.transitions().from(State.REVERTING_YFLOW_RULES) + .toAmong(State.YFLOW_RULES_REVERTED, State.YFLOW_RULES_REVERTED) + .onEach(Event.RULES_REMOVED, Event.SKIP_YFLOW_RULES_REVERT); + builder.transitions().from(State.REVERTING_YFLOW_RULES) + .toAmong(State.YFLOW_RULES_REVERTED, State.YFLOW_RULES_REVERTED) + .onEach(Event.TIMEOUT, Event.ERROR) + .perform(new HandleNotCompletedCommandsAction()); + + builder.transition().from(State.YFLOW_RULES_REVERTED) .to(State.REVERTING_FLOW_STATUS) .on(Event.NEXT) 
.perform(new RecalculateFlowStatusAction(persistenceManager, @@ -207,8 +252,11 @@ public Factory(@NonNull FlowPathSwapHubCarrier carrier, @NonNull PersistenceMana } public FlowPathSwapFsm newInstance(@NonNull CommandContext commandContext, @NonNull String flowId) { - FlowPathSwapFsm fsm = - builder.newStateMachine(FlowPathSwapFsm.State.INITIALIZED, commandContext, carrier, flowId); + FlowPathSwapFsm fsm = builder.newStateMachine(State.INITIALIZED, commandContext, carrier, flowId); + + fsm.addTransitionCompleteListener(event -> + log.debug("FlowPathSwapFsm, transition to {} on {}", event.getTargetState(), event.getCause())); + MeterRegistryHolder.getRegistry().ifPresent(registry -> { Sample sample = LongTaskTimer.builder("fsm.active_execution") .register(registry) @@ -238,6 +286,8 @@ public enum State { VALIDATING_INGRESS_RULES, INGRESS_RULES_VALIDATED, + UPDATING_YFLOW_RULES, + YFLOW_RULES_UPDATED, REMOVING_OLD_RULES, OLD_RULES_REMOVED, @@ -257,6 +307,9 @@ public enum State { REVERTING_NEW_RULES, NEW_RULES_REVERTED, + REVERTING_YFLOW_RULES, + YFLOW_RULES_REVERTED, + REVERTING_FLOW_STATUS, REVERTING_FLOW, @@ -270,14 +323,17 @@ public enum Event { NEXT, RESPONSE_RECEIVED, - ERROR_RECEIVED, RULES_INSTALLED, RULES_VALIDATED, MISSING_RULE_FOUND, + SKIP_YFLOW_RULES_UPDATE, + RULES_REMOVED, + SKIP_YFLOW_RULES_REVERT, + TIMEOUT, ERROR } diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/BaseOnReceivedResponseAction.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/BaseOnReceivedResponseAction.java new file mode 100644 index 00000000000..1b5c68c0f8d --- /dev/null +++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/BaseOnReceivedResponseAction.java @@ -0,0 +1,54 @@ +/* Copyright 2021 Telstra Open Source + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.openkilda.wfm.topology.flowhs.fsm.pathswap.actions; + +import org.openkilda.floodlight.api.request.rulemanager.FlowCommand; +import org.openkilda.floodlight.api.request.rulemanager.GroupCommand; +import org.openkilda.floodlight.api.request.rulemanager.MeterCommand; +import org.openkilda.floodlight.api.request.rulemanager.OfCommand; +import org.openkilda.wfm.topology.flowhs.fsm.common.actions.HistoryRecordingAction; +import org.openkilda.wfm.topology.flowhs.fsm.pathswap.FlowPathSwapContext; +import org.openkilda.wfm.topology.flowhs.fsm.pathswap.FlowPathSwapFsm; +import org.openkilda.wfm.topology.flowhs.fsm.pathswap.FlowPathSwapFsm.Event; +import org.openkilda.wfm.topology.flowhs.fsm.pathswap.FlowPathSwapFsm.State; + +import lombok.extern.slf4j.Slf4j; + +import java.util.Collection; +import java.util.Set; +import java.util.UUID; +import java.util.stream.Collectors; + +@Slf4j +public abstract class BaseOnReceivedResponseAction extends + HistoryRecordingAction { + protected final int speakerCommandRetriesLimit; + + protected BaseOnReceivedResponseAction(int speakerCommandRetriesLimit) { + this.speakerCommandRetriesLimit = speakerCommandRetriesLimit; + } + + protected Collection filterOfCommands(Collection source, Set commandUuids) { + return source.stream() + .filter(ofCommand -> ofCommand instanceof FlowCommand + && commandUuids.contains(((FlowCommand) ofCommand).getData().getUuid()) + || ofCommand instanceof MeterCommand + && commandUuids.contains(((MeterCommand) ofCommand).getData().getUuid()) + || ofCommand instanceof GroupCommand + && commandUuids.contains(((GroupCommand) ofCommand).getData().getUuid())) + .collect(Collectors.toList()); + } +} diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/InstallIngressRulesAction.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/InstallIngressRulesAction.java index 9913fd6d581..74328735f0b 100644 --- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/InstallIngressRulesAction.java +++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/InstallIngressRulesAction.java @@ -66,7 +66,7 @@ protected void perform(State from, State to, Event event, FlowPathSwapContext co commandBuilder.buildIngressOnly( stateMachine.getCommandContext(), flow, newPrimaryForward, newPrimaryReverse, speakerContext)); commands.addAll(commandBuilder.buildEgressOnly(stateMachine.getCommandContext(), - flow, newPrimaryForward, newPrimaryReverse).stream() + flow, newPrimaryForward, newPrimaryReverse).stream() .filter(f -> f instanceof TransitFlowLoopSegmentRequestFactory || f instanceof EgressMirrorFlowSegmentRequestFactory) .collect(Collectors.toList())); diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/OnReceivedInstallResponseAction.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/OnReceivedInstallResponseAction.java index 5e8241e7c31..69a1ad6e13a 100644 --- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/OnReceivedInstallResponseAction.java +++ 
b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/OnReceivedInstallResponseAction.java @@ -19,8 +19,9 @@ import org.openkilda.floodlight.api.request.factory.FlowSegmentRequestFactory; import org.openkilda.floodlight.api.response.SpeakerFlowSegmentResponse; +import org.openkilda.floodlight.api.response.SpeakerResponse; +import org.openkilda.floodlight.api.response.rulemanager.SpeakerCommandResponse; import org.openkilda.floodlight.flow.response.FlowErrorResponse; -import org.openkilda.wfm.topology.flowhs.fsm.common.actions.HistoryRecordingAction; import org.openkilda.wfm.topology.flowhs.fsm.pathswap.FlowPathSwapContext; import org.openkilda.wfm.topology.flowhs.fsm.pathswap.FlowPathSwapFsm; import org.openkilda.wfm.topology.flowhs.fsm.pathswap.FlowPathSwapFsm.Event; @@ -28,63 +29,102 @@ import lombok.extern.slf4j.Slf4j; +import java.util.Set; import java.util.UUID; @Slf4j -public class OnReceivedInstallResponseAction extends - HistoryRecordingAction { - private final int speakerCommandRetriesLimit; - +public class OnReceivedInstallResponseAction extends BaseOnReceivedResponseAction { public OnReceivedInstallResponseAction(int speakerCommandRetriesLimit) { - this.speakerCommandRetriesLimit = speakerCommandRetriesLimit; + super(speakerCommandRetriesLimit); } @Override protected void perform(State from, State to, Event event, FlowPathSwapContext context, FlowPathSwapFsm stateMachine) { - SpeakerFlowSegmentResponse response = context.getSpeakerFlowResponse(); + SpeakerResponse response = context.getSpeakerResponse(); UUID commandId = response.getCommandId(); - FlowSegmentRequestFactory command = stateMachine.getInstallCommand(commandId); - if (!stateMachine.getPendingCommands().containsKey(commandId) || command == null) { + if (!stateMachine.hasPendingCommand(commandId)) { log.info("Received a response for unexpected command: {}", response); return; } + boolean isInstallCommand = stateMachine.getInstallCommand(commandId) != null + || stateMachine.getInstallSpeakerCommand(commandId).isPresent(); + if (response.isSuccess()) { stateMachine.removePendingCommand(commandId); - - stateMachine.saveActionToHistory("Rule was installed", - format("The rule was installed: switch %s, cookie %s", - response.getSwitchId(), command.getCookie())); + String commandName = isInstallCommand ? "installed" : "deleted"; + if (response instanceof SpeakerFlowSegmentResponse) { + stateMachine.saveActionToHistory("Rule was " + commandName, + format("The rule was %s: switch %s, cookie %s", commandName, + response.getSwitchId(), ((SpeakerFlowSegmentResponse) response).getCookie())); + } else { + stateMachine.saveActionToHistory("Rule was " + commandName, + format("The rule was %s: switch %s", commandName, response.getSwitchId())); + } } else { - FlowErrorResponse errorResponse = (FlowErrorResponse) response; - int attempt = stateMachine.doRetryForCommand(commandId); if (attempt <= speakerCommandRetriesLimit) { - stateMachine.saveErrorToHistory("Failed to install rule", format( - "Failed to install the rule: commandId %s, switch %s, cookie %s. Error %s. 
" - + "Retrying (attempt %d)", - commandId, errorResponse.getSwitchId(), command.getCookie(), errorResponse, attempt)); + FlowSegmentRequestFactory flowSegmentRequest = stateMachine.getInstallCommand(commandId); + if (flowSegmentRequest != null && response instanceof FlowErrorResponse) { + FlowErrorResponse errorResponse = (FlowErrorResponse) response; + stateMachine.saveErrorToHistory("Failed to install rule", + format("Failed to install the rule: commandId %s, switch %s, cookie %s. Error %s. " + + "Retrying (attempt %d)", + commandId, errorResponse.getSwitchId(), flowSegmentRequest.getCookie(), + errorResponse, attempt)); + stateMachine.getCarrier().sendSpeakerRequest(flowSegmentRequest.makeInstallRequest(commandId)); + } else if (response instanceof SpeakerCommandResponse) { + String commandName = isInstallCommand ? "install" : "delete"; + SpeakerCommandResponse speakerCommandResponse = (SpeakerCommandResponse) response; + speakerCommandResponse.getFailedCommandIds().forEach((uuid, message) -> + stateMachine.saveErrorToHistory("Failed to " + commandName + " rule", + format("Failed to %s the rule: commandId %s, ruleId %s, switch %s. " + + "Error %s. Retrying (attempt %d)", commandName, + commandId, uuid, response.getSwitchId(), message, attempt))); - stateMachine.getCarrier().sendSpeakerRequest(command.makeInstallRequest(commandId)); + Set failedUuids = speakerCommandResponse.getFailedCommandIds().keySet(); + stateMachine.getInstallSpeakerCommand(commandId) + .ifPresent(command -> stateMachine.getCarrier() + .sendSpeakerRequest(command.toBuilder() + .commands(filterOfCommands(command.getCommands(), failedUuids)).build())); + stateMachine.getDeleteSpeakerCommand(commandId) + .ifPresent(command -> stateMachine.getCarrier() + .sendSpeakerRequest(command.toBuilder() + .commands(filterOfCommands(command.getCommands(), failedUuids)).build())); + } else { + log.warn("Received a unknown response: {}", response); + return; + } } else { + stateMachine.addFailedCommand(commandId, response); stateMachine.removePendingCommand(commandId); - stateMachine.saveErrorToHistory("Failed to install rule", format( - "Failed to install the rule: commandId %s, switch %s, cookie %s. Error: %s", - commandId, errorResponse.getSwitchId(), command.getCookie(), errorResponse)); - - stateMachine.addFailedCommand(commandId, errorResponse); + String commandName = isInstallCommand ? "install" : "delete"; + if (response instanceof FlowErrorResponse) { + stateMachine.saveErrorToHistory("Failed to " + commandName + " rule", + format("Failed to %s the rule: commandId %s, switch %s, cookie %s. Error %s.", + commandName, commandId, response.getSwitchId(), + ((FlowErrorResponse) response).getCookie(), response)); + } else if (response instanceof SpeakerCommandResponse) { + ((SpeakerCommandResponse) response).getFailedCommandIds().forEach((uuid, message) -> + stateMachine.saveErrorToHistory("Failed to " + commandName + " rule", + format("Failed to %s the rule: commandId %s, ruleId %s, switch %s. 
Error %s.", + commandName, commandId, uuid, response.getSwitchId(), message))); + } else { + log.warn("Received a unknown response: {}", response); + return; + } } } if (stateMachine.getPendingCommands().isEmpty()) { if (stateMachine.getFailedCommands().isEmpty()) { - log.debug("Received responses for all pending install commands of the flow {}", + log.debug("Received responses for all pending install / delete commands of the flow {}", stateMachine.getFlowId()); stateMachine.fire(Event.RULES_INSTALLED); } else { - String errorMessage = format("Received error response(s) for %d install commands", + String errorMessage = format("Received error response(s) for %d install / delete commands", stateMachine.getFailedCommands().size()); stateMachine.saveErrorToHistory(errorMessage); stateMachine.fireError(errorMessage); diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/OnReceivedRemoveOrRevertResponseAction.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/OnReceivedRemoveOrRevertResponseAction.java index d572b7e8f76..5c47cfd7784 100644 --- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/OnReceivedRemoveOrRevertResponseAction.java +++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/OnReceivedRemoveOrRevertResponseAction.java @@ -19,8 +19,9 @@ import org.openkilda.floodlight.api.request.factory.FlowSegmentRequestFactory; import org.openkilda.floodlight.api.response.SpeakerFlowSegmentResponse; +import org.openkilda.floodlight.api.response.SpeakerResponse; +import org.openkilda.floodlight.api.response.rulemanager.SpeakerCommandResponse; import org.openkilda.floodlight.flow.response.FlowErrorResponse; -import org.openkilda.wfm.topology.flowhs.fsm.common.actions.HistoryRecordingAction; import org.openkilda.wfm.topology.flowhs.fsm.pathswap.FlowPathSwapContext; import org.openkilda.wfm.topology.flowhs.fsm.pathswap.FlowPathSwapFsm; import org.openkilda.wfm.topology.flowhs.fsm.pathswap.FlowPathSwapFsm.Event; @@ -28,85 +29,112 @@ import lombok.extern.slf4j.Slf4j; +import java.util.Set; import java.util.UUID; @Slf4j -public class OnReceivedRemoveOrRevertResponseAction extends - HistoryRecordingAction { - private final int speakerCommandRetriesLimit; - +public class OnReceivedRemoveOrRevertResponseAction extends BaseOnReceivedResponseAction { public OnReceivedRemoveOrRevertResponseAction(int speakerCommandRetriesLimit) { - this.speakerCommandRetriesLimit = speakerCommandRetriesLimit; + super(speakerCommandRetriesLimit); } @Override protected void perform(State from, State to, Event event, FlowPathSwapContext context, FlowPathSwapFsm stateMachine) { - SpeakerFlowSegmentResponse response = context.getSpeakerFlowResponse(); + SpeakerResponse response = context.getSpeakerResponse(); UUID commandId = response.getCommandId(); - FlowSegmentRequestFactory removeCommand = stateMachine.getRemoveCommands().get(commandId); - FlowSegmentRequestFactory installCommand = stateMachine.getInstallCommand(commandId); - if (!stateMachine.getPendingCommands().containsKey(commandId) - || (removeCommand == null && installCommand == null)) { + if (!stateMachine.hasPendingCommand(commandId)) { log.info("Received a response for unexpected command: {}", response); return; } + boolean isInstallCommand = stateMachine.getInstallCommand(commandId) != 
null + || stateMachine.getInstallSpeakerCommand(commandId).isPresent(); + if (response.isSuccess()) { stateMachine.removePendingCommand(commandId); - - if (removeCommand != null) { - stateMachine.saveActionToHistory("Rule was deleted", - format("The rule was removed: switch %s, cookie %s", response.getSwitchId(), - removeCommand.getCookie())); + String commandName = isInstallCommand ? "re-installed (reverted)" : "deleted"; + if (response instanceof SpeakerFlowSegmentResponse) { + stateMachine.saveActionToHistory("Rule was " + commandName, + format("The rule was %s: switch %s, cookie %s", commandName, + response.getSwitchId(), ((SpeakerFlowSegmentResponse) response).getCookie())); } else { - stateMachine.saveActionToHistory("Rule was re-installed (reverted)", - format("The rule was installed: switch %s, cookie %s", - response.getSwitchId(), installCommand.getCookie())); + stateMachine.saveActionToHistory("Rule was " + commandName, + format("The rule was %s: switch %s", commandName, response.getSwitchId())); } } else { - FlowErrorResponse errorResponse = (FlowErrorResponse) response; - int attempt = stateMachine.doRetryForCommand(commandId); if (attempt <= speakerCommandRetriesLimit) { - if (removeCommand != null) { - stateMachine.saveErrorToHistory("Failed to remove rule", format( - "Failed to remove the rule: commandId %s, switch %s, cookie %s. Error %s. " - + "Retrying (attempt %d)", - commandId, errorResponse.getSwitchId(), removeCommand.getCookie(), errorResponse, attempt)); + if (response instanceof FlowErrorResponse) { + FlowErrorResponse errorResponse = (FlowErrorResponse) response; + if (isInstallCommand) { + FlowSegmentRequestFactory installCommand = stateMachine.getInstallCommand(commandId); + stateMachine.saveErrorToHistory("Failed to re-install (revert) rule", + format("Failed to install the rule: commandId %s, switch %s, cookie %s. Error %s. " + + "Retrying (attempt %d)", + commandId, errorResponse.getSwitchId(), installCommand.getCookie(), + errorResponse, attempt)); + stateMachine.getCarrier().sendSpeakerRequest(installCommand.makeInstallRequest(commandId)); + } else { + FlowSegmentRequestFactory removeCommand = stateMachine.getRemoveCommand(commandId); + stateMachine.saveErrorToHistory("Failed to delete rule", + format("Failed to remove the rule: commandId %s, switch %s, cookie %s. Error %s. " + + "Retrying (attempt %d)", + commandId, errorResponse.getSwitchId(), removeCommand.getCookie(), + errorResponse, attempt)); + stateMachine.getCarrier().sendSpeakerRequest(removeCommand.makeRemoveRequest(commandId)); + } + } else if (response instanceof SpeakerCommandResponse) { + String commandName = isInstallCommand ? "re-install (revert)" : "delete"; + SpeakerCommandResponse speakerCommandResponse = (SpeakerCommandResponse) response; + speakerCommandResponse.getFailedCommandIds().forEach((uuid, message) -> + stateMachine.saveErrorToHistory("Failed to " + commandName + " rule", + format("Failed to %s the rule: commandId %s, ruleId %s, switch %s. " + + "Error %s. 
Retrying (attempt %d)", commandName, + commandId, uuid, response.getSwitchId(), message, attempt))); - stateMachine.getCarrier().sendSpeakerRequest(removeCommand.makeRemoveRequest(commandId)); + Set failedUuids = speakerCommandResponse.getFailedCommandIds().keySet(); + stateMachine.getInstallSpeakerCommand(commandId) + .ifPresent(command -> stateMachine.getCarrier() + .sendSpeakerRequest(command.toBuilder() + .commands(filterOfCommands(command.getCommands(), failedUuids)).build())); + stateMachine.getDeleteSpeakerCommand(commandId) + .ifPresent(command -> stateMachine.getCarrier() + .sendSpeakerRequest(command.toBuilder() + .commands(filterOfCommands(command.getCommands(), failedUuids)).build())); } else { - stateMachine.saveErrorToHistory("Failed to re-install (revert) rule", format( - "Failed to install the rule: commandId %s, switch %s, cookie %s. Error %s. " - + "Retrying (attempt %d)", commandId, errorResponse.getSwitchId(), - installCommand.getCookie(), errorResponse, attempt)); - - stateMachine.getCarrier().sendSpeakerRequest(installCommand.makeInstallRequest(commandId)); + log.warn("Received a unknown response: {}", response); + return; } } else { + stateMachine.addFailedCommand(commandId, response); stateMachine.removePendingCommand(commandId); - if (removeCommand != null) { - stateMachine.saveErrorToHistory("Failed to remove rule", format( - "Failed to remove the rule: commandId %s, switch %s, cookie %s. Error: %s", - commandId, errorResponse.getSwitchId(), removeCommand.getCookie(), errorResponse)); + String commandName = isInstallCommand ? "re-install (revert)" : "delete"; + if (response instanceof FlowErrorResponse) { + stateMachine.saveErrorToHistory("Failed to " + commandName + " rule", + format("Failed to %s the rule: commandId %s, switch %s, cookie %s. Error %s.", + commandName, commandId, response.getSwitchId(), + ((FlowErrorResponse) response).getCookie(), response)); + } else if (response instanceof SpeakerCommandResponse) { + ((SpeakerCommandResponse) response).getFailedCommandIds().forEach((uuid, message) -> + stateMachine.saveErrorToHistory("Failed to " + commandName + " rule", + format("Failed to %s the rule: commandId %s, ruleId %s, switch %s. Error %s.", + commandName, commandId, uuid, response.getSwitchId(), message))); } else { - stateMachine.saveErrorToHistory("Failed to re-install rule", format( - "Failed to install the rule: commandId %s, switch %s, cookie %s. 
Error: %s", - commandId, errorResponse.getSwitchId(), installCommand.getCookie(), errorResponse)); + log.warn("Received a unknown response: {}", response); + return; } - stateMachine.addFailedCommand(commandId, errorResponse); } } if (stateMachine.getPendingCommands().isEmpty()) { if (stateMachine.getFailedCommands().isEmpty()) { - log.debug("Received responses for all pending remove / re-install commands of the flow {}", + log.debug("Received responses for all pending install / delete commands of the flow {}", stateMachine.getFlowId()); stateMachine.fire(Event.RULES_REMOVED); } else { - String errorMessage = format("Received error response(s) for %d remove / re-install commands", + String errorMessage = format("Received error response(s) for %d install / delete commands", stateMachine.getFailedCommands().size()); stateMachine.saveErrorToHistory(errorMessage); stateMachine.fireError(errorMessage); diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/RevertYFlowRulesAction.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/RevertYFlowRulesAction.java new file mode 100644 index 00000000000..0f689468438 --- /dev/null +++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/RevertYFlowRulesAction.java @@ -0,0 +1,77 @@ +/* Copyright 2021 Telstra Open Source + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.openkilda.wfm.topology.flowhs.fsm.pathswap.actions; + +import static java.lang.String.format; + +import org.openkilda.floodlight.api.request.rulemanager.DeleteSpeakerCommandsRequest; +import org.openkilda.floodlight.api.request.rulemanager.InstallSpeakerCommandsRequest; +import org.openkilda.messaging.error.ErrorType; +import org.openkilda.model.Flow; +import org.openkilda.model.SwitchId; +import org.openkilda.model.YFlow; +import org.openkilda.persistence.PersistenceManager; +import org.openkilda.rulemanager.RuleManager; +import org.openkilda.wfm.topology.flowhs.exception.FlowProcessingException; +import org.openkilda.wfm.topology.flowhs.fsm.pathswap.FlowPathSwapContext; +import org.openkilda.wfm.topology.flowhs.fsm.pathswap.FlowPathSwapFsm; +import org.openkilda.wfm.topology.flowhs.fsm.pathswap.FlowPathSwapFsm.Event; +import org.openkilda.wfm.topology.flowhs.fsm.pathswap.FlowPathSwapFsm.State; + +import lombok.extern.slf4j.Slf4j; + +import java.util.stream.Stream; + +@Slf4j +public class RevertYFlowRulesAction extends UpdateYFlowRulesAction { + public RevertYFlowRulesAction(PersistenceManager persistenceManager, RuleManager ruleManager) { + super(persistenceManager, ruleManager); + } + + @Override + protected void perform(State from, State to, Event event, FlowPathSwapContext context, + FlowPathSwapFsm stateMachine) { + String flowId = stateMachine.getFlowId(); + Flow flow = getFlow(flowId); + String yFlowId = flow.getYFlowId(); + if (yFlowId == null) { + stateMachine.saveActionToHistory("No need to revert y-flow rules - it's not a sub-flow"); + stateMachine.fire(Event.SKIP_YFLOW_RULES_REVERT); + return; + } + YFlow yFlow = yFlowRepository.findById(yFlowId) + .orElseThrow(() -> new FlowProcessingException(ErrorType.INTERNAL_ERROR, + format("Y-flow %s not found in persistent storage", yFlowId))); + + stateMachine.clearPendingAndRetriedAndFailedCommands(); + + SwitchId sharedEndpoint = yFlow.getSharedEndpoint().getSwitchId(); + InstallSpeakerCommandsRequest installRequest = buildYFlowInstallRequest(sharedEndpoint, + stateMachine.getOldPrimaryForwardPath(), stateMachine.getCommandContext()); + stateMachine.addInstallSpeakerCommand(installRequest.getCommandId(), installRequest); + DeleteSpeakerCommandsRequest deleteRequest = buildYFlowDeleteRequest(sharedEndpoint, + stateMachine.getNewPrimaryForwardPath(), stateMachine.getCommandContext()); + stateMachine.addDeleteSpeakerCommand(deleteRequest.getCommandId(), deleteRequest); + + // emitting + Stream.of(installRequest, deleteRequest).forEach(command -> { + stateMachine.getCarrier().sendSpeakerRequest(command); + stateMachine.addPendingCommand(command.getCommandId(), command.getSwitchId()); + }); + + stateMachine.saveActionToHistory("Commands for reverting y-flow rules have been sent"); + } +} diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/UpdateYFlowRulesAction.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/UpdateYFlowRulesAction.java new file mode 100644 index 00000000000..77b7976c2e6 --- /dev/null +++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/UpdateYFlowRulesAction.java @@ -0,0 +1,129 @@ +/* Copyright 2021 Telstra Open Source + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.openkilda.wfm.topology.flowhs.fsm.pathswap.actions; + +import static java.lang.String.format; +import static java.util.Collections.singleton; +import static java.util.stream.Collectors.toList; + +import org.openkilda.floodlight.api.request.rulemanager.DeleteSpeakerCommandsRequest; +import org.openkilda.floodlight.api.request.rulemanager.FlowCommand; +import org.openkilda.floodlight.api.request.rulemanager.InstallSpeakerCommandsRequest; +import org.openkilda.floodlight.api.request.rulemanager.OfCommand; +import org.openkilda.messaging.MessageContext; +import org.openkilda.messaging.error.ErrorType; +import org.openkilda.model.Flow; +import org.openkilda.model.PathId; +import org.openkilda.model.SwitchId; +import org.openkilda.model.YFlow; +import org.openkilda.persistence.PersistenceManager; +import org.openkilda.persistence.repositories.YFlowRepository; +import org.openkilda.rulemanager.DataAdapter; +import org.openkilda.rulemanager.FlowSpeakerData; +import org.openkilda.rulemanager.RuleManager; +import org.openkilda.rulemanager.SpeakerData; +import org.openkilda.rulemanager.adapter.PersistenceDataAdapter; +import org.openkilda.wfm.CommandContext; +import org.openkilda.wfm.topology.flowhs.exception.FlowProcessingException; +import org.openkilda.wfm.topology.flowhs.fsm.common.actions.FlowProcessingWithHistorySupportAction; +import org.openkilda.wfm.topology.flowhs.fsm.pathswap.FlowPathSwapContext; +import org.openkilda.wfm.topology.flowhs.fsm.pathswap.FlowPathSwapFsm; +import org.openkilda.wfm.topology.flowhs.fsm.pathswap.FlowPathSwapFsm.Event; +import org.openkilda.wfm.topology.flowhs.fsm.pathswap.FlowPathSwapFsm.State; + +import lombok.extern.slf4j.Slf4j; + +import java.util.List; +import java.util.UUID; +import java.util.stream.Stream; + +@Slf4j +public class UpdateYFlowRulesAction extends + FlowProcessingWithHistorySupportAction { + protected final YFlowRepository yFlowRepository; + protected final RuleManager ruleManager; + + public UpdateYFlowRulesAction(PersistenceManager persistenceManager, RuleManager ruleManager) { + super(persistenceManager); + yFlowRepository = persistenceManager.getRepositoryFactory().createYFlowRepository(); + this.ruleManager = ruleManager; + } + + @Override + protected void perform(State from, State to, Event event, FlowPathSwapContext context, + FlowPathSwapFsm stateMachine) { + String flowId = stateMachine.getFlowId(); + Flow flow = getFlow(flowId); + String yFlowId = flow.getYFlowId(); + if (yFlowId == null) { + stateMachine.saveActionToHistory("No need to update y-flow rules - it's not a sub-flow"); + stateMachine.fire(Event.SKIP_YFLOW_RULES_UPDATE); + return; + } + YFlow yFlow = yFlowRepository.findById(yFlowId) + .orElseThrow(() -> new FlowProcessingException(ErrorType.INTERNAL_ERROR, + format("Y-flow %s not found in persistent storage", yFlowId))); + + stateMachine.clearPendingAndRetriedAndFailedCommands(); + + SwitchId sharedEndpoint = yFlow.getSharedEndpoint().getSwitchId(); + InstallSpeakerCommandsRequest installRequest = buildYFlowInstallRequest(sharedEndpoint, + stateMachine.getNewPrimaryForwardPath(), 
stateMachine.getCommandContext()); + stateMachine.addInstallSpeakerCommand(installRequest.getCommandId(), installRequest); + DeleteSpeakerCommandsRequest deleteRequest = buildYFlowDeleteRequest(sharedEndpoint, + stateMachine.getOldPrimaryForwardPath(), stateMachine.getCommandContext()); + stateMachine.addDeleteSpeakerCommand(deleteRequest.getCommandId(), deleteRequest); + + // emitting + Stream.of(installRequest, deleteRequest).forEach(command -> { + stateMachine.getCarrier().sendSpeakerRequest(command); + stateMachine.addPendingCommand(command.getCommandId(), command.getSwitchId()); + }); + + stateMachine.saveActionToHistory("Commands for updating y-flow rules have been sent"); + } + + protected InstallSpeakerCommandsRequest buildYFlowInstallRequest(SwitchId switchId, PathId pathId, + CommandContext context) { + List ofCommands = buildYFlowOfCommands(switchId, pathId); + UUID commandId = commandIdGenerator.generate(); + MessageContext messageContext = new MessageContext(commandId.toString(), + context.getCorrelationId()); + return new InstallSpeakerCommandsRequest(messageContext, switchId, commandId, ofCommands); + } + + protected DeleteSpeakerCommandsRequest buildYFlowDeleteRequest(SwitchId switchId, PathId pathId, + CommandContext context) { + List ofCommands = buildYFlowOfCommands(switchId, pathId); + UUID commandId = commandIdGenerator.generate(); + MessageContext messageContext = new MessageContext(commandId.toString(), + context.getCorrelationId()); + return new DeleteSpeakerCommandsRequest(messageContext, switchId, commandId, ofCommands); + } + + private List buildYFlowOfCommands(SwitchId switchId, PathId pathId) { + DataAdapter dataAdapter = PersistenceDataAdapter.builder() + .persistenceManager(persistenceManager) + .switchIds(singleton(switchId)) + .pathIds(singleton(pathId)) + .build(); + List speakerData = ruleManager.buildRulesForSwitch(switchId, dataAdapter); + return speakerData.stream() + .filter(data -> data instanceof FlowSpeakerData) + .map(data -> new FlowCommand((FlowSpeakerData) data)) + .collect(toList()); + } +} diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/ValidateIngressRulesAction.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/ValidateIngressRulesAction.java index b797179ab8c..a51e1a54e26 100644 --- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/ValidateIngressRulesAction.java +++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/pathswap/actions/ValidateIngressRulesAction.java @@ -18,7 +18,7 @@ import static java.lang.String.format; import org.openkilda.floodlight.api.request.factory.FlowSegmentRequestFactory; -import org.openkilda.floodlight.api.response.SpeakerFlowSegmentResponse; +import org.openkilda.floodlight.api.response.SpeakerResponse; import org.openkilda.floodlight.flow.response.FlowErrorResponse; import org.openkilda.wfm.topology.flowhs.fsm.common.actions.HistoryRecordingAction; import org.openkilda.wfm.topology.flowhs.fsm.pathswap.FlowPathSwapContext; @@ -42,7 +42,7 @@ public ValidateIngressRulesAction(int speakerCommandRetriesLimit) { @Override protected void perform(State from, State to, Event event, FlowPathSwapContext context, FlowPathSwapFsm stateMachine) { - SpeakerFlowSegmentResponse response = context.getSpeakerFlowResponse(); + SpeakerResponse response = 
context.getSpeakerResponse(); UUID commandId = response.getCommandId(); FlowSegmentRequestFactory command = stateMachine.getIngressCommands().get(commandId); if (!stateMachine.getPendingCommands().containsKey(commandId) || command == null) { diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/HandleNotCompletedCommandsAction.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/HandleNotCompletedCommandsAction.java index 0c356d8d173..8611f365b11 100644 --- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/HandleNotCompletedCommandsAction.java +++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/HandleNotCompletedCommandsAction.java @@ -18,7 +18,7 @@ import static java.lang.String.format; import org.openkilda.floodlight.api.request.factory.FlowSegmentRequestFactory; -import org.openkilda.floodlight.flow.response.FlowErrorResponse; +import org.openkilda.floodlight.api.response.SpeakerResponse; import org.openkilda.wfm.topology.flowhs.fsm.common.actions.HistoryRecordingAction; import org.openkilda.wfm.topology.flowhs.fsm.reroute.FlowRerouteContext; import org.openkilda.wfm.topology.flowhs.fsm.reroute.FlowRerouteFsm; @@ -53,7 +53,7 @@ public void perform(State from, State to, Event event, FlowRerouteContext contex } } - for (FlowErrorResponse errorResponse : stateMachine.getFailedCommands().values()) { + for (SpeakerResponse errorResponse : stateMachine.getFailedCommands().values()) { log.warn( "Receive error response from {} for command {}: {}", errorResponse.getSwitchId(), errorResponse.getCommandId(), errorResponse); diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/error/SetInstallRuleErrorAction.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/error/SetInstallRuleErrorAction.java index d83e45f0968..76f3cf5c5b5 100644 --- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/error/SetInstallRuleErrorAction.java +++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/error/SetInstallRuleErrorAction.java @@ -15,7 +15,7 @@ package org.openkilda.wfm.topology.flowhs.fsm.reroute.actions.error; -import org.openkilda.floodlight.flow.response.FlowErrorResponse; +import org.openkilda.floodlight.api.response.SpeakerResponse; import org.openkilda.messaging.info.reroute.error.SpeakerRequestError; import org.openkilda.model.SwitchId; import org.openkilda.wfm.topology.flowhs.fsm.reroute.FlowRerouteContext; @@ -36,7 +36,7 @@ public class SetInstallRuleErrorAction extends AnonymousAction switches = Stream.concat(stateMachine.getPendingCommands().values().stream(), - stateMachine.getFailedCommands().values().stream().map(FlowErrorResponse::getSwitchId)) + stateMachine.getFailedCommands().values().stream().map(SpeakerResponse::getSwitchId)) .collect(Collectors.toSet()); stateMachine.setRerouteError(new SpeakerRequestError("Failed to install rules", switches)); log.debug("Abandoning all pending commands: {}", stateMachine.getPendingCommands()); diff --git 
a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/error/SetValidateRuleErrorAction.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/error/SetValidateRuleErrorAction.java index f6e10f46dac..aee2d464f48 100644 --- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/error/SetValidateRuleErrorAction.java +++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/reroute/actions/error/SetValidateRuleErrorAction.java @@ -15,7 +15,7 @@ package org.openkilda.wfm.topology.flowhs.fsm.reroute.actions.error; -import org.openkilda.floodlight.api.response.SpeakerFlowSegmentResponse; +import org.openkilda.floodlight.api.response.SpeakerResponse; import org.openkilda.messaging.info.reroute.error.SpeakerRequestError; import org.openkilda.model.SwitchId; import org.openkilda.wfm.topology.flowhs.fsm.reroute.FlowRerouteContext; @@ -37,7 +37,7 @@ public class SetValidateRuleErrorAction extends AnonymousAction switches = Stream.concat(stateMachine.getPendingCommands().values().stream(), stateMachine.getFailedValidationResponses().values().stream() - .map(SpeakerFlowSegmentResponse::getSwitchId)) + .map(SpeakerResponse::getSwitchId)) .collect(Collectors.toSet()); stateMachine.setRerouteError(new SpeakerRequestError("Failed to validate rules", switches)); log.debug("Abandoning all pending commands: {}", stateMachine.getPendingCommands()); diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/service/FlowPathSwapService.java b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/service/FlowPathSwapService.java index f5364df9f9c..ef0d5a57ba2 100644 --- a/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/service/FlowPathSwapService.java +++ b/src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/service/FlowPathSwapService.java @@ -17,11 +17,11 @@ import static java.lang.String.format; -import org.openkilda.floodlight.api.response.SpeakerFlowSegmentResponse; -import org.openkilda.floodlight.flow.response.FlowErrorResponse; +import org.openkilda.floodlight.api.response.SpeakerResponse; import org.openkilda.messaging.command.flow.FlowPathSwapRequest; import org.openkilda.messaging.error.ErrorType; import org.openkilda.persistence.PersistenceManager; +import org.openkilda.rulemanager.RuleManager; import org.openkilda.wfm.CommandContext; import org.openkilda.wfm.share.flow.resources.FlowResourcesManager; import org.openkilda.wfm.share.utils.FsmExecutor; @@ -31,6 +31,7 @@ import org.openkilda.wfm.topology.flowhs.service.common.FlowProcessingFsmRegister; import org.openkilda.wfm.topology.flowhs.service.common.FlowProcessingService; +import lombok.NonNull; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -38,12 +39,14 @@ public class FlowPathSwapService extends FlowProcessingService, FlowProcessingEventListener> { private final FlowPathSwapFsm.Factory fsmFactory; - public FlowPathSwapService(FlowPathSwapHubCarrier carrier, - PersistenceManager persistenceManager, - int speakerCommandRetriesLimit, FlowResourcesManager flowResourcesManager) { + public FlowPathSwapService(@NonNull FlowPathSwapHubCarrier carrier, + @NonNull PersistenceManager persistenceManager, + @NonNull RuleManager ruleManager, + int 
speakerCommandRetriesLimit, + @NonNull FlowResourcesManager flowResourcesManager) { super(new FlowProcessingFsmRegister<>(), new FsmExecutor<>(Event.NEXT), carrier, persistenceManager); fsmFactory = new FlowPathSwapFsm.Factory(carrier, - persistenceManager, flowResourcesManager, speakerCommandRetriesLimit); + persistenceManager, flowResourcesManager, ruleManager, speakerCommandRetriesLimit); } /** @@ -52,9 +55,10 @@ public FlowPathSwapService(FlowPathSwapHubCarrier carrier, * @param key command identifier. * @param request request data. */ - public void handleRequest(String key, CommandContext commandContext, FlowPathSwapRequest request) { + public void handleRequest(@NonNull String key, @NonNull CommandContext commandContext, + @NonNull FlowPathSwapRequest request) { String flowId = request.getFlowId(); - if (yFlowRepository.isSubFlow(flowId)) { + if (request.isManual() && yFlowRepository.isSubFlow(flowId)) { sendForbiddenSubFlowOperationToNorthbound(flowId, commandContext); return; } @@ -88,8 +92,8 @@ public void handleRequest(String key, CommandContext commandContext, FlowPathSwa * * @param key command identifier. */ - public void handleAsyncResponse(String key, SpeakerFlowSegmentResponse flowResponse) { - log.debug("Received flow command response {}", flowResponse); + public void handleAsyncResponse(@NonNull String key, @NonNull SpeakerResponse speakerResponse) { + log.debug("Received flow command response {}", speakerResponse); FlowPathSwapFsm fsm = fsmRegister.getFsmByKey(key).orElse(null); if (fsm == null) { log.warn("Failed to find a FSM: received response with key {} for non pending FSM", key); @@ -97,14 +101,9 @@ public void handleAsyncResponse(String key, SpeakerFlowSegmentResponse flowRespo } FlowPathSwapContext context = FlowPathSwapContext.builder() - .speakerFlowResponse(flowResponse) + .speakerResponse(speakerResponse) .build(); - - if (flowResponse instanceof FlowErrorResponse) { - fsmExecutor.fire(fsm, FlowPathSwapFsm.Event.ERROR_RECEIVED, context); - } else { - fsmExecutor.fire(fsm, FlowPathSwapFsm.Event.RESPONSE_RECEIVED, context); - } + fsmExecutor.fire(fsm, Event.RESPONSE_RECEIVED, context); removeIfFinished(fsm, key); } @@ -114,7 +113,7 @@ public void handleAsyncResponse(String key, SpeakerFlowSegmentResponse flowRespo * * @param key command identifier. 
*/ - public void handleTimeout(String key) { + public void handleTimeout(@NonNull String key) { log.debug("Handling timeout for {}", key); FlowPathSwapFsm fsm = fsmRegister.getFsmByKey(key).orElse(null); if (fsm == null) { @@ -122,7 +121,7 @@ public void handleTimeout(String key) { return; } - fsmExecutor.fire(fsm, FlowPathSwapFsm.Event.TIMEOUT, null); + fsmExecutor.fire(fsm, Event.TIMEOUT, null); removeIfFinished(fsm, key); } diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/AbstractFlowTest.java b/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/AbstractFlowTest.java index 21edec9c76a..c1c91f83c22 100644 --- a/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/AbstractFlowTest.java +++ b/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/AbstractFlowTest.java @@ -26,7 +26,12 @@ import static org.mockito.Mockito.when; import org.openkilda.floodlight.api.request.FlowSegmentRequest; +import org.openkilda.floodlight.api.request.rulemanager.BaseSpeakerCommandsRequest; +import org.openkilda.floodlight.api.request.rulemanager.FlowCommand; +import org.openkilda.floodlight.api.request.rulemanager.GroupCommand; +import org.openkilda.floodlight.api.request.rulemanager.MeterCommand; import org.openkilda.floodlight.api.response.SpeakerFlowSegmentResponse; +import org.openkilda.floodlight.api.response.rulemanager.SpeakerCommandResponse; import org.openkilda.messaging.Message; import org.openkilda.messaging.command.flow.FlowRequest; import org.openkilda.messaging.error.ErrorMessage; @@ -59,6 +64,7 @@ import org.openkilda.persistence.repositories.KildaFeatureTogglesRepository; import org.openkilda.persistence.repositories.SwitchPropertiesRepository; import org.openkilda.persistence.repositories.YFlowRepository; +import org.openkilda.rulemanager.SpeakerData; import org.openkilda.wfm.CommandContext; import org.openkilda.wfm.share.flow.resources.FlowResourcesConfig; import org.openkilda.wfm.share.flow.resources.FlowResourcesManager; @@ -79,8 +85,9 @@ import java.util.List; import java.util.Map; import java.util.Queue; +import java.util.stream.Collectors; -public abstract class AbstractFlowTest extends InMemoryGraphBasedTest { +public abstract class AbstractFlowTest extends InMemoryGraphBasedTest { protected static final SwitchId SWITCH_SOURCE = new SwitchId(1); protected static final SwitchId SWITCH_DEST = new SwitchId(2); protected static final SwitchId SWITCH_TRANSIT = new SwitchId(3L); @@ -120,7 +127,7 @@ public abstract class AbstractFlowTest extends InMemoryGraphBasedTest { @Mock PathComputer pathComputer; - final Queue requests = new ArrayDeque<>(); + protected final Queue requests = new ArrayDeque<>(); final Map> installedSegments = new HashMap<>(); @Before @@ -155,30 +162,61 @@ public void tearDown() throws Exception { } } - protected SpeakerFlowSegmentResponse buildSpeakerResponse(FlowSegmentRequest flowRequest) { - return SpeakerFlowSegmentResponse.builder() - .messageContext(flowRequest.getMessageContext()) - .commandId(flowRequest.getCommandId()) - .metadata(flowRequest.getMetadata()) - .switchId(flowRequest.getSwitchId()) - .success(true) - .build(); - } - - Answer getSpeakerCommandsAnswer() { + protected Answer buildSpeakerRequestAnswer() { return invocation -> { - FlowSegmentRequest request = invocation.getArgument(0); + T request = invocation.getArgument(0); 
requests.offer(request); - if (request.isInstallRequest()) { - installedSegments.computeIfAbsent(request.getSwitchId(), ignore -> new HashMap<>()) - .put(request.getCookie(), request); + if (request instanceof FlowSegmentRequest) { + FlowSegmentRequest flowSegmentRequest = (FlowSegmentRequest) request; + if (flowSegmentRequest.isInstallRequest()) { + installedSegments.computeIfAbsent(flowSegmentRequest.getSwitchId(), ignore -> new HashMap<>()) + .put(flowSegmentRequest.getCookie(), flowSegmentRequest); + } } return request; }; } + protected SpeakerFlowSegmentResponse buildSpeakerResponse(FlowSegmentRequest flowRequest) { + return SpeakerFlowSegmentResponse.builder() + .messageContext(flowRequest.getMessageContext()) + .commandId(flowRequest.getCommandId()) + .metadata(flowRequest.getMetadata()) + .switchId(flowRequest.getSwitchId()) + .success(true) + .build(); + } + + protected SpeakerCommandResponse buildSuccessfulYFlowSpeakerResponse(BaseSpeakerCommandsRequest request) { + return SpeakerCommandResponse.builder() + .messageContext(request.getMessageContext()) + .commandId(request.getCommandId()) + .switchId(request.getSwitchId()) + .success(true) + .failedCommandIds(new HashMap<>()) + .build(); + } + + protected SpeakerCommandResponse buildErrorYFlowSpeakerResponse(BaseSpeakerCommandsRequest request) { + return SpeakerCommandResponse.builder() + .messageContext(request.getMessageContext()) + .commandId(request.getCommandId()) + .switchId(request.getSwitchId()) + .success(false) + .failedCommandIds(request.getCommands().stream().map(command -> { + if (command instanceof FlowCommand) { + return ((FlowCommand) command).getData(); + } + if (command instanceof MeterCommand) { + return ((MeterCommand) command).getData(); + } + return ((GroupCommand) command).getData(); + }).collect(Collectors.toMap(SpeakerData::getUuid, error -> "Switch is unavailable"))) + .build(); + } + SpeakerFlowSegmentResponse buildResponseOnVerifyRequest(FlowSegmentRequest request) { return SpeakerFlowSegmentResponse.builder() .commandId(request.getCommandId()) diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowCreateServiceTest.java b/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowCreateServiceTest.java index c4a892a95ca..63ea9f2233c 100644 --- a/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowCreateServiceTest.java +++ b/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowCreateServiceTest.java @@ -54,13 +54,13 @@ import java.util.UUID; @RunWith(MockitoJUnitRunner.class) -public class FlowCreateServiceTest extends AbstractFlowTest { +public class FlowCreateServiceTest extends AbstractFlowTest { @Mock private FlowGenericCarrier carrier; @Before public void init() { - doAnswer(getSpeakerCommandsAnswer()).when(carrier).sendSpeakerRequest(any(FlowSegmentRequest.class)); + doAnswer(buildSpeakerRequestAnswer()).when(carrier).sendSpeakerRequest(any(FlowSegmentRequest.class)); } @Test diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowDeleteServiceTest.java b/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowDeleteServiceTest.java index 73d533dbf31..24648d49c53 100644 --- 
a/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowDeleteServiceTest.java +++ b/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowDeleteServiceTest.java @@ -48,7 +48,7 @@ import org.mockito.junit.MockitoJUnitRunner; @RunWith(MockitoJUnitRunner.class) -public class FlowDeleteServiceTest extends AbstractFlowTest { +public class FlowDeleteServiceTest extends AbstractFlowTest { private static final int SPEAKER_COMMAND_RETRIES_LIMIT = 3; @Mock @@ -56,7 +56,7 @@ public class FlowDeleteServiceTest extends AbstractFlowTest { @Before public void setUp() { - doAnswer(getSpeakerCommandsAnswer()).when(carrier).sendSpeakerRequest(any()); + doAnswer(buildSpeakerRequestAnswer()).when(carrier).sendSpeakerRequest(any(FlowSegmentRequest.class)); // must be done before first service create attempt, because repository objects are cached inside FSM actions setupFlowRepositorySpy(); diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowPathSwapServiceTest.java b/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowPathSwapServiceTest.java new file mode 100644 index 00000000000..00e9e83a15a --- /dev/null +++ b/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowPathSwapServiceTest.java @@ -0,0 +1,290 @@ +/* Copyright 2021 Telstra Open Source + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
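Before the new FlowPathSwapServiceTest below, a minimal sketch of how the reworked test harness from `AbstractFlowTest` is used (names are taken from this diff; the wrapper method itself is illustrative): the mocked carrier records every speaker request via `buildSpeakerRequestAnswer()`, and the test drains the `requests` queue, answering flow-segment requests and rule-manager batches separately.

```java
// Wiring done in @Before, as in the tests above:
//   doAnswer(buildSpeakerRequestAnswer()).when(carrier).sendSpeakerRequest(any(SpeakerRequest.class));

// Illustrative helper; mirrors produceAsyncResponse() in the new test below.
private void answerAllSpeakerRequests(FlowPathSwapService service, String key) {
    SpeakerRequest speakerRequest;
    while ((speakerRequest = requests.poll()) != null) {
        if (speakerRequest instanceof FlowSegmentRequest) {
            // plain flow segment: reply with a per-segment success response
            service.handleAsyncResponse(key, buildSpeakerResponse((FlowSegmentRequest) speakerRequest));
        } else {
            // rule-manager batch (y-flow shared rules): success here, or
            // buildErrorYFlowSpeakerResponse(...) to simulate a failed batch
            service.handleAsyncResponse(key,
                    buildSuccessfulYFlowSpeakerResponse((BaseSpeakerCommandsRequest) speakerRequest));
        }
    }
}
```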
+ */ + +package org.openkilda.wfm.topology.flowhs.service; + +import static java.util.Collections.singletonList; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.spy; + +import org.openkilda.floodlight.api.request.FlowSegmentRequest; +import org.openkilda.floodlight.api.request.SpeakerRequest; +import org.openkilda.floodlight.api.request.rulemanager.BaseSpeakerCommandsRequest; +import org.openkilda.floodlight.api.response.SpeakerResponse; +import org.openkilda.floodlight.flow.response.FlowErrorResponse; +import org.openkilda.floodlight.flow.response.FlowErrorResponse.ErrorCode; +import org.openkilda.messaging.command.flow.FlowPathSwapRequest; +import org.openkilda.messaging.info.flow.FlowResponse; +import org.openkilda.model.Flow; +import org.openkilda.model.FlowStatus; +import org.openkilda.rulemanager.RuleManager; +import org.openkilda.rulemanager.RuleManagerConfig; +import org.openkilda.rulemanager.RuleManagerImpl; + +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +@RunWith(MockitoJUnitRunner.class) +public class FlowPathSwapServiceTest extends AbstractFlowTest { + private static final int SPEAKER_COMMAND_RETRIES_LIMIT = 0; + + @Mock + private FlowPathSwapHubCarrier carrier; + private RuleManager ruleManager; + + @Before + public void setUp() { + doAnswer(buildSpeakerRequestAnswer()).when(carrier).sendSpeakerRequest(any(SpeakerRequest.class)); + + setupFlowRepositorySpy(); + setupFlowPathRepositorySpy(); + + RuleManagerConfig ruleManagerConfig = configurationProvider.getConfiguration(RuleManagerConfig.class); + ruleManager = spy(new RuleManagerImpl(ruleManagerConfig)); + } + + @Test + public void shouldSuccessfullySwapFlowPaths() { + // given + Flow origin = dummyFactory.makeFlowWithProtectedPath(flowSource, flowDestination, + singletonList(islSourceDest), singletonList(islSourceDestAlt)); + + // when + FlowPathSwapService service = makeService(); + FlowPathSwapRequest request = new FlowPathSwapRequest(origin.getFlowId(), true); + service.handleRequest(dummyRequestKey, commandContext, request); + + verifyFlowStatus(origin.getFlowId(), FlowStatus.IN_PROGRESS); + + SpeakerRequest speakerRequest; + while ((speakerRequest = requests.poll()) != null) { + produceAsyncResponse(service, dummyRequestKey, speakerRequest); + } + + // then + Flow result = verifyFlowStatus(origin.getFlowId(), FlowStatus.UP); + verifyPathSwapped(origin, result); + verifyNorthboundSuccessResponse(carrier); + } + + @Test + public void shouldFailSwapOnUnsuccessfulInstallation() { + // given + Flow origin = dummyFactory.makeFlowWithProtectedPath(flowSource, flowDestination, + singletonList(islSourceDest), singletonList(islSourceDestAlt)); + + // when + FlowPathSwapService service = makeService(); + FlowPathSwapRequest request = new FlowPathSwapRequest(origin.getFlowId(), true); + service.handleRequest(dummyRequestKey, commandContext, request); + + verifyFlowStatus(origin.getFlowId(), FlowStatus.IN_PROGRESS); + + int failCounter = 1; + SpeakerRequest speakerRequest; + while ((speakerRequest = requests.poll()) != null) { + if (speakerRequest instanceof FlowSegmentRequest) { + FlowSegmentRequest flowSegmentRequest = (FlowSegmentRequest) speakerRequest; + if (flowSegmentRequest.isInstallRequest() && failCounter > 0) { + 
service.handleAsyncResponse(dummyRequestKey, FlowErrorResponse.errorBuilder() + .messageContext(flowSegmentRequest.getMessageContext()) + .errorCode(ErrorCode.UNKNOWN) + .description(injectedErrorMessage) + .commandId(flowSegmentRequest.getCommandId()) + .metadata(flowSegmentRequest.getMetadata()) + .switchId(flowSegmentRequest.getSwitchId()) + .build()); + failCounter--; + } else { + service.handleAsyncResponse(dummyRequestKey, buildSpeakerResponse(flowSegmentRequest)); + } + } else { + fail(); + } + } + + // then + Flow result = verifyFlowStatus(origin.getFlowId(), FlowStatus.UP); + verifyPathNotSwapped(origin, result); + verifyNorthboundSuccessResponse(carrier); + } + + @Test + public void shouldFailSwapOnTimeoutDuringInstallation() { + // given + Flow origin = dummyFactory.makeFlowWithProtectedPath(flowSource, flowDestination, + singletonList(islSourceDest), singletonList(islSourceDestAlt)); + + // when + FlowPathSwapService service = makeService(); + FlowPathSwapRequest request = new FlowPathSwapRequest(origin.getFlowId(), true); + service.handleRequest(dummyRequestKey, commandContext, request); + + verifyFlowStatus(origin.getFlowId(), FlowStatus.IN_PROGRESS); + + service.handleTimeout(dummyRequestKey); + + SpeakerRequest speakerRequest; + while ((speakerRequest = requests.poll()) != null) { + produceAsyncResponse(service, dummyRequestKey, speakerRequest); + } + + // then + Flow result = verifyFlowStatus(origin.getFlowId(), FlowStatus.UP); + verifyPathNotSwapped(origin, result); + verifyNorthboundSuccessResponse(carrier); + } + + @Test + public void shouldSuccessfullySwapYFlowPaths() { + // given + Flow origin = dummyFactory.makeFlowWithProtectedPath(flowSource, flowDestination, + singletonList(islSourceDest), singletonList(islSourceDestAlt)); + createTestYFlowForSubFlow(origin); + + // when + FlowPathSwapService service = makeService(); + FlowPathSwapRequest request = new FlowPathSwapRequest(origin.getFlowId(), false); + service.handleRequest(dummyRequestKey, commandContext, request); + + verifyFlowStatus(origin.getFlowId(), FlowStatus.IN_PROGRESS); + + SpeakerRequest speakerRequest; + while ((speakerRequest = requests.poll()) != null) { + produceAsyncResponse(service, dummyRequestKey, speakerRequest); + } + + // then + Flow result = verifyFlowStatus(origin.getFlowId(), FlowStatus.UP); + verifyPathSwapped(origin, result); + verifyNorthboundSuccessResponse(carrier); + } + + @Test + public void shouldFailSwapOnUnsuccessfulYFlowRulesInstallation() { + // given + Flow origin = dummyFactory.makeFlowWithProtectedPath(flowSource, flowDestination, + singletonList(islSourceDest), singletonList(islSourceDestAlt)); + createTestYFlowForSubFlow(origin); + + // when + FlowPathSwapService service = makeService(); + FlowPathSwapRequest request = new FlowPathSwapRequest(origin.getFlowId(), false); + service.handleRequest(dummyRequestKey, commandContext, request); + + verifyFlowStatus(origin.getFlowId(), FlowStatus.IN_PROGRESS); + + int failCounter = 1; + SpeakerRequest speakerRequest; + while ((speakerRequest = requests.poll()) != null) { + SpeakerResponse commandResponse; + if (speakerRequest instanceof FlowSegmentRequest) { + commandResponse = buildSpeakerResponse((FlowSegmentRequest) speakerRequest); + } else { + BaseSpeakerCommandsRequest speakerCommandsRequest = (BaseSpeakerCommandsRequest) speakerRequest; + if (failCounter > 0) { + commandResponse = buildErrorYFlowSpeakerResponse(speakerCommandsRequest); + failCounter--; + } else { + commandResponse = 
buildSuccessfulYFlowSpeakerResponse(speakerCommandsRequest); + } + } + service.handleAsyncResponse(dummyRequestKey, commandResponse); + } + + // then + Flow result = verifyFlowStatus(origin.getFlowId(), FlowStatus.UP); + verifyPathNotSwapped(origin, result); + verifyNorthboundSuccessResponse(carrier); + } + + @Test + public void shouldFailSwapOnTimeoutDuringYFlowRulesInstallation() { + // given + Flow origin = dummyFactory.makeFlowWithProtectedPath(flowSource, flowDestination, + singletonList(islSourceDest), singletonList(islSourceDestAlt)); + createTestYFlowForSubFlow(origin); + + // when + FlowPathSwapService service = makeService(); + FlowPathSwapRequest request = new FlowPathSwapRequest(origin.getFlowId(), false); + service.handleRequest(dummyRequestKey, commandContext, request); + + verifyFlowStatus(origin.getFlowId(), FlowStatus.IN_PROGRESS); + + int failCounter = 1; + SpeakerRequest speakerRequest; + while ((speakerRequest = requests.poll()) != null) { + if (speakerRequest instanceof FlowSegmentRequest) { + service.handleAsyncResponse(dummyRequestKey, buildSpeakerResponse((FlowSegmentRequest) speakerRequest)); + } else { + if (failCounter > 0) { + service.handleTimeout(dummyRequestKey); + failCounter--; + } else { + BaseSpeakerCommandsRequest speakerCommandsRequest = (BaseSpeakerCommandsRequest) speakerRequest; + service.handleAsyncResponse(dummyRequestKey, + buildSuccessfulYFlowSpeakerResponse(speakerCommandsRequest)); + } + } + } + + // then + Flow result = verifyFlowStatus(origin.getFlowId(), FlowStatus.UP); + verifyPathNotSwapped(origin, result); + verifyNorthboundSuccessResponse(carrier); + } + + private void produceAsyncResponse(FlowPathSwapService service, String fsmKey, SpeakerRequest speakerRequest) { + SpeakerResponse commandResponse; + if (speakerRequest instanceof FlowSegmentRequest) { + commandResponse = buildSpeakerResponse((FlowSegmentRequest) speakerRequest); + } else { + BaseSpeakerCommandsRequest speakerCommandsRequest = (BaseSpeakerCommandsRequest) speakerRequest; + commandResponse = buildSuccessfulYFlowSpeakerResponse(speakerCommandsRequest); + } + service.handleAsyncResponse(fsmKey, commandResponse); + } + + private void verifyPathSwapped(Flow origin, Flow result) { + assertEquals(origin.getProtectedForwardPathId(), result.getForwardPathId()); + assertEquals(origin.getForwardPathId(), result.getProtectedForwardPathId()); + assertEquals(origin.getProtectedReversePathId(), result.getReversePathId()); + assertEquals(origin.getReversePathId(), result.getProtectedReversePathId()); + } + + private void verifyPathNotSwapped(Flow origin, Flow result) { + assertEquals(origin.getForwardPathId(), result.getForwardPathId()); + assertEquals(origin.getProtectedForwardPathId(), result.getProtectedForwardPathId()); + assertEquals(origin.getReversePathId(), result.getReversePathId()); + assertEquals(origin.getProtectedReversePathId(), result.getProtectedReversePathId()); + } + + @Override + protected void verifyNorthboundSuccessResponse(FlowGenericCarrier carrierMock) { + verifyNorthboundSuccessResponse(carrierMock, FlowResponse.class); + } + + private FlowPathSwapService makeService() { + return new FlowPathSwapService(carrier, persistenceManager, ruleManager, + SPEAKER_COMMAND_RETRIES_LIMIT, flowResourcesManager); + } +} diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowRerouteServiceTest.java 
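As a reading aid for the constructor change (argument order as in `makeService()` above), a hedged sketch of how the swap service is now assembled with a `RuleManager`:

```java
// Mirrors makeService() in FlowPathSwapServiceTest; the retry limit value is illustrative.
RuleManagerConfig ruleManagerConfig = configurationProvider.getConfiguration(RuleManagerConfig.class);
FlowPathSwapService service = new FlowPathSwapService(
        carrier,                                  // FlowPathSwapHubCarrier
        persistenceManager,
        new RuleManagerImpl(ruleManagerConfig),   // new dependency, passed through to the FSM factory
        0,                                        // speakerCommandRetriesLimit
        flowResourcesManager);
```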
b/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowRerouteServiceTest.java index 164fbad3dee..608dd42319a 100644 --- a/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowRerouteServiceTest.java +++ b/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowRerouteServiceTest.java @@ -74,7 +74,7 @@ import java.util.Set; @RunWith(MockitoJUnitRunner.class) -public class FlowRerouteServiceTest extends AbstractFlowTest { +public class FlowRerouteServiceTest extends AbstractFlowTest { private static final int PATH_ALLOCATION_RETRIES_LIMIT = 10; private static final int PATH_ALLOCATION_RETRY_DELAY = 0; private static final int SPEAKER_COMMAND_RETRIES_LIMIT = 0; @@ -86,7 +86,7 @@ public class FlowRerouteServiceTest extends AbstractFlowTest { @Before public void setUp() { - doAnswer(getSpeakerCommandsAnswer()).when(carrier).sendSpeakerRequest(any()); + doAnswer(buildSpeakerRequestAnswer()).when(carrier).sendSpeakerRequest(any(FlowSegmentRequest.class)); // must be done before first service create attempt, because repository objects are cached inside FSM actions setupFlowRepositorySpy(); diff --git a/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowUpdateServiceTest.java b/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowUpdateServiceTest.java index e5853b3b8a5..3e92c198491 100644 --- a/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowUpdateServiceTest.java +++ b/src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowUpdateServiceTest.java @@ -75,7 +75,7 @@ import java.util.stream.Collectors; @RunWith(MockitoJUnitRunner.class) -public class FlowUpdateServiceTest extends AbstractFlowTest { +public class FlowUpdateServiceTest extends AbstractFlowTest { private static final int PATH_ALLOCATION_RETRIES_LIMIT = 10; private static final int PATH_ALLOCATION_RETRY_DELAY = 0; private static final int SPEAKER_COMMAND_RETRIES_LIMIT = 0; @@ -85,7 +85,7 @@ public class FlowUpdateServiceTest extends AbstractFlowTest { @Before public void setUp() { - doAnswer(getSpeakerCommandsAnswer()).when(carrier).sendSpeakerRequest(any()); + doAnswer(buildSpeakerRequestAnswer()).when(carrier).sendSpeakerRequest(any(FlowSegmentRequest.class)); // must be done before first service create attempt, because repository objects are cached inside FSM actions setupFlowRepositorySpy(); diff --git a/src-java/flowmonitoring-topology/flowmonitoring-storm-topology/build.gradle b/src-java/flowmonitoring-topology/flowmonitoring-storm-topology/build.gradle index 993a857a963..870a589030a 100644 --- a/src-java/flowmonitoring-topology/flowmonitoring-storm-topology/build.gradle +++ b/src-java/flowmonitoring-topology/flowmonitoring-storm-topology/build.gradle @@ -16,10 +16,10 @@ dependencies { implementation project(':network-messaging') implementation project(':floodlight-api') implementation project(':blue-green') - runtimeClasspath project(path: ':base-storm-topology', configuration: 'releaseArtifacts') + runtimeOnly project(path: ':base-storm-topology', configuration: 'releaseArtifacts') testImplementation project(path: ':base-storm-topology', configuration: 'testArtifacts') - runtimeClasspath project(':kilda-persistence-orientdb') - runtimeClasspath 
project(':kilda-persistence-hibernate') + runtimeOnly project(':kilda-persistence-orientdb') + runtimeOnly project(':kilda-persistence-hibernate') testImplementation project(path: ':kilda-persistence-api', configuration: 'testArtifacts') aspect project(':kilda-persistence-api') testImplementation project(path: ':kilda-persistence-tinkerpop', configuration: 'testArtifacts') diff --git a/src-java/gradle/wrapper/gradle-wrapper.jar b/src-java/gradle/wrapper/gradle-wrapper.jar index e708b1c023e..7454180f2ae 100644 Binary files a/src-java/gradle/wrapper/gradle-wrapper.jar and b/src-java/gradle/wrapper/gradle-wrapper.jar differ diff --git a/src-java/gradle/wrapper/gradle-wrapper.properties b/src-java/gradle/wrapper/gradle-wrapper.properties index 549d84424d0..2e6e5897b52 100644 --- a/src-java/gradle/wrapper/gradle-wrapper.properties +++ b/src-java/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-6.9-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.3-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/src-java/gradlew b/src-java/gradlew index 4f906e0c811..1b6c787337f 100755 --- a/src-java/gradlew +++ b/src-java/gradlew @@ -1,7 +1,7 @@ -#!/usr/bin/env sh +#!/bin/sh # -# Copyright 2015 the original author or authors. +# Copyright © 2015-2021 the original authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -17,67 +17,101 @@ # ############################################################################## -## -## Gradle start up script for UN*X -## +# +# Gradle start up script for POSIX generated by Gradle. +# +# Important for running: +# +# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is +# noncompliant, but you have some other compliant shell such as ksh or +# bash, then to run this script, type that shell name before the whole +# command line, like: +# +# ksh Gradle +# +# Busybox and similar reduced shells will NOT work, because this script +# requires all of these POSIX shell features: +# * functions; +# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», +# «${var#prefix}», «${var%suffix}», and «$( cmd )»; +# * compound commands having a testable exit status, especially «case»; +# * various built-in commands including «command», «set», and «ulimit». +# +# Important for patching: +# +# (2) This script targets any POSIX shell, so it avoids extensions provided +# by Bash, Ksh, etc; in particular arrays are avoided. +# +# The "traditional" practice of packing multiple parameters into a +# space-separated string is a well documented source of bugs and security +# problems, so this is (mostly) avoided, by progressively accumulating +# options in "$@", and eventually passing that to Java. +# +# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, +# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; +# see the in-line comments for details. +# +# There are tweaks for specific operating systems such as AIX, CygWin, +# Darwin, MinGW, and NonStop. +# +# (3) This script is generated from the Groovy template +# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# within the Gradle project. +# +# You can find Gradle at https://github.com/gradle/gradle/. 
+# ############################################################################## # Attempt to set APP_HOME + # Resolve links: $0 may be a link -PRG="$0" -# Need this for relative symlinks. -while [ -h "$PRG" ] ; do - ls=`ls -ld "$PRG"` - link=`expr "$ls" : '.*-> \(.*\)$'` - if expr "$link" : '/.*' > /dev/null; then - PRG="$link" - else - PRG=`dirname "$PRG"`"/$link" - fi +app_path=$0 + +# Need this for daisy-chained symlinks. +while + APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path + [ -h "$app_path" ] +do + ls=$( ls -ld "$app_path" ) + link=${ls#*' -> '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac done -SAVED="`pwd`" -cd "`dirname \"$PRG\"`/" >/dev/null -APP_HOME="`pwd -P`" -cd "$SAVED" >/dev/null + +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit APP_NAME="Gradle" -APP_BASE_NAME=`basename "$0"` +APP_BASE_NAME=${0##*/} # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' # Use the maximum available, or set MAX_FD != -1 to use that value. -MAX_FD="maximum" +MAX_FD=maximum warn () { echo "$*" -} +} >&2 die () { echo echo "$*" echo exit 1 -} +} >&2 # OS specific support (must be 'true' or 'false'). cygwin=false msys=false darwin=false nonstop=false -case "`uname`" in - CYGWIN* ) - cygwin=true - ;; - Darwin* ) - darwin=true - ;; - MINGW* ) - msys=true - ;; - NONSTOP* ) - nonstop=true - ;; +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; esac CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar @@ -87,9 +121,9 @@ CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then # IBM's JDK on AIX uses strange locations for the executables - JAVACMD="$JAVA_HOME/jre/sh/java" + JAVACMD=$JAVA_HOME/jre/sh/java else - JAVACMD="$JAVA_HOME/bin/java" + JAVACMD=$JAVA_HOME/bin/java fi if [ ! -x "$JAVACMD" ] ; then die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME @@ -98,7 +132,7 @@ Please set the JAVA_HOME variable in your environment to match the location of your Java installation." fi else - JAVACMD="java" + JAVACMD=java which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. Please set the JAVA_HOME variable in your environment to match the @@ -106,80 +140,95 @@ location of your Java installation." fi # Increase the maximum file descriptors if we can. -if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then - MAX_FD_LIMIT=`ulimit -H -n` - if [ $? -eq 0 ] ; then - if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then - MAX_FD="$MAX_FD_LIMIT" - fi - ulimit -n $MAX_FD - if [ $? -ne 0 ] ; then - warn "Could not set maximum file descriptor limit: $MAX_FD" - fi - else - warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" - fi +if ! "$cygwin" && ! "$darwin" && ! 
"$nonstop" ; then + case $MAX_FD in #( + max*) + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac fi -# For Darwin, add options to specify how the application appears in the dock -if $darwin; then - GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" -fi +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. # For Cygwin or MSYS, switch paths to Windows format before running java -if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then - APP_HOME=`cygpath --path --mixed "$APP_HOME"` - CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` - - JAVACMD=`cygpath --unix "$JAVACMD"` - - # We build the pattern for arguments to be converted via cygpath - ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` - SEP="" - for dir in $ROOTDIRSRAW ; do - ROOTDIRS="$ROOTDIRS$SEP$dir" - SEP="|" - done - OURCYGPATTERN="(^($ROOTDIRS))" - # Add a user-defined pattern to the cygpath arguments - if [ "$GRADLE_CYGPATTERN" != "" ] ; then - OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" - fi +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) + # Now convert the arguments - kludge to limit ourselves to /bin/sh - i=0 - for arg in "$@" ; do - CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` - CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option - - if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition - eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` - else - eval `echo args$i`="\"$arg\"" + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) fi - i=`expr $i + 1` + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. 
+ shift # remove old arg + set -- "$@" "$arg" # push replacement arg done - case $i in - 0) set -- ;; - 1) set -- "$args0" ;; - 2) set -- "$args0" "$args1" ;; - 3) set -- "$args0" "$args1" "$args2" ;; - 4) set -- "$args0" "$args1" "$args2" "$args3" ;; - 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; - 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; - 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; - 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; - 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; - esac fi -# Escape application args -save () { - for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done - echo " " -} -APP_ARGS=`save "$@"` +# Collect all arguments for the java command; +# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of +# shell script including quotes and variable substitutions, so put them in +# double quotes to make sure that they get re-expanded; and +# * put everything else in single quotes, so that it's not re-expanded. + +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. +# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. 
+# -# Collect all arguments for the java command, following the shell quoting and substitution rules -eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' exec "$JAVACMD" "$@" diff --git a/src-java/grpc-speaker/grpc-service/build.gradle b/src-java/grpc-speaker/grpc-service/build.gradle index 827ef870384..39d32f2b15c 100644 --- a/src-java/grpc-speaker/grpc-service/build.gradle +++ b/src-java/grpc-speaker/grpc-service/build.gradle @@ -4,7 +4,7 @@ plugins { configurations { // This conflicts with spring-boot-starter-log4j2 - compile.exclude module: 'spring-boot-starter-logging' + implementation.exclude module: 'spring-boot-starter-logging' } description = 'GRPC service' @@ -73,7 +73,7 @@ dependencies { compileJava { options.compilerArgs = [ - "-Amapstruct.defaultComponentModel=spring" + '-Amapstruct.defaultComponentModel=spring' ] } diff --git a/src-java/isllatency-topology/isllatency-storm-topology/build.gradle b/src-java/isllatency-topology/isllatency-storm-topology/build.gradle index 46438e4aef3..580742c0166 100644 --- a/src-java/isllatency-topology/isllatency-storm-topology/build.gradle +++ b/src-java/isllatency-topology/isllatency-storm-topology/build.gradle @@ -9,13 +9,13 @@ compileJava.ajc.options.compilerArgs += '-verbose' description = 'Isl Latency Storm Topology' dependencies { implementation project(':base-storm-topology') - runtimeClasspath project(path: ':base-storm-topology', configuration: 'releaseArtifacts') + runtimeOnly project(path: ':base-storm-topology', configuration: 'releaseArtifacts') testImplementation project(path: ':base-storm-topology', configuration: 'testArtifacts') implementation project(':isllatency-messaging') implementation project(':network-messaging') implementation project(':floodlight-api') - runtimeClasspath project(':kilda-persistence-orientdb') - runtimeClasspath project(':kilda-persistence-hibernate') + runtimeOnly project(':kilda-persistence-orientdb') + runtimeOnly project(':kilda-persistence-hibernate') aspect project(':kilda-persistence-api') testImplementation project(path: ':kilda-persistence-api', configuration: 'testArtifacts') testImplementation project(path: ':kilda-persistence-tinkerpop', configuration: 'testArtifacts') diff --git a/src-java/nbworker-topology/nbworker-storm-topology/build.gradle b/src-java/nbworker-topology/nbworker-storm-topology/build.gradle index e5469828d76..af2963cebfc 100644 --- a/src-java/nbworker-topology/nbworker-storm-topology/build.gradle +++ b/src-java/nbworker-topology/nbworker-storm-topology/build.gradle @@ -16,13 +16,13 @@ dependencies { implementation project(':base-storm-topology') implementation project(':blue-green') aspect project(':base-storm-topology') - runtimeClasspath project(path: ':base-storm-topology', configuration: 'releaseArtifacts') + runtimeOnly project(path: ':base-storm-topology', configuration: 'releaseArtifacts') testImplementation project(path: ':base-storm-topology', configuration: 'testArtifacts') implementation project(':kilda-pce') implementation project(':kilda-reporting') implementation project(':floodlight-api') - runtimeClasspath project(':kilda-persistence-orientdb') - runtimeClasspath project(':kilda-persistence-hibernate') + runtimeOnly project(':kilda-persistence-orientdb') + runtimeOnly 
project(':kilda-persistence-hibernate') testImplementation project(path: ':kilda-persistence-api', configuration: 'testArtifacts') testImplementation project(path: ':kilda-persistence-tinkerpop', configuration: 'testArtifacts') diff --git a/src-java/network-topology/network-storm-topology/build.gradle b/src-java/network-topology/network-storm-topology/build.gradle index 4f5bc17ba6b..014ff6d25b3 100644 --- a/src-java/network-topology/network-storm-topology/build.gradle +++ b/src-java/network-topology/network-storm-topology/build.gradle @@ -12,7 +12,7 @@ dependencies { implementation project(':network-messaging') implementation project(':swmanager-messaging') implementation project(':base-storm-topology') - runtimeClasspath project(path: ':base-storm-topology', configuration: 'releaseArtifacts') + runtimeOnly project(path: ':base-storm-topology', configuration: 'releaseArtifacts') testImplementation project(path: ':base-storm-topology', configuration: 'testArtifacts') implementation project(':kilda-reporting') implementation project(':nbworker-messaging') @@ -21,8 +21,8 @@ dependencies { implementation project(':ping-messaging') implementation project(':floodlight-api') implementation project(':grpc-api') - runtimeClasspath project(':kilda-persistence-orientdb') - runtimeClasspath project(':kilda-persistence-hibernate') + runtimeOnly project(':kilda-persistence-orientdb') + runtimeOnly project(':kilda-persistence-hibernate') aspect project(':kilda-persistence-api') testImplementation project(path: ':kilda-persistence-tinkerpop', configuration: 'testArtifacts') testImplementation project(':kilda-utils:stubs') diff --git a/src-java/northbound-service/northbound/build.gradle b/src-java/northbound-service/northbound/build.gradle index 8b12deef0be..adbae5bc850 100644 --- a/src-java/northbound-service/northbound/build.gradle +++ b/src-java/northbound-service/northbound/build.gradle @@ -3,7 +3,8 @@ plugins { } configurations { - compile.exclude module: 'spring-boot-starter-logging' + // This conflicts with spring-boot-starter-log4j2 + implementation.exclude module: 'spring-boot-starter-logging' } description = 'Northbound Service' @@ -19,8 +20,8 @@ dependencies { implementation project(':floodlight-api') implementation project(':kilda-persistence-api') implementation project(':blue-green') - runtimeClasspath project(':kilda-persistence-orientdb') - runtimeClasspath project(':kilda-persistence-hibernate') + runtimeOnly project(':kilda-persistence-orientdb') + runtimeOnly project(':kilda-persistence-hibernate') implementation project(':kilda-configuration') testImplementation project(':kilda-utils:stubs') diff --git a/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/service/impl/FlowServiceImpl.java b/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/service/impl/FlowServiceImpl.java index 2cf5ede5582..acc50034a61 100644 --- a/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/service/impl/FlowServiceImpl.java +++ b/src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/service/impl/FlowServiceImpl.java @@ -556,7 +556,7 @@ public CompletableFuture swapFlowPaths(String flowId) { final String correlationId = RequestCorrelationId.getId(); logger.info("Swapping paths for flow : {}", flowId); - FlowPathSwapRequest payload = new FlowPathSwapRequest(flowId); + FlowPathSwapRequest payload = new FlowPathSwapRequest(flowId, true); CommandMessage request = new CommandMessage( payload, 
System.currentTimeMillis(), correlationId, Destination.WFM); diff --git a/src-java/opentsdb-topology/opentsdb-storm-topology/build.gradle b/src-java/opentsdb-topology/opentsdb-storm-topology/build.gradle index 12313ad09fb..cceb406927b 100644 --- a/src-java/opentsdb-topology/opentsdb-storm-topology/build.gradle +++ b/src-java/opentsdb-topology/opentsdb-storm-topology/build.gradle @@ -9,14 +9,14 @@ compileJava.ajc.options.compilerArgs += '-verbose' description = 'OpenTSDB Storm Topology' dependencies { implementation project(':base-storm-topology') - runtimeClasspath project(path: ':base-storm-topology', configuration: 'releaseArtifacts') + runtimeOnly project(path: ':base-storm-topology', configuration: 'releaseArtifacts') testImplementation project(path: ':base-storm-topology', configuration: 'testArtifacts') implementation project(':kilda-reporting') testImplementation project(path: ':kilda-persistence-tinkerpop', configuration: 'testArtifacts') implementation project(':blue-green') - runtimeClasspath 'org.yaml:snakeyaml:1.15' - runtimeClasspath 'commons-logging:commons-logging:1.2' + runtimeOnly 'org.yaml:snakeyaml:1.15' + runtimeOnly 'commons-logging:commons-logging:1.2' implementation('org.apache.storm:storm-opentsdb:1.2.1') { exclude group: 'org.apache.httpcomponents', module: 'httpclient' // This conflicts with jcl-over-slf4j diff --git a/src-java/ping-topology/ping-storm-topology/build.gradle b/src-java/ping-topology/ping-storm-topology/build.gradle index 2486841b96c..bad181cc754 100644 --- a/src-java/ping-topology/ping-storm-topology/build.gradle +++ b/src-java/ping-topology/ping-storm-topology/build.gradle @@ -10,13 +10,13 @@ description = 'Ping Storm Topology' dependencies { implementation project(':ping-messaging') implementation project(':base-storm-topology') - runtimeClasspath project(path: ':base-storm-topology', configuration: 'releaseArtifacts') + runtimeOnly project(path: ':base-storm-topology', configuration: 'releaseArtifacts') testImplementation project(path: ':base-storm-topology', configuration: 'testArtifacts') implementation project(':kilda-reporting') implementation project(':floodlight-api') implementation project(':blue-green') - runtimeClasspath project(':kilda-persistence-orientdb') - runtimeClasspath project(':kilda-persistence-hibernate') + runtimeOnly project(':kilda-persistence-orientdb') + runtimeOnly project(':kilda-persistence-hibernate') aspect project(':kilda-persistence-api') compileOnly('org.apache.storm:storm-core') diff --git a/src-java/portstate-topology/portstate-storm-topology/build.gradle b/src-java/portstate-topology/portstate-storm-topology/build.gradle index 44d26b91dcc..8f726ad2000 100644 --- a/src-java/portstate-topology/portstate-storm-topology/build.gradle +++ b/src-java/portstate-topology/portstate-storm-topology/build.gradle @@ -9,14 +9,14 @@ compileJava.ajc.options.compilerArgs += '-verbose' description = 'Port State Storm Topology' dependencies { implementation project(':base-storm-topology') - runtimeClasspath project(path: ':base-storm-topology', configuration: 'releaseArtifacts') + runtimeOnly project(path: ':base-storm-topology', configuration: 'releaseArtifacts') implementation project(':kilda-reporting') implementation project(':network-messaging') implementation project(':nbworker-messaging') implementation project(':floodlight-api') implementation project(':blue-green') - runtimeClasspath project(':kilda-persistence-orientdb') - runtimeClasspath project(':kilda-persistence-hibernate') + runtimeOnly 
project(':kilda-persistence-orientdb') + runtimeOnly project(':kilda-persistence-hibernate') compileOnly('org.apache.storm:storm-core') diff --git a/src-java/projectfloodlight/floodlight/build.gradle b/src-java/projectfloodlight/floodlight/build.gradle index 4dcd26bc872..9ae86c7c3ef 100644 --- a/src-java/projectfloodlight/floodlight/build.gradle +++ b/src-java/projectfloodlight/floodlight/build.gradle @@ -1,6 +1,6 @@ plugins { id 'java-base' - id 'org.ajoberstar.grgit' version '4.0.2' apply false + id 'org.ajoberstar.grgit' version '4.1.1' apply false } group = 'org.projectfloodlight' @@ -43,12 +43,12 @@ task cloneFloodlightRepo { grgit.checkout(branch: versionTag) grgit.close() } catch (Exception e) { - logger.warn("Failed to fetch floodlight from git: {}", e.getMessage()) + logger.warn('Failed to fetch floodlight from git: {}', e.getMessage()) } } else { def repoUri = 'https://github.com/kilda/floodlight.git' - if (project.hasProperty("floodlightGitRepo")) { - repoUri = project.property("floodlightGitRepo") + if (project.hasProperty('floodlightGitRepo')) { + repoUri = project.property('floodlightGitRepo') } def grgit = Grgit.clone(dir: repo.absolutePath, uri: repoUri, refToCheckout: versionTag) grgit.close() diff --git a/src-java/projectfloodlight/openflowj/build.gradle b/src-java/projectfloodlight/openflowj/build.gradle index e062bf243b0..39dd089aa5f 100644 --- a/src-java/projectfloodlight/openflowj/build.gradle +++ b/src-java/projectfloodlight/openflowj/build.gradle @@ -1,6 +1,6 @@ plugins { id 'java-base' - id 'org.ajoberstar.grgit' version '4.0.2' apply false + id 'org.ajoberstar.grgit' version '4.1.1' apply false } group = 'org.projectfloodlight' @@ -42,8 +42,8 @@ task cloneLoxigenRepo { } } else { def repoUri = 'https://github.com/kilda/loxigen.git' - if (project.hasProperty("loxigenGitRepo")) { - repoUri = project.property("loxigenGitRepo") + if (project.hasProperty('loxigenGitRepo')) { + repoUri = project.property('loxigenGitRepo') } def grgit = Grgit.clone(dir: repo.absolutePath, uri: repoUri, refToCheckout: 'STABLE') grgit.close() diff --git a/src-java/reroute-topology/reroute-storm-topology/build.gradle b/src-java/reroute-topology/reroute-storm-topology/build.gradle index 7fc0d6bcdda..18c5fffd44f 100644 --- a/src-java/reroute-topology/reroute-storm-topology/build.gradle +++ b/src-java/reroute-topology/reroute-storm-topology/build.gradle @@ -13,11 +13,11 @@ dependencies { implementation project(':base-storm-topology') implementation project(':blue-green') aspect project(':base-storm-topology') - runtimeClasspath project(path: ':base-storm-topology', configuration: 'releaseArtifacts') + runtimeOnly project(path: ':base-storm-topology', configuration: 'releaseArtifacts') testImplementation project(path: ':base-storm-topology', configuration: 'testArtifacts') implementation project(':kilda-reporting') - runtimeClasspath project(':kilda-persistence-orientdb') - runtimeClasspath project(':kilda-persistence-hibernate') + runtimeOnly project(':kilda-persistence-orientdb') + runtimeOnly project(':kilda-persistence-hibernate') compileOnly('org.apache.storm:storm-core') diff --git a/src-java/reroute-topology/reroute-storm-topology/src/main/java/org/openkilda/wfm/topology/reroute/bolts/RerouteBolt.java b/src-java/reroute-topology/reroute-storm-topology/src/main/java/org/openkilda/wfm/topology/reroute/bolts/RerouteBolt.java index 197b5a150f0..d09a7e15417 100644 --- a/src-java/reroute-topology/reroute-storm-topology/src/main/java/org/openkilda/wfm/topology/reroute/bolts/RerouteBolt.java +++ 
b/src-java/reroute-topology/reroute-storm-topology/src/main/java/org/openkilda/wfm/topology/reroute/bolts/RerouteBolt.java @@ -174,7 +174,7 @@ public void emitManualRerouteCommand(String flowId, FlowThrottlingData flowThrot public void emitPathSwapCommand(String correlationId, String flowId, String reason) { CommandContext context = new CommandContext(correlationId).fork(UUID.randomUUID().toString()); emit(STREAM_OPERATION_QUEUE_ID, getCurrentTuple(), - new Values(flowId, new FlowPathSwapRequest(flowId), context)); + new Values(flowId, new FlowPathSwapRequest(flowId, false), context)); log.warn("Flow {} swap path command message sent with correlationId {}, reason \"{}\"", flowId, context.getCorrelationId(), reason); diff --git a/src-java/reroute-topology/reroute-storm-topology/src/test/java/org/openkilda/wfm/topology/reroute/service/OperationQueueServiceTest.java b/src-java/reroute-topology/reroute-storm-topology/src/test/java/org/openkilda/wfm/topology/reroute/service/OperationQueueServiceTest.java index bb69d763f9e..b464f4c431c 100644 --- a/src-java/reroute-topology/reroute-storm-topology/src/test/java/org/openkilda/wfm/topology/reroute/service/OperationQueueServiceTest.java +++ b/src-java/reroute-topology/reroute-storm-topology/src/test/java/org/openkilda/wfm/topology/reroute/service/OperationQueueServiceTest.java @@ -48,7 +48,7 @@ public void shouldAddFirst() { assertEquals(TEST_CORRELATION_ID_A, flowQueueData.getTaskInProgress()); assertEquals(1, flowQueueData.getQueue().size()); - service.addFirst(TEST_FLOW_ID, TEST_CORRELATION_ID_B, new FlowPathSwapRequest(TEST_FLOW_ID)); + service.addFirst(TEST_FLOW_ID, TEST_CORRELATION_ID_B, new FlowPathSwapRequest(TEST_FLOW_ID, false)); service.operationCompleted(TEST_FLOW_ID, null); diff --git a/src-java/server42/server42-control-server-stub/build.gradle b/src-java/server42/server42-control-server-stub/build.gradle index de86d5638d4..929bb048684 100644 --- a/src-java/server42/server42-control-server-stub/build.gradle +++ b/src-java/server42/server42-control-server-stub/build.gradle @@ -4,7 +4,7 @@ plugins { configurations { // This conflicts with spring-boot-starter-log4j2 - compile.exclude module: 'spring-boot-starter-logging' + implementation.exclude module: 'spring-boot-starter-logging' } dependencies { diff --git a/src-java/server42/server42-control-storm-stub/build.gradle b/src-java/server42/server42-control-storm-stub/build.gradle index 5575d221450..815390f8476 100644 --- a/src-java/server42/server42-control-storm-stub/build.gradle +++ b/src-java/server42/server42-control-storm-stub/build.gradle @@ -4,7 +4,7 @@ plugins { configurations { // This conflicts with spring-boot-starter-log4j2 - compile.exclude module: 'spring-boot-starter-logging' + implementation.exclude module: 'spring-boot-starter-logging' } description = 'server42-control-storm-stub' diff --git a/src-java/server42/server42-control-storm-topology/build.gradle b/src-java/server42/server42-control-storm-topology/build.gradle index ea37c5a6c29..5ebbb1b5cbd 100644 --- a/src-java/server42/server42-control-storm-topology/build.gradle +++ b/src-java/server42/server42-control-storm-topology/build.gradle @@ -10,14 +10,14 @@ description = 'Server42 Control Storm Topology' dependencies { implementation project(':network-messaging') implementation project(':base-storm-topology') - runtimeClasspath project(path: ':base-storm-topology', configuration: 'releaseArtifacts') + runtimeOnly project(path: ':base-storm-topology', configuration: 'releaseArtifacts') testImplementation project(path: 
':base-storm-topology', configuration: 'testArtifacts') implementation project(':nbworker-messaging') implementation project(':server42-control-messaging') implementation project(':server42-messaging') - runtimeClasspath project(':kilda-persistence-orientdb') - runtimeClasspath project(':kilda-persistence-hibernate') + runtimeOnly project(':kilda-persistence-orientdb') + runtimeOnly project(':kilda-persistence-hibernate') aspect project(':kilda-persistence-api') testImplementation project(path: ':kilda-persistence-tinkerpop', configuration: 'testArtifacts') diff --git a/src-java/server42/server42-control/build.gradle b/src-java/server42/server42-control/build.gradle index c942a23c471..75e218be9bf 100644 --- a/src-java/server42/server42-control/build.gradle +++ b/src-java/server42/server42-control/build.gradle @@ -4,7 +4,7 @@ plugins { configurations { // This conflicts with spring-boot-starter-log4j2 - compile.exclude module: 'spring-boot-starter-logging' + implementation.exclude module: 'spring-boot-starter-logging' } description = 'server42-control' diff --git a/src-java/server42/server42-stats/build.gradle b/src-java/server42/server42-stats/build.gradle index 4bcc752e6a6..d19ee85a135 100644 --- a/src-java/server42/server42-stats/build.gradle +++ b/src-java/server42/server42-stats/build.gradle @@ -4,7 +4,7 @@ plugins { configurations { // This conflicts with spring-boot-starter-log4j2 - compile.exclude module: 'spring-boot-starter-logging' + implementation.exclude module: 'spring-boot-starter-logging' } description = 'server42-stats' diff --git a/src-java/stats-topology/stats-storm-topology/build.gradle b/src-java/stats-topology/stats-storm-topology/build.gradle index 88efb9f5db6..08ac74df2b6 100644 --- a/src-java/stats-topology/stats-storm-topology/build.gradle +++ b/src-java/stats-topology/stats-storm-topology/build.gradle @@ -10,14 +10,14 @@ description = 'Stats Storm Topology' dependencies { implementation project(':stats-messaging') implementation project(':base-storm-topology') - runtimeClasspath project(path: ':base-storm-topology', configuration: 'releaseArtifacts') + runtimeOnly project(path: ':base-storm-topology', configuration: 'releaseArtifacts') testImplementation project(path: ':base-storm-topology', configuration: 'testArtifacts') implementation project(':kilda-reporting') implementation project(':floodlight-api') implementation project(':grpc-api') implementation project(':blue-green') - runtimeClasspath project(':kilda-persistence-orientdb') - runtimeClasspath project(':kilda-persistence-hibernate') + runtimeOnly project(':kilda-persistence-orientdb') + runtimeOnly project(':kilda-persistence-hibernate') aspect project(':kilda-persistence-api') testImplementation project(path: ':kilda-persistence-tinkerpop', configuration: 'testArtifacts') diff --git a/src-java/stats-topology/stats-storm-topology/src/main/java/org/openkilda/wfm/topology/stats/bolts/metrics/FlowMetricGenBolt.java b/src-java/stats-topology/stats-storm-topology/src/main/java/org/openkilda/wfm/topology/stats/bolts/metrics/FlowMetricGenBolt.java index 2d610053724..d0d3c7cf8b7 100644 --- a/src-java/stats-topology/stats-storm-topology/src/main/java/org/openkilda/wfm/topology/stats/bolts/metrics/FlowMetricGenBolt.java +++ b/src-java/stats-topology/stats-storm-topology/src/main/java/org/openkilda/wfm/topology/stats/bolts/metrics/FlowMetricGenBolt.java @@ -19,7 +19,6 @@ import org.openkilda.messaging.info.stats.FlowStatsEntry; import org.openkilda.model.SwitchId; -import org.openkilda.model.cookie.Cookie; import 
org.openkilda.wfm.topology.stats.model.DummyFlowDescriptor; import org.openkilda.wfm.topology.stats.model.FlowStatsAndDescriptor; import org.openkilda.wfm.topology.stats.model.KildaEntryDescriptor; @@ -61,9 +60,6 @@ private void handleStatsEntry( FlowStatsEntry statsEntry, long timestamp, @NonNull SwitchId switchId, @Nullable KildaEntryDescriptor descriptor) { if (descriptor == null) { - log.warn( - "Missed cache for switch {} cookie {} cookie-hex {}", - switchId, statsEntry.getCookie(), new Cookie(statsEntry.getCookie())); descriptor = new DummyFlowDescriptor(switchId); } diff --git a/src-java/stats-topology/stats-storm-topology/src/main/java/org/openkilda/wfm/topology/stats/service/FlowEndpointStatsEntryHandler.java b/src-java/stats-topology/stats-storm-topology/src/main/java/org/openkilda/wfm/topology/stats/service/FlowEndpointStatsEntryHandler.java index d979dff39b5..f107f1ed2b6 100644 --- a/src-java/stats-topology/stats-storm-topology/src/main/java/org/openkilda/wfm/topology/stats/service/FlowEndpointStatsEntryHandler.java +++ b/src-java/stats-topology/stats-storm-topology/src/main/java/org/openkilda/wfm/topology/stats/service/FlowEndpointStatsEntryHandler.java @@ -17,6 +17,8 @@ import org.openkilda.messaging.info.stats.FlowStatsEntry; import org.openkilda.model.SwitchId; +import org.openkilda.model.cookie.Cookie; +import org.openkilda.model.cookie.CookieBase.CookieType; import org.openkilda.model.cookie.FlowSegmentCookie; import org.openkilda.wfm.share.utils.MetricFormatter; import org.openkilda.wfm.topology.stats.bolts.metrics.FlowDirectionHelper.Direction; @@ -28,6 +30,9 @@ import org.openkilda.wfm.topology.stats.model.YFlowDescriptor; import org.openkilda.wfm.topology.stats.model.YFlowSubDescriptor; +import lombok.extern.slf4j.Slf4j; + +@Slf4j public final class FlowEndpointStatsEntryHandler extends BaseFlowStatsEntryHandler { /** * Handle stats entry. 
@@ -79,7 +84,16 @@ public void handleStatsEntry(YFlowSubDescriptor descriptor) {
 
     @Override
     public void handleStatsEntry(DummyFlowDescriptor descriptor) {
-        // nothing to do here
+        Cookie cookie = new Cookie(statsEntry.getCookie());
+        if (cookie.getType() == CookieType.SERVICE_OR_FLOW_SEGMENT) {
+            log.warn(
+                    "Missed cache entry for stats record from switch {} from table {} with cookie {}",
+                    switchId, statsEntry.getTableId(), cookie);
+        } else {
+            log.debug(
+                    "Ignoring stats entry from switch {} from table {} with cookie {}",
+                    switchId, statsEntry.getTableId(), cookie);
+        }
     }
 
     @Override
diff --git a/src-java/swmanager-topology/swmanager-storm-topology/build.gradle b/src-java/swmanager-topology/swmanager-storm-topology/build.gradle
index cbaba6b453c..205e7024795 100644
--- a/src-java/swmanager-topology/swmanager-storm-topology/build.gradle
+++ b/src-java/swmanager-topology/swmanager-storm-topology/build.gradle
@@ -12,13 +12,13 @@ dependencies {
     implementation project(':swmanager-messaging')
     implementation project(':grpc-api')
     aspect project(':base-storm-topology')
-    runtimeClasspath project(path: ':base-storm-topology', configuration: 'releaseArtifacts')
+    runtimeOnly project(path: ':base-storm-topology', configuration: 'releaseArtifacts')
     testImplementation project(path: ':base-storm-topology', configuration: 'testArtifacts')
     implementation project(':flowhs-messaging')
     implementation project(':floodlight-api')
     implementation project(':kilda-reporting')
-    runtimeClasspath project(':kilda-persistence-orientdb')
-    runtimeClasspath project(':kilda-persistence-hibernate')
+    runtimeOnly project(':kilda-persistence-orientdb')
+    runtimeOnly project(':kilda-persistence-hibernate')
     implementation project(':blue-green')
 
     compileOnly('org.apache.storm:storm-core')
diff --git a/src-java/testing/functional-tests/build.gradle b/src-java/testing/functional-tests/build.gradle
index a1d617b9b78..f57f96aea8b 100644
--- a/src-java/testing/functional-tests/build.gradle
+++ b/src-java/testing/functional-tests/build.gradle
@@ -1,7 +1,7 @@
 plugins {
     id 'groovy'
-    id 'com.adarshr.test-logger' version '3.0.0'
-    id "org.gradle.test-retry" version "1.2.1"
+    id 'com.adarshr.test-logger' version '3.1.0'
+    id 'org.gradle.test-retry' version '1.3.1'
 }
 
 description = 'Functional-Tests'
@@ -64,7 +64,7 @@ task functionalTest(type: Test, dependsOn: 'compileGroovy') {
     }
 
     Properties properties = new Properties()
-    File propertiesFile = file("kilda.properties.example")
+    File propertiesFile = file('kilda.properties.example')
     propertiesFile.withInputStream {
         properties.load(it)
     }
@@ -91,11 +91,11 @@ tasks.withType(Test) {
     //if there is a failed/unstable test, create log file for further processing in Jenkins pipeline
     def unstableLog = new File("${project.buildDir}/test-results/unstable.log").tap {
         it.parentFile.mkdirs()
-        it.write "false"
+        it.write 'false'
     }
     afterTest { desc, result ->
-        if ("FAILURE" == result.resultType as String) {
-            unstableLog.write "true"
+        if ('FAILURE' == result.resultType as String) {
+            unstableLog.write 'true'
         }
     }
     retry { //test-retry plugin config
diff --git a/src-java/testing/functional-tests/src/main/groovy/org/openkilda/functionaltests/helpers/FlowHistoryConstants.groovy b/src-java/testing/functional-tests/src/main/groovy/org/openkilda/functionaltests/helpers/FlowHistoryConstants.groovy
index 9778e68823d..8c47f58f010 100644
--- a/src-java/testing/functional-tests/src/main/groovy/org/openkilda/functionaltests/helpers/FlowHistoryConstants.groovy
+++ b/src-java/testing/functional-tests/src/main/groovy/org/openkilda/functionaltests/helpers/FlowHistoryConstants.groovy
@@ -9,6 +9,7 @@ class FlowHistoryConstants {
     public static String CREATE_SUCCESS = "Flow was created successfully"
     public static String CREATE_SUCCESS_Y = "The y-flow was created successfully"
     public static String REROUTE_SUCCESS = "Flow was rerouted successfully"
+    public static String REROUTE_SUCCESS_Y = "The y-flow was rerouted successfully"
     public static String REROUTE_FAIL = "Failed to reroute the flow"
     public static String REROUTE_COMPLETE = "Flow reroute completed"
     public static String DELETE_SUCCESS = "Flow was deleted successfully"
diff --git a/src-java/testing/functional-tests/src/test/groovy/org/openkilda/functionaltests/spec/flows/yflows/YFlowCreateSpec.groovy b/src-java/testing/functional-tests/src/test/groovy/org/openkilda/functionaltests/spec/flows/yflows/YFlowCreateSpec.groovy
index 1b4126af3e6..00b5cd52aac 100644
--- a/src-java/testing/functional-tests/src/test/groovy/org/openkilda/functionaltests/spec/flows/yflows/YFlowCreateSpec.groovy
+++ b/src-java/testing/functional-tests/src/test/groovy/org/openkilda/functionaltests/spec/flows/yflows/YFlowCreateSpec.groovy
@@ -52,8 +52,12 @@ class YFlowCreateSpec extends HealthCheckSpecification {
     @Tags([TOPOLOGY_DEPENDENT])
     def "Valid y-flow can be created#trafficDisclaimer, covered cases: #coveredCases"() {
         assumeTrue(swT != null, "These cases cannot be covered on given topology: $coveredCases")
+        if (coveredCases.toString().contains("qinq")) {
+            assumeTrue(useMultitable, "Multi table is not enabled in kilda configuration")
+        }
 
         when: "Create a y-flow of certain configuration"
+        def allLinksBefore = northbound.getAllLinks()
         def yFlow = northboundV2.addYFlow(yFlowRequest)
 
         then: "Y-flow is created and has UP status"
@@ -87,7 +91,18 @@ class YFlowCreateSpec extends HealthCheckSpecification {
 //            northbound.validateSwitch(sw.dpId).verifyMeterSectionsAreEmpty(["missing", "excess", "misconfigured"])
 //        }
 
-//        and: "Bandwidth is properly consumed on shared and non-shared ISLs"
+        and: "Bandwidth is properly consumed on shared and non-shared ISLs"
+        def allLinksAfter = northbound.getAllLinks()
+        def involvedIslsSFlow_1 = pathHelper.getInvolvedIsls(yFlow.subFlows[0].flowId)
+        def involvedIslsSFlow_2 = pathHelper.getInvolvedIsls(yFlow.subFlows[1].flowId)
+
+        (involvedIslsSFlow_1 + involvedIslsSFlow_2).unique().each { link ->
+            [link, link.reversed].each {
+                def bwBefore = islUtils.getIslInfo(allLinksBefore, it).get().availableBandwidth
+                def bwAfter = islUtils.getIslInfo(allLinksAfter, it).get().availableBandwidth
+                assert bwBefore == bwAfter + yFlow.maximumBandwidth
+            }
+        }
 
         when: "Traffic starts to flow on both sub-flows with maximum bandwidth (if applicable)"
         def beforeTraffic = new Date()
diff --git a/src-java/testing/functional-tests/src/test/groovy/org/openkilda/functionaltests/spec/flows/yflows/YFlowRerouteSpec.groovy b/src-java/testing/functional-tests/src/test/groovy/org/openkilda/functionaltests/spec/flows/yflows/YFlowRerouteSpec.groovy
new file mode 100644
index 00000000000..1c8e5db00c5
--- /dev/null
+++ b/src-java/testing/functional-tests/src/test/groovy/org/openkilda/functionaltests/spec/flows/yflows/YFlowRerouteSpec.groovy
@@ -0,0 +1,127 @@
+package org.openkilda.functionaltests.spec.flows.yflows
+
+import static groovyx.gpars.GParsPool.withPool
+import static org.junit.jupiter.api.Assumptions.assumeTrue
+import static org.openkilda.functionaltests.extension.tags.Tag.TOPOLOGY_DEPENDENT
+import static org.openkilda.functionaltests.helpers.FlowHistoryConstants.REROUTE_SUCCESS
+import static org.openkilda.functionaltests.helpers.Wrappers.wait
+import static org.openkilda.messaging.info.event.IslChangeType.DISCOVERED
+import static org.openkilda.messaging.info.event.IslChangeType.FAILED
+import static org.openkilda.testing.Constants.FLOW_CRUD_TIMEOUT
+import static org.openkilda.testing.Constants.WAIT_OFFSET
+
+import org.openkilda.functionaltests.HealthCheckSpecification
+import org.openkilda.functionaltests.extension.failfast.Tidy
+import org.openkilda.functionaltests.extension.tags.Tags
+import org.openkilda.functionaltests.helpers.YFlowHelper
+import org.openkilda.messaging.payload.flow.FlowState
+import org.openkilda.testing.service.traffexam.TraffExamService
+import org.openkilda.testing.service.traffexam.model.Exam
+import org.openkilda.testing.service.traffexam.model.ExamReport
+import org.openkilda.testing.tools.FlowTrafficExamBuilder
+
+import groovy.util.logging.Slf4j
+import org.springframework.beans.factory.annotation.Autowired
+import spock.lang.Narrative
+import spock.lang.Shared
+
+import javax.inject.Provider
+
+@Slf4j
+@Narrative("Verify reroute operations on y-flows.")
+class YFlowRerouteSpec extends HealthCheckSpecification {
+    @Autowired
+    @Shared
+    YFlowHelper yFlowHelper
+    @Autowired
+    @Shared
+    Provider<TraffExamService> traffExamProvider
+
+    @Tidy
+    @Tags([TOPOLOGY_DEPENDENT])
+    def "Valid y-flow can be rerouted"() {
+        assumeTrue(useMultitable, "Multi table is not enabled in kilda configuration")
+        given: "A qinq y-flow"
+        def swT = topologyHelper.switchTriplets.find {
+            [it.shared, it.ep1, it.ep2].every { it.traffGens } &&
+                    [it.pathsEp1, it.pathsEp2].every { it.size() > 1 } &&
+                    it.ep1 != it.ep2
+        }
+        assumeTrue(swT != null, "These cases cannot be covered on given topology:")
+        def yFlowRequest = yFlowHelper.randomYFlow(swT).tap {
+            it.subFlows[0].sharedEndpoint.vlanId = 123
+            it.subFlows[1].sharedEndpoint.vlanId = 124
+            it.subFlows[0].sharedEndpoint.innerVlanId = 111
+            it.subFlows[1].sharedEndpoint.innerVlanId = 111
+            it.subFlows[0].endpoint.vlanId = 222
+            it.subFlows[1].endpoint.vlanId = 222
+            it.subFlows[0].endpoint.innerVlanId = 333
+            it.subFlows[1].endpoint.innerVlanId = 444
+        }
+        def yFlow = yFlowHelper.addYFlow(yFlowRequest)
+
+        def paths = northboundV2.getYFlowPaths(yFlow.YFlowId)
+        def islToFail = pathHelper.getInvolvedIsls(paths.sharedPath.nodes).first()
+
+        when: "Fail a flow ISL (bring switch port down)"
+        antiflap.portDown(islToFail.srcSwitch.dpId, islToFail.srcPort)
+        wait(WAIT_OFFSET) { northbound.getLink(islToFail).state == FAILED }
+
+        then: "The flow was rerouted after reroute delay"
+        and: "History has relevant entries about y-flow reroute"
+//        wait(FLOW_CRUD_TIMEOUT) { northbound.getFlowHistory(yFlow.YFlowId).last().payload.last().action == REROUTE_SUCCESS_Y }
+        yFlow.subFlows.each { sf ->
+            wait(FLOW_CRUD_TIMEOUT) { assert northbound.getFlowHistory(sf.flowId).last().payload.last().action == REROUTE_SUCCESS }
+        }
+        wait(rerouteDelay + WAIT_OFFSET) {
+            assert northboundV2.getYFlow(yFlow.YFlowId).status == FlowState.UP.toString()
+            assert northboundV2.getYFlowPaths(yFlow.YFlowId) != paths
+        }
+
+        and: "Y-flow passes flow validation"
+        northboundV2.validateYFlow(yFlow.YFlowId)
+
+        and: "Both sub-flows pass flow validation"
+        yFlow.subFlows.each {
+            assert northbound.validateFlow(it.flowId).every { it.asExpected }
+        }
+
+        and: "All involved switches pass switch validation"
+        def involvedSwitches = pathHelper.getInvolvedYSwitches(paths)
+// involvedSwitches.each { sw ->
+// northbound.validateSwitch(sw.dpId).verifyRuleSectionsAreEmpty(["missing", "excess", "misconfigured"])
+// northbound.validateSwitch(sw.dpId).verifyMeterSectionsAreEmpty(["missing", "excess", "misconfigured"])
+// }
+
+        when: "Traffic starts to flow on both sub-flows with maximum bandwidth (if applicable)"
+        def beforeTraffic = new Date()
+        def traffExam = traffExamProvider.get()
+        List<ExamReport> examReports
+        def exam = new FlowTrafficExamBuilder(topology, traffExam).buildYFlowExam(yFlow, yFlow.maximumBandwidth, 5)
+        examReports = withPool {
+            [exam.forward1, exam.reverse1, exam.forward2, exam.reverse2].collectParallel { Exam direction ->
+                def resources = traffExam.startExam(direction)
+                direction.setResources(resources)
+                traffExam.waitExam(direction)
+            }
+        }
+
+        then: "Traffic flows on both sub-flows, but does not exceed the y-flow bandwidth restriction (~halves for each sub-flow)"
+        examReports.each { report ->
+            assert report.hasTraffic(), report.exam
+        }
+
+
+        and: "Y-flow and subflows stats are available (flow.raw.bytes)"
+// statsHelper.verifyFlowWritesStats(yFlow.YFlowId, beforeTraffic, true)
+        yFlow.subFlows.each {
+            statsHelper.verifyFlowWritesStats(it.flowId, beforeTraffic, true)
+        }
+
+        cleanup:
+        yFlow && yFlowHelper.deleteYFlow(yFlow.YFlowId)
+        islToFail && antiflap.portUp(islToFail.srcSwitch.dpId, islToFail.srcPort)
+        wait(WAIT_OFFSET) { northbound.getLink(islToFail).state == DISCOVERED }
+        database.resetCosts(topology.isls)
+    }
+}
diff --git a/src-java/testing/functional-tests/src/test/groovy/org/openkilda/functionaltests/spec/flows/yflows/YFlowValidationSpec.groovy b/src-java/testing/functional-tests/src/test/groovy/org/openkilda/functionaltests/spec/flows/yflows/YFlowValidationSpec.groovy
new file mode 100644
index 00000000000..7d967c685f1
--- /dev/null
+++ b/src-java/testing/functional-tests/src/test/groovy/org/openkilda/functionaltests/spec/flows/yflows/YFlowValidationSpec.groovy
@@ -0,0 +1,184 @@
+package org.openkilda.functionaltests.spec.flows.yflows
+
+import static org.openkilda.model.MeterId.MAX_SYSTEM_RULE_METER_ID
+import static org.openkilda.testing.Constants.NON_EXISTENT_FLOW_ID
+import static org.openkilda.testing.Constants.WAIT_OFFSET
+
+import org.openkilda.functionaltests.HealthCheckSpecification
+import org.openkilda.functionaltests.extension.failfast.Tidy
+import org.openkilda.functionaltests.helpers.Wrappers
+import org.openkilda.functionaltests.helpers.YFlowHelper
+import org.openkilda.messaging.error.MessageError
+import org.openkilda.messaging.payload.flow.FlowState
+
+import org.springframework.beans.factory.annotation.Autowired
+import org.springframework.http.HttpStatus
+import org.springframework.web.client.HttpClientErrorException
+import spock.lang.Ignore
+import spock.lang.Narrative
+import spock.lang.Shared
+
+@Narrative("""Verify that missing yFlow rule is detected by switch/flow validations.
+And make sure that the yFlow rule can be installed by syncSw/syncYFlow endpoints.""")
+class YFlowValidationSpec extends HealthCheckSpecification {
+    @Autowired
+    @Shared
+    YFlowHelper yFlowHelper
+
+    @Tidy
+    @Ignore("not implemented")
+    def "Y-Flow/flow validation should fail in case of missing y-flow shared rule"() {
+        given: "Existing y-flow"
+        def swT = topologyHelper.switchTriplets[0]
+        def yFlowRequest = yFlowHelper.randomYFlow(swT)
+        def yFlow = yFlowHelper.addYFlow(yFlowRequest)
+
+        when: "Delete shared y-flow rule"
+        def swIdToManipulate = swT.shared.dpId
+        def sharedMeterId = northbound.getAllMeters(swIdToManipulate).meterEntries.findAll {
+            it.meterId > MAX_SYSTEM_RULE_METER_ID
+        }.max { it.meterId }.meterId
+        def sharedRules = northbound.getSwitchRules(swIdToManipulate).flowEntries.findAll {
+            it.instructions.goToMeter == sharedMeterId
+        }
+// sharedRules.each { northbound.deleteSwitchRules(swIdToManipulate, it.cookie) }
+
+        then: "Y-Flow validate detects discrepancies"
+        !northboundV2.validateYFlow(yFlow.YFlowId).asExpected
+
+        and: "Simple flow validation detects discrepancies"
+        yFlow.subFlows.each {
+            assert !northbound.validateFlow(it.flowId).findAll { !it.discrepancies.empty }.empty
+        }
+
+        and: "Switch validation detects missing y-flow rule"
+        with(northbound.validateSwitch(swIdToManipulate).rules) {
+            it.misconfigured.empty
+            it.excess.empty
+            it.missing.size() == 1
+            it.missing.sort() == sharedRules*.cookie.sort()
+        }
+
+        when: "Synchronize the shared switch"
+        northbound.synchronizeSwitch(swIdToManipulate, false)
+
+        then: "Y-Flow/subFlow passes flow validation"
+        northboundV2.validateYFlow(yFlow.YFlowId).each { direction -> assert direction.asExpected }
+        yFlow.subFlows.each {
+            northbound.validateFlow(it.flowId).each { direction -> assert direction.asExpected }
+        }
+
+        and: "Switch passes validation"
+        northbound.validateSwitch(swIdToManipulate).verifyRuleSectionsAreEmpty(["missing", "excess", "misconfigured"])
+
+        cleanup:
+        yFlowHelper.deleteYFlow(yFlow.YFlowId)
+    }
+
+    @Tidy
+    def "Y-Flow/flow validation should fail in case of missing subFlow rule"() {
+        given: "Existing y-flow"
+        def swT = topologyHelper.switchTriplets[0]
+        def yFlowRequest = yFlowHelper.randomYFlow(swT)
+        def yFlow = yFlowHelper.addYFlow(yFlowRequest)
+
+        when: "Delete reverse rule of subFlow_1"
+        def subFl_1 = yFlow.subFlows[0]
+        def subFl_2 = yFlow.subFlows[1]
+        def swIdToManipulate = swT.ep2.dpId
+        def cookieToDelete = database.getFlow(subFl_1.flowId).reversePath.cookie.value
+        northbound.deleteSwitchRules(swIdToManipulate, cookieToDelete)
+
+        then: "Y-Flow validate detects discrepancies"
+        def validateYflowInfo = northboundV2.validateYFlow(yFlow.YFlowId)
+        !validateYflowInfo.asExpected
+        !validateYflowInfo.subFlowValidationResults.findAll { it.flowId == subFl_1.flowId }
+                .findAll { !it.discrepancies.empty }.empty
+        validateYflowInfo.subFlowValidationResults.findAll { it.flowId == subFl_2.flowId }
+                .findAll { !it.discrepancies.empty }.empty
+
+        and: "Simple flow validation detects discrepancies for the subFlow_1 only"
+        !northbound.validateFlow(subFl_1.flowId).findAll { !it.discrepancies.empty }.empty
+        northbound.validateFlow(subFl_2.flowId).each { direction -> assert direction.asExpected }
+
+        and: "Switch validation detects missing reverse rule only for the subFlow_1"
+        with(northbound.validateSwitch(swIdToManipulate).rules) {
+            it.misconfigured.empty
+            it.excess.empty
+            it.missing.size() == 1
+            it.missing[0] == cookieToDelete
+        }
+
+        when: "Delete shared rule of y-flow from the shared switch"
+        def sharedSwId = swT.shared.dpId
+        def sharedMeterId = northbound.getAllMeters(sharedSwId).meterEntries.findAll {
+            it.meterId > MAX_SYSTEM_RULE_METER_ID
+        }.max { it.meterId }.meterId
+        def sharedRules = northbound.getSwitchRules(sharedSwId).flowEntries.findAll {
+            it.instructions.goToMeter == sharedMeterId
+        }
+// sharedRules.each { northbound.deleteSwitchRules(sharedSwId, it.cookie) }
+
+        then: "Both subFlows are not valid"
+// yFlow.subFlows.each {
+// assert !northbound.validateFlow(it.flowId).findAll { !it.discrepancies.empty }.empty
+// }
+
+        and: "Switch validation detects missing y-flow rule on the shared switch"
+// with(northbound.validateSwitch(sharedSwId).rules) {
+// it.misconfigured.empty
+// it.excess.empty
+// it.missing.size() == 1
+// it.missing.sort() == sharedRules*.cookie.sort()
+// }
+
+        when: "Sync y-flow"
+        northboundV2.synchronizeYFlow(yFlow.YFlowId)
+
+        then: "Y-Flow/subFlow passes flow validation"
+        Wrappers.wait(WAIT_OFFSET) {
+            northboundV2.getYFlow(yFlow.YFlowId).status == FlowState.UP.toString()
+        }
+        northboundV2.validateYFlow(yFlow.YFlowId).asExpected
+        yFlow.subFlows.each {
+            northbound.validateFlow(it.flowId).each { direction -> assert direction.asExpected }
+        }
+
+        and: "Switches pass validation"
+        [swIdToManipulate, sharedSwId].each {
+            northbound.validateSwitch(it).verifyRuleSectionsAreEmpty(["missing", "misconfigured"])
+// .verifyRuleSectionsAreEmpty(swIdToManipulate, ["missing", "excess", "misconfigured"])
+        }
+
+        cleanup:
+        yFlowHelper.deleteYFlow(yFlow.YFlowId)
+    }
+
+    @Tidy
+    def "Unable to #data.action a non-existent y-flow"() {
+        when: "Invoke a certain action for a non-existent y-flow"
+        data.method()
+
+        then: "Human readable error is returned"
+        def e = thrown(HttpClientErrorException)
+        e.statusCode == HttpStatus.NOT_FOUND
+        verifyAll(e.responseBodyAsString.to(MessageError)) {
+            errorMessage == "Could not ${data.actionInMsg} y-flow"
+            errorDescription == "Y-flow $NON_EXISTENT_FLOW_ID not found"
+        }
+
+        where:
+        data << [
+                [
+                        action     : "validate",
+                        actionInMsg: "validate",
+                        method     : { northboundV2.validateYFlow(NON_EXISTENT_FLOW_ID) }
+                ],
+                [
+                        action     : "synchronize",
+                        actionInMsg: "reroute",
+                        method     : { northboundV2.synchronizeYFlow(NON_EXISTENT_FLOW_ID) }
+                ]
+        ]
+    }
+}
diff --git a/src-java/testing/functional-tests/src/test/groovy/org/openkilda/functionaltests/spec/switches/LagPortSpec.groovy b/src-java/testing/functional-tests/src/test/groovy/org/openkilda/functionaltests/spec/switches/LagPortSpec.groovy
index 97e5ff04b40..2b4d295e1d3 100644
--- a/src-java/testing/functional-tests/src/test/groovy/org/openkilda/functionaltests/spec/switches/LagPortSpec.groovy
+++ b/src-java/testing/functional-tests/src/test/groovy/org/openkilda/functionaltests/spec/switches/LagPortSpec.groovy
@@ -118,7 +118,7 @@ class LagPortSpec extends HealthCheckSpecification {
             [it.src, it.dst].every { it.dpId in allTraffGenSwitchIds }
         }
         def traffgenSrcSwPort = switchPair.src.traffGens.switchPort[0]
-        def portsArray = topology.getAllowedPortsForSwitch(switchPair.src)[-2, -1] << traffgenSrcSwPort
+        def portsArray = (topology.getAllowedPortsForSwitch(switchPair.src)[-2, -1] << traffgenSrcSwPort).unique()
         def payload = new CreateLagPortDto(portNumbers: portsArray)
         def lagPort = northboundV2.createLagLogicalPort(switchPair.src.dpId, payload).logicalPortNumber
 
@@ -328,8 +328,8 @@ class LagPortSpec extends HealthCheckSpecification {
         exc.statusCode == HttpStatus.BAD_REQUEST
         def errorDetails = exc.responseBodyAsString.to(MessageError)
         errorDetails.errorMessage == "Error during LAG create"
-        errorDetails.errorDescription == "Physical port $mirrorPort already used as sink by following mirror points" +
-                " flow '$flow.flowId': [$mirrorEndpoint.mirrorPointId]"
+        errorDetails.errorDescription == "Physical port $mirrorPort already used by following flows: [$flow.flowId]. " +
+                "You must remove these flows to be able to use the port in LAG."
 
         cleanup:
         flow && flowHelperV2.deleteFlow(flow.flowId)
diff --git a/src-java/testing/performance-tests/build.gradle b/src-java/testing/performance-tests/build.gradle
index 88edcff4eca..f8575ecdd42 100644
--- a/src-java/testing/performance-tests/build.gradle
+++ b/src-java/testing/performance-tests/build.gradle
@@ -43,7 +43,7 @@ task performanceTest(type: Test, dependsOn: 'compileGroovy') {
     include '**/performancetests/**'
     systemProperty 'tags', System.getProperty('tags')
     Properties properties = new Properties()
-    File propertiesFile = file("kilda.properties.example")
+    File propertiesFile = file('kilda.properties.example')
     propertiesFile.withInputStream {
         properties.load(it)
     }
diff --git a/src-java/testing/test-library/build.gradle b/src-java/testing/test-library/build.gradle
index 197949d59b3..2bcf5698f00 100644
--- a/src-java/testing/test-library/build.gradle
+++ b/src-java/testing/test-library/build.gradle
@@ -5,7 +5,7 @@ plugins {
 description = 'Test-Library'
 
 dependencies {
     implementation(platform('org.springframework:spring-framework-bom:5.2.13.RELEASE'))
-    compile 'org.springframework.boot:spring-boot-autoconfigure:2.2.13.RELEASE'
+    implementation 'org.springframework.boot:spring-boot-autoconfigure:2.2.13.RELEASE'
 
     api project(':base-messaging')
     api project(':flowhs-messaging')
diff --git a/src-java/testing/test-library/src/main/java/org/openkilda/testing/service/northbound/NorthboundServiceV2.java b/src-java/testing/test-library/src/main/java/org/openkilda/testing/service/northbound/NorthboundServiceV2.java
index 17b978a24f6..fb887646751 100644
--- a/src-java/testing/test-library/src/main/java/org/openkilda/testing/service/northbound/NorthboundServiceV2.java
+++ b/src-java/testing/test-library/src/main/java/org/openkilda/testing/service/northbound/NorthboundServiceV2.java
@@ -44,7 +44,9 @@
 import org.openkilda.northbound.dto.v2.yflows.YFlowPatchPayload;
 import org.openkilda.northbound.dto.v2.yflows.YFlowPaths;
 import org.openkilda.northbound.dto.v2.yflows.YFlowRerouteResult;
+import org.openkilda.northbound.dto.v2.yflows.YFlowSyncResult;
 import org.openkilda.northbound.dto.v2.yflows.YFlowUpdatePayload;
+import org.openkilda.northbound.dto.v2.yflows.YFlowValidationResult;
 import org.openkilda.testing.model.topology.TopologyDefinition;
 
 import java.util.Date;
@@ -152,4 +154,8 @@ BfdPropertiesPayload setLinkBfd(SwitchId srcSwId, Integer srcPort, SwitchId dstS
     YFlowRerouteResult rerouteYFlow(String yFlowId);
 
     YFlowPaths getYFlowPaths(String yFlowId);
+
+    YFlowValidationResult validateYFlow(String yFlowId);
+
+    YFlowSyncResult synchronizeYFlow(String yFlowId);
 }
diff --git a/src-java/testing/test-library/src/main/java/org/openkilda/testing/service/northbound/NorthboundServiceV2Impl.java b/src-java/testing/test-library/src/main/java/org/openkilda/testing/service/northbound/NorthboundServiceV2Impl.java
index 47dd4ee8a26..e883d4acc45 100644
--- a/src-java/testing/test-library/src/main/java/org/openkilda/testing/service/northbound/NorthboundServiceV2Impl.java
+++ b/src-java/testing/test-library/src/main/java/org/openkilda/testing/service/northbound/NorthboundServiceV2Impl.java
@@ -47,7 +49,9 @@
 import org.openkilda.northbound.dto.v2.yflows.YFlowPatchPayload;
 import org.openkilda.northbound.dto.v2.yflows.YFlowPaths;
 import org.openkilda.northbound.dto.v2.yflows.YFlowRerouteResult;
+import org.openkilda.northbound.dto.v2.yflows.YFlowSyncResult;
 import org.openkilda.northbound.dto.v2.yflows.YFlowUpdatePayload;
+import org.openkilda.northbound.dto.v2.yflows.YFlowValidationResult;
 import org.openkilda.testing.model.topology.TopologyDefinition;
 
 import lombok.extern.slf4j.Slf4j;
@@ -432,6 +434,18 @@ public YFlowPaths getYFlowPaths(String yFlowId) {
                 new HttpEntity(buildHeadersWithCorrelationId()), YFlowPaths.class, yFlowId).getBody();
     }
 
+    @Override
+    public YFlowValidationResult validateYFlow(String yFlowId) {
+        return restTemplate.exchange("/api/v2/y-flows/{y_flow_id}/validate", HttpMethod.POST,
+                new HttpEntity<>(buildHeadersWithCorrelationId()), YFlowValidationResult.class, yFlowId).getBody();
+    }
+
+    @Override
+    public YFlowSyncResult synchronizeYFlow(String yFlowId) {
+        return restTemplate.exchange("/api/v2/y-flows/{y_flow_id}/sync", HttpMethod.POST,
+                new HttpEntity<>(buildHeadersWithCorrelationId()), YFlowSyncResult.class, yFlowId).getBody();
+    }
+
     private HttpHeaders buildHeadersWithCorrelationId() {
         HttpHeaders headers = new HttpHeaders();
         headers.set(Utils.CORRELATION_ID, "fn-tests-" + UUID.randomUUID().toString());