diff --git a/.DS_Store b/.DS_Store deleted file mode 100644 index 3fa89eb1..00000000 Binary files a/.DS_Store and /dev/null differ diff --git a/.circleci/config.yml b/.circleci/config.yml new file mode 100644 index 00000000..810e7718 --- /dev/null +++ b/.circleci/config.yml @@ -0,0 +1,186 @@ +version: 2.1 +orbs: + docker-buildx: devarsh/docker-buildx-orb@0.1.1 +executors: + docker-executor: + docker: + - image: eclipse-temurin:17-jdk +jobs: + build_and_push_tag_image: + executor: docker-executor + environment: + JVM_OPTS: -Xmx512m + TERM: dumb + steps: + - checkout + - setup_remote_docker: + version: 20.10.24 + - run: + name: Set Lowercase Docker Image Vars + # Using tr for POSIX compatibility, could use ${VAR,,} with Bash 4+ + command: | + echo "export DOCKER_ORG_LOWER=$(echo $CIRCLE_PROJECT_USERNAME | tr '[:upper:]' '[:lower:]')" >> $BASH_ENV + echo "export DOCKER_REPO_LOWER=$(echo $CIRCLE_PROJECT_REPONAME | tr '[:upper:]' '[:lower:]')" >> $BASH_ENV + echo "Using Docker Namespace: $DOCKER_ORG_LOWER" + echo "Using Docker Repository: $DOCKER_REPO_LOWER" + - run: + name: Install Docker CLI + command: | + apt-get update + apt-get install -y curl ca-certificates gnupg + install -m 0755 -d /etc/apt/keyrings + curl -fsSL https://download.docker.com/linux/debian/gpg -o /etc/apt/keyrings/docker.asc + chmod a+r /etc/apt/keyrings/docker.asc + echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/debian bullseye stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null + apt-get update + apt-cache madison docker-ce-cli | grep 20.10 + apt-get install -y docker-ce-cli=5:20.10.24~3-0~debian-bullseye + - docker-buildx/install + # - run: + # name: Check if Docker image tag exists + # command: | + # IMAGE_TAG=$CIRCLE_TAG + # # Using lowercase variables defined above + # IMAGE_NAME="$DOCKER_ORG_LOWER/$DOCKER_REPO_LOWER" + # echo "Checking for Docker image: $IMAGE_NAME:$IMAGE_TAG" + # # Ensure DOCKERHUB_USERNAME 
and DOCKERHUB_PASSWORD are set in your CircleCI Context or Project Environment Variables + # if curl -s -f -u "$DOCKERHUB_USERNAME":"$DOCKERHUB_PASSWORD" "https://hub.docker.com/v2/repositories/$IMAGE_NAME/tags/$IMAGE_TAG" > /dev/null; then + # echo "Skipping the build and push as the tag $IMAGE_TAG already exists in Docker Hub for image $IMAGE_NAME." + # circleci-agent step halt + # else + # echo "Tag $IMAGE_TAG does not exist for image $IMAGE_NAME. Proceeding with build." + # fi + - run: + name: Build Application + command: ./gradlew bootJar + - docker-buildx/build-and-push: + # Using lowercase variables defined above + image-name: "$DOCKER_ORG_LOWER/$DOCKER_REPO_LOWER" + tag: "$CIRCLE_TAG" + # Add dockerhub credentials if needed + # dockerhub-username: "$DOCKERHUB_USERNAME" + # dockerhub-password: "$DOCKERHUB_PASSWORD" + build_and_push_branch_image: + executor: docker-executor + environment: + JVM_OPTS: -Xmx512m + TERM: dumb + steps: + - checkout + - setup_remote_docker: + version: 20.10.24 + - run: + name: Set Lowercase Docker Image Vars + command: | + echo "export DOCKER_ORG_LOWER=$(echo $CIRCLE_PROJECT_USERNAME | tr '[:upper:]' '[:lower:]')" >> $BASH_ENV + echo "export DOCKER_REPO_LOWER=$(echo $CIRCLE_PROJECT_REPONAME | tr '[:upper:]' '[:lower:]')" >> $BASH_ENV + echo "Using Docker Namespace: $DOCKER_ORG_LOWER" + echo "Using Docker Repository: $DOCKER_REPO_LOWER" + - run: + name: Install Docker CLI + command: | + apt-get update + apt-get install -y curl ca-certificates gnupg + install -m 0755 -d /etc/apt/keyrings + curl -fsSL https://download.docker.com/linux/debian/gpg -o /etc/apt/keyrings/docker.asc + chmod a+r /etc/apt/keyrings/docker.asc + echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/debian bullseye stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null + apt-get update + apt-cache madison docker-ce-cli | grep 20.10 + apt-get install -y 
docker-ce-cli=5:20.10.24~3-0~debian-bullseye + - docker-buildx/install + - run: + name: Build Application + command: | + ./gradlew checkstyleMain + ./gradlew clean bootJar + - run: + name: Sanitize Branch Name + command: | + echo "export SANITIZED_BRANCH=$(echo $CIRCLE_BRANCH | sed 's/[^a-zA-Z0-9.-]/-/g' | tr '[:upper:]' '[:lower:]')" >> $BASH_ENV # Also ensure branch tag is lowercase + # First build and push with branch-latest tag + - docker-buildx/build-and-push: + image-name: "$DOCKER_ORG_LOWER/$DOCKER_REPO_LOWER" + tag: "${SANITIZED_BRANCH}-latest" + # Then build and push with branch-version tag + - docker-buildx/build-and-push: + image-name: "$DOCKER_ORG_LOWER/$DOCKER_REPO_LOWER" + tag: "${SANITIZED_BRANCH}" + # Add dockerhub credentials if needed + # dockerhub-username: "$DOCKERHUB_USERNAME" + # dockerhub-password: "$DOCKERHUB_PASSWORD" + build_and_push_latest_image: + executor: docker-executor + environment: + JVM_OPTS: -Xmx512m + TERM: dumb + steps: + - checkout + - setup_remote_docker: + version: 20.10.24 + - run: + name: Set Lowercase Docker Image Vars + command: | + echo "export DOCKER_ORG_LOWER=$(echo $CIRCLE_PROJECT_USERNAME | tr '[:upper:]' '[:lower:]')" >> $BASH_ENV + echo "export DOCKER_REPO_LOWER=$(echo $CIRCLE_PROJECT_REPONAME | tr '[:upper:]' '[:lower:]')" >> $BASH_ENV + echo "Using Docker Namespace: $DOCKER_ORG_LOWER" + echo "Using Docker Repository: $DOCKER_REPO_LOWER" + - run: + name: Install Docker CLI + command: | + apt-get update + apt-get install -y curl ca-certificates gnupg + install -m 0755 -d /etc/apt/keyrings + curl -fsSL https://download.docker.com/linux/debian/gpg -o /etc/apt/keyrings/docker.asc + chmod a+r /etc/apt/keyrings/docker.asc + echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/debian bullseye stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null + apt-get update + apt-cache madison docker-ce-cli | grep 20.10 + apt-get install -y 
docker-ce-cli=5:20.10.24~3-0~debian-bullseye + - docker-buildx/install + - run: + name: Build Application + command: | + ./gradlew checkstyleMain + ./gradlew clean bootJar + - docker-buildx/build-and-push: + # Using lowercase variables defined above + image-name: "$DOCKER_ORG_LOWER/$DOCKER_REPO_LOWER" + tag: "latest" + # Add dockerhub credentials if needed + # dockerhub-username: "$DOCKERHUB_USERNAME" + # dockerhub-password: "$DOCKERHUB_PASSWORD" +workflows: + version: 2 + build-and-push-pipeline: + jobs: + # Build tags matching vX.Y.Z format + - build_and_push_tag_image: + filters: + tags: + only: /^v\d+\.\d+\.\d+$/ + branches: + ignore: /.*/ + context: + - DOCKER + # Build any branch commit (except tags) + - build_and_push_branch_image: + filters: + tags: + ignore: /.*/ + branches: + only: /.*/ + context: + - DOCKER + # Build 'latest' only when the branch image succeeds AND it's the main/master branch + - build_and_push_latest_image: + requires: + - build_and_push_branch_image + filters: + tags: + ignore: /.*/ + branches: + only: + - main # Or your primary branch name like 'master' + context: + - DOCKER diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 00000000..bcf84d8d --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,22 @@ +## Description + +* PR title should have jira ticket enclosed in `[]`.
+Format: ``` [jira_ticket] description```
+ex: [phee-123] PR title. +* Add a link to the Jira ticket. +* Describe the changes made and why they were made. + +## Checklist + +Please make sure these boxes are checked before submitting your pull request - thanks! +- [ ] Followed the PR title naming convention mentioned above. + +- [ ] Design related bullet points or design document link related to this PR added in the description above. + +- [ ] Updated corresponding Postman Collection or Api documentation for the changes in this PR. + +- [ ] Created/updated unit or integration tests for verifying the changes made. + +- [ ] Added required Swagger annotation and update API documentation with details of any API changes if applicable + +- [ ] Followed the naming conventions as given in https://docs.google.com/document/d/1Q4vaMSzrTxxh9TS0RILuNkSkYCxotuYk1Xe0CMIkkCU/edit?usp=sharing diff --git a/.gitignore b/.gitignore index 14cce966..3d34106d 100644 --- a/.gitignore +++ b/.gitignore @@ -36,4 +36,8 @@ build/ .vscode/ .DS_Store -/.mvn \ No newline at end of file +/.mvn +# Certificate files (use K8s secrets instead) +*.jks +*.p12 +*.keystore diff --git a/.mvn/wrapper/MavenWrapperDownloader.java b/.mvn/wrapper/MavenWrapperDownloader.java deleted file mode 100644 index a45eb6ba..00000000 --- a/.mvn/wrapper/MavenWrapperDownloader.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright 2007-present the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import java.net.*; -import java.io.*; -import java.nio.channels.*; -import java.util.Properties; - -public class MavenWrapperDownloader { - - private static final String WRAPPER_VERSION = "0.5.6"; - /** - * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided. - */ - private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/" - + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar"; - - /** - * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to - * use instead of the default one. - */ - private static final String MAVEN_WRAPPER_PROPERTIES_PATH = - ".mvn/wrapper/maven-wrapper.properties"; - - /** - * Path where the maven-wrapper.jar will be saved to. - */ - private static final String MAVEN_WRAPPER_JAR_PATH = - ".mvn/wrapper/maven-wrapper.jar"; - - /** - * Name of the property which should be used to override the default download url for the wrapper. - */ - private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl"; - - public static void main(String args[]) { - System.out.println("- Downloader started"); - File baseDirectory = new File(args[0]); - System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath()); - - // If the maven-wrapper.properties exists, read it and check if it contains a custom - // wrapperUrl parameter. 
- File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH); - String url = DEFAULT_DOWNLOAD_URL; - if (mavenWrapperPropertyFile.exists()) { - FileInputStream mavenWrapperPropertyFileInputStream = null; - try { - mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile); - Properties mavenWrapperProperties = new Properties(); - mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream); - url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url); - } catch (IOException e) { - System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'"); - } finally { - try { - if (mavenWrapperPropertyFileInputStream != null) { - mavenWrapperPropertyFileInputStream.close(); - } - } catch (IOException e) { - // Ignore ... - } - } - } - System.out.println("- Downloading from: " + url); - - File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH); - if (!outputFile.getParentFile().exists()) { - if (!outputFile.getParentFile().mkdirs()) { - System.out.println( - "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'"); - } - } - System.out.println("- Downloading to: " + outputFile.getAbsolutePath()); - try { - downloadFileFromURL(url, outputFile); - System.out.println("Done"); - System.exit(0); - } catch (Throwable e) { - System.out.println("- Error downloading"); - e.printStackTrace(); - System.exit(1); - } - } - - private static void downloadFileFromURL(String urlString, File destination) throws Exception { - if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) { - String username = System.getenv("MVNW_USERNAME"); - char[] password = System.getenv("MVNW_PASSWORD").toCharArray(); - Authenticator.setDefault(new Authenticator() { - @Override - protected PasswordAuthentication getPasswordAuthentication() { - return new PasswordAuthentication(username, password); - } - }); - } - URL website = new 
URL(urlString); - ReadableByteChannel rbc; - rbc = Channels.newChannel(website.openStream()); - FileOutputStream fos = new FileOutputStream(destination); - fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); - fos.close(); - rbc.close(); - } - -} diff --git a/.mvn/wrapper/maven-wrapper.jar b/.mvn/wrapper/maven-wrapper.jar deleted file mode 100644 index 2cc7d4a5..00000000 Binary files a/.mvn/wrapper/maven-wrapper.jar and /dev/null differ diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties deleted file mode 100644 index ffdc10e5..00000000 --- a/.mvn/wrapper/maven-wrapper.properties +++ /dev/null @@ -1,2 +0,0 @@ -distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.1/apache-maven-3.8.1-bin.zip -wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar diff --git a/Dockerfile b/Dockerfile index 7424e18f..2dccf6ed 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,5 @@ -FROM openjdk:13 +FROM eclipse-temurin:17-jdk EXPOSE 5000 -COPY target/*.jar . +COPY build/libs/*.jar . CMD java -jar *.jar \ No newline at end of file diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 00000000..a612ad98 --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,373 @@ +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. 
"Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. 
"Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. 
+Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. 
Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. 
However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. 
Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. 
* +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. 
Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. 
+ +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. diff --git a/README.md b/README.md new file mode 100644 index 00000000..d6d1bae5 --- /dev/null +++ b/README.md @@ -0,0 +1,49 @@ + +# Bulk Transaction Processor +**Core Function**: Domain-specific bulk processing engine for financial operations. + +## Key Responsibilities +- Validates and transforms raw financial data (loans, repayments, fees) +- Applies business logic (interest calculations, payment allocations) +- Generates audit trails and error reports +- Prepares processed records for downstream systems + +## Inputs +- CSV/Excel files via `/api/upload` +- Direct API payloads to `/api/process` +- Database polling from staging tables + +## Outputs +- Standardized JSON to message queues (RabbitMQ/Kafka) +- Callbacks to originating systems +- Error reports in `ERROR_.csv` + +## Dependencies +- Requires `ph-ee-connector-bulk` for outbound delivery +- Integrates with accounting rule engines + + +## SSL Configuration +```yaml +server: + ssl: + key-alias: "tomcat-https" + key-store: "classpath:keystore.jks" + key-store-type: JKS + key-password: "" + key-store-password: "" + port: 8443 +``` +#### NOTE: For disabling TLS, change the port to "8080" and add null values for all the "ssl" related fields. + +## Checkstyle +Use below command to execute the checkstyle test. +```shell +./gradlew checkstyleMain +``` + +## Spotless +Use below command to execute the spotless apply. 
+```shell +./gradlew spotlessApply +``` diff --git a/build.gradle b/build.gradle index 60be2b26..3d52e720 100644 --- a/build.gradle +++ b/build.gradle @@ -6,8 +6,12 @@ plugins { id 'java' id 'maven-publish' id 'eclipse' + id 'checkstyle' + id 'org.springframework.boot' version '2.6.2' + id 'com.diffplug.spotless' version '6.19.0' + id "com.jfrog.artifactory" version '5.+' } - +apply plugin: "com.jfrog.artifactory" repositories { mavenLocal() maven { @@ -15,28 +19,214 @@ repositories { } maven { - url = uri('https://fynarfin.jfrog.io/artifactory/fyn-libs-snapshot-local/') + url = uri('https://mifos.jfrog.io/artifactory/phee-gradle-local') + } + + maven { + url = uri('https://mifos.jfrog.io/artifactory/mifosx-gradle-local') + } +} + +apply plugin:'com.diffplug.spotless' +spotless { + format 'misc', { + target '**/*.md', '**/*.properties', '**/.gitignore', '**/.openapi-generator-ignore', '**/*.yml', '**/*.xml', '**/**.json', '**/*.sql' + targetExclude '**/build/**', '**/bin/**', '**/.settings/**', '**/.idea/**', '**/.gradle/**', '**/gradlew.bat', '**/licenses/**', '**/banner.txt', '.vscode/**' + indentWithSpaces(4) + endWithNewline() + trimTrailingWhitespace() + } + + groovyGradle { + target '*.gradle', '**/*.gradle' + targetExclude '**/build/**' + greclipse() + indentWithSpaces(4) + endWithNewline() + trimTrailingWhitespace() } + lineEndings 'UNIX' +} + +ext { + camelVersion = '3.4.0' + springBootVersion = '2.6.2' + cucumberVersion = '7.8.1' + lambokVersion = '1.18.24' } dependencies { - implementation 'org.mifos:ph-ee-connector-common:1.0.0-SNAPSHOT' + // spring dependency + implementation "org.springframework.boot:spring-boot-starter:$springBootVersion" + implementation "org.springframework.boot:spring-boot-starter-web:$springBootVersion" + implementation "org.springframework:spring-web:5.3.14" + implementation 'org.springframework.kafka:spring-kafka:2.8.1' + implementation "org.springframework.boot:spring-boot-starter-actuator:$springBootVersion" + + // spring 
test dependency + testImplementation "org.springframework.boot:spring-boot-starter-test:$springBootVersion" + + // camel dependency + implementation "org.apache.camel.springboot:camel-spring-boot-starter:$camelVersion" + implementation "org.apache.camel:camel-undertow:$camelVersion" + implementation "org.apache.camel:camel-http:$camelVersion" + implementation "org.apache.camel.springboot:camel-mail-starter:$camelVersion" + implementation "org.apache.camel:camel-jackson:$camelVersion" + + // camel test dependency + testImplementation "org.apache.camel:camel-test:$camelVersion" + testImplementation "org.apache.camel:camel-test-spring-junit5:$camelVersion" + + // cucumber test dependency + testImplementation "io.cucumber:cucumber-junit:$cucumberVersion" + testImplementation "io.cucumber:cucumber-spring:$cucumberVersion" + testImplementation "io.cucumber:cucumber-java:$cucumberVersion" + + // miscellaneous dependency + implementation 'com.google.code.gson:gson:2.8.9' + implementation 'org.json:json:20210307' + implementation 'org.mifos:ph-ee-connector-common:1.8.1-gazelle' implementation 'org.apache.camel.springboot:camel-spring-boot-starter:3.4.0' implementation 'org.apache.camel:camel-undertow:3.4.0' implementation 'org.springframework.boot:spring-boot-starter:2.5.2' implementation 'org.springframework.boot:spring-boot-starter-web:2.5.2' + implementation 'org.apache.camel:camel-http:3.4.0' + implementation 'org.springframework:spring-web:5.3.19' implementation 'com.amazonaws:aws-java-sdk:1.11.486' + implementation 'commons-io:commons-io:2.11.0' + + implementation 'com.amazonaws:aws-java-sdk-s3:1.11.486' + //To be removed + implementation 'com.amazonaws:aws-java-sdk-dynamodb:1.11.486' implementation 'com.azure:azure-storage-blob:12.12.0' - implementation 'io.camunda:zeebe-client-java:1.1.0' - implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-csv:2.12.0' - implementation 'org.springframework.kafka:spring-kafka:2.5.8.RELEASE' - testImplementation 
'org.springframework.boot:spring-boot-starter-test:2.5.2' + + implementation 'io.camunda:zeebe-client-java:8.1.23' + implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-csv:2.12.3' + implementation 'org.apache.tika:tika-core:1.4' + implementation 'org.apache.commons:commons-io:1.3.2' + implementation "org.projectlombok:lombok:$lambokVersion" + annotationProcessor "org.projectlombok:lombok:$lambokVersion" + checkstyle 'com.puppycrawl.tools:checkstyle:10.9.3' + checkstyle 'com.github.sevntu-checkstyle:sevntu-checks:1.44.1' + implementation 'com.diffplug.gradle.spotless:spotless:2.4.1' + + // miscellaneous test dependency + testImplementation "com.google.truth:truth:1.1.3" + testImplementation 'com.google.code.gson:gson:2.9.0' + implementation 'io.rest-assured:rest-assured:4.4.0' + + //retrofit + implementation 'com.squareup.retrofit2:retrofit:2.9.0' + implementation 'com.squareup.retrofit2:converter-jackson:2.9.0' + implementation('com.squareup.retrofit2:converter-gson:2.4.0') +} + +configure(this) { + // NOTE: order matters! 
+ apply plugin: 'java' + apply plugin: 'idea' + apply plugin: 'eclipse' + apply plugin: 'checkstyle' + apply plugin: 'com.diffplug.spotless' + + configurations { + implementation.setCanBeResolved(true) + api.setCanBeResolved(true) + } + tasks.withType(JavaCompile) { + options.compilerArgs += [ + "-Xlint:unchecked", + "-Xlint:cast", + "-Xlint:auxiliaryclass", + "-Xlint:deprecation", + "-Xlint:dep-ann", + "-Xlint:divzero", + "-Xlint:empty", + "-Xlint:exports", + "-Xlint:fallthrough", + "-Xlint:finally", + "-Xlint:module", + "-Xlint:opens", + "-Xlint:options", + "-Xlint:overloads", + "-Xlint:overrides", + "-Xlint:path", + "-Xlint:processing", + "-Xlint:removal", + "-Xlint:requires-automatic", + "-Xlint:requires-transitive-automatic", + "-Xlint:try", + "-Xlint:varargs", + "-Xlint:preview", + "-Xlint:static", + // -Werror needs to be disabled because EclipseLink's static weaving doesn't generate warning-free code + // and during an IntelliJ recompilation, it fails + //"-Werror", + "-Xmaxwarns", + 1500, + "-Xmaxerrs", + 1500 + ] + options.deprecation = true + } + // Configuration for the spotless plugin + // https://github.com/diffplug/spotless/tree/main/plugin-gradle + spotless { + java { + targetExclude '**/build/**', '**/bin/**', '**/out/**' + importOrder() //sort imports alphabetically + removeUnusedImports() + eclipse().configFile "$rootDir/config/bulk-formatter.xml" + endWithNewline() + trimTrailingWhitespace() + // Enforce style modifier order + custom 'Modifier ordering', { + def modifierRanking = [ + public : 1, + protected : 2, + private : 3, + abstract : 4, + default : 5, + static : 6, + final : 7, + transient : 8, + volatile : 9, + synchronized: 10, + native : 11, + strictfp : 12] + // Find any instance of multiple modifiers. 
Lead with a non-word character to avoid + // accidental matching against for instance, "an alternative default value" + it.replaceAll(/\W(?:public |protected |private |abstract |default |static |final |transient |volatile |synchronized |native |strictfp ){2,}/, { + // Do not replace the leading non-word character. Identify the modifiers + it.replaceAll(/(?:public |protected |private |abstract |default |static |final |transient |volatile |synchronized |native |strictfp ){2,}/, { + // Sort the modifiers according to the ranking above + it.split().sort({ modifierRanking[it] }).join(' ') + ' ' + } + ) + } + ) + } + } + lineEndings 'UNIX' + } + // If we are running Gradle within Eclipse to enhance classes, + // set the classes directory to point to Eclipse's default build directory + if (project.hasProperty('env') && project.getProperty('env') == 'eclipse') { + sourceSets.main.java.outputDir = file("$projectDir/bin/main") + } + // Configuration for the Checkstyle plugin + // https://docs.gradle.org/current/userguide/checkstyle_plugin.html + dependencies { + checkstyle 'com.puppycrawl.tools:checkstyle:10.3.1' + checkstyle 'com.github.sevntu-checkstyle:sevntu-checks:1.42.0' + } } + group = 'org.mifos' -version = '0.0.1-SNAPSHOT' +version = '2.0.0.mifos-SNAPSHOT' description = 'ph-ee-processor-bulk' -sourceCompatibility = '1.8' +sourceCompatibility = '17' publishing { publications { @@ -49,3 +239,31 @@ publishing { tasks.withType(JavaCompile) { options.encoding = 'UTF-8' } + + + +// tasks.named('test') { +// useJUnitPlatform() +// } + + +// configurations { +// cucumberRuntime { +// extendsFrom testImplementation +// } +// } + +// task cucumberCli() { +// dependsOn assemble, testClasses +// doLast { +// javaexec { +// main = "io.cucumber.core.cli.Main" +// classpath = configurations.cucumberRuntime + sourceSets.main.output + sourceSets.test.output +// args = [ +// '--plugin', 'pretty', +// '--plugin', 'html:target/cucumber-report.html', +// '--glue', 
'org.mifos.processor.cucumber', +// 'src/test/java/resources'] +// } +// } +// } diff --git a/config/bulk-cleanup.xml b/config/bulk-cleanup.xml new file mode 100644 index 00000000..67cb2b70 --- /dev/null +++ b/config/bulk-cleanup.xml @@ -0,0 +1,71 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/config/bulk-formatter.xml b/config/bulk-formatter.xml new file mode 100644 index 00000000..21a66024 --- /dev/null +++ b/config/bulk-formatter.xml @@ -0,0 +1,366 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/config/checkstyle/checkstyle.xml b/config/checkstyle/checkstyle.xml new file mode 100644 index 00000000..5183efef --- /dev/null +++ b/config/checkstyle/checkstyle.xml @@ -0,0 +1,266 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/config/checkstyle/suppressions.xml b/config/checkstyle/suppressions.xml new file mode 100644 index 00000000..f4e5b54c --- /dev/null +++ 
b/config/checkstyle/suppressions.xml @@ -0,0 +1,17 @@ + + + + + + + + + + + diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 490fda85..41d9927a 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index a4b44297..e750102e 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-6.3-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-7.3-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew index 2fe81a7d..1b6c7873 100755 --- a/gradlew +++ b/gradlew @@ -1,7 +1,7 @@ -#!/usr/bin/env sh +#!/bin/sh # -# Copyright 2015 the original author or authors. +# Copyright © 2015-2021 the original authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -17,78 +17,113 @@ # ############################################################################## -## -## Gradle start up script for UN*X -## +# +# Gradle start up script for POSIX generated by Gradle. +# +# Important for running: +# +# (1) You need a POSIX-compliant shell to run this script. 
If your /bin/sh is +# noncompliant, but you have some other compliant shell such as ksh or +# bash, then to run this script, type that shell name before the whole +# command line, like: +# +# ksh Gradle +# +# Busybox and similar reduced shells will NOT work, because this script +# requires all of these POSIX shell features: +# * functions; +# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», +# «${var#prefix}», «${var%suffix}», and «$( cmd )»; +# * compound commands having a testable exit status, especially «case»; +# * various built-in commands including «command», «set», and «ulimit». +# +# Important for patching: +# +# (2) This script targets any POSIX shell, so it avoids extensions provided +# by Bash, Ksh, etc; in particular arrays are avoided. +# +# The "traditional" practice of packing multiple parameters into a +# space-separated string is a well documented source of bugs and security +# problems, so this is (mostly) avoided, by progressively accumulating +# options in "$@", and eventually passing that to Java. +# +# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, +# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; +# see the in-line comments for details. +# +# There are tweaks for specific operating systems such as AIX, CygWin, +# Darwin, MinGW, and NonStop. +# +# (3) This script is generated from the Groovy template +# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# within the Gradle project. +# +# You can find Gradle at https://github.com/gradle/gradle/. +# ############################################################################## # Attempt to set APP_HOME + # Resolve links: $0 may be a link -PRG="$0" -# Need this for relative symlinks. 
-while [ -h "$PRG" ] ; do - ls=`ls -ld "$PRG"` - link=`expr "$ls" : '.*-> \(.*\)$'` - if expr "$link" : '/.*' > /dev/null; then - PRG="$link" - else - PRG=`dirname "$PRG"`"/$link" - fi +app_path=$0 + +# Need this for daisy-chained symlinks. +while + APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path + [ -h "$app_path" ] +do + ls=$( ls -ld "$app_path" ) + link=${ls#*' -> '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac done -SAVED="`pwd`" -cd "`dirname \"$PRG\"`/" >/dev/null -APP_HOME="`pwd -P`" -cd "$SAVED" >/dev/null + +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit APP_NAME="Gradle" -APP_BASE_NAME=`basename "$0"` +APP_BASE_NAME=${0##*/} # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' # Use the maximum available, or set MAX_FD != -1 to use that value. -MAX_FD="maximum" +MAX_FD=maximum warn () { echo "$*" -} +} >&2 die () { echo echo "$*" echo exit 1 -} +} >&2 # OS specific support (must be 'true' or 'false'). cygwin=false msys=false darwin=false nonstop=false -case "`uname`" in - CYGWIN* ) - cygwin=true - ;; - Darwin* ) - darwin=true - ;; - MINGW* ) - msys=true - ;; - NONSTOP* ) - nonstop=true - ;; +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; esac CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + # Determine the Java command to use to start the JVM. if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then # IBM's JDK on AIX uses strange locations for the executables - JAVACMD="$JAVA_HOME/jre/sh/java" + JAVACMD=$JAVA_HOME/jre/sh/java else - JAVACMD="$JAVA_HOME/bin/java" + JAVACMD=$JAVA_HOME/bin/java fi if [ ! 
-x "$JAVACMD" ] ; then die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME @@ -97,7 +132,7 @@ Please set the JAVA_HOME variable in your environment to match the location of your Java installation." fi else - JAVACMD="java" + JAVACMD=java which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. Please set the JAVA_HOME variable in your environment to match the @@ -105,79 +140,95 @@ location of your Java installation." fi # Increase the maximum file descriptors if we can. -if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then - MAX_FD_LIMIT=`ulimit -H -n` - if [ $? -eq 0 ] ; then - if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then - MAX_FD="$MAX_FD_LIMIT" - fi - ulimit -n $MAX_FD - if [ $? -ne 0 ] ; then - warn "Could not set maximum file descriptor limit: $MAX_FD" - fi - else - warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" - fi +if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then + case $MAX_FD in #( + max*) + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac fi -# For Darwin, add options to specify how the application appears in the dock -if $darwin; then - GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" -fi +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 
# For Cygwin or MSYS, switch paths to Windows format before running java -if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then - APP_HOME=`cygpath --path --mixed "$APP_HOME"` - CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` - JAVACMD=`cygpath --unix "$JAVACMD"` - - # We build the pattern for arguments to be converted via cygpath - ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` - SEP="" - for dir in $ROOTDIRSRAW ; do - ROOTDIRS="$ROOTDIRS$SEP$dir" - SEP="|" - done - OURCYGPATTERN="(^($ROOTDIRS))" - # Add a user-defined pattern to the cygpath arguments - if [ "$GRADLE_CYGPATTERN" != "" ] ; then - OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" - fi +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) + # Now convert the arguments - kludge to limit ourselves to /bin/sh - i=0 - for arg in "$@" ; do - CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` - CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option - - if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition - eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` - else - eval `echo args$i`="\"$arg\"" + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) fi - i=`expr $i + 1` + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. 
+ shift # remove old arg + set -- "$@" "$arg" # push replacement arg done - case $i in - 0) set -- ;; - 1) set -- "$args0" ;; - 2) set -- "$args0" "$args1" ;; - 3) set -- "$args0" "$args1" "$args2" ;; - 4) set -- "$args0" "$args1" "$args2" "$args3" ;; - 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; - 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; - 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; - 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; - 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; - esac fi -# Escape application args -save () { - for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done - echo " " -} -APP_ARGS=`save "$@"` +# Collect all arguments for the java command; +# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of +# shell script including quotes and variable substitutions, so put them in +# double quotes to make sure that they get re-expanded; and +# * put everything else in single quotes, so that it's not re-expanded. + +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. +# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. 
+# -# Collect all arguments for the java command, following the shell quoting and substitution rules -eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' exec "$JAVACMD" "$@" diff --git a/gradlew.bat b/gradlew.bat index 9109989e..ac1b06f9 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -40,7 +40,7 @@ if defined JAVA_HOME goto findJavaFromJavaHome set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto init +if "%ERRORLEVEL%" == "0" goto execute echo. echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. @@ -54,7 +54,7 @@ goto fail set JAVA_HOME=%JAVA_HOME:"=% set JAVA_EXE=%JAVA_HOME%/bin/java.exe -if exist "%JAVA_EXE%" goto init +if exist "%JAVA_EXE%" goto execute echo. echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% @@ -64,28 +64,14 @@ echo location of your Java installation. goto fail -:init -@rem Get command-line arguments, handling Windows variants - -if not "%OS%" == "Windows_NT" goto win9xME_args - -:win9xME_args -@rem Slurp the command line arguments. 
-set CMD_LINE_ARGS= -set _SKIP=2 - -:win9xME_args_slurp -if "x%~1" == "x" goto execute - -set CMD_LINE_ARGS=%* - :execute @rem Setup the command line set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + @rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* :end @rem End local scope for the variables with windows NT shell diff --git a/kubernetes/phee-bulk-processor.yml b/kubernetes/phee-bulk-processor.yml index 88816cdd..d9f4683a 100644 --- a/kubernetes/phee-bulk-processor.yml +++ b/kubernetes/phee-bulk-processor.yml @@ -44,4 +44,4 @@ spec: selector: app: ph-ee-bulk-processor sessionAffinity: None - type: LoadBalancer \ No newline at end of file + type: LoadBalancer diff --git a/src/.DS_Store b/src/.DS_Store deleted file mode 100644 index eeb17bb1..00000000 Binary files a/src/.DS_Store and /dev/null differ diff --git a/src/main/.DS_Store b/src/main/.DS_Store deleted file mode 100644 index 6b3d8da9..00000000 Binary files a/src/main/.DS_Store and /dev/null differ diff --git a/src/main/java/.DS_Store b/src/main/java/.DS_Store deleted file mode 100644 index 298f56fa..00000000 Binary files a/src/main/java/.DS_Store and /dev/null differ diff --git a/src/main/java/org/.DS_Store b/src/main/java/org/.DS_Store deleted file mode 100644 index 57e5982b..00000000 Binary files a/src/main/java/org/.DS_Store and /dev/null differ diff --git a/src/main/java/org/mifos/.DS_Store b/src/main/java/org/mifos/.DS_Store deleted file mode 100644 index 83e9905c..00000000 Binary files a/src/main/java/org/mifos/.DS_Store and /dev/null differ diff --git a/src/main/java/org/mifos/processor/BulkProcessorApplication.java b/src/main/java/org/mifos/processor/BulkProcessorApplication.java index 
c523eee4..79f1e958 100644 --- a/src/main/java/org/mifos/processor/BulkProcessorApplication.java +++ b/src/main/java/org/mifos/processor/BulkProcessorApplication.java @@ -7,11 +7,16 @@ import com.fasterxml.jackson.dataformat.csv.CsvMapper; import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; import org.apache.camel.Processor; +import org.mifos.connector.common.interceptor.annotation.EnableJsonWebSignature; +import org.mifos.processor.bulk.api.ApiOriginFilter; +import org.mifos.processor.bulk.camel.config.HttpClientConfigurerTrustAllCACerts; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.web.servlet.FilterRegistrationBean; import org.springframework.context.annotation.Bean; @SpringBootApplication +@EnableJsonWebSignature public class BulkProcessorApplication { public static void main(String[] args) { @@ -23,8 +28,7 @@ public ObjectMapper objectMapper() { ObjectMapper objectMapper = new ObjectMapper(); objectMapper.registerModule(new JavaTimeModule()); objectMapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false); - return objectMapper - .setSerializationInclusion(JsonInclude.Include.NON_NULL) + return objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL) .configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true) .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); } @@ -39,4 +43,19 @@ public CsvMapper csvMapper() { return new CsvMapper(); } + @Bean + public HttpClientConfigurerTrustAllCACerts httpClientConfigurer() { + return new HttpClientConfigurerTrustAllCACerts(); + } + + @Bean + public FilterRegistrationBean apiOriginFilter() { + FilterRegistrationBean registration = new FilterRegistrationBean<>(); + registration.setFilter(new ApiOriginFilter()); + registration.addUrlPatterns("/**"); + registration.setName("apiOriginFilter"); + registration.setOrder(Integer.MIN_VALUE + 1); + return 
registration; + } + } diff --git a/src/main/java/org/mifos/processor/bulk/ConfigurationValidator.java b/src/main/java/org/mifos/processor/bulk/ConfigurationValidator.java new file mode 100644 index 00000000..516d3227 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/ConfigurationValidator.java @@ -0,0 +1,107 @@ +package org.mifos.processor.bulk; + +import java.lang.reflect.Field; +import java.util.ArrayList; +import java.util.List; +import javax.annotation.PostConstruct; +import org.mifos.processor.bulk.format.Standard; +import org.mifos.processor.bulk.schema.Transaction; +import org.mifos.processor.bulk.zeebe.worker.WorkerConfig; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class ConfigurationValidator { + + public Logger logger = LoggerFactory.getLogger(this.getClass()); + + @Value("${config.ordering.field}") + private String orderingField; + + @Value("${config.completion-threshold-check.completion-threshold}") + private int completionRate; + + @Value("${config.completion-threshold-check.max-retry}") + private int maxThresholdCheckRetry; + + @Value("${config.formatting.standard}") + private String standard; + + @Autowired + private WorkerConfig workerConfig; + + @PostConstruct + private void validate() { + if (workerConfig.isOrderingWorkerEnabled) { + validateOrderingConfig(); + } + if (workerConfig.isCompletionThresholdCheckEnabled) { + validateCompletionThresholdConfig(); + validateMaxRetryFromThresholdCheck(); + } + if (workerConfig.isFormattingWorkerEnabled) { + validateFormattingStandard(); + } + } + + private void validateMaxRetryFromThresholdCheck() { + if (maxThresholdCheckRetry <= 0) { + logger.error("Invalid maxThresholdCheckRetry count set. 
Needs to be +ve integer"); + throw new ConfigurationValidationException("Invalid maxThresholdCheckRetry count set. Needs to be +ve integer"); + } + } + + // validates the standard to be used for formatting + private void validateFormattingStandard() { + String std = this.standard.toUpperCase(); + try { + Standard standardEnum = Standard.valueOf(std); + logger.info("Configured formatting standard as >> {}", standardEnum.name()); + return; + } catch (Exception e) { + logger.debug(e.getMessage()); + } + List possibleStandards = new ArrayList<>(); + for (Field f : Standard.class.getFields()) { + possibleStandards.add(f.getName()); + } + throw new ConfigurationValidationException( + "Invalid standard configured for formatting data. Possible values are [" + String.join(",", possibleStandards) + "]"); + } + + // validates the ordering configuration + private void validateOrderingConfig() { + List possibleOrderingFields = new ArrayList<>(); + + for (Field field : Transaction.class.getDeclaredFields()) { + possibleOrderingFields.add(field.getName()); + } + + if (!possibleOrderingFields.contains(orderingField)) { + throw new ConfigurationValidationException( + "Invalid ordering field, possible values are [" + String.join(",", possibleOrderingFields) + "]"); + } + } + + // validates the success threshold related configuration + private void validateCompletionThresholdConfig() { + if (completionRate <= 0 || completionRate > 100) { + throw new ConfigurationValidationException("Invalid completion threshold value configured (value=" + completionRate + ")."); + } + + if (completionRate < 50) { + logger.warn("It is advised to set the completion threshold greater than 50. 
Currently configured as {}", completionRate); + } + } + + // this exception is thrown when unexpected application config is set, and can't pass the ConfigurationValidator + public static class ConfigurationValidationException extends RuntimeException { + + ConfigurationValidationException(String message) { + super(message); + } + } +} diff --git a/src/main/java/org/mifos/processor/bulk/HealthCheck.java b/src/main/java/org/mifos/processor/bulk/HealthCheck.java index 9ecbe2a4..36285d18 100644 --- a/src/main/java/org/mifos/processor/bulk/HealthCheck.java +++ b/src/main/java/org/mifos/processor/bulk/HealthCheck.java @@ -1,14 +1,18 @@ package org.mifos.processor.bulk; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.BATCH_ID; + import com.fasterxml.jackson.databind.MappingIterator; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.csv.CsvMapper; import com.fasterxml.jackson.dataformat.csv.CsvSchema; +import java.io.InputStream; +import java.util.UUID; import org.apache.camel.Exchange; import org.apache.camel.LoggingLevel; import org.apache.camel.builder.RouteBuilder; import org.mifos.processor.bulk.file.FileTransferService; -import org.mifos.processor.bulk.schema.Transaction; +import org.mifos.processor.bulk.schema.TransactionOlder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -17,8 +21,6 @@ import org.springframework.kafka.core.KafkaTemplate; import org.springframework.stereotype.Component; -import java.util.UUID; - @Component public class HealthCheck extends RouteBuilder { @@ -51,34 +53,31 @@ public class HealthCheck extends RouteBuilder { @Override public void configure() { - from("rest:GET:/") - .setHeader(Exchange.HTTP_RESPONSE_CODE, constant(200)) - .setBody(constant("")); - - from("rest:GET:/channel/bulk/transfer/{fileName}") - .id("transfer-details") - .log(LoggingLevel.INFO, "## CHANNEL -> inbound bulk transfer request with 
${header.fileName}") - .process(exchange -> { + from("rest:GET:/").setHeader(Exchange.HTTP_RESPONSE_CODE, constant(200)).setBody(constant("")); + + from("rest:GET:/channel/bulk/transfer/{fileName}").id("transfer-details") + .log(LoggingLevel.INFO, "## CHANNEL -> inbound bulk transfer request with ${header.fileName}").process(exchange -> { String fileName = exchange.getIn().getHeader("fileName", String.class); String batchId = UUID.randomUUID().toString(); + exchange.setProperty(BATCH_ID, batchId); // TODO: How to get sender information? Hard coded in Channel connector? - byte[] csvFile = fileTransferService.downloadFile(fileName, bucketName); + InputStream csvFileInputStream = fileTransferService.streamFile(fileName, bucketName); CsvSchema schema = CsvSchema.emptySchema().withHeader(); - MappingIterator readValues = csvMapper.readerWithSchemaFor(Transaction.class).with(schema).readValues(csvFile); + MappingIterator readValues = csvMapper.readerWithSchemaFor(TransactionOlder.class).with(schema) + .readValues(csvFileInputStream); while (readValues.hasNext()) { - Transaction current = readValues.next(); + TransactionOlder current = readValues.next(); current.setBatchId(batchId); - System.out.println(objectMapper.writeValueAsString(current)); - if (current.getPayment_mode().equals("gsma")) + logger.info("Writing string in kafka {}", objectMapper.writeValueAsString(current)); + if (current.getPaymentMode().equals("gsma") || current.getPaymentMode().equals("afrimoney")) { kafkaTemplate.send(gsmaTopicName, objectMapper.writeValueAsString(current)); - else if (current.getPayment_mode().equals("sclb")) + } else if (current.getPaymentMode().equals("sclb")) { kafkaTemplate.send(slcbTopicName, objectMapper.writeValueAsString(current)); + } } - }) - .setHeader(Exchange.HTTP_RESPONSE_CODE, constant(200)) - .setBody(constant("")); + }).setHeader(Exchange.HTTP_RESPONSE_CODE, constant(200)).setBody(exchange -> exchange.getProperty(BATCH_ID)); } -} \ No newline at end of file +} 
diff --git a/src/main/java/org/mifos/processor/bulk/OperationsAppConfig.java b/src/main/java/org/mifos/processor/bulk/OperationsAppConfig.java new file mode 100644 index 00000000..4c754802 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/OperationsAppConfig.java @@ -0,0 +1,46 @@ +package org.mifos.processor.bulk; + +import javax.annotation.PostConstruct; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class OperationsAppConfig { + + @Value("${operations-app.contactpoint}") + public String operationAppContactPoint; + + @Value("${operations-app.endpoints.batch-transaction}") + public String batchTransactionEndpoint; + + @Value("${operations-app.endpoints.batch-summary}") + public String batchSummaryEndpoint; + + @Value("${operations-app.endpoints.batch-aggregate}") + public String batchAggregateEndpoint; + + @Value("${operations-app.endpoints.auth}") + public String authEndpoint; + + @Value("${operations-app.username}") + public String username; + + @Value("${operations-app.password}") + public String password; + + public String batchTransactionUrl; + + public String batchSummaryUrl; + + public String batchAggregateUrl; + + public String authUrl; + + @PostConstruct + private void setup() { + batchTransactionUrl = operationAppContactPoint + batchTransactionEndpoint; + batchSummaryUrl = operationAppContactPoint + batchSummaryEndpoint; + authUrl = operationAppContactPoint + authEndpoint; + batchAggregateUrl = operationAppContactPoint + batchAggregateEndpoint; + } +} diff --git a/src/main/java/org/mifos/processor/bulk/api/ApiOriginFilter.java b/src/main/java/org/mifos/processor/bulk/api/ApiOriginFilter.java new file mode 100644 index 00000000..4137a6cf --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/api/ApiOriginFilter.java @@ -0,0 +1,27 @@ +package org.mifos.processor.bulk.api; + +import static 
org.mifos.processor.bulk.camel.config.CamelProperties.HEADER_PLATFORM_TENANT_ID; + +import java.io.IOException; +import javax.servlet.FilterChain; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import javax.servlet.http.HttpServletRequest; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.web.filter.GenericFilterBean; + +public class ApiOriginFilter extends GenericFilterBean { + + private Logger logger = LoggerFactory.getLogger(this.getClass()); + + @Override + public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { + HttpServletRequest req = (HttpServletRequest) request; + String tenant = req.getHeader("" + HEADER_PLATFORM_TENANT_ID); + logger.debug("Tenant Name is : {}", tenant); + logger.info("Client IP Address: {}", req.getRemoteHost()); + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/api/CallbackController.java b/src/main/java/org/mifos/processor/bulk/api/CallbackController.java new file mode 100644 index 00000000..8dab4d5b --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/api/CallbackController.java @@ -0,0 +1,67 @@ +package org.mifos.processor.bulk.api; + +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.APPROVED_AMOUNT; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.AUTHORIZATION_FAIL_REASON; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.AUTHORIZATION_RESPONSE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.AUTHORIZATION_STATUS; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.AUTHORIZATION_SUCCESSFUL; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CLIENT_CORRELATION_ID; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.camunda.zeebe.client.ZeebeClient; +import java.time.Duration; +import 
java.util.HashMap; +import java.util.Map; +import org.mifos.processor.bulk.schema.AuthorizationResponse; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RestController; + +@RestController +public class CallbackController { + + @Autowired + private ZeebeClient zeebeClient; + + @Autowired + ObjectMapper objectMapper; + + protected Logger logger = LoggerFactory.getLogger(this.getClass()); + + private static final String EXPECTED_AUTH_STATUS = "Y"; + + @PostMapping("/authorization/callback") + public ResponseEntity handleAuthorizationCallback(@RequestBody AuthorizationResponse authResponse) + throws JsonProcessingException { + logger.info("Callback received"); + logger.debug("Auth response: {}", objectMapper.writeValueAsString(authResponse)); + Map variables = new HashMap<>(); + + boolean isAuthorizationSuccessful = EXPECTED_AUTH_STATUS.equals(authResponse.getStatus()); + variables.put(AUTHORIZATION_SUCCESSFUL, isAuthorizationSuccessful); + variables.put(CLIENT_CORRELATION_ID, authResponse.getClientCorrelationId()); + variables.put(AUTHORIZATION_STATUS, authResponse.getStatus()); + variables.put(AUTHORIZATION_FAIL_REASON, authResponse.getReason()); + + if (!isAuthorizationSuccessful) { + variables.put(APPROVED_AMOUNT, 0); + } + + logger.info("Is auth successful: {}", isAuthorizationSuccessful); + + if (zeebeClient != null) { + zeebeClient.newPublishMessageCommand().messageName(AUTHORIZATION_RESPONSE).correlationKey(authResponse.getClientCorrelationId()) + .timeToLive(Duration.ofMillis(500000)).variables(variables).send(); + logger.debug("Published zeebe message event {}", AUTHORIZATION_RESPONSE); + 
zeebeClient.newPublishMessageCommand().messageName(AUTHORIZATION_RESPONSE).correlationKey(authResponse.getClientCorrelationId()) + .timeToLive(Duration.ofMillis(500000)).variables(variables).send(); + } + return ResponseEntity.ok().build(); + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/api/definition/BatchTransactions.java b/src/main/java/org/mifos/processor/bulk/api/definition/BatchTransactions.java new file mode 100644 index 00000000..e7fb0a02 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/api/definition/BatchTransactions.java @@ -0,0 +1,40 @@ +package org.mifos.processor.bulk.api.definition; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.CALLBACK; +import static org.mifos.processor.bulk.camel.config.CamelProperties.HEADER_CLIENT_CORRELATION_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.HEADER_PLATFORM_TENANT_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.HEADER_PROGRAM_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.HEADER_REGISTERING_INSTITUTE_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.PAYEE_DFSP_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FILE_NAME; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.HEADER_TYPE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PURPOSE; + +import java.io.IOException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import org.springframework.http.MediaType; +import org.springframework.util.MultiValueMap; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestHeader; +import org.springframework.web.bind.annotation.RequestParam; + +public interface BatchTransactions { + + @PostMapping(value = "/batchtransactions", produces = "application/json") + String batchTransactions(HttpServletRequest httpServletRequest, 
HttpServletResponse httpServletResponse, + @RequestHeader(value = HEADER_CLIENT_CORRELATION_ID) String requestId, + @RequestHeader(value = FILE_NAME, required = false) String fileName, @RequestHeader(value = PURPOSE) String purpose, + @RequestHeader(value = HEADER_TYPE) String type, @RequestHeader(value = HEADER_PLATFORM_TENANT_ID) String tenant, + @RequestHeader(value = HEADER_REGISTERING_INSTITUTE_ID, required = false) String registeringInstitutionId, + @RequestHeader(value = HEADER_PROGRAM_ID, required = false) String programId, + @RequestHeader(value = CALLBACK, required = false) String callbackUrl, + @RequestHeader(value = PAYEE_DFSP_ID, required = false) String payeeDfspId) throws IOException; + + // NEW execution reconciliation endpoint + @PostMapping(value = "/batchtransactions/execution", consumes = MediaType.APPLICATION_FORM_URLENCODED_VALUE, produces = MediaType.APPLICATION_JSON_VALUE) + String updateBatchExecution(@RequestParam MultiValueMap executionPayload, + @RequestHeader(value = HEADER_PLATFORM_TENANT_ID) String tenant, + @RequestHeader(value = HEADER_CLIENT_CORRELATION_ID, required = false) String requestId); + +} diff --git a/src/main/java/org/mifos/processor/bulk/api/definition/BulkTransfer.java b/src/main/java/org/mifos/processor/bulk/api/definition/BulkTransfer.java new file mode 100644 index 00000000..d081c28d --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/api/definition/BulkTransfer.java @@ -0,0 +1,21 @@ +package org.mifos.processor.bulk.api.definition; + +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FILE_NAME; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PURPOSE; + +import java.io.IOException; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestHeader; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.multipart.MultipartFile; + +public interface BulkTransfer { + + @Deprecated + + 
@PostMapping(value = "/bulk/transfer/{requestId}/{fileName}", produces = "application/json") + String bulkTransfer(@RequestHeader(value = "X-CorrelationID", required = false) String requestId, + @RequestParam("data") MultipartFile file, @RequestHeader(value = FILE_NAME, required = false) String fileName, + @RequestHeader(value = PURPOSE, required = false) String purpose, @RequestHeader(value = "Type", required = false) String type, + @RequestHeader(value = "Platform-TenantId") String tenant) throws IOException; +} diff --git a/src/main/java/org/mifos/processor/bulk/api/definition/Simulate.java b/src/main/java/org/mifos/processor/bulk/api/definition/Simulate.java new file mode 100644 index 00000000..91406d60 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/api/definition/Simulate.java @@ -0,0 +1,13 @@ +package org.mifos.processor.bulk.api.definition; + +import java.io.IOException; +import javax.servlet.http.HttpServletResponse; +import org.springframework.web.bind.annotation.PostMapping; + +// from("rest:post:/simulate").log("Reached Simulation"); +public interface Simulate { + + @PostMapping(value = "/simulate", produces = "application/json") + void simulate(HttpServletResponse httpServletResponse) throws IOException; + +} diff --git a/src/main/java/org/mifos/processor/bulk/api/implementation/BatchTransactionsController.java b/src/main/java/org/mifos/processor/bulk/api/implementation/BatchTransactionsController.java new file mode 100644 index 00000000..2d2ead74 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/api/implementation/BatchTransactionsController.java @@ -0,0 +1,241 @@ +package org.mifos.processor.bulk.api.implementation; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.CALLBACK; +import static org.mifos.processor.bulk.camel.config.CamelProperties.HEADER_PROGRAM_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.HEADER_REGISTERING_INSTITUTE_ID; +import static 
org.mifos.processor.bulk.camel.config.CamelProperties.PAYEE_DFSP_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FILE_NAME; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.HEADER_CLIENT_CORRELATION_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.HEADER_PLATFORM_TENANT_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.HEADER_TYPE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PURPOSE; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.csv.CsvMapper; +import io.camunda.zeebe.client.api.command.ClientStatusException; +import io.grpc.Status; +import java.nio.charset.Charset; +import java.util.List; +import java.util.Optional; +import java.util.UUID; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; +import org.apache.camel.Exchange; +import org.apache.camel.ProducerTemplate; +import org.apache.commons.io.IOUtils; +import org.json.JSONObject; +import org.mifos.connector.common.interceptor.JWSUtil; +import org.mifos.processor.bulk.api.definition.BatchTransactions; +import org.mifos.processor.bulk.file.FileStorageService; +import org.mifos.processor.bulk.format.RestRequestConvertor; +import org.mifos.processor.bulk.schema.BatchRequestDTO; +import org.mifos.processor.bulk.schema.CamelApiResponse; +import org.mifos.processor.bulk.schema.Transaction; +import org.mifos.processor.bulk.utility.CsvWriter; +import org.mifos.processor.bulk.utility.Headers; +import org.mifos.processor.bulk.utility.SpringWrapperUtil; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.util.MultiValueMap; +import org.springframework.web.bind.annotation.ExceptionHandler; +import 
org.springframework.web.bind.annotation.RestController; +import org.springframework.web.multipart.MultipartException; + +@Slf4j +@RestController +public class BatchTransactionsController implements BatchTransactions { + + @Autowired + private ProducerTemplate producerTemplate; + + @Autowired + ObjectMapper objectMapper; + + @Autowired + FileStorageService fileStorageService; + + @Autowired + RestRequestConvertor restRequestConvertor; + + @Value("#{'${tenants}'.split(',')}") + protected List tenants; + @Autowired + private CsvMapper csvMapper; + + @SneakyThrows + @Override + public String batchTransactions(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, String requestId, + String fileName, String purpose, String type, String tenant, String registeringInstitutionId, String programId, + String callbackUrl, String payeeDfspId) { + + log.info("Inside api logic"); + Headers.HeaderBuilder headerBuilder = new Headers.HeaderBuilder().addHeader(HEADER_CLIENT_CORRELATION_ID, requestId) + .addHeader(PURPOSE, purpose).addHeader(HEADER_TYPE, type).addHeader(HEADER_PLATFORM_TENANT_ID, tenant) + .addHeader(HEADER_REGISTERING_INSTITUTE_ID, registeringInstitutionId).addHeader(HEADER_PROGRAM_ID, programId) + .addHeader(CALLBACK, callbackUrl).addHeader(PAYEE_DFSP_ID, payeeDfspId); + + Optional validationResponse = isValidRequest(httpServletRequest, fileName, type); + if (validationResponse.isPresent()) { + httpServletResponse.setStatus(httpServletResponse.SC_BAD_REQUEST); + return validationResponse.get(); + } + + if (JWSUtil.isMultipartRequest(httpServletRequest)) { + log.info("This is file based request"); + // Use Spring's MultipartHttpServletRequest instead of JWSUtil.parseFormData() + org.springframework.web.multipart.MultipartHttpServletRequest multipartRequest = (org.springframework.web.multipart.MultipartHttpServletRequest) httpServletRequest; + org.springframework.web.multipart.MultipartFile multipartFile = multipartRequest.getFile("data"); + + 
log.info("multipartFile is null: {}", multipartFile == null); + if (multipartFile != null) { + log.info("multipartFile.isEmpty(): {}", multipartFile.isEmpty()); + log.info("multipartFile.getSize(): {}", multipartFile.getSize()); + log.info("multipartFile.getOriginalFilename(): {}", multipartFile.getOriginalFilename()); + } + + if (multipartFile == null || multipartFile.isEmpty()) { + log.error("No file data found in multipart request"); + httpServletResponse.setStatus(HttpServletResponse.SC_BAD_REQUEST); + return getErrorResponse("No file data", "No file was uploaded with the request", HttpServletResponse.SC_BAD_REQUEST); + } + + String localFileName = fileStorageService.save(multipartFile); + Headers headers = headerBuilder.addHeader(FILE_NAME, localFileName).build(); + log.info("Headers passed: {}", headers.getHeaders()); + + CamelApiResponse response = sendRequestToCamel(headers); + httpServletResponse.setStatus(response.getStatus()); + return response.getBody(); + } else { + log.info("This is json based request"); + String jsonString = IOUtils.toString(httpServletRequest.getInputStream(), Charset.defaultCharset()); + List batchRequestDTOList = objectMapper.readValue(jsonString, new TypeReference<>() {}); + List transactionList = restRequestConvertor.convertListFrom(batchRequestDTOList); + + String localFileName = UUID.randomUUID() + ".csv"; + CsvWriter.writeToCsv(transactionList, Transaction.class, csvMapper, true, localFileName); + Headers headers = headerBuilder.addHeader(HEADER_TYPE, "csv").addHeader(FILE_NAME, localFileName).build(); + + CamelApiResponse response = sendRequestToCamel(headers); + httpServletResponse.setStatus(response.getStatus()); + return response.getBody(); + } + + } + + @ExceptionHandler({ MultipartException.class }) + public String handleMultipartException(HttpServletResponse httpServletResponse) { + httpServletResponse.setStatus(httpServletResponse.SC_BAD_REQUEST); + return getErrorResponse("File not uploaded", "There was no fie 
uploaded with the request. " + "Please upload a file and try again.", + 400); + } + + private CamelApiResponse sendRequestToCamel(Headers headers) { + Exchange exchange = SpringWrapperUtil.getDefaultWrappedExchange(producerTemplate.getCamelContext(), headers); + exchange = producerTemplate.send("direct:post-batch-transactions", exchange); + checkAndThrowClientStatusException(exchange); + int statusCode = exchange.getIn().getHeader(Exchange.HTTP_RESPONSE_CODE, Integer.class); + String body = exchange.getIn().getBody(String.class); + return new CamelApiResponse(body, statusCode); + } + + private String getErrorResponse(String information, String description, int code) { + JSONObject json = new JSONObject(); + json.put("errorInformation", "File not uploaded"); + json.put("errorDescription", "There was no fie uploaded with the request. " + "Please upload a file and try again."); + json.put("errorCode", code); + return json.toString(); + } + + // validates the request header, and return errorJson string if the request is invalid else an empty optional + private Optional isValidRequest(HttpServletRequest httpServletRequest, String fileName, String type) { + + Optional response = Optional.empty(); + if ((JWSUtil.isMultipartRequest(httpServletRequest) && !type.equalsIgnoreCase("csv")) + || (!JWSUtil.isMultipartRequest(httpServletRequest) && !type.equalsIgnoreCase("raw"))) { + String errorJson = getErrorResponse("Type mismatch", + "The value of the header \"" + HEADER_TYPE + "\" doesn't match with the request content-type", 400); + response = Optional.of(errorJson); + + } + if (JWSUtil.isMultipartRequest(httpServletRequest) && fileName.isEmpty()) { + String errorJson = getErrorResponse("Header can't be empty", + "If the request is of type csv, the header \"" + FILE_NAME + "\"can't be empty", 400); + response = Optional.of(errorJson); + } + if (!type.equalsIgnoreCase("raw") && !type.equalsIgnoreCase("csv")) { + String errorJson = getErrorResponse("Invalid TYPE header value 
passed", + "The value of the header \"" + HEADER_TYPE + "\" can be \"[raw,csv]\" but is " + type, 400); + response = Optional.of(errorJson); + } + return response; + } + + private void checkAndThrowClientStatusException(Exchange exchange) { + Exception cause = exchange.getProperty(Exchange.EXCEPTION_CAUGHT, Exception.class); + if (cause instanceof ClientStatusException) { + throw new ClientStatusException(Status.FAILED_PRECONDITION, cause); + } + } + + @Override + public String updateBatchExecution(MultiValueMap executionPayload, String tenant, String requestId) { + + log.info("## BATCH EXECUTION UPDATE - Received execution update request"); + log.info("## Tenant: {}", tenant); + log.info("## Request ID: {}", requestId); + log.info("## Payload keys: {}", executionPayload.keySet()); + log.info("## Payload size: {}", executionPayload.size()); + + // Log all payload data for debugging + executionPayload.forEach((key, value) -> { + log.info("## Execution payload - {}: {}", key, value); + }); + + try { + // Extract transaction results from payload + // Expected format: status, transactionId, completedTimestamp, etc. 
+ String status = getPayloadValue(executionPayload, "status"); + String transactionId = getPayloadValue(executionPayload, "transactionId"); + String batchId = getPayloadValue(executionPayload, "batchId"); + String subBatchId = getPayloadValue(executionPayload, "subBatchId"); + + log.info("## Processing execution update - batchId: {}, subBatchId: {}, transactionId: {}, status: {}", batchId, subBatchId, + transactionId, status); + + // TODO: Update transaction status in database/Zeebe workflow + // For now, just acknowledge receipt + + JSONObject response = new JSONObject(); + response.put("message", "Batch execution update received successfully"); + response.put("requestId", requestId); + response.put("batchId", batchId); + response.put("transactionId", transactionId); + response.put("status", "ACCEPTED"); + + log.info("## BATCH EXECUTION UPDATE - Successfully processed update for transactionId: {}", transactionId); + + return response.toString(); + + } catch (Exception e) { + log.error("## BATCH EXECUTION UPDATE - Error processing execution update", e); + JSONObject errorResponse = new JSONObject(); + errorResponse.put("error", "Failed to process execution update"); + errorResponse.put("message", e.getMessage()); + errorResponse.put("requestId", requestId); + return errorResponse.toString(); + } + } + + private String getPayloadValue(MultiValueMap payload, String key) { + List values = payload.get(key); + if (values != null && !values.isEmpty()) { + Object value = values.get(0); + return value != null ? 
value.toString() : null; + } + return null; + } +} diff --git a/src/main/java/org/mifos/processor/bulk/api/implementation/BulkTransferController.java b/src/main/java/org/mifos/processor/bulk/api/implementation/BulkTransferController.java new file mode 100644 index 00000000..8a8d1e6e --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/api/implementation/BulkTransferController.java @@ -0,0 +1,47 @@ +package org.mifos.processor.bulk.api.implementation; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.HEADER_PLATFORM_TENANT_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FILE_NAME; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.HEADER_CLIENT_CORRELATION_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.HEADER_TYPE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PURPOSE; + +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.IOException; +import lombok.extern.slf4j.Slf4j; +import org.apache.camel.Exchange; +import org.apache.camel.ProducerTemplate; +import org.mifos.processor.bulk.api.definition.BulkTransfer; +import org.mifos.processor.bulk.file.FileStorageService; +import org.mifos.processor.bulk.utility.Headers; +import org.mifos.processor.bulk.utility.SpringWrapperUtil; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.RestController; +import org.springframework.web.multipart.MultipartFile; + +@Slf4j +@RestController +public class BulkTransferController implements BulkTransfer { + + @Autowired + private ProducerTemplate producerTemplate; + + @Autowired + ObjectMapper objectMapper; + + @Autowired + FileStorageService fileStorageService; + + @Override + public String bulkTransfer(String requestId, MultipartFile file, String fileName, String purpose, String type, String tenant) + throws IOException { + Headers headers = new Headers.HeaderBuilder().addHeader(HEADER_CLIENT_CORRELATION_ID, 
requestId).addHeader(PURPOSE, purpose) + .addHeader(FILE_NAME, fileName).addHeader(HEADER_TYPE, type).addHeader(HEADER_PLATFORM_TENANT_ID, tenant).build(); + Exchange exchange = SpringWrapperUtil.getDefaultWrappedExchange(producerTemplate.getCamelContext(), headers); + log.info("Inside bulkTransfer"); + log.info("file: {}", file); + fileStorageService.save(file); + producerTemplate.send("direct:post-bulk-transfer", exchange); + return exchange.getIn().getBody(String.class); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/api/implementation/SimulateApiController.java b/src/main/java/org/mifos/processor/bulk/api/implementation/SimulateApiController.java new file mode 100644 index 00000000..e535c914 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/api/implementation/SimulateApiController.java @@ -0,0 +1,14 @@ +package org.mifos.processor.bulk.api.implementation; + +import javax.servlet.http.HttpServletResponse; +import org.mifos.processor.bulk.api.definition.Simulate; +import org.springframework.web.bind.annotation.RestController; + +@RestController +public class SimulateApiController implements Simulate { + + @Override + public void simulate(HttpServletResponse httpServletResponse) { + httpServletResponse.setStatus(HttpServletResponse.SC_OK); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/config/CamelContextConfig.java b/src/main/java/org/mifos/processor/bulk/camel/config/CamelContextConfig.java index 348a0747..6366dd11 100644 --- a/src/main/java/org/mifos/processor/bulk/camel/config/CamelContextConfig.java +++ b/src/main/java/org/mifos/processor/bulk/camel/config/CamelContextConfig.java @@ -1,23 +1,31 @@ package org.mifos.processor.bulk.camel.config; +import java.util.HashMap; import org.apache.camel.CamelContext; +import org.apache.camel.component.http.HttpComponent; import org.apache.camel.spi.RestConfiguration; import org.apache.camel.spring.boot.CamelContextConfiguration; +import 
org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import java.util.HashMap; - @Configuration public class CamelContextConfig { @Value("${camel.server-port}") private int serverPort; + @Value("${camel.disable-ssl}") + private boolean disableSSL; + + @Autowired + private HttpClientConfigurerTrustAllCACerts httpClientConfigurerTrustAllCACerts; + @Bean CamelContextConfiguration contextConfiguration() { return new CamelContextConfiguration() { + @Override public void beforeApplicationStart(CamelContext camelContext) { camelContext.setTracing(false); @@ -25,6 +33,11 @@ public void beforeApplicationStart(CamelContext camelContext) { camelContext.setStreamCaching(true); camelContext.disableJMX(); + if (disableSSL) { + HttpComponent httpComponent = camelContext.getComponent("https", HttpComponent.class); + httpComponent.setHttpClientConfigurer(httpClientConfigurerTrustAllCACerts); + } + RestConfiguration rest = new RestConfiguration(); camelContext.setRestConfiguration(rest); rest.setComponent("undertow"); diff --git a/src/main/java/org/mifos/processor/bulk/camel/config/CamelProperties.java b/src/main/java/org/mifos/processor/bulk/camel/config/CamelProperties.java index 310ae182..f2339f64 100644 --- a/src/main/java/org/mifos/processor/bulk/camel/config/CamelProperties.java +++ b/src/main/java/org/mifos/processor/bulk/camel/config/CamelProperties.java @@ -1,9 +1,89 @@ package org.mifos.processor.bulk.camel.config; -public class CamelProperties { +public final class CamelProperties { private CamelProperties() {} public static final String AUTH_TYPE = "authType"; + public static final String IS_BATCH_READY = "isBatchReady"; // camel property to check if batch is ready for + // sampling + public static final String SERVER_FILE_NAME = "serverFileName"; + + public static final String LOCAL_FILE_PATH = 
"localFilePath"; + + public static final String LOCAL_FILE_PATH_LIST = "localFilePaths"; + + public static final String SUB_BATCH_FILE_ARRAY = "subBatchFileArray"; + + public static final String SUB_BATCH_COUNT = "subBatchCount"; + + public static final String SUB_BATCH_CREATED = "subBatchCreated"; + public static final String SUB_BATCH_DETAILS = "subBatchDetails"; + + public static final String SERVER_SUB_BATCH_FILE_NAME_ARRAY = "serverSubBatchFileName"; + + public static final String TRANSACTION_LIST = "transactionList"; + + public static final String TRANSACTION_LIST_LENGTH = "transactionListLength"; + + public static final String TRANSACTION_LIST_ELEMENT = "transactionListElement"; + + public static final String GSMA_CHANNEL_REQUEST = "gsmaChannelRequest"; + + public static final String OVERRIDE_HEADER = "overrideHeader"; + + public static final String TENANT_NAME = "tenantName"; + + public static final String FILE_1 = "file1"; + + public static final String FILE_2 = "file2"; + + public static final String OPS_APP_ACCESS_TOKEN = "opsAppAccessToken"; + + public static final String BATCH_STATUS_FAILED = "batchStatusFailed"; + + public static final String CALLBACK_RESPONSE_CODE = "responseCode"; + + public static final String BATCH_REQUEST_TYPE = "batchRequestType"; + + public static final String RESULT_TRANSACTION_LIST = "resultTransactionList"; + + public static final String ZEEBE_VARIABLE = "zeebeVariable"; + + public static final String EXTERNAL_ENDPOINT_FAILED = "extEndpointFailed"; + + public static final String EXTERNAL_ENDPOINT = "extEndpoint"; + + public static final String PAYLOAD_LIST = "payloadList"; + + public static final String IS_PAYMENT_MODE_VALID = "isPaymentModeValid"; + + public static final String PAYMENT_MODE_TYPE = "paymentModeType"; + + public static final String PAYLOAD = "payload"; + + public static final String BATCH_ID_HEADER = "X-BatchID"; + public static final String HOST = "externalApiCallHost"; + public static final String ENDPOINT 
= "externalApiCallEndpoint"; + public static final String CACHED_TRANSACTION_ID = "cachedTransactionId"; + public static final String PAYEE_IDENTITY = "payeeIdentity"; + public static final String PAYMENT_MODALITY = "paymentModality"; + public static final String PAYEE_PARTY_ID = "payeePartyId"; + public static final String PAYEE_PARTY_ID_TYPE = "payeePartyIdType"; + public static final String HEADER_REGISTERING_INSTITUTE_ID = "X-Registering-Institution-ID"; + public static final String HEADER_PROGRAM_ID = "X-Program-ID"; + public static final String REGISTERING_INSTITUTE_ID = "registeringInstituteId"; + public static final String PROGRAM_ID = "programId"; + public static final String IS_UPDATED = "isUpdated"; + public static final String HEADER_PLATFORM_TENANT_ID = "Platform-TenantId"; + public static final String HEADER_CLIENT_CORRELATION_ID = "X-CorrelationID"; + public static final String CLIENT_CORRELATION_ID = "clientCorrelationId"; + public static final String SUB_BATCH_ENTITY = "subBatchEntity"; + public static final String EVENT_TYPE = "eventType"; + public static final String DUPLICATE_TRANSACTION_LIST = "duplicateTransactionList"; + public static final String ORIGINAL_TRANSACTION_LIST = "originalTransactionList"; + public static final String CALLBACK = "X-CallbackURL"; + public static final String CONTENT_TYPE = "Content-Type"; + public static final String PAYEE_DFSP_ID = "X-PayeeDFSP-ID"; } diff --git a/src/main/java/org/mifos/processor/bulk/camel/config/HttpClientConfigurerTrustAllCACerts.java b/src/main/java/org/mifos/processor/bulk/camel/config/HttpClientConfigurerTrustAllCACerts.java new file mode 100644 index 00000000..bdc5464f --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/config/HttpClientConfigurerTrustAllCACerts.java @@ -0,0 +1,65 @@ +package org.mifos.processor.bulk.camel.config; + +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import 
java.security.cert.CertificateException; +import java.security.cert.X509Certificate; +import javax.net.ssl.HostnameVerifier; +import javax.net.ssl.SSLContext; +import org.apache.camel.component.http.HttpClientConfigurer; +import org.apache.http.config.Registry; +import org.apache.http.config.RegistryBuilder; +import org.apache.http.conn.socket.ConnectionSocketFactory; +import org.apache.http.conn.socket.PlainConnectionSocketFactory; +import org.apache.http.conn.ssl.NoopHostnameVerifier; +import org.apache.http.conn.ssl.SSLConnectionSocketFactory; +import org.apache.http.conn.ssl.TrustStrategy; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; +import org.apache.http.ssl.SSLContextBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class HttpClientConfigurerTrustAllCACerts implements HttpClientConfigurer { + + public Logger logger = LoggerFactory.getLogger(this.getClass()); + + public HttpClientConfigurerTrustAllCACerts() {} + + @Override + public void configureHttpClient(HttpClientBuilder clientBuilder) { + // setup a Trust Strategy that allows all certificates. + // + SSLContext sslContext = null; + try { + sslContext = new SSLContextBuilder().loadTrustMaterial(null, new TrustStrategy() { + + public boolean isTrusted(X509Certificate[] arg0, String arg1) throws CertificateException { + return true; + } + }).build(); + } catch (KeyManagementException | NoSuchAlgorithmException | KeyStoreException e) { + logger.debug(e.getMessage()); + } + clientBuilder.setSSLContext(sslContext); + // don't check Hostnames, either. 
+ // -- use SSLConnectionSocketFactory.getDefaultHostnameVerifier(), if you don't want to weaken + HostnameVerifier hostnameVerifier = NoopHostnameVerifier.INSTANCE; + // TD Deprecated HostnameVerifier hostnameVerifier = SSLConnectionSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER; + + // here's the special part: + // -- need to create an SSL Socket Factory, to use our weakened "trust strategy"; + // -- and create a Registry, to register it. + // + SSLConnectionSocketFactory sslSocketFactory = new SSLConnectionSocketFactory(sslContext, hostnameVerifier); + Registry socketFactoryRegistry = RegistryBuilder.create() + .register("http", PlainConnectionSocketFactory.getSocketFactory()).register("https", sslSocketFactory).build(); + + // now, we create connection-manager using our Registry. + // -- allows multi-threaded use + PoolingHttpClientConnectionManager connMgr = new PoolingHttpClientConnectionManager(socketFactoryRegistry); + clientBuilder.setConnectionManager(connMgr); + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/processor/DTOJsonConversionException.java b/src/main/java/org/mifos/processor/bulk/camel/processor/DTOJsonConversionException.java new file mode 100644 index 00000000..d07f4aa1 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/processor/DTOJsonConversionException.java @@ -0,0 +1,8 @@ +package org.mifos.processor.bulk.camel.processor; + +public class DTOJsonConversionException extends RuntimeException { + + public DTOJsonConversionException(Class dtoClass, String message, Throwable cause) { + super(message, cause); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/processor/GsmaApiPayload.java b/src/main/java/org/mifos/processor/bulk/camel/processor/GsmaApiPayload.java new file mode 100644 index 00000000..b35cfa8b --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/processor/GsmaApiPayload.java @@ -0,0 +1,34 @@ +package org.mifos.processor.bulk.camel.processor; + +import static 
org.mifos.processor.bulk.camel.config.CamelProperties.TRANSACTION_LIST_ELEMENT; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.util.function.Function; +import lombok.extern.slf4j.Slf4j; +import org.apache.camel.Exchange; +import org.mifos.connector.common.gsma.dto.GSMATransaction; +import org.mifos.processor.bulk.schema.Transaction; +import org.mifos.processor.bulk.utility.Utils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Component +@Slf4j +public class GsmaApiPayload implements Function { + + @Autowired + ObjectMapper objectMapper; + + @Override + public String apply(Exchange exchange) { + + Transaction transaction = exchange.getProperty(TRANSACTION_LIST_ELEMENT, Transaction.class); + GSMATransaction gsmaTransaction = Utils.convertTxnToGSMA(transaction); + try { + return objectMapper.writeValueAsString(gsmaTransaction); + } catch (JsonProcessingException e) { + throw new DTOJsonConversionException(GSMATransaction.class, "Unable to convert GSMATransaction to Json", e); + } + } +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/processor/MastercardApiPayload.java b/src/main/java/org/mifos/processor/bulk/camel/processor/MastercardApiPayload.java new file mode 100644 index 00000000..adf5c0d6 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/processor/MastercardApiPayload.java @@ -0,0 +1,31 @@ +package org.mifos.processor.bulk.camel.processor; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.TRANSACTION_LIST_ELEMENT; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.util.function.Function; +import org.apache.camel.Exchange; +import org.mifos.connector.common.channel.dto.TransactionChannelRequestDTO; +import org.mifos.processor.bulk.schema.Transaction; +import 
org.mifos.processor.bulk.utility.Utils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Component +public class MastercardApiPayload implements Function { + + @Autowired + ObjectMapper objectMapper; + + @Override + public String apply(Exchange exchange) { + Transaction transaction = exchange.getProperty(TRANSACTION_LIST_ELEMENT, Transaction.class); + TransactionChannelRequestDTO inboundTransferPayload = Utils.convertTxnToInboundTransferPayload(transaction); + try { + return objectMapper.writeValueAsString(inboundTransferPayload); + } catch (JsonProcessingException e) { + throw new DTOJsonConversionException(MastercardApiPayload.class, "Unable to convert MastercardPayload to JSON", e); + } + } +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/processor/MojaloopApiPayload.java b/src/main/java/org/mifos/processor/bulk/camel/processor/MojaloopApiPayload.java new file mode 100644 index 00000000..c3802c83 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/processor/MojaloopApiPayload.java @@ -0,0 +1,31 @@ +package org.mifos.processor.bulk.camel.processor; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.TRANSACTION_LIST_ELEMENT; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.util.function.Function; +import org.apache.camel.Exchange; +import org.mifos.connector.common.channel.dto.TransactionChannelRequestDTO; +import org.mifos.processor.bulk.schema.Transaction; +import org.mifos.processor.bulk.utility.Utils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Component +public class MojaloopApiPayload implements Function { + + @Autowired + ObjectMapper objectMapper; + + @Override + public String apply(Exchange exchange) { + Transaction transaction = exchange.getProperty(TRANSACTION_LIST_ELEMENT, 
Transaction.class); + TransactionChannelRequestDTO inboundTransferPayload = Utils.convertTxnToInboundTransferPayload(transaction); + try { + return objectMapper.writeValueAsString(inboundTransferPayload); + } catch (JsonProcessingException e) { + throw new DTOJsonConversionException(MojaloopApiPayload.class, "Unable to convert MojaloopPayload to JSON", e); + } + } +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/routes/AccountLookupCallbackRoute.java b/src/main/java/org/mifos/processor/bulk/camel/routes/AccountLookupCallbackRoute.java new file mode 100644 index 00000000..1232ef7e --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/routes/AccountLookupCallbackRoute.java @@ -0,0 +1,114 @@ +package org.mifos.processor.bulk.camel.routes; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.LOCAL_FILE_PATH; +import static org.mifos.processor.bulk.camel.config.CamelProperties.OVERRIDE_HEADER; +import static org.mifos.processor.bulk.camel.config.CamelProperties.RESULT_TRANSACTION_LIST; +import static org.mifos.processor.bulk.camel.config.CamelProperties.SERVER_FILE_NAME; +import static org.mifos.processor.bulk.camel.config.CamelProperties.TRANSACTION_LIST; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PARTY_LOOKUP_SUCCESSFUL_TRANSACTION_AMOUNT; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PARTY_LOOKUP_SUCCESSFUL_TRANSACTION_COUNT; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.RESULT_FILE; + +import io.camunda.zeebe.client.ZeebeClient; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicInteger; +import org.mifos.processor.bulk.schema.BatchAccountLookupResponseDTO; +import org.mifos.processor.bulk.schema.BeneficiaryDTO; +import org.mifos.processor.bulk.schema.Transaction; +import org.mifos.processor.bulk.schema.TransactionResult; +import 
org.mifos.processor.bulk.utility.Utils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Component +public class AccountLookupCallbackRoute extends BaseRouteBuilder { + + @Autowired + private ZeebeClient zeebeClient; + private Integer totalApprovedAmount; + private Integer totalApprovedCount; + + @Override + @SuppressWarnings("unchecked") + public void configure() throws Exception { + from("direct:accountLookupCallback").id("direct:accountLookupCallback") + .log("Starting route " + RouteId.ACCOUNT_LOOKUP_CALLBACK.name()).to("direct:download-file") + .to("direct:get-transaction-array").to("direct:batch-account-lookup-callback") + .process(exchange -> exchange.setProperty(OVERRIDE_HEADER, true)); + from("direct:batch-account-lookup-callback").id("direct:batch-account-lookup-callback").process(exchange -> { + String serverFileName = exchange.getProperty(SERVER_FILE_NAME, String.class); + String resultFile = String.format("Result_%s", serverFileName); + BatchAccountLookupResponseDTO batchAccountLookupCallback = objectMapper + .readValue(exchange.getProperty("batchAccountLookupCallback", String.class), BatchAccountLookupResponseDTO.class); + List transactionList = exchange.getProperty(TRANSACTION_LIST, List.class); + List transactionResultList = new ArrayList<>(); + List updatedTransactionList = new ArrayList<>(); + Map variables = new HashMap<>(); + + updateTransactionStatus(transactionList, batchAccountLookupCallback.getBeneficiaryDTOList(), transactionResultList, + updatedTransactionList); + exchange.setProperty(PARTY_LOOKUP_SUCCESSFUL_TRANSACTION_AMOUNT, totalApprovedAmount); + exchange.setProperty(PARTY_LOOKUP_SUCCESSFUL_TRANSACTION_COUNT, totalApprovedCount); + exchange.setProperty(RESULT_TRANSACTION_LIST, transactionResultList); + exchange.setProperty(RESULT_FILE, resultFile); + exchange.setProperty(TRANSACTION_LIST, updatedTransactionList); + Long workflowInstanceKey = 
Long.valueOf(exchange.getProperty("workflowInstanceKey").toString()); + variables.put(PARTY_LOOKUP_SUCCESSFUL_TRANSACTION_AMOUNT, totalApprovedAmount); + variables.put(PARTY_LOOKUP_SUCCESSFUL_TRANSACTION_COUNT, totalApprovedCount); + if (zeebeClient != null) { + + zeebeClient.newSetVariablesCommand(workflowInstanceKey).variables(variables).send().join(); + } + }) + // setting localfilepath as result file to make sure result file is uploaded + .log("updating orignal").setProperty(LOCAL_FILE_PATH, exchangeProperty(SERVER_FILE_NAME)) + .setProperty(OVERRIDE_HEADER, constant(true)).to("direct:update-file").to("direct:upload-file") + .log("updating failed transaction").setProperty(TRANSACTION_LIST, exchangeProperty(RESULT_TRANSACTION_LIST)) + .setProperty(LOCAL_FILE_PATH, exchangeProperty(RESULT_FILE)).setProperty(OVERRIDE_HEADER, constant(true)) + .to("direct:update-result-file").to("direct:upload-file"); + } + + public List updateTransactionStatus(List transactionList, + List batchAccountLookupResponseDTO, List transactionResultList, + List updatedTransactionList) { + totalApprovedCount = 0; + totalApprovedAmount = 0; + AtomicInteger count = new AtomicInteger(totalApprovedCount); + AtomicInteger amount = new AtomicInteger(totalApprovedAmount); + + transactionList.forEach(transaction -> { + Optional matchingBeneficiary = batchAccountLookupResponseDTO.stream() + .filter(beneficiary -> transaction.getPayeeIdentifier().equals(beneficiary.getPayeeIdentity())).findFirst(); + + if (matchingBeneficiary.isPresent()) { + count.incrementAndGet(); // Increment the count atomically + try { + amount.addAndGet(Integer.parseInt(transaction.getAmount())); + } catch (NumberFormatException e) { + logger.error(e.getMessage()); + } + // Store financialAddress in accountNumber field for reconciliation + // DO NOT overwrite payeeIdentifier - keep the MSISDN for party lookup! 
+ String financialAddress = matchingBeneficiary.get().getFinancialAddress(); + transaction.setAccountNumber(financialAddress); + transaction.setPayeeDfspId(matchingBeneficiary.get().getBankingInstitutionCode()); + updatedTransactionList.add(transaction); + } else { + TransactionResult transactionResult = Utils.mapToResultDTO(transaction); + transactionResult.setErrorCode("404"); + transactionResult.setErrorDescription("Payee Identifier not found"); + transactionResult.setStatus("Failed"); + transactionResultList.add(transactionResult); + } + }); + totalApprovedCount = count.get(); + totalApprovedAmount = amount.get(); + + return transactionResultList; + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/routes/AccountLookupRoute.java b/src/main/java/org/mifos/processor/bulk/camel/routes/AccountLookupRoute.java new file mode 100644 index 00000000..c60454f7 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/routes/AccountLookupRoute.java @@ -0,0 +1,99 @@ +package org.mifos.processor.bulk.camel.routes; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.HEADER_REGISTERING_INSTITUTE_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.OVERRIDE_HEADER; +import static org.mifos.processor.bulk.camel.config.CamelProperties.PAYEE_IDENTITY; +import static org.mifos.processor.bulk.camel.config.CamelProperties.PAYMENT_MODALITY; +import static org.mifos.processor.bulk.camel.config.CamelProperties.TRANSACTION_LIST; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CALLBACK; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PARTY_LOOKUP_FAILED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.REGISTERING_INSTITUTION_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.REQUEST_ID; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import javax.net.ssl.HttpsURLConnection; +import org.apache.camel.Exchange; +import 
org.apache.camel.Processor; +import org.mifos.connector.common.identityaccountmapper.dto.AccountMapperRequestDTO; +import org.mifos.connector.common.identityaccountmapper.dto.BeneficiaryDTO; +import org.mifos.processor.bulk.schema.Transaction; +import org.mifos.processor.bulk.service.BatchAccountLookup; +import org.mifos.processor.bulk.service.FileProcessingRouteService; +import org.mifos.processor.bulk.service.FileRouteService; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +@SuppressWarnings("unchecked") +public class AccountLookupRoute extends BaseRouteBuilder { + + @Value("${identity_account_mapper.account_lookup}") + private String accountLookupEndpoint; + @Value("${identity_account_mapper.hostname}") + private String identityURL; + @Value("${identity_account_mapper.hostname}") + private String identityMapperURL; + @Value("${identity_account_mapper.batch_account_lookup_callback}") + private String batchAccountLookupCallback; + @Value("${identity_account_mapper.batch_account_lookup}") + private String batchAccountLookup; + + @Override + public void configure() throws Exception { + Processor disableSslProcessor = new Processor() { + + @Override + public void process(Exchange exchange) throws Exception { + // Disable SSL certificate validation + HttpsURLConnection.setDefaultHostnameVerifier((hostname, sslSession) -> true); + } + }; + from("direct:send-account-lookup").id("account-lookup").process(exchange -> { + String callbackUrl = exchange.getProperty(CALLBACK, String.class); + String registeringInstitutionId = exchange.getProperty(HEADER_REGISTERING_INSTITUTE_ID, String.class); + exchange.getIn().setHeader(CALLBACK, callbackUrl); + exchange.getIn().setHeader(HEADER_REGISTERING_INSTITUTE_ID, registeringInstitutionId); + }).setHeader(Exchange.HTTP_METHOD, constant("GET")) + .toD(identityURL + accountLookupEndpoint + "?" 
+ PAYEE_IDENTITY + "=${exchangeProperty.payeeIdentity}&" + PAYMENT_MODALITY + + "=${exchangeProperty.paymentModality}&" + "requestId=${exchangeProperty.requestId}") + .log("API Response: ${body}").process(disableSslProcessor); + + from(RouteId.ACCOUNT_LOOKUP.getValue()).id(RouteId.ACCOUNT_LOOKUP.getValue()).log("Starting route " + RouteId.ACCOUNT_LOOKUP.name()) + .process(exchange -> exchange.setProperty(OVERRIDE_HEADER, true)).bean(FileRouteService.class, "downloadFile") + .bean(FileProcessingRouteService.class, "getTxnArray").bean(BatchAccountLookup.class, "doBatchAccountLookup") + .bean(FileProcessingRouteService.class, "updateFile").bean(FileRouteService.class, "uploadFile").process(exchange -> { + exchange.setProperty(PARTY_LOOKUP_FAILED, false); + }); + + from("direct:batch-account-lookup").id("direct:batch-account-lookup").process(exchange -> { + List transactionList = exchange.getProperty(TRANSACTION_LIST, List.class); + HashMap> stringListHashMap = new HashMap<>(); + List beneficiaryDTOList = new ArrayList<>(); + logger.info("=== ACCOUNT LOOKUP ROUTE DEBUG ==="); + logger.info("Building beneficiary list from {} transactions", transactionList != null ? 
transactionList.size() : 0); + transactionList.forEach(transaction -> { + logger.info("Adding beneficiary - payeeIdentity: {}, paymentMode: {}", transaction.getPayeeIdentifier(), + transaction.getPaymentMode()); + beneficiaryDTOList.add(new BeneficiaryDTO(transaction.getPayeeIdentifier(), "", "", "")); + }); + String requestId = exchange.getProperty(REQUEST_ID, String.class); + String callbackUrl = exchange.getProperty(CALLBACK, String.class); + String registeringInstitutionId = exchange.getProperty(HEADER_REGISTERING_INSTITUTE_ID, String.class); + logger.info("Request metadata - ID: {}, RegisteringInstitution: {}, Callback: {}", requestId, registeringInstitutionId, + callbackUrl); + AccountMapperRequestDTO accountMapperRequestDTO = new AccountMapperRequestDTO(requestId, registeringInstitutionId, + beneficiaryDTOList); + String requestBody = objectMapper.writeValueAsString(accountMapperRequestDTO); + logger.info("Request body to send to identity mapper ({} chars): {}", requestBody.length(), requestBody); + + exchange.getIn().setHeader(CALLBACK, callbackUrl); + exchange.getIn().setHeader(REGISTERING_INSTITUTION_ID, registeringInstitutionId); + exchange.getIn().setHeader("Content-type", "application/json"); + exchange.getIn().setBody(requestBody); + }).setHeader(Exchange.HTTP_METHOD, constant("POST")).toD(identityURL + batchAccountLookup).log("API Response: ${body}") + .process(disableSslProcessor); + + } +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/routes/BaseRouteBuilder.java b/src/main/java/org/mifos/processor/bulk/camel/routes/BaseRouteBuilder.java new file mode 100644 index 00000000..770f97dd --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/routes/BaseRouteBuilder.java @@ -0,0 +1,60 @@ +package org.mifos.processor.bulk.camel.routes; + +import com.fasterxml.jackson.databind.ObjectMapper; +import io.camunda.zeebe.client.ZeebeClient; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import 
java.time.format.DateTimeFormatter; +import java.util.List; +import org.apache.camel.Exchange; +import org.apache.camel.builder.RouteBuilder; +import org.apache.camel.model.RouteDefinition; +import org.mifos.processor.bulk.OperationsAppConfig; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; + +public abstract class BaseRouteBuilder extends RouteBuilder { + + @Autowired + public ObjectMapper objectMapper; + + @Autowired + public OperationsAppConfig operationsAppConfig; + + @Autowired + ZeebeClient zeebeClient; + + @Value("#{'${tenants}'.split(',')}") + protected List tenants; + + @Value("${cloud.aws.s3BaseUrl}") + protected String awsS3BaseUrl; + + public Logger logger = LoggerFactory.getLogger(this.getClass()); + + public RouteDefinition getBaseExternalApiRequestRouteDefinition(String routeId, HttpRequestMethod httpMethod) { + return from(String.format("direct:%s", routeId)).id(routeId).log("Starting external API request route: " + routeId) + .removeHeader("*").setHeader(Exchange.HTTP_METHOD, constant(httpMethod.text)) + .setHeader("X-Date", simple(ZonedDateTime.now(ZoneOffset.UTC).format(DateTimeFormatter.ISO_INSTANT))) + .setHeader("Content-Type", constant("application/json;charset=UTF-8")) + .setHeader("Accept", constant("application/json, text/plain, */*")); + } + + protected enum HttpRequestMethod { + + GET("GET"), POST("POST"), PUT("PUT"), DELETE("DELETE"); + + private final String text; + + HttpRequestMethod(String text) { + this.text = text; + } + + @Override + public String toString() { + return text; + } + } +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/routes/BatchAggregateRoute.java b/src/main/java/org/mifos/processor/bulk/camel/routes/BatchAggregateRoute.java new file mode 100644 index 00000000..6a89e9fa --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/routes/BatchAggregateRoute.java @@ 
-0,0 +1,76 @@ +package org.mifos.processor.bulk.camel.routes; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.BATCH_STATUS_FAILED; +import static org.mifos.processor.bulk.camel.config.CamelProperties.HEADER_PLATFORM_TENANT_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.OPS_APP_ACCESS_TOKEN; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.BATCH_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.COMPLETION_RATE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.ERROR_CODE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.ERROR_DESCRIPTION; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.TENANT_ID; + +import org.apache.camel.Exchange; +import org.apache.camel.LoggingLevel; +import org.apache.camel.ProducerTemplate; +import org.apache.camel.model.dataformat.JsonLibrary; +import org.mifos.processor.bulk.schema.BatchDTO; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class BatchAggregateRoute extends BaseRouteBuilder { + + @Value("${config.completion-threshold-check.completion-threshold}") + private int completionThreshold; + @Autowired + private ProducerTemplate producerTemplate; + + @Override + public void configure() throws Exception { + + from("rest:get:test/batch/summary").to(RouteId.BATCH_AGGREGATE.getValue()); + + /** + * Base route for kicking off ordering logic. Performs below tasks. 1. Downloads the csv form cloud. 2. Builds + * the [Transaction] array using [direct:get-transaction-array] route. 3. Format the data based on the + * configuration provided in application.yaml. @see [Standard.java] 4. Update file with the updated data. 5. + * Uploads the updated file in cloud. 
+ */ + from(RouteId.BATCH_AGGREGATE.getValue()).id(RouteId.BATCH_AGGREGATE.getValue()) + .log("Starting route " + RouteId.BATCH_AGGREGATE.name()).to("direct:get-access-token").choice() + .when(exchange -> exchange.getProperty(OPS_APP_ACCESS_TOKEN, String.class) != null) + .log(LoggingLevel.INFO, "Got access token, moving on to API call").to("direct:batch-aggregate-api-call") + .to("direct:batch-aggregate-response-handler").otherwise().log(LoggingLevel.INFO, "Authentication failed.").endChoice(); + + getBaseExternalApiRequestRouteDefinition("batch-aggregate-api-call", HttpRequestMethod.GET) + // .setHeader(Exchange.REST_HTTP_QUERY, simple("batchId=${exchangeProperty." + BATCH_ID + "}")) + .setHeader("Authorization", simple("Bearer ${exchangeProperty." + OPS_APP_ACCESS_TOKEN + "}")) + .setHeader(HEADER_PLATFORM_TENANT_ID, simple("${exchangeProperty." + TENANT_ID + "}")).process(exchange -> { + logger.info(exchange.getIn().getHeaders().toString()); + }).toD(operationsAppConfig.batchAggregateUrl + "${exchangeProperty." 
+ BATCH_ID + "}?bridgeEndpoint=true") + .log(LoggingLevel.INFO, "Actual dynamic endpoint called: ${header.CamelToEndpoint}") + .log(LoggingLevel.INFO, "Batch aggregate API response: \n\n ${body}") + .log(LoggingLevel.INFO, "Aggregate Response body: ${body}"); + + from("direct:batch-aggregate-response-handler").id("direct:batch-aggregate-response-handler") + .log("Starting route direct:batch-aggregate-response-handler") + // .setBody(exchange -> exchange.getIn().getBody(String.class)) + .choice().when(header("CamelHttpResponseCode").isEqualTo("200")).log(LoggingLevel.INFO, "Batch summary request successful") + .log("Response body: ${body}").unmarshal().json(JsonLibrary.Jackson, BatchDTO.class).process(exchange -> { + BatchDTO batchAggregateResponse = exchange.getIn().getBody(BatchDTO.class); + int percentage = (int) (((double) batchAggregateResponse.getSuccessful() / batchAggregateResponse.getTotal()) * 100); + + if (percentage >= completionThreshold) { + logger.info("Batch success threshold reached. 
Expected rate: {}, Actual Rate: {}", completionThreshold, percentage); + } + + exchange.setProperty(COMPLETION_RATE, percentage); + }).otherwise().log(LoggingLevel.ERROR, "Batch aggregate request unsuccessful").process(exchange -> { + exchange.setProperty(BATCH_STATUS_FAILED, true); + exchange.setProperty(ERROR_DESCRIPTION, exchange.getIn().getBody(String.class)); + exchange.setProperty(ERROR_CODE, exchange.getIn().getHeader(Exchange.HTTP_RESPONSE_CODE)); + }); + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/routes/DeDuplicationRoute.java b/src/main/java/org/mifos/processor/bulk/camel/routes/DeDuplicationRoute.java new file mode 100644 index 00000000..c4c640c3 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/routes/DeDuplicationRoute.java @@ -0,0 +1,120 @@ +package org.mifos.processor.bulk.camel.routes; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.DUPLICATE_TRANSACTION_LIST; +import static org.mifos.processor.bulk.camel.config.CamelProperties.LOCAL_FILE_PATH; +import static org.mifos.processor.bulk.camel.config.CamelProperties.ORIGINAL_TRANSACTION_LIST; +import static org.mifos.processor.bulk.camel.config.CamelProperties.OVERRIDE_HEADER; +import static org.mifos.processor.bulk.camel.config.CamelProperties.SERVER_FILE_NAME; +import static org.mifos.processor.bulk.camel.config.CamelProperties.TRANSACTION_LIST; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.DE_DUPLICATION_FAILED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.DUPLICATE_TRANSACTION_COUNT; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FAILED_TRANSACTION_FILE; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import org.mifos.processor.bulk.schema.Transaction; +import org.springframework.stereotype.Component; + +@Component +public class DeDuplicationRoute extends 
BaseRouteBuilder { + + @Override + @SuppressWarnings("unchecked") + public void configure() throws Exception { + from(RouteId.DE_DUPLICATION.getValue()).id(RouteId.DE_DUPLICATION.getValue()) + .log("Started route " + RouteId.DE_DUPLICATION.getValue()).to("direct:download-file").to("direct:get-transaction-array") + .process(exchange -> { + List transactionList = exchange.getProperty(TRANSACTION_LIST, List.class); + + if (Objects.isNull(transactionList) || transactionList.isEmpty()) { + exchange.setProperty(DE_DUPLICATION_FAILED, false); + exchange.setProperty(DUPLICATE_TRANSACTION_COUNT, 0); + } + + int duplicateTxnCount = 0; + List duplicateTransactionList = new ArrayList<>(); // contains the duplicate + // transaction + List originalTransactionList = new ArrayList<>(); // contains the original txn after + // removing duplicate + Set set = new HashSet<>(); + + for (Transaction transaction : transactionList) { + String payeeDetail = fetchPayeeDetail(transaction); + if (set.contains(payeeDetail)) { + transaction.setNote("Duplicate transaction."); + duplicateTransactionList.add(transaction); + duplicateTxnCount++; + } else { + set.add(payeeDetail); + originalTransactionList.add(transaction); + } + } + + log.info("Duplicate txn: {} and count: {}", duplicateTransactionList, duplicateTxnCount); + + exchange.setProperty(DUPLICATE_TRANSACTION_COUNT, duplicateTxnCount); + exchange.setProperty(DUPLICATE_TRANSACTION_LIST, duplicateTransactionList); + exchange.setProperty(ORIGINAL_TRANSACTION_LIST, originalTransactionList); + }).choice().when(exchange -> exchange.getProperty(DUPLICATE_TRANSACTION_COUNT, Integer.class) > 0) + .log("Updating original transaction list") + .setProperty(TRANSACTION_LIST, simple("${exchangeProperty." + ORIGINAL_TRANSACTION_LIST + "}")) + .setProperty(LOCAL_FILE_PATH, simple("${exchangeProperty." 
+ SERVER_FILE_NAME + "}")) + .setProperty(OVERRIDE_HEADER, constant(true)).to("direct:update-file").to("direct:upload-file").process(exchange -> { + String originalFileServerName = exchange.getProperty(SERVER_FILE_NAME, String.class); + String duplicateFileName = "duplicate_transaction_" + originalFileServerName; + + exchange.setProperty(FAILED_TRANSACTION_FILE, duplicateFileName); + }).log("Updating duplicate transaction list") + .setProperty(TRANSACTION_LIST, simple("${exchangeProperty." + DUPLICATE_TRANSACTION_LIST + "}")) + .setProperty(LOCAL_FILE_PATH, simple("${exchangeProperty." + FAILED_TRANSACTION_FILE + "}")) + .setProperty(OVERRIDE_HEADER, constant(true)).to("direct:update-file").to("direct:upload-file").process(exchange -> { + // checking if file upload was success or + String serverFileName = exchange.getProperty(SERVER_FILE_NAME, String.class); + if (serverFileName == null) { + exchange.setProperty(DE_DUPLICATION_FAILED, true); + } else { + exchange.setProperty(DE_DUPLICATION_FAILED, false); + } + }).otherwise().log("No duplicate transaction found").setProperty(DE_DUPLICATION_FAILED, constant(false)).endChoice(); + } + + private void removeDuplicatesIfOrderingDisabled(List transactionList) { + Set set = new HashSet<>(); + + if (Objects.isNull(transactionList)) { + return; + } + + for (Transaction transaction : transactionList) { + String payeeDetail = fetchPayeeDetail(transaction); + if (set.contains(payeeDetail)) { + transaction.setNote("Duplicate transaction."); + } else { + set.add(payeeDetail); + } + } + } + + private Map getTransactionPayeeDetailHashMap(List transactionList) { + Map payeeDetailTransactionMap = new HashMap<>(); + for (Transaction transaction : transactionList) { + payeeDetailTransactionMap.put(fetchPayeeDetail(transaction), transaction); + } + return payeeDetailTransactionMap; + } + + private String fetchPayeeDetail(Transaction transaction) { + String payeeIdentifier = transaction.getPayeeIdentifier(); + String payeeIdentifierType 
= transaction.getPayeeIdentifierType(); + String amount = transaction.getAmount(); + String currency = transaction.getCurrency(); + + return String.format("%s%s%s%s", payeeIdentifier, payeeIdentifierType, amount, currency); + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/routes/ExternalApiCallRoute.java b/src/main/java/org/mifos/processor/bulk/camel/routes/ExternalApiCallRoute.java new file mode 100644 index 00000000..b2d645d7 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/routes/ExternalApiCallRoute.java @@ -0,0 +1,27 @@ +package org.mifos.processor.bulk.camel.routes; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.ENDPOINT; +import static org.mifos.processor.bulk.camel.config.CamelProperties.HOST; + +import org.apache.camel.LoggingLevel; +import org.apache.camel.builder.RouteBuilder; + +//@Component +public class ExternalApiCallRoute extends RouteBuilder { + + @Override + public void configure() { + from("direct:external-api-calling").id("external-api-call").log(LoggingLevel.DEBUG, "######## API CALL -> Calling an external api") + .process(exchange -> { + // remove the trailing "/" from endpoint + String endpoint = exchange.getProperty(ENDPOINT, String.class); + if (endpoint.startsWith("/")) { + exchange.setProperty(ENDPOINT, endpoint.substring(1)); + } + }).log(LoggingLevel.DEBUG, "Host: ${exchangeProperty." + HOST + "}") + .log(LoggingLevel.DEBUG, "Endpoint: ${exchangeProperty." + ENDPOINT + "}").log(LoggingLevel.DEBUG, "Headers: ${headers}") + .log(LoggingLevel.DEBUG, "Request Body: ${body}").toD("${exchangeProperty." + HOST + "}/${exchangeProperty." 
+ ENDPOINT + + "}" + "?bridgeEndpoint=true" + "&throwExceptionOnFailure=false") + .log(LoggingLevel.DEBUG, "Response body: ${body}"); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/routes/FileProcessingRoute.java b/src/main/java/org/mifos/processor/bulk/camel/routes/FileProcessingRoute.java new file mode 100644 index 00000000..26605d64 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/routes/FileProcessingRoute.java @@ -0,0 +1,126 @@ +package org.mifos.processor.bulk.camel.routes; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.LOCAL_FILE_PATH; +import static org.mifos.processor.bulk.camel.config.CamelProperties.OVERRIDE_HEADER; +import static org.mifos.processor.bulk.camel.config.CamelProperties.RESULT_TRANSACTION_LIST; +import static org.mifos.processor.bulk.camel.config.CamelProperties.TRANSACTION_LIST; +import static org.mifos.processor.bulk.camel.config.CamelProperties.TRANSACTION_LIST_LENGTH; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.COMPLETED_AMOUNT; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FAILED_AMOUNT; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.ONGOING_AMOUNT; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.TOTAL_AMOUNT; + +import com.fasterxml.jackson.databind.MappingIterator; +import com.fasterxml.jackson.databind.SequenceWriter; +import com.fasterxml.jackson.dataformat.csv.CsvMapper; +import com.fasterxml.jackson.dataformat.csv.CsvSchema; +import java.io.File; +import java.io.FileReader; +import java.util.ArrayList; +import java.util.List; +import org.mifos.processor.bulk.schema.Transaction; +import org.mifos.processor.bulk.schema.TransactionResult; +import org.mifos.processor.bulk.utility.CsvWriter; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Component +public class FileProcessingRoute extends BaseRouteBuilder { + + @Autowired + private CsvMapper 
csvMapper; + + @Override + @SuppressWarnings("unchecked") + public void configure() { + + /** + * Parse the [Transaction] array from the csv file exchangeInput: [LOCAL_FILE_PATH] the absolute path to the csv + * file exchangeOutput: [TRANSACTION_LIST] containing the list of [Transaction] + */ + from("direct:get-transaction-array").id("direct:get-transaction-array").log("Starting route direct:get-transaction-array") + .process(exchange -> { + Double totalAmount = 0.0; + Long failedAmount = 0L; + Long completedAmount = 0L; + String filename = exchange.getProperty(LOCAL_FILE_PATH, String.class); + log.debug("Local file path: {}", filename); + CsvSchema schema = CsvSchema.emptySchema().withHeader(); + log.info("Filename: {}", filename); + FileReader reader = new FileReader(filename); + MappingIterator readValues = csvMapper.readerWithSchemaFor(Transaction.class).with(schema) + .readValues(reader); + List transactionList = new ArrayList<>(); + while (readValues.hasNext()) { + Transaction current = readValues.next(); + transactionList.add(current); + totalAmount += Double.parseDouble(current.getAmount()); + } + reader.close(); + exchange.setProperty(TRANSACTION_LIST, transactionList); + exchange.setProperty(TRANSACTION_LIST_LENGTH, transactionList.size()); + exchange.setProperty(TOTAL_AMOUNT, totalAmount); + exchange.setProperty(ONGOING_AMOUNT, totalAmount); // initially ongoing amount is same as total + // amount + exchange.setProperty(FAILED_AMOUNT, failedAmount); + exchange.setProperty(COMPLETED_AMOUNT, completedAmount); + }); + + /** + * updates the data in local file exchangeInput: [LOCAL_FILE_PATH] the absolute path to the csv file + * [RESULT_TRANSACTION_LIST] containing the list of [Transaction] [OVERRIDE_HEADER] if set to true will override + * the header or else use the existing once in csv file + */ + from("direct:update-result-file").id("direct:update-result-file").log("Starting route direct:update-result-file") + .process(exchange -> { + String filepath = 
exchange.getProperty(LOCAL_FILE_PATH, String.class); + List transactionList = exchange.getProperty(RESULT_TRANSACTION_LIST, List.class); + + // getting header + Boolean overrideHeader = exchange.getProperty(OVERRIDE_HEADER, Boolean.class); + + CsvWriter.writeToCsv(transactionList, TransactionResult.class, csvMapper, overrideHeader, filepath); + }).log("Update complete"); + + /** + * updates the data in local file exchangeInput: [LOCAL_FILE_PATH] the absolute path to the csv file + * [TRANSACTION_LIST] containing the list of [Transaction] [OVERRIDE_HEADER] if set to true will override the + * header or else use the existing once in csv file + */ + from("direct:update-file").id("direct:update-file").log("Starting route direct:update-file").to("direct:update-file-v2") + .log("Update complete"); + + /** + * this is backward compatible version of update-file route for new CSV schema exchangeInput: [LOCAL_FILE_PATH] + * the absolute path to the csv file [TRANSACTION_LIST] containing the list of [Transaction] [OVERRIDE_HEADER] + * if set to true will override the header or else use the existing once in csv file + */ + from("direct:update-file-v2").id("direct:update-file-v2").log("Starting route direct:update-file-v2").process(exchange -> { + String filepath = exchange.getProperty(LOCAL_FILE_PATH, String.class); + List transactionList = exchange.getProperty(TRANSACTION_LIST, List.class); + + log.info("update-file-v2 - filepath: {}", filepath); + log.info("update-file-v2 - transactionList: {} (size: {})", transactionList != null ? "present" : "NULL", + transactionList != null ? 
transactionList.size() : 0); + + // getting header + Boolean overrideHeader = exchange.getProperty(OVERRIDE_HEADER, Boolean.class); + CsvSchema csvSchema = csvMapper.schemaFor(Transaction.class); + if (overrideHeader) { + csvSchema = csvSchema.withHeader(); + } else { + csvSchema = csvSchema.withoutHeader(); + } + + File file = new File(filepath); + SequenceWriter writer = csvMapper.writerWithSchemaFor(Transaction.class).with(csvSchema).writeValues(file); + if (transactionList != null) { + for (Transaction transaction : transactionList) { + writer.write(transaction); + } + } + writer.close(); + log.info("update-file-v2 - wrote {} transactions to {}", transactionList != null ? transactionList.size() : 0, filepath); + }); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/routes/FileRoute.java b/src/main/java/org/mifos/processor/bulk/camel/routes/FileRoute.java new file mode 100644 index 00000000..c4d48b78 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/routes/FileRoute.java @@ -0,0 +1,54 @@ +package org.mifos.processor.bulk.camel.routes; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.LOCAL_FILE_PATH; +import static org.mifos.processor.bulk.camel.config.CamelProperties.SERVER_FILE_NAME; + +import java.io.File; +import java.io.FileOutputStream; +import org.mifos.processor.bulk.file.FileTransferService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class FileRoute extends BaseRouteBuilder { + + @Autowired + @Qualifier("awsStorage") + private FileTransferService fileTransferService; + + @Value("${application.bucket-name}") + private String bucketName; + + @Override + public void configure() throws Exception { + + /** + * Downloads the file from cloud, stores in local and returns the file path Input the 
file name through exchange + * variable: [SERVER_FILE_NAME] Output the local file path through exchange variable: [LOCAL_FILE_PATH] + */ + from("direct:download-file").id("direct:download-file").log("Started download-file route").process(exchange -> { + String filename = exchange.getProperty(SERVER_FILE_NAME, String.class); + + byte[] csvFile = fileTransferService.downloadFile(filename, bucketName); + File file = new File(filename); + try (FileOutputStream fos = new FileOutputStream(file)) { + fos.write(csvFile); + } + exchange.setProperty(LOCAL_FILE_PATH, file.getAbsolutePath()); + logger.info("File downloaded"); + }); + + /** + * Uploads the file to cloud and returns the file name in cloud Input the local file path through exchange + * variable: [LOCAL_FILE_PATH] Output the server file name through exchange variable: [SERVER_FILE_NAME] + */ + from("direct:upload-file").id("direct:upload-file").log("Uploading file").process(exchange -> { + String filepath = exchange.getProperty(LOCAL_FILE_PATH, String.class); + String serverFileName = fileTransferService.uploadFile(new File(filepath), bucketName); + exchange.setProperty(SERVER_FILE_NAME, serverFileName); + logger.info("Uploaded file: {}", serverFileName); + }); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/routes/FormattingRoute.java b/src/main/java/org/mifos/processor/bulk/camel/routes/FormattingRoute.java new file mode 100644 index 00000000..0589471d --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/routes/FormattingRoute.java @@ -0,0 +1,62 @@ +package org.mifos.processor.bulk.camel.routes; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.OVERRIDE_HEADER; +import static org.mifos.processor.bulk.camel.config.CamelProperties.TRANSACTION_LIST; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FORMATTING_FAILED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FORMATTING_STANDARD; + +import java.util.List; +import 
org.mifos.processor.bulk.format.Standard; +import org.mifos.processor.bulk.format.helper.Mappers; +import org.mifos.processor.bulk.schema.Transaction; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class FormattingRoute extends BaseRouteBuilder { + + @Autowired + private Mappers mappers; + + @Value("${config.formatting.standard}") + private String standard; + + private Standard formattingStandard; + + @Override + @SuppressWarnings("unchecked") + public void configure() throws Exception { + // parsing enum from application.yaml string + formattingStandard = Standard.valueOf(standard); + + /** + * Base route for kicking off ordering logic. Performs below tasks. 1. Downloads the csv form cloud. 2. Builds + * the [Transaction] array using [direct:get-transaction-array] route. 3. Format the data based on the + * configuration provided in application.yaml. @see [Standard.java] 4. Update file with the updated data. 5. + * Uploads the updated file in cloud. + */ + from(RouteId.FORMATTING.getValue()).id(RouteId.FORMATTING.getValue()).log("Starting route " + RouteId.FORMATTING.name()).choice() + .when(exchange -> formattingStandard != Standard.DEFAULT).to("direct:download-file").to("direct:get-transaction-array") + .to("direct:format-data") + // making sure to override header as well, since data format is now updated + .process(exchange -> exchange.setProperty(OVERRIDE_HEADER, true)).to("direct:update-file").to("direct:upload-file") + .otherwise().log("Skipping formatting since standard is set to DEFAULT").end().process(exchange -> { + exchange.setProperty(FORMATTING_FAILED, false); + exchange.setProperty(FORMATTING_STANDARD, standard); + }); + + // formatting data based on configuration. Uses [Mappers] for converting data. 
+ from("direct:format-data").id("direct:format-data").log("Starting route direct:format-data").process(exchange -> { + List transactionList = exchange.getProperty(TRANSACTION_LIST, List.class); + // replace with switch when multiple standards are added + if (formattingStandard == Standard.GSMA) { + logger.info("Formatting based on {} standard", formattingStandard.name()); + exchange.setProperty(TRANSACTION_LIST, mappers.gsmaMapper.convertList(transactionList)); + } else { + exchange.setProperty(TRANSACTION_LIST, transactionList); + } + }); + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/routes/HealthRoute.java b/src/main/java/org/mifos/processor/bulk/camel/routes/HealthRoute.java new file mode 100644 index 00000000..e7a61686 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/routes/HealthRoute.java @@ -0,0 +1,19 @@ +package org.mifos.processor.bulk.camel.routes; + +import org.json.JSONObject; +import org.springframework.stereotype.Component; + +@Component +public class HealthRoute extends BaseRouteBuilder { + + @Override + public void configure() throws Exception { + + // todo remove once camel APIs are migrated to spring + from("rest:GET:/actuator/health/liveness").id("rest:GET:/actuator/health/liveness").setBody(exchange -> { + JSONObject jsonObject = new JSONObject(); + jsonObject.put("status", "UP"); + return jsonObject.toString(); + }); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/routes/InitSubBatchRoute.java b/src/main/java/org/mifos/processor/bulk/camel/routes/InitSubBatchRoute.java new file mode 100644 index 00000000..3d4f9b95 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/routes/InitSubBatchRoute.java @@ -0,0 +1,259 @@ +package org.mifos.processor.bulk.camel.routes; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.BATCH_ID_HEADER; +import static org.mifos.processor.bulk.camel.config.CamelProperties.EXTERNAL_ENDPOINT; +import static 
org.mifos.processor.bulk.camel.config.CamelProperties.EXTERNAL_ENDPOINT_FAILED; +import static org.mifos.processor.bulk.camel.config.CamelProperties.HEADER_CLIENT_CORRELATION_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.HEADER_REGISTERING_INSTITUTE_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.IS_PAYMENT_MODE_VALID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.LOCAL_FILE_PATH; +import static org.mifos.processor.bulk.camel.config.CamelProperties.OVERRIDE_HEADER; +import static org.mifos.processor.bulk.camel.config.CamelProperties.PAYMENT_MODE_TYPE; +import static org.mifos.processor.bulk.camel.config.CamelProperties.RESULT_TRANSACTION_LIST; +import static org.mifos.processor.bulk.camel.config.CamelProperties.SERVER_FILE_NAME; +import static org.mifos.processor.bulk.camel.config.CamelProperties.SUB_BATCH_ENTITY; +import static org.mifos.processor.bulk.camel.config.CamelProperties.TENANT_NAME; +import static org.mifos.processor.bulk.camel.config.CamelProperties.TRANSACTION_LIST; +import static org.mifos.processor.bulk.camel.config.CamelProperties.TRANSACTION_LIST_ELEMENT; +import static org.mifos.processor.bulk.camel.config.CamelProperties.TRANSACTION_LIST_LENGTH; +import static org.mifos.processor.bulk.camel.config.CamelProperties.ZEEBE_VARIABLE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.BATCH_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.COMPLETED_AMOUNT; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.DEBULKINGDFSPID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FAILED_AMOUNT; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FILE_NAME; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.INIT_SUB_BATCH_FAILED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.ONGOING_AMOUNT; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PARTY_LOOKUP_FAILED; +import static 
org.mifos.processor.bulk.zeebe.ZeebeVariables.PAYEE_DFSP_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PAYMENT_MODE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PURPOSE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.REQUEST_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.RESULT_FILE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.TOTAL_AMOUNT; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import org.apache.camel.Exchange; +import org.apache.camel.LoggingLevel; +import org.mifos.processor.bulk.config.ExternalApiPayloadConfig; +import org.mifos.processor.bulk.config.PaymentModeConfiguration; +import org.mifos.processor.bulk.config.PaymentModeMapping; +import org.mifos.processor.bulk.config.PaymentModeType; +import org.mifos.processor.bulk.schema.SubBatchEntity; +import org.mifos.processor.bulk.schema.Transaction; +import org.mifos.processor.bulk.schema.TransactionResult; +import org.mifos.processor.bulk.utility.Utils; +import org.mifos.processor.bulk.zeebe.BpmnConfig; +import org.mifos.processor.bulk.zeebe.ZeebeProcessStarter; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class InitSubBatchRoute extends BaseRouteBuilder { + + @Autowired + private ZeebeProcessStarter zeebeProcessStarter; + + @Autowired + private BpmnConfig bpmnConfig; + + @Autowired + private PaymentModeConfiguration paymentModeConfiguration; + + @Autowired + private ExternalApiPayloadConfig externalApiPayloadConfig; + + @Value("${channel.hostname}") + private String channelURL; + + @Value("${config.partylookup.enable}") + private boolean isPartyLookupEnabled; + + @Override + @SuppressWarnings("unchecked") + public void configure() throws 
Exception { + + /** + * Base route for kicking off init sub batch logic. Performs below tasks. 1. Downloads the csv form cloud. 2. + * Builds the [Transaction] array using [direct:get-transaction-array] route. 3. Loops through each transaction + * and start the respective workflow + */ + from(RouteId.INIT_SUB_BATCH.getValue()).id(RouteId.INIT_SUB_BATCH.getValue()).log("Starting route " + RouteId.INIT_SUB_BATCH.name()) + .to("direct:download-file").to("direct:get-transaction-array").to("direct:start-workflow-step1"); + + // crates the zeebe variables map and starts the workflow by calling >> direct:start-workflow-step2 + from("direct:start-workflow-step1").id("direct:start-flow-step1").log("Starting route direct:start-flow-step1") + .process(exchange -> { + List transactionList = exchange.getProperty(TRANSACTION_LIST, List.class); + + Map variables = exchange.getProperty(ZEEBE_VARIABLE, Map.class); + variables.put(BATCH_ID, exchange.getProperty(BATCH_ID)); + variables.put(FILE_NAME, exchange.getProperty(SERVER_FILE_NAME)); + variables.put(REQUEST_ID, exchange.getProperty(REQUEST_ID)); + variables.put(PURPOSE, exchange.getProperty(PURPOSE)); + variables.put(TOTAL_AMOUNT, exchange.getProperty(TOTAL_AMOUNT)); + variables.put(ONGOING_AMOUNT, exchange.getProperty(ONGOING_AMOUNT)); + variables.put(FAILED_AMOUNT, exchange.getProperty(FAILED_AMOUNT)); + variables.put(COMPLETED_AMOUNT, exchange.getProperty(COMPLETED_AMOUNT)); + variables.put(RESULT_FILE, String.format("Result_%s", exchange.getProperty(SERVER_FILE_NAME))); + + exchange.setProperty(ZEEBE_VARIABLE, variables); + exchange.setProperty(PAYMENT_MODE, transactionList.get(0).getPaymentMode()); + + }).to("direct:start-workflow-step2"); + + from("direct:start-workflow-step2").id("direct:start-flow-step2").log("Starting route direct:start-flow-step2") + .to("direct:validate-payment-mode").choice() + // if invalid payment mode + 
.when(exchangeProperty(IS_PAYMENT_MODE_VALID).isEqualTo(false)).to("direct:payment-mode-missing") + .setProperty(INIT_SUB_BATCH_FAILED, constant(true)) + // else + .otherwise().to("direct:start-workflow-step3").endChoice(); + + from("direct:start-workflow-step3").id("direct:start-flow-step3").log("Starting route direct:start-flow-step3").choice() + // if type of payment mode is bulk + .when(exchangeProperty(PAYMENT_MODE_TYPE).isEqualTo(PaymentModeType.BULK)).process(exchange -> { + String paymentMode = exchange.getProperty(PAYMENT_MODE, String.class); + PaymentModeMapping mapping = paymentModeConfiguration.getByMode(paymentMode); + + String tenantName = exchange.getProperty(TENANT_NAME, String.class); + Map variables = exchange.getProperty(ZEEBE_VARIABLE, Map.class); + variables.put(PAYMENT_MODE, paymentMode); + variables.put(DEBULKINGDFSPID, mapping.getDebulkingDfspid() == null ? tenantName : mapping.getDebulkingDfspid()); + if (isPartyLookupEnabled && !(Boolean) variables.get(PARTY_LOOKUP_FAILED)) { + String filename = exchange.getProperty(SERVER_FILE_NAME).toString(); + String regex = ".*_sub-batch-([\\w-]+)\\.csv"; // payee DFSP Id for sub batch are extracted from + // the sub batch file name when party lookup is + // enabled and it is successful + Pattern pattern = Pattern.compile(regex); + Matcher matcher = pattern.matcher(filename); + + if (matcher.matches()) { + String payeeDfspId = matcher.group(1); + logger.info("Payee DFSP Id {}", payeeDfspId); + variables.put(PAYEE_DFSP_ID, payeeDfspId); + } + } + zeebeProcessStarter.startZeebeWorkflow( + Utils.getBulkConnectorBpmnName(mapping.getEndpoint(), mapping.getId().toLowerCase(), tenantName), variables); + exchange.setProperty(INIT_SUB_BATCH_FAILED, false); + }) + // if type of payment mode is payment todo // else case or else if case ? + .otherwise().loop(simple("${exchangeProperty." 
+ TRANSACTION_LIST_LENGTH + "}")).process(exchange -> { + int index = exchange.getProperty(Exchange.LOOP_INDEX, Integer.class); + List transactionList = exchange.getProperty(TRANSACTION_LIST, List.class); + Transaction transaction = transactionList.get(index); + + exchange.setProperty(REQUEST_ID, transaction.getRequestId()); + exchange.setProperty(PAYEE_DFSP_ID, transaction.getPayeeDfspId()); + logger.info("REQUEST_ID: {}", transaction.getRequestId()); + exchange.setProperty(TRANSACTION_LIST_ELEMENT, transaction); + }).setHeader("Platform-TenantId", exchangeProperty(TENANT_NAME)) + .setHeader("X-PayeeDFSP-ID", exchangeProperty(PAYEE_DFSP_ID)).to("direct:dynamic-payload-setter") + .to("direct:external-api-call").to("direct:external-api-response-handler").end() // end loop block + .endChoice(); + + from("direct:dynamic-payload-setter").id("direct:runtime-payload-test").log("Starting route direct:runtime-payload-test") + .process(exchange -> { + String mode = exchange.getProperty(PAYMENT_MODE, String.class); + Function localPayloadVariable = externalApiPayloadConfig.getApiPayloadSetter(mode); + logger.info("MODE FOR API CALL : {}", mode); + logger.info("localPayloadVariable: {}", localPayloadVariable); + exchange.setProperty("body", localPayloadVariable.apply(exchange)); + }) + // this payload variable returns the body for respective payment modes + .setBody(simple("${exchangeProperty.body}")); + + // Loops through each transaction and start the respective workflow + from("direct:external-api-response-handler").id("direct:external-api-response-handler") + .log("Starting route direct:external-api-response-handler").choice().when(header("CamelHttpResponseCode").isEqualTo(200)) + .process(exchange -> { + logger.info("INIT_SUB_BATCH_FAILED is false"); + exchange.setProperty(INIT_SUB_BATCH_FAILED, false); + }).otherwise().process(exchange -> { + logger.info("INIT_SUB_BATCH_FAILED is false"); + exchange.setProperty(INIT_SUB_BATCH_FAILED, true); + }).endChoice(); + + 
from("direct:payment-mode-missing").id("direct:payment-mode-missing").log("Starting route direct:payment-mode-missing") + .process(exchange -> { + String serverFileName = exchange.getProperty(SERVER_FILE_NAME, String.class); + String resultFile = String.format("Result_%s", serverFileName); + + List transactionList = exchange.getProperty(TRANSACTION_LIST, List.class); + List transactionResultList = updateTransactionStatusToFailed(transactionList); + exchange.setProperty(RESULT_TRANSACTION_LIST, transactionResultList); + exchange.setProperty(RESULT_FILE, resultFile); + }) + // setting localfilepath as result file to make sure result file is uploaded + .setProperty(LOCAL_FILE_PATH, exchangeProperty(RESULT_FILE)).setProperty(OVERRIDE_HEADER, constant(true)) + .process(exchange -> { + logger.info("A1 {}", exchange.getProperty(RESULT_FILE)); + logger.info("A2 {}", exchange.getProperty(LOCAL_FILE_PATH)); + logger.info("A3 {}", exchange.getProperty(OVERRIDE_HEADER)); + }).to("direct:update-result-file").to("direct:upload-file"); + + from("direct:external-api-call").id("direct:external-api-call").log("Starting route direct:external-api-call").process(exchange -> { + String paymentMode = exchange.getProperty(PAYMENT_MODE, String.class); + PaymentModeMapping mapping = paymentModeConfiguration.getByMode(paymentMode); + if (mapping == null) { + exchange.setProperty(EXTERNAL_ENDPOINT_FAILED, true); + logger.info("Failed to get the payment mode config, check the configuration for payment mode"); + } else { + exchange.setProperty(EXTERNAL_ENDPOINT_FAILED, false); + exchange.setProperty(EXTERNAL_ENDPOINT, mapping.getEndpoint()); + logger.info("Got the config with routing to endpoint {}", mapping.getEndpoint()); + } + }).choice().when(exchangeProperty(EXTERNAL_ENDPOINT_FAILED).isEqualTo(false)) + .log(LoggingLevel.DEBUG, "Making API call to endpoint ${exchangeProperty.extEndpoint} and body: ${body}") + .setHeader(Exchange.CONTENT_TYPE, constant("application/json")).choice() + 
.when(exchange -> exchange.getProperty(SUB_BATCH_ENTITY, SubBatchEntity.class) != null) + .log("Sub batch entity is not null, hence passing subBatchId while calling channel API").process(exchange -> { + SubBatchEntity subBatchEntity = exchange.getProperty(SUB_BATCH_ENTITY, SubBatchEntity.class); + exchange.getIn().setHeader(BATCH_ID_HEADER, subBatchEntity.getSubBatchId()); + }).otherwise().log("Sub batch entity is null, hence passing batchId while calling channel API") + .setHeader(BATCH_ID_HEADER, simple("${exchangeProperty." + BATCH_ID + "}")).endChoice() + .setHeader(HEADER_CLIENT_CORRELATION_ID, simple("${exchangeProperty." + REQUEST_ID + "}")) + .setHeader(HEADER_REGISTERING_INSTITUTE_ID, simple("${exchangeProperty." + HEADER_REGISTERING_INSTITUTE_ID + "}")) + .process(exchange -> { + log.debug("Variables: {}", exchange.getProperties()); + log.debug("Emergency: {}", exchange.getIn().getHeaders()); + }) + + .toD(channelURL + "${exchangeProperty.extEndpoint}" + "?bridgeEndpoint=true&throwExceptionOnFailure=false") + .log(LoggingLevel.DEBUG, "Response body: ${body}").otherwise().endChoice(); + + from("direct:validate-payment-mode").id("direct:validate-payment-mode").log("Starting route direct:validate-payment-mode") + .process(exchange -> { + String paymentMde = exchange.getProperty(PAYMENT_MODE, String.class); + PaymentModeMapping mapping = paymentModeConfiguration.getByMode(paymentMde); + if (mapping == null) { + exchange.setProperty(IS_PAYMENT_MODE_VALID, false); + } else { + exchange.setProperty(IS_PAYMENT_MODE_VALID, true); + exchange.setProperty(PAYMENT_MODE_TYPE, mapping.getType()); + } + }); + } + + // update Transactions status to failed + private List updateTransactionStatusToFailed(List transactionList) { + List transactionResultList = new ArrayList<>(); + for (Transaction transaction : transactionList) { + TransactionResult transactionResult = Utils.mapToResultDTO(transaction); + transactionResult.setErrorCode("404"); + 
transactionResult.setErrorDescription("Payment mode not configured"); + transactionResult.setStatus("Failed"); + transactionResultList.add(transactionResult); + } + + return transactionResultList; + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/routes/MergeBackRoute.java b/src/main/java/org/mifos/processor/bulk/camel/routes/MergeBackRoute.java new file mode 100644 index 00000000..63969efa --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/routes/MergeBackRoute.java @@ -0,0 +1,106 @@ +package org.mifos.processor.bulk.camel.routes; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.FILE_1; +import static org.mifos.processor.bulk.camel.config.CamelProperties.FILE_2; +import static org.mifos.processor.bulk.camel.config.CamelProperties.LOCAL_FILE_PATH; +import static org.mifos.processor.bulk.camel.config.CamelProperties.SERVER_FILE_NAME; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.BATCH_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.MERGE_COMPLETED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.MERGE_FAILED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.MERGE_FILE_LIST; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.MERGE_ITERATION; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.RESULT_FILE; + +import java.io.File; +import java.util.List; +import org.apache.camel.Exchange; +import org.mifos.processor.bulk.utility.Utils; +import org.springframework.stereotype.Component; + +@Component +public class MergeBackRoute extends BaseRouteBuilder { + + @Override + public void configure() throws Exception { + + /** + * Base route for kicking off merge back logic. Performs below tasks: 1. Picks the top two files from the array + * of files to be merged. 2. Merges them into single CSV. 3. Uploads the CSV to S3. 4. Updates the exchange + * variables. 
+ */ + from(RouteId.MERGE_BACK.getValue()).id(RouteId.MERGE_BACK.getValue()).log("Starting route " + RouteId.MERGE_BACK.name()).choice() + .when(exchange -> exchange.getProperty(MERGE_FILE_LIST, List.class).size() == 0).log("Nothing to merge") + .setProperty(MERGE_FAILED, constant(false)).setProperty(MERGE_COMPLETED, constant(true)) + .when(exchange -> exchange.getProperty(MERGE_FILE_LIST, List.class).size() == 1).process(exchange -> { + exchange.setProperty(MERGE_FAILED, false); + exchange.setProperty(MERGE_COMPLETED, true); + @SuppressWarnings("unchecked") + List mergeFileList = (List) exchange.getProperty(MERGE_FILE_LIST, List.class); + String resultFile = mergeFileList.get(0); + setResultFileProperty(exchange, resultFile); + }).otherwise().to("direct:start-merge").endChoice(); + + // starts the merge process, merges the file and uploads the file in s3 + from("direct:start-merge").id("direct:start-merge").log("Starting route direct:start-merge").to("direct:download-file-to-be-merged") + .process(exchange -> { + String file1 = exchange.getProperty(FILE_1, String.class); + String file2 = exchange.getProperty(FILE_2, String.class); + + String mergedFile = Utils.mergeCsvFile(file1, file2); + if (mergedFile == null) { + exchange.setProperty(MERGE_COMPLETED, false); + return; + } + if (exchange.getProperty(MERGE_ITERATION, Integer.class) == 1) { + // generate new name for merged file in case of first iteration + String newFileName = System.currentTimeMillis() + "_" + exchange.getProperty(BATCH_ID, String.class) + ".csv"; + new File(mergedFile).renameTo(new File(newFileName)); + exchange.setProperty(LOCAL_FILE_PATH, newFileName); + } else { + exchange.setProperty(LOCAL_FILE_PATH, mergedFile); + } + }).to("direct:upload-file").process(exchange -> { + String mergedFileServerName = exchange.getProperty(SERVER_FILE_NAME, String.class); + @SuppressWarnings("unchecked") + List mergeList = (List) exchange.getProperty(MERGE_FILE_LIST, List.class); + String first = 
mergeList.remove(0); + String second = mergeList.remove(0); + logger.info("Merge iteration {}, for list, {}", exchange.getProperty(MERGE_ITERATION), mergeList); + log.info("Merged files {} and {}", first, second); + mergeList.add(0, mergedFileServerName); + + if (mergeList.size() == 1) { + exchange.setProperty(MERGE_FAILED, false); + exchange.setProperty(MERGE_COMPLETED, true); + setResultFileProperty(exchange, mergedFileServerName); + } else { + exchange.setProperty(MERGE_COMPLETED, false); + } + + exchange.setProperty(MERGE_FILE_LIST, mergeList); + + // make sure to remove the files from local storage + new File(exchange.getProperty(FILE_1, String.class)).delete(); + new File(exchange.getProperty(FILE_2, String.class)).delete(); + }); + + // downloads the two files (using FIFO access pattern) from s3 which are to be merged + from("direct:download-file-to-be-merged").id("direct:download-file-to-be-merged") + .log("Starting route direct:download-file-to-be-merged").log("Downloading files to be merged").process(exchange -> { + @SuppressWarnings("unchecked") + List mergeList = (List) exchange.getProperty(MERGE_FILE_LIST, List.class); + exchange.setProperty(SERVER_FILE_NAME, mergeList.get(0)); + }).to("direct:download-file") // downloading first file + .setProperty(FILE_1, exchangeProperty(LOCAL_FILE_PATH)).process(exchange -> { + @SuppressWarnings("unchecked") + List mergeList = (List) exchange.getProperty(MERGE_FILE_LIST, List.class); + exchange.setProperty(SERVER_FILE_NAME, mergeList.get(1)); + }).to("direct:download-file") // downloading second file + .setProperty(FILE_2, exchangeProperty(LOCAL_FILE_PATH)); + } + + // set RESULT_FILE exchange property to the file url + public void setResultFileProperty(Exchange exchange, String fileName) { + exchange.setProperty(RESULT_FILE, Utils.getAwsFileUrl(awsS3BaseUrl, fileName)); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/routes/OperationsAuthRoute.java 
b/src/main/java/org/mifos/processor/bulk/camel/routes/OperationsAuthRoute.java new file mode 100644 index 00000000..35f2c5f9 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/routes/OperationsAuthRoute.java @@ -0,0 +1,60 @@ +package org.mifos.processor.bulk.camel.routes; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.HEADER_PLATFORM_TENANT_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.OPS_APP_ACCESS_TOKEN; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.TENANT_ID; + +import java.util.HashMap; +import java.util.Map; +import org.apache.camel.Exchange; +import org.apache.camel.LoggingLevel; +import org.apache.camel.model.dataformat.JsonLibrary; +import org.springframework.stereotype.Component; + +@Component +public class OperationsAuthRoute extends BaseRouteBuilder { + + @Override + @SuppressWarnings("unchecked") + public void configure() throws Exception { + + from("rest:get:test/auth").to("direct:get-access-token"); + + /** + * Error handling route + */ + from("direct:access-token-error").id("access-token-error").process(exchange -> { + logger.error("Error while fetching Access Token from server: " + exchange.getIn().getBody()); + }); + + /** + * Save Access Token to AccessTokenStore + */ + + from("direct:access-token-save").id("access-token-save").unmarshal().json(JsonLibrary.Jackson, HashMap.class).process(exchange -> { + // TODO: Figure out access token storage if required + Map jsonObject = exchange.getIn().getBody(HashMap.class); + exchange.setProperty(OPS_APP_ACCESS_TOKEN, jsonObject.get("access_token")); + logger.debug("Saved Access Token: " + exchange.getProperty(OPS_APP_ACCESS_TOKEN, String.class)); + exchange.getIn().setBody(jsonObject.toString()); + }); + + /** + * Fetch Access Token from SLCB + */ + getBaseExternalApiRequestRouteDefinition("access-token-fetch", HttpRequestMethod.POST) + .setHeader(Exchange.REST_HTTP_QUERY, + simpleF("username=%s&password=%s&grant_type=%s", 
operationsAppConfig.username, operationsAppConfig.password, + "password")) + .setHeader("Authorization", constant("Basic Y2xpZW50Og==")) + .setHeader(HEADER_PLATFORM_TENANT_ID, simple("${exchangeProperty." + TENANT_ID + "}")) + .toD(operationsAppConfig.authUrl + "?bridgeEndpoint=true").log(LoggingLevel.INFO, "Auth response: \n\n ${body}"); + + /** + * Access Token check validity and return value + */ + from("direct:get-access-token").id("get-access-token").to("direct:access-token-fetch").choice() + .when(header("CamelHttpResponseCode").isEqualTo("200")).log("Access Token Fetch Successful").to("direct:access-token-save") + .otherwise().log("Access Token Fetch Unsuccessful").to("direct:access-token-error"); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/routes/OrderingRoute.java b/src/main/java/org/mifos/processor/bulk/camel/routes/OrderingRoute.java new file mode 100644 index 00000000..e0665fa7 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/routes/OrderingRoute.java @@ -0,0 +1,80 @@ +package org.mifos.processor.bulk.camel.routes; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.TRANSACTION_LIST; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.ORDERED_BY; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.ORDERING_FAILED; + +import java.util.HashMap; +import java.util.List; +import org.mifos.processor.bulk.schema.Transaction; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class OrderingRoute extends BaseRouteBuilder { + + @Value("${config.ordering.field}") + private String orderingField; + + @Override + @SuppressWarnings("unchecked") + public void configure() { + + /** + * Base route for kicking off ordering logic. Performs below tasks. 1. Downloads the csv form cloud. 2. Builds + * the [Transaction] array using CsvMapper. 3. Re-order the array generated in step1 based on [orderingField]. + * 4. 
Update file with the updated data. 5. Uploads the updated file in cloud. + */ + from(RouteId.ORDERING.getValue()).id(RouteId.ORDERING.getValue()).log("Starting route " + RouteId.ORDERING.name()) + .to("direct:download-file").to("direct:get-transaction-array").to("direct:order-data").to("direct:update-file") + .to("direct:upload-file").process(exchange -> { + exchange.setProperty(ORDERING_FAILED, false); + exchange.setProperty(ORDERED_BY, orderingField); + }); + + // re-order the array of [Transaction] based on [orderingField] + from("direct:order-data").id("direct:order-data").log("Starting route direct:order-data").process(exchange -> { + List transactionList = exchange.getProperty(TRANSACTION_LIST, List.class); + HashMap> stringListHashMap = new HashMap<>(); + transactionList.forEach((transaction) -> { + String key; + switch (orderingField) { + case "id": + key = "" + transaction.getId(); + break; + case "request_id": + key = transaction.getRequestId(); + break; + case "account_number": + key = transaction.getAccountNumber(); + break; + case "payee_identifier": + key = transaction.getPayeeIdentifier(); + break; + case "amount": + key = transaction.getAmount(); + break; + case "currency": + key = transaction.getCurrency(); + break; + case "note": + key = transaction.getNote(); + break; + default: + key = transaction.getPaymentMode(); + break; + } + + if (stringListHashMap.containsKey(key)) { + stringListHashMap.get(key).add(transaction); + } else { + transactionList.add(transaction); + stringListHashMap.put(key, transactionList); + } + }); + transactionList.clear(); + stringListHashMap.forEach((s, transactions) -> transactionList.addAll(transactions)); + exchange.setProperty(TRANSACTION_LIST, transactionList); + }); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/routes/ProcessorStartRoute.java b/src/main/java/org/mifos/processor/bulk/camel/routes/ProcessorStartRoute.java new file mode 100644 index 00000000..46ff527f --- /dev/null +++ 
b/src/main/java/org/mifos/processor/bulk/camel/routes/ProcessorStartRoute.java @@ -0,0 +1,361 @@ +package org.mifos.processor.bulk.camel.routes; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.BATCH_REQUEST_TYPE; +import static org.mifos.processor.bulk.camel.config.CamelProperties.HEADER_PLATFORM_TENANT_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.IS_UPDATED; +import static org.mifos.processor.bulk.camel.config.CamelProperties.LOCAL_FILE_PATH; +import static org.mifos.processor.bulk.camel.config.CamelProperties.OVERRIDE_HEADER; +import static org.mifos.processor.bulk.camel.config.CamelProperties.TENANT_NAME; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.APPROVAL_ENABLED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.AUTHORIZATION_ENABLED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.BATCH_AGGREGATE_ENABLED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.BATCH_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.BULK_NOTIF_FAILURE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.BULK_NOTIF_SUCCESS; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CALLBACK; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.COMPLETION_THRESHOLD; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.COMPLETION_THRESHOLD_CHECK_ENABLED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.DE_DUPLICATION_ENABLE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FILE_NAME; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FORMATTING_ENABLED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.MAX_CALLBACK_RETRY; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.MAX_STATUS_RETRY; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.MERGE_ENABLED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.ORDERING_ENABLED; +import static 
org.mifos.processor.bulk.zeebe.ZeebeVariables.PARTY_LOOKUP_ENABLED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PAYEE_DFSP_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PURPOSE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.REQUEST_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.SPLITTING_ENABLED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.THRESHOLD_DELAY; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import org.apache.camel.Exchange; +import org.apache.camel.LoggingLevel; +import org.apache.tika.Tika; +import org.json.JSONObject; +import org.mifos.processor.bulk.camel.config.CamelProperties; +import org.mifos.processor.bulk.config.BudgetAccountConfig; +import org.mifos.processor.bulk.connectors.service.ProcessorStartRouteService; +import org.mifos.processor.bulk.file.FileTransferService; +import org.mifos.processor.bulk.properties.TenantImplementation; +import org.mifos.processor.bulk.properties.TenantImplementationProperties; +import org.mifos.processor.bulk.utility.PhaseUtils; +import org.mifos.processor.bulk.utility.Utils; +import org.mifos.processor.bulk.zeebe.ZeebeProcessStarter; +import org.mifos.processor.bulk.zeebe.worker.WorkerConfig; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class ProcessorStartRoute extends BaseRouteBuilder { + + @Autowired + private ZeebeProcessStarter zeebeProcessStarter; + @Autowired + TenantImplementationProperties 
tenantImplementationProperties; + + @Autowired + @Qualifier("awsStorage") + private FileTransferService fileTransferService; + + @Autowired + protected WorkerConfig workerConfig; + + @Value("${application.bucket-name}") + private String bucketName; + + @Value("${bpmn.flows.bulk-processor}") + private String workflowId; + + @Value("${config.completion-threshold-check.completion-threshold}") + private int completionThreshold; + + @Value("${config.completion-threshold-check.max-retry}") + private int maxThresholdCheckRetry; + + @Value("${config.completion-threshold-check.delay}") + private int thresholdCheckDelay; + + @Value("${callback.max-retry}") + private int maxCallbackRetry; + + @Value("${pollingApi.timer}") + private String pollApiTimer; + + @Value("#{'${csv.columnNames}'.split(',')}") + private List columnNames; + + @Value("${csv.size}") + private int csvSize; + + private final Logger logger = LoggerFactory.getLogger(this.getClass()); + + @Autowired + PhaseUtils phaseUtils; + + @Autowired + BudgetAccountConfig budgetAccountConfig; + + @Override + public void configure() { + setup(); + } + + private void setup() { + + from("direct:post-batch-transactions").id("rest:POST:/batchtransactions").log("Starting route rest:POST:/batchtransactions") + .to("direct:validate-file").choice().when(header("CamelHttpResponseCode").isNotEqualTo("200")) + .log(LoggingLevel.ERROR, "File upload failed").otherwise().process(exchange -> { + String batchId = UUID.randomUUID().toString(); + exchange.setProperty(BATCH_ID, batchId); + + }).bean(ProcessorStartRouteService.class, "validateFileSyncResponse").choice() + .when(header("CamelHttpResponseCode").isNotEqualTo("200")).log(LoggingLevel.ERROR, "File upload failed").otherwise() + .to("direct:executeBatch").endChoice().endChoice(); + + from("direct:post-bulk-transfer").unmarshal().mimeMultipart("multipart/*").to("direct:validate-tenant").process(exchange -> { + String fileName = System.currentTimeMillis() + "_" + 
exchange.getIn().getHeader("fileName", String.class); + String requestId = exchange.getIn().getHeader("requestId", String.class); + String purpose = exchange.getIn().getHeader("purpose", String.class); + String batchId = UUID.randomUUID().toString(); + String payeeDfspId = exchange.getIn().getHeader(CamelProperties.PAYEE_DFSP_ID, String.class); + String callbackUrl = exchange.getIn().getHeader("X-CallbackURL", String.class); + exchange.setProperty(CALLBACK, callbackUrl); + exchange.setProperty(BATCH_ID, batchId); + exchange.setProperty(FILE_NAME, fileName); + exchange.setProperty(REQUEST_ID, requestId); + exchange.setProperty(PURPOSE, purpose); + exchange.setProperty(PAYEE_DFSP_ID, payeeDfspId); + }).wireTap("direct:start-batch-process-csv"); + + from("direct:validate-tenant").id("direct:validate-tenant").log("Validating tenant").process(exchange -> { + String tenantName = exchange.getIn().getHeader(HEADER_PLATFORM_TENANT_ID, String.class); + // validation is disabled for now + /* + * if (tenantName == null || tenantName.isEmpty() || !tenants.contains(tenantName)) { throw new + * Exception("Invalid tenant value."); } + */ + exchange.setProperty(TENANT_NAME, tenantName); + }).setHeader("Content-Type", constant("application/json;charset=UTF-8")).log("Completed route direct:validate-tenant"); + + // this route is responsible for editing the incoming records based on configuration + // this step is done to make sure the file format of CSV is not altered and only the data is updated based on + // config + from("direct:update-incoming-data").id("direct:update-incoming-data").log("direct:update-incoming-data") + // [LOCAL_FILE_PATH] is already set in [direct:validateFileSyncResponse] route + // FIXED: Prepend "/" because files are saved to root directory by FileStorageServiceImpl + // (Paths.get("")) + .process(exchange -> { + String filename = exchange.getProperty(FILE_NAME, String.class); + String fullPath = "/" + filename; + exchange.setProperty(LOCAL_FILE_PATH, 
fullPath); + }).to("direct:get-transaction-array") + // make sure new data is set under the exchange variable [RESULT_TRANSACTION_LIST] + .bean(ProcessorStartRouteService.class, "updateIncomingData").choice() + // update only when previous(edit function) makes any changes to data + .when(exchange -> exchange.getProperty(IS_UPDATED, Boolean.class)) + // warning: changing this flag can break things + .setProperty(OVERRIDE_HEADER, constant(true)) // default header in CSV file will be used + .to("direct:update-file-v2").otherwise().log(LoggingLevel.INFO, "No update"); + + from("direct:start-batch-process-csv").id("direct:start-batch-process-csv").log("Starting route direct:start-batch-process-csv") + .to("direct:update-incoming-data").bean(ProcessorStartRouteService.class, "startBatchProcessCsv") + .log("Completed route direct:start-batch-process-csv").bean(ProcessorStartRouteService.class, "pollingOutput"); + + from("direct:start-batch-process-raw").id("direct:start-batch-process-raw").log("Starting route direct:start-batch-process-raw") + .process(exchange -> { + JSONObject response = new JSONObject(); + response.put("batch_id", UUID.randomUUID().toString()); + response.put("request_id", UUID.randomUUID().toString()); + response.put("status", "queued"); + exchange.getIn().setBody(response.toString()); + }).log("Completed route direct:start-batch-process-raw"); + + from("direct:executeBatch").id("direct:executeBatch").log("Starting route direct:executeBatch") + .bean(ProcessorStartRouteService.class, "validateTenant").bean(ProcessorStartRouteService.class, "executeBatch").choice() + .when(exchange -> exchange.getProperty(BATCH_REQUEST_TYPE, String.class).equalsIgnoreCase("raw")) + .bean(ProcessorStartRouteService.class, "startBatchProcessRaw") + .when(exchange -> exchange.getProperty(BATCH_REQUEST_TYPE, String.class).equalsIgnoreCase("csv")) + .to("direct:start-batch-process-csv").otherwise() + .setBody(exchange -> 
getUnsupportedTypeJson(exchange.getProperty(BATCH_REQUEST_TYPE, String.class)).toString()) + .log("Completed execution of route rest:POST:/batchtransactions"); + + from("direct:pollingOutput").id("direct:pollingOutput").log("Started pollingOutput route").process(exchange -> { + JSONObject json = new JSONObject(); + json.put("PollingPath", "/batch/Summary/" + exchange.getProperty(BATCH_ID)); + json.put("SuggestedCallbackSeconds", pollApiTimer); + exchange.getIn().setBody(json.toString()); + exchange.getIn().setHeader(Exchange.HTTP_RESPONSE_CODE, 202); + }); + + from("direct:validateFileSyncResponse").id("direct:validateFileSyncResponse").log("Starting route direct:validateFileSyncResponse") + .process(exchange -> { + // move this logic to spring + String fileName = exchange.getIn().getHeader(FILE_NAME, String.class); + File file = new File(fileName); + + // check the file structure + int fileSize = (int) file.length(); + if (fileSize > csvSize) { + setErrorResponse(exchange, 400, "File too big", + "The file uploaded is too big. " + "Please upload a file and try again."); + } else if (!verifyCsv(file)) { + setErrorResponse(exchange, 400, "Invalid file structure", + "The file uploaded contains wrong structure." + " Please upload correct file columns and try again."); + } else { + logger.debug("Filename: {}", fileName); + setResponse(exchange, 200); + } + + }).log("Completed route direct:validateFileSyncResponse"); + + from("direct:validate-file").id("direct:validate-file").log("Starting route direct:validate-file").process(exchange -> { + File f = new File(exchange.getIn().getHeader(FILE_NAME, String.class)); + logger.debug("File name: {} ", f.getName()); + Tika tika = new Tika(); + String fileType = tika.detect(f.getName()); + logger.debug("File type: {} ", fileType); + if (f.getName().isEmpty()) { + setErrorResponse(exchange, 400, "File not uploaded", + "There was no fie uploaded with the request. 
" + "Please upload a file and try again."); + } else if (!fileType.equalsIgnoreCase("text/csv")) { + setErrorResponse(exchange, 400, "Broken file", + "The file uploaded is broken as it has a different extension. " + "Please upload a csv file and try again."); + } else { + setResponse(exchange, 200); + } + + }); + + } + + public boolean verifyData(File file) throws IOException { + logger.info("verifyData() - file exists: {}, file size: {}, file path: {}", file.exists(), file.length(), file.getAbsolutePath()); + InputStream ips = new FileInputStream(file); + InputStreamReader ipsr = new InputStreamReader(ips); + BufferedReader br = new BufferedReader(ipsr); + String line; + String header = br.readLine(); + logger.info("verifyData() - CSV header: {}", header); + int rowCount = 0; + while ((line = br.readLine()) != null) { + rowCount++; + String[] row = line.split(","); + logger.info("verifyData() - row {}: length={}, expected={}, content={}", rowCount, row.length, columnNames.size(), line); + if (row.length != columnNames.size()) { + logger.info("verifyData() - Row invalid: length={}, expected={}", row.length, columnNames.size()); + logger.debug("DEBUG FRED10 Row invalid {} {}", row.length, columnNames.size()); + logger.debug("Row invalid {} {}", row.length, columnNames.size()); + return false; + } + if (!verifyRow(row)) { + logger.info("verifyData() - verifyRow failed for row {}", rowCount); + return false; + } + } + logger.info("verifyData() - SUCCESS: validated {} rows", rowCount); + return true; + } + + public String getWorkflowForTenant(String tenantId, String useCase) { + + for (TenantImplementation tenant : tenantImplementationProperties.getTenants()) { + logger.info("FRED1 Tenant id: {} ", tenant.getId()); + logger.info("FRED1 tenandId {} ", tenantId); + logger.info("FRED1 useCase {} ", useCase); + if (tenant.getId().equals(tenantId)) { + return tenant.getFlows().getOrDefault(useCase, "default"); + } + } + return "default"; + } + + private boolean 
verifyRow(String[] row) { + // DISABLED: This validation logic is broken - it checks row VALUES instead of using column positions + // Example bug: When it sees "MSISDN" at position 3, it does indexOf("MSISDN") which returns 0, + // then checks row[1] (the UUID request_id) instead of row[4] (the actual phone number) + // TODO: Rewrite this to use column positions from columnNames instead of searching values + logger.info("verifyRow() - VALIDATION DISABLED - row accepted"); + return true; + } + + public boolean verifyCsv(File csvData) throws IOException { + BufferedReader br = new BufferedReader(new FileReader(csvData)); + String header = br.readLine(); + String[] columns = new String[0]; + if (header != null) { + columns = header.split(","); + logger.debug("Columns in the csv file are {}", Arrays.toString(columns)); + } + int i = 0; + while (i < columns.length) { + if (columnNames.contains(columns[i])) { + logger.debug("Column name {} is at index {} ", columns[i], columnNames.indexOf(columns[i])); + i++; + + } else { + return false; + } + } + return true; + } + + public void setErrorResponse(Exchange exchange, int responseCode, String errorInfo, String errorDescription) { + // TODO Auto-generated method stub + JSONObject json = new JSONObject(); + json.put("Error Information: ", errorInfo); + json.put("Error Description : ", errorDescription); + exchange.getIn().setHeader(Exchange.HTTP_RESPONSE_CODE, responseCode); + exchange.getIn().setBody(json.toString()); + exchange.setProperty("body", json); + logger.error("Error response is {}", json); + } + + public void setResponse(Exchange exchange, int responseCode) { + exchange.getIn().setHeader(Exchange.HTTP_RESPONSE_CODE, responseCode); + } + + public Map setConfigProperties(Map variables) { + variables.put(BATCH_AGGREGATE_ENABLED, workerConfig.isBatchAggregateEnabled); + variables.put(PARTY_LOOKUP_ENABLED, workerConfig.isPartyLookUpWorkerEnabled); + variables.put(AUTHORIZATION_ENABLED, 
workerConfig.isAuthorizationWorkerEnabled); + variables.put(APPROVAL_ENABLED, workerConfig.isApprovalWorkerEnabled); + variables.put(DE_DUPLICATION_ENABLE, workerConfig.isTransactionDeduplicationEnabled); + variables.put(ORDERING_ENABLED, workerConfig.isOrderingWorkerEnabled); + variables.put(SPLITTING_ENABLED, workerConfig.isSplittingWorkerEnabled); + variables.put(FORMATTING_ENABLED, workerConfig.isFormattingWorkerEnabled); + variables.put(COMPLETION_THRESHOLD_CHECK_ENABLED, workerConfig.isCompletionThresholdCheckEnabled); + variables.put(MERGE_ENABLED, workerConfig.isMergeBackWorkerEnabled); + variables.put(MAX_STATUS_RETRY, maxThresholdCheckRetry); + variables.put(COMPLETION_THRESHOLD, completionThreshold); + variables.put(THRESHOLD_DELAY, Utils.getZeebeTimerValue(thresholdCheckDelay)); + variables.put(BULK_NOTIF_SUCCESS, false); + variables.put(BULK_NOTIF_FAILURE, false); + variables.put(MAX_CALLBACK_RETRY, maxCallbackRetry); + + return variables; + } + + private JSONObject getUnsupportedTypeJson(String type) { + JSONObject response = new JSONObject(); + response.put("errorCode", 400); + response.put("errorDescription", String.format("Query parameter ?type=%s not supported", type)); + response.put("developerMessage", ""); + return response; + } +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/routes/PubSubRoute.java b/src/main/java/org/mifos/processor/bulk/camel/routes/PubSubRoute.java new file mode 100644 index 00000000..c108c360 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/routes/PubSubRoute.java @@ -0,0 +1,54 @@ +package org.mifos.processor.bulk.camel.routes; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.ENDPOINT; +import static org.mifos.processor.bulk.camel.config.CamelProperties.EVENT_TYPE; +import static org.mifos.processor.bulk.camel.config.CamelProperties.HOST; + +import org.apache.camel.Exchange; +import org.mifos.processor.bulk.config.PubSubConfig; +import 
org.mifos.processor.bulk.config.SecurityServerConfig; +import org.mifos.processor.bulk.schema.SubscriptionDTO; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class PubSubRoute extends BaseRouteBuilder { + + private final PubSubConfig pubSubConfig; + private final SecurityServerConfig securityServerConfig; + + @Value("${gov-stack-client.header-key}") + private String govStackClientHeaderKey; + + @Value("${gov-stack-client.header-value}") + private String govStackClientHeaderValue; + + public PubSubRoute(PubSubConfig pubSubConfig, SecurityServerConfig securityServerConfig) { + this.pubSubConfig = pubSubConfig; + this.securityServerConfig = securityServerConfig; + } + + @Override + public void configure() throws Exception { + + // needs EVENT_TYPE input from exchange + from("direct:subscribe").id("direct:subscribe") + .setBody(exchange -> getEventTypeSpecificSubscriptionDTO(exchange.getProperty(EVENT_TYPE, String.class))) + .setHeader(Exchange.HTTP_METHOD, constant(HttpRequestMethod.POST.toString())) + .setHeader(govStackClientHeaderKey, constant(govStackClientHeaderValue)) + .setProperty(HOST, constant(securityServerConfig.host)).setProperty(ENDPOINT, constant(securityServerConfig.subscribingUrl)) + .to("direct:external-api-calling"); + + } + + private SubscriptionDTO getDefaultSubscriptionDTO() { + return SubscriptionDTO.subscriptionDTOBuilder.roomCode(pubSubConfig.roomCode).roomClass(pubSubConfig.roomClass) + .srcOperationId("bulkProcessing").srcServiceCode("bulk").dstOperationId("newRecord").dstServiceCode("bulk").build(); + } + + private SubscriptionDTO getEventTypeSpecificSubscriptionDTO(String eventType) { + SubscriptionDTO subscriptionDTO = getDefaultSubscriptionDTO(); + subscriptionDTO.setDstServiceCode(eventType); // todo update once confirmed which field is for eventType + return subscriptionDTO; + } +} diff --git 
a/src/main/java/org/mifos/processor/bulk/camel/routes/RouteId.java b/src/main/java/org/mifos/processor/bulk/camel/routes/RouteId.java new file mode 100644 index 00000000..3a4879b3 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/routes/RouteId.java @@ -0,0 +1,20 @@ +package org.mifos.processor.bulk.camel.routes; + +public enum RouteId { + + PARTY_LOOKUP("direct:partyLookup"), APPROVAL("direct:approval"), ORDERING("direct:ordering"), SPLITTING("direct:splitting"), FORMATTING( + "direct:formatting"), BATCH_STATUS("direct:batchStatus"), SEND_CALLBACK("direct:sendCallback"), MERGE_BACK( + "direct:mergeSubBatch"), INIT_SUB_BATCH("direct:init-sub-batches"), BATCH_AGGREGATE( + "direct:batch-aggregate"), DE_DUPLICATION("direct:deDuplication"), ACCOUNT_LOOKUP( + "direct:accountLookup"), ACCOUNT_LOOKUP_CALLBACK("direct:accountLookupCallback"); + + private final String value; + + RouteId(String s) { + value = s; + } + + public String getValue() { + return value; + } +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/routes/Routes.java b/src/main/java/org/mifos/processor/bulk/camel/routes/Routes.java new file mode 100644 index 00000000..191b7361 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/routes/Routes.java @@ -0,0 +1,103 @@ +package org.mifos.processor.bulk.camel.routes; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.HEADER_PLATFORM_TENANT_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.IS_BATCH_READY; +import static org.mifos.processor.bulk.camel.config.CamelProperties.OPS_APP_ACCESS_TOKEN; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.BATCH_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.IS_SAMPLE_READY; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.SAMPLED_TX_IDS; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.TENANT_ID; + +import com.google.gson.Gson; +import java.util.ArrayList; +import java.util.Collections; +import 
java.util.HashMap; +import java.util.Iterator; +import org.json.JSONObject; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class Routes extends BaseRouteBuilder { + + @Value("${config.minimum-successful-tx-ratio}") + double minimumSuccessfulTxRatio; + + @Override + public void configure() throws Exception { + routeCheckTransactions(); + routeSampleTransactions(); + } + + private void routeCheckTransactions() { + String id = "check-transactions"; + from("direct:" + id).id(id).log("Fetching transaction details") + // set request params + .setHeader("Authorization", simple("Bearer ${exchangeProperty." + OPS_APP_ACCESS_TOKEN + "}")) + .setHeader(HEADER_PLATFORM_TENANT_ID, simple("${exchangeProperty." + TENANT_ID + "}")) + .toD(operationsAppConfig.batchTransactionEndpoint).process(exchange -> { + // get response body + JSONObject transfers = new JSONObject(exchange.getIn().getBody(String.class)); + + int totalTransactions = transfers.length(); + int successfulTransactions = 0; + for (Iterator it = transfers.keys(); it.hasNext();) { + String transactionId = it.next(); + String transactionStatus = transfers.getString(transactionId); + if (transactionStatus.equals("COMPLETED")) { + successfulTransactions++; + } + } + + HashMap newVariables = new HashMap<>(); + // check successful transactions >= x% + if (((double) successfulTransactions / totalTransactions) >= minimumSuccessfulTxRatio) { + newVariables.put(IS_SAMPLE_READY, true); + } else { + newVariables.put(IS_SAMPLE_READY, false); + } + + zeebeClient.newSetVariablesCommand(Long.parseLong(exchange.getProperty(BATCH_ID).toString())).variables(newVariables) + .send().join(); + }); + } + + private void routeSampleTransactions() { + String id = "sample-transactions"; + from("direct:" + id).id(id).log("Fetching transaction details").process(exchange -> { + exchange.getIn().setHeader("batchId", exchange.getProperty(BATCH_ID)); + 
}).setHeader("Authorization", simple("Bearer ${exchangeProperty." + OPS_APP_ACCESS_TOKEN + "}")) + .setHeader(HEADER_PLATFORM_TENANT_ID, simple("${exchangeProperty." + TENANT_ID + "}")) + .toD(operationsAppConfig.batchTransactionEndpoint).process(exchange -> { + // get response body + + // check if batch is ready for sampling + if (exchange.getProperty(IS_BATCH_READY, String.class).equals("false")) { + return; + } + // sample transactions + JSONObject transfers = new JSONObject(exchange.getIn().getBody(String.class)); + final ArrayList successfulTransactionIds = new ArrayList<>(); + final ArrayList sampledTransactionIds = new ArrayList<>(); + for (Iterator it = transfers.keys(); it.hasNext();) { + String transactionId = it.next(); + String transactionStatus = transfers.getString(transactionId); + if (transactionStatus.equals("COMPLETED")) { + successfulTransactionIds.add(transactionId); + } + } + Collections.shuffle(successfulTransactionIds); + int sampleSize = (int) (successfulTransactionIds.size() * 0.9); + for (int i = 0; i < sampleSize; i++) { + sampledTransactionIds.add(successfulTransactionIds.get(i)); + } + HashMap newVariables = new HashMap<>(); + newVariables.put(SAMPLED_TX_IDS, new Gson().toJson(sampledTransactionIds)); + + // store the sampled transaction ids in zeebe variable + zeebeClient.newSetVariablesCommand(Long.parseLong(exchange.getProperty(BATCH_ID).toString())).variables(newVariables) + .send().join(); + + }); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/routes/SendCallbackRoute.java b/src/main/java/org/mifos/processor/bulk/camel/routes/SendCallbackRoute.java new file mode 100644 index 00000000..099caf2b --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/routes/SendCallbackRoute.java @@ -0,0 +1,99 @@ +package org.mifos.processor.bulk.camel.routes; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.CALLBACK_RESPONSE_CODE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.BATCH_ID; 
+import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CALLBACK; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CALLBACK_RETRY; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CALLBACK_SUCCESS; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CLIENT_CORRELATION_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.COMPLETION_RATE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.ERROR_CODE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.ERROR_DESCRIPTION; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.MAX_CALLBACK_RETRY; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PHASES; + +import com.fasterxml.jackson.databind.ObjectMapper; +import java.util.List; +import org.apache.camel.Exchange; +import org.apache.camel.LoggingLevel; +import org.mifos.processor.bulk.schema.BatchCallbackDTO; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class SendCallbackRoute extends BaseRouteBuilder { + + @Value("${callback.url}") + private String callbackUrl; + + @Override + @SuppressWarnings("unchecked") + public void configure() throws Exception { + + from("rest:get:test/send/callback").to(RouteId.SEND_CALLBACK.getValue()); + + /** + * Base route for kicking off callback. Performs below tasks. 
Sends Callback to the set url Checks of response + * code is anything not 2xx then retries + */ + + from(RouteId.SEND_CALLBACK.getValue()).id(RouteId.SEND_CALLBACK.getValue()).log("Starting route " + RouteId.SEND_CALLBACK.name()) + .log("Sending callback for Batch Processing").setHeader(Exchange.HTTP_METHOD, constant("POST")).process(exchange -> { + String message = String.format("The Batch Aggregation API was complete with : %s", + exchange.getProperty(COMPLETION_RATE).toString()); + callbackUrl = exchange.getProperty(CALLBACK, String.class); + logger.info("Callback URL: {}", callbackUrl); + logger.info("Callback Body: {}", message); + String batchId = exchange.getProperty(BATCH_ID, String.class); + String clientCorrelationId = exchange.getProperty(CLIENT_CORRELATION_ID, String.class); + BatchCallbackDTO batchCallbackDTO = new BatchCallbackDTO(clientCorrelationId, batchId, message); + ObjectMapper objectMapper = new ObjectMapper(); + String jsonString = objectMapper.writeValueAsString(batchCallbackDTO); + exchange.getIn().setBody(jsonString); + }).choice().when(exchangeProperty("X-CallbackURL").isNotNull()).setHeader(Exchange.HTTP_METHOD, constant("POST")) + .toD("${exchangeProperty.X-CallbackURL}?bridgeEndpoint=true&throwExceptionOnFailure=false") + .log(LoggingLevel.INFO, "Callback Response body: ${body}").endChoice().otherwise() + .log("Unable to send callback: callback url is null").choice().when(header(Exchange.HTTP_RESPONSE_CODE).regex("^2\\d{2}$")) + .when(exchangeProperty("X-CallbackURL").isNotNull()).log(LoggingLevel.INFO, "Callback sending was successful") + .process(exchange -> { + List phases = exchange.getProperty(PHASES, List.class); + exchange.setProperty(CALLBACK_RESPONSE_CODE, exchange.getIn().getHeader(Exchange.HTTP_RESPONSE_CODE)); + exchange.setProperty(CALLBACK_RETRY, 1); + exchange.setProperty(CALLBACK_SUCCESS, true); + eliminatePhases(exchange); + }).otherwise().log(LoggingLevel.ERROR, "Callback request was unsuccessful").process(exchange -> 
{ + int retry = exchange.getProperty(CALLBACK_RETRY, Integer.class); + int maxRetry = exchange.getProperty(MAX_CALLBACK_RETRY, Integer.class); + if (retry >= maxRetry) { + List phases = exchange.getProperty(PHASES, List.class); + logger.info("Retry Exhausted, setting Callback as Failed"); + eliminatePhases(exchange); + exchange.setProperty(CALLBACK_RESPONSE_CODE, exchange.getIn().getHeader(Exchange.HTTP_RESPONSE_CODE)); + exchange.setProperty(CALLBACK_SUCCESS, false); + exchange.setProperty(ERROR_DESCRIPTION, exchange.getIn().getBody(String.class)); + exchange.setProperty(ERROR_CODE, exchange.getIn().getHeader(Exchange.HTTP_RESPONSE_CODE)); + } else { + retry++; + logger.info("Retry Left {}, Setting Callback as Failed and Retrying...", (maxRetry - retry)); + exchange.setProperty(CALLBACK_RETRY, retry); + + } + exchange.setProperty(CALLBACK_RESPONSE_CODE, exchange.getIn().getHeader(Exchange.HTTP_RESPONSE_CODE)); + exchange.setProperty(CALLBACK_SUCCESS, false); + exchange.setProperty(ERROR_DESCRIPTION, exchange.getIn().getBody(String.class)); + exchange.setProperty(ERROR_CODE, exchange.getIn().getHeader(Exchange.HTTP_RESPONSE_CODE)); + + }); + } + + @SuppressWarnings("unchecked") + public void eliminatePhases(Exchange exchange) { + List phases = exchange.getProperty(PHASES, List.class); + int completionRate = exchange.getProperty(COMPLETION_RATE, Integer.class); + + phases.removeIf(phase -> phase <= completionRate); + + exchange.setProperty(PHASES, phases); + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/camel/routes/SplittingRoute.java b/src/main/java/org/mifos/processor/bulk/camel/routes/SplittingRoute.java new file mode 100644 index 00000000..4cffdc80 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/camel/routes/SplittingRoute.java @@ -0,0 +1,238 @@ +package org.mifos.processor.bulk.camel.routes; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.LOCAL_FILE_PATH; +import static 
org.mifos.processor.bulk.camel.config.CamelProperties.OVERRIDE_HEADER;
import static org.mifos.processor.bulk.camel.config.CamelProperties.REGISTERING_INSTITUTE_ID;
import static org.mifos.processor.bulk.camel.config.CamelProperties.RESULT_TRANSACTION_LIST;
import static org.mifos.processor.bulk.camel.config.CamelProperties.SERVER_FILE_NAME;
import static org.mifos.processor.bulk.camel.config.CamelProperties.SERVER_SUB_BATCH_FILE_NAME_ARRAY;
import static org.mifos.processor.bulk.camel.config.CamelProperties.SUB_BATCH_COUNT;
import static org.mifos.processor.bulk.camel.config.CamelProperties.SUB_BATCH_CREATED;
import static org.mifos.processor.bulk.camel.config.CamelProperties.SUB_BATCH_DETAILS;
import static org.mifos.processor.bulk.camel.config.CamelProperties.SUB_BATCH_FILE_ARRAY;
import static org.mifos.processor.bulk.camel.config.CamelProperties.TRANSACTION_LIST;
import static org.mifos.processor.bulk.camel.config.CamelProperties.ZEEBE_VARIABLE;
import static org.mifos.processor.bulk.zeebe.ZeebeVariables.BATCH_ID;
import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CLIENT_CORRELATION_ID;
import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PAYER_IDENTIFIER;
import static org.mifos.processor.bulk.zeebe.ZeebeVariables.REQUEST_ID;
import static org.mifos.processor.bulk.zeebe.ZeebeVariables.SPLITTING_FAILED;

import com.fasterxml.jackson.databind.SequenceWriter;
import com.fasterxml.jackson.dataformat.csv.CsvMapper;
import com.fasterxml.jackson.dataformat.csv.CsvSchema;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import org.apache.camel.LoggingLevel;
import org.mifos.processor.bulk.schema.SubBatchEntity;
import org.mifos.processor.bulk.schema.Transaction;
import org.mifos.processor.bulk.utility.TransactionParser;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

/**
 * Splits a bulk-payment CSV into sub-batch CSV files — either one per distinct
 * payee DFSP (when party lookup is enabled) or into fixed-size chunks — then
 * uploads each sub-batch file and records a {@link SubBatchEntity} per file.
 */
@Component
public class SplittingRoute extends BaseRouteBuilder {

    /** Maximum number of transactions per size-based sub-batch. */
    @Value("${config.splitting.sub-batch-size}")
    private int subBatchSize;

    @Autowired
    private CsvMapper csvMapper;

    /** When true (together with the per-exchange "batchAccountLookup" flag), split by payee DFSP id. */
    @Value("${config.partylookup.enable}")
    private boolean isPartyLookupEnabled;

    @Override
    @SuppressWarnings("unchecked")
    public void configure() throws Exception {

        /*
         * Base route for starting the splitting process. Refer below routes for more info:
         * 1. direct:create-sub-batch-file 2. direct:upload-sub-batch-file
         */
        from(RouteId.SPLITTING.getValue()).id(RouteId.SPLITTING.getValue()).log("Starting route " + RouteId.SPLITTING.name())
                .to("direct:download-file").to("direct:get-transaction-array").to("direct:create-sub-batch-file").choice()
                .when(exchange -> exchange.getProperty(SUB_BATCH_CREATED, Boolean.class)).to("direct:upload-sub-batch-file").otherwise()
                .log("No sub batch created, so skipping upload").end().process(exchange -> exchange.setProperty(SPLITTING_FAILED, false));

        // Creates the sub-batch CSVs.
        from("direct:create-sub-batch-file").id("direct:create-sub-batch-file").log("Creating sub-batch file").process(exchange -> {
            String filepath = exchange.getProperty(LOCAL_FILE_PATH, String.class);
            List<String> subBatchFile = new ArrayList<>();
            // Fix: try-with-resources — the reader previously leaked in the party-lookup
            // branch and on the early "batch too small" return.
            try (BufferedReader reader = new BufferedReader(new FileReader(filepath))) {
                reader.readLine(); // consume and discard the CSV header row
                List<Transaction> transactionList = exchange.getProperty(TRANSACTION_LIST, List.class);
                Set<String> distinctPayeeIds = transactionList.stream().map(Transaction::getPayeeDfspId).collect(Collectors.toSet());
                logger.info("Payee id {}", distinctPayeeIds);
                logger.info("Number of payeeId {}", distinctPayeeIds.size());
                Boolean isBatchAccountLookupEnabled = (Boolean) exchange.getProperty("batchAccountLookup");
                if (isPartyLookupEnabled && isBatchAccountLookupEnabled) {
                    // One sub-batch (and one CSV file) per distinct payee DFSP id.
                    // Remembers the sub-batch id assigned to each transaction.
                    Map<Transaction, String> transactionBatchMap = new HashMap<>();
                    for (String payeeId : distinctPayeeIds) {
                        List<Transaction> transactionsForPayee = transactionList.stream()
                                .filter(transaction -> payeeId.equals(transaction.getPayeeDfspId())).collect(Collectors.toList());

                        String subBatchId = UUID.randomUUID().toString();

                        // Assign the sub-batch id to each transaction of this payee
                        transactionsForPayee.forEach(transaction -> {
                            transaction.setBatchId(subBatchId);
                            transactionBatchMap.put(transaction, subBatchId);
                        });

                        // Create the CSV file for the current payee
                        String filename = UUID.randomUUID() + "_sub-batch-" + payeeId + ".csv";
                        logger.info("Created sub-batch with file name {}", filename);
                        CsvSchema csvSchema = csvMapper.schemaFor(Transaction.class).withHeader();
                        // Fix: close the writer so the CSV content is actually flushed to
                        // disk (the writer previously leaked and the file could stay empty).
                        try (SequenceWriter writer = csvMapper.writerWithSchemaFor(Transaction.class).with(csvSchema)
                                .writeValues(new File(filename))) {
                            for (Transaction transaction : transactionsForPayee) {
                                writer.write(transaction);
                            }
                        }
                        subBatchFile.add(filename);
                    }
                    // Propagate the assigned sub-batch ids back onto the full list.
                    transactionList.forEach(transaction -> transaction.setBatchId(transactionBatchMap.get(transaction)));
                    exchange.setProperty(RESULT_TRANSACTION_LIST, transactionList);
                    exchange.setProperty(TRANSACTION_LIST, transactionList);
                } else {
                    // Size-based splitting: read the remaining data rows.
                    List<String> lines = new ArrayList<>();
                    String line;
                    while ((line = reader.readLine()) != null) {
                        lines.add(line);
                    }

                    if (lines.size() <= subBatchSize) {
                        exchange.setProperty(SUB_BATCH_CREATED, false);
                        exchange.setProperty(SERVER_SUB_BATCH_FILE_NAME_ARRAY, new ArrayList<String>());
                        logger.info("Skipping creating sub batch, as batch size is less than configured sub-batch size");
                        return;
                    }

                    int subBatchCount = 1;
                    CsvSchema csvSchema = csvMapper.schemaFor(Transaction.class).withHeader();
                    for (int i = 0; i < lines.size(); i += subBatchSize) {
                        String subBatchId = UUID.randomUUID().toString();
                        String filename = UUID.randomUUID() + "_" + "sub-batch-" + subBatchCount + ".csv";
                        logger.info("SubBatch Id {}", subBatchId);

                        List<Transaction> subBatchTransactions = new ArrayList<>();
                        for (int j = i; j < Math.min(i + subBatchSize, lines.size()); j++) {
                            Transaction transaction = TransactionParser.parseLineToTransaction(lines.get(j));
                            assert transaction != null;
                            transaction.setBatchId(subBatchId); // Set the subBatchId for the transaction
                            subBatchTransactions.add(transaction);
                        }

                        // Write the list of Transactions to the file
                        File file = new File(filename);
                        try (SequenceWriter writer = csvMapper.writer(csvSchema).writeValues(file)) {
                            writer.writeAll(subBatchTransactions);
                        } catch (IOException e) {
                            logger.error("Failed to write sub-batch file: " + filename, e);
                        }
                        logger.info("Created sub-batch with file name {}", filename);
                        subBatchFile.add(filename);
                        subBatchCount++;
                    }
                }
            }
            exchange.setProperty(SUB_BATCH_FILE_ARRAY, subBatchFile);
            exchange.setProperty(SUB_BATCH_COUNT, subBatchFile.size());
            exchange.setProperty(SUB_BATCH_CREATED, true);
            exchange.setProperty(SERVER_SUB_BATCH_FILE_NAME_ARRAY, new ArrayList<String>());
        }).log("updating orignal").setProperty(LOCAL_FILE_PATH, exchangeProperty(SERVER_FILE_NAME))
                .setProperty(OVERRIDE_HEADER, constant(true)) // default header in CSV file will be used
                .to("direct:update-file-v2").to("direct:upload-file");

        // Iterates through each sub-batch CSV and uploads it to cloud storage.
        from("direct:upload-sub-batch-file").id("direct:upload-sub-batch-file").log("Starting upload of sub-batch file")
                .loopDoWhile(exchange -> exchange.getProperty(SUB_BATCH_FILE_ARRAY, List.class).size() > 0).process(exchange -> {
                    List<String> subBatchFile = exchange.getProperty(SUB_BATCH_FILE_ARRAY, List.class);
                    String localFilePath = subBatchFile.remove(0);
                    exchange.setProperty(LOCAL_FILE_PATH, localFilePath);
                    exchange.setProperty(SUB_BATCH_FILE_ARRAY, subBatchFile);
                    logger.debug("Local file path: {}", localFilePath);
                    logger.debug("Sub batch file array: {}, ", subBatchFile);
                }).log(LoggingLevel.DEBUG, "LOCAL_FILE_PATH: ${exchangeProperty." + LOCAL_FILE_PATH + "}")
                .to("direct:generate-sub-batch-entity").log("direct:generate-sub-batch-entity completed").to("direct:upload-file")
                .process(exchange -> {
                    String serverFilename = exchange.getProperty(SERVER_FILE_NAME, String.class);
                    List<String> serverSubBatchFile = exchange.getProperty(SERVER_SUB_BATCH_FILE_NAME_ARRAY, List.class);
                    serverSubBatchFile.add(serverFilename);
                    exchange.setProperty(SERVER_SUB_BATCH_FILE_NAME_ARRAY, serverSubBatchFile);
                    logger.debug("Server subbatch filename array: {}", serverSubBatchFile);
                });

        // Generates subBatchEntityDetails; make sure [LOCAL_FILE_PATH] holds the absolute sub-batch file path.
        from("direct:generate-sub-batch-entity").id("direct:generate-sub-batch-entity").log("Generating sub batch entity")
                .to("direct:get-transaction-array").process(exchange -> {
                    List<Transaction> transactionList = (List<Transaction>) exchange.getProperty(TRANSACTION_LIST, List.class);
                    Map<String, Object> zeebeVariables = (Map<String, Object>) exchange.getProperty(ZEEBE_VARIABLE, Map.class);
                    String serverFileName = exchange.getProperty(LOCAL_FILE_PATH, String.class);

                    logger.info("Generating sub batch entity for file {}", serverFileName);
                    if (transactionList.isEmpty()) {
                        logger.info("Transaction list is empty");
                        return;
                    }

                    Long totalAmount = getTotalAmount(transactionList);

                    SubBatchEntity subBatchEntity = getDefaultSubBatchEntity();
                    subBatchEntity.setBatchId((String) zeebeVariables.get(BATCH_ID));
                    subBatchEntity.setSubBatchId(transactionList.get(0).getBatchId());
                    subBatchEntity.setRequestId((String) zeebeVariables.get(REQUEST_ID));
                    subBatchEntity.setCorrelationId((String) zeebeVariables.get(CLIENT_CORRELATION_ID));
                    subBatchEntity.setPayerFsp((String) zeebeVariables.get(PAYER_IDENTIFIER));
                    subBatchEntity.setRegisteringInstitutionId((String) zeebeVariables.get(REGISTERING_INSTITUTE_ID));
                    subBatchEntity.setPaymentMode(transactionList.get(0).getPaymentMode());
                    subBatchEntity.setRequestFile(serverFileName);
                    subBatchEntity.setTotalTransactions((long) transactionList.size());
                    subBatchEntity.setOngoing((long) transactionList.size());
                    subBatchEntity.setTotalAmount(totalAmount);
                    subBatchEntity.setOngoingAmount(totalAmount);
                    subBatchEntity.setStartedAt(new Date(System.currentTimeMillis()));

                    logger.debug("SubBatchEntity: {}", objectMapper.writeValueAsString(subBatchEntity));
                    // Append to the accumulated sub-batch details array.
                    List<SubBatchEntity> subBatchEntityList = exchange.getProperty(SUB_BATCH_DETAILS, List.class);
                    subBatchEntityList.add(subBatchEntity);
                    exchange.setProperty(SUB_BATCH_DETAILS, subBatchEntityList);
                    logger.debug("generate-sub-batch-entity route end: {}", objectMapper.writeValueAsString(subBatchEntityList));
                });
    }

    /** Returns a SubBatchEntity with all monetary fields initialised to empty/zero. */
    private SubBatchEntity getDefaultSubBatchEntity() {
        SubBatchEntity subBatchEntity = new SubBatchEntity();
        subBatchEntity.setAllEmptyAmount();
        return subBatchEntity;
    }

    /**
     * Sums the amounts of all transactions.
     * NOTE(review): assumes every amount is a whole-number string — Long.parseLong
     * throws on decimals; confirm upstream validation guarantees integral amounts.
     */
    private long getTotalAmount(List<Transaction> transactionList) {
        long totalAmount = 0L;
        for (Transaction transaction : transactionList) {
            totalAmount += Long.parseLong(transaction.getAmount());
        }
        return totalAmount;
    }
}
diff --git a/src/main/java/org/mifos/processor/bulk/config/BudgetAccountConfig.java b/src/main/java/org/mifos/processor/bulk/config/BudgetAccountConfig.java
new file mode 100644
index 00000000..8b22c615
--- /dev/null
+++
b/src/main/java/org/mifos/processor/bulk/config/BudgetAccountConfig.java
@@ -0,0 +1,21 @@
package org.mifos.processor.bulk.config;

import java.util.ArrayList;
import java.util.List;
import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

/** Binds the {@code budget-account.*} properties: the registering institutions and their programs. */
@Getter
@Setter
@Component
@ConfigurationProperties(prefix = "budget-account")
public class BudgetAccountConfig {

    // All configured registering institutions; empty when none are configured.
    private List<RegisteringInstitutionConfig> registeringInstitutions = new ArrayList<>();

    /** Returns the institution with the given id, or null when no institution matches. */
    public RegisteringInstitutionConfig getByRegisteringInstituteId(String id) {
        for (RegisteringInstitutionConfig institution : getRegisteringInstitutions()) {
            if (institution.getId().equals(id)) {
                return institution;
            }
        }
        return null;
    }
}
diff --git a/src/main/java/org/mifos/processor/bulk/config/CorsProperties.java b/src/main/java/org/mifos/processor/bulk/config/CorsProperties.java
new file mode 100644
index 00000000..ba713a79
--- /dev/null
+++ b/src/main/java/org/mifos/processor/bulk/config/CorsProperties.java
@@ -0,0 +1,20 @@
package org.mifos.processor.bulk.config;

import java.util.List;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;

/** Binds {@code cors.allowed-origins}: the origins the CORS preflight filter accepts. */
@Configuration
@ConfigurationProperties(prefix = "cors")
public class CorsProperties {

    // Origins permitted by PreflightCorsFilter; null when the property is absent.
    private List<String> allowedOrigins;

    public List<String> getAllowedOrigins() {
        return allowedOrigins;
    }

    public void setAllowedOrigins(List<String> allowedOrigins) {
        this.allowedOrigins = allowedOrigins;
    }
}
diff --git a/src/main/java/org/mifos/processor/bulk/config/ExternalApiPayloadConfig.java b/src/main/java/org/mifos/processor/bulk/config/ExternalApiPayloadConfig.java
new file mode 100644
index 00000000..52f763d6
--- /dev/null
+++ b/src/main/java/org/mifos/processor/bulk/config/ExternalApiPayloadConfig.java
@@ -0,0 +1,51 @@
package org.mifos.processor.bulk.config;

import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
import javax.annotation.PostConstruct;
import lombok.Getter;
import org.apache.camel.Exchange;
import org.mifos.processor.bulk.camel.processor.GsmaApiPayload;
import org.mifos.processor.bulk.camel.processor.MastercardApiPayload;
import org.mifos.processor.bulk.camel.processor.MojaloopApiPayload;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;

/**
 * Registry mapping configured payment-mode ids to the processor that builds the
 * payload for that mode's external API. Populated once at startup.
 */
@Configuration
@Getter
public class ExternalApiPayloadConfig {

    // Keyed by the payment mode id exactly as configured (matching is case-insensitive at registration).
    private Map<String, Function> payloadMap = new HashMap<>();

    @Autowired
    GsmaApiPayload gsmaApiPayload;

    @Autowired
    MojaloopApiPayload mojaloopApiPayload;

    @Autowired
    MastercardApiPayload mastercardApiPayload;

    @Autowired
    PaymentModeConfiguration paymentModeConfiguration;

    /** Registers a payload builder for every configured payment mode that has a known handler. */
    @PostConstruct
    private void registerApiProcessor() {
        for (PaymentModeMapping mapping : paymentModeConfiguration.getMappings()) {
            Function payload = resolvePayload(mapping.getId());
            if (payload != null) {
                payloadMap.put(mapping.getId(), payload);
            }
        }
    }

    /** Maps a payment-mode id (case-insensitively) to its payload builder, or null if unknown. */
    private Function resolvePayload(String paymentModeId) {
        if (paymentModeId.equalsIgnoreCase("gsma")) {
            return gsmaApiPayload;
        }
        if (paymentModeId.equalsIgnoreCase("mojaloop")) {
            return mojaloopApiPayload;
        }
        if (paymentModeId.equalsIgnoreCase("MASTERCARD_CBS")) {
            return mastercardApiPayload;
        }
        return null;
    }

    /** Returns the payload builder for the given payment mode. */
    public Function getApiPayloadSetter(String paymentMode) {
        PaymentModeMapping mapping = paymentModeConfiguration.getByMode(paymentMode);
        // NOTE(review): getByMode returns null for an unconfigured mode, which makes the
        // next line throw NPE — confirm callers only pass configured payment modes.
        return payloadMap.get(mapping.getId());
    }

}
diff --git a/src/main/java/org/mifos/processor/bulk/config/PaymentModeConfiguration.java b/src/main/java/org/mifos/processor/bulk/config/PaymentModeConfiguration.java
new file mode 100644
index 00000000..4597f031
--- /dev/null
+++ b/src/main/java/org/mifos/processor/bulk/config/PaymentModeConfiguration.java
@@ -0,0 +1,22 @@
package org.mifos.processor.bulk.config;

import java.util.ArrayList;
import java.util.List;
import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;

/** Binds the {@code payment-mode.mappings} list: one entry per supported payment mode. */
@Configuration
@ConfigurationProperties(prefix = "payment-mode")
@Setter
@Getter
public class PaymentModeConfiguration {

    private List<PaymentModeMapping> mappings = new ArrayList<>();

    /** Case-insensitive lookup of a mapping by payment mode id; null when not configured. */
    public PaymentModeMapping getByMode(String paymentMode) {
        for (PaymentModeMapping mapping : getMappings()) {
            if (mapping.getId().equalsIgnoreCase(paymentMode)) {
                return mapping;
            }
        }
        return null;
    }

}
diff --git a/src/main/java/org/mifos/processor/bulk/config/PaymentModeMapping.java b/src/main/java/org/mifos/processor/bulk/config/PaymentModeMapping.java
new file mode 100644
index 00000000..0d9d4663
--- /dev/null
+++ b/src/main/java/org/mifos/processor/bulk/config/PaymentModeMapping.java
@@ -0,0 +1,14 @@
package org.mifos.processor.bulk.config;

import lombok.Getter;
import lombok.Setter;

/** One configured payment mode: its id, target endpoint, debulking DFSP id, and type. */
@Getter
@Setter
public class PaymentModeMapping {

    private String id;
    private String endpoint;
    private String debulkingDfspid;
    private PaymentModeType type;
}
diff --git a/src/main/java/org/mifos/processor/bulk/config/PaymentModeType.java b/src/main/java/org/mifos/processor/bulk/config/PaymentModeType.java
new file mode 100644
index 00000000..bda7a171
--- /dev/null
+++ b/src/main/java/org/mifos/processor/bulk/config/PaymentModeType.java
@@ -0,0 +1,16 @@
package org.mifos.processor.bulk.config;

import lombok.Getter;

/** Whether a payment mode processes transfers one-by-one (PAYMENT) or as a bulk (BULK). */
@Getter
public enum PaymentModeType {

    PAYMENT("PAYMENT"), BULK("BULK");

    private final String modeType;

    PaymentModeType(String modeType) {
        this.modeType = modeType;
    }

}
diff --git a/src/main/java/org/mifos/processor/bulk/config/PreflightCorsFilter.java b/src/main/java/org/mifos/processor/bulk/config/PreflightCorsFilter.java
new file mode 100644
index 00000000..6931055b
--- /dev/null
+++ b/src/main/java/org/mifos/processor/bulk/config/PreflightCorsFilter.java
@@ -0,0 +1,55 @@
package org.mifos.processor.bulk.config;

import java.io.IOException;
import java.util.List;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

/**
 * Servlet filter that answers CORS preflight (OPTIONS) requests from configured
 * origins directly, without passing them down the filter chain.
 *
 * NOTE(review): CORS headers are only attached to the OPTIONS response here —
 * non-preflight responses never receive Access-Control-Allow-Origin from this
 * filter; confirm another layer adds it, or browsers will block actual requests.
 */
@Component
public class PreflightCorsFilter implements Filter {

    // Origins allowed to perform cross-origin requests, from cors.allowed-origins.
    private final List<String> allowedOrigins;

    @Autowired
    public PreflightCorsFilter(CorsProperties corsProperties) {
        this.allowedOrigins = corsProperties.getAllowedOrigins();
    }

    @Override
    public void doFilter(javax.servlet.ServletRequest servletRequest, javax.servlet.ServletResponse servletResponse,
            FilterChain filterChain) throws IOException, ServletException {

        HttpServletRequest request = (HttpServletRequest) servletRequest;
        HttpServletResponse response = (HttpServletResponse) servletResponse;
        String origin = request.getHeader("Origin");

        boolean isAllowedPreflight = "OPTIONS".equalsIgnoreCase(request.getMethod())
                && origin != null
                && allowedOrigins.contains(origin);

        if (!isAllowedPreflight) {
            filterChain.doFilter(servletRequest, servletResponse);
            return;
        }

        // Short-circuit the OPTIONS preflight with the CORS grant for this origin.
        response.setStatus(HttpServletResponse.SC_OK);
        response.setHeader("Access-Control-Allow-Origin", origin);
        response.setHeader("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS");
        response.setHeader("Access-Control-Allow-Headers",
                "content-type,platform-tenantid,purpose,type,x-callback-url,x-correlationid,x-program-id,x-registering-institution-id,x-signature");
        response.setHeader("Access-Control-Allow-Credentials", "true");
        response.setHeader("Access-Control-Max-Age", "86400");
    }

    @Override
    public void init(FilterConfig filterConfig) throws ServletException {

    }

    @Override
    public void destroy() {}

}
diff --git a/src/main/java/org/mifos/processor/bulk/config/Program.java
b/src/main/java/org/mifos/processor/bulk/config/Program.java
new file mode 100644
index 00000000..46bd2e07
--- /dev/null
+++ b/src/main/java/org/mifos/processor/bulk/config/Program.java
@@ -0,0 +1,19 @@
package org.mifos.processor.bulk.config;

import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;

/** A configured program of a registering institution, with its payer identifier. */
@Getter
@Setter
@AllArgsConstructor
@NoArgsConstructor
public class Program {

    private String id;
    private String name;
    private String identifierType;
    private String identifierValue;

}
diff --git a/src/main/java/org/mifos/processor/bulk/config/PubSubConfig.java b/src/main/java/org/mifos/processor/bulk/config/PubSubConfig.java
new file mode 100644
index 00000000..23d39064
--- /dev/null
+++ b/src/main/java/org/mifos/processor/bulk/config/PubSubConfig.java
@@ -0,0 +1,18 @@
package org.mifos.processor.bulk.config;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;

/** Holds the pub/sub room and event-type settings injected from {@code pubsub.*} properties. */
@Configuration
public class PubSubConfig {

    @Value("${pubsub.room.code}")
    public String roomCode;

    @Value("${pubsub.room.class}")
    public String roomClass;

    @Value("${pubsub.event.type}")
    public String eventType;

}
diff --git a/src/main/java/org/mifos/processor/bulk/config/RegisteringInstitutionConfig.java b/src/main/java/org/mifos/processor/bulk/config/RegisteringInstitutionConfig.java
new file mode 100644
index 00000000..cb71a9f7
--- /dev/null
+++ b/src/main/java/org/mifos/processor/bulk/config/RegisteringInstitutionConfig.java
@@ -0,0 +1,23 @@
package org.mifos.processor.bulk.config;

import java.util.ArrayList;
import java.util.List;
import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

/**
 * One registering institution and its programs.
 *
 * NOTE(review): this class is also an element of BudgetAccountConfig's list, yet it is
 * itself a @Component bound to the LIST prefix "budget-account.registering-institutions" —
 * binding a list prefix to a single bean looks unintentional; confirm the annotations
 * are actually needed here.
 */
@Getter
@Setter
@Component
@ConfigurationProperties(prefix = "budget-account.registering-institutions")
public class RegisteringInstitutionConfig {

    private String id;
    private List<Program> programs = new ArrayList<>();

    /** Returns the program with the given id, or null when no program matches. */
    public Program getByProgramId(String id) {
        for (Program program : getPrograms()) {
            if (program.getId().equals(id)) {
                return program;
            }
        }
        return null;
    }

}
diff --git a/src/main/java/org/mifos/processor/bulk/config/SecurityServerConfig.java b/src/main/java/org/mifos/processor/bulk/config/SecurityServerConfig.java
new file mode 100644
index 00000000..fd27c6a1
--- /dev/null
+++ b/src/main/java/org/mifos/processor/bulk/config/SecurityServerConfig.java
@@ -0,0 +1,32 @@
package org.mifos.processor.bulk.config;

import javax.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;

/**
 * Security-server connection settings; derives the fully-resolved subscription
 * URL once at startup from the host, base URI, and endpoint template.
 */
@Configuration
public class SecurityServerConfig {

    @Value("${security-server.host}")
    public String host;

    @Value("${security-server.baseuri}")
    public String baseUri;

    @Value("${security-server.country}")
    public String country;

    @Value("${security-server.organisation}")
    public String organisation;

    @Value("${security-server.endpoints.subs}")
    public String subscribingEndpoint;

    // Resolved at startup: host + baseuri + endpoint with {country}/{orgs} substituted.
    public String subscribingUrl;

    @PostConstruct
    public void setup() {
        subscribingUrl = (host + baseUri + subscribingEndpoint)
                .replace("{country}", country)
                .replace("{orgs}", organisation);
    }
}
diff --git a/src/main/java/org/mifos/processor/bulk/connectors/api/AccountLookupApi.java b/src/main/java/org/mifos/processor/bulk/connectors/api/AccountLookupApi.java
new file mode 100644
index 00000000..70985acb
--- /dev/null
+++ b/src/main/java/org/mifos/processor/bulk/connectors/api/AccountLookupApi.java
@@ -0,0 +1,17 @@
package org.mifos.processor.bulk.connectors.api;

import java.util.Map;
import org.mifos.connector.common.identityaccountmapper.dto.AccountMapperRequestDTO;
import org.springframework.stereotype.Component;
import retrofit2.Call;
import
retrofit2.http.Body; +import retrofit2.http.HeaderMap; +import retrofit2.http.POST; +import retrofit2.http.Url; + +@Component +public interface AccountLookupApi { + + @POST + Call batchAccountLookup(@Url String fullUrl, @Body AccountMapperRequestDTO requestBody, @HeaderMap Map headers); +} diff --git a/src/main/java/org/mifos/processor/bulk/connectors/service/AccountLookupService.java b/src/main/java/org/mifos/processor/bulk/connectors/service/AccountLookupService.java new file mode 100644 index 00000000..aba71029 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/connectors/service/AccountLookupService.java @@ -0,0 +1,44 @@ +package org.mifos.processor.bulk.connectors.service; + +import java.io.IOException; +import java.util.Map; +import org.mifos.connector.common.identityaccountmapper.dto.AccountMapperRequestDTO; +import org.mifos.processor.bulk.connectors.api.AccountLookupApi; +import org.mifos.processor.bulk.utility.Headers; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import retrofit2.Call; +import retrofit2.Retrofit; + +@Service +public class AccountLookupService { + + @Autowired + RetrofitService retrofitService; + + public Logger logger = LoggerFactory.getLogger(this.getClass()); + + public void accountLookupCall(String baseUrl, String fullUrl, AccountMapperRequestDTO requestBody, Map headers) + throws IOException { + Retrofit retrofit = retrofitService.createRetrofit(baseUrl); + + AccountLookupApi accountLookupApi = retrofit.create(AccountLookupApi.class); + + Call call = accountLookupApi.batchAccountLookup(fullUrl, requestBody, Headers.convertHeaders(headers)); + + try { + retrofit2.Response response = call.execute(); + if (response.isSuccessful()) { + Object apiResponse = response.body(); + logger.debug("API response is :: {}", apiResponse); + } else { + logger.error("Error occurred. 
HTTP status code: {}", response.code()); + } + } catch (IOException e) { + logger.error("Error making Retrofit API call", e); + throw e; + } + } +} diff --git a/src/main/java/org/mifos/processor/bulk/connectors/service/ProcessorStartRouteService.java b/src/main/java/org/mifos/processor/bulk/connectors/service/ProcessorStartRouteService.java new file mode 100644 index 00000000..d936546c --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/connectors/service/ProcessorStartRouteService.java @@ -0,0 +1,310 @@ +package org.mifos.processor.bulk.connectors.service; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.BATCH_REQUEST_TYPE; +import static org.mifos.processor.bulk.camel.config.CamelProperties.CONTENT_TYPE; +import static org.mifos.processor.bulk.camel.config.CamelProperties.HEADER_CLIENT_CORRELATION_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.HEADER_PLATFORM_TENANT_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.HEADER_PROGRAM_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.HEADER_REGISTERING_INSTITUTE_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.IS_UPDATED; +import static org.mifos.processor.bulk.camel.config.CamelProperties.PROGRAM_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.REGISTERING_INSTITUTE_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.RESULT_TRANSACTION_LIST; +import static org.mifos.processor.bulk.camel.config.CamelProperties.TENANT_NAME; +import static org.mifos.processor.bulk.camel.config.CamelProperties.TRANSACTION_LIST; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.BATCH_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CALLBACK; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CALLBACK_URL; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CLIENT_CORRELATION_ID; +import static 
org.mifos.processor.bulk.zeebe.ZeebeVariables.FILE_NAME; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.IS_FILE_VALID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.NOTE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PAYEE_DFSP_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PAYER_IDENTIFIER_TYPE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PAYER_IDENTIFIER_VALUE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PHASES; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PHASE_COUNT; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PROGRAM_NAME; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PURPOSE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.REQUEST_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.TENANT_ID; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.camunda.zeebe.client.api.command.ClientStatusException; +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import org.apache.camel.Exchange; +import org.json.JSONObject; +import org.mifos.processor.bulk.camel.routes.ProcessorStartRoute; +import org.mifos.processor.bulk.config.BudgetAccountConfig; +import org.mifos.processor.bulk.config.Program; +import org.mifos.processor.bulk.config.RegisteringInstitutionConfig; +import org.mifos.processor.bulk.file.FileTransferService; +import org.mifos.processor.bulk.schema.Transaction; +import org.mifos.processor.bulk.utility.PhaseUtils; +import org.mifos.processor.bulk.zeebe.ZeebeProcessStarter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import 
org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Service; +import org.springframework.util.StringUtils; + +@Service +public class ProcessorStartRouteService { + + private final Logger logger = LoggerFactory.getLogger(this.getClass()); + + @Autowired + private ProcessorStartRoute processorStartRoute; + + @Autowired + @Qualifier("awsStorage") + private FileTransferService fileTransferService; + + @Autowired + private ZeebeProcessStarter zeebeProcessStarter; + + @Autowired + private PhaseUtils phaseUtils; + + @Autowired + private ObjectMapper objectMapper; + + @Autowired + private BudgetAccountConfig budgetAccountConfig; + + @Value("${application.bucket-name}") + private String bucketName; + + @Value("${csv.size}") + private int csvSize; + + @Value("${pollingApi.path}") + private String pollApiPath; + + @Value("${pollingApi.timer}") + private String pollApiTimer; + + public void validateFileSyncResponse(Exchange exchange) throws IOException { + String fileName = exchange.getIn().getHeader(FILE_NAME, String.class); + File file = new File(fileName); + + // check the file structure + int fileSize = (int) file.length(); + if (fileSize > csvSize) { + processorStartRoute.setErrorResponse(exchange, 400, "File too big", + "The file uploaded is too big. Please upload a file and try again."); + } else if (!processorStartRoute.verifyCsv(file)) { + processorStartRoute.setErrorResponse(exchange, 400, "Invalid file structure", + "The file uploaded contains wrong structure. 
Please upload correct file columns and try again."); + } else { + logger.debug("Filename: {}", fileName); + processorStartRoute.setResponse(exchange, 200); + } + } + + public void validateTenant(Exchange exchange) { + String tenantName = exchange.getIn().getHeader(HEADER_PLATFORM_TENANT_ID, String.class); + // validation is disabled for now + /* + * if (tenantName == null || tenantName.isEmpty() || !tenants.contains(tenantName)) { throw new + * Exception("Invalid tenant value."); } + */ + exchange.setProperty(TENANT_NAME, tenantName); + exchange.getIn().setHeader(CONTENT_TYPE, "application/json;charset=UTF-8"); + } + + public void pollingOutput(Exchange exchange) { + JSONObject json = new JSONObject(); + String pollingPath = String.format("%s%s", pollApiPath, exchange.getProperty(BATCH_ID)); + json.put("PollingPath", pollingPath); + json.put("SuggestedCallbackSeconds", pollApiTimer); + exchange.getIn().setBody(json.toString()); + exchange.getIn().setHeader(Exchange.HTTP_RESPONSE_CODE, 202); + } + + public void executeBatch(Exchange exchange) { + String filename = exchange.getIn().getHeader("filename", String.class); + String requestId = exchange.getIn().getHeader("X-CorrelationID", String.class); + String purpose = exchange.getIn().getHeader("Purpose", String.class); + String type = exchange.getIn().getHeader("Type", String.class); + String clientCorrelationId = exchange.getIn().getHeader(HEADER_CLIENT_CORRELATION_ID, String.class); + String registeringInstitutionId = exchange.getIn().getHeader(HEADER_REGISTERING_INSTITUTE_ID, String.class); + logger.info("registeringInstitutionId {}", registeringInstitutionId); + String programId = exchange.getIn().getHeader(HEADER_PROGRAM_ID, String.class); + String callbackUrl = exchange.getIn().getHeader("X-CallbackURL", String.class); + exchange.setProperty(FILE_NAME, filename); + exchange.setProperty(REQUEST_ID, requestId); + exchange.setProperty(PURPOSE, purpose); + exchange.setProperty(BATCH_REQUEST_TYPE, type); + 
exchange.setProperty(CLIENT_CORRELATION_ID, clientCorrelationId); + exchange.setProperty(REGISTERING_INSTITUTE_ID, registeringInstitutionId); + exchange.setProperty(PROGRAM_ID, programId); + exchange.setProperty(CALLBACK, callbackUrl); + } + + public void startBatchProcessRaw(Exchange exchange) { + JSONObject response = new JSONObject(); + response.put("batch_id", UUID.randomUUID().toString()); + response.put("request_id", UUID.randomUUID().toString()); + response.put("status", "queued"); + exchange.getIn().setBody(response.toString()); + } + + @SuppressWarnings("unchecked") + public void updateIncomingData(Exchange exchange) { + String registeringInstituteId = exchange.getProperty(REGISTERING_INSTITUTE_ID, String.class); + String programId = exchange.getProperty(PROGRAM_ID, String.class); + logger.debug("Inst id: {}, prog id: {}", registeringInstituteId, programId); + + if (!(StringUtils.hasText(registeringInstituteId) && StringUtils.hasText(programId))) { + // this will make sure the file is not updated since there is no update in data + logger.debug("InstitutionId or programId is null"); + exchange.setProperty(IS_UPDATED, false); + return; + } + + List transactionList = exchange.getProperty(TRANSACTION_LIST, List.class); + logger.debug("Size: {}", transactionList.size()); + + RegisteringInstitutionConfig registeringInstitutionConfig = budgetAccountConfig.getByRegisteringInstituteId(registeringInstituteId); + + if (registeringInstitutionConfig == null) { + logger.debug("Element in nested in config: {}", budgetAccountConfig.getRegisteringInstitutions().get(0).getPrograms().size()); + logger.debug("Registering institute id is null"); + exchange.setProperty(IS_UPDATED, false); + return; + } + + Program program = registeringInstitutionConfig.getByProgramId(programId); + if (program == null) { + // this will make sure the file is not updated since there is no update in data + logger.debug("Program is null"); + exchange.setProperty(IS_UPDATED, false); + return; + } + + 
List resultTransactionList = new ArrayList<>(); + + transactionList.forEach(transaction -> { + transaction.setPayerIdentifierType(program.getIdentifierType()); + transaction.setPayerIdentifier(program.getIdentifierValue()); + resultTransactionList.add(transaction); + try { + logger.debug("Txn: {}", objectMapper.writeValueAsString(transaction)); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + }); + + exchange.setProperty(RESULT_TRANSACTION_LIST, resultTransactionList); + exchange.setProperty(IS_UPDATED, true); + exchange.setProperty(PROGRAM_NAME, program.getName()); + exchange.setProperty(PAYER_IDENTIFIER_TYPE, program.getIdentifierType()); + exchange.setProperty(PAYER_IDENTIFIER_VALUE, program.getIdentifierValue()); + } + + public void startBatchProcessCsv(Exchange exchange) throws IOException { + String fileName = exchange.getProperty(FILE_NAME, String.class); + String requestId = exchange.getProperty(REQUEST_ID, String.class); + String purpose = exchange.getProperty(PURPOSE, String.class); + String batchId = exchange.getProperty(BATCH_ID, String.class); + String callbackUrl = exchange.getProperty(CALLBACK, String.class); + String payeeDfspId = exchange.getProperty(PAYEE_DFSP_ID, String.class); + String note = null; + + if (purpose == null || purpose.isEmpty()) { + purpose = "test payment"; + } + + logger.debug("\n\n Filename: {}", fileName); + logger.debug("\n\n BatchId: {} ", batchId); + + File file = new File(fileName); + file.setWritable(true); + file.setReadable(true); + + logger.debug("File absolute path: {}", file.getAbsolutePath()); + + boolean verifyData = processorStartRoute.verifyData(file); + logger.debug("Data verification result {}", verifyData); + if (!verifyData) { + note = "Invalid data in file data processing stopped"; + } + + String nm = fileTransferService.uploadFile(file, bucketName); + + logger.debug("File uploaded {}", nm); + + // extracting and setting callback Url + exchange.setProperty(CALLBACK_URL, 
callbackUrl); + + List phases = phaseUtils.getValues(); + logger.debug(phases.toString()); + + Map variables = new HashMap<>(); + variables.put(BATCH_ID, batchId); + variables.put(FILE_NAME, fileName); + variables.put(REQUEST_ID, requestId); + variables.put(PURPOSE, purpose); + variables.put(TENANT_ID, exchange.getProperty(TENANT_NAME)); + variables.put(CALLBACK, callbackUrl); + variables.put(PHASES, phases); + variables.put(PHASE_COUNT, phases.size()); + variables.put(NOTE, note); + variables.put(CLIENT_CORRELATION_ID, exchange.getProperty(CLIENT_CORRELATION_ID)); + variables.put(PROGRAM_NAME, exchange.getProperty(PROGRAM_NAME)); + variables.put(PAYER_IDENTIFIER_TYPE, exchange.getProperty(PAYER_IDENTIFIER_TYPE)); + variables.put(PAYER_IDENTIFIER_VALUE, exchange.getProperty(PAYER_IDENTIFIER_VALUE)); + variables.put(REGISTERING_INSTITUTE_ID, exchange.getProperty(REGISTERING_INSTITUTE_ID)); + variables.put(IS_FILE_VALID, true); + variables.put(PAYEE_DFSP_ID, payeeDfspId); + processorStartRoute.setConfigProperties(variables); + + logger.info("Zeebe variables published: {}", variables); + logger.info("Variables published to zeebe: {}", variables); + + JSONObject response = new JSONObject(); + // Check if GovStack mode (registeringInstituteId is set) to select appropriate workflow + String registeringInstituteId = exchange.getProperty(REGISTERING_INSTITUTE_ID, String.class); + String flowType = StringUtils.hasText(registeringInstituteId) ? 
"batch-transactions-govstack" : "batch-transactions"; + logger.info("Selecting workflow flow type: {} (registeringInstituteId: {})", flowType, registeringInstituteId); + String bpmn = processorStartRoute.getWorkflowForTenant(exchange.getProperty(TENANT_NAME).toString(), flowType); + + try { + logger.info("FREDa "); + logger.info("FREDa: tenant is < {} > ", exchange.getProperty(TENANT_NAME).toString()); + String tenantSpecificWorkflowId = bpmn.replace("{dfspid}", exchange.getProperty(TENANT_NAME).toString()); + logger.info("Tenant specific workflow id: {}", tenantSpecificWorkflowId); + logger.info("FRED: tenant is < {} > ", exchange.getProperty(TENANT_NAME).toString()); + + String txnId = zeebeProcessStarter.startZeebeWorkflow(tenantSpecificWorkflowId, "", variables); + if (txnId == null || txnId.isEmpty()) { + response.put("errorCode", 500); + response.put("errorDescription", "Unable to start zeebe workflow"); + response.put("developerMessage", "Issue in starting the zeebe workflow, check the zeebe configuration"); + } else { + response.put("batch_id", batchId); + response.put("request_id", requestId); + response.put("status", "queued"); + } + } catch (ClientStatusException c) { + logger.error("Got ClientStatusException : {}", c.getMessage()); + throw c; + } catch (Exception e) { + response.put("errorCode", 500); + response.put("errorDescription", "Unable to start zeebe workflow"); + response.put("developerMessage", e.getLocalizedMessage()); + } + + exchange.getIn().setBody(response.toString()); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/connectors/service/RetrofitService.java b/src/main/java/org/mifos/processor/bulk/connectors/service/RetrofitService.java new file mode 100644 index 00000000..8dc80da5 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/connectors/service/RetrofitService.java @@ -0,0 +1,13 @@ +package org.mifos.processor.bulk.connectors.service; + +import org.springframework.stereotype.Component; +import retrofit2.Retrofit; 
+import retrofit2.converter.gson.GsonConverterFactory; + +@Component +public class RetrofitService { + + public Retrofit createRetrofit(String baseUrl) { + return new Retrofit.Builder().baseUrl(baseUrl).addConverterFactory(GsonConverterFactory.create()).build(); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/file/AwsFileTransferImpl.java b/src/main/java/org/mifos/processor/bulk/file/AwsFileTransferImpl.java index 80895b02..945729aa 100644 --- a/src/main/java/org/mifos/processor/bulk/file/AwsFileTransferImpl.java +++ b/src/main/java/org/mifos/processor/bulk/file/AwsFileTransferImpl.java @@ -5,6 +5,10 @@ import com.amazonaws.services.s3.model.S3Object; import com.amazonaws.services.s3.model.S3ObjectInputStream; import com.amazonaws.util.IOUtils; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -12,40 +16,57 @@ import org.springframework.stereotype.Service; import org.springframework.web.multipart.MultipartFile; -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; - @Service @Qualifier("awsStorage") public class AwsFileTransferImpl implements FileTransferService { + private Logger logger = LoggerFactory.getLogger(this.getClass()); @Autowired private AmazonS3 s3Client; + @Override + public byte[] downloadFile(String fileName, String bucketName) { + logger.info("downloadFile() - downloading from MinIO: bucket={}, file={}", bucketName, fileName); + S3Object s3Object = s3Client.getObject(bucketName, fileName); + S3ObjectInputStream inputStream = s3Object.getObjectContent(); + try { + byte[] content = IOUtils.toByteArray(inputStream); + logger.info("downloadFile() - downloaded {} bytes", content != null ? 
content.length : 0); + return content; + } catch (IOException e) { + logger.error("downloadFile() - IOException: {}", e.getMessage(), e); + logger.debug("{}", e.getMessage()); + } + logger.info("downloadFile() - returning null (download failed)"); + return null; + } + @Override public String uploadFile(MultipartFile file, String bucketName) { File fileObj = convertMultiPartFileToFile(file); - String fileName = System.currentTimeMillis() + "_" + file.getOriginalFilename(); - s3Client.putObject(new PutObjectRequest(bucketName, fileName, fileObj)); - fileObj.delete(); + return uploadFile(fileObj, bucketName); + } + + @Override + public String uploadFile(File file, String bucketName) { + String fileName = file.getName(); + logger.info("uploadFile() - file: {}, size: {}, exists: {}, path: {}", fileName, file.length(), file.exists(), + file.getAbsolutePath()); + s3Client.putObject(new PutObjectRequest(bucketName, fileName, file)); + logger.info("uploadFile() - uploaded to MinIO bucket: {}", bucketName); + file.delete(); + logger.info("uploadFile() - local file deleted"); return fileName; } @Override - public byte[] downloadFile(String fileName, String bucketName) { + public InputStream streamFile(String fileName, String bucketName) { S3Object s3Object = s3Client.getObject(bucketName, fileName); S3ObjectInputStream inputStream = s3Object.getObjectContent(); - try { - byte[] content = IOUtils.toByteArray(inputStream); - return content; - } catch (IOException e) { - e.printStackTrace(); - } - return null; + return inputStream; } @Override diff --git a/src/main/java/org/mifos/processor/bulk/file/AzureFileTransferImpl.java b/src/main/java/org/mifos/processor/bulk/file/AzureFileTransferImpl.java index e38b721a..33df0fa3 100644 --- a/src/main/java/org/mifos/processor/bulk/file/AzureFileTransferImpl.java +++ b/src/main/java/org/mifos/processor/bulk/file/AzureFileTransferImpl.java @@ -2,6 +2,12 @@ import com.azure.storage.blob.BlobClientBuilder; import 
com.azure.storage.blob.models.BlobProperties; +import com.azure.storage.blob.specialized.BlobInputStream; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Paths; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -10,22 +16,30 @@ import org.springframework.stereotype.Service; import org.springframework.web.multipart.MultipartFile; -import java.io.File; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Paths; - @Service @Qualifier("azureStorage") -@ConditionalOnProperty( - value="cloud.azure.enabled", - havingValue = "true") +@ConditionalOnProperty(value = "cloud.azure.enabled", havingValue = "true") public class AzureFileTransferImpl implements FileTransferService { + private Logger logger = LoggerFactory.getLogger(this.getClass()); @Autowired BlobClientBuilder client; + @Override + public byte[] downloadFile(String fileName, String bucketName) { + try { + File temp = new File("/temp/" + fileName); + BlobProperties properties = client.containerName(bucketName).blobName(fileName).buildClient().downloadToFile(temp.getPath()); + byte[] content = Files.readAllBytes(Paths.get(temp.getPath())); + temp.delete(); + return content; + } catch (Exception e) { + logger.debug("{}", e.getMessage()); + } + return null; + } + @Override public String uploadFile(MultipartFile file, String bucketName) { @@ -41,15 +55,27 @@ public String uploadFile(MultipartFile file, String bucketName) { } @Override - public byte[] downloadFile(String fileName, String bucketName) { + public String uploadFile(File file, String bucketName) { try { - File temp = new File("/temp/"+fileName); - BlobProperties properties = client.containerName(bucketName).blobName(fileName).buildClient().downloadToFile(temp.getPath()); - byte[] content = Files.readAllBytes(Paths.get(temp.getPath())); - temp.delete(); - return content; + 
String fileName = System.currentTimeMillis() + "_" + file.getName(); + client.containerName(bucketName).blobName(fileName).buildClient().upload(Files.newInputStream(file.toPath()), file.length()); + return fileName; + } catch (IOException e) { + logger.error("Error uploading file to Azure", e); + } + return null; + } + + @Override + public InputStream streamFile(String fileName, String bucketName) { + try { + File temp = new File("/temp/" + fileName); + BlobInputStream csvInputStream = client.containerName(bucketName).blobName(fileName).buildClient().openInputStream(); + // byte[] content = Files.Paths.get(temp.getPath())); + // temp.delete(); + return csvInputStream; } catch (Exception e) { - e.printStackTrace(); + logger.debug(e.getMessage()); } return null; } @@ -58,4 +84,5 @@ public byte[] downloadFile(String fileName, String bucketName) { public void deleteFile(String fileName, String bucketName) { client.containerName(bucketName).blobName(fileName).buildClient().delete(); } + } diff --git a/src/main/java/org/mifos/processor/bulk/file/FileStorageService.java b/src/main/java/org/mifos/processor/bulk/file/FileStorageService.java new file mode 100644 index 00000000..01682f1f --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/file/FileStorageService.java @@ -0,0 +1,16 @@ +package org.mifos.processor.bulk.file; + +import java.io.InputStream; +import org.springframework.stereotype.Service; +import org.springframework.web.multipart.MultipartFile; + +@Service +public interface FileStorageService { + + String save(MultipartFile file); + + String save(InputStream file, String filename); + + String save(String data, String filename); + +} diff --git a/src/main/java/org/mifos/processor/bulk/file/FileStorageServiceImpl.java b/src/main/java/org/mifos/processor/bulk/file/FileStorageServiceImpl.java new file mode 100644 index 00000000..445cbc92 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/file/FileStorageServiceImpl.java @@ -0,0 +1,65 @@ +package 
org.mifos.processor.bulk.file; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Objects; +import java.util.UUID; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Service; +import org.springframework.web.multipart.MultipartFile; + +@Service +public class FileStorageServiceImpl implements FileStorageService { + + private static final Logger logger = LoggerFactory.getLogger(FileStorageServiceImpl.class); + + private final Path root = Paths.get(""); + + @Override + public String save(MultipartFile file) { + String filename = UUID.randomUUID() + "_" + Objects.requireNonNull(file.getOriginalFilename()); + logger.info("FileStorageServiceImpl.save() - filename: {}", filename); + logger.info("FileStorageServiceImpl.save() - file.getSize(): {}", file.getSize()); + logger.info("FileStorageServiceImpl.save() - file.isEmpty(): {}", file.isEmpty()); + try { + InputStream inputStream = file.getInputStream(); + logger.info("FileStorageServiceImpl.save() - inputStream available: {}", inputStream.available()); + long bytesCopied = Files.copy(inputStream, this.root.resolve(filename)); + logger.info("FileStorageServiceImpl.save() - bytesCopied: {}", bytesCopied); + } catch (Exception e) { + logger.error("FileStorageServiceImpl.save() - Exception: {}", e.getMessage(), e); + throw new RuntimeException("Failed to save file: " + e.getMessage(), e); + } + return filename; + } + + @Override + public String save(InputStream inputStream, String filename) { + String uniqueFileName = getUniqueFileName(filename); + try { + Files.copy(inputStream, this.root.resolve(uniqueFileName)); + } catch (Exception e) { + throw new RuntimeException("Failed to save file: " + e.getMessage(), e); + } + return uniqueFileName; + } + + @Override + public String save(String data, String filename) { + String uniqueFileName = getUniqueFileName(filename); + 
try { + Files.writeString(this.root.resolve(uniqueFileName), data); + } catch (IOException e) { + throw new RuntimeException(e); + } + return uniqueFileName; + } + + private String getUniqueFileName(String filename) { + return UUID.randomUUID() + "_" + Objects.requireNonNull(filename); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/file/FileTransferService.java b/src/main/java/org/mifos/processor/bulk/file/FileTransferService.java index a139a4b1..f7e9f3ee 100644 --- a/src/main/java/org/mifos/processor/bulk/file/FileTransferService.java +++ b/src/main/java/org/mifos/processor/bulk/file/FileTransferService.java @@ -1,14 +1,20 @@ package org.mifos.processor.bulk.file; +import java.io.File; +import java.io.InputStream; import org.springframework.stereotype.Service; import org.springframework.web.multipart.MultipartFile; @Service public interface FileTransferService { + byte[] downloadFile(String fileName, String bucketName); + String uploadFile(MultipartFile file, String bucketName); - byte[] downloadFile(String fileName, String bucketName); + String uploadFile(File file, String bucketName); + + InputStream streamFile(String fileName, String bucketName); void deleteFile(String fileName, String bucketName); diff --git a/src/main/java/org/mifos/processor/bulk/file/config/AwsStorageConfig.java b/src/main/java/org/mifos/processor/bulk/file/config/AwsStorageConfig.java index 8b8ef1f4..25b1ebe2 100644 --- a/src/main/java/org/mifos/processor/bulk/file/config/AwsStorageConfig.java +++ b/src/main/java/org/mifos/processor/bulk/file/config/AwsStorageConfig.java @@ -3,6 +3,7 @@ import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.AWSStaticCredentialsProvider; import com.amazonaws.auth.BasicAWSCredentials; +import com.amazonaws.client.builder.AwsClientBuilder; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3ClientBuilder; import org.springframework.beans.factory.annotation.Value; @@ -21,16 +22,17 @@ public class 
AwsStorageConfig { @Value("${cloud.aws.region.static}") private String region; + @Value("${cloud.aws.s3BaseUrl}") + private String endpoint; @Bean - @ConditionalOnProperty( - value="cloud.aws.enabled", - havingValue = "true") + @ConditionalOnProperty(value = "cloud.aws.enabled", havingValue = "true") public AmazonS3 s3Client() { AWSCredentials credentials = new BasicAWSCredentials(accessKey, accessSecret); - return AmazonS3ClientBuilder.standard() - .withCredentials(new AWSStaticCredentialsProvider(credentials)) - .withRegion(region).build(); + return AmazonS3ClientBuilder.standard().withCredentials(new AWSStaticCredentialsProvider(credentials)) + .withPathStyleAccessEnabled(true).withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endpoint, region)) + .build(); + } } diff --git a/src/main/java/org/mifos/processor/bulk/file/config/AzureStorageConfig.java b/src/main/java/org/mifos/processor/bulk/file/config/AzureStorageConfig.java index 1dc91e49..f7ffa998 100644 --- a/src/main/java/org/mifos/processor/bulk/file/config/AzureStorageConfig.java +++ b/src/main/java/org/mifos/processor/bulk/file/config/AzureStorageConfig.java @@ -1,6 +1,6 @@ package org.mifos.processor.bulk.file.config; -import com.azure.storage.blob.*; +import com.azure.storage.blob.BlobClientBuilder; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; @@ -13,9 +13,7 @@ public class AzureStorageConfig { String connectionString; @Bean - @ConditionalOnProperty( - value="cloud.azure.enabled", - havingValue = "true") + @ConditionalOnProperty(value = "cloud.azure.enabled", havingValue = "true") public BlobClientBuilder getClient() { BlobClientBuilder client = new BlobClientBuilder(); client.connectionString(connectionString); diff --git a/src/main/java/org/mifos/processor/bulk/format/EntityMapper.java 
b/src/main/java/org/mifos/processor/bulk/format/EntityMapper.java new file mode 100644 index 00000000..61eb1dc6 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/format/EntityMapper.java @@ -0,0 +1,14 @@ +package org.mifos.processor.bulk.format; + +import java.util.List; + +public interface EntityMapper { + + T convertTo(K object); + + K convertFrom(T object); + + List convertListTo(List objects); + + List convertListFrom(List objects); +} diff --git a/src/main/java/org/mifos/processor/bulk/format/RestRequestConvertor.java b/src/main/java/org/mifos/processor/bulk/format/RestRequestConvertor.java new file mode 100644 index 00000000..8186bafe --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/format/RestRequestConvertor.java @@ -0,0 +1,80 @@ +package org.mifos.processor.bulk.format; + +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; +import org.mifos.processor.bulk.schema.BatchRequestDTO; +import org.mifos.processor.bulk.schema.Party; +import org.mifos.processor.bulk.schema.PartyType; +import org.mifos.processor.bulk.schema.Transaction; +import org.springframework.stereotype.Component; + +@Component +public class RestRequestConvertor implements EntityMapper { + + @Override + public BatchRequestDTO convertTo(Transaction object) { + Party creditParty = new Party(); + creditParty.setKey(PartyType.fromValue(object.getPayeeIdentifierType()).getValue()); + creditParty.setValue(object.getPayeeIdentifier()); + + Party debitParty = new Party(); + debitParty.setKey(PartyType.fromValue(object.getPayerIdentifierType()).getValue()); + debitParty.setValue(object.getPayerIdentifier()); + + // creating the DTO + BatchRequestDTO batchRequestDTO = new BatchRequestDTO(); + batchRequestDTO.setAmount(object.getAmount()); + batchRequestDTO.setCurrency(object.getCurrency()); + batchRequestDTO.setCreditParty(List.of(creditParty)); + batchRequestDTO.setDebitParty(List.of(debitParty)); + batchRequestDTO.setDescriptionText(object.getNote()); + 
batchRequestDTO.setPaymentMode(object.getPaymentMode()); + batchRequestDTO.setRequestId(object.getRequestId()); + + return batchRequestDTO; + } + + @Override + public Transaction convertFrom(BatchRequestDTO object) { + + // creating the transaction + Transaction transaction = new Transaction(); + transaction.setCurrency(object.getCurrency()); + transaction.setAmount(object.getAmount()); + if (object.getDebitParty() != null && object.getDebitParty().size() > 0) { + transaction.setPayerIdentifierType(object.getDebitParty().get(0).getKey()); + transaction.setPayerIdentifier(object.getDebitParty().get(0).getValue()); + } + if (object.getCreditParty() != null && object.getCreditParty().size() > 0) { + transaction.setPayeeIdentifierType(object.getCreditParty().get(0).getKey()); + transaction.setPayeeIdentifier(object.getCreditParty().get(0).getValue()); + } + transaction.setNote(object.getDescriptionText()); + transaction.setPaymentMode(object.getPaymentMode()); + transaction.setRequestId(object.getRequestId()); + return transaction; + } + + @Override + public List convertListTo(List objects) { + List batchRequestDTOList = new ArrayList<>(); + objects.forEach(e -> batchRequestDTOList.add(convertTo(e))); + return batchRequestDTOList; + } + + @Override + public List convertListFrom(List objects) { + List transactionList = new ArrayList<>(); + for (int i = 0; i < objects.size(); i++) { + Transaction transaction = convertFrom(objects.get(i)); + transaction.setId(i); + if (transaction.getRequestId() == null) { + transaction.setRequestId(UUID.randomUUID().toString()); + } + transactionList.add(transaction); + } + return transactionList; + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/format/Standard.java b/src/main/java/org/mifos/processor/bulk/format/Standard.java new file mode 100644 index 00000000..394670fa --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/format/Standard.java @@ -0,0 +1,17 @@ +package org.mifos.processor.bulk.format; + +public enum 
Standard { + + GSMA(StandardValue.GSMA), DEFAULT(StandardValue.DEFAULT); + + private final String value; + + Standard(String s) { + value = s; + } + + public String getValue() { + return value; + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/format/StandardValue.java b/src/main/java/org/mifos/processor/bulk/format/StandardValue.java new file mode 100644 index 00000000..d0037157 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/format/StandardValue.java @@ -0,0 +1,10 @@ +package org.mifos.processor.bulk.format; + +public final class StandardValue { + + private StandardValue() {} + + public static final String GSMA = "GSMA"; + + public static final String DEFAULT = "DEFAULT"; +} diff --git a/src/main/java/org/mifos/processor/bulk/format/helper/BaseMapper.java b/src/main/java/org/mifos/processor/bulk/format/helper/BaseMapper.java new file mode 100644 index 00000000..bae65290 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/format/helper/BaseMapper.java @@ -0,0 +1,17 @@ +package org.mifos.processor.bulk.format.helper; + +import java.util.ArrayList; +import java.util.List; +import org.mifos.processor.bulk.schema.CsvSchema; + +public abstract class BaseMapper implements Mapper { + + @Override + public List convertList(List objects) { + List list = new ArrayList<>(); + objects.forEach(transaction -> { + list.add(convert(transaction)); + }); + return list; + } +} diff --git a/src/main/java/org/mifos/processor/bulk/format/helper/GSMAMapper.java b/src/main/java/org/mifos/processor/bulk/format/helper/GSMAMapper.java new file mode 100644 index 00000000..3cb4f427 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/format/helper/GSMAMapper.java @@ -0,0 +1,21 @@ +package org.mifos.processor.bulk.format.helper; + +import org.mifos.processor.bulk.schema.GSMATransaction; +import org.mifos.processor.bulk.schema.Transaction; +import org.springframework.stereotype.Component; + +@Component +public class GSMAMapper extends BaseMapper { + + @Override 
+ public GSMATransaction convert(Transaction object) { + GSMATransaction gsmaTransaction = new GSMATransaction(); + gsmaTransaction.setId(object.getId()); + gsmaTransaction.setRequestId("test"); + gsmaTransaction.setPaymentMode(object.getPaymentMode()); + gsmaTransaction.setAccountNumber(object.getAccountNumber()); + gsmaTransaction.setAmount(object.getAmount()); + gsmaTransaction.setNote(object.getNote()); + return gsmaTransaction; + } +} diff --git a/src/main/java/org/mifos/processor/bulk/format/helper/Mapper.java b/src/main/java/org/mifos/processor/bulk/format/helper/Mapper.java new file mode 100644 index 00000000..e6743117 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/format/helper/Mapper.java @@ -0,0 +1,26 @@ +package org.mifos.processor.bulk.format.helper; + +import java.util.List; +import org.mifos.processor.bulk.schema.CsvSchema; + +public interface Mapper { + + /** + * Use for converting the object of type [FROM] to an object of type [TO] + * + * @param object + * of type [FROM] + * @return object of type [To] + */ + TO convert(FROM object); + + /** + * Use for converting the list of objects of type [FROM] to an object of type [TO] + * + * @param objects + * lost of object of type [FROM] + * @return objects of type [To] + */ + List convertList(List objects); + +} diff --git a/src/main/java/org/mifos/processor/bulk/format/helper/Mappers.java b/src/main/java/org/mifos/processor/bulk/format/helper/Mappers.java new file mode 100644 index 00000000..351157ff --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/format/helper/Mappers.java @@ -0,0 +1,12 @@ +package org.mifos.processor.bulk.format.helper; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Component +public class Mappers { + + @Autowired + public GSMAMapper gsmaMapper; + +} diff --git a/src/main/java/org/mifos/processor/bulk/kafka/Consumers.java b/src/main/java/org/mifos/processor/bulk/kafka/Consumers.java index 
70317a10..03769b77 100644 --- a/src/main/java/org/mifos/processor/bulk/kafka/Consumers.java +++ b/src/main/java/org/mifos/processor/bulk/kafka/Consumers.java @@ -1,7 +1,23 @@ package org.mifos.processor.bulk.kafka; +import static org.mifos.connector.common.mojaloop.type.InitiatorType.CONSUMER; +import static org.mifos.connector.common.mojaloop.type.Scenario.TRANSFER; +import static org.mifos.connector.common.mojaloop.type.TransactionRole.PAYER; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.BATCH_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.GSMA_CHANNEL_REQUEST; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.INITIATOR_FSPID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.IS_RTP_REQUEST; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PARTY_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PARTY_ID_TYPE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PARTY_LOOKUP_FSPID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.TENANT_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.TRANSACTION_TYPE; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; +import java.util.HashMap; +import java.util.Map; +import lombok.extern.slf4j.Slf4j; import org.mifos.connector.common.channel.dto.TransactionChannelRequestDTO; import org.mifos.connector.common.gsma.dto.GSMATransaction; import org.mifos.connector.common.gsma.dto.GsmaParty; @@ -10,36 +26,17 @@ import org.mifos.connector.common.mojaloop.dto.PartyIdInfo; import org.mifos.connector.common.mojaloop.dto.TransactionType; import org.mifos.connector.common.mojaloop.type.IdentifierType; -import org.mifos.processor.bulk.schema.Transaction; +import org.mifos.processor.bulk.schema.TransactionOlder; import org.mifos.processor.bulk.zeebe.ZeebeProcessStarter; import org.springframework.beans.factory.annotation.Autowired; import 
org.springframework.beans.factory.annotation.Value; import org.springframework.kafka.annotation.KafkaListener; import org.springframework.stereotype.Service; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.mifos.connector.common.mojaloop.type.InitiatorType.CONSUMER; -import static org.mifos.connector.common.mojaloop.type.Scenario.TRANSFER; -import static org.mifos.connector.common.mojaloop.type.TransactionRole.PAYER; -import static org.mifos.processor.bulk.zeebe.ZeebeVariables.BATCH_ID; -import static org.mifos.processor.bulk.zeebe.ZeebeVariables.GSMA_CHANNEL_REQUEST; -import static org.mifos.processor.bulk.zeebe.ZeebeVariables.INITIATOR_FSPID; -import static org.mifos.processor.bulk.zeebe.ZeebeVariables.IS_RTP_REQUEST; -import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PARTY_ID; -import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PARTY_ID_TYPE; -import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PARTY_LOOKUP_FSPID; -import static org.mifos.processor.bulk.zeebe.ZeebeVariables.TENANT_ID; -import static org.mifos.processor.bulk.zeebe.ZeebeVariables.TRANSACTION_TYPE; - @Service +@Slf4j public class Consumers { - @Value("#{'${dfspids}'.split(',')}") - private List dfspIds; - @Value("${bpmn.flows.international-remittance-payer}") private String internationalRemittancePayer; @@ -51,9 +48,8 @@ public class Consumers { @KafkaListener(topics = "${kafka.topic.gsma.name}", groupId = "group_id") public void listenTopicGsma(String message) throws JsonProcessingException { - System.out.println("Received Message in topic GSMA and group group_id: " + message); - - Transaction transaction = objectMapper.readValue((String) message, Transaction.class); + log.debug("Received Message in topic GSMA and group group_id: {}", message); + TransactionOlder transaction = objectMapper.readValue(message, TransactionOlder.class); String tenantId = "ibank-usa"; GSMATransaction gsmaChannelRequest = new GSMATransaction(); @@ 
-63,16 +59,16 @@ public void listenTopicGsma(String message) throws JsonProcessingException { gsmaChannelRequest.setReceivingLei("ibank-india"); GsmaParty creditParty = new GsmaParty(); creditParty.setKey("msisdn"); - creditParty.setValue(transaction.getAccount_number()); + creditParty.setValue(transaction.getAccountNumber()); GsmaParty debitParty = new GsmaParty(); debitParty.setKey("msisdn"); - debitParty.setValue(transaction.getAccount_number()); - gsmaChannelRequest.setCreditParty(new GsmaParty[]{creditParty}); - gsmaChannelRequest.setDebitParty(new GsmaParty[]{debitParty}); -// gsmaChannelRequest.setInternationalTransferInformation().setReceivingAmount(gsmaChannelRequest.getAmount()); + debitParty.setValue(transaction.getAccountNumber()); + gsmaChannelRequest.setCreditParty(new GsmaParty[] { creditParty }); + gsmaChannelRequest.setDebitParty(new GsmaParty[] { debitParty }); + // gsmaChannelRequest.setInternationalTransferInformation().setReceivingAmount(gsmaChannelRequest.getAmount()); TransactionChannelRequestDTO channelRequest = new TransactionChannelRequestDTO(); // Fineract Object - Party payee = new Party(new PartyIdInfo(IdentifierType.MSISDN, transaction.getAccount_number())); + Party payee = new Party(new PartyIdInfo(IdentifierType.MSISDN, transaction.getAccountNumber())); Party payer = new Party(new PartyIdInfo(IdentifierType.MSISDN, "7543010")); MoneyData moneyData = new MoneyData(); @@ -98,7 +94,8 @@ public void listenTopicGsma(String message) throws JsonProcessingException { String tenantSpecificBpmn = internationalRemittancePayer.replace("{dfspid}", tenantId); channelRequest.setTransactionType(transactionType); - PartyIdInfo requestedParty = (boolean)extraVariables.get(IS_RTP_REQUEST) ? channelRequest.getPayer().getPartyIdInfo() : channelRequest.getPayee().getPartyIdInfo(); + PartyIdInfo requestedParty = (boolean) extraVariables.get(IS_RTP_REQUEST) ? 
channelRequest.getPayer().getPartyIdInfo() + : channelRequest.getPayee().getPartyIdInfo(); extraVariables.put(PARTY_ID_TYPE, requestedParty.getPartyIdType()); extraVariables.put(PARTY_ID, requestedParty.getPartyIdentifier()); @@ -106,15 +103,14 @@ public void listenTopicGsma(String message) throws JsonProcessingException { extraVariables.put(PARTY_LOOKUP_FSPID, gsmaChannelRequest.getReceivingLei()); extraVariables.put(INITIATOR_FSPID, gsmaChannelRequest.getRequestingLei()); - String transactionId = zeebeProcessStarter.startZeebeWorkflow(tenantSpecificBpmn, - objectMapper.writeValueAsString(channelRequest), + String transactionId = zeebeProcessStarter.startZeebeWorkflow(tenantSpecificBpmn, objectMapper.writeValueAsString(channelRequest), extraVariables); - System.out.println("GSMA Transaction Started with: " + transactionId); + log.debug("GSMA Transaction Started with:{} ", transactionId); } @KafkaListener(topics = "${kafka.topic.slcb.name}", groupId = "group_id") public void listenTopicSlcb(String message) { - System.out.println("Received Message in topic SLCB and group group_id: " + message); + log.debug("Received Message in topic SLCB and group group_id:{} ", message); } } diff --git a/src/main/java/org/mifos/processor/bulk/kafka/config/KafkaConsumerConfig.java b/src/main/java/org/mifos/processor/bulk/kafka/config/KafkaConsumerConfig.java index 5b689a39..e9694b89 100644 --- a/src/main/java/org/mifos/processor/bulk/kafka/config/KafkaConsumerConfig.java +++ b/src/main/java/org/mifos/processor/bulk/kafka/config/KafkaConsumerConfig.java @@ -1,5 +1,7 @@ package org.mifos.processor.bulk.kafka.config; +import java.util.HashMap; +import java.util.Map; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.common.serialization.StringDeserializer; import org.springframework.beans.factory.annotation.Value; @@ -10,9 +12,6 @@ import org.springframework.kafka.core.ConsumerFactory; import org.springframework.kafka.core.DefaultKafkaConsumerFactory; 
-import java.util.HashMap; -import java.util.Map; - @EnableKafka @Configuration public class KafkaConsumerConfig { diff --git a/src/main/java/org/mifos/processor/bulk/kafka/config/KafkaProducerConfig.java b/src/main/java/org/mifos/processor/bulk/kafka/config/KafkaProducerConfig.java index 4ef5d8d9..11b9aa2a 100644 --- a/src/main/java/org/mifos/processor/bulk/kafka/config/KafkaProducerConfig.java +++ b/src/main/java/org/mifos/processor/bulk/kafka/config/KafkaProducerConfig.java @@ -1,5 +1,7 @@ package org.mifos.processor.bulk.kafka.config; +import java.util.HashMap; +import java.util.Map; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.common.serialization.StringSerializer; import org.springframework.beans.factory.annotation.Value; @@ -9,9 +11,6 @@ import org.springframework.kafka.core.KafkaTemplate; import org.springframework.kafka.core.ProducerFactory; -import java.util.HashMap; -import java.util.Map; - @Configuration public class KafkaProducerConfig { diff --git a/src/main/java/org/mifos/processor/bulk/kafka/config/KafkaTopicConfig.java b/src/main/java/org/mifos/processor/bulk/kafka/config/KafkaTopicConfig.java index 625ddee2..7d2f2ff7 100644 --- a/src/main/java/org/mifos/processor/bulk/kafka/config/KafkaTopicConfig.java +++ b/src/main/java/org/mifos/processor/bulk/kafka/config/KafkaTopicConfig.java @@ -1,5 +1,7 @@ package org.mifos.processor.bulk.kafka.config; +import java.util.HashMap; +import java.util.Map; import org.apache.kafka.clients.admin.AdminClientConfig; import org.apache.kafka.clients.admin.NewTopic; import org.springframework.beans.factory.annotation.Value; @@ -7,11 +9,9 @@ import org.springframework.context.annotation.Configuration; import org.springframework.kafka.core.KafkaAdmin; -import java.util.HashMap; -import java.util.Map; - @Configuration public class KafkaTopicConfig { + @Value(value = "${kafka.bootstrapAddress}") private String bootstrapAddress; diff --git 
a/src/main/java/org/mifos/processor/bulk/properties/TenantImplementation.java b/src/main/java/org/mifos/processor/bulk/properties/TenantImplementation.java new file mode 100644 index 00000000..4d808354 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/properties/TenantImplementation.java @@ -0,0 +1,26 @@ +package org.mifos.processor.bulk.properties; + +import java.util.HashMap; + +public class TenantImplementation { + + String id; + HashMap flows; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public HashMap getFlows() { + return flows; + } + + public void setFlows(HashMap flows) { + this.flows = flows; + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/properties/TenantImplementationProperties.java b/src/main/java/org/mifos/processor/bulk/properties/TenantImplementationProperties.java new file mode 100644 index 00000000..4c1a2fd3 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/properties/TenantImplementationProperties.java @@ -0,0 +1,21 @@ +package org.mifos.processor.bulk.properties; + +import java.util.List; +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.stereotype.Component; + +@Component +@ConfigurationProperties(prefix = "bpmns") +public class TenantImplementationProperties { + + List tenants; + + public List getTenants() { + return tenants; + } + + public void setTenants(List tenants) { + this.tenants = tenants; + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/schema/AccountLookupResponseDTO.java b/src/main/java/org/mifos/processor/bulk/schema/AccountLookupResponseDTO.java new file mode 100644 index 00000000..e511edf1 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/schema/AccountLookupResponseDTO.java @@ -0,0 +1,21 @@ +package org.mifos.processor.bulk.schema; + +import java.io.Serializable; +import java.util.List; +import lombok.AllArgsConstructor; +import lombok.Getter; +import 
lombok.NoArgsConstructor; +import lombok.Setter; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +public class AccountLookupResponseDTO implements Serializable { + + private String requestId; + private String payeeIdentity; + private List paymentModalityList; + private Boolean isValidated; + +} diff --git a/src/main/java/org/mifos/processor/bulk/schema/AuthorizationRequest.java b/src/main/java/org/mifos/processor/bulk/schema/AuthorizationRequest.java new file mode 100644 index 00000000..5f479dc1 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/schema/AuthorizationRequest.java @@ -0,0 +1,21 @@ +package org.mifos.processor.bulk.schema; + +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +public class AuthorizationRequest { + + private String batchId; + + private String payerIdentifier; + + private String currency; + + private String amount; +} diff --git a/src/main/java/org/mifos/processor/bulk/schema/AuthorizationResponse.java b/src/main/java/org/mifos/processor/bulk/schema/AuthorizationResponse.java new file mode 100644 index 00000000..809f9d04 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/schema/AuthorizationResponse.java @@ -0,0 +1,19 @@ +package org.mifos.processor.bulk.schema; + +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +public class AuthorizationResponse { + + private String clientCorrelationId; + + private String status; + + private String reason; +} diff --git a/src/main/java/org/mifos/processor/bulk/schema/BatchAccountLookupResponseDTO.java b/src/main/java/org/mifos/processor/bulk/schema/BatchAccountLookupResponseDTO.java new file mode 100644 index 00000000..939fda60 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/schema/BatchAccountLookupResponseDTO.java 
@@ -0,0 +1,18 @@ +package org.mifos.processor.bulk.schema; + +import java.util.List; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +public class BatchAccountLookupResponseDTO { + + private String requestId; + private String registeringInstitutionId; + private List beneficiaryDTOList; +} diff --git a/src/main/java/org/mifos/processor/bulk/schema/BatchCallbackDTO.java b/src/main/java/org/mifos/processor/bulk/schema/BatchCallbackDTO.java new file mode 100644 index 00000000..735890dd --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/schema/BatchCallbackDTO.java @@ -0,0 +1,17 @@ +package org.mifos.processor.bulk.schema; + +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +@Getter +@Setter +@AllArgsConstructor +@NoArgsConstructor +public class BatchCallbackDTO { + + String clientCorrelationId; + String batchId; + String message; +} diff --git a/src/main/java/org/mifos/processor/bulk/schema/BatchDTO.java b/src/main/java/org/mifos/processor/bulk/schema/BatchDTO.java new file mode 100644 index 00000000..4212c75c --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/schema/BatchDTO.java @@ -0,0 +1,57 @@ +package org.mifos.processor.bulk.schema; + +import java.math.BigDecimal; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +@Getter +@Setter +@AllArgsConstructor +@NoArgsConstructor +public class BatchDTO { + + private String batchId; + + private String requestId; + + private Long total; + + private Long ongoing; + + private Long failed; + + private Long successful; + + private BigDecimal totalAmount; + + private BigDecimal successfulAmount; + + private BigDecimal pendingAmount; + + private BigDecimal failedAmount; + + private String file; + + private String notes; + + private String createdAt; + + 
private String status; + + private String modes; + + private String purpose; + + private String failPercentage; + + private String successPercentage; + + private String registeringInstitutionId; + + private String payerFsp; + + private String correlationId; + +} diff --git a/src/main/java/org/mifos/processor/bulk/schema/BatchRequestDTO.java b/src/main/java/org/mifos/processor/bulk/schema/BatchRequestDTO.java new file mode 100644 index 00000000..3ccce3f5 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/schema/BatchRequestDTO.java @@ -0,0 +1,24 @@ +package org.mifos.processor.bulk.schema; + +import java.util.List; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +@Getter +@Setter +@AllArgsConstructor +@NoArgsConstructor +public class BatchRequestDTO { + + String requestId; + List creditParty; + List debitParty; + + String paymentMode; + String amount; + String currency; + String descriptionText; + +} diff --git a/src/main/java/org/mifos/processor/bulk/schema/BeneficiaryDTO.java b/src/main/java/org/mifos/processor/bulk/schema/BeneficiaryDTO.java new file mode 100644 index 00000000..b82db523 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/schema/BeneficiaryDTO.java @@ -0,0 +1,19 @@ +package org.mifos.processor.bulk.schema; + +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +public class BeneficiaryDTO { + + private String payeeIdentity; + private String paymentModality; + private String financialAddress; + private String bankingInstitutionCode; + +} diff --git a/src/main/java/org/mifos/processor/bulk/schema/CamelApiResponse.java b/src/main/java/org/mifos/processor/bulk/schema/CamelApiResponse.java new file mode 100644 index 00000000..4fe28952 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/schema/CamelApiResponse.java @@ -0,0 +1,14 @@ +package 
org.mifos.processor.bulk.schema; + +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +@AllArgsConstructor +public class CamelApiResponse { + + String body; + int status; +} diff --git a/src/main/java/org/mifos/processor/bulk/schema/CsvSchema.java b/src/main/java/org/mifos/processor/bulk/schema/CsvSchema.java new file mode 100644 index 00000000..95531f8a --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/schema/CsvSchema.java @@ -0,0 +1,9 @@ +package org.mifos.processor.bulk.schema; + +public interface CsvSchema { + + String getCsvString(); + + String getCsvHeader(); + +} diff --git a/src/main/java/org/mifos/processor/bulk/schema/ExceptionMapperDTO.java b/src/main/java/org/mifos/processor/bulk/schema/ExceptionMapperDTO.java new file mode 100644 index 00000000..8b0ad44f --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/schema/ExceptionMapperDTO.java @@ -0,0 +1,18 @@ +package org.mifos.processor.bulk.schema; + +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.ToString; + +@Getter +@Setter +@ToString +@NoArgsConstructor +@AllArgsConstructor +public class ExceptionMapperDTO { + + private String responseCode; + private String responseDescription; +} diff --git a/src/main/java/org/mifos/processor/bulk/schema/GSMATransaction.java b/src/main/java/org/mifos/processor/bulk/schema/GSMATransaction.java new file mode 100644 index 00000000..4d6af3ea --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/schema/GSMATransaction.java @@ -0,0 +1,73 @@ +package org.mifos.processor.bulk.schema; + +import com.fasterxml.jackson.annotation.JsonPropertyOrder; + +@JsonPropertyOrder({ "id", "requestId", "paymentMode", "accountNumber", "amount", "note" }) +public class GSMATransaction implements CsvSchema { + + private int id; + private String requestId; + private String paymentMode; + private String accountNumber; + private String amount; + + 
private String note; + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getAmount() { + return amount; + } + + public void setAmount(String amount) { + this.amount = amount; + } + + public String getNote() { + return note; + } + + public void setNote(String note) { + this.note = note; + } + + public String getRequestId() { + return requestId; + } + + public void setRequestId(String requestId) { + this.requestId = requestId; + } + + public String getPaymentMode() { + return paymentMode; + } + + public void setPaymentMode(String paymentMode) { + this.paymentMode = paymentMode; + } + + public String getAccountNumber() { + return accountNumber; + } + + public void setAccountNumber(String accountNumber) { + this.accountNumber = accountNumber; + } + + @Override + public String getCsvString() { + return String.format("%s,%s,%s,%s,%s,%s", id, requestId, paymentMode, accountNumber, amount, note); + } + + @Override + public String getCsvHeader() { + return "id,requestId,paymentMode,accountNumber,amount,note"; + } +} diff --git a/src/main/java/org/mifos/processor/bulk/schema/Party.java b/src/main/java/org/mifos/processor/bulk/schema/Party.java new file mode 100644 index 00000000..a795fc78 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/schema/Party.java @@ -0,0 +1,17 @@ +package org.mifos.processor.bulk.schema; + +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +@Getter +@Setter +@AllArgsConstructor +@NoArgsConstructor +public class Party { + + String key; + String value; + +} diff --git a/src/main/java/org/mifos/processor/bulk/schema/PartyType.java b/src/main/java/org/mifos/processor/bulk/schema/PartyType.java new file mode 100644 index 00000000..36fbe060 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/schema/PartyType.java @@ -0,0 +1,26 @@ +package org.mifos.processor.bulk.schema; + +import java.util.Arrays; + +public enum 
PartyType { + + MSISDN("msisdn"), ACCOUNT_NUMBER("accountnumber"), EMPTY(""); + + private String value; + + PartyType(String value) { + this.value = value; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public static PartyType fromValue(String value) { + return Arrays.stream(values()).filter(ec -> ec.getValue().equalsIgnoreCase(value)).findFirst().orElseGet(() -> PartyType.EMPTY); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/schema/PaymentModalityDTO.java b/src/main/java/org/mifos/processor/bulk/schema/PaymentModalityDTO.java new file mode 100644 index 00000000..035e6119 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/schema/PaymentModalityDTO.java @@ -0,0 +1,17 @@ +package org.mifos.processor.bulk.schema; + +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +public class PaymentModalityDTO { + + private String paymentModality; + private String financialAddress; + private String bankingInstitutionCode; +} diff --git a/src/main/java/org/mifos/processor/bulk/schema/SubBatchEntity.java b/src/main/java/org/mifos/processor/bulk/schema/SubBatchEntity.java new file mode 100644 index 00000000..5aa98f41 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/schema/SubBatchEntity.java @@ -0,0 +1,55 @@ +package org.mifos.processor.bulk.schema; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Date; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +@Getter +@Setter +@NoArgsConstructor +public class SubBatchEntity { + + private String batchId; + private String subBatchId; + private String requestId; + private String requestFile; + private String resultFile; + private String note; + private String paymentMode; + private String registeringInstitutionId; + private String payerFsp; + private String 
correlationId; + + private Long totalTransactions; + private Long ongoing; + private Long failed; + private Long completed; + private Long totalAmount; + private Long ongoingAmount; + private Long failedAmount; + private Long completedAmount; + private Long workflowKey; + private Long workflowInstanceKey; + private Long approvedAmount; + private Long approvedCount; + + private Date resultGeneratedAt; + private Date startedAt; + private Date completedAt; + + @JsonIgnore + public void setAllEmptyAmount() { + setTotalTransactions(0L); + setOngoing(0L); + setFailed(0L); + setCompleted(0L); + setTotalAmount(0L); + setOngoingAmount(0L); + setFailedAmount(0L); + setCompletedAmount(0L); + setApprovedAmount(0L); + setApprovedCount(0L); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/schema/SubscriptionDTO.java b/src/main/java/org/mifos/processor/bulk/schema/SubscriptionDTO.java new file mode 100644 index 00000000..645fd957 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/schema/SubscriptionDTO.java @@ -0,0 +1,123 @@ +package org.mifos.processor.bulk.schema; + +import java.util.UUID; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +@Getter +@Setter +@AllArgsConstructor +@NoArgsConstructor +public class SubscriptionDTO { + + private String id; + + private String roomClass; + + private String roomCode; + + private String srcServiceCode; + + private String srcOperationId; + + private String dstServiceCode; + + private String dstOperationId; + + private String delivery; + + private SubscriptionOptionsDTO options; + + public static SubscriptionDTOBuilder subscriptionDTOBuilder = new SubscriptionDTOBuilder(); + + public static class SubscriptionDTOBuilder { + + private String id; + private String roomClass; + private String roomCode; + private String srcServiceCode; + private String srcOperationId; + private String dstServiceCode; + private String dstOperationId; + private String delivery; + 
private SubscriptionOptionsDTO options; + + public SubscriptionDTOBuilder roomClass(String roomClass) { + this.roomClass = roomClass; + return this; + } + + public SubscriptionDTOBuilder roomCode(String roomCode) { + this.roomCode = roomCode; + return this; + } + + public SubscriptionDTOBuilder srcServiceCode(String srcServiceCode) { + this.srcServiceCode = srcServiceCode; + return this; + } + + public SubscriptionDTOBuilder srcOperationId(String srcOperationId) { + this.srcOperationId = srcOperationId; + return this; + } + + public SubscriptionDTOBuilder dstServiceCode(String dstServiceCode) { + this.dstServiceCode = dstServiceCode; + return this; + } + + public SubscriptionDTOBuilder dstOperationId(String dstOperationId) { + this.dstOperationId = dstOperationId; + return this; + } + + public SubscriptionDTOBuilder delivery(String delivery) { + this.delivery = delivery; + return this; + } + + public SubscriptionDTOBuilder options(SubscriptionOptionsDTO options) { + this.options = options; + return this; + } + + private void check() { + if (this.id == null) { + this.id = UUID.randomUUID().toString(); + } + if (this.options == null) { + this.options = SubscriptionOptionsDTO.subscriptionOptionsDTOBuilder.build(); + } + if (this.delivery == null) { + this.delivery = "PUSH"; + } + if (roomClass == null) { + throw new RuntimeException("roomClass field cant be null"); + } + if (roomCode == null) { + throw new RuntimeException("roomCode field cant be null"); + } + if (srcServiceCode == null) { + throw new RuntimeException("srcServiceCode field cant be null"); + } + if (srcOperationId == null) { + throw new RuntimeException("srcOperationId field cant be null"); + } + if (dstServiceCode == null) { + throw new RuntimeException("dstServiceCode field cant be null"); + } + if (dstOperationId == null) { + throw new RuntimeException("dstOperationId field cant be null"); + } + } + + public SubscriptionDTO build() { + check(); + return new SubscriptionDTO(this.id, this.roomClass, 
this.roomCode, this.srcServiceCode, this.srcOperationId, + this.dstServiceCode, this.dstOperationId, this.delivery, this.options); + } + } +} diff --git a/src/main/java/org/mifos/processor/bulk/schema/SubscriptionOptionsDTO.java b/src/main/java/org/mifos/processor/bulk/schema/SubscriptionOptionsDTO.java new file mode 100644 index 00000000..9cb1858b --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/schema/SubscriptionOptionsDTO.java @@ -0,0 +1,50 @@ +package org.mifos.processor.bulk.schema; + +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +@Getter +@Setter +@AllArgsConstructor +@NoArgsConstructor +public class SubscriptionOptionsDTO { + + @JsonProperty("TTL") + private int ttl; + + @JsonProperty("FIFO") + private boolean fifo; + + public static SubscriptionOptionsDTOBuilder subscriptionOptionsDTOBuilder = new SubscriptionOptionsDTOBuilder(); + + public static class SubscriptionOptionsDTOBuilder { + + private Integer ttl; + + private Boolean fifo; + + public SubscriptionOptionsDTOBuilder tTL(int tTL) { + this.ttl = tTL; + return this; + } + + public SubscriptionOptionsDTOBuilder fIFO(boolean fIFO) { + this.fifo = fIFO; + return this; + } + + public SubscriptionOptionsDTO build() { + if (this.ttl == null) { + this.ttl = 3600; + } + if (this.fifo == null) { + this.fifo = false; + } + return new SubscriptionOptionsDTO(this.ttl, this.fifo); + } + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/schema/Transaction.java b/src/main/java/org/mifos/processor/bulk/schema/Transaction.java index bdaf49d3..ddaafd69 100644 --- a/src/main/java/org/mifos/processor/bulk/schema/Transaction.java +++ b/src/main/java/org/mifos/processor/bulk/schema/Transaction.java @@ -1,95 +1,109 @@ package org.mifos.processor.bulk.schema; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import 
com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyOrder; - -@JsonPropertyOrder({ "id", "request_id", "payment_mode", "account_number", "amount", "currency", "note" }) -public class Transaction { - +import java.util.Objects; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +@JsonIgnoreProperties(ignoreUnknown = true) +@JsonPropertyOrder({ "id", "request_id", "payment_mode", "payer_identifier_type", "payer_identifier", "payee_identifier_type", + "payee_identifier", "amount", "currency", "note" }) +public class Transaction implements CsvSchema { + + @JsonProperty("id") private int id; - private String request_id; - private String payment_mode; - private String account_number; - private String amount; - private String currency; - private String note; - private String batchId; + @JsonProperty("request_id") + private String requestId; - public int getId() { - return id; - } + @JsonProperty("payment_mode") + private String paymentMode; - public void setId(int id) { - this.id = id; - } + @JsonProperty("account_number") + private String accountNumber; - public String getRequest_id() { - return request_id; - } + @JsonProperty("amount") + private String amount; - public void setRequest_id(String request_id) { - this.request_id = request_id; - } + @JsonProperty("currency") + private String currency; - public String getPayment_mode() { - return payment_mode; + @Override + public boolean equals(Object transaction) { + if (this == transaction) { + return true; + } + if ((transaction == null) || (getClass() != transaction.getClass())) { + return false; + } + Transaction that = (Transaction) transaction; + return (id == that.id) && (Objects.equals(requestId, that.requestId)) && (Objects.equals(paymentMode, that.paymentMode)) + && (Objects.equals(accountNumber, that.accountNumber)) && (Objects.equals(amount, that.amount)) + && (Objects.equals(payeeDfspId, that.payeeDfspId)); } - public void setPayment_mode(String 
payment_mode) { - this.payment_mode = payment_mode; + @Override + public int hashCode() { + return Objects.hash(id, requestId, paymentMode, accountNumber, amount, currency, note, payerIdentifierType, payerIdentifier, + payeeIdentifierType, payeeIdentifier, payeeDfspId); } - public String getAccount_number() { - return account_number; - } + @JsonProperty("note") + private String note; - public void setAccount_number(String account_number) { - this.account_number = account_number; - } + @JsonProperty(value = "payer_identifier_type") + private String payerIdentifierType; - public String getAmount() { - return amount; - } + @JsonProperty("payer_identifier") + private String payerIdentifier; - public void setAmount(String amount) { - this.amount = amount; - } + @JsonProperty("payee_identifier_type") + private String payeeIdentifierType; - public String getCurrency() { - return currency; - } + @JsonProperty("payee_identifier") + private String payeeIdentifier; - public void setCurrency(String currency) { - this.currency = currency; - } + @JsonProperty("program_shortcode") + private String programShortCode; - public String getNote() { - return note; - } + @JsonProperty("cycle") + private String cycle; - public void setNote(String note) { - this.note = note; - } + @JsonProperty("payee_dfsp_id") + private String payeeDfspId; - public String getBatchId() { - return batchId; - } + @JsonProperty("batch_id") + private String batchId; - public void setBatchId(String batchId) { - this.batchId = batchId; + @Override + public String toString() { + StringBuilder buffer = new StringBuilder("Transaction{"); + buffer.append("id=").append(id); + buffer.append(", request_id='").append(requestId); + buffer.append(", payment_mode='").append(paymentMode); + buffer.append(", account_number='").append(accountNumber); + buffer.append(", amount='").append(amount); + buffer.append(", currency='").append(currency); + buffer.append(", note='").append(note); + buffer.append(", 
batchId='").append(batchId); + buffer.append(", status='").append(id).append('}'); + return buffer.toString(); + } + + @JsonIgnore + @Override + public String getCsvString() { + return String.format("%s,%s,%s,%s,%s,%s,%s", id, requestId, paymentMode, accountNumber, amount, currency, note); } + @JsonIgnore @Override - public String toString() { - return "Transaction{" + - "id=" + id + - ", request_id='" + request_id + '\'' + - ", payment_mode='" + payment_mode + '\'' + - ", account_number='" + account_number + '\'' + - ", amount='" + amount + '\'' + - ", currency='" + currency + '\'' + - ", note='" + note + '\'' + - ", batchId='" + batchId + '\'' + - '}'; + public String getCsvHeader() { + return "id,request_id,payment_mode,account_number,amount,currency,note,status"; } } diff --git a/src/main/java/org/mifos/processor/bulk/schema/TransactionOlder.java b/src/main/java/org/mifos/processor/bulk/schema/TransactionOlder.java new file mode 100644 index 00000000..a783105a --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/schema/TransactionOlder.java @@ -0,0 +1,14 @@ +package org.mifos.processor.bulk.schema; + +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +public class TransactionOlder extends Transaction { + + @JsonProperty("batchId") + private String batchId; + +} diff --git a/src/main/java/org/mifos/processor/bulk/schema/TransactionResult.java b/src/main/java/org/mifos/processor/bulk/schema/TransactionResult.java new file mode 100644 index 00000000..185f2213 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/schema/TransactionResult.java @@ -0,0 +1,41 @@ +package org.mifos.processor.bulk.schema; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import lombok.Getter; +import lombok.Setter; + +@Getter 
+@Setter +@JsonIgnoreProperties(ignoreUnknown = true) +@JsonPropertyOrder({ "id", "request_id", "payment_mode", "payer_identifier_type", "payer_identifier", "payee_identifier_type", + "payee_identifier", "amount", "currency", "note", "program_shortcode", "cycle", "status", "error_code", "error_description" }) +public class TransactionResult extends Transaction { + + @JsonProperty("status") + private String status; + + @JsonProperty("error_code") + private String errorCode; + + @JsonProperty("error_description") + private String errorDescription; + + @JsonProperty("account_number") + @JsonIgnore + private String accountNumber; + + @JsonIgnore + @Override + public void setAccountNumber(String accountNumber) { + super.setAccountNumber(accountNumber); + } + + @JsonIgnore + @Override + public String getAccountNumber() { + return super.getAccountNumber(); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/service/BatchAccountLookup.java b/src/main/java/org/mifos/processor/bulk/service/BatchAccountLookup.java new file mode 100644 index 00000000..f90a42a9 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/service/BatchAccountLookup.java @@ -0,0 +1,70 @@ +package org.mifos.processor.bulk.service; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.HEADER_REGISTERING_INSTITUTE_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.TRANSACTION_LIST; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CALLBACK; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.REGISTERING_INSTITUTION_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.REQUEST_ID; + +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import org.apache.camel.Exchange; +import org.mifos.connector.common.identityaccountmapper.dto.AccountMapperRequestDTO; +import 
org.mifos.connector.common.identityaccountmapper.dto.BeneficiaryDTO; +import org.mifos.processor.bulk.connectors.service.AccountLookupService; +import org.mifos.processor.bulk.schema.Transaction; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class BatchAccountLookup { + + private final Logger logger = LoggerFactory.getLogger(this.getClass()); + + @Autowired + private ObjectMapper objectMapper; + + @Autowired + private AccountLookupService accountLookupService; + + @Value("${identity_account_mapper.hostname}") + private String identityEndpoint; + + @Value("${identity_account_mapper.batch_account_lookup}") + private String batchAccountLookup; + + @SuppressWarnings("unchecked") + public void doBatchAccountLookup(Exchange exchange) throws IOException { + List transactionList = exchange.getProperty(TRANSACTION_LIST, List.class); + List beneficiaryDTOList = new ArrayList<>(); + + transactionList.forEach(transaction -> { + beneficiaryDTOList.add(new BeneficiaryDTO(transaction.getPayeeIdentifier(), "", "", "")); + }); + + String requestId = exchange.getProperty(REQUEST_ID, String.class); + String callbackUrl = exchange.getProperty(CALLBACK, String.class); + String registeringInstitutionId = exchange.getProperty(HEADER_REGISTERING_INSTITUTE_ID, String.class); + + AccountMapperRequestDTO accountMapperRequestDTO = new AccountMapperRequestDTO(requestId, registeringInstitutionId, + beneficiaryDTOList); + + String requestBody = objectMapper.writeValueAsString(accountMapperRequestDTO); + + exchange.getIn().setHeader(CALLBACK, callbackUrl); + exchange.getIn().setHeader(REGISTERING_INSTITUTION_ID, registeringInstitutionId); + exchange.getIn().setHeader("Content-type", "application/json"); + exchange.getIn().setBody(requestBody); + + Map headers = exchange.getIn().getHeaders(); 
+ String fullUrl = identityEndpoint + batchAccountLookup; + + accountLookupService.accountLookupCall(identityEndpoint, fullUrl, accountMapperRequestDTO, headers); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/service/FileProcessingRouteService.java b/src/main/java/org/mifos/processor/bulk/service/FileProcessingRouteService.java new file mode 100644 index 00000000..84e11ac8 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/service/FileProcessingRouteService.java @@ -0,0 +1,100 @@ +package org.mifos.processor.bulk.service; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.LOCAL_FILE_PATH; +import static org.mifos.processor.bulk.camel.config.CamelProperties.OVERRIDE_HEADER; +import static org.mifos.processor.bulk.camel.config.CamelProperties.RESULT_TRANSACTION_LIST; +import static org.mifos.processor.bulk.camel.config.CamelProperties.TRANSACTION_LIST; +import static org.mifos.processor.bulk.camel.config.CamelProperties.TRANSACTION_LIST_LENGTH; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.COMPLETED_AMOUNT; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FAILED_AMOUNT; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.ONGOING_AMOUNT; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.TOTAL_AMOUNT; + +import com.fasterxml.jackson.databind.MappingIterator; +import com.fasterxml.jackson.databind.SequenceWriter; +import com.fasterxml.jackson.dataformat.csv.CsvMapper; +import com.fasterxml.jackson.dataformat.csv.CsvSchema; +import java.io.File; +import java.io.FileReader; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import lombok.extern.slf4j.Slf4j; +import org.apache.camel.Exchange; +import org.mifos.processor.bulk.schema.Transaction; +import org.mifos.processor.bulk.schema.TransactionResult; +import org.mifos.processor.bulk.utility.CsvWriter; +import org.springframework.beans.factory.annotation.Autowired; +import 
org.springframework.stereotype.Service; + +@Service +@Slf4j +public class FileProcessingRouteService { + + @Autowired + private CsvMapper csvMapper; + + public void getTxnArray(Exchange exchange) throws IOException { + Double totalAmount = 0.0; + Long failedAmount = 0L; + Long completedAmount = 0L; + String filename = exchange.getProperty(LOCAL_FILE_PATH, String.class); + log.debug("Local file path: {}", filename); + CsvSchema schema = CsvSchema.emptySchema().withHeader(); + log.info("Filename: {}", filename); + FileReader reader = new FileReader(filename); + MappingIterator readValues = csvMapper.readerWithSchemaFor(Transaction.class).with(schema).readValues(reader); + List transactionList = new ArrayList<>(); + while (readValues.hasNext()) { + Transaction current = readValues.next(); + transactionList.add(current); + totalAmount += Double.parseDouble(current.getAmount()); + } + reader.close(); + exchange.setProperty(TRANSACTION_LIST, transactionList); + exchange.setProperty(TRANSACTION_LIST_LENGTH, transactionList.size()); + exchange.setProperty(TOTAL_AMOUNT, totalAmount); + exchange.setProperty(ONGOING_AMOUNT, totalAmount); + exchange.setProperty(FAILED_AMOUNT, failedAmount); + exchange.setProperty(COMPLETED_AMOUNT, completedAmount); + } + + @SuppressWarnings("unchecked") + public void updateResultFile(Exchange exchange) throws IOException { + String filepath = exchange.getProperty(LOCAL_FILE_PATH, String.class); + List transactionList = exchange.getProperty(RESULT_TRANSACTION_LIST, List.class); + + Boolean overrideHeader = exchange.getProperty(OVERRIDE_HEADER, Boolean.class); + + CsvWriter.writeToCsv(transactionList, TransactionResult.class, csvMapper, overrideHeader, filepath); + } + + @SuppressWarnings("unchecked") + public void updateFile(Exchange exchange) throws IOException { + String filepath = exchange.getProperty(LOCAL_FILE_PATH, String.class); + List transactionList = exchange.getProperty(TRANSACTION_LIST, List.class); + + log.info("updateFile() - 
filepath: {}", filepath); + log.info("updateFile() - transactionList: {} (size: {})", transactionList != null ? "present" : "NULL", + transactionList != null ? transactionList.size() : 0); + + // getting header + Boolean overrideHeader = exchange.getProperty(OVERRIDE_HEADER, Boolean.class); + CsvSchema csvSchema = csvMapper.schemaFor(Transaction.class); + if (overrideHeader) { + csvSchema = csvSchema.withHeader(); + } else { + csvSchema = csvSchema.withoutHeader(); + } + + File file = new File(filepath); + SequenceWriter writer = csvMapper.writerWithSchemaFor(Transaction.class).with(csvSchema).writeValues(file); + if (transactionList != null) { + for (Transaction transaction : transactionList) { + writer.write(transaction); + } + } + writer.close(); + log.info("updateFile() - wrote {} transactions to {}", transactionList != null ? transactionList.size() : 0, filepath); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/service/FileRouteService.java b/src/main/java/org/mifos/processor/bulk/service/FileRouteService.java new file mode 100644 index 00000000..ec50c22b --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/service/FileRouteService.java @@ -0,0 +1,48 @@ +package org.mifos.processor.bulk.service; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.LOCAL_FILE_PATH; +import static org.mifos.processor.bulk.camel.config.CamelProperties.SERVER_FILE_NAME; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import lombok.extern.slf4j.Slf4j; +import org.apache.camel.Exchange; +import org.mifos.processor.bulk.file.FileTransferService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Service; + +@Service +@Slf4j +public class FileRouteService { + + @Autowired + 
@Qualifier("awsStorage") + private FileTransferService fileTransferService; + @Value("${application.bucket-name}") + private String bucketName; + private Logger logger = LoggerFactory.getLogger(this.getClass()); + + public void downloadFile(Exchange exchange) throws IOException { + String filename = exchange.getProperty(SERVER_FILE_NAME, String.class); + + byte[] csvFile = fileTransferService.downloadFile(filename, bucketName); + File file = new File(filename); + try (FileOutputStream fos = new FileOutputStream(file)) { + fos.write(csvFile); + } + exchange.setProperty(LOCAL_FILE_PATH, file.getAbsolutePath()); + logger.info("File downloaded"); + } + + public void uploadFile(Exchange exchange) { + String filepath = exchange.getProperty(LOCAL_FILE_PATH, String.class); + String serverFileName = fileTransferService.uploadFile(new File(filepath), bucketName); + exchange.setProperty(SERVER_FILE_NAME, serverFileName); + logger.info("Uploaded file: {}", serverFileName); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/service/SubscriptionService.java b/src/main/java/org/mifos/processor/bulk/service/SubscriptionService.java new file mode 100644 index 00000000..6c067201 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/service/SubscriptionService.java @@ -0,0 +1,10 @@ +package org.mifos.processor.bulk.service; + +import org.springframework.stereotype.Service; + +@Service +public interface SubscriptionService { + + void subscribeToEvent(); + +} diff --git a/src/main/java/org/mifos/processor/bulk/service/SubscriptionServiceImpl.java b/src/main/java/org/mifos/processor/bulk/service/SubscriptionServiceImpl.java new file mode 100644 index 00000000..774a0f60 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/service/SubscriptionServiceImpl.java @@ -0,0 +1,34 @@ +package org.mifos.processor.bulk.service; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.EVENT_TYPE; + +import javax.annotation.PostConstruct; +import 
org.apache.camel.CamelContext; +import org.apache.camel.Exchange; +import org.apache.camel.ProducerTemplate; +import org.apache.camel.support.DefaultExchange; +import org.springframework.stereotype.Service; + +@Service +public class SubscriptionServiceImpl implements SubscriptionService { + + private final ProducerTemplate producerTemplate; + + protected final CamelContext camelContext; + + public String eventType; + + public SubscriptionServiceImpl(ProducerTemplate producerTemplate, CamelContext camelContext) { + this.producerTemplate = producerTemplate; + this.camelContext = camelContext; + } + + @PostConstruct + @Override + public void subscribeToEvent() { + Exchange exchange = new DefaultExchange(camelContext); + exchange.setProperty(EVENT_TYPE, eventType); + producerTemplate.send("direct:subscribe", exchange); + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/utility/CsvWriter.java b/src/main/java/org/mifos/processor/bulk/utility/CsvWriter.java new file mode 100644 index 00000000..9012cded --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/utility/CsvWriter.java @@ -0,0 +1,33 @@ +package org.mifos.processor.bulk.utility; + +import com.fasterxml.jackson.databind.SequenceWriter; +import com.fasterxml.jackson.dataformat.csv.CsvMapper; +import com.fasterxml.jackson.dataformat.csv.CsvSchema; +import java.io.File; +import java.io.IOException; +import java.util.List; + +public final class CsvWriter { + + private CsvWriter() {} + + public static void writeToCsv(List data, Class tClass, CsvMapper csvMapper, boolean overrideHeader, String filepath) + throws IOException { + CsvSchema csvSchema = csvMapper.schemaFor(tClass); + if (overrideHeader) { + csvSchema = csvSchema.withHeader(); + } else { + csvSchema = csvSchema.withoutHeader(); + } + + File file = new File(filepath); + if (!file.exists()) { + file.createNewFile(); + } + SequenceWriter writer = csvMapper.writerWithSchemaFor(tClass).with(csvSchema).writeValues(file); + for (T object : data) { + 
writer.write(object); + } + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/utility/Headers.java b/src/main/java/org/mifos/processor/bulk/utility/Headers.java new file mode 100644 index 00000000..16dcde77 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/utility/Headers.java @@ -0,0 +1,56 @@ +package org.mifos.processor.bulk.utility; + +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +public final class Headers { + + private Map headers; + + private Headers() {} + + private void setHeaders(Map headers) { + this.headers = headers; + } + + public Map getHeaders() { + return headers; + } + + public Set getHeadersKey() { + return this.headers.keySet(); + } + + public Object get(String key) { + return this.headers.get(key); + } + + public static Map convertHeaders(Map headers) { + Map stringHeaders = new HashMap<>(); + for (Map.Entry entry : headers.entrySet()) { + Object value = entry.getValue(); + if (value != null) { + stringHeaders.put(entry.getKey(), value.toString()); + } + } + return stringHeaders; + } + + public static class HeaderBuilder { + + private Map headers = new HashMap<>(); + + public HeaderBuilder addHeader(String key, Object value) { + headers.put(key, value); + return this; + } + + public Headers build() { + Headers headersClassInstance = new Headers(); + headersClassInstance.setHeaders(this.headers); + + return headersClassInstance; + } + } +} diff --git a/src/main/java/org/mifos/processor/bulk/utility/PhaseUtils.java b/src/main/java/org/mifos/processor/bulk/utility/PhaseUtils.java new file mode 100644 index 00000000..ccbe3539 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/utility/PhaseUtils.java @@ -0,0 +1,22 @@ +package org.mifos.processor.bulk.utility; + +import java.util.List; +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.stereotype.Component; + +@Component +@ConfigurationProperties(prefix = "callbackphases") +public class 
PhaseUtils { + + private List values; + + public List getValues() { + return values; + } + + public void setValues(List values) { + this.values = values; + + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/utility/SpringWrapperUtil.java b/src/main/java/org/mifos/processor/bulk/utility/SpringWrapperUtil.java new file mode 100644 index 00000000..ca60bcb9 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/utility/SpringWrapperUtil.java @@ -0,0 +1,21 @@ +package org.mifos.processor.bulk.utility; + +import org.apache.camel.CamelContext; +import org.apache.camel.Exchange; +import org.apache.camel.support.DefaultExchange; + +public final class SpringWrapperUtil { + + private SpringWrapperUtil() {} + + public static Exchange getDefaultWrappedExchange(CamelContext camelContext, Headers headers) { + Exchange exchange = new DefaultExchange(camelContext); + + // Setting headers + for (String headerKey : headers.getHeadersKey()) { + exchange.getIn().setHeader(headerKey, headers.get(headerKey)); + } + + return exchange; + } +} diff --git a/src/main/java/org/mifos/processor/bulk/utility/TransactionParser.java b/src/main/java/org/mifos/processor/bulk/utility/TransactionParser.java new file mode 100644 index 00000000..03f4138f --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/utility/TransactionParser.java @@ -0,0 +1,66 @@ +package org.mifos.processor.bulk.utility; + +import org.mifos.processor.bulk.schema.Transaction; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public final class TransactionParser { + + private static final Logger logger = LoggerFactory.getLogger(TransactionParser.class); + + private TransactionParser() { + throw new IllegalStateException("Utility class"); + } + + public static Transaction parseLineToTransaction(String line) { + try { + String[] parts = line.split(",", -1); + Transaction transaction = new Transaction(); + + if (parts.length > 0 && !parts[0].isEmpty()) { + transaction.setId(Integer.parseInt(parts[0])); 
+ } + if (parts.length > 1) { + transaction.setRequestId(parts[1]); + } + if (parts.length > 2) { + transaction.setPaymentMode(parts[2]); + } + if (parts.length > 4) { + transaction.setPayerIdentifierType(parts[3]); + } + if (parts.length > 5) { + transaction.setPayerIdentifier(parts[4]); + } + if (parts.length > 6) { + transaction.setPayeeIdentifierType(parts[5]); + } + if (parts.length > 7) { + transaction.setPayeeIdentifier(parts[6]); + } + if (parts.length > 8) { + transaction.setAmount(parts[7]); + } + if (parts.length > 9) { + transaction.setCurrency(parts[8]); + } + if (parts.length > 10) { + transaction.setNote(parts[9]); + } + if (parts.length > 11) { + transaction.setProgramShortCode(parts[10]); + } + if (parts.length > 12) { + transaction.setCycle(parts[11]); + } + if (parts.length > 13) { + transaction.setPayeeDfspId(parts[12]); + } + + return transaction; + } catch (Exception e) { + logger.error("Error parsing line to Transaction object: {}", line, e); + return null; + } + } +} diff --git a/src/main/java/org/mifos/processor/bulk/utility/Utils.java b/src/main/java/org/mifos/processor/bulk/utility/Utils.java new file mode 100644 index 00000000..b896bc9d --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/utility/Utils.java @@ -0,0 +1,233 @@ +package org.mifos.processor.bulk.utility; + +import java.io.BufferedReader; +import java.io.BufferedWriter; +import java.io.FileReader; +import java.io.FileWriter; +import java.io.IOException; +import lombok.extern.slf4j.Slf4j; +import org.mifos.connector.common.channel.dto.TransactionChannelRequestDTO; +import org.mifos.connector.common.gsma.dto.Fee; +import org.mifos.connector.common.gsma.dto.GSMATransaction; +import org.mifos.connector.common.gsma.dto.GsmaParty; +import org.mifos.connector.common.gsma.dto.IdDocument; +import org.mifos.connector.common.gsma.dto.InternationalTransferInformation; +import org.mifos.connector.common.gsma.dto.Kyc; +import org.mifos.connector.common.gsma.dto.PostalAddress; +import 
org.mifos.connector.common.gsma.dto.SubjectName; +import org.mifos.connector.common.mojaloop.dto.MoneyData; +import org.mifos.connector.common.mojaloop.dto.Party; +import org.mifos.connector.common.mojaloop.dto.PartyIdInfo; +import org.mifos.connector.common.mojaloop.type.IdentifierType; +import org.mifos.processor.bulk.schema.Transaction; +import org.mifos.processor.bulk.schema.TransactionResult; + +@Slf4j +public final class Utils { + + private Utils() {} + + public static String getTenantSpecificWorkflowId(String originalWorkflowName, String tenantName) { + return originalWorkflowName.replace("{dfspid}", tenantName); + } + + public static String getBulkConnectorBpmnName(String originalWorkflowName, String paymentMode, String tenantName) { + return originalWorkflowName.replace("{MODE}", paymentMode.toLowerCase()).replace("{dfspid}", tenantName); + } + + public static String mergeCsvFile(String file1, String file2) { + try { + // create a writer for permFile + BufferedWriter out = new BufferedWriter(new FileWriter(file1, true)); + // create a reader for tmpFile + BufferedReader in = new BufferedReader(new FileReader(file2)); + String str; + boolean isFirstLine = true; + while ((str = in.readLine()) != null) { + if (isFirstLine) { + // used for skipping header writing + isFirstLine = false; + continue; + } + out.write(str + "\n"); + } + in.close(); + out.close(); + } catch (IOException e) { + log.debug(e.getMessage()); + return null; + } + + return file1; + } + + public static String getAwsFileUrl(String baseUrl, String filename) { + return String.format("%s/%s", baseUrl, filename); + } + + /** + * takes initial timer in the ISO 8601 durations format for more info check + * https://docs.camunda.io/docs/0.26/reference/bpmn-workflows/timer-events/#time-duration + * + * @param initialTimer + * initial timer in the ISO 8601 durations format, ex: PT45S + * @return next timer value in the ISO 8601 durations format + */ + public static String getNextTimer(String 
initialTimer) { + String stringSecondsValue = initialTimer.split("T")[1].split("S")[0]; + int initialSeconds = Integer.parseInt(stringSecondsValue); + + int currentPower = (int) (Math.log(initialSeconds) / Math.log(2)); + int next = (int) Math.pow(2, ++currentPower); + + return String.format("PT%sS", next); + } + + public static String getZeebeTimerValue(int timer) { + return String.format("PT%sS", timer); + } + + public static TransactionResult mapToResultDTO(Transaction transaction) { + TransactionResult transactionResult = new TransactionResult(); + transactionResult.setId(transaction.getId()); + transactionResult.setRequestId(transaction.getRequestId()); + transactionResult.setPaymentMode(transaction.getPaymentMode()); + transactionResult.setPayerIdentifierType("accountNumber"); + transactionResult.setPayerIdentifier(transaction.getPayerIdentifier()); + transactionResult.setAmount(transaction.getAmount()); + transactionResult.setCurrency(transactionResult.getCurrency()); + transactionResult.setNote(transactionResult.getNote()); + transactionResult.setPayeeIdentifierType("accountNumber"); + if (transaction.getAccountNumber() != null) { + transactionResult.setPayeeIdentifier(transaction.getAccountNumber()); + } else { + transactionResult.setPayeeIdentifier(transaction.getPayeeIdentifier()); + } + transactionResult.setProgramShortCode(transaction.getProgramShortCode()); + transactionResult.setCycle(transactionResult.getCycle()); + return transactionResult; + } + + public static GSMATransaction convertTxnToGSMA(Transaction transaction) { + GSMATransaction gsmaTransaction = new GSMATransaction(); + gsmaTransaction.setAmount(transaction.getAmount()); + gsmaTransaction.setCurrency(transaction.getCurrency()); + GsmaParty payer = new GsmaParty(); + // logger.info("Payer {} {}", transaction.getPayerIdentifier(),payer[0].); + payer.setKey("msisdn"); + payer.setValue(transaction.getPayerIdentifier()); + GsmaParty payee = new GsmaParty(); + payee.setKey("msisdn"); + 
payee.setValue(transaction.getPayeeIdentifier()); + GsmaParty[] debitParty = new GsmaParty[1]; + GsmaParty[] creditParty = new GsmaParty[1]; + debitParty[0] = payer; + creditParty[0] = payee; + gsmaTransaction.setDebitParty(debitParty); + gsmaTransaction.setCreditParty(creditParty); + gsmaTransaction.setRequestingOrganisationTransactionReference("string"); + gsmaTransaction.setSubType("string"); + gsmaTransaction.setDescriptionText("string"); + Fee fees = new Fee(); + fees.setFeeType(transaction.getAmount()); + fees.setFeeCurrency(transaction.getCurrency()); + fees.setFeeType("string"); + Fee[] fee = new Fee[1]; + fee[0] = fees; + gsmaTransaction.setFees(fee); + gsmaTransaction.setGeoCode("37.423825,-122.082900"); + InternationalTransferInformation internationalTransferInformation = new InternationalTransferInformation(); + internationalTransferInformation.setQuotationReference("string"); + internationalTransferInformation.setQuoteId("string"); + internationalTransferInformation.setDeliveryMethod("directtoaccount"); + internationalTransferInformation.setOriginCountry("USA"); + internationalTransferInformation.setReceivingCountry("USA"); + internationalTransferInformation.setRelationshipSender("string"); + internationalTransferInformation.setRemittancePurpose("string"); + gsmaTransaction.setInternationalTransferInformation(internationalTransferInformation); + gsmaTransaction.setOneTimeCode("string"); + IdDocument idDocument = new IdDocument(); + idDocument.setIdType("passport"); + idDocument.setIdNumber("string"); + idDocument.setIssuerCountry("USA"); + idDocument.setExpiryDate("2022-09-28T12:51:19.260+00:00"); + idDocument.setIssueDate("2022-09-28T12:51:19.260+00:00"); + idDocument.setIssuer("string"); + idDocument.setIssuerPlace("string"); + IdDocument[] idDocuments = new IdDocument[1]; + idDocuments[0] = idDocument; + PostalAddress postalAddress = new PostalAddress(); + postalAddress.setAddressLine1("string"); + postalAddress.setAddressLine2("string"); + 
postalAddress.setAddressLine3("string"); + postalAddress.setCity("string"); + postalAddress.setCountry("USA"); + postalAddress.setPostalCode("string"); + postalAddress.setStateProvince("string"); + SubjectName subjectName = new SubjectName(); + subjectName.setFirstName("string"); + subjectName.setLastName("string"); + subjectName.setMiddleName("string"); + subjectName.setTitle("string"); + subjectName.setNativeName("string"); + Kyc recieverKyc = new Kyc(); + recieverKyc.setBirthCountry("USA"); + recieverKyc.setDateOfBirth("2000-11-20"); + recieverKyc.setContactPhone("string"); + recieverKyc.setEmailAddress("string"); + recieverKyc.setEmployerName("string"); + recieverKyc.setGender('m'); + recieverKyc.setIdDocument(idDocuments); + recieverKyc.setNationality("USA"); + recieverKyc.setOccupation("string"); + recieverKyc.setPostalAddress(postalAddress); + recieverKyc.setSubjectName(subjectName); + Kyc senderKyc = new Kyc(); + senderKyc.setBirthCountry("USA"); + senderKyc.setDateOfBirth("2000-11-20"); + senderKyc.setContactPhone("string"); + senderKyc.setEmailAddress("string"); + senderKyc.setEmployerName("string"); + senderKyc.setGender('m'); + senderKyc.setIdDocument(idDocuments); + senderKyc.setNationality("USA"); + senderKyc.setOccupation("string"); + senderKyc.setPostalAddress(postalAddress); + senderKyc.setSubjectName(subjectName); + gsmaTransaction.setReceiverKyc(recieverKyc); + gsmaTransaction.setSenderKyc(senderKyc); + gsmaTransaction.setServicingIdentity("string"); + gsmaTransaction.setRequestDate("2022-09-28T12:51:19.260+00:00"); + + return gsmaTransaction; + } + + public static TransactionChannelRequestDTO convertTxnToInboundTransferPayload(Transaction transaction) { + TransactionChannelRequestDTO requestDTO = new TransactionChannelRequestDTO(); + + MoneyData moneyData = new MoneyData(); + moneyData.setCurrency(transaction.getCurrency()); + moneyData.setAmount(transaction.getAmount()); + requestDTO.setAmount(moneyData); + + IdentifierType identifierType; + 
try { + identifierType = IdentifierType.valueOf(transaction.getPaymentMode().toUpperCase()); + } catch (Exception e) { + identifierType = IdentifierType.MSISDN; + } + + // PAYER SETUP + Party payerParty = new Party(new PartyIdInfo(identifierType, transaction.getPayerIdentifier())); + + // PAYEE SETUP + Party payeeParty = new Party(new PartyIdInfo(identifierType, transaction.getPayeeIdentifier())); + + requestDTO.setPayer(payerParty); + requestDTO.setPayee(payeeParty); + requestDTO.setNote(transaction.getNote()); + + return requestDTO; + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/BpmnConfig.java b/src/main/java/org/mifos/processor/bulk/zeebe/BpmnConfig.java new file mode 100644 index 00000000..f3238741 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/zeebe/BpmnConfig.java @@ -0,0 +1,12 @@ +package org.mifos.processor.bulk.zeebe; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class BpmnConfig { + + @Value("${bpmn.flows.slcb}") + public String slcbBpmn; + +} diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/ZeebeClientConfiguration.java b/src/main/java/org/mifos/processor/bulk/zeebe/ZeebeClientConfiguration.java index 6c9007bc..585c5b5c 100644 --- a/src/main/java/org/mifos/processor/bulk/zeebe/ZeebeClientConfiguration.java +++ b/src/main/java/org/mifos/processor/bulk/zeebe/ZeebeClientConfiguration.java @@ -1,12 +1,11 @@ package org.mifos.processor.bulk.zeebe; import io.camunda.zeebe.client.ZeebeClient; +import java.time.Duration; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import java.time.Duration; - @Configuration public class ZeebeClientConfiguration { @@ -16,14 +15,13 @@ public class ZeebeClientConfiguration { @Value("${zeebe.client.max-execution-threads}") private int zeebeClientMaxThreads; + 
@Value("${zeebe.client.poll-interval}") + private int zeebeClientPollInterval; + @Bean public ZeebeClient setup() { - return ZeebeClient.newClientBuilder() - .gatewayAddress(zeebeBrokerContactpoint) - .usePlaintext() - .defaultJobPollInterval(Duration.ofMillis(1)) - .defaultJobWorkerMaxJobsActive(2000) - .numJobWorkerExecutionThreads(zeebeClientMaxThreads) - .build(); + return ZeebeClient.newClientBuilder().gatewayAddress(zeebeBrokerContactpoint).usePlaintext() + .defaultJobPollInterval(Duration.ofMillis(zeebeClientPollInterval)).defaultJobWorkerMaxJobsActive(2000) + .numJobWorkerExecutionThreads(zeebeClientMaxThreads).build(); } } diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/ZeebeMessages.java b/src/main/java/org/mifos/processor/bulk/zeebe/ZeebeMessages.java index 71fa471f..ee6e9fb7 100644 --- a/src/main/java/org/mifos/processor/bulk/zeebe/ZeebeMessages.java +++ b/src/main/java/org/mifos/processor/bulk/zeebe/ZeebeMessages.java @@ -1,9 +1,12 @@ package org.mifos.processor.bulk.zeebe; -public class ZeebeMessages { +public final class ZeebeMessages { private ZeebeMessages() {} public static final String OPERATOR_MANUAL_RECOVERY = "operator-manual-recovery"; + public static final String ACCOUNT_LOOKUP = "account-lookup"; + + public static final String AUTHORIZATION_RESPONSE = "authorization-response"; } diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/ZeebeProcessStarter.java b/src/main/java/org/mifos/processor/bulk/zeebe/ZeebeProcessStarter.java index 5be27664..538b8280 100644 --- a/src/main/java/org/mifos/processor/bulk/zeebe/ZeebeProcessStarter.java +++ b/src/main/java/org/mifos/processor/bulk/zeebe/ZeebeProcessStarter.java @@ -2,18 +2,16 @@ import io.camunda.zeebe.client.ZeebeClient; import io.camunda.zeebe.client.api.response.ProcessInstanceEvent; +import java.time.Instant; +import java.util.HashMap; +import java.util.Map; +import java.util.UUID; import org.apache.camel.Exchange; import org.slf4j.Logger; import org.slf4j.LoggerFactory; 
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; -import java.time.Instant; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; - - @Component public class ZeebeProcessStarter { @@ -42,6 +40,21 @@ public static void camelHeadersToZeebeVariables(Exchange exchange, Map extraVariables) { + + Map variables = new HashMap<>(); + variables.put(ZeebeVariables.ORIGIN_DATE, Instant.now().toEpochMilli()); + if (extraVariables != null) { + variables.putAll(extraVariables); + } + + // TODO if successful transfer response arrives in X timeout return it otherwise do callback + ProcessInstanceEvent join = zeebeClient.newCreateInstanceCommand().bpmnProcessId(workflowId).latestVersion().variables(variables) + .send().join(); + + logger.info("zeebee workflow instance from process {}", workflowId); + } + public String startZeebeWorkflow(String workflowId, String request, Map extraVariables) { String transactionId = generateTransactionId(); @@ -49,20 +62,16 @@ public String startZeebeWorkflow(String workflowId, String request, Map kafkaTemplate; - @Value("${application.bucket-name}") private String bucketName; @Value("${zeebe.client.evenly-allocated-max-jobs}") private int workerMaxJobs; - @Value(value = "${kafka.topic.gsma.name}") - private String gsmaTopicName; + @Autowired + private CamelContext camelContext; - @Value(value = "${kafka.topic.slcb.name}") - private String slcbTopicName; + @Autowired + private ProducerTemplate producerTemplate; - @PostConstruct + // @PostConstruct public void setupWorkers() { + workerBulkProcessor(); + workerCheckTransactions(); + workerSampleTransactions(); + } + + private void workerBulkProcessor() { + zeebeClient.newWorker().jobType("bulk-processor").handler((client, job) -> { + logger.info("Job '{}' started from process '{}' with key {}", job.getType(), job.getBpmnProcessId(), job.getKey()); + + Map variables = job.getVariablesAsMap(); + String batchId = (String) 
variables.get(BATCH_ID); + String fileName = (String) variables.get("fileName"); + + // TODO: How to get sender information? Hard coded in Channel connector? + InputStream csvFileInputStream = fileTransferService.streamFile(fileName, bucketName); + + CsvSchema schema = CsvSchema.emptySchema().withHeader(); + MappingIterator readValues = csvMapper.readerWithSchemaFor(Transaction.class).with(schema) + .readValues(csvFileInputStream); + + /* + * while (readValues.hasNext()) { Transaction current = readValues.next(); current.setBatchId(batchId); if + * (current.getPayment_mode().equals("gsma")) kafkaTemplate.send(gsmaTopicName, + * objectMapper.writeValueAsString(current)); else if (current.getPayment_mode().equals("sclb")) + * kafkaTemplate.send(slcbTopicName, objectMapper.writeValueAsString(current)); } + */ + + client.newCompleteCommand(job.getKey()).send(); + }).name("bulk-processor").maxJobsActive(workerMaxJobs).open(); + } + + private void workerCheckTransactions() { + String jobType = "check-transactions"; + zeebeClient.newWorker().jobType(jobType).handler((client, job) -> { + logger.info("Job '{}' started from process '{}' with key {}", job.getType(), job.getBpmnProcessId(), job.getKey()); + + Map variables = job.getVariablesAsMap(); + String batchId = (String) variables.get(BATCH_ID); + + Exchange exchange = new DefaultExchange(camelContext); + exchange.setProperty(BATCH_ID, batchId); + producerTemplate.send("direct:check-transactions", exchange); + + client.newCompleteCommand(job.getKey()).send(); + }).name(jobType).maxJobsActive(workerMaxJobs).open(); + } + + private void workerSampleTransactions() { + String jobType = "sample-transactions"; + zeebeClient.newWorker().jobType(jobType).handler((client, job) -> { + logger.info("Job '{}' started from process '{}' with key {}", job.getType(), job.getBpmnProcessId(), job.getKey()); + + Map variables = job.getVariablesAsMap(); + String batchId = (String) variables.get(BATCH_ID); - zeebeClient.newWorker() - 
.jobType("bulk-processor") - .handler((client, job) -> { - logger.info("Job '{}' started from process '{}' with key {}", job.getType(), job.getBpmnProcessId(), job.getKey()); - - Map variables = job.getVariablesAsMap(); - String batchId = (String) variables.get(BATCH_ID); - String fileName = (String) variables.get("fileName"); - - // TODO: How to get sender information? Hard coded in Channel connector? - byte[] csvFile = fileTransferService.downloadFile(fileName, bucketName); - - CsvSchema schema = CsvSchema.emptySchema().withHeader(); - MappingIterator readValues = csvMapper.readerWithSchemaFor(Transaction.class).with(schema).readValues(csvFile); - - while (readValues.hasNext()) { - Transaction current = readValues.next(); - current.setBatchId(batchId); - if (current.getPayment_mode().equals("gsma")) - kafkaTemplate.send(gsmaTopicName, objectMapper.writeValueAsString(current)); - else if (current.getPayment_mode().equals("sclb")) - kafkaTemplate.send(slcbTopicName, objectMapper.writeValueAsString(current)); - } - - client.newCompleteCommand(job.getKey()) - .send(); - }) - .name("bulk-processor") - .maxJobsActive(workerMaxJobs) - .open(); + Exchange exchange = new DefaultExchange(camelContext); + exchange.setProperty(BATCH_ID, batchId); + exchange.setProperty(IS_BATCH_READY, variables.get(IS_SAMPLE_READY)); + producerTemplate.send("direct:sample-transactions", exchange); + client.newCompleteCommand(job.getKey()).send(); + }).name(jobType).maxJobsActive(workerMaxJobs).open(); } -} \ No newline at end of file +} diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/worker/AccountLookupCallbackWorker.java b/src/main/java/org/mifos/processor/bulk/zeebe/worker/AccountLookupCallbackWorker.java new file mode 100644 index 00000000..a424dc2f --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/zeebe/worker/AccountLookupCallbackWorker.java @@ -0,0 +1,48 @@ +package org.mifos.processor.bulk.zeebe.worker; + +import static 
org.mifos.processor.bulk.camel.config.CamelProperties.PAYEE_PARTY_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CHANNEL_REQUEST; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.ORIGIN_CHANNEL_REQUEST; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PARTY_LOOKUP_FSP_ID; +import static org.mifos.processor.bulk.zeebe.worker.Worker.ACCOUNT_LOOKUP_CALLBACK; + +import com.fasterxml.jackson.databind.ObjectMapper; +import io.camunda.zeebe.client.ZeebeClient; +import java.util.Map; +import org.apache.camel.CamelContext; +import org.mifos.connector.common.channel.dto.TransactionChannelRequestDTO; +import org.mifos.connector.common.mojaloop.dto.Party; +import org.mifos.connector.common.mojaloop.dto.PartyIdInfo; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Component +public class AccountLookupCallbackWorker extends BaseWorker { + + @Autowired + private ZeebeClient zeebeClient; + @Autowired + private CamelContext camelContext; + @Autowired + private ObjectMapper objectMapper; + + @Override + public void setup() { + logger.info("## generating " + ACCOUNT_LOOKUP_CALLBACK + "zeebe worker"); + zeebeClient.newWorker().jobType(ACCOUNT_LOOKUP_CALLBACK.getValue()).handler((client, job) -> { + logger.info("Job '{}' started from process '{}' with key {}", job.getType(), job.getBpmnProcessId(), job.getKey()); + Map existingVariables = job.getVariablesAsMap(); + existingVariables.put(ORIGIN_CHANNEL_REQUEST, existingVariables.get(CHANNEL_REQUEST)); + TransactionChannelRequestDTO transactionChannelRequestDTO = objectMapper + .readValue((String) existingVariables.get(CHANNEL_REQUEST), TransactionChannelRequestDTO.class); + String payeeId = existingVariables.get(PAYEE_PARTY_ID).toString(); + String payeeFspId = existingVariables.get(PARTY_LOOKUP_FSP_ID).toString(); + PartyIdInfo partyIdInfo = new 
PartyIdInfo(transactionChannelRequestDTO.getPayee().getPartyIdInfo().getPartyIdType(), payeeId); + partyIdInfo.setFspId(payeeFspId); + Party payee = new Party(partyIdInfo); + transactionChannelRequestDTO.setPayee(payee); + existingVariables.put(CHANNEL_REQUEST, objectMapper.writeValueAsString(transactionChannelRequestDTO)); + client.newCompleteCommand(job.getKey()).variables(existingVariables).send().join(); + }).name(ACCOUNT_LOOKUP_CALLBACK.getValue()).open(); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/worker/AccountLookupWorker.java b/src/main/java/org/mifos/processor/bulk/zeebe/worker/AccountLookupWorker.java new file mode 100644 index 00000000..6107c604 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/zeebe/worker/AccountLookupWorker.java @@ -0,0 +1,89 @@ +package org.mifos.processor.bulk.zeebe.worker; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.CACHED_TRANSACTION_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.HEADER_REGISTERING_INSTITUTE_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.HOST; +import static org.mifos.processor.bulk.camel.config.CamelProperties.PAYEE_IDENTITY; +import static org.mifos.processor.bulk.camel.config.CamelProperties.PAYMENT_MODALITY; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.ACCOUNT_LOOKUP_RETRY_COUNT; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CALLBACK; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CHANNEL_REQUEST; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.INITIATOR_FSP_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.IS_RTP_REQUEST; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.ORIGIN_DATE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.TENANT_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.TRANSACTION_ID; +import static org.mifos.processor.bulk.zeebe.worker.Worker.ACCOUNT_LOOKUP; + +import 
com.fasterxml.jackson.databind.ObjectMapper; +import io.camunda.zeebe.client.ZeebeClient; +import java.util.Map; +import org.apache.camel.CamelContext; +import org.apache.camel.Exchange; +import org.apache.camel.ProducerTemplate; +import org.apache.camel.support.DefaultExchange; +import org.mifos.connector.common.channel.dto.TransactionChannelRequestDTO; +import org.mifos.connector.common.mojaloop.dto.PartyIdInfo; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class AccountLookupWorker extends BaseWorker { + + @Autowired + private ZeebeClient zeebeClient; + + @Autowired + private ProducerTemplate producerTemplate; + @Autowired + private CamelContext camelContext; + @Autowired + private ObjectMapper objectMapper; + @Value("${identity_account_mapper.hostname}") + private String identityMapperURL; + @Value("${identity_account_mapper.account_lookup_callback}") + private String accountLookupCallback; + @Value("${identity_account_mapper.account_lookup}") + private String accountLookupEndpoint; + + @Override + public void setup() { + logger.info("## generating " + ACCOUNT_LOOKUP + "zeebe worker"); + zeebeClient.newWorker().jobType(ACCOUNT_LOOKUP.getValue()).handler((client, job) -> { + logger.info("Job '{}' started from process '{}' with key {}", job.getType(), job.getBpmnProcessId(), job.getKey()); + Map existingVariables = job.getVariablesAsMap(); + logger.info(existingVariables.toString()); + existingVariables.put(ACCOUNT_LOOKUP_RETRY_COUNT, 1); + existingVariables.put(CACHED_TRANSACTION_ID, job.getKey()); + + boolean isTransactionRequest = (boolean) existingVariables.get(IS_RTP_REQUEST); + String tenantId = (String) existingVariables.get(TENANT_ID); + Object channelRequest = existingVariables.get(CHANNEL_REQUEST); + TransactionChannelRequestDTO request = objectMapper.readValue((String) channelRequest, 
TransactionChannelRequestDTO.class); + + existingVariables.put(INITIATOR_FSP_ID, tenantId); + PartyIdInfo requestedParty = isTransactionRequest ? request.getPayer().getPartyIdInfo() : request.getPayee().getPartyIdInfo(); + + String payeeIdentity = requestedParty.getPartyIdentifier(); + String paymentModality = requestedParty.getPartyIdType().toString(); + + Exchange exchange = new DefaultExchange(camelContext); + exchange.setProperty(HOST, identityMapperURL); + exchange.setProperty(PAYEE_IDENTITY, payeeIdentity); + exchange.setProperty(PAYMENT_MODALITY, paymentModality); + exchange.setProperty(CALLBACK, identityMapperURL + accountLookupCallback); + exchange.setProperty(TRANSACTION_ID, existingVariables.get(TRANSACTION_ID)); + exchange.setProperty("requestId", job.getKey()); + exchange.setProperty(CHANNEL_REQUEST, channelRequest); + exchange.setProperty(ORIGIN_DATE, existingVariables.get(ORIGIN_DATE)); + exchange.setProperty(TENANT_ID, tenantId); + exchange.setProperty(HEADER_REGISTERING_INSTITUTE_ID, existingVariables.get(HEADER_REGISTERING_INSTITUTE_ID)); + producerTemplate.send("direct:send-account-lookup", exchange); + + client.newCompleteCommand(job.getKey()).variables(existingVariables).send(); + }).name(String.valueOf(ACCOUNT_LOOKUP)).open(); + + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/worker/AggregateWorker.java b/src/main/java/org/mifos/processor/bulk/zeebe/worker/AggregateWorker.java new file mode 100644 index 00000000..9bdd5d3f --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/zeebe/worker/AggregateWorker.java @@ -0,0 +1,73 @@ +package org.mifos.processor.bulk.zeebe.worker; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.BATCH_STATUS_FAILED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.BATCH_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CALLBACK; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CALLBACK_RETRY; +import static 
org.mifos.processor.bulk.zeebe.ZeebeVariables.CLIENT_CORRELATION_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.COMPLETION_RATE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.ERROR_CODE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.ERROR_DESCRIPTION; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.MAX_CALLBACK_RETRY; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PHASES; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PHASE_COUNT; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.RETRY; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.TENANT_ID; + +import java.util.Map; +import org.apache.camel.Exchange; +import org.apache.camel.support.DefaultExchange; +import org.mifos.processor.bulk.camel.routes.RouteId; +import org.springframework.stereotype.Component; + +@Component +public class AggregateWorker extends BaseWorker { + + @Override + public void setup() { + // newWorker(Worker.BATCH_STATUS, (client, job) -> { + newWorker(Worker.BATCH_AGGREGATE, (client, job) -> { + logger.info("Started batchAggregateWorker"); + logger.debug("Job '{}' started from process '{}' with key {}", job.getType(), job.getBpmnProcessId(), job.getKey()); + Map variables = job.getVariablesAsMap(); + + int retry = (int) variables.getOrDefault(RETRY, 0); + int successRate = (int) variables.getOrDefault(COMPLETION_RATE, 0); + + Exchange exchange = new DefaultExchange(camelContext); + exchange.setProperty(CLIENT_CORRELATION_ID, variables.get(CLIENT_CORRELATION_ID)); + exchange.setProperty(BATCH_ID, variables.get(BATCH_ID)); + exchange.setProperty(TENANT_ID, variables.get(TENANT_ID)); + exchange.setProperty(MAX_CALLBACK_RETRY, variables.get(MAX_CALLBACK_RETRY)); + exchange.setProperty(CALLBACK_RETRY, variables.getOrDefault(CALLBACK_RETRY, 0)); + exchange.setProperty(CALLBACK, variables.get(CALLBACK)); + // exchange.setProperty(COMPLETION_RATE, variables.get(COMPLETION_RATE)); + 
exchange.setProperty(PHASES, variables.get(PHASES)); + exchange.setProperty(PHASE_COUNT, variables.get(PHASE_COUNT)); + sendToCamelRoute(RouteId.BATCH_AGGREGATE, exchange); + + Boolean batchStatusFailed = exchange.getProperty(BATCH_STATUS_FAILED, Boolean.class); + if (batchStatusFailed == null || !batchStatusFailed) { + if (exchange.getException() != null && exchange.getException().getMessage() != null + && exchange.getException().getMessage().contains("404")) { + logger.error("An error occurred, retrying"); + successRate = 0; + } else { + successRate = exchange.getProperty(COMPLETION_RATE, Long.class).intValue(); + } + } else { + variables.put(ERROR_CODE, exchange.getProperty(ERROR_CODE)); + variables.put(ERROR_DESCRIPTION, exchange.getProperty(ERROR_DESCRIPTION)); + logger.info("Error: {}, {}", variables.get(ERROR_CODE), variables.get(ERROR_DESCRIPTION)); + } + + variables.put(COMPLETION_RATE, successRate); + variables.put(RETRY, ++retry); + + logger.info("Retry: {} and Success Rate: {}", retry, successRate); + client.newCompleteCommand(job.getKey()).variables(variables).send(); + logger.info("Completed batchAggregateWorker"); + }); + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/worker/ApprovalWorker.java b/src/main/java/org/mifos/processor/bulk/zeebe/worker/ApprovalWorker.java new file mode 100644 index 00000000..302197cf --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/zeebe/worker/ApprovalWorker.java @@ -0,0 +1,25 @@ +package org.mifos.processor.bulk.zeebe.worker; + +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.APPROVAL_FAILED; + +import java.util.Map; +import org.springframework.stereotype.Component; + +@Component +public class ApprovalWorker extends BaseWorker { + + @Override + public void setup() { + newWorker(Worker.APPROVAL, (client, job) -> { + logger.debug("Job '{}' started from process '{}' with key {}", job.getType(), job.getBpmnProcessId(), job.getKey()); + Map variables = job.getVariablesAsMap(); + + if 
(workerConfig.isApprovalWorkerEnabled) { + variables.put(APPROVAL_FAILED, false); + } + + client.newCompleteCommand(job.getKey()).variables(variables).send(); + }); + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/worker/AuthorizationWorker.java b/src/main/java/org/mifos/processor/bulk/zeebe/worker/AuthorizationWorker.java new file mode 100644 index 00000000..b8f22132 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/zeebe/worker/AuthorizationWorker.java @@ -0,0 +1,97 @@ +package org.mifos.processor.bulk.zeebe.worker; + +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.APPROVED_AMOUNT; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.AUTHORIZATION_ACCEPTED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.AUTHORIZATION_SUCCESSFUL; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.BATCH_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CLIENT_CORRELATION_ID; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.micrometer.core.instrument.util.StringUtils; +import java.util.Map; +import org.mifos.processor.bulk.schema.AuthorizationRequest; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpMethod; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.stereotype.Component; +import org.springframework.web.client.RestTemplate; + +@Component +public class AuthorizationWorker extends BaseWorker { + + @Value("${batch-authorization.callback-url}") + private String callbackURLPath; + + @Value("${mock-payment-schema.contactpoint}") + private String mockPaymentSchemaContactPoint; + + @Value("${mock-payment-schema.endpoints.authorization}") + private String 
authorizationEndpoint; + + private static final String X_CLIENT_CORRELATION_ID = "X-Client-Correlation-ID"; + + private static final String X_CALLBACK_URL = "X-CallbackURL"; + + @Autowired + ObjectMapper objectMapper; + + @Override + public void setup() { + newWorker(Worker.AUTHORIZATION, (client, job) -> { + logger.info("Job '{}' started from process '{}' with key {}", job.getType(), job.getBpmnProcessId(), job.getKey()); + Map variables = job.getVariablesAsMap(); + + if (!workerConfig.isAuthorizationWorkerEnabled) { + variables.put(AUTHORIZATION_SUCCESSFUL, true); + client.newCompleteCommand(job.getKey()).variables(variables).send(); + return; + } + + logger.debug("Variables: {}", variables); + + String payerIdentifier = (String) variables.get("payerIdentifier"); + String totalBatchAmount = (String) variables.get("partyLookupSuccessfulTransactionAmount"); + String currency = (String) variables.get("currency"); + + String batchId = (String) variables.get(BATCH_ID); + String clientCorrelationId = Long.toString(job.getKey()); + + AuthorizationRequest requestPayload = new AuthorizationRequest(batchId, payerIdentifier, currency, totalBatchAmount); + HttpStatus httpStatus = invokeBatchAuthorizationApi(batchId, requestPayload, clientCorrelationId); + + logger.info("Httpstatus: {}", httpStatus); + + variables.put(APPROVED_AMOUNT, totalBatchAmount); + variables.put(CLIENT_CORRELATION_ID, clientCorrelationId); + variables.put(AUTHORIZATION_ACCEPTED, httpStatus.is2xxSuccessful()); + client.newCompleteCommand(job.getKey()).variables(variables).send(); + }); + } + + private HttpStatus invokeBatchAuthorizationApi(String batchId, AuthorizationRequest requestPayload, String clientCorrelationId) + throws JsonProcessingException { + logger.info("Calling auth API"); + if (StringUtils.isBlank(requestPayload.getAmount())) { + requestPayload.setAmount("0"); + } + RestTemplate restTemplate = new RestTemplate(); + HttpHeaders headers = new HttpHeaders(); + 
headers.add(X_CLIENT_CORRELATION_ID, clientCorrelationId); + headers.add(X_CALLBACK_URL, callbackURLPath); + + HttpEntity requestEntity = new HttpEntity<>(requestPayload, headers); + String endpoint = mockPaymentSchemaContactPoint + authorizationEndpoint + batchId; + endpoint = endpoint + "?command=authorize"; + + logger.debug("Auth API request headers: {}", headers); + logger.info("MockPaymentSchema endpoint: {}", endpoint); + logger.debug("Body: {}", objectMapper.writeValueAsString(requestPayload)); + ResponseEntity responseEntity = restTemplate.exchange(endpoint, HttpMethod.POST, requestEntity, String.class); + return responseEntity.getStatusCode(); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/worker/BaseWorker.java b/src/main/java/org/mifos/processor/bulk/zeebe/worker/BaseWorker.java new file mode 100644 index 00000000..8262561f --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/zeebe/worker/BaseWorker.java @@ -0,0 +1,58 @@ +package org.mifos.processor.bulk.zeebe.worker; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.csv.CsvMapper; +import io.camunda.zeebe.client.ZeebeClient; +import io.camunda.zeebe.client.api.worker.JobHandler; +import javax.annotation.PostConstruct; +import org.apache.camel.CamelContext; +import org.apache.camel.Exchange; +import org.apache.camel.ProducerTemplate; +import org.mifos.processor.bulk.camel.routes.RouteId; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public abstract class BaseWorker { + + @Autowired + private ZeebeClient zeebeClient; + + @Autowired + private CsvMapper csvMapper; + + @Autowired + private ObjectMapper objectMapper; + + @Value("${application.bucket-name}") + private String bucketName; + + @Value("${zeebe.client.evenly-allocated-max-jobs}") + 
private int workerMaxJobs; + + @Autowired + protected CamelContext camelContext; + + @Autowired + private ProducerTemplate producerTemplate; + + @Autowired + protected WorkerConfig workerConfig; + + protected Logger logger = LoggerFactory.getLogger(this.getClass()); + + @PostConstruct + public abstract void setup(); + + public void newWorker(Worker worker, JobHandler handler) { + zeebeClient.newWorker().jobType(worker.getValue()).handler(handler).name(worker.getValue()).maxJobsActive(workerMaxJobs).open(); + } + + public void sendToCamelRoute(RouteId routeId, Exchange exchange) { + producerTemplate.send(routeId.getValue(), exchange); + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/worker/BatchAccountLookupCallbackWorker.java b/src/main/java/org/mifos/processor/bulk/zeebe/worker/BatchAccountLookupCallbackWorker.java new file mode 100644 index 00000000..f99c070b --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/zeebe/worker/BatchAccountLookupCallbackWorker.java @@ -0,0 +1,45 @@ +package org.mifos.processor.bulk.zeebe.worker; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.SERVER_FILE_NAME; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FILE_NAME; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PARTY_LOOKUP_FAILED; +import static org.mifos.processor.bulk.zeebe.worker.Worker.BATCH_ACCOUNT_LOOKUP_CALLBACK; + +import com.fasterxml.jackson.databind.ObjectMapper; +import io.camunda.zeebe.client.ZeebeClient; +import java.util.Map; +import org.apache.camel.CamelContext; +import org.apache.camel.Exchange; +import org.apache.camel.support.DefaultExchange; +import org.mifos.processor.bulk.camel.routes.RouteId; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Component +public class BatchAccountLookupCallbackWorker extends BaseWorker { + + @Autowired + private ZeebeClient zeebeClient; + @Autowired + private CamelContext camelContext; 
+ @Autowired + private ObjectMapper objectMapper; + + @Override + public void setup() { + logger.info("## generating " + BATCH_ACCOUNT_LOOKUP_CALLBACK + "zeebe worker"); + newWorker(BATCH_ACCOUNT_LOOKUP_CALLBACK, ((client, job) -> { + Map variables = job.getVariablesAsMap(); + Exchange exchange = new DefaultExchange(camelContext); + String filename = (String) variables.get(FILE_NAME); + String batchAccountLookupCallback = (String) variables.get("batchAccountLookupCallback"); + variables.put(PARTY_LOOKUP_FAILED, false); + variables.put("batchAccountLookup", true); + exchange.setProperty(SERVER_FILE_NAME, filename); + exchange.setProperty("batchAccountLookupCallback", batchAccountLookupCallback); + exchange.setProperty("workflowInstanceKey", job.getProcessInstanceKey()); + sendToCamelRoute(RouteId.ACCOUNT_LOOKUP_CALLBACK, exchange); + client.newCompleteCommand(job.getKey()).variables(variables).send(); + })); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/worker/BatchAccountLookupWorker.java b/src/main/java/org/mifos/processor/bulk/zeebe/worker/BatchAccountLookupWorker.java new file mode 100644 index 00000000..d86d535f --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/zeebe/worker/BatchAccountLookupWorker.java @@ -0,0 +1,76 @@ +package org.mifos.processor.bulk.zeebe.worker; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.CACHED_TRANSACTION_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.HEADER_REGISTERING_INSTITUTE_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.REGISTERING_INSTITUTE_ID; +import static org.mifos.processor.bulk.camel.config.CamelProperties.SERVER_FILE_NAME; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CALLBACK; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FILE_NAME; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PARTY_LOOKUP_FAILED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.REQUEST_ID; 
+import static org.mifos.processor.bulk.zeebe.worker.Worker.BATCH_ACCOUNT_LOOKUP; + +import com.fasterxml.jackson.databind.ObjectMapper; +import io.camunda.zeebe.client.ZeebeClient; +import java.util.Map; +import org.apache.camel.CamelContext; +import org.apache.camel.Exchange; +import org.apache.camel.ProducerTemplate; +import org.apache.camel.support.DefaultExchange; +import org.mifos.processor.bulk.camel.routes.RouteId; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class BatchAccountLookupWorker extends BaseWorker { + + @Autowired + private ZeebeClient zeebeClient; + + @Autowired + private ProducerTemplate producerTemplate; + @Autowired + private CamelContext camelContext; + @Autowired + private ObjectMapper objectMapper; + @Value("${identity_account_mapper.hostname}") + private String identityMapperURL; + @Value("${bulk-processor.hostname}") + private String bulkURL; + @Value("${identity_account_mapper.batch_account_lookup_callback}") + private String batchAccountLookupCallback; + + @Override + public void setup() { + + newWorker(BATCH_ACCOUNT_LOOKUP, ((client, job) -> { + logger.info("Job '{}' started from process '{}' with key {}", job.getType(), job.getBpmnProcessId(), job.getKey()); + Map variables = job.getVariablesAsMap(); + Exchange exchange = new DefaultExchange(camelContext); + String filename = (String) variables.get(FILE_NAME); + Object registeringInstituteIdObj = variables.get(REGISTERING_INSTITUTE_ID); + String registeringInstituteId = registeringInstituteIdObj != null ? 
registeringInstituteIdObj.toString() : null; + logger.info("registeringInstituteId in worker {}", registeringInstituteId); + variables.put(CACHED_TRANSACTION_ID, job.getKey()); + exchange.setProperty(HEADER_REGISTERING_INSTITUTE_ID, registeringInstituteId); + exchange.setProperty(SERVER_FILE_NAME, filename); + exchange.setProperty(REQUEST_ID, job.getKey()); + exchange.setProperty(CALLBACK, identityMapperURL + batchAccountLookupCallback); + + try { + logger.info("=== BATCH ACCOUNT LOOKUP WORKER DEBUG ==="); + logger.info("Sending to ACCOUNT_LOOKUP route with registeringInstituteId: {}", registeringInstituteId); + logger.info("Filename: {}", filename); + logger.info("Callback URL: {}", identityMapperURL + batchAccountLookupCallback); + sendToCamelRoute(RouteId.ACCOUNT_LOOKUP, exchange); + logger.info("ACCOUNT_LOOKUP route call completed successfully"); + } catch (Exception e) { + logger.error("=== ACCOUNT LOOKUP FAILED ==="); + logger.error("Exception calling ACCOUNT_LOOKUP route: " + e.getMessage(), e); + variables.put(PARTY_LOOKUP_FAILED, true); + } + client.newCompleteCommand(job.getKey()).variables(variables).send(); + })); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/worker/BatchStatusWorker.java b/src/main/java/org/mifos/processor/bulk/zeebe/worker/BatchStatusWorker.java new file mode 100644 index 00000000..600f571b --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/zeebe/worker/BatchStatusWorker.java @@ -0,0 +1,71 @@ +package org.mifos.processor.bulk.zeebe.worker; + +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.BATCH_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.COMPLETION_RATE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.TENANT_ID; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.util.Map; +import org.mifos.processor.bulk.OperationsAppConfig; +import org.mifos.processor.bulk.schema.BatchDTO; +import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpMethod; +import org.springframework.http.ResponseEntity; +import org.springframework.web.client.RestTemplate; +import org.springframework.web.util.UriComponentsBuilder; + +public class BatchStatusWorker extends BaseWorker { + + @Autowired + public OperationsAppConfig operationsAppConfig; + + @Override + public void setup() { + + newWorker(Worker.BATCH_STATUS, ((client, job) -> { + logger.info("Started batchStatusWorker"); + logger.debug("Job '{}' started from process '{}' with key {}", job.getType(), job.getBpmnProcessId(), job.getKey()); + + Map variables = job.getVariablesAsMap(); + String batchId = (String) variables.get(BATCH_ID); + String tenantId = (String) variables.get(TENANT_ID); + BatchDTO batchDTOResponse = invokeBatchAggregationApi(batchId, tenantId); + float successRate = calculateSuccessPercentage(batchDTOResponse); + variables.put(COMPLETION_RATE, successRate); + client.newCompleteCommand(job.getKey()).variables(variables).send(); + logger.info("Completed batchStatusWorker"); + })); + } + + private float calculateSuccessPercentage(BatchDTO batchDTO) { + if (batchDTO.getTotal() != null && batchDTO.getTotal() != 0) { + return (((float) batchDTO.getSuccessful() / batchDTO.getTotal()) * 100); + } + return 0; + } + + private BatchDTO invokeBatchAggregationApi(String batchId, String tenantId) { + RestTemplate restTemplate = new RestTemplate(); + HttpHeaders headers = new HttpHeaders(); + headers.set("Platform-TenantId", tenantId); + String url = operationsAppConfig.batchSummaryUrl; + + UriComponentsBuilder uriBuilder = UriComponentsBuilder.fromUriString(url).queryParam("batchId", batchId); + String finalUrl = uriBuilder.toUriString(); + + ResponseEntity response = restTemplate.exchange(finalUrl, HttpMethod.GET, new HttpEntity<>(null, headers), String.class); + String 
batchAggregationResponse = response != null ? response.getBody() : null; + ObjectMapper objectMapper = new ObjectMapper(); + BatchDTO batchDTO = null; + try { + batchDTO = objectMapper.readValue(batchAggregationResponse, BatchDTO.class); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + logger.info("Batch summary response: {}", batchDTO); + return batchDTO; + } +} diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/worker/DeDuplicationWorker.java b/src/main/java/org/mifos/processor/bulk/zeebe/worker/DeDuplicationWorker.java new file mode 100644 index 00000000..5448dcdd --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/zeebe/worker/DeDuplicationWorker.java @@ -0,0 +1,55 @@ +package org.mifos.processor.bulk.zeebe.worker; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.SERVER_FILE_NAME; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.DE_DUPLICATION_ENABLE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.DE_DUPLICATION_FAILED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.DUPLICATE_TRANSACTION_COUNT; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FAILED_TRANSACTION_FILE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FILE_NAME; + +import java.util.Map; +import org.apache.camel.Exchange; +import org.apache.camel.support.DefaultExchange; +import org.mifos.processor.bulk.camel.routes.RouteId; +import org.springframework.stereotype.Component; + +@Component +public class DeDuplicationWorker extends BaseWorker { + + @Override + public void setup() { + newWorker(Worker.DE_DEPLICATION, (client, job) -> { + logger.debug("Job '{}' started from process '{}' with key {}", job.getType(), job.getBpmnProcessId(), job.getKey()); + logger.info("Started {} worker", Worker.DE_DEPLICATION.getValue()); + Map variables = job.getVariablesAsMap(); + + if ((Boolean) variables.get(DE_DUPLICATION_ENABLE)) { + variables.put(DE_DUPLICATION_FAILED, false); + } + + 
Exchange exchange = new DefaultExchange(camelContext); + + String filename = (String) variables.get(FILE_NAME); + + logger.info("Filename in worker before duplication is: {}", filename); + + exchange.setProperty(SERVER_FILE_NAME, filename); + + sendToCamelRoute(RouteId.DE_DUPLICATION, exchange); + + boolean deDuplicationFailed = (Boolean) exchange.getProperty(DE_DUPLICATION_FAILED); + int duplicateTransactionCount = exchange.getProperty(DUPLICATE_TRANSACTION_COUNT, Integer.class); + if (duplicateTransactionCount > 0) { + // if duplicate txn exist + variables.put(FAILED_TRANSACTION_FILE, exchange.getProperty(FAILED_TRANSACTION_FILE, String.class)); + } + variables.put(DE_DUPLICATION_FAILED, deDuplicationFailed); + variables.put(DUPLICATE_TRANSACTION_COUNT, duplicateTransactionCount); + + logger.debug("Zeebe variables in dedup: {}", variables); + client.newCompleteCommand(job.getKey()).variables(variables).send(); + logger.info("Completed {} worker", Worker.DE_DEPLICATION); + }); + + } +} diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/worker/FormattingWorker.java b/src/main/java/org/mifos/processor/bulk/zeebe/worker/FormattingWorker.java new file mode 100644 index 00000000..34c40d58 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/zeebe/worker/FormattingWorker.java @@ -0,0 +1,50 @@ +package org.mifos.processor.bulk.zeebe.worker; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.SERVER_FILE_NAME; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FILE_NAME; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FORMATTING_FAILED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FORMATTING_STANDARD; + +import java.util.Map; +import org.apache.camel.Exchange; +import org.apache.camel.support.DefaultExchange; +import org.mifos.processor.bulk.camel.routes.RouteId; +import org.springframework.stereotype.Component; + +@Component +public class FormattingWorker extends BaseWorker { + + @Override + public 
void setup() { + + /** + * Starts the new worker for formatting of the data. Performs below tasks 1. Downloads the file from cloud. 2. + * Parse the data into POJO. 3. Format the data based on field configured in application.yaml 4. Uploads the + * updated file in cloud + */ + newWorker(Worker.FORMATTING, (client, job) -> { + logger.debug("Job '{}' started from process '{}' with key {}", job.getType(), job.getBpmnProcessId(), job.getKey()); + Map variables = job.getVariablesAsMap(); + if (workerConfig.isFormattingWorkerEnabled) { + variables.put(FORMATTING_FAILED, false); + } + + String filename = (String) variables.get(FILE_NAME); + Exchange exchange = new DefaultExchange(camelContext); + exchange.setProperty(SERVER_FILE_NAME, filename); + + try { + sendToCamelRoute(RouteId.FORMATTING, exchange); + assert !exchange.getProperty(FORMATTING_FAILED, Boolean.class); + } catch (Exception e) { + variables.put(FORMATTING_FAILED, true); + } + + variables.put(FORMATTING_FAILED, false); + variables.put(FORMATTING_STANDARD, exchange.getProperty(FORMATTING_STANDARD)); + + client.newCompleteCommand(job.getKey()).variables(variables).send(); + }); + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/worker/InitSubBatchWorker.java b/src/main/java/org/mifos/processor/bulk/zeebe/worker/InitSubBatchWorker.java new file mode 100644 index 00000000..f4fe7f76 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/zeebe/worker/InitSubBatchWorker.java @@ -0,0 +1,139 @@ +package org.mifos.processor.bulk.zeebe.worker; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.SERVER_FILE_NAME; +import static org.mifos.processor.bulk.camel.config.CamelProperties.SUB_BATCH_DETAILS; +import static org.mifos.processor.bulk.camel.config.CamelProperties.SUB_BATCH_ENTITY; +import static org.mifos.processor.bulk.camel.config.CamelProperties.TENANT_NAME; +import static org.mifos.processor.bulk.camel.config.CamelProperties.ZEEBE_VARIABLE; +import static 
org.mifos.processor.bulk.zeebe.ZeebeVariables.BATCH_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FILE_NAME; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.INIT_FAILURE_SUB_BATCHES; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.INIT_SUB_BATCH_FAILED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.INIT_SUCCESS_SUB_BATCHES; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PURPOSE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.REMAINING_SUB_BATCH; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.REQUEST_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.SPLITTING_ENABLED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.SUB_BATCHES; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.TENANT_ID; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import org.apache.camel.Exchange; +import org.apache.camel.support.DefaultExchange; +import org.mifos.processor.bulk.camel.routes.RouteId; +import org.mifos.processor.bulk.schema.SubBatchEntity; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Component +public class InitSubBatchWorker extends BaseWorker { + + @Autowired + private ObjectMapper objectMapper; + + // FIXED: Generic-safe, null-safe, no inference issues + private static List toStringList(Object obj) { + if (obj == null) { + return new ArrayList<>(); + } + if (obj instanceof List list) { + List result = new ArrayList<>(); + for (Object item : list) { + if (item != null) { + result.add(item.toString()); + } + } + return result; + } + return new ArrayList<>(); + } + + // FIXED: For SUB_BATCH_DETAILS → List expected + private static List toObjectList(Object obj) { + if (obj == null) { + return new ArrayList<>(); + } + if (obj 
instanceof List list) { + return new ArrayList<>(list); // safe: List → List + } + return new ArrayList<>(); + } + + @Override + public void setup() { + newWorker(Worker.INIT_SUB_BATCH, (client, job) -> { + logger.info("Started INIT_SUB_BATCH worker"); + + Map variables = job.getVariablesAsMap(); + + // 100% SAFE LISTS — NO NPE, NO COMPILATION ISSUES + List subBatches = toStringList(variables.get(SUB_BATCHES)); + List successSubBatches = toStringList(variables.get(INIT_SUCCESS_SUB_BATCHES)); + List failureSubBatches = toStringList(variables.get(INIT_FAILURE_SUB_BATCHES)); + List subBatchDetails = toObjectList(variables.get(SUB_BATCH_DETAILS)); + + // Early exit + if (subBatches.isEmpty()) { + logger.info("No sub-batches to process. Completing job early."); + variables.put(REMAINING_SUB_BATCH, 0); + variables.put(SUB_BATCHES, new ArrayList()); + variables.put(INIT_SUCCESS_SUB_BATCHES, new ArrayList()); + variables.put(INIT_FAILURE_SUB_BATCHES, new ArrayList()); + client.newCompleteCommand(job.getKey()).variables(variables).send().join(); + return; + } + + // Handle non-splitting mode + Boolean splittingEnabled = (Boolean) variables.get(SPLITTING_ENABLED); + if (Boolean.FALSE.equals(splittingEnabled)) { + String fileName = (String) variables.get(FILE_NAME); + if (fileName != null && !subBatches.contains(fileName)) { + subBatches.add(fileName); + } + } + + // Safe remove + String currentFile = subBatches.remove(0); + + // Parse sub-batch details + List subBatchEntityList = objectMapper.convertValue(subBatchDetails, + new TypeReference>() {}); + + SubBatchEntity subBatchEntity = subBatchEntityList.stream() + .filter(e -> e.getRequestFile() != null && e.getRequestFile().contains(currentFile)).findFirst().orElse(null); + + // Setup Camel exchange + Exchange exchange = new DefaultExchange(camelContext); + exchange.setProperty(TENANT_NAME, variables.get(TENANT_ID)); + exchange.setProperty(SERVER_FILE_NAME, currentFile); + exchange.setProperty(BATCH_ID, 
variables.get(BATCH_ID)); + exchange.setProperty(REQUEST_ID, variables.get(REQUEST_ID)); + exchange.setProperty(PURPOSE, variables.get(PURPOSE)); + exchange.setProperty(ZEEBE_VARIABLE, variables); + exchange.setProperty(SUB_BATCH_ENTITY, subBatchEntity); + + sendToCamelRoute(RouteId.INIT_SUB_BATCH, exchange); + + Boolean failed = exchange.getProperty(INIT_SUB_BATCH_FAILED, Boolean.class); + if (Boolean.TRUE.equals(failed)) { + failureSubBatches.add(currentFile); + } else { + successSubBatches.add(currentFile); + } + + // Update Zeebe variables + variables.put(REMAINING_SUB_BATCH, subBatches.size()); + variables.put(SUB_BATCHES, new ArrayList<>(subBatches)); + variables.put(INIT_SUCCESS_SUB_BATCHES, new ArrayList<>(successSubBatches)); + variables.put(INIT_FAILURE_SUB_BATCHES, new ArrayList<>(failureSubBatches)); + + client.newCompleteCommand(job.getKey()).variables(variables).send().join(); + + logger.info("Completed INIT_SUB_BATCH worker. Remaining sub-batches: {}", subBatches.size()); + }); + } +} diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/worker/MergeBackWorker.java b/src/main/java/org/mifos/processor/bulk/zeebe/worker/MergeBackWorker.java new file mode 100644 index 00000000..f5ec54de --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/zeebe/worker/MergeBackWorker.java @@ -0,0 +1,83 @@ +package org.mifos.processor.bulk.zeebe.worker; + +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.BATCH_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.INIT_FAILURE_SUB_BATCHES; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.INIT_SUCCESS_SUB_BATCHES; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.MERGE_COMPLETED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.MERGE_FAILED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.MERGE_FILE_LIST; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.MERGE_ITERATION; +import static 
org.mifos.processor.bulk.zeebe.ZeebeVariables.RESULT_FILE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.SUB_BATCHES; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import org.apache.camel.Exchange; +import org.apache.camel.support.DefaultExchange; +import org.mifos.processor.bulk.camel.routes.RouteId; +import org.springframework.stereotype.Component; + +@Component +public class MergeBackWorker extends BaseWorker { + + @Override + public void setup() { + newWorker(Worker.MERGE_BACK, (client, job) -> { + logger.debug("Job '{}' started from process '{}' with key {}", job.getType(), job.getBpmnProcessId(), job.getKey()); + Map variables = job.getVariablesAsMap(); + if (workerConfig.isMergeBackWorkerEnabled) { + variables.put(MERGE_FAILED, false); + } + + int mergeIteration = (int) variables.getOrDefault(MERGE_ITERATION, 1); + List subBatches = (List) variables.get(SUB_BATCHES); + List successSubBatches = (List) variables.get(INIT_SUCCESS_SUB_BATCHES); + List failureSubBatches = (List) variables.get(INIT_FAILURE_SUB_BATCHES); + + for (int i = 0; i < successSubBatches.size(); i++) { + String initFile = successSubBatches.remove(i); + successSubBatches.add(i, String.format("Result_%s", initFile)); + } + for (int i = 0; i < failureSubBatches.size(); i++) { + String initFile = failureSubBatches.remove(i); + failureSubBatches.add(i, String.format("Result_%s", initFile)); + } + + List mergeFileList = (List) variables.get(MERGE_FILE_LIST); + if (mergeFileList == null) { + mergeFileList = new ArrayList<>(); + mergeFileList.addAll(successSubBatches); + mergeFileList.addAll(failureSubBatches); + mergeFileList.addAll(subBatches); + } + + Exchange exchange = new DefaultExchange(camelContext); + exchange.setProperty(MERGE_FILE_LIST, mergeFileList); + exchange.setProperty(MERGE_ITERATION, mergeIteration); + exchange.setProperty(BATCH_ID, variables.get(BATCH_ID)); + + logger.info("Merge list: {}", mergeFileList); + + 
sendToCamelRoute(RouteId.MERGE_BACK, exchange); + + Boolean mergeCompletedObj = exchange.getProperty(MERGE_COMPLETED, Boolean.class); + boolean mergeCompleted = mergeCompletedObj != null ? mergeCompletedObj : false; + if (mergeCompleted) { + Boolean mergeFailedObj = exchange.getProperty(MERGE_FAILED, Boolean.class); + variables.put(MERGE_FAILED, mergeFailedObj != null ? mergeFailedObj : false); + String resultFile = exchange.getProperty(RESULT_FILE, String.class); + if (resultFile != null && !resultFile.isEmpty()) { + variables.put(RESULT_FILE, resultFile); + } + } + + variables.put(MERGE_FILE_LIST, exchange.getProperty(MERGE_FILE_LIST, List.class)); + variables.put(MERGE_COMPLETED, mergeCompleted); + variables.put(MERGE_ITERATION, ++mergeIteration); + + client.newCompleteCommand(job.getKey()).variables(variables).send(); + }); + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/worker/OrderingWorker.java b/src/main/java/org/mifos/processor/bulk/zeebe/worker/OrderingWorker.java new file mode 100644 index 00000000..d8970e77 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/zeebe/worker/OrderingWorker.java @@ -0,0 +1,105 @@ +package org.mifos.processor.bulk.zeebe.worker; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.SERVER_FILE_NAME; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FILE_NAME; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.ORDERED_BY; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.ORDERING_FAILED; + +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import org.apache.camel.Exchange; +import org.apache.camel.support.DefaultExchange; +import org.mifos.processor.bulk.camel.routes.RouteId; +import org.mifos.processor.bulk.schema.Transaction; +import org.springframework.stereotype.Component; + +@Component +public class OrderingWorker extends BaseWorker { + + @Override + public void setup() { + + 
/** + * This worker is responsible for ordering the data set based on field configuration. Performs below tasks. 1. + * Downloads the file from cloud. 2. Parse the data into POJO. 3. Re-order the data based on field configured in + * application.yaml 4. Uploads the updated file in cloud + */ + newWorker(Worker.ORDERING, (client, job) -> { + logger.debug("Job '{}' started from process '{}' with key {}", job.getType(), job.getBpmnProcessId(), job.getKey()); + Map variables = job.getVariablesAsMap(); + Exchange exchange = new DefaultExchange(camelContext); + + if (workerConfig.isOrderingWorkerEnabled) { + variables.put(ORDERING_FAILED, false); + String filename = (String) variables.get(FILE_NAME); + exchange.setProperty(SERVER_FILE_NAME, filename); + + try { + sendToCamelRoute(RouteId.ORDERING, exchange); + assert !exchange.getProperty(ORDERING_FAILED, Boolean.class); + } catch (Exception e) { + variables.put(ORDERING_FAILED, true); + } + variables.put(ORDERING_FAILED, false); + variables.put(ORDERED_BY, exchange.getProperty(ORDERED_BY)); + } + client.newCompleteCommand(job.getKey()).variables(variables).send(); + }); + } + + private void removeDuplicates(List transactionList, boolean orderingEnabled) { + if (orderingEnabled) { + removeDuplicatesIfOrderingEnabled(transactionList); + return; + } + removeDuplicatesIfOrderingDisabled(transactionList); + } + + private void removeDuplicatesIfOrderingEnabled(List transactionList) { + + for (int i = 0; i < transactionList.size() - 1; i++) { + Transaction currentTransaction = transactionList.get(i); + Transaction nextTransaction = transactionList.get(i + 1); + + if (currentTransaction == null || nextTransaction == null) { + continue; + } + String currentPayeeDetail = fetchPayeeDetail(currentTransaction); + String nextPayeeDetail = fetchPayeeDetail(nextTransaction); + + if (currentPayeeDetail.equals(nextPayeeDetail)) { + currentTransaction.setNote("Duplicate transaction."); + } + } + } + + private void 
removeDuplicatesIfOrderingDisabled(List transactionList) { + Set set = new HashSet<>(); + + if (Objects.isNull(transactionList)) { + return; + } + + for (Transaction transaction : transactionList) { + String payeeDetail = fetchPayeeDetail(transaction); + if (set.contains(payeeDetail)) { + transaction.setNote("Duplicate transaction."); + } else { + set.add(payeeDetail); + } + } + } + + private String fetchPayeeDetail(Transaction transaction) { + String payeeIdentifier = transaction.getPayeeIdentifier(); + String payeeIdentifierType = transaction.getPayeeIdentifierType(); + String amount = transaction.getAmount(); + String currency = transaction.getCurrency(); + + return String.format("%s%s%s%s", payeeIdentifier, payeeIdentifierType, amount, currency); + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/worker/PartyLookupWorker.java b/src/main/java/org/mifos/processor/bulk/zeebe/worker/PartyLookupWorker.java new file mode 100644 index 00000000..0b8a0301 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/zeebe/worker/PartyLookupWorker.java @@ -0,0 +1,25 @@ +package org.mifos.processor.bulk.zeebe.worker; + +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PARTY_LOOKUP_FAILED; + +import java.util.Map; +import org.springframework.stereotype.Component; + +@Component +public class PartyLookupWorker extends BaseWorker { + + @Override + public void setup() { + newWorker(Worker.PARTY_LOOKUP, (client, job) -> { + logger.debug("Job '{}' started from process '{}' with key {}", job.getType(), job.getBpmnProcessId(), job.getKey()); + Map variables = job.getVariablesAsMap(); + + if (workerConfig.isPartyLookUpWorkerEnabled) { + variables.put(PARTY_LOOKUP_FAILED, false); + } + + client.newCompleteCommand(job.getKey()).variables(variables).send(); + }); + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/worker/SendCallbackWorker.java b/src/main/java/org/mifos/processor/bulk/zeebe/worker/SendCallbackWorker.java new file mode 100644 
index 00000000..723e64b2 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/zeebe/worker/SendCallbackWorker.java @@ -0,0 +1,78 @@ +package org.mifos.processor.bulk.zeebe.worker; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.CALLBACK_RESPONSE_CODE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.BATCH_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CALLBACK; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CALLBACK_RETRY; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CALLBACK_SUCCESS; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.CLIENT_CORRELATION_ID; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.COMPLETION_RATE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.COMPLETION_THRESHOLD; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.ERROR_CODE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.ERROR_DESCRIPTION; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.MAX_CALLBACK_RETRY; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.MAX_STATUS_RETRY; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PHASES; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PHASE_COUNT; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.RETRY; + +import java.util.Map; +import org.apache.camel.Exchange; +import org.apache.camel.support.DefaultExchange; +import org.mifos.processor.bulk.camel.routes.RouteId; +import org.springframework.stereotype.Component; + +@Component +public class SendCallbackWorker extends BaseWorker { + + @Override + public void setup() { + newWorker(Worker.SEND_CALLBACK, (client, job) -> { + logger.debug("Job '{}' started from process '{}' with key {}", job.getType(), job.getBpmnProcessId(), job.getKey()); + Map variables = job.getVariablesAsMap(); + + int retry = variables.getOrDefault(CALLBACK_RETRY, 0).equals(variables.get(MAX_STATUS_RETRY)) ? 
0 + : (int) variables.getOrDefault(CALLBACK_RETRY, 0); + Exchange exchange = new DefaultExchange(camelContext); + if (variables.get(CALLBACK_RETRY) != null && variables.get(CALLBACK_RETRY).equals(variables.get(MAX_CALLBACK_RETRY))) { + exchange.setProperty(CALLBACK_SUCCESS, false); + exchange.setProperty(CALLBACK_RESPONSE_CODE, variables.get(CALLBACK_RESPONSE_CODE)); + } else { + exchange = new DefaultExchange(camelContext); + exchange.setProperty(MAX_CALLBACK_RETRY, variables.get(MAX_CALLBACK_RETRY)); + exchange.setProperty(CALLBACK_RETRY, variables.getOrDefault(CALLBACK_RETRY, 0)); + exchange.setProperty(CALLBACK, variables.get(CALLBACK)); + exchange.setProperty(COMPLETION_RATE, variables.get(COMPLETION_RATE)); + exchange.setProperty(PHASES, variables.get(PHASES)); + exchange.setProperty(PHASE_COUNT, variables.get(PHASE_COUNT)); + exchange.setProperty(BATCH_ID, variables.get(BATCH_ID)); + exchange.setProperty(CLIENT_CORRELATION_ID, variables.get(CLIENT_CORRELATION_ID)); + Integer maxRetry = Integer.parseInt(variables.get(MAX_STATUS_RETRY).toString()); + Integer completionRate = Integer.parseInt(variables.get(COMPLETION_RATE).toString()); + Integer completionThreshold = Integer.parseInt(variables.get(COMPLETION_THRESHOLD).toString()); + Integer statusRetry = Integer.parseInt(variables.get(RETRY).toString()); + if (statusRetry >= maxRetry || completionRate >= completionThreshold) { + sendToCamelRoute(RouteId.SEND_CALLBACK, exchange); + } + } + Boolean callbackSuccess = exchange.getProperty(CALLBACK_SUCCESS, Boolean.class); + if (callbackSuccess == null || !callbackSuccess) { + variables.put(ERROR_CODE, exchange.getProperty(ERROR_CODE)); + variables.put(ERROR_DESCRIPTION, exchange.getProperty(ERROR_DESCRIPTION)); + logger.info("Error: {}, {}", variables.get(ERROR_CODE), variables.get(ERROR_DESCRIPTION)); + } else { + variables.put(CALLBACK_SUCCESS, true); + } + + variables.put(CALLBACK_RETRY, exchange.getProperty(CALLBACK_RETRY)); + 
variables.put(CALLBACK_RESPONSE_CODE, exchange.getProperty(CALLBACK_RESPONSE_CODE)); + variables.put(PHASE_COUNT, exchange.getProperty(PHASE_COUNT)); + variables.put(PHASES, exchange.getProperty(PHASES)); + + logger.debug("Retry: {} and Response Code {}", exchange.getProperty(CALLBACK_RETRY), + exchange.getProperty(CALLBACK_RESPONSE_CODE)); + client.newCompleteCommand(job.getKey()).variables(variables).send(); + }); + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/worker/SplittingWorker.java b/src/main/java/org/mifos/processor/bulk/zeebe/worker/SplittingWorker.java new file mode 100644 index 00000000..f3bd3088 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/zeebe/worker/SplittingWorker.java @@ -0,0 +1,80 @@ +package org.mifos.processor.bulk.zeebe.worker; + +import static org.mifos.processor.bulk.camel.config.CamelProperties.SERVER_FILE_NAME; +import static org.mifos.processor.bulk.camel.config.CamelProperties.SERVER_SUB_BATCH_FILE_NAME_ARRAY; +import static org.mifos.processor.bulk.camel.config.CamelProperties.SUB_BATCH_CREATED; +import static org.mifos.processor.bulk.camel.config.CamelProperties.SUB_BATCH_DETAILS; +import static org.mifos.processor.bulk.camel.config.CamelProperties.ZEEBE_VARIABLE; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.FILE_NAME; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.INIT_FAILURE_SUB_BATCHES; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.INIT_SUCCESS_SUB_BATCHES; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PARTY_LOOKUP_FAILED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.SPLITTING_FAILED; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.SUB_BATCHES; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import org.apache.camel.Exchange; +import org.apache.camel.support.DefaultExchange; +import org.mifos.processor.bulk.camel.routes.RouteId; +import 
org.mifos.processor.bulk.schema.SubBatchEntity; +import org.springframework.stereotype.Component; + +@Component +public class SplittingWorker extends BaseWorker { + + @Override + public void setup() { + + /** + * This worker performs below tasks 1. Downloads the original CSV from cloud 2. Splits entire CSV into multiple + * CSV of sub-batches, based on configured sub-batch size. 3. Uploads the sub-batch CSVs to cloud 4. Sets + * zeebeVariable [SPLITTING_FAILED, SUB_BATCHES, SUB_BATCH_CREATED] + */ + newWorker(Worker.SPLITTING, (client, job) -> { + logger.info("Job '{}' started from process '{}' with key {}", job.getType(), job.getBpmnProcessId(), job.getKey()); + Map variables = job.getVariablesAsMap(); + if (workerConfig.isSplittingWorkerEnabled) { + variables.put(SPLITTING_FAILED, false); + } + + String filename = (String) variables.get(FILE_NAME); + Boolean partyLookupFailed = (Boolean) variables.get(PARTY_LOOKUP_FAILED); + Exchange exchange = new DefaultExchange(camelContext); + exchange.setProperty(SERVER_FILE_NAME, filename); + exchange.setProperty(ZEEBE_VARIABLE, variables); + exchange.setProperty("partyLookupFailed", partyLookupFailed); + exchange.setProperty("batchAccountLookup", + variables.get("batchAccountLookup") != null ? 
variables.get("batchAccountLookup") : false); + + exchange.setProperty(SUB_BATCH_DETAILS, new ArrayList()); + + try { + sendToCamelRoute(RouteId.SPLITTING, exchange); + assert !exchange.getProperty(SPLITTING_FAILED, Boolean.class); + } catch (Exception e) { + variables.put(SPLITTING_FAILED, true); + } + + Boolean subBatchCreated = exchange.getProperty(SUB_BATCH_CREATED, Boolean.class); + List serverSubBatchFileList = exchange.getProperty(SERVER_SUB_BATCH_FILE_NAME_ARRAY, List.class); + if (subBatchCreated != null && !subBatchCreated && serverSubBatchFileList != null && serverSubBatchFileList.isEmpty()) { + // if no sub-batches is created, insert the original filename in sub batch array + serverSubBatchFileList.add(filename); + subBatchCreated = false; + } + + variables.put(SPLITTING_FAILED, false); + variables.put(SUB_BATCHES, serverSubBatchFileList); + variables.put(SUB_BATCH_DETAILS, exchange.getProperty(SUB_BATCH_DETAILS, ArrayList.class)); + variables.put(INIT_SUCCESS_SUB_BATCHES, new ArrayList()); + variables.put(INIT_FAILURE_SUB_BATCHES, new ArrayList()); + variables.put(SUB_BATCH_CREATED, subBatchCreated); + + client.newCompleteCommand(job.getKey()).variables(variables).send(); + logger.info("Splitting worker completed"); + }); + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/worker/Worker.java b/src/main/java/org/mifos/processor/bulk/zeebe/worker/Worker.java new file mode 100644 index 00000000..f508dca5 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/zeebe/worker/Worker.java @@ -0,0 +1,21 @@ +package org.mifos.processor.bulk.zeebe.worker; + +public enum Worker { + + PARTY_LOOKUP("partyLookup"), APPROVAL("approval"), ORDERING("ordering"), SPLITTING("splitting"), FORMATTING("formatting"), BATCH_STATUS( + "batchStatus"), SEND_CALLBACK("sendCallback"), MERGE_BACK("mergeSubBatch"), INIT_SUB_BATCH("initSubBatch"), ACCOUNT_LOOKUP( + "accountLookup"), ACCOUNT_LOOKUP_CALLBACK("accountLookupCallback"), BATCH_AGGREGATE("batchAggregate"), 
AUTHORIZATION( + "authorization"), DE_DEPLICATION("deduplicate"), BATCH_ACCOUNT_LOOKUP( + "batchAccountLookup"), BATCH_ACCOUNT_LOOKUP_CALLBACK("batchAccountLookupCallback"); + + private final String value; + + Worker(String s) { + value = s; + } + + public String getValue() { + return value; + } + +} diff --git a/src/main/java/org/mifos/processor/bulk/zeebe/worker/WorkerConfig.java b/src/main/java/org/mifos/processor/bulk/zeebe/worker/WorkerConfig.java new file mode 100644 index 00000000..29da2840 --- /dev/null +++ b/src/main/java/org/mifos/processor/bulk/zeebe/worker/WorkerConfig.java @@ -0,0 +1,39 @@ +package org.mifos.processor.bulk.zeebe.worker; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class WorkerConfig { + + @Value("${config.batchAggregate.enable}") + public boolean isBatchAggregateEnabled; + + @Value("${config.partylookup.enable}") + public boolean isPartyLookUpWorkerEnabled; + + @Value("${config.approval.enable}") + public boolean isApprovalWorkerEnabled; + + @Value("${config.ordering.enable}") + public boolean isOrderingWorkerEnabled; + + @Value("${config.splitting.enable}") + public boolean isSplittingWorkerEnabled; + + @Value("${config.formatting.enable}") + public boolean isFormattingWorkerEnabled; + + @Value("${config.mergeback.enable}") + public boolean isMergeBackWorkerEnabled; + + @Value("${config.completion-threshold-check.enable}") + public boolean isCompletionThresholdCheckEnabled; + + @Value("${config.deduplication.enabled}") + public boolean isTransactionDeduplicationEnabled; + + @Value("${config.authorization.enabled}") + public boolean isAuthorizationWorkerEnabled; + +} diff --git a/src/main/java/org/mifos/processor/exceptionmapper/GlobalExceptionMapper.java b/src/main/java/org/mifos/processor/exceptionmapper/GlobalExceptionMapper.java new file mode 100644 index 00000000..5b35c288 --- /dev/null +++ 
b/src/main/java/org/mifos/processor/exceptionmapper/GlobalExceptionMapper.java @@ -0,0 +1,25 @@ +package org.mifos.processor.exceptionmapper; + +import io.camunda.zeebe.client.api.command.ClientStatusException; +import org.mifos.processor.bulk.schema.ExceptionMapperDTO; +import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.ExceptionHandler; +import org.springframework.web.bind.annotation.RestControllerAdvice; + +@RestControllerAdvice +public class GlobalExceptionMapper { + + @ExceptionHandler(ClientStatusException.class) + public ResponseEntity handleClientStatusException(ClientStatusException ex) { + ExceptionMapperDTO dto = new ExceptionMapperDTO("01", "Process definition not found"); + return ResponseEntity.status(HttpStatus.PRECONDITION_FAILED).contentType(MediaType.APPLICATION_JSON).body(dto); + } + + @ExceptionHandler(Exception.class) + public ResponseEntity handleException(Exception ex) { + ExceptionMapperDTO dto = new ExceptionMapperDTO("01", ex.getMessage()); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).contentType(MediaType.APPLICATION_JSON).body(dto); + } +} diff --git a/src/main/resources/application-paymentmode.yaml b/src/main/resources/application-paymentmode.yaml new file mode 100644 index 00000000..3159888a --- /dev/null +++ b/src/main/resources/application-paymentmode.yaml @@ -0,0 +1,15 @@ +payment-mode: + mappings: + - id: "GSMA" + type: "PAYMENT" + endpoint: "/channel/gsma/transfer" + - id: "MOJALOOP" + type: "PAYMENT" + endpoint: "/channel/transfer" + - id: "SLCB" + type: "BULK" + endpoint: "bulk_connector_{MODE}-{dfspid}" + - id: "CLOSEDLOOP" + type: "BULK" + endpoint: "bulk_connector_{MODE}-{dfspid}" + debulkingDfspid: "lion" \ No newline at end of file diff --git a/src/main/resources/application-test.yaml b/src/main/resources/application-test.yaml new file mode 100644 index 00000000..2b80f785 
--- /dev/null +++ b/src/main/resources/application-test.yaml @@ -0,0 +1,9 @@ +camel: + server-port: 5002 + +kafka: + bootstrapAddress: "localhost:9092" + +zeebe: + broker: + contactpoint: "localhost:26500" diff --git a/src/main/resources/application.yaml b/src/main/resources/application.yaml index bc1cf35d..9a33c08f 100644 --- a/src/main/resources/application.yaml +++ b/src/main/resources/application.yaml @@ -1,13 +1,17 @@ camel: - server-port: 5000 + server-port: 0 + disable-ssl: false springboot: main-run-controller: true dataformat: json-jackson: auto-discover-object-mapper: true -application: - bucket-name: paymenthub-ee-dev +cors: + allowed-origins: + - "http://ops.mifos.gazelle.localhost" + - "https://ops.mifos.gazelle.test" + kafka: bootstrapAddress: "kafka:9092" @@ -17,24 +21,48 @@ kafka: slcb: name: slcb +application: + bucket-name: paymenthub-ee + zeebe: client: - max-execution-threads: 1000 + max-execution-threads: 50 evenly-allocated-max-jobs: 1000 + poll-interval: 10 # max-execution-threads: 100 # number-of-workers: 8 # evenly-allocated-max-jobs: "#{${zeebe.client.max-execution-threads} / ${zeebe.client.number-of-workers}}" broker: contactpoint: "zeebe-zeebe-gateway:26500" +operations-app: + contactpoint: "https://ops-bk.mifos.gazelle.localhost" + username: "mifos" + password: "password" + endpoints: + auth: "/oauth/token" + batch-summary: "/api/v1/batch" + batch-transaction: "/api/v1/batch/transactions" + batch-aggregate: "/api/v1/batch/" + +mock-payment-schema: + contactpoint: "http://ph-ee-connector-mock-payment-schema:8080" + endpoints: + authorization: "/batches/" + +channel: + hostname: "https://ph-ee-connector-channel:8443" + cloud: aws: enabled: true + s3BaseUrl: "http://minio.paymenthub.svc.cluster.local:9000" credentials: - access-key: ${AWS_ACCESS_KEY:AKIAX32JM37TYOG3QUJU} - secret-key: ${AWS_SECRET_KEY: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} + access-key: ${AWS_ACCESS_KEY:root} + secret-key: ${AWS_SECRET_KEY:password} + region: - static: 
us-east-2 + static: us-east-1 stack: auto: false azure: @@ -43,8 +71,149 @@ cloud: connection-string: -dfspids: "ibank-usa,ibank-india" +tenants: "greenbank, bluebank, redbank" + +#payment mode moved to a separate application property file for helm rewrite + +config: + minimum-successful-tx-ratio: 0.90 + batchAggregate: + enable: true + partylookup: + enable: true + authorization: + enabled: true + approval: + enable: true + ordering: + enable: true + field: "payerIdentifier" + splitting: + enable: true + sub-batch-size: 5 + formatting: + enable: false + standard: "DEFAULT" + mergeback: + enable: false + backpressure: + enable: false + completion-threshold-check: + enable: false + completion-threshold: 95 # in percentage + max-retry: 3 #can be as high as 30 + delay: 2 # in seconds + deduplication: + enabled: true + +callback: + max-retry: 3 + url: "http://httpstat.us/503" + +pollingApi: + path : "/batch/Summary/" + timer: "120" + +callback-phases: + values: + - 20 + - 40 + - 60 + - 80 + - 100 +server: + ssl: + key-alias: "tomcat-https" + key-store: "file:/tls/keystore.p12" + key-store-type: PKCS12 + key-password: "changeit" + key-store-password: "changeit" + port: 8443 # HTTPS port for Spring Boot (external traffic, gRPC) + + +security: + jws: + enable: false + response: + enable: false + +identity_account_mapper: + hostname : "http://ph-ee-identity-account-mapper:80" + account_lookup: /beneficiary + account_lookup_callback: /accountLookupCallback + batch_account_lookup: /accountLookup + batch_account_lookup_callback: /batchAccountLookupCallback + +bulk_processor: + hostname : "http://ph-ee-connector-bulk:82" + +csv: + columnNames: "id,request_id,payment_mode,payer_identifier_type,payer_identifier,payee_identifier_type,payee_identifier,amount,currency,note" + size : 100000 # in bytes + +budget-account: + registeringInstitutions: + - id: "greenbank" + programs: + - id: "SocialWelfare" + name: "Social Welfare" + identifierType: "MSISDN" + identifierValue: 
"0413509790" + +management: + endpoint: + health: + enabled: true + probes: + enabled: true + liveness: + enabled: true + readiness: + enabled: true + +payment-mode: + mappings: + - id: "GSMA" + type: "PAYMENT" + endpoint: "/channel/gsma/transfer" + - id: "MOJALOOP" + type: "PAYMENT" + endpoint: "/channel/transfer" + - id: "SLCB" + type: "BULK" + endpoint: "bulk_connector_{MODE}-{dfspid}" + - id: "CLOSEDLOOP" + type: "BULK" + endpoint: "bulk_connector_{MODE}-{dfspid}" + - id: "MASTERCARD_CBS" + type: "PAYMENT" + endpoint: "/channel/transfer" + +batch-authorization: + callback-url: "${BULK_PROCESSOR_CALLBACK_URL:https://bulk-processor.mifos.gazelle.test/authorization/callback}" + +pubsub: + room: + code: "covid-19" + class: "GOV" + event: + type: "bulk" + +security-server: + country: "INDIA" + organisation: mifos + host: https://SECURITYSERVER + baseuri: /r1/{country}/GOV/{orgs} + endpoints: + subs: /room/subs + +gov-stack-client: + header-key: "X-GovStack-Client" + header-value: "PAYMENT-BB" + +# TD Dec 2025 bpmn.flows looks like it is not actually used by bulk-processor +# so commenting out but leaving for now as bpmn: flows: payment-transfer: "PayerFundTransfer-{dfspid}" @@ -57,4 +226,33 @@ bpmn: gsma-link-based-payment: "gsma_link_transfer" international-remittance-payee: "international_remittance_payee_process-{dfspid}" international-remittance-payer: "international_remittance_payer_process-{dfspid}" - debit-party-process: "debit_party_process-{dfspid}" \ No newline at end of file + debit-party-process: "debit_party_process-{dfspid}" + #bulk-processor: "bulk_processor-{dfspid}" + bulk-processor: "bulk_processor_account_lookup-DFSPID.bpmn" + slcb: "slcb-{dfspid}" + +bpmns: + tenants: + - id: "greenbank" + flows: + payment-transfer: "minimal_mock_fund_transfer-{dfspid}" + batch-transactions: "bulk_processor-{dfspid}" + batch-transactions-govstack: "bulk_processor_account_lookup-{dfspid}" + - id: "greenbank-mastercard" + flows: + payment-transfer: 
"MastercardFundTransfer-{dfspid}" + batch-transactions: "bulk_processor_account_lookup-{dfspid}" + - id: "redbank" + flows: + payment-transfer: "minimal_mock_fund_transfer-{dfspid}" + batch-transactions: "bulk_processor-{dfspid}" + batch-transactions-govstack: "bulk_processor_account_lookup-{dfspid}" + - id: "bluebank" + flows: + batch-transactions: "bulk_processor-{dfspid}" + # - id: "rhino" + # flows: + # batch-transactions: "bulk_processor_account_lookup-{dfspid}" + # - id: "lion" + # flows: + # batch-transactions: "bulk_processor-{dfspid}" diff --git a/src/test/java/org/mifos/pheeprocessorbulk/BulkProcessorApplicationTests.java b/src/test/java/org/mifos/pheeprocessorbulk/BulkProcessorApplicationTests.java deleted file mode 100644 index 9c62bd7b..00000000 --- a/src/test/java/org/mifos/pheeprocessorbulk/BulkProcessorApplicationTests.java +++ /dev/null @@ -1,16 +0,0 @@ -package org.mifos.pheeprocessorbulk; - -import org.junit.jupiter.api.Test; -import org.springframework.boot.test.context.SpringBootTest; - -import java.util.UUID; - -@SpringBootTest -class BulkProcessorApplicationTests { - - @Test - void contextLoads() { - System.out.println(UUID.randomUUID().toString()); - } - -} diff --git a/src/test/java/org/mifos/processor/BulkProcessorApplicationTests.java b/src/test/java/org/mifos/processor/BulkProcessorApplicationTests.java new file mode 100644 index 00000000..a6473dd9 --- /dev/null +++ b/src/test/java/org/mifos/processor/BulkProcessorApplicationTests.java @@ -0,0 +1,21 @@ +package org.mifos.processor; + +import java.util.UUID; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.ActiveProfiles; + +@SpringBootTest +@ActiveProfiles("test") +class BulkProcessorApplicationTests { + + public Logger logger = LoggerFactory.getLogger(this.getClass()); + + @Test + void contextLoads() { + logger.debug("{}", 
UUID.randomUUID()); + } + +} diff --git a/src/test/java/org/mifos/processor/cucumber/CucumberContext.java b/src/test/java/org/mifos/processor/cucumber/CucumberContext.java new file mode 100644 index 00000000..dd58ba78 --- /dev/null +++ b/src/test/java/org/mifos/processor/cucumber/CucumberContext.java @@ -0,0 +1,29 @@ +package org.mifos.processor.cucumber; + +import static com.google.common.truth.Truth.assertThat; + +import io.cucumber.spring.CucumberContextConfiguration; +import org.apache.camel.ProducerTemplate; +import org.apache.camel.test.spring.junit5.CamelSpringBootTest; +import org.apache.camel.test.spring.junit5.UseAdviceWith; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.ActiveProfiles; + +@CucumberContextConfiguration +@SpringBootTest +@CamelSpringBootTest +@UseAdviceWith +@ActiveProfiles("test") +public class CucumberContext { + + @Autowired + private ProducerTemplate producerTemplate; + + @Test + void contextLoads() { + assertThat(producerTemplate).isNotNull(); + } + +} diff --git a/src/test/java/org/mifos/processor/cucumber/stepdef/BaseStepDef.java b/src/test/java/org/mifos/processor/cucumber/stepdef/BaseStepDef.java new file mode 100644 index 00000000..5d00142d --- /dev/null +++ b/src/test/java/org/mifos/processor/cucumber/stepdef/BaseStepDef.java @@ -0,0 +1,40 @@ +package org.mifos.processor.cucumber.stepdef; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.camel.CamelContext; +import org.apache.camel.Exchange; +import org.apache.camel.ProducerTemplate; +import org.mifos.processor.bulk.config.ExternalApiPayloadConfig; +import org.mifos.processor.bulk.config.PaymentModeConfiguration; +import org.mifos.processor.bulk.config.PaymentModeMapping; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +// 
this class is the base for all the cucumber step definitions +public class BaseStepDef { + + @Autowired + ProducerTemplate template; + + @Autowired + CamelContext context; + + @Autowired + ObjectMapper objectMapper; + + @Autowired + PaymentModeConfiguration paymentModeConfiguration; + + @Autowired + ExternalApiPayloadConfig externalApiPayloadConfig; + + Logger logger = LoggerFactory.getLogger(this.getClass()); + + protected static String tenant; + protected static String paymentMode; + protected static PaymentModeMapping paymentModeMapping; + + protected static Exchange exchange; + +} diff --git a/src/test/java/org/mifos/processor/cucumber/stepdef/ConfigurationTestStepDef.java b/src/test/java/org/mifos/processor/cucumber/stepdef/ConfigurationTestStepDef.java new file mode 100644 index 00000000..c4957f46 --- /dev/null +++ b/src/test/java/org/mifos/processor/cucumber/stepdef/ConfigurationTestStepDef.java @@ -0,0 +1,81 @@ +package org.mifos.processor.cucumber.stepdef; + +import static com.google.common.truth.Truth.assertThat; + +import io.cucumber.java.en.And; +import io.cucumber.java.en.Given; +import io.cucumber.java.en.Then; +import io.cucumber.java.en.When; +import java.util.function.Function; +import org.apache.camel.Exchange; +import org.mifos.processor.bulk.config.PaymentModeMapping; +import org.mifos.processor.bulk.config.PaymentModeType; +import org.mifos.processor.bulk.utility.Utils; + +public class ConfigurationTestStepDef extends BaseStepDef { + + @Given("Application context is loaded") + public void applicationContextIsLoaded() { + assertThat(context).isNotNull(); + } + + @When("I assert the payment mode config") + public void paymentModeConfigAssert() { + assertThat(paymentModeConfiguration).isNotNull(); + } + + @Then("I should get the non empty payment modes") + public void nonEmptyPaymentModesCheck() { + assertThat(paymentModeConfiguration.getMappings()).isNotEmpty(); + } + + @And("I should be able fetch the mapping for mode {string}") + public void 
fetchMappingForMode(String mode) { + PaymentModeMapping mapping = paymentModeConfiguration.getByMode(mode); + assertThat(mapping).isNotNull(); + BaseStepDef.paymentModeMapping = mapping; + } + + @And("I should get enum value {} for mode {string}") + public void getEnumValueForMode(PaymentModeType modeType, String mode) { + PaymentModeMapping mapping = paymentModeConfiguration.getByMode(mode); + assertThat(mapping.getType()).isEqualTo(modeType); + } + + @When("I have payment mode {string}") + public void setPaymentMode(String paymentMode) { + BaseStepDef.paymentMode = paymentMode; + assertThat(BaseStepDef.paymentMode).isNotEmpty(); + } + + @Then("I should get the bulk connector bpmn name {string}") + public void validateBulkConnectorBpmnName(String bpmnName) { + PaymentModeMapping mapping = BaseStepDef.paymentModeMapping; + String generatedBpmnName = Utils.getBulkConnectorBpmnName(mapping.getEndpoint(), mapping.getId(), BaseStepDef.tenant); + assertThat(bpmnName).isEqualTo(generatedBpmnName); + } + + @And("I have tenant as {string}") + public void setTenant(String tenant) { + BaseStepDef.tenant = tenant; + assertThat(BaseStepDef.tenant).isNotEmpty(); + } + + @When("I assert the external api payload config") + public void externalApiPayloadConfigAssert() { + assertThat(externalApiPayloadConfig).isNotNull(); + } + + @Then("I should get the non empty external api payload config") + public void nonEmptyExternalApiPayloadConfigCheck() { + int size = externalApiPayloadConfig.getPayloadMap().keySet().size(); + assertThat(size).isGreaterThan(0); + } + + @And("I should be able fetch the payload setter for mode {string}") + public void fetchPayloadSetterForMode(String mode) { + Function payloadSetter = externalApiPayloadConfig.getApiPayloadSetter(mode); + assertThat(payloadSetter).isNotNull(); + } + +} diff --git a/src/test/java/org/mifos/processor/cucumber/stepdef/InitRouteStepDef.java b/src/test/java/org/mifos/processor/cucumber/stepdef/InitRouteStepDef.java new file mode 
100644 index 00000000..97717ba6 --- /dev/null +++ b/src/test/java/org/mifos/processor/cucumber/stepdef/InitRouteStepDef.java @@ -0,0 +1,95 @@ +package org.mifos.processor.cucumber.stepdef; + +import static com.google.common.truth.Truth.assertThat; +import static org.mifos.processor.bulk.camel.config.CamelProperties.TRANSACTION_LIST_ELEMENT; +import static org.mifos.processor.bulk.zeebe.ZeebeVariables.PAYMENT_MODE; + +import com.fasterxml.jackson.core.JsonProcessingException; +import io.cucumber.java.en.And; +import io.cucumber.java.en.Given; +import io.cucumber.java.en.Then; +import io.cucumber.java.en.When; +import java.util.UUID; +import org.mifos.connector.common.channel.dto.TransactionChannelRequestDTO; +import org.mifos.connector.common.gsma.dto.GSMATransaction; +import org.mifos.processor.bulk.schema.Transaction; + +public class InitRouteStepDef extends BaseStepDef { + + @Given("I can load camel context") + public void loadCamelContext() { + context.start(); + assertThat(template).isNotNull(); + assertThat(context).isNotNull(); + } + + @When("I call the payment-mode-validation route with {string} payment mode") + public void callPaymentModeValidationRoute(String paymentMode) { + exchange = template.send("direct:validate-payment-mode", exchange -> { + exchange.setProperty(PAYMENT_MODE, paymentMode); + }); + } + + @Then("I should get a non null exchange variable") + public void exchangeVariableNullCheck() { + assertThat(exchange).isNotNull(); + } + + @And("{string} exchange variable should be {string}") + public void exchangeVariableBooleanCheck(String variableKey, String variableValue) { + if (variableValue.equalsIgnoreCase("true")) { + assertThat(exchange.getProperty(variableKey, Boolean.class)).isTrue(); + } else { + assertThat(exchange.getProperty(variableKey, Boolean.class)).isFalse(); + } + } + + @When("I call the runtime-payload route with {string} payment mode") + public void callRuntimePayloadTestRoute(String paymentMode) { + exchange = 
template.send("direct:dynamic-payload-setter", exchange -> { + exchange.setProperty(PAYMENT_MODE, paymentMode); + + Transaction transaction = new Transaction(); + transaction.setId(0); + transaction.setRequestId(UUID.randomUUID().toString()); + transaction.setPaymentMode(paymentMode); + transaction.setAmount("100"); + transaction.setPayerIdentifierType("MSISDN"); + transaction.setPayeeIdentifierType("MSISDN"); + transaction.setPayerIdentifier("1234567890"); + transaction.setPayeeIdentifier("0987654321"); + transaction.setCurrency("INR"); + exchange.setProperty(TRANSACTION_LIST_ELEMENT, new Transaction()); + + }); + + } + + @And("The body should be of GSMA parcelable") + public void gsmaBodyDeserializeCheck() { + String body = exchange.getIn().getBody(String.class); + assertThat(body).isNotNull(); + + GSMATransaction gsmaTransaction; + try { + gsmaTransaction = objectMapper.readValue(body, GSMATransaction.class); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + assertThat(gsmaTransaction).isNotNull(); + } + + @And("The body should be of MOJALOOP parcelable") + public void mojaloopBodyDeserializeCheck() { + String body = exchange.getIn().getBody(String.class); + assertThat(body).isNotNull(); + + TransactionChannelRequestDTO payload; + try { + payload = objectMapper.readValue(body, TransactionChannelRequestDTO.class); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + assertThat(payload).isNotNull(); + } +} diff --git a/src/test/java/resources/configuration.feature b/src/test/java/resources/configuration.feature new file mode 100644 index 00000000..24c16a13 --- /dev/null +++ b/src/test/java/resources/configuration.feature @@ -0,0 +1,21 @@ +Feature: configuration test + + Scenario: Payment mode config test + Given Application context is loaded + When I assert the payment mode config + Then I should get the non empty payment modes + And I should be able fetch the mapping for mode "GSMA" + And I should get enum 
value PAYMENT for mode "MOJALOOP" + + Scenario: Bulk connector bpmn name test + Given Application context is loaded + When I have payment mode "SLCB" + And I have tenant as "gorilla" + And I should be able fetch the mapping for mode "SLCB" + Then I should get the bulk connector bpmn name "bulk_connector_slcb-gorilla" + + Scenario: External api payload config test + Given Application context is loaded + When I assert the external api payload config + Then I should get the non empty external api payload config + And I should be able fetch the payload setter for mode "GSMA" diff --git a/src/test/java/resources/init_route_test.feature b/src/test/java/resources/init_route_test.feature new file mode 100644 index 00000000..453505ae --- /dev/null +++ b/src/test/java/resources/init_route_test.feature @@ -0,0 +1,31 @@ +Feature: init route test + + Scenario: payment mode +ve validation route test + Given I can load camel context + When I call the payment-mode-validation route with "GSMA" payment mode + Then I should get a non null exchange variable + And "isPaymentModeValid" exchange variable should be "true" + + Scenario: payment mode -ve validation route test + Given I can load camel context + When I call the payment-mode-validation route with "P2P" payment mode + Then I should get a non null exchange variable + And "isPaymentModeValid" exchange variable should be "false" + + Scenario: runtime payload test with GSMA mode + Given I can load camel context + When I call the runtime-payload route with "GSMA" payment mode + Then I should get a non null exchange variable + And The body should be of GSMA parcelable + + Scenario: runtime payload test with gsma mode + Given I can load camel context + When I call the runtime-payload route with "gsma" payment mode + Then I should get a non null exchange variable + And The body should be of GSMA parcelable + + Scenario: runtime payload test with MOJALOOP mode + Given I can load camel context + When I call the runtime-payload route with 
"MOJALOOP" payment mode + Then I should get a non null exchange variable + And The body should be of MOJALOOP parcelable