diff --git a/.gitignore b/.gitignore index 9aa0ca7..154365f 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,21 @@ Ftl2Jcl/target/maven-archiver/pom.properties Ftl2Jcl/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst Ftl2Jcl/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst Ftl2Jcl/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst +DB2CHECK/target/db2check-1.0.0-jar-with-dependencies.jar +DB2CHECK/target/db2check-1.0.0.jar +DB2CHECK/target/classes/application.properties +DB2CHECK/target/classes/org/genevaers/db2check/CommandLineHandler.class +DB2CHECK/target/classes/org/genevaers/db2check/GersEnvironment.class +DB2CHECK/target/classes/org/genevaers/db2check/TemplateApplier.class +DB2CHECK/target/maven-archiver/pom.properties +DB2CHECK/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst +DB2CHECK/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst +DB2CHECK/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst +DB2CHECK/target/classes/org/genevaers/db2check/GvbSchemaValidateMain.class +DB2CHECK/target/classes/org/genevaers/db2check/GvbSchemaConfig.class +DB2CHECK/target/classes/org/genevaers/db2check/GvbSchemaValidateA.class +DB2CHECK/target/classes/org/genevaers/db2check/GvbSchemaValidateB.class +DB2CHECK/target/classes/org/genevaers/db2check/GvbSchemaValidateC.class +DB2CHECK/target/classes/org/genevaers/db2check/GvbSchemaValidateD.class +DB2CHECK/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst +FTL2JCL/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst diff --git a/DB2CHECK/README.md b/DB2CHECK/README.md new file mode 100644 index 0000000..6c13d3e --- /dev/null +++ b/DB2CHECK/README.md @@ -0,0 +1,43 @@ +# Validation of GenevaERS DB2 Schema definition + +This application is provided 
to allow you to validate that the DB2 schema has been correctly defined. It is written in Java and uses the JDBC driver for DB2. The program compares digest values for each of the following sets of schema definitions to ensure the correctness of the schema structure. + +## DB2 schema items checked +
+1) stored procedures
+2) table definitions
+3) index definitions
+4) foreign keys
+
+ +## Configuration file + +This contains the userid, password, url of DB2 target and a matching value for the name of schema. It must be located in your home directory. +
+USERID
+RACFPWD
+jdbc:db2://SP13.pok.stglabs.ibm.com:5036/DM13
+SAFR0002
+
+ +## Compiling the program + +From directory DB2CHECK/ use the command: +
+mvn install
+
+ +## Executing the program + +From directory DB2CHECK/target/ use the command: +
+java -jar db2check-1.0.1-jar-with-dependencies.jar
+
+ +## Command line options + +From directory DB2CHECK/target/ use the command: +This generates files containing your schema definitions +
+java -jar db2check-1.0.1-jar-with-dependencies.jar -D
+
\ No newline at end of file diff --git a/DB2CHECK/build.sh b/DB2CHECK/build.sh new file mode 100644 index 0000000..2493257 --- /dev/null +++ b/DB2CHECK/build.sh @@ -0,0 +1,30 @@ +#!/usr/bin/env bash +# Copyright Contributors to the GenevaERS Project. +# SPDX-License-Identifier: Apache-2.0 (c) Copyright IBM Corporation +# 2008 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +mvn clean +mvn install $1 + +export rev=`grep "" pom.xml | awk -F'||' '{print $2}'`; +echo DB2CHECK release number $rev; + +cp ./target/*-jar-with-dependencies.jar $GERS_RCA_JAR_DIR/db2check-$rev.jar; + +cd $GERS_RCA_JAR_DIR; + +touch db2check-latest.jar; +rm db2check-latest.jar; +ln -s db2check -$rev.jar db2check-latest.jar; diff --git a/DB2CHECK/pom.xml b/DB2CHECK/pom.xml new file mode 100644 index 0000000..2f59515 --- /dev/null +++ b/DB2CHECK/pom.xml @@ -0,0 +1,148 @@ + + + + + + + 4.0.0 + + + Geneva ERS + https://genevaers.org/ + + + + UTF-8 + 11 + 8 + 3.8.4 + 2.22.2 + 1.8 + 1.8 + ${maven.build.timestamp} + yyyy-MM-dd HH:mm + + 1.0.1 + + + org.genevaers + db2check + ${revision} + + GenevaERS Build + GenevaERS Test Apps + + + + com.google.flogger + flogger + 0.8 + + + com.google.flogger + flogger-system-backend + 0.8 + + + org.freemarker + freemarker + 2.3.32 + + + com.fasterxml.jackson.dataformat + jackson-dataformat-csv + 2.13.0 + + + com.ibm + db2jcc_license_cu + 4 + + + com.ibm + db2jcc4 + 4 + + + com.ibm + db2jcc_license_cisuz + 4 + + + + + + org.codehaus.mojo + build-helper-maven-plugin 
+ 3.2.0 + + + add-source + generate-test-sources + + add-test-source + + + + target/generated-sources + + + + + + + org.apache.maven.plugins + maven-assembly-plugin + 3.7.1 + + + jar-with-dependencies + + + + org.genevaers.db2check.GvbSchemaValidateMain + + + + + + make-assembly + package + + single + + + + + + + + true + src/main/resources + + application.properties + + + + + + \ No newline at end of file diff --git a/DB2CHECK/src/main/java/org/genevaers/db2check/GvbSchemaConfig.java b/DB2CHECK/src/main/java/org/genevaers/db2check/GvbSchemaConfig.java new file mode 100644 index 0000000..89a76c4 --- /dev/null +++ b/DB2CHECK/src/main/java/org/genevaers/db2check/GvbSchemaConfig.java @@ -0,0 +1,66 @@ +package org.genevaers.db2check; + +import java.io.BufferedWriter; +import java.sql.*; +import java.util.HashMap; + +public class GvbSchemaConfig { + + private String digestType; + private Connection con; + private String schema_mask; + private Boolean makeHash; + private Boolean makeDef; + private BufferedWriter fwriter; + private BufferedWriter hwriter; + private BufferedWriter[] dwriter; + private HashMap spmap; + private HashMap tbmap; + private HashMap ixmap; + private HashMap fkmap; + + // Constructor + public GvbSchemaConfig(String digestType, Connection con, String schema_mask, Boolean makeHash, Boolean makeDef, + BufferedWriter fwriter, BufferedWriter hwriter, BufferedWriter dwriter[], + HashMap spmap, HashMap tbmap, HashMap ixmap, HashMap fkmap) + { + this.digestType = digestType; + this.con = con; + this.schema_mask = schema_mask; + this.makeHash = makeHash; + this.makeDef = makeDef; + this.fwriter = fwriter; + this.hwriter = hwriter; + this.dwriter = dwriter; + this.spmap = spmap; + this.tbmap = tbmap; + this.ixmap = ixmap; + this.fkmap = fkmap; + } + + public String getDigestType() { return digestType; } + public Connection getCon() { return con; } + public String getSchemaMask() { return schema_mask; } + public Boolean getMakeHash() { return makeHash; } + 
public Boolean getMakeDef() { return makeDef; } + public BufferedWriter getFwriter() { return fwriter; } + public BufferedWriter getHwriter() { return hwriter; } + public BufferedWriter[] getDwriter() { return dwriter; } + public HashMap getSpmap() { return spmap;} + public HashMap getTbmap() { return tbmap;} + public HashMap getIxmap() { return ixmap;} + public HashMap getFkmap() { return fkmap;} + + public void setDigestType(String digestType) { this.digestType = digestType; } + public void setCon(Connection con) {this.con = con; } + public void setSchemaMask(String schema_mask) {this.schema_mask = schema_mask; } + public void setMakeHash(Boolean makeHash) {this.makeHash = makeHash; } + public void setMakeDef(Boolean makeDef) {this.makeDef = makeDef; } + public void setFwriter(BufferedWriter fwriter) {this.fwriter = fwriter; } + public void setHwriter(BufferedWriter hwriter) {this.hwriter = hwriter; } + public void setDwriter(BufferedWriter[] dwriter) {this.dwriter = dwriter; } + public void setSpmap(HashMap spmap) {this.spmap = spmap;} + public void setTbmap(HashMap tbmap) {this.tbmap = tbmap;} + public void setIxmap(HashMap ixmap) {this.ixmap = ixmap;} + public void setFkmap(HashMap fkmap) {this.fkmap = fkmap;} +} diff --git a/DB2CHECK/src/main/java/org/genevaers/db2check/GvbSchemaValidateA.java b/DB2CHECK/src/main/java/org/genevaers/db2check/GvbSchemaValidateA.java new file mode 100644 index 0000000..ab2ad03 --- /dev/null +++ b/DB2CHECK/src/main/java/org/genevaers/db2check/GvbSchemaValidateA.java @@ -0,0 +1,199 @@ +// https://www.ibm.com/docs/en/db2-for-zos/12.0.0?topic=samples-example-simple-jdbc-application + +// VALIDATE STORED PROCEDURES + +package org.genevaers.db2check; + +import java.io.BufferedWriter; +import java.io.IOException; +import java.util.HashMap; +import java.util.logging.Logger; + +import com.ibm.db2.jcc.a.f; + +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.Base64; + +import java.sql.*; 
+ +public class GvbSchemaValidateA { + + private Integer rc; + private static final Logger logger = Logger.getLogger(GvbSchemaValidateA.class.getName()); + + public GvbSchemaValidateA(GvbSchemaConfig sc) + { + Boolean match = true; + + BufferedWriter hwriter = sc.getHwriter(); // For writing digest values of hashmaps + BufferedWriter[] dwriter = new BufferedWriter[4]; // For writing Schema definitions + BufferedWriter fwriter = sc.getFwriter(); // General output writers + dwriter = sc.getDwriter(); + String digestType = sc.getDigestType(); + Connection con = sc.getCon(); + String schema_mask = sc.getSchemaMask(); + Boolean makeHash = sc.getMakeHash(); + Boolean makeDef = sc.getMakeDef(); + HashMap spmap = sc.getSpmap(); + + String schema; + String nname; + String vversion; + String ttext; + Statement stmt; + ResultSet rs; + + logger.info("GvbSchemaValidateA: checking stored procedures for schema: " + schema_mask); + //System.out.println ("**** GvbSchemaValidateA: checking stored procedures for schema: " + schema_mask); + + String SQLstmt = "SELECT SCHEMA, NAME, VERSION, TEXT FROM SYSIBM.SYSROUTINES WHERE SCHEMA LIKE '"+schema_mask+"' ORDER BY SCHEMA, NAME"; + + try { + // Print generated digest value to separate file if requested -A + if ( makeHash ) { + hwriter.write(" // HashMap spmap = new HashMap<>(30);\n"); + hwriter.write(" // Populate digest map of stored procedures using " + digestType +"\n"); + } + + // Create the SQL statement + stmt = con.createStatement(); + logger.fine("Created JDBC Statement object"); + //System.out.println("**** Created JDBC Statement object"); + + // Execute a query and generate a ResultSet instance + rs = stmt.executeQuery(SQLstmt); + logger.fine("Created JDBC ResultSet object"); + //System.out.println("**** Created JDBC ResultSet object"); + + fwriter.write("\nStored Procedures Validation Report for schema: " + schema_mask + "\n\n"); + + MessageDigest md = MessageDigest.getInstance(digestType); + while (rs.next()) { + schema = 
rs.getString(1); + nname = rs.getString(2); + vversion = rs.getString(3); + ttext = rs.getString(4); + + byte[] hashedBytes = md.digest(ttext.getBytes()); + String encodedHash = Base64.getEncoder().encodeToString(hashedBytes); + + if (makeHash) { + hwriter.write(nname+ "," + encodedHash); //populate digest hash map + hwriter.write("\n"); + } + else + { + // report on schema correctness + fwriter.write("Stored Procedure: " + nname + " " + vversion + " Digest: " + digestType + ": " + encodedHash + "\n"); + + // report on schema correctness +// fwriter.write(schema + " " + nname + " " + vversion + "\n"); + //System.out.println(schema + " " + nname + " " + vversion ); // + "\n " + ttext); +// fwriter.write(digestType + ": " + encodedHash + "\n"); + //System.out.println(digestType + ": " + encodedHash); + } + + // Print all of the definition data to separate file dwriter if requested -D + if (makeDef) { + dwriter[0].write(schema+":"+nname+"============================================\n"); + dwriter[0].write(ttext); + dwriter[0].write("\n"); + } + + if (makeHash) { + // Do nothing + } + else { + // Report on correctness of schema definitions + String hashvalue = spmap.get(nname); + if ( hashvalue == null) { + logger.warning("HASH value mismatch for stored procedure: " + nname + " - no stored hash value"); + fwriter.write("HASH value mismatch for stored procedure: " + nname + " - no stored hash value\n"); + //System.out.println("HASH value mismatch for stored procedure: " + nname); + //System.out.println("No stored hash value"); + fwriter.write("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n"); + //System.out.println("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^"); + match = false; + } + else + { + if ( hashvalue.equals(encodedHash)) { + fwriter.write("HASH value matches\n"); + //System.out.println("HASH value matches for stored procedure: " + nname); + } + else + { + logger.warning("HASH 
value mismatch for stored procedure: " + nname); + fwriter.write("HASH value mismatch for stored procedure: " + nname + "\n"); + //System.out.println("HASH value mismatch for stored procedure: " + nname); + fwriter.write("Computed hash value: " + encodedHash + "\n"); + fwriter.write("Stored hash value : " + hashvalue + "\n"); + //System.out.println("Stored hash value: " + hashvalue); + fwriter.write("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n"); + //System.out.println("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^"); + match = false; + } + } + } + } + logger.fine("Fetched all rows from JDBC ResultSet"); + //System.out.println("**** Fetched all rows from JDBC ResultSet"); + + // Close the ResultSet + rs.close(); + + logger.fine("Closed JDBC ResultSet"); + //System.out.println("**** Closed JDBC ResultSet"); + + // Close the Statement + stmt.close(); + logger.fine("Closed JDBC Statement"); + //System.out.println("**** Closed JDBC Statement"); + + } catch (SQLException e) { + logger.severe("SQLSTATE: " + e.getSQLState() + " executing: " + SQLstmt + e.getMessage()); + //System.out.println("SQLSTATE: " + e.getSQLState() + " executing: " + SQLstmt); + //e.printStackTrace(); + rc = 4; + return; + } catch (IOException e) { + logger.severe("IO exception encountered in GvbSchemaValidateA"); + //System.out.println("IO exception encountered in GvbSchemaValidateA"); + //e.printStackTrace(); + rc = 8; + return; + } catch (NoSuchAlgorithmException e) { + logger.severe("Digest algorithm: " + digestType + " not available"); + //System.out.println("Digest algorithm: " + digestType + " not available"); + //e.printStackTrace(); + rc = 12; + return; + } + + if (makeHash) { + logger.info("Stored procedure digest hashmap created"); + //System.out.println("\nStored procedure digest hashmap created\n"); + rc = 2; + return; + } else { + if ( match ) + { + logger.info("All stored procedure definitions match"); 
+ //System.out.println("\nAll stored procedure definitions match.\n"); + rc = 0; + return; + } + else + { + logger.warning("One or more stored procedures do not match expected definitions ***"); + //System.out.println("\nOne or more stored procedures do not match expected definitions !!!\n"); + rc = 1; + return; + } + } + } + + public Integer getRc() {return rc;} + +} \ No newline at end of file diff --git a/DB2CHECK/src/main/java/org/genevaers/db2check/GvbSchemaValidateB.java b/DB2CHECK/src/main/java/org/genevaers/db2check/GvbSchemaValidateB.java new file mode 100644 index 0000000..4d47f13 --- /dev/null +++ b/DB2CHECK/src/main/java/org/genevaers/db2check/GvbSchemaValidateB.java @@ -0,0 +1,204 @@ +// https://www.ibm.com/docs/en/db2-for-zos/12.0.0?topic=samples-example-simple-jdbc-application + +// VALIDATE TABLES AND COLUMNS + +package org.genevaers.db2check; + +import java.io.BufferedWriter; +import java.io.IOException; +import java.util.HashMap; +import java.util.logging.Logger; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.Base64; + +import java.sql.*; + +public class GvbSchemaValidateB { + + private Integer rc; + private static final Logger logger = Logger.getLogger(GvbSchemaValidateB.class.getName()); + + public GvbSchemaValidateB(GvbSchemaConfig sc) + { + Boolean match = true; + + BufferedWriter hwriter = sc.getHwriter(); // For writing digest values of hashmaps + BufferedWriter[] dwriter = new BufferedWriter[4]; // For writing Schema definitions + BufferedWriter fwriter = sc.getFwriter(); // General output writers + dwriter = sc.getDwriter(); + String digestType = sc.getDigestType(); + Connection con = sc.getCon(); + String schema_mask = sc.getSchemaMask(); + Boolean makeHash = sc.getMakeHash(); + Boolean makeDef = sc.getMakeDef(); + HashMap tbmap = sc.getTbmap(); + + String schema; + String tname; + String cname; + String typename; + int length; + String lastTab = ""; + + Statement stmt; + ResultSet 
rs; + + logger.info("GvbSchemaValidateB: checking tables and columns for schema: " + schema_mask); + //System.out.println ("**** GvbSchemaValidateB: checking tables and colums for schema: " + schema_mask); + + String SQLstmt = "SELECT TBCREATOR, TBNAME, NAME, COLTYPE, LENGTH FROM SYSIBM.SYSCOLUMNS WHERE TBCREATOR LIKE '" + schema_mask + "' ORDER BY TBNAME, NAME"; + + try { + StringBuilder sb = new StringBuilder(""); + // Print generated digest value to separate file if requested -A + if ( makeHash ) { + hwriter.write(" // HashMap tbmap = new HashMap<>(100);\n"); + hwriter.write(" // Populate digest map of tables using " + digestType +"\n"); + } + + // Create the SQL statement + stmt = con.createStatement(); + logger.fine("Created JDBC Statement object"); + //System.out.println("**** Created JDBC Statement object"); + + // Execute a query and generate a ResultSet instance + rs = stmt.executeQuery(SQLstmt); + logger.fine("Created JDBC ResultSet object"); + //System.out.println("**** Created JDBC ResultSet object"); + + fwriter.write("\nTable and Column Validation Report for schema: " + schema_mask + "\n\n"); + + MessageDigest md = MessageDigest.getInstance(digestType); + while (rs.next()) { + schema = rs.getString(1); + tname = rs.getString(2); + cname = rs.getString(3); + typename = rs.getString(4); + length = rs.getInt(5); + + if ( lastTab.equals(tname)) { + if ( makeDef ) { + dwriter[1].write(schema + " " + tname + " " + cname + " " + typename + " " + length + "\n"); + } + sb.append(schema + " " + tname + " " + cname + " " + typename + " " + length); + } + else{ + if (sb.length() > 0 ) { + byte[] hashedBytes = md.digest((sb.toString()).getBytes()); + String encodedHash = Base64.getEncoder().encodeToString(hashedBytes); + + if ( makeHash ) { + hwriter.write(tname + "," + encodedHash); //populate digest hash map + hwriter.write("\n"); + } + else { + // report on schema correctness + fwriter.write("Table: " + tname + " Digest: " + digestType + ": " + encodedHash + 
"\n"); + //System.out.println("Table: " + tname + " Digest: " + digestType + ": " + encodedHash); + String hashvalue = tbmap.get(tname); + if (hashvalue == null) + { + logger.warning("HASH value mismatch for table: " + tname + " - no stored hash value"); + fwriter.write("HASH value mismatch for table: " + tname + " - no stored hash value\n"); + //System.out.println("HASH value mismatch for table: " + tname); + //System.out.println("No stored hash value"); + fwriter.write("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n"); + //System.out.println("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^"); + match = false; } + else + { + if ( hashvalue.equals(encodedHash)) + { + fwriter.write("HASH value matches for table: " + tname + "\n"); + //System.out.println("HASH value matches for table: " + tname); + } + else + { + logger.warning("HASH value mismatch for table: " + tname); + fwriter.write("HASH value mismatch for table: " + tname + "\n"); + //System.out.println("HASH value mismatch for table: " + tname); + fwriter.write("Computed hash value: " + encodedHash + "\n"); + //System.out.println("Computed hash value: " + encodedHash); + fwriter.write("Stored hash value : " + hashvalue + "\n"); + //System.out.println("Stored hash value: " + hashvalue); + fwriter.write("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n"); + //System.out.println("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^"); + match = false; + } + } + } + } + + // Print all of the definition data to separate file dwriter if requested -D + if ( makeDef ) { + dwriter[1].write("\n" + schema + " TABLE: " + tname+" ============================================\n"); + dwriter[1].write(schema + " " + tname + " " + cname + " " + typename + " " + length + "\n"); + } + + sb.delete(0, sb.length()); + sb.append(schema + " " + tname + " " + cname + " " + typename + " " + 
length); + } + lastTab = tname; + } + logger.fine("Fetched all rows from JDBC ResultSet"); + //System.out.println("**** Fetched all rows from JDBC ResultSet"); + + // Close the ResultSet + rs.close(); + logger.fine("Closed JDBC ResultSet"); + //System.out.println("**** Closed JDBC ResultSet"); + + // Close the Statement + stmt.close(); + logger.fine("Closed JDBC Statement"); + //System.out.println("**** Closed JDBC Statement"); + + } catch (SQLException e) { + logger.severe("SQLSTATE: " + e.getSQLState() + " executing: " + SQLstmt + e.getMessage()); + //System.out.println("SQLSTATE: " + e.getSQLState() + " executing: " + SQLstmt); + //e.printStackTrace(); + rc = 4; + return; + } catch (IOException e) { + logger.severe("IO exception encountered in GvbSchemaValidateB"); + //System.out.println("IO exception encountered in GvbSchemaValidateB"); + //e.printStackTrace(); + rc = 8; + return; + } catch (NoSuchAlgorithmException e) { + logger.severe("Digest algorithm: " + digestType + " not available"); + //System.out.println("Digest algorithm: " + digestType + " not available"); + //e.printStackTrace(); + rc = 12; + return; + } + + if ( makeHash ) { + logger.info("Table digest hashmap created"); + //System.out.println("\nTable digest hashmap created\n"); + rc = 2; + return; + } + else + { + if ( match ) + { + logger.info("All table definitions match"); + //System.out.println("\nAll table definitions match.\n"); + rc = 0; + return; + } + else + { + logger.warning("One or more tables do not match expected definitions ***"); + //System.out.println("\nOne or more tables do not match expected definitions !!!\n"); + rc = 1; + return; + } + } + } + + public Integer getRc() {return rc;} + +} \ No newline at end of file diff --git a/DB2CHECK/src/main/java/org/genevaers/db2check/GvbSchemaValidateC.java b/DB2CHECK/src/main/java/org/genevaers/db2check/GvbSchemaValidateC.java new file mode 100644 index 0000000..017b5ea --- /dev/null +++ 
b/DB2CHECK/src/main/java/org/genevaers/db2check/GvbSchemaValidateC.java @@ -0,0 +1,199 @@ +// https://www.ibm.com/docs/en/db2-for-zos/12.0.0?topic=samples-example-simple-jdbc-application + +// VALIDATE INDEXES + +package org.genevaers.db2check; + +import java.io.BufferedWriter; +import java.io.IOException; +import java.util.HashMap; +import java.util.logging.Logger; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.Base64; + +import java.sql.*; + +public class GvbSchemaValidateC { + + private Integer rc; + private static final Logger logger = Logger.getLogger(GvbSchemaValidateC.class.getName()); + + public GvbSchemaValidateC(GvbSchemaConfig sc) + { + Boolean match = true; + + BufferedWriter hwriter = sc.getHwriter(); // For writing digest values of hashmaps + BufferedWriter[] dwriter = new BufferedWriter[4]; // For writing Schema definitions + BufferedWriter fwriter = sc.getFwriter(); // General output writers + dwriter = sc.getDwriter(); + String digestType = sc.getDigestType(); + Connection con = sc.getCon(); + String schema_mask = sc.getSchemaMask(); + Boolean makeHash = sc.getMakeHash(); + Boolean makeDef = sc.getMakeDef(); + HashMap ixmap = sc.getIxmap(); + + String schema; + String tname; + String iname; + String uniqueR; + String lastTab = ""; + + Statement stmt; + ResultSet rs; + + logger.info("GvbSchemaValidateC: checking indexes for schema: " + schema_mask); + //System.out.println ("**** GvbSchemaValidateC: checking indexes for schema: " + schema_mask); + + String SQLstmt = "SELECT CREATOR, TBNAME, NAME, UNIQUERULE FROM SYSIBM.SYSINDEXES WHERE CREATOR LIKE '" + schema_mask + "' ORDER BY TBNAME, NAME;"; + + try { + StringBuilder sb = new StringBuilder(""); + // Print generated digest value to separate file if requested -A + if ( makeHash ) { + hwriter.write(" // HashMap ixmap = new HashMap<>(100);\n"); + hwriter.write(" // Populate digest map of indexes using " + digestType +"\n"); + } + + // Create SQL 
Statement + stmt = con.createStatement(); + logger.fine("Created JDBC Statement object"); + //System.out.println("**** Created JDBC Statement object"); + + // Execute a query and generate a ResultSet instance + rs = stmt.executeQuery(SQLstmt); + logger.fine("Created JDBC ResultSet object"); + //System.out.println("**** Created JDBC ResultSet object"); + + fwriter.write("\nIndex Validation Report by table for schema: " + schema_mask + "\n\n"); + + MessageDigest md = MessageDigest.getInstance(digestType); + while (rs.next()) { + schema = rs.getString(1); + tname = rs.getString(2); + iname = rs.getString(3); + uniqueR = rs.getString(4); + + if ( lastTab.equals(tname)) { + if ( makeDef ) { + dwriter[2].write(schema + " " + tname + " " + iname + " " + uniqueR + "\n"); + } + sb.append(schema + " " + tname + " " + iname + " " + uniqueR); + } + else{ + if (sb.length() > 0 ) { + byte[] hashedBytes = md.digest((sb.toString()).getBytes()); + String encodedHash = Base64.getEncoder().encodeToString(hashedBytes); + + if ( makeHash) { + hwriter.write(tname+ "," + encodedHash); //populate hash map + hwriter.write("\n"); + } + else { + // report on schema correctness + fwriter.write("Indexes for Table: " + tname + " Digest: " + digestType + ": " + encodedHash + "\n"); + //System.out.println("Table: " + tname + " Digest: " + digestType + ": " + encodedHash); + String hashvalue = ixmap.get(tname); + + if (hashvalue == null) { + logger.warning("HASH value mismatch for indexes of table: " + tname + " - no stored hash value"); + fwriter.write("HASH value mismatch for indexes of table: " + tname + " - no stored hash value\n"); + //System.out.println("HASH value mismatch for table: " + tname); + //System.out.println("No stored hash value"); + fwriter.write("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n"); + //System.out.println("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^"); + match = false; + } + else { + 
if ( hashvalue.equals(encodedHash) ) { + fwriter.write("HASH value matches for indexes of table: " + tname + "\n"); + //System.out.println("HASH value matches for table: " + tname); + } + else + { + logger.warning("HASH value mismatch for indexes of table: " + tname); + fwriter.write("HASH value mismatch for indexes of table: " + tname + "\n"); + //System.out.println("HASH value mismatch for table: " + tname); + fwriter.write("Computed hash value: " + encodedHash + "\n"); + //System.out.println("Computed hash value: " + encodedHash); + fwriter.write("Stored hash value : " + hashvalue + "\n"); + //System.out.println("Stored hash value: " + hashvalue); + fwriter.write("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n"); + //System.out.println("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^"); + match = false; + } + } + } + } + + if ( makeDef ) { + dwriter[2].write("\n" + schema + " TABLE: " + tname+" ============================================\n"); + dwriter[2].write(schema + " " + tname + " " + iname + " " + uniqueR + "\n"); + } + + sb.delete(0, sb.length()); + sb.append(schema + " " + tname + " " + iname + " " + uniqueR); + } + lastTab = tname; + } + logger.fine("Fetched all rows from JDBC ResultSet"); + //System.out.println("**** Fetched all rows from JDBC ResultSet"); + + // Close the ResultSet + rs.close(); + logger.fine("Closed JDBC ResultSet"); + //System.out.println("**** Closed JDBC ResultSet"); + + // Close the Statement + stmt.close(); + logger.fine("Closed JDBC Statement"); + //System.out.println("**** Closed JDBC Statement"); + + } catch (SQLException e) { + logger.severe("SQLSTATE: " + e.getSQLState() + " executing: " + SQLstmt + e.getMessage()); + //System.out.println("SQLSTATE: " + e.getSQLState() + " executing: " + SQLstmt); + //e.printStackTrace(); + rc = 4; + return; + } catch (IOException e) { + logger.severe("IO exception encountered in GvbSchemaValidateC"); + 
// https://www.ibm.com/docs/en/db2-for-zos/12.0.0?topic=samples-example-simple-jdbc-application

// VALIDATE FOREIGN KEYS

package org.genevaers.db2check;

import java.io.BufferedWriter;
import java.io.IOException;
import java.util.HashMap;
import java.util.logging.Logger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Base64;

import java.sql.*;

/**
 * Validates the foreign key definitions of a GenevaERS DB2 schema.
 *
 * <p>Reads SYSIBM.SYSFOREIGNKEYS for every table whose creator matches the
 * configured schema mask, accumulates the rows per table, and digests each
 * table's rows with the configured algorithm. Depending on configuration it
 * either writes the Base64-encoded digests to the hash-map file (makeHash
 * mode) or compares them against the stored values in the configuration's
 * foreign-key map.
 *
 * <p>The outcome is exposed through {@link #getRc()}:
 * 0 = all definitions match, 1 = at least one mismatch, 2 = digest map
 * written, 4 = SQL error, 8 = I/O error, 12 = digest algorithm unavailable.
 */
public class GvbSchemaValidateD {

    // Return code of the validation run; see class comment for values.
    private Integer rc;

    // FIX: was Logger.getLogger(GvbSchemaValidateC.class.getName()) — a
    // copy/paste slip that attributed this class's log records to the index
    // validator.
    private static final Logger logger = Logger.getLogger(GvbSchemaValidateD.class.getName());

    // Visual separator written under each mismatch in the report file.
    private static final String SEPARATOR =
            "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n";

    /**
     * Runs the foreign key validation immediately on construction.
     *
     * @param sc shared configuration (connection, writers, digest maps, options)
     */
    public GvbSchemaValidateD(GvbSchemaConfig sc)
    {
        boolean match = true;

        BufferedWriter hwriter = sc.getHwriter();       // digest (hash map) output
        BufferedWriter fwriter = sc.getFwriter();       // general report output
        BufferedWriter[] dwriter = sc.getDwriter();     // definition files; [3] = foreign keys
        String digestType = sc.getDigestType();
        Connection con = sc.getCon();
        String schema_mask = sc.getSchemaMask();
        Boolean makeHash = sc.getMakeHash();
        Boolean makeDef = sc.getMakeDef();
        HashMap<String, String> fkmap = sc.getFkmap();  // stored digests, keyed by table name

        String lastTab = "";

        logger.info("GvbSchemaValidateD: checking foreign keys for schema: " + schema_mask);

        // NOTE(review): schema_mask comes from a local config file, but a
        // PreparedStatement parameter would still be preferable to
        // concatenating it into the SQL text — confirm with callers.
        String SQLstmt = "SELECT CREATOR, TBNAME, RELNAME, COLNAME FROM SYSIBM.SYSFOREIGNKEYS WHERE CREATOR LIKE '" + schema_mask + "' ORDER BY TBNAME, RELNAME, COLNAME;";

        try {
            StringBuilder sb = new StringBuilder();

            // Print generated digest values to a separate file if requested (-A).
            if (makeHash) {
                hwriter.write(" // HashMap fkmap = new HashMap<>(100);\n");
                hwriter.write(" // Populate digest map of foreign keys using " + digestType + "\n");
            }

            MessageDigest md = MessageDigest.getInstance(digestType);

            // try-with-resources: the original leaked the Statement and
            // ResultSet whenever an exception fired inside the fetch loop.
            try (Statement stmt = con.createStatement();
                 ResultSet rs = stmt.executeQuery(SQLstmt)) {

                logger.fine("Created JDBC Statement and ResultSet objects");

                fwriter.write("\nForeign key Validation Report by table for schema: " + schema_mask + "\n\n");

                while (rs.next()) {
                    String schema = rs.getString(1);
                    String tname = rs.getString(2);
                    String rname = rs.getString(3);
                    String cname = rs.getString(4);

                    if (!lastTab.equals(tname)) {
                        // Table break: digest the rows collected for the
                        // previous table.
                        // FIX: the original stored/looked up the digest under
                        // tname (the table of the NEW row); lastTab owns the
                        // accumulated rows, so every hash was keyed to the
                        // wrong table.
                        if (sb.length() > 0) {
                            if (!handleTableDigest(md, sb.toString(), lastTab, digestType, makeHash, hwriter, fwriter, fkmap)) {
                                match = false;
                            }
                        }
                        if (makeDef) {
                            dwriter[3].write("\n" + schema + " TABLE: " + tname + " ============================================\n");
                        }
                        sb.setLength(0);
                    }

                    if (makeDef) {
                        dwriter[3].write(schema + " " + tname + " " + rname + " " + cname + "\n");
                    }
                    sb.append(schema + " " + tname + " " + rname + " " + cname);
                    lastTab = tname;
                }

                // FIX: digest the final table's rows; the original ended the
                // loop without a flush, so the last table was never hashed in
                // makeHash mode and never validated otherwise.
                if (sb.length() > 0) {
                    if (!handleTableDigest(md, sb.toString(), lastTab, digestType, makeHash, hwriter, fwriter, fkmap)) {
                        match = false;
                    }
                }

                logger.fine("Fetched all rows from JDBC ResultSet");
            }

        } catch (SQLException e) {
            logger.severe("SQLSTATE: " + e.getSQLState() + " executing: " + SQLstmt + e.getMessage());
            rc = 4;
            return;
        } catch (IOException e) {
            logger.severe("IO exception encountered in GvbSchemaValidateD");
            rc = 8;
            return;
        } catch (NoSuchAlgorithmException e) {
            logger.severe("Digest algorithm: " + digestType + " not available");
            rc = 12;
            return;
        }

        if (makeHash) {
            logger.info("Foreign key digest hashmap created");
            rc = 2;
        } else if (match) {
            logger.info("All foreign key definitions match.");
            rc = 0;
        } else {
            logger.warning("One or more foreign keys do not match expected definitions ***");
            rc = 1;
        }
    }

    /**
     * Digests one table's accumulated foreign-key rows and either records the
     * value (makeHash mode) or validates it against the stored digest map.
     *
     * <p>Note: data.getBytes() deliberately uses the platform charset so that
     * digests stay comparable with previously generated hash-map files.
     *
     * @return true when the digest was recorded or matched; false on mismatch
     * @throws IOException if writing to a report or digest file fails
     */
    private boolean handleTableDigest(MessageDigest md, String data, String table,
                                      String digestType, Boolean makeHash,
                                      BufferedWriter hwriter, BufferedWriter fwriter,
                                      HashMap<String, String> fkmap) throws IOException {
        byte[] hashedBytes = md.digest(data.getBytes());
        String encodedHash = Base64.getEncoder().encodeToString(hashedBytes);

        if (makeHash) {
            hwriter.write(table + "," + encodedHash + "\n"); // populate hash map file
            return true;
        }

        // Report on schema correctness.
        fwriter.write("Foreign keys of table: " + table + " Digest: " + digestType + ": " + encodedHash + "\n");
        String hashvalue = fkmap.get(table);

        if (hashvalue == null) {
            logger.warning("HASH value mismatch for foreign key of table: " + table + " - no stored hash value");
            fwriter.write("HASH value mismatch for foreign key of table: " + table + " - no stored hash value\n");
            fwriter.write(SEPARATOR);
            return false;
        }
        if (hashvalue.equals(encodedHash)) {
            fwriter.write("HASH value matches for foreign keys of table: " + table + "\n");
            return true;
        }
        logger.warning("HASH value mismatch for foreign keys of table: " + table);
        fwriter.write("HASH value mismatch for foreign keys of table: " + table + "\n");
        fwriter.write("Computed hash value: " + encodedHash + "\n");
        fwriter.write("Stored hash value : " + hashvalue + "\n");
        fwriter.write(SEPARATOR);
        return false;
    }

    /** @return the run's return code (see class comment), or null if unset */
    public Integer getRc() { return rc; }
}
package org.genevaers.db2check;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.Closeable;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.HashMap;
import java.util.logging.Logger;

import java.sql.*;

/**
 * Entry point of the GenevaERS DB2 schema validation utility.
 *
 * <p>Reads connection details from {@code ~/db2check.config} (user, password,
 * JDBC URL, schema mask — one per line), then runs the four validators
 * (stored procedures, tables/columns, indexes, foreign keys) against the DB2
 * catalog. With {@code -A} it regenerates the schema digest file instead of
 * validating; with {@code -D} it additionally writes schema definition files.
 * All output goes to {@code ~/GenevaERS/}.
 */
public class GvbSchemaValidateMain {

    private static final Logger logger = Logger.getLogger(GvbSchemaValidateMain.class.getName());

    public static void main(String[] args)
    {
        // Stored digest maps, keyed by object/table name.
        HashMap<String, String> spmap = new HashMap<>(30);   // stored procedures
        HashMap<String, String> tbmap = new HashMap<>(100);  // tables/columns
        HashMap<String, String> ixmap = new HashMap<>(100);  // indexes
        HashMap<String, String> fkmap = new HashMap<>(100);  // foreign keys

        String url = "";
        String user = "";
        String password = "";
        String schema_mask = "";
        String digestType = "SHA3-512";
        boolean makeHash = false;   // -A: generate digest map instead of validating
        boolean makeDef = false;    // -D: also write schema definition files
        boolean makeF = true;       // the general report is always produced

        BufferedWriter fwriter = null;                     // general report
        BufferedWriter hwriter = null;                     // digest map
        BufferedWriter[] dwriter = new BufferedWriter[4];  // definitions: procs, tables, indexes, foreign keys

        String userhome = System.getProperty("user.home");

        logger.info("Running GvbSchemaValidateMain: checking DB2 Schema.");

        // Parse command line argument[s].
        for (String arg : args) {
            // FIX: the original called substring(0,1)/substring(1,2) unguarded
            // and threw StringIndexOutOfBoundsException on "" or "-".
            if (arg.length() < 2 || !arg.startsWith("-")) {
                continue;
            }
            switch (arg.substring(1, 2)) {
                case "A": // generate hash map -- does NOT validate the schema
                    makeHash = true;
                    break;
                case "D": // generate definition files -- available in all cases
                    makeDef = true;
                    break;
                case "h":
                    // FIX: original help text repeated "-D (" twice.
                    logger.info("-D (write schema definitions)\n-A (create schema digest map)");
                    return;
                default:
                    break;
            }
        }

        if (makeHash) {
            logger.info("Option set to generate Schema digest from DB2 catalog");
        }
        if (makeDef) {
            logger.info("Option set to generate Schema definitions from DB2 catalog");
        }

        // Read configuration from the home directory:
        // line 1 = user, line 2 = password, line 3 = JDBC URL, line 4 = schema mask.
        int configLines = 0;
        try (BufferedReader reader = new BufferedReader(new FileReader(userhome + "/db2check.config"))) {
            String line;
            while ((line = reader.readLine()) != null) {
                switch (configLines) {
                    case 0: user = line; break;        // FIX: line.substring(0) was a no-op copy
                    case 1: password = line; break;
                    case 2: url = line; break;
                    case 3: schema_mask = line; break;
                    default: break;                    // extra lines are ignored
                }
                configLines++;
            }
        } catch (IOException e) {
            logger.severe("IO exception encountered in GvbSchemaValidateMain reading configuration file: " + e.getMessage());
            return;
        }
        // Deliberately does not log the password.
        logger.fine("User: " + user + " Url: " + url + " Schema mask: " + schema_mask + ". Config lines read: " + configLines);

        File gersDir = new File(userhome + "/GenevaERS");
        if (makeHash) {
            // Output directory must exist; create it on first use.
            if (gersDir.mkdir()) {
                logger.info("Directory: " + userhome + "/GenevaERS" + " created");
            }
        } else {
            // Validation mode: the digest file from a previous -A run MUST exist.
            // If mkdir() succeeds the directory did not exist, so it cannot.
            if (gersDir.mkdir()) {
                logger.info("Directory: " + userhome + "/GenevaERS" + " did not previously exist");
                logger.severe("Digest file does not exist. Terminating application");
                return;
            }
            if (!readDigestFile(userhome + "/GenevaERS/SchemaDigest.txt", spmap, tbmap, ixmap, fkmap)) {
                return;
            }
        }

        // Load the DB2 JDBC driver.
        try {
            Class.forName("com.ibm.db2.jcc.DB2Driver");
        } catch (ClassNotFoundException e) {
            logger.severe("Error encountered loading DB2 SQLJ driver: " + e.getMessage());
            return;
        }
        logger.fine("Loaded the JDBC driver");

        // Create the connection, open the output file[s], and run the validators.
        Connection con = null;
        try {
            con = DriverManager.getConnection(url, user, password);
            con.setAutoCommit(false);
            logger.fine("Created a JDBC connection to the data source\n");

            if (makeF) {
                fwriter = new BufferedWriter(new FileWriter(userhome + "/GenevaERS/Schema_report.txt"));
                fwriter.write("GenevaERS DB2 Schema Report for: " + schema_mask + "\n");
            }
            if (makeHash) {
                hwriter = new BufferedWriter(new FileWriter(userhome + "/GenevaERS/SchemaDigest.txt"));
            }
            if (makeDef) {
                dwriter[0] = new BufferedWriter(new FileWriter(userhome + "/GenevaERS/StoredProcedures.txt")); // stored procedures
                dwriter[1] = new BufferedWriter(new FileWriter(userhome + "/GenevaERS/Tabledata.txt"));        // tables and columns
                dwriter[2] = new BufferedWriter(new FileWriter(userhome + "/GenevaERS/Indexdata.txt"));        // indexes
                dwriter[3] = new BufferedWriter(new FileWriter(userhome + "/GenevaERS/Foreignkeydata.txt"));   // foreign keys (comment said "foreign assets")
            }

            GvbSchemaConfig sc = new GvbSchemaConfig(digestType, con, schema_mask, makeHash, makeDef,
                    fwriter, hwriter, dwriter, spmap, tbmap, ixmap, fkmap);

            GvbSchemaValidateA mA = new GvbSchemaValidateA(sc); // stored procedures
            GvbSchemaValidateB mB = new GvbSchemaValidateB(sc); // tables and columns
            GvbSchemaValidateC mC = new GvbSchemaValidateC(sc); // indexes
            GvbSchemaValidateD mD = new GvbSchemaValidateD(sc); // foreign keys

            int maxRc = Math.max(mA.getRc(), Math.max(mB.getRc(), Math.max(mC.getRc(), mD.getRc())));

            switch (maxRc) {
                case 0:
                    logger.info("All parts of schema validated successfully\n");
                    break;
                case 1:
                    logger.warning("One or more parts of schema failed validation\n");
                    break;
                case 2:
                    logger.info("Schema digest map created\n");
                    break;
                case 4:
                    logger.severe("DB2 SQL error\n");
                    break;
                case 8:
                    logger.severe("IO error\n");
                    break;
                case 12:
                    // FIX: the original logged "IO and DB2 SQL error" here, but
                    // rc 12 means the requested digest algorithm was unavailable.
                    logger.severe("No such algorithm found: " + digestType + "\n");
                    break;
                default:
                    logger.severe("Incorrect max return code: " + maxRc + "\n");
                    break;
            }

            // Connection must be on a unit-of-work boundary to allow close.
            con.commit();
            logger.fine("SQL statements completed on transaction boundary");

        } catch (SQLException e) {
            logger.severe("SQLSTATE: " + e.getSQLState() + " creating database connection for: " + url + e.getMessage());
        } catch (IOException e) {
            logger.severe("IO exception encountered in GvbSchemaValidateMain" + e.getMessage());
        } finally {
            // FIX: the original closed the writers and connection only on the
            // success path, leaking all of them whenever an exception fired.
            closeQuietly(fwriter);
            closeQuietly(hwriter);
            for (BufferedWriter w : dwriter) {
                closeQuietly(w);
            }
            if (con != null) {
                try {
                    con.close();
                    logger.fine("Disconnected from data source");
                } catch (SQLException e) {
                    logger.warning("Failed to close JDBC connection: " + e.getMessage());
                }
            }
        }
        logger.fine("JDBC completed");
    }

    /**
     * Loads the stored digest file into the four per-category maps.
     *
     * <p>Section-marker comment lines switch the target map; every other line
     * is a "name,digest" pair. The very first line is a header and is
     * discarded (state -1 -&gt; 0), matching the original state machine.
     *
     * @return false (after logging) on any I/O error, true otherwise
     */
    private static boolean readDigestFile(String path,
                                          HashMap<String, String> spmap,
                                          HashMap<String, String> tbmap,
                                          HashMap<String, String> ixmap,
                                          HashMap<String, String> fkmap) {
        int state = -1;  // -1 = expecting header, 0 = header consumed, 1..4 = section
        int lineNo = 0;
        try (BufferedReader reader = new BufferedReader(new FileReader(path))) {
            String line;
            while ((line = reader.readLine()) != null) {
                if (line.contains("// Populate digest map of stored procedures")) {
                    state = 1;
                } else if (line.contains("// Populate digest map of tables")) {
                    state = 2;
                } else if (line.contains("// Populate digest map of indexes")) {
                    state = 3;
                } else if (line.contains("// Populate digest map of foreign")) {
                    state = 4;
                } else {
                    String[] values = line.split(","); // "name,digest"
                    switch (state) {
                        case -1:
                            state = 0; // header line before any section marker; discard
                            break;
                        case 1: case 2: case 3: case 4:
                            // FIX: the original indexed values[1] unguarded and
                            // threw ArrayIndexOutOfBoundsException on a
                            // malformed line.
                            if (values.length < 2) {
                                logger.severe("Malformed digest record at line " + lineNo + ": " + line);
                            } else if (state == 1) {
                                spmap.put(values[0], values[1]);
                            } else if (state == 2) {
                                tbmap.put(values[0], values[1]);
                            } else if (state == 3) {
                                ixmap.put(values[0], values[1]);
                            } else {
                                fkmap.put(values[0], values[1]);
                            }
                            break;
                        default:
                            logger.severe("Invalid State:" + state + " Line: " + lineNo + " record: " + line);
                            break;
                    }
                }
                lineNo++;
            }
            logger.fine("Digest lines read: " + lineNo);
        } catch (IOException e) {
            logger.severe("IO exception encountered in GvbSchemaValidateMain reading schema digest file: " + e.getMessage());
            return false;
        }
        return true;
    }

    /** Closes a resource if non-null, logging rather than propagating failure. */
    private static void closeQuietly(Closeable c) {
        if (c != null) {
            try {
                c.close();
            } catch (IOException e) {
                logger.warning("Failed to close resource: " + e.getMessage());
            }
        }
    }
}
a/DB2CHECK/src/main/resources/application.properties b/DB2CHECK/src/main/resources/application.properties new file mode 100644 index 0000000..a6130e1 --- /dev/null +++ b/DB2CHECK/src/main/resources/application.properties @@ -0,0 +1,17 @@ +# Copyright Contributors to the GenevaERS Project. SPDX-License-Identifier: Apache-2.0 (c) Copyright IBM Corporation 2008 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +app.name=@project.artifactId@ +build.version=@project.version@ diff --git a/DB2CHECK/target/classes/application.properties b/DB2CHECK/target/classes/application.properties new file mode 100644 index 0000000..c40538c --- /dev/null +++ b/DB2CHECK/target/classes/application.properties @@ -0,0 +1,17 @@ +# Copyright Contributors to the GenevaERS Project. SPDX-License-Identifier: Apache-2.0 (c) Copyright IBM Corporation 2008 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +app.name=db2check +build.version=1.0.1 diff --git a/SH/Allocate.sh b/SH/Allocate.sh index 1d8240a..25f6b8a 100755 --- a/SH/Allocate.sh +++ b/SH/Allocate.sh @@ -12,6 +12,7 @@ cat ../JCL/ALLOCDONE.jcl >> ../JCL/ALLOC.jcl; echo "$(date) ${BASH_SOURCE##*/} Submit JCL to allocate the build data sets"; . ./JobSubmitter.sh '../JCL/ALLOC.jcl' allocdone 1>> $err_log; +exitIfError; echo "$(date) ${BASH_SOURCE##*/} JobID: $GERS_JOBID" ; . ./JobWaiter.sh 60 allocdone 1>> $err_log; exitIfError; diff --git a/SH/DataSetAlias.sh b/SH/DataSetAlias.sh index 8979a31..8098b2e 100755 --- a/SH/DataSetAlias.sh +++ b/SH/DataSetAlias.sh @@ -12,6 +12,7 @@ cat ../JCL/ALIASDONE.jcl >> ../JCL/ALIAS.jcl; echo "$(date) ${BASH_SOURCE##*/} Submit JCL to set aliases for the build data sets"; . ./JobSubmitter.sh '../JCL/ALIAS.jcl' aliasdone 1>> $err_log; +exitIfError; echo "$(date) ${BASH_SOURCE##*/} JobID: $GERS_JOBID" ; . ./JobWaiter.sh 60 aliasdone 1>> $err_log; exitIfError; diff --git a/SH/SaveJobInfo.sh b/SH/SaveJobInfo.sh index 878eb38..6b187f1 100644 --- a/SH/SaveJobInfo.sh +++ b/SH/SaveJobInfo.sh @@ -11,6 +11,7 @@ cat ../JCL/SAVEDONE.jcl >> ../JCL/SAVEJOB.jcl; echo "$(date) ${BASH_SOURCE##*/} Submit JCL to copy job output"; . ./JobSubmitter.sh '../JCL/SAVEJOB.jcl' savedone 1>> $err_log; +exitIfError; echo "$(date) ${BASH_SOURCE##*/} JobID: $GERS_JOBID" ; . ./JobWaiter.sh 60 savedone 1>> $err_log; exitIfError; diff --git a/SH/SubBuild.sh b/SH/SubBuild.sh index 75ca6a6..252f406 100755 --- a/SH/SubBuild.sh +++ b/SH/SubBuild.sh @@ -12,6 +12,7 @@ cd $save_pwd ; echo "$(date) ${BASH_SOURCE##*/} Submit the generated JCL to assemble and link the load modules"; . ./JobSubmitter.sh '../JCL/BUILDPE.jcl' asmdone 1>> $err_log; +exitIfError; echo "$(date) ${BASH_SOURCE##*/} JobID: $GERS_JOBID" ; . ./JobWaiter.sh 120 asmdone 1>> $err_log ; exitIfError;