From dab9586e2710c1367898e3093d06fd3495be1ab0 Mon Sep 17 00:00:00 2001 From: gargsaumya Date: Wed, 3 Sep 2025 17:54:59 +0530 Subject: [PATCH 01/11] adding streaming support in fetch for varcharmax type --- mssql_python/cursor.py | 36 +++--- mssql_python/pybind/ddbc_bindings.cpp | 175 ++++++++++++++++++-------- tests/test_004_cursor.py | 113 +++++++++++++++++ 3 files changed, 251 insertions(+), 73 deletions(-) diff --git a/mssql_python/cursor.py b/mssql_python/cursor.py index f3c76853f..7b130ec84 100644 --- a/mssql_python/cursor.py +++ b/mssql_python/cursor.py @@ -348,16 +348,16 @@ def _map_sql_type(self, param, parameters_list, i): if utf16_len > MAX_INLINE_CHAR: # Long strings -> DAE if is_unicode: return ( - ddbc_sql_const.SQL_WLONGVARCHAR.value, + ddbc_sql_const.SQL_WVARCHAR.value, ddbc_sql_const.SQL_C_WCHAR.value, - utf16_len, + 0, 0, True, ) return ( - ddbc_sql_const.SQL_LONGVARCHAR.value, + ddbc_sql_const.SQL_VARCHAR.value, ddbc_sql_const.SQL_C_CHAR.value, - len(param), + 0, 0, True, ) @@ -753,20 +753,20 @@ def execute( # Executing a new statement. 
Reset is_stmt_prepared to false self.is_stmt_prepared = [False] - log('debug', "Executing query: %s", operation) - for i, param in enumerate(parameters): - log('debug', - """Parameter number: %s, Parameter: %s, - Param Python Type: %s, ParamInfo: %s, %s, %s, %s, %s""", - i + 1, - param, - str(type(param)), - parameters_type[i].paramSQLType, - parameters_type[i].paramCType, - parameters_type[i].columnSize, - parameters_type[i].decimalDigits, - parameters_type[i].inputOutputType, - ) + # log('debug', "Executing query: %s", operation) + # for i, param in enumerate(parameters): + # log('debug', + # """Parameter number: %s, Parameter: %s, + # Param Python Type: %s, ParamInfo: %s, %s, %s, %s, %s""", + # i + 1, + # param, + # str(type(param)), + # parameters_type[i].paramSQLType, + # parameters_type[i].paramCType, + # parameters_type[i].columnSize, + # parameters_type[i].decimalDigits, + # parameters_type[i].inputOutputType, + # ) ret = ddbc_bindings.DDBCSQLExecute( self.hstmt, diff --git a/mssql_python/pybind/ddbc_bindings.cpp b/mssql_python/pybind/ddbc_bindings.cpp index bbc3a2f52..8b575fdd9 100644 --- a/mssql_python/pybind/ddbc_bindings.cpp +++ b/mssql_python/pybind/ddbc_bindings.cpp @@ -1725,8 +1725,88 @@ SQLRETURN SQLFetch_wrap(SqlHandlePtr StatementHandle) { return SQLFetch_ptr(StatementHandle->get()); } +static py::object FetchLobColumnData(SQLHSTMT hStmt, + SQLUSMALLINT colIndex, + SQLSMALLINT cType, + bool isWideChar, + bool isBinary) +{ + std::vector buffer; + SQLLEN indicator = 0; + SQLRETURN ret; + int loopCount = 0; + + while (true) { + ++loopCount; + std::vector chunk(DAE_CHUNK_SIZE); + ret = SQLGetData_ptr( + hStmt, + colIndex, + cType, + chunk.data(), + DAE_CHUNK_SIZE, + &indicator + ); + if (indicator == SQL_NULL_DATA) { + LOG("Loop {}: Column {} is NULL", loopCount, colIndex); + return py::none(); + } + if (!SQL_SUCCEEDED(ret) && ret != SQL_SUCCESS_WITH_INFO) { + LOG("Loop {}: Error fetching col={} with cType={} ret={}", loopCount, colIndex, cType, ret); 
+ return py::none(); + } + SQLLEN copyCount = 0; + if (indicator > 0 && indicator != SQL_NO_TOTAL) { + copyCount = std::min(indicator, DAE_CHUNK_SIZE); + } else { + copyCount = DAE_CHUNK_SIZE; + } + + // Check if last byte(s) is a null terminator + if (copyCount > 0) { + if (!isWideChar && chunk[copyCount - 1] == '\0') { + --copyCount; + LOG("Loop {}: Trimmed null terminator (narrow)", loopCount); + } else if (isWideChar) { + auto wcharBuf = reinterpret_cast(chunk.data()); + if (wcharBuf[(copyCount / sizeof(wchar_t)) - 1] == L'\0') { + copyCount -= sizeof(wchar_t); + LOG("Loop {}: Trimmed null terminator (wide)", loopCount); + } + } + } + if (copyCount > 0) { + buffer.insert(buffer.end(), chunk.begin(), chunk.begin() + copyCount); + LOG("Loop {}: Appended {} bytes", loopCount, copyCount); + } + if (ret == SQL_SUCCESS) { + LOG("Loop {}: SQL_SUCCESS → no more data", loopCount); + break; + } + } + LOG("FetchLobColumnData: Total bytes collected = {}", buffer.size()); + + if (indicator == 0 || buffer.empty()) { + LOG("FetchLobColumnData: Returning empty string for col {}", colIndex); + return py::str(""); + } + + if (isWideChar) { + std::wstring wstr(reinterpret_cast(buffer.data()), + buffer.size() / sizeof(wchar_t)); + LOG("FetchLobColumnData: Returning wide string of length {}", wstr.length()); + return py::cast(wstr); + } + if (isBinary) { + LOG("FetchLobColumnData: Returning binary of {} bytes", buffer.size()); + return py::bytes(buffer.data(), buffer.size()); + } + std::string str(buffer.data(), buffer.size()); + LOG("FetchLobColumnData: Returning narrow string of length {}", str.length()); + return py::str(str); +} + // Helper function to retrieve column data -// TODO: Handle variable length data correctly SQLRETURN SQLGetData_wrap(SqlHandlePtr StatementHandle, SQLUSMALLINT colCount, py::list& row) { LOG("Get data from columns"); if (!SQLGetData_ptr) { @@ -1749,7 +1829,6 @@ SQLRETURN SQLGetData_wrap(SqlHandlePtr StatementHandle, SQLUSMALLINT colCount, p if 
(!SQL_SUCCEEDED(ret)) { LOG("Error retrieving data for column - {}, SQLDescribeCol return code - {}", i, ret); row.append(py::none()); - // TODO: Do we want to continue in this case or return? continue; } @@ -1757,60 +1836,46 @@ SQLRETURN SQLGetData_wrap(SqlHandlePtr StatementHandle, SQLUSMALLINT colCount, p case SQL_CHAR: case SQL_VARCHAR: case SQL_LONGVARCHAR: { - // TODO: revisit - HandleZeroColumnSizeAtFetch(columnSize); - uint64_t fetchBufferSize = columnSize + 1 /* null-termination */; - std::vector dataBuffer(fetchBufferSize); - SQLLEN dataLen; - // TODO: Handle the return code better - ret = SQLGetData_ptr(hStmt, i, SQL_C_CHAR, dataBuffer.data(), dataBuffer.size(), - &dataLen); - - if (SQL_SUCCEEDED(ret)) { - // TODO: Refactor these if's across other switches to avoid code duplication - // columnSize is in chars, dataLen is in bytes - if (dataLen > 0) { - uint64_t numCharsInData = dataLen / sizeof(SQLCHAR); - // NOTE: dataBuffer.size() includes null-terminator, dataLen doesn't. Hence use '<'. - if (numCharsInData < dataBuffer.size()) { - // SQLGetData will null-terminate the data -#if defined(__APPLE__) || defined(__linux__) - std::string fullStr(reinterpret_cast(dataBuffer.data())); - row.append(fullStr); - LOG("macOS/Linux: Appended CHAR string of length {} to result row", fullStr.length()); -#else - row.append(std::string(reinterpret_cast(dataBuffer.data()))); -#endif - } else { - // In this case, buffer size is smaller, and data to be retrieved is longer - // TODO: Revisit - std::ostringstream oss; - oss << "Buffer length for fetch (" << dataBuffer.size()-1 << ") is smaller, & data " - << "to be retrieved is longer (" << numCharsInData << "). 
ColumnID - " - << i << ", datatype - " << dataType; - ThrowStdException(oss.str()); + if (columnSize == SQL_NO_TOTAL || columnSize == 0 || columnSize > 8000) { + LOG("Streaming LOB for column {}", i); + row.append(FetchLobColumnData(hStmt, i, SQL_C_CHAR, false, false)); + } else { + uint64_t fetchBufferSize = columnSize + 1 /* null-termination */; + std::vector dataBuffer(fetchBufferSize); + SQLLEN dataLen; + ret = SQLGetData_ptr(hStmt, i, SQL_C_CHAR, dataBuffer.data(), dataBuffer.size(), + &dataLen); + if (SQL_SUCCEEDED(ret)) { + // columnSize is in chars, dataLen is in bytes + if (dataLen > 0) { + uint64_t numCharsInData = dataLen / sizeof(SQLCHAR); + if (numCharsInData < dataBuffer.size()) { + // SQLGetData will null-terminate the data + #if defined(__APPLE__) || defined(__linux__) + std::string fullStr(reinterpret_cast(dataBuffer.data())); + row.append(fullStr); + LOG("macOS/Linux: Appended CHAR string of length {} to result row", fullStr.length()); + #else + row.append(std::string(reinterpret_cast(dataBuffer.data()))); + #endif + } + } else if (dataLen == SQL_NULL_DATA) { + LOG("Column {} is NULL (CHAR)", i); + row.append(py::none()); + } else if (dataLen == 0) { + row.append(py::str("")); + } else { + assert(dataLen == SQL_NO_TOTAL); + LOG("SQLGetData couldn't determine the length of the data. " + "Returning NULL value instead. Column ID - {}", i); + row.append(py::none()); } - } else if (dataLen == SQL_NULL_DATA) { - row.append(py::none()); - } else if (dataLen == 0) { - // Handle zero-length (non-NULL) data - row.append(std::string("")); - } else if (dataLen == SQL_NO_TOTAL) { - // This means the length of the data couldn't be determined - LOG("SQLGetData couldn't determine the length of the data. " - "Returning NULL value instead. Column ID - {}, Data Type - {}", i, dataType); - } else if (dataLen < 0) { - // This is unexpected - LOG("SQLGetData returned an unexpected negative data length. " - "Raising exception. 
Column ID - {}, Data Type - {}, Data Length - {}", - i, dataType, dataLen); - ThrowStdException("SQLGetData returned an unexpected negative data length"); + } else { + LOG("Error retrieving data for column - {}, data type - {}, SQLGetData return " + "code - {}. Returning NULL value instead", + i, dataType, ret); + row.append(py::none()); } - } else { - LOG("Error retrieving data for column - {}, data type - {}, SQLGetData return " - "code - {}. Returning NULL value instead", - i, dataType, ret); - row.append(py::none()); } break; } @@ -1819,7 +1884,7 @@ SQLRETURN SQLGetData_wrap(SqlHandlePtr StatementHandle, SQLUSMALLINT colCount, p case SQL_WLONGVARCHAR: { // TODO: revisit HandleZeroColumnSizeAtFetch(columnSize); - uint64_t fetchBufferSize = columnSize + 1 /* null-termination */; + uint64_t fetchBufferSize = columnSize + 1 /* null-termination */; std::vector dataBuffer(fetchBufferSize); SQLLEN dataLen; ret = SQLGetData_ptr(hStmt, i, SQL_C_WCHAR, dataBuffer.data(), diff --git a/tests/test_004_cursor.py b/tests/test_004_cursor.py index 864a42a9d..81e80749a 100644 --- a/tests/test_004_cursor.py +++ b/tests/test_004_cursor.py @@ -5861,6 +5861,119 @@ def test_empty_string_chunk(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS #pytest_empty_string") db_connection.commit() + +def test_varcharmax_short(cursor, db_connection): + try: + cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") + cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") + db_connection.commit() + + short_str = "hello" + cursor.execute("INSERT INTO #pytest_varcharmax VALUES (?)", [short_str]) + db_connection.commit() + cursor.execute("SELECT col FROM #pytest_varcharmax WHERE col = ?", [short_str]) + assert cursor.fetchone()[0] == short_str + finally: + cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") + db_connection.commit() + + +def test_varcharmax_boundary(cursor, db_connection): + try: + cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") + 
cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") + db_connection.commit() + + boundary_str = "X" * 8000 + cursor.execute("INSERT INTO #pytest_varcharmax VALUES (?)", [boundary_str]) + db_connection.commit() + cursor.execute("SELECT col FROM #pytest_varcharmax WHERE col = ?", [boundary_str]) + assert cursor.fetchone()[0] == boundary_str + finally: + cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") + db_connection.commit() + + +def test_varcharmax_streaming(cursor, db_connection): + try: + cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") + cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") + db_connection.commit() + + streaming_str = "Y" * 8100 + cursor.execute("INSERT INTO #pytest_varcharmax VALUES (?)", [streaming_str]) + db_connection.commit() + cursor.execute("SELECT col FROM #pytest_varcharmax WHERE col = ?", [streaming_str]) + assert cursor.fetchone()[0] == streaming_str + finally: + cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") + db_connection.commit() + + +def test_varcharmax_large(cursor, db_connection): + try: + cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") + cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") + db_connection.commit() + + large_str = "Z" * 100_000 + cursor.execute("INSERT INTO #pytest_varcharmax VALUES (?)", [large_str]) + db_connection.commit() + cursor.execute("SELECT col FROM #pytest_varcharmax WHERE col = ?", [large_str]) + assert cursor.fetchone()[0] == large_str + finally: + cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") + db_connection.commit() + + +def test_varcharmax_empty_string(cursor, db_connection): + try: + cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") + cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") + db_connection.commit() + + cursor.execute("INSERT INTO #pytest_varcharmax VALUES (?)", [""]) + db_connection.commit() + cursor.execute("SELECT col FROM #pytest_varcharmax WHERE col = ?", 
[""]) + assert cursor.fetchone()[0] == "" + finally: + cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") + db_connection.commit() + + +def test_varcharmax_null(cursor, db_connection): + try: + cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") + cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") + db_connection.commit() + + cursor.execute("INSERT INTO #pytest_varcharmax VALUES (?)", [None]) + db_connection.commit() + cursor.execute("SELECT col FROM #pytest_varcharmax WHERE col IS NULL") + assert cursor.fetchone()[0] is None + finally: + cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") + db_connection.commit() + + +def test_varcharmax_transaction_rollback(cursor, db_connection): + try: + cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") + cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") + db_connection.commit() + + db_connection.autocommit = False + rollback_str = "ROLLBACK" * 2000 + cursor.execute("INSERT INTO #pytest_varcharmax VALUES (?)", [rollback_str]) + db_connection.rollback() + cursor.execute("SELECT COUNT(*) FROM #pytest_varcharmax WHERE col = ?", [rollback_str]) + assert cursor.fetchone()[0] == 0 + finally: + db_connection.autocommit = True # reset state + cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") + db_connection.commit() + + def test_empty_char_single_and_batch_fetch(cursor, db_connection): """Test that empty CHAR data is handled correctly in both single and batch fetch""" try: From f815937f0ad0e411b68bc06876b45f44b0b9f6a6 Mon Sep 17 00:00:00 2001 From: gargsaumya Date: Wed, 3 Sep 2025 17:59:42 +0530 Subject: [PATCH 02/11] uncomment log --- mssql_python/cursor.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/mssql_python/cursor.py b/mssql_python/cursor.py index 7b130ec84..7e8c58f8b 100644 --- a/mssql_python/cursor.py +++ b/mssql_python/cursor.py @@ -753,20 +753,20 @@ def execute( # Executing a new statement. 
Reset is_stmt_prepared to false self.is_stmt_prepared = [False] - # log('debug', "Executing query: %s", operation) - # for i, param in enumerate(parameters): - # log('debug', - # """Parameter number: %s, Parameter: %s, - # Param Python Type: %s, ParamInfo: %s, %s, %s, %s, %s""", - # i + 1, - # param, - # str(type(param)), - # parameters_type[i].paramSQLType, - # parameters_type[i].paramCType, - # parameters_type[i].columnSize, - # parameters_type[i].decimalDigits, - # parameters_type[i].inputOutputType, - # ) + log('debug', "Executing query: %s", operation) + for i, param in enumerate(parameters): + log('debug', + """Parameter number: %s, Parameter: %s, + Param Python Type: %s, ParamInfo: %s, %s, %s, %s, %s""", + i + 1, + param, + str(type(param)), + parameters_type[i].paramSQLType, + parameters_type[i].paramCType, + parameters_type[i].columnSize, + parameters_type[i].decimalDigits, + parameters_type[i].inputOutputType, + ) ret = ddbc_bindings.DDBCSQLExecute( self.hstmt, From d58a1c335dbb275476a37f761ad88a117b243ad3 Mon Sep 17 00:00:00 2001 From: gargsaumya Date: Wed, 3 Sep 2025 18:08:47 +0530 Subject: [PATCH 03/11] copilot comment --- mssql_python/pybind/ddbc_bindings.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mssql_python/pybind/ddbc_bindings.cpp b/mssql_python/pybind/ddbc_bindings.cpp index 8b575fdd9..23a6d659d 100644 --- a/mssql_python/pybind/ddbc_bindings.cpp +++ b/mssql_python/pybind/ddbc_bindings.cpp @@ -1767,7 +1767,7 @@ static py::object FetchLobColumnData(SQLHSTMT hStmt, if (!isWideChar && chunk[copyCount - 1] == '\0') { --copyCount; LOG("Loop {}: Trimmed null terminator (narrow)", loopCount); - } else if (isWideChar) { + } else if (copyCount >= sizeof(wchar_t)) { auto wcharBuf = reinterpret_cast(chunk.data()); if (wcharBuf[(copyCount / sizeof(wchar_t)) - 1] == L'\0') { copyCount -= sizeof(wchar_t); @@ -1785,7 +1785,7 @@ static py::object FetchLobColumnData(SQLHSTMT hStmt, } } LOG("FetchLobColumnData: Total bytes collected = 
{}", buffer.size()); - + if (indicator == 0 || buffer.empty()) { LOG("FetchLobColumnData: Returning empty string for col {}", colIndex); return py::str(""); From 2ddc09f014b546ff0a2a3d311c8b71797870410f Mon Sep 17 00:00:00 2001 From: gargsaumya Date: Wed, 10 Sep 2025 12:55:20 +0530 Subject: [PATCH 04/11] fix comments --- mssql_python/pybind/CMakeLists.txt | 2 +- mssql_python/pybind/ddbc_bindings.cpp | 199 +++++++++++++++++++++----- mssql_python/pybind/ddbc_bindings.h | 12 ++ 3 files changed, 179 insertions(+), 34 deletions(-) diff --git a/mssql_python/pybind/CMakeLists.txt b/mssql_python/pybind/CMakeLists.txt index 489dfd459..8f58b31c9 100644 --- a/mssql_python/pybind/CMakeLists.txt +++ b/mssql_python/pybind/CMakeLists.txt @@ -272,7 +272,7 @@ target_compile_definitions(ddbc_bindings PRIVATE # Add warning level flags for MSVC if(MSVC) - target_compile_options(ddbc_bindings PRIVATE /W4 /WX) + target_compile_options(ddbc_bindings PRIVATE /W4 ) endif() # Add macOS-specific string conversion fix diff --git a/mssql_python/pybind/ddbc_bindings.cpp b/mssql_python/pybind/ddbc_bindings.cpp index 23a6d659d..640127190 100644 --- a/mssql_python/pybind/ddbc_bindings.cpp +++ b/mssql_python/pybind/ddbc_bindings.cpp @@ -1725,6 +1725,112 @@ SQLRETURN SQLFetch_wrap(SqlHandlePtr StatementHandle) { return SQLFetch_ptr(StatementHandle->get()); } +// static py::object FetchLobColumnData(SQLHSTMT hStmt, +// SQLUSMALLINT colIndex, +// SQLSMALLINT cType, +// bool isWideChar, +// bool isBinary) +// { +// std::vector buffer; +// SQLLEN indicator = 0; +// SQLRETURN ret; +// int loopCount = 0; + +// while (true) { +// ++loopCount; +// std::vector chunk(DAE_CHUNK_SIZE); +// ret = SQLGetData_ptr( +// hStmt, +// colIndex, +// cType, +// chunk.data(), +// DAE_CHUNK_SIZE, +// &indicator +// ); +// if (indicator == SQL_NULL_DATA) { +// LOG("Loop {}: Column {} is NULL", loopCount, colIndex); +// return py::none(); +// } +// if (!SQL_SUCCEEDED(ret) && ret != SQL_SUCCESS_WITH_INFO) { +// LOG("Loop {}: 
Error fetching col={} with cType={} ret={}", loopCount, colIndex, cType, ret); +// ThrowStdException("Error fetching column data"); +// } +// // SQLLEN copyCount = 0; +// SQLLEN copyCount = DAE_CHUNK_SIZE; +// if (indicator >= 0 && indicator != SQL_NO_TOTAL) { +// copyCount = std::min(indicator - buffer.size(), DAE_CHUNK_SIZE); +// } +// // else { +// // copyCount = DAE_CHUNK_SIZE; +// // } +// if (isWideChar && (copyCount % sizeof(SQLWCHAR) != 0)) { +// LOG("Loop {}: Warning – copyCount {} not multiple of {}", loopCount, copyCount, sizeof(SQLWCHAR)); +// copyCount -= copyCount % sizeof(SQLWCHAR); +// } + + +// // Check if last byte(s) is a null terminator +// if (copyCount > 0) { +// if (!isWideChar && chunk[copyCount - 1] == '\0') { +// --copyCount; +// LOG("Loop {}: Trimmed null terminator (narrow)", loopCount); +// } else if (copyCount >= sizeof(SQLWCHAR)) { +// auto wcharBuf = reinterpret_cast(chunk.data()); +// if (wcharBuf[(copyCount / sizeof(SQLWCHAR)) - 1] == L'\0') { +// copyCount -= sizeof(SQLWCHAR); +// LOG("Loop {}: Trimmed null terminator (wide)", loopCount); +// } +// } +// } +// if (copyCount > 0) { +// buffer.insert(buffer.end(), chunk.begin(), chunk.begin() + copyCount); +// LOG("Loop {}: Appended {} bytes", loopCount, copyCount); +// } +// if (ret == SQL_SUCCESS) { +// LOG("Loop {}: SQL_SUCCESS → no more data", loopCount); +// break; +// } +// } +// LOG("FetchLobColumnData: Total bytes collected = {}", buffer.size()); + +// if (indicator == 0 || buffer.empty()) { +// LOG("FetchLobColumnData: Returning empty string for col {}", colIndex); +// return py::str(""); +// } + + // if (isWideChar) { + // // std::wstring wstr(reinterpret_cast(buffer.data()), + // // buffer.size() / sizeof(wchar_t)); + // // LOG("FetchLobColumnData: Returning wide string of length {}", wstr.length()); + // // return py::cast(wstr); + // std::wstring wstr = SQLWCHARToWString(reinterpret_cast(buffer.data()), buffer.size() / sizeof(SQLWCHAR)); + // LOG("FetchLobColumnData: 
Returning wide string of length {}", wstr.length()); + // return py::cast(wstr); +// // } +// if (isWideChar) { +// if (buffer.size() % sizeof(SQLWCHAR) != 0) { +// LOG("FetchLobColumnData: Buffer size {} not aligned with {}", buffer.size(), sizeof(SQLWCHAR)); +// throw std::runtime_error("Invalid wide char buffer size"); +// } +// #ifdef _WIN32 +// std::wstring wstr(reinterpret_cast(buffer.data()), buffer.size() / sizeof(wchar_t)); +// #else +// size_t length = buffer.size() / sizeof(SQLWCHAR); +// std::wstring wstr = SQLWCHARToWString(reinterpret_cast(buffer.data()), length); +// #endif +// LOG("FetchLobColumnData: Returning wide string of length {}", wstr.length()); +// return py::cast(wstr); +// } +// if (isBinary) { + +// LOG("FetchLobColumnData: Returning binary of {} bytes", buffer.size()); +// return py::bytes(buffer.data(), buffer.size()); +// } +// std::string str(buffer.data(), buffer.size()); +// LOG("FetchLobColumnData: Returning narrow string of length {}", str.length()); +// return py::str(str); +// } + static py::object FetchLobColumnData(SQLHSTMT hStmt, SQLUSMALLINT colIndex, SQLSMALLINT cType, @@ -1732,80 +1838,107 @@ static py::object FetchLobColumnData(SQLHSTMT hStmt, bool isBinary) { std::vector buffer; - SQLLEN indicator = 0; - SQLRETURN ret; + SQLRETURN ret = SQL_SUCCESS_WITH_INFO; int loopCount = 0; while (true) { ++loopCount; - std::vector chunk(DAE_CHUNK_SIZE); - ret = SQLGetData_ptr( - hStmt, - colIndex, - cType, - chunk.data(), - DAE_CHUNK_SIZE, - &indicator - ); + + std::vector chunk(DAE_CHUNK_SIZE, 0); // Fill with zeros to handle padding safely + SQLLEN indicator = 0; + + ret = SQLGetData_ptr(hStmt, + colIndex, + cType, + chunk.data(), + DAE_CHUNK_SIZE, + &indicator); + if (indicator == SQL_NULL_DATA) { LOG("Loop {}: Column {} is NULL", loopCount, colIndex); return py::none(); } if (!SQL_SUCCEEDED(ret) && ret != SQL_SUCCESS_WITH_INFO) { - LOG("Loop {}: Error fetching col={} with cType={} ret={}", loopCount, colIndex, cType, ret); - 
return py::none(); + LOG("Loop {}: Error fetching column {} with cType={} ret={}", loopCount, colIndex, cType, ret); + ThrowStdException("Error fetching column data"); } - SQLLEN copyCount = 0; + + size_t bytesRead = 0; + + // Determine how many bytes to process if (indicator > 0 && indicator != SQL_NO_TOTAL) { - copyCount = std::min(indicator, DAE_CHUNK_SIZE); + bytesRead = std::min(static_cast(indicator), DAE_CHUNK_SIZE); } else { - copyCount = DAE_CHUNK_SIZE; + // If unknown, assume full buffer minus possible null terminator padding + bytesRead = DAE_CHUNK_SIZE; } - // Check if last byte(s) is a null terminator - if (copyCount > 0) { - if (!isWideChar && chunk[copyCount - 1] == '\0') { - --copyCount; - LOG("Loop {}: Trimmed null terminator (narrow)", loopCount); - } else if (copyCount >= sizeof(wchar_t)) { - auto wcharBuf = reinterpret_cast(chunk.data()); - if (wcharBuf[(copyCount / sizeof(wchar_t)) - 1] == L'\0') { - copyCount -= sizeof(wchar_t); - LOG("Loop {}: Trimmed null terminator (wide)", loopCount); + // For character data, trim trailing null terminators + if (!isBinary && bytesRead > 0) { + if (!isWideChar) { + while (bytesRead > 0 && chunk[bytesRead - 1] == '\0') { + --bytesRead; + } + if (bytesRead < DAE_CHUNK_SIZE) { + LOG("Loop {}: Trimmed null terminator (narrow)", loopCount); + } + } else { + // Wide characters + size_t wcharSize = sizeof(SQLWCHAR); + if (bytesRead >= wcharSize) { + auto wcharBuf = reinterpret_cast(chunk.data()); + size_t wcharCount = bytesRead / wcharSize; + while (wcharCount > 0 && wcharBuf[wcharCount - 1] == 0) { + --wcharCount; + bytesRead -= wcharSize; + } + if (bytesRead < DAE_CHUNK_SIZE) { + LOG("Loop {}: Trimmed null terminator (wide)", loopCount); + } } } } - if (copyCount > 0) { - buffer.insert(buffer.end(), chunk.begin(), chunk.begin() + copyCount); - LOG("Loop {}: Appended {} bytes", loopCount, copyCount); + + if (bytesRead > 0) { + buffer.insert(buffer.end(), chunk.begin(), chunk.begin() + bytesRead); + LOG("Loop {}: 
Appended {} bytes", loopCount, bytesRead); } + if (ret == SQL_SUCCESS) { LOG("Loop {}: SQL_SUCCESS → no more data", loopCount); break; } } + LOG("FetchLobColumnData: Total bytes collected = {}", buffer.size()); - if (indicator == 0 || buffer.empty()) { - LOG("FetchLobColumnData: Returning empty string for col {}", colIndex); + // If buffer is empty, return empty string or bytes + if (buffer.empty()) { + if (isBinary) { + return py::bytes(""); + } return py::str(""); } + // Convert the collected buffer to appropriate Python type if (isWideChar) { - std::wstring wstr(reinterpret_cast(buffer.data()), - buffer.size() / sizeof(wchar_t)); + std::wstring wstr = SQLWCHARToWString(reinterpret_cast(buffer.data()), buffer.size() / sizeof(SQLWCHAR)); LOG("FetchLobColumnData: Returning wide string of length {}", wstr.length()); return py::cast(wstr); } + if (isBinary) { LOG("FetchLobColumnData: Returning binary of {} bytes", buffer.size()); return py::bytes(buffer.data(), buffer.size()); } + + // Default: narrow string std::string str(buffer.data(), buffer.size()); LOG("FetchLobColumnData: Returning narrow string of length {}", str.length()); return py::str(str); } + // Helper function to retrieve column data SQLRETURN SQLGetData_wrap(SqlHandlePtr StatementHandle, SQLUSMALLINT colCount, py::list& row) { LOG("Get data from columns"); diff --git a/mssql_python/pybind/ddbc_bindings.h b/mssql_python/pybind/ddbc_bindings.h index 521a007bb..81e429527 100644 --- a/mssql_python/pybind/ddbc_bindings.h +++ b/mssql_python/pybind/ddbc_bindings.h @@ -38,6 +38,18 @@ inline std::vector WStringToSQLWCHAR(const std::wstring& str) { result.push_back(0); return result; } + +inline std::wstring SQLWCHARToWString(const SQLWCHAR* sqlwStr, size_t length = SQL_NTS) { + if (!sqlwStr) return std::wstring(); + + if (length == SQL_NTS) { + size_t i = 0; + while (sqlwStr[i] != 0) ++i; + length = i; + } + return std::wstring(reinterpret_cast(sqlwStr), length); +} + #endif #if defined(__APPLE__) || 
defined(__linux__) From d9c257fee56fa58ecda984273295a74296fbacae Mon Sep 17 00:00:00 2001 From: gargsaumya Date: Wed, 10 Sep 2025 16:39:08 +0530 Subject: [PATCH 05/11] fix review comments --- mssql_python/pybind/ddbc_bindings.cpp | 164 +++++--------------------- 1 file changed, 28 insertions(+), 136 deletions(-) diff --git a/mssql_python/pybind/ddbc_bindings.cpp b/mssql_python/pybind/ddbc_bindings.cpp index 640127190..3a5c85a01 100644 --- a/mssql_python/pybind/ddbc_bindings.cpp +++ b/mssql_python/pybind/ddbc_bindings.cpp @@ -1725,112 +1725,6 @@ SQLRETURN SQLFetch_wrap(SqlHandlePtr StatementHandle) { return SQLFetch_ptr(StatementHandle->get()); } -// static py::object FetchLobColumnData(SQLHSTMT hStmt, -// SQLUSMALLINT colIndex, -// SQLSMALLINT cType, -// bool isWideChar, -// bool isBinary) -// { -// std::vector buffer; -// SQLLEN indicator = 0; -// SQLRETURN ret; -// int loopCount = 0; - -// while (true) { -// ++loopCount; -// std::vector chunk(DAE_CHUNK_SIZE); -// ret = SQLGetData_ptr( -// hStmt, -// colIndex, -// cType, -// chunk.data(), -// DAE_CHUNK_SIZE, -// &indicator -// ); -// if (indicator == SQL_NULL_DATA) { -// LOG("Loop {}: Column {} is NULL", loopCount, colIndex); -// return py::none(); -// } -// if (!SQL_SUCCEEDED(ret) && ret != SQL_SUCCESS_WITH_INFO) { -// LOG("Loop {}: Error fetching col={} with cType={} ret={}", loopCount, colIndex, cType, ret); -// ThrowStdException("Error fetching column data"); -// } -// // SQLLEN copyCount = 0; -// SQLLEN copyCount = DAE_CHUNK_SIZE; -// if (indicator >= 0 && indicator != SQL_NO_TOTAL) { -// copyCount = std::min(indicator - buffer.size(), DAE_CHUNK_SIZE); -// } -// // else { -// // copyCount = DAE_CHUNK_SIZE; -// // } -// if (isWideChar && (copyCount % sizeof(SQLWCHAR) != 0)) { -// LOG("Loop {}: Warning – copyCount {} not multiple of {}", loopCount, copyCount, sizeof(SQLWCHAR)); -// copyCount -= copyCount % sizeof(SQLWCHAR); -// } - - -// // Check if last byte(s) is a null terminator -// if (copyCount > 0) { -// 
if (!isWideChar && chunk[copyCount - 1] == '\0') { -// --copyCount; -// LOG("Loop {}: Trimmed null terminator (narrow)", loopCount); -// } else if (copyCount >= sizeof(SQLWCHAR)) { -// auto wcharBuf = reinterpret_cast(chunk.data()); -// if (wcharBuf[(copyCount / sizeof(SQLWCHAR)) - 1] == L'\0') { -// copyCount -= sizeof(SQLWCHAR); -// LOG("Loop {}: Trimmed null terminator (wide)", loopCount); -// } -// } -// } -// if (copyCount > 0) { -// buffer.insert(buffer.end(), chunk.begin(), chunk.begin() + copyCount); -// LOG("Loop {}: Appended {} bytes", loopCount, copyCount); -// } -// if (ret == SQL_SUCCESS) { -// LOG("Loop {}: SQL_SUCCESS → no more data", loopCount); -// break; -// } -// } -// LOG("FetchLobColumnData: Total bytes collected = {}", buffer.size()); - -// if (indicator == 0 || buffer.empty()) { -// LOG("FetchLobColumnData: Returning empty string for col {}", colIndex); -// return py::str(""); -// } - - // if (isWideChar) { - // // std::wstring wstr(reinterpret_cast(buffer.data()), - // // buffer.size() / sizeof(wchar_t)); - // // LOG("FetchLobColumnData: Returning wide string of length {}", wstr.length()); - // // return py::cast(wstr); - // std::wstring wstr = SQLWCHARToWString(reinterpret_cast(buffer.data()), buffer.size() / sizeof(SQLWCHAR)); - // LOG("FetchLobColumnData: Returning wide string of length {}", wstr.length()); - // return py::cast(wstr); -// // } -// if (isWideChar) { -// if (buffer.size() % sizeof(SQLWCHAR) != 0) { -// LOG("FetchLobColumnData: Buffer size {} not aligned with {}", buffer.size(), sizeof(SQLWCHAR)); -// throw std::runtime_error("Invalid wide char buffer size"); -// } -// #ifdef _WIN32 -// std::wstring wstr(reinterpret_cast(buffer.data()), buffer.size() / sizeof(wchar_t)); -// #else -// size_t length = buffer.size() / sizeof(SQLWCHAR); -// std::wstring wstr = SQLWCHARToWString(reinterpret_cast(buffer.data()), length); -// #endif -// LOG("FetchLobColumnData: Returning wide string of length {}", wstr.length()); -// return 
py::cast(wstr); -// } -// if (isBinary) { - -// LOG("FetchLobColumnData: Returning binary of {} bytes", buffer.size()); -// return py::bytes(buffer.data(), buffer.size()); -// } -// std::string str(buffer.data(), buffer.size()); -// LOG("FetchLobColumnData: Returning narrow string of length {}", str.length()); -// return py::str(str); -// } - static py::object FetchLobColumnData(SQLHSTMT hStmt, SQLUSMALLINT colIndex, SQLSMALLINT cType, @@ -1843,39 +1737,39 @@ static py::object FetchLobColumnData(SQLHSTMT hStmt, while (true) { ++loopCount; - - std::vector chunk(DAE_CHUNK_SIZE, 0); // Fill with zeros to handle padding safely - SQLLEN indicator = 0; - + std::vector chunk(DAE_CHUNK_SIZE, 0); + SQLLEN actualRead = 0; ret = SQLGetData_ptr(hStmt, colIndex, cType, chunk.data(), DAE_CHUNK_SIZE, - &indicator); + &actualRead); - if (indicator == SQL_NULL_DATA) { + if (ret == SQL_ERROR || !SQL_SUCCEEDED(ret) && ret != SQL_SUCCESS_WITH_INFO) { + LOG("Loop {}: Error fetching column {} with cType={}", loopCount, colIndex, cType); + ThrowStdException("Error fetching column data"); + } + if (actualRead == SQL_NULL_DATA) { LOG("Loop {}: Column {} is NULL", loopCount, colIndex); return py::none(); } - if (!SQL_SUCCEEDED(ret) && ret != SQL_SUCCESS_WITH_INFO) { - LOG("Loop {}: Error fetching column {} with cType={} ret={}", loopCount, colIndex, cType, ret); - ThrowStdException("Error fetching column data"); - } size_t bytesRead = 0; - - // Determine how many bytes to process - if (indicator > 0 && indicator != SQL_NO_TOTAL) { - bytesRead = std::min(static_cast(indicator), DAE_CHUNK_SIZE); + if (actualRead >= 0) { + bytesRead = static_cast(actualRead); + if (bytesRead > DAE_CHUNK_SIZE) { + bytesRead = DAE_CHUNK_SIZE; + } } else { - // If unknown, assume full buffer minus possible null terminator padding + // fallback: use full buffer size if actualRead is unknown bytesRead = DAE_CHUNK_SIZE; } // For character data, trim trailing null terminators if (!isBinary && bytesRead > 0) { if 
(!isWideChar) { + // Narrow characters while (bytesRead > 0 && chunk[bytesRead - 1] == '\0') { --bytesRead; } @@ -1886,9 +1780,9 @@ static py::object FetchLobColumnData(SQLHSTMT hStmt, // Wide characters size_t wcharSize = sizeof(SQLWCHAR); if (bytesRead >= wcharSize) { - auto wcharBuf = reinterpret_cast(chunk.data()); + auto sqlwBuf = reinterpret_cast(chunk.data()); size_t wcharCount = bytesRead / wcharSize; - while (wcharCount > 0 && wcharBuf[wcharCount - 1] == 0) { + while (wcharCount > 0 && sqlwBuf[wcharCount - 1] == 0) { --wcharCount; bytesRead -= wcharSize; } @@ -1898,47 +1792,45 @@ static py::object FetchLobColumnData(SQLHSTMT hStmt, } } } - if (bytesRead > 0) { buffer.insert(buffer.end(), chunk.begin(), chunk.begin() + bytesRead); LOG("Loop {}: Appended {} bytes", loopCount, bytesRead); } - if (ret == SQL_SUCCESS) { LOG("Loop {}: SQL_SUCCESS → no more data", loopCount); break; } } - LOG("FetchLobColumnData: Total bytes collected = {}", buffer.size()); - // If buffer is empty, return empty string or bytes if (buffer.empty()) { if (isBinary) { return py::bytes(""); } return py::str(""); } - - // Convert the collected buffer to appropriate Python type if (isWideChar) { - std::wstring wstr = SQLWCHARToWString(reinterpret_cast(buffer.data()), buffer.size() / sizeof(SQLWCHAR)); - LOG("FetchLobColumnData: Returning wide string of length {}", wstr.length()); - return py::cast(wstr); +#if defined(_WIN32) + std::wstring wstr(reinterpret_cast(buffer.data()), buffer.size() / sizeof(wchar_t)); + std::string utf8str = WideToUTF8(wstr); + return py::str(utf8str); +#else + // Linux/macOS handling + size_t wcharCount = buffer.size() / sizeof(SQLWCHAR); + const SQLWCHAR* sqlwBuf = reinterpret_cast(buffer.data()); + std::string utf8str = SQLWCHARToUTF8String(sqlwBuf, wcharCount); + return py::str(utf8str); +#endif } - if (isBinary) { LOG("FetchLobColumnData: Returning binary of {} bytes", buffer.size()); return py::bytes(buffer.data(), buffer.size()); } - - // Default: narrow 
string std::string str(buffer.data(), buffer.size()); LOG("FetchLobColumnData: Returning narrow string of length {}", str.length()); return py::str(str); } - // Helper function to retrieve column data SQLRETURN SQLGetData_wrap(SqlHandlePtr StatementHandle, SQLUSMALLINT colCount, py::list& row) { LOG("Get data from columns"); From 133921dccadef007e1f06aadd8c2a3d28cab8722 Mon Sep 17 00:00:00 2001 From: gargsaumya Date: Thu, 11 Sep 2025 10:48:02 +0530 Subject: [PATCH 06/11] resolved comments --- mssql_python/pybind/CMakeLists.txt | 2 +- mssql_python/pybind/ddbc_bindings.cpp | 10 +++++++--- tests/test_004_cursor.py | 14 ++++++++++++++ 3 files changed, 22 insertions(+), 4 deletions(-) diff --git a/mssql_python/pybind/CMakeLists.txt b/mssql_python/pybind/CMakeLists.txt index 8f58b31c9..489dfd459 100644 --- a/mssql_python/pybind/CMakeLists.txt +++ b/mssql_python/pybind/CMakeLists.txt @@ -272,7 +272,7 @@ target_compile_definitions(ddbc_bindings PRIVATE # Add warning level flags for MSVC if(MSVC) - target_compile_options(ddbc_bindings PRIVATE /W4 ) + target_compile_options(ddbc_bindings PRIVATE /W4 /WX) endif() # Add macOS-specific string conversion fix diff --git a/mssql_python/pybind/ddbc_bindings.cpp b/mssql_python/pybind/ddbc_bindings.cpp index 3a5c85a01..bfca7b148 100644 --- a/mssql_python/pybind/ddbc_bindings.cpp +++ b/mssql_python/pybind/ddbc_bindings.cpp @@ -1889,11 +1889,15 @@ SQLRETURN SQLGetData_wrap(SqlHandlePtr StatementHandle, SQLUSMALLINT colCount, p row.append(py::none()); } else if (dataLen == 0) { row.append(py::str("")); - } else { - assert(dataLen == SQL_NO_TOTAL); + } else if (dataLen == SQL_NO_TOTAL) { LOG("SQLGetData couldn't determine the length of the data. " - "Returning NULL value instead. Column ID - {}", i); + "Returning NULL value instead. Column ID - {}, Data Type - {}", i, dataType); row.append(py::none()); + } else if (dataLen < 0) { + LOG("SQLGetData returned an unexpected negative data length. " + "Raising exception. 
Column ID - {}, Data Type - {}, Data Length - {}", + i, dataType, dataLen); + ThrowStdException("SQLGetData returned an unexpected negative data length"); } } else { LOG("Error retrieving data for column - {}, data type - {}, SQLGetData return " diff --git a/tests/test_004_cursor.py b/tests/test_004_cursor.py index 81e80749a..3f637ba72 100644 --- a/tests/test_004_cursor.py +++ b/tests/test_004_cursor.py @@ -5863,6 +5863,8 @@ def test_empty_string_chunk(cursor, db_connection): def test_varcharmax_short(cursor, db_connection): + """Test inserting and retrieving a small string well below any size thresholds. + # Verifies basic functionality for VARCHAR(MAX) with typical input size.""" try: cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") @@ -5879,6 +5881,9 @@ def test_varcharmax_short(cursor, db_connection): def test_varcharmax_boundary(cursor, db_connection): + """Test inserting and retrieving a string at the boundary size (8000 characters), + which is the largest size supported without switching to streaming or large object handling. + Ensures proper handling at this edge case.""" try: cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") @@ -5895,6 +5900,9 @@ def test_varcharmax_boundary(cursor, db_connection): def test_varcharmax_streaming(cursor, db_connection): + """Test inserting and retrieving a string just above the boundary size (8100 characters), + which requires streaming mechanisms to handle data efficiently. 
+ Validates that larger data triggers correct processing without truncation.""" try: cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") @@ -5911,6 +5919,8 @@ def test_varcharmax_streaming(cursor, db_connection): def test_varcharmax_large(cursor, db_connection): + """Test inserting and retrieving a very large string (100,000 characters), + which is well beyond typical sizes and ensures that the system can handle large VARCHAR(MAX) values.""" try: cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") @@ -5927,6 +5937,7 @@ def test_varcharmax_large(cursor, db_connection): def test_varcharmax_empty_string(cursor, db_connection): + """Test inserting and retrieving an empty string to verify correct handling of zero-length data.""" try: cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") @@ -5942,6 +5953,7 @@ def test_varcharmax_empty_string(cursor, db_connection): def test_varcharmax_null(cursor, db_connection): + """Test inserting and retrieving a NULL value to ensure proper handling of SQL NULLs.""" try: cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") @@ -5957,6 +5969,8 @@ def test_varcharmax_null(cursor, db_connection): def test_varcharmax_transaction_rollback(cursor, db_connection): + """Test that inserting a large VARCHAR(MAX) within a transaction that is rolled back + does not persist the data, ensuring transactional integrity.""" try: cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") From 7f67326c38b4430df4f326fe9c5eb09f65910bba Mon Sep 17 00:00:00 2001 From: gargsaumya Date: Thu, 11 Sep 2025 11:19:53 +0530 Subject: [PATCH 07/11] fix linux --- mssql_python/pybind/ddbc_bindings.cpp | 3 ++- 1 
file changed, 2 insertions(+), 1 deletion(-) diff --git a/mssql_python/pybind/ddbc_bindings.cpp b/mssql_python/pybind/ddbc_bindings.cpp index bfca7b148..11ba09cf7 100644 --- a/mssql_python/pybind/ddbc_bindings.cpp +++ b/mssql_python/pybind/ddbc_bindings.cpp @@ -1818,7 +1818,8 @@ static py::object FetchLobColumnData(SQLHSTMT hStmt, // Linux/macOS handling size_t wcharCount = buffer.size() / sizeof(SQLWCHAR); const SQLWCHAR* sqlwBuf = reinterpret_cast(buffer.data()); - std::string utf8str = SQLWCHARToUTF8String(sqlwBuf, wcharCount); + std::wstring wstr = SQLWCHARToWString(sqlwBuf, wcharCount); + std::string utf8str = WideToUTF8(wstr); return py::str(utf8str); #endif } From fba171c480d6c00f61e63680b2c4cdcf7c46d850 Mon Sep 17 00:00:00 2001 From: gargsaumya Date: Mon, 15 Sep 2025 08:29:07 +0530 Subject: [PATCH 08/11] FEAT: streaming support in fetchone for nvarcharmax data type (#220) ### Work Item / Issue Reference > [AB#38110](https://sqlclientdrivers.visualstudio.com/c6d89619-62de-46a0-8b46-70b92a84d85e/_workitems/edit/38110) [AB#34162](https://sqlclientdrivers.visualstudio.com/c6d89619-62de-46a0-8b46-70b92a84d85e/_workitems/edit/34162) > GitHub Issue: # ------------------------------------------------------------------- ### Summary This pull request improves NVARCHAR data handling in the SQL Server Python bindings and adds comprehensive tests for NVARCHAR(MAX) scenarios. The main changes include switching to streaming for large NVARCHAR values, optimizing direct fetch for smaller values, and adding tests for edge cases and boundaries to ensure correctness. **NVARCHAR data handling improvements:** * Updated the logic in `ddbc_bindings.cpp` to use streaming for large NVARCHAR/NCHAR columns (over 4000 characters or unknown size) and direct fetch for smaller values, optimizing performance and reliability. * Refactored data conversion for NVARCHAR fetches, using `std::wstring` for conversion and simplifying platform-specific handling for both macOS/Linux and Windows. 
* Improved handling of empty strings and NULLs for NVARCHAR columns, ensuring correct Python types are returned and logging is more descriptive. **Testing enhancements:** * Added new tests in `test_004_cursor.py` for NVARCHAR(MAX) covering short strings, boundary conditions (4000 chars), streaming (4100+ chars), large values (100,000 chars), empty strings, NULLs, and transaction rollback scenarios to verify correct behavior across all edge cases. **VARCHAR/CHAR fetch improvements:** * Improved direct fetch logic for small VARCHAR/CHAR columns and fixed string conversion to use the actual data length, preventing potential issues with null-termination and buffer size. [[1]](diffhunk://#diff-dde2297345718ec449a14e7dff91b7bb2342b008ecc071f562233646d71144a1R1825-R1830) [[2]](diffhunk://#diff-dde2297345718ec449a14e7dff91b7bb2342b008ecc071f562233646d71144a1L1841-L1850) --- mssql_python/pybind/ddbc_bindings.cpp | 208 +++++---- tests/test_004_cursor.py | 614 ++++++++++++-------------- 2 files changed, 397 insertions(+), 425 deletions(-) diff --git a/mssql_python/pybind/ddbc_bindings.cpp b/mssql_python/pybind/ddbc_bindings.cpp index 11ba09cf7..8314d2550 100644 --- a/mssql_python/pybind/ddbc_bindings.cpp +++ b/mssql_python/pybind/ddbc_bindings.cpp @@ -31,6 +31,7 @@ #define ARCHITECTURE "win64" // Default to win64 if not defined during compilation #endif #define DAE_CHUNK_SIZE 8192 +#define SQL_MAX_LOB_SIZE 8000 //------------------------------------------------------------------------------------------------- // Class definitions //------------------------------------------------------------------------------------------------- @@ -1747,8 +1748,13 @@ static py::object FetchLobColumnData(SQLHSTMT hStmt, &actualRead); if (ret == SQL_ERROR || !SQL_SUCCEEDED(ret) && ret != SQL_SUCCESS_WITH_INFO) { - LOG("Loop {}: Error fetching column {} with cType={}", loopCount, colIndex, cType); - ThrowStdException("Error fetching column data"); + std::ostringstream oss; + oss << "Error 
fetching LOB for column " << colIndex + << ", cType=" << cType + << ", loop=" << loopCount + << ", SQLGetData return=" << ret; + LOG(oss.str()); + ThrowStdException(oss.str()); } if (actualRead == SQL_NULL_DATA) { LOG("Loop {}: Column {} is NULL", loopCount, colIndex); @@ -1862,7 +1868,7 @@ SQLRETURN SQLGetData_wrap(SqlHandlePtr StatementHandle, SQLUSMALLINT colCount, p case SQL_CHAR: case SQL_VARCHAR: case SQL_LONGVARCHAR: { - if (columnSize == SQL_NO_TOTAL || columnSize == 0 || columnSize > 8000) { + if (columnSize == SQL_NO_TOTAL || columnSize == 0 || columnSize > SQL_MAX_LOB_SIZE) { LOG("Streaming LOB for column {}", i); row.append(FetchLobColumnData(hStmt, i, SQL_C_CHAR, false, false)); } else { @@ -1884,6 +1890,10 @@ SQLRETURN SQLGetData_wrap(SqlHandlePtr StatementHandle, SQLUSMALLINT colCount, p #else row.append(std::string(reinterpret_cast(dataBuffer.data()))); #endif + } else { + // Buffer too small, fallback to streaming + LOG("CHAR column {} data truncated, using streaming LOB", i); + row.append(FetchLobColumnData(hStmt, i, SQL_C_CHAR, false, false)); } } else if (dataLen == SQL_NULL_DATA) { LOG("Column {} is NULL (CHAR)", i); @@ -1911,62 +1921,53 @@ SQLRETURN SQLGetData_wrap(SqlHandlePtr StatementHandle, SQLUSMALLINT colCount, p } case SQL_WCHAR: case SQL_WVARCHAR: - case SQL_WLONGVARCHAR: { - // TODO: revisit - HandleZeroColumnSizeAtFetch(columnSize); - uint64_t fetchBufferSize = columnSize + 1 /* null-termination */; - std::vector dataBuffer(fetchBufferSize); - SQLLEN dataLen; - ret = SQLGetData_ptr(hStmt, i, SQL_C_WCHAR, dataBuffer.data(), - dataBuffer.size() * sizeof(SQLWCHAR), &dataLen); - - if (SQL_SUCCEEDED(ret)) { - // TODO: Refactor these if's across other switches to avoid code duplication - if (dataLen > 0) { - uint64_t numCharsInData = dataLen / sizeof(SQLWCHAR); - if (numCharsInData < dataBuffer.size()) { - // SQLGetData will null-terminate the data + case SQL_WLONGVARCHAR: { + if (columnSize == SQL_NO_TOTAL || columnSize == 0 || columnSize 
> 4000) { + LOG("Streaming LOB for column {} (NVARCHAR)", i); + row.append(FetchLobColumnData(hStmt, i, SQL_C_WCHAR, true, false)); + } else { + uint64_t fetchBufferSize = (columnSize + 1) * sizeof(SQLWCHAR); // +1 for null terminator + std::vector dataBuffer(columnSize + 1); + SQLLEN dataLen; + ret = SQLGetData_ptr(hStmt, i, SQL_C_WCHAR, dataBuffer.data(), fetchBufferSize, &dataLen); + if (SQL_SUCCEEDED(ret)) { + if (dataLen > 0) { + uint64_t numCharsInData = dataLen / sizeof(SQLWCHAR); + if (numCharsInData < dataBuffer.size()) { #if defined(__APPLE__) || defined(__linux__) - auto raw_bytes = reinterpret_cast(dataBuffer.data()); - size_t actualBufferSize = dataBuffer.size() * sizeof(SQLWCHAR); - if (dataLen < 0 || static_cast(dataLen) > actualBufferSize) { - LOG("Error: py::bytes creation request exceeds buffer size. dataLen={} buffer={}", - dataLen, actualBufferSize); - ThrowStdException("Invalid buffer length for py::bytes"); - } - py::bytes py_bytes(raw_bytes, dataLen); - py::str decoded = py_bytes.attr("decode")("utf-16-le"); - row.append(decoded); + const SQLWCHAR* sqlwBuf = reinterpret_cast(dataBuffer.data()); + std::wstring wstr = SQLWCHARToWString(sqlwBuf, numCharsInData); + std::string utf8str = WideToUTF8(wstr); + row.append(py::str(utf8str)); #else - row.append(std::wstring(dataBuffer.data())); + std::wstring wstr(reinterpret_cast(dataBuffer.data())); + row.append(py::cast(wstr)); #endif - } else { - // In this case, buffer size is smaller, and data to be retrieved is longer - // TODO: Revisit - std::ostringstream oss; - oss << "Buffer length for fetch (" << dataBuffer.size()-1 << ") is smaller, & data " - << "to be retrieved is longer (" << numCharsInData << "). 
ColumnID - " - << i << ", datatype - " << dataType; - ThrowStdException(oss.str()); + LOG("Appended NVARCHAR string of length {} to result row", numCharsInData); + } else { + // Buffer too small, fallback to streaming + LOG("NVARCHAR column {} data truncated, using streaming LOB", i); + row.append(FetchLobColumnData(hStmt, i, SQL_C_WCHAR, true, false)); + } + } else if (dataLen == SQL_NULL_DATA) { + LOG("Column {} is NULL (CHAR)", i); + row.append(py::none()); + } else if (dataLen == 0) { + row.append(py::str("")); + } else if (dataLen == SQL_NO_TOTAL) { + LOG("SQLGetData couldn't determine the length of the NVARCHAR data. Returning NULL. Column ID - {}", i); + row.append(py::none()); + } else if (dataLen < 0) { + LOG("SQLGetData returned an unexpected negative data length. " + "Raising exception. Column ID - {}, Data Type - {}, Data Length - {}", + i, dataType, dataLen); + ThrowStdException("SQLGetData returned an unexpected negative data length"); } - } else if (dataLen == SQL_NULL_DATA) { - row.append(py::none()); - } else if (dataLen == 0) { - // Handle zero-length (non-NULL) data - row.append(py::str("")); - } else if (dataLen < 0) { - // This is unexpected - LOG("SQLGetData returned an unexpected negative data length. " - "Raising exception. Column ID - {}, Data Type - {}, Data Length - {}", - i, dataType, dataLen); - ThrowStdException("SQLGetData returned an unexpected negative data length"); + } else { + LOG("Error retrieving data for column {} (NVARCHAR), SQLGetData return code {}", i, ret); + row.append(py::none()); } - } else { - LOG("Error retrieving data for column - {}, data type - {}, SQLGetData return " - "code - {}. 
Returning NULL value instead", - i, dataType, ret); - row.append(py::none()); - } + } break; } case SQL_INTEGER: { @@ -2411,7 +2412,7 @@ SQLRETURN SQLBindColums(SQLHSTMT hStmt, ColumnBuffers& buffers, py::list& column // Fetch rows in batches // TODO: Move to anonymous namespace, since it is not used outside this file SQLRETURN FetchBatchData(SQLHSTMT hStmt, ColumnBuffers& buffers, py::list& columnNames, - py::list& rows, SQLUSMALLINT numCols, SQLULEN& numRowsFetched) { + py::list& rows, SQLUSMALLINT numCols, SQLULEN& numRowsFetched, const std::vector& lobColumns) { LOG("Fetching data in batches"); SQLRETURN ret = SQLFetchScroll_ptr(hStmt, SQL_FETCH_NEXT, 0); if (ret == SQL_NO_DATA) { @@ -2471,25 +2472,19 @@ SQLRETURN FetchBatchData(SQLHSTMT hStmt, ColumnBuffers& buffers, py::list& colum case SQL_CHAR: case SQL_VARCHAR: case SQL_LONGVARCHAR: { - // TODO: variable length data needs special handling, this logic wont suffice SQLULEN columnSize = columnMeta["ColumnSize"].cast(); HandleZeroColumnSizeAtFetch(columnSize); uint64_t fetchBufferSize = columnSize + 1 /*null-terminator*/; uint64_t numCharsInData = dataLen / sizeof(SQLCHAR); + bool isLob = std::find(lobColumns.begin(), lobColumns.end(), col) != lobColumns.end(); // fetchBufferSize includes null-terminator, numCharsInData doesn't. Hence '<' - if (numCharsInData < fetchBufferSize) { + if (!isLob && numCharsInData < fetchBufferSize) { // SQLFetch will nullterminate the data row.append(std::string( reinterpret_cast(&buffers.charBuffers[col - 1][i * fetchBufferSize]), numCharsInData)); } else { - // In this case, buffer size is smaller, and data to be retrieved is longer - // TODO: Revisit - std::ostringstream oss; - oss << "Buffer length for fetch (" << columnSize << ") is smaller, & data " - << "to be retrieved is longer (" << numCharsInData << "). 
ColumnID - " - << col << ", datatype - " << dataType; - ThrowStdException(oss.str()); + row.append(FetchLobColumnData(hStmt, col, SQL_C_CHAR, false, false)); } break; } @@ -2501,8 +2496,9 @@ SQLRETURN FetchBatchData(SQLHSTMT hStmt, ColumnBuffers& buffers, py::list& colum HandleZeroColumnSizeAtFetch(columnSize); uint64_t fetchBufferSize = columnSize + 1 /*null-terminator*/; uint64_t numCharsInData = dataLen / sizeof(SQLWCHAR); + bool isLob = std::find(lobColumns.begin(), lobColumns.end(), col) != lobColumns.end(); // fetchBufferSize includes null-terminator, numCharsInData doesn't. Hence '<' - if (numCharsInData < fetchBufferSize) { + if (!isLob && numCharsInData < fetchBufferSize) { // SQLFetch will nullterminate the data #if defined(__APPLE__) || defined(__linux__) // Use unix-specific conversion to handle the wchar_t/SQLWCHAR size difference @@ -2516,13 +2512,7 @@ SQLRETURN FetchBatchData(SQLHSTMT hStmt, ColumnBuffers& buffers, py::list& colum numCharsInData)); #endif } else { - // In this case, buffer size is smaller, and data to be retrieved is longer - // TODO: Revisit - std::ostringstream oss; - oss << "Buffer length for fetch (" << columnSize << ") is smaller, & data " - << "to be retrieved is longer (" << numCharsInData << "). 
ColumnID - " - << col << ", datatype - " << dataType; - ThrowStdException(oss.str()); + row.append(FetchLobColumnData(hStmt, col, SQL_C_WCHAR, true, false)); } break; } @@ -2608,21 +2598,15 @@ SQLRETURN FetchBatchData(SQLHSTMT hStmt, ColumnBuffers& buffers, py::list& colum case SQL_BINARY: case SQL_VARBINARY: case SQL_LONGVARBINARY: { - // TODO: variable length data needs special handling, this logic wont suffice SQLULEN columnSize = columnMeta["ColumnSize"].cast(); HandleZeroColumnSizeAtFetch(columnSize); - if (static_cast(dataLen) <= columnSize) { + bool isLob = std::find(lobColumns.begin(), lobColumns.end(), col) != lobColumns.end(); + if (!isLob && static_cast(dataLen) <= columnSize) { row.append(py::bytes(reinterpret_cast( &buffers.charBuffers[col - 1][i * columnSize]), dataLen)); } else { - // In this case, buffer size is smaller, and data to be retrieved is longer - // TODO: Revisit - std::ostringstream oss; - oss << "Buffer length for fetch (" << columnSize << ") is smaller, & data " - << "to be retrieved is longer (" << dataLen << "). 
ColumnID - " - << col << ", datatype - " << dataType; - ThrowStdException(oss.str()); + row.append(FetchLobColumnData(hStmt, col, SQL_C_BINARY, false, true)); } break; } @@ -2751,6 +2735,35 @@ SQLRETURN FetchMany_wrap(SqlHandlePtr StatementHandle, py::list& rows, int fetch return ret; } + std::vector lobColumns; + for (SQLSMALLINT i = 0; i < numCols; i++) { + auto colMeta = columnNames[i].cast(); + SQLSMALLINT dataType = colMeta["DataType"].cast(); + SQLULEN columnSize = colMeta["ColumnSize"].cast(); + + if ((dataType == SQL_WVARCHAR || dataType == SQL_WLONGVARCHAR || + dataType == SQL_VARCHAR || dataType == SQL_LONGVARCHAR || + dataType == SQL_VARBINARY || dataType == SQL_LONGVARBINARY) && + (columnSize == 0 || columnSize == SQL_NO_TOTAL || columnSize > SQL_MAX_LOB_SIZE)) { + lobColumns.push_back(i + 1); // 1-based + } + } + + // If we have LOBs → fall back to row-by-row fetch + SQLGetData_wrap + if (!lobColumns.empty()) { + LOG("LOB columns detected → using per-row SQLGetData path"); + while (true) { + ret = SQLFetch_ptr(hStmt); + if (ret == SQL_NO_DATA) break; + if (!SQL_SUCCEEDED(ret)) return ret; + + py::list row; + SQLGetData_wrap(StatementHandle, numCols, row); // <-- streams LOBs correctly + rows.append(row); + } + return SQL_SUCCESS; + } + // Initialize column buffers ColumnBuffers buffers(numCols, fetchSize); @@ -2765,7 +2778,7 @@ SQLRETURN FetchMany_wrap(SqlHandlePtr StatementHandle, py::list& rows, int fetch SQLSetStmtAttr_ptr(hStmt, SQL_ATTR_ROW_ARRAY_SIZE, (SQLPOINTER)(intptr_t)fetchSize, 0); SQLSetStmtAttr_ptr(hStmt, SQL_ATTR_ROWS_FETCHED_PTR, &numRowsFetched, 0); - ret = FetchBatchData(hStmt, buffers, columnNames, rows, numCols, numRowsFetched); + ret = FetchBatchData(hStmt, buffers, columnNames, rows, numCols, numRowsFetched, lobColumns); if (!SQL_SUCCEEDED(ret) && ret != SQL_NO_DATA) { LOG("Error when fetching data"); return ret; @@ -2844,6 +2857,35 @@ SQLRETURN FetchAll_wrap(SqlHandlePtr StatementHandle, py::list& rows) { } LOG("Fetching data in 
batch sizes of {}", fetchSize); + std::vector lobColumns; + for (SQLSMALLINT i = 0; i < numCols; i++) { + auto colMeta = columnNames[i].cast(); + SQLSMALLINT dataType = colMeta["DataType"].cast(); + SQLULEN columnSize = colMeta["ColumnSize"].cast(); + + if ((dataType == SQL_WVARCHAR || dataType == SQL_WLONGVARCHAR || + dataType == SQL_VARCHAR || dataType == SQL_LONGVARCHAR || + dataType == SQL_VARBINARY || dataType == SQL_LONGVARBINARY) && + (columnSize == 0 || columnSize == SQL_NO_TOTAL || columnSize > SQL_MAX_LOB_SIZE)) { + lobColumns.push_back(i + 1); // 1-based + } + } + + // If we have LOBs → fall back to row-by-row fetch + SQLGetData_wrap + if (!lobColumns.empty()) { + LOG("LOB columns detected → using per-row SQLGetData path"); + while (true) { + ret = SQLFetch_ptr(hStmt); + if (ret == SQL_NO_DATA) break; + if (!SQL_SUCCEEDED(ret)) return ret; + + py::list row; + SQLGetData_wrap(StatementHandle, numCols, row); // <-- streams LOBs correctly + rows.append(row); + } + return SQL_SUCCESS; + } + ColumnBuffers buffers(numCols, fetchSize); // Bind columns @@ -2858,7 +2900,7 @@ SQLRETURN FetchAll_wrap(SqlHandlePtr StatementHandle, py::list& rows) { SQLSetStmtAttr_ptr(hStmt, SQL_ATTR_ROWS_FETCHED_PTR, &numRowsFetched, 0); while (ret != SQL_NO_DATA) { - ret = FetchBatchData(hStmt, buffers, columnNames, rows, numCols, numRowsFetched); + ret = FetchBatchData(hStmt, buffers, columnNames, rows, numCols, numRowsFetched, lobColumns); if (!SQL_SUCCEEDED(ret) && ret != SQL_NO_DATA) { LOG("Error when fetching data"); return ret; diff --git a/tests/test_004_cursor.py b/tests/test_004_cursor.py index 3f637ba72..df1020640 100644 --- a/tests/test_004_cursor.py +++ b/tests/test_004_cursor.py @@ -523,60 +523,6 @@ def test_varbinary_full_capacity(cursor, db_connection): cursor.execute("DROP TABLE #pytest_varbinary_test") db_connection.commit() -def test_varchar_max(cursor, db_connection): - """Test SQL_VARCHAR with MAX length""" - try: - cursor.execute("CREATE TABLE 
#pytest_varchar_test (varchar_column VARCHAR(MAX))") - db_connection.commit() - cursor.execute("INSERT INTO #pytest_varchar_test (varchar_column) VALUES (?), (?)", ["ABCDEFGHI", None]) - db_connection.commit() - expectedRows = 2 - # fetchone test - cursor.execute("SELECT varchar_column FROM #pytest_varchar_test") - rows = [] - for i in range(0, expectedRows): - rows.append(cursor.fetchone()) - assert cursor.fetchone() == None, "varchar_column is expected to have only {} rows".format(expectedRows) - assert rows[0] == ["ABCDEFGHI"], "SQL_VARCHAR parsing failed for fetchone - row 0" - assert rows[1] == [None], "SQL_VARCHAR parsing failed for fetchone - row 1" - # fetchall test - cursor.execute("SELECT varchar_column FROM #pytest_varchar_test") - rows = cursor.fetchall() - assert rows[0] == ["ABCDEFGHI"], "SQL_VARCHAR parsing failed for fetchall - row 0" - assert rows[1] == [None], "SQL_VARCHAR parsing failed for fetchall - row 1" - except Exception as e: - pytest.fail(f"SQL_VARCHAR parsing test failed: {e}") - finally: - cursor.execute("DROP TABLE #pytest_varchar_test") - db_connection.commit() - -def test_wvarchar_max(cursor, db_connection): - """Test SQL_WVARCHAR with MAX length""" - try: - cursor.execute("CREATE TABLE #pytest_wvarchar_test (wvarchar_column NVARCHAR(MAX))") - db_connection.commit() - cursor.execute("INSERT INTO #pytest_wvarchar_test (wvarchar_column) VALUES (?), (?)", ["!@#$%^&*()_+", None]) - db_connection.commit() - expectedRows = 2 - # fetchone test - cursor.execute("SELECT wvarchar_column FROM #pytest_wvarchar_test") - rows = [] - for i in range(0, expectedRows): - rows.append(cursor.fetchone()) - assert cursor.fetchone() == None, "wvarchar_column is expected to have only {} rows".format(expectedRows) - assert rows[0] == ["!@#$%^&*()_+"], "SQL_WVARCHAR parsing failed for fetchone - row 0" - assert rows[1] == [None], "SQL_WVARCHAR parsing failed for fetchone - row 1" - # fetchall test - cursor.execute("SELECT wvarchar_column FROM 
#pytest_wvarchar_test") - rows = cursor.fetchall() - assert rows[0] == ["!@#$%^&*()_+"], "SQL_WVARCHAR parsing failed for fetchall - row 0" - assert rows[1] == [None], "SQL_WVARCHAR parsing failed for fetchall - row 1" - except Exception as e: - pytest.fail(f"SQL_WVARCHAR parsing test failed: {e}") - finally: - cursor.execute("DROP TABLE #pytest_wvarchar_test") - db_connection.commit() - def test_varbinary_max(cursor, db_connection): """Test SQL_VARBINARY with MAX length""" try: @@ -5680,311 +5626,42 @@ def test_emoji_round_trip(cursor, db_connection): except Exception as e: pytest.fail(f"Error for input {repr(text)}: {e}") -def test_varchar_max_insert_non_lob(cursor, db_connection): - """Test small VARCHAR(MAX) insert (non-LOB path).""" - try: - cursor.execute("CREATE TABLE #pytest_varchar_nonlob (col VARCHAR(MAX))") - db_connection.commit() - - small_str = "Hello, world!" # small, non-LOB - cursor.execute( - "INSERT INTO #pytest_varchar_nonlob (col) VALUES (?)", - [small_str] - ) - db_connection.commit() - - empty_str = "" - cursor.execute( - "INSERT INTO #pytest_varchar_nonlob (col) VALUES (?)", - [empty_str] - ) - db_connection.commit() - - # None value - cursor.execute( - "INSERT INTO #pytest_varchar_nonlob (col) VALUES (?)", - [None] - ) - db_connection.commit() - - # Fetch commented for now - # cursor.execute("SELECT col FROM #pytest_varchar_nonlob") - # rows = cursor.fetchall() - # assert rows == [[small_str], [empty_str], [None]] - - finally: - pass - - -def test_varchar_max_insert_lob(cursor, db_connection): - """Test large VARCHAR(MAX) insert (LOB path).""" - try: - cursor.execute("CREATE TABLE #pytest_varchar_lob (col VARCHAR(MAX))") - db_connection.commit() - - large_str = "A" * 100_000 # > 8k to trigger LOB - cursor.execute( - "INSERT INTO #pytest_varchar_lob (col) VALUES (?)", - [large_str] - ) - db_connection.commit() - - # Fetch commented for now - # cursor.execute("SELECT col FROM #pytest_varchar_lob") - # rows = cursor.fetchall() - # assert rows 
== [[large_str]] - - finally: - pass - - -def test_nvarchar_max_insert_non_lob(cursor, db_connection): - """Test small NVARCHAR(MAX) insert (non-LOB path).""" - try: - cursor.execute("CREATE TABLE #pytest_nvarchar_nonlob (col NVARCHAR(MAX))") - db_connection.commit() - - small_str = "Unicode ✨ test" - cursor.execute( - "INSERT INTO #pytest_nvarchar_nonlob (col) VALUES (?)", - [small_str] - ) - db_connection.commit() - - empty_str = "" - cursor.execute( - "INSERT INTO #pytest_nvarchar_nonlob (col) VALUES (?)", - [empty_str] - ) - db_connection.commit() - - cursor.execute( - "INSERT INTO #pytest_nvarchar_nonlob (col) VALUES (?)", - [None] - ) - db_connection.commit() - - # Fetch commented for now - # cursor.execute("SELECT col FROM #pytest_nvarchar_nonlob") - # rows = cursor.fetchall() - # assert rows == [[small_str], [empty_str], [None]] - - finally: - pass - - -def test_nvarchar_max_insert_lob(cursor, db_connection): - """Test large NVARCHAR(MAX) insert (LOB path).""" - try: - cursor.execute("CREATE TABLE #pytest_nvarchar_lob (col NVARCHAR(MAX))") - db_connection.commit() - - large_str = "📝" * 50_000 # each emoji = 2 UTF-16 code units, total > 100k bytes - cursor.execute( - "INSERT INTO #pytest_nvarchar_lob (col) VALUES (?)", - [large_str] - ) - db_connection.commit() - - # Fetch commented for now - # cursor.execute("SELECT col FROM #pytest_nvarchar_lob") - # rows = cursor.fetchall() - # assert rows == [[large_str]] - - finally: - pass - -def test_nvarchar_max_boundary(cursor, db_connection): - """Test NVARCHAR(MAX) at LOB boundary sizes.""" - try: - cursor.execute("DROP TABLE IF EXISTS #pytest_nvarchar_boundary") - cursor.execute("CREATE TABLE #pytest_nvarchar_boundary (col NVARCHAR(MAX))") - db_connection.commit() - - # 4k BMP chars = 8k bytes - cursor.execute("INSERT INTO #pytest_nvarchar_boundary (col) VALUES (?)", ["A" * 4096]) - # 4k emojis = 8k UTF-16 code units (16k bytes) - cursor.execute("INSERT INTO #pytest_nvarchar_boundary (col) VALUES (?)", ["📝" * 
4096]) - db_connection.commit() - - # Fetch commented for now - # cursor.execute("SELECT col FROM #pytest_nvarchar_boundary") - # rows = cursor.fetchall() - # assert rows == [["A" * 4096], ["📝" * 4096]] - finally: - pass - - -def test_nvarchar_max_chunk_edge(cursor, db_connection): - """Test NVARCHAR(MAX) insert slightly larger than a chunk.""" - try: - cursor.execute("DROP TABLE IF EXISTS #pytest_nvarchar_chunk") - cursor.execute("CREATE TABLE #pytest_nvarchar_chunk (col NVARCHAR(MAX))") - db_connection.commit() - - chunk_size = 8192 # bytes - test_str = "📝" * ((chunk_size // 4) + 3) # slightly > 1 chunk - cursor.execute("INSERT INTO #pytest_nvarchar_chunk (col) VALUES (?)", [test_str]) - db_connection.commit() - - # Fetch commented for now - # cursor.execute("SELECT col FROM #pytest_nvarchar_chunk") - # row = cursor.fetchone() - # assert row[0] == test_str - finally: - pass - -def test_empty_string_chunk(cursor, db_connection): - """Test inserting empty strings into VARCHAR(MAX) and NVARCHAR(MAX).""" - try: - cursor.execute("DROP TABLE IF EXISTS #pytest_empty_string") - cursor.execute(""" - CREATE TABLE #pytest_empty_string ( - varchar_col VARCHAR(MAX), - nvarchar_col NVARCHAR(MAX) - ) - """) - db_connection.commit() - - empty_varchar = "" - empty_nvarchar = "" - cursor.execute( - "INSERT INTO #pytest_empty_string (varchar_col, nvarchar_col) VALUES (?, ?)", - [empty_varchar, empty_nvarchar] - ) - db_connection.commit() - - cursor.execute("SELECT LEN(varchar_col), LEN(nvarchar_col) FROM #pytest_empty_string") - row = tuple(int(x) for x in cursor.fetchone()) - assert row == (0, 0), f"Expected lengths (0,0), got {row}" - finally: - cursor.execute("DROP TABLE IF EXISTS #pytest_empty_string") - db_connection.commit() - - -def test_varcharmax_short(cursor, db_connection): - """Test inserting and retrieving a small string well below any size thresholds. 
- # Verifies basic functionality for VARCHAR(MAX) with typical input size.""" - try: - cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") - cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") - db_connection.commit() - - short_str = "hello" - cursor.execute("INSERT INTO #pytest_varcharmax VALUES (?)", [short_str]) - db_connection.commit() - cursor.execute("SELECT col FROM #pytest_varcharmax WHERE col = ?", [short_str]) - assert cursor.fetchone()[0] == short_str - finally: - cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") - db_connection.commit() - - -def test_varcharmax_boundary(cursor, db_connection): - """Test inserting and retrieving a string at the boundary size (8000 characters), - which is the largest size supported without switching to streaming or large object handling. - Ensures proper handling at this edge case.""" - try: - cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") - cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") - db_connection.commit() - - boundary_str = "X" * 8000 - cursor.execute("INSERT INTO #pytest_varcharmax VALUES (?)", [boundary_str]) - db_connection.commit() - cursor.execute("SELECT col FROM #pytest_varcharmax WHERE col = ?", [boundary_str]) - assert cursor.fetchone()[0] == boundary_str - finally: - cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") - db_connection.commit() - - -def test_varcharmax_streaming(cursor, db_connection): - """Test inserting and retrieving a string just above the boundary size (8100 characters), - which requires streaming mechanisms to handle data efficiently. 
- Validates that larger data triggers correct processing without truncation.""" - try: - cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") - cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") - db_connection.commit() - - streaming_str = "Y" * 8100 - cursor.execute("INSERT INTO #pytest_varcharmax VALUES (?)", [streaming_str]) - db_connection.commit() - cursor.execute("SELECT col FROM #pytest_varcharmax WHERE col = ?", [streaming_str]) - assert cursor.fetchone()[0] == streaming_str - finally: - cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") - db_connection.commit() - - -def test_varcharmax_large(cursor, db_connection): - """Test inserting and retrieving a very large string (100,000 characters), - which is well beyond typical sizes and ensures that the system can handle large VARCHAR(MAX) values.""" - try: - cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") - cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") - db_connection.commit() - - large_str = "Z" * 100_000 - cursor.execute("INSERT INTO #pytest_varcharmax VALUES (?)", [large_str]) - db_connection.commit() - cursor.execute("SELECT col FROM #pytest_varcharmax WHERE col = ?", [large_str]) - assert cursor.fetchone()[0] == large_str - finally: - cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") - db_connection.commit() - - -def test_varcharmax_empty_string(cursor, db_connection): - """Test inserting and retrieving an empty string to verify correct handling of zero-length data.""" - try: - cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") - cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") - db_connection.commit() - - cursor.execute("INSERT INTO #pytest_varcharmax VALUES (?)", [""]) - db_connection.commit() - cursor.execute("SELECT col FROM #pytest_varcharmax WHERE col = ?", [""]) - assert cursor.fetchone()[0] == "" - finally: - cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") - db_connection.commit() - - -def 
test_varcharmax_null(cursor, db_connection): - """Test inserting and retrieving a NULL value to ensure proper handling of SQL NULLs.""" +def test_varcharmax_transaction_rollback(cursor, db_connection): + """Test that inserting a large VARCHAR(MAX) within a transaction that is rolled back + does not persist the data, ensuring transactional integrity.""" try: cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") db_connection.commit() - cursor.execute("INSERT INTO #pytest_varcharmax VALUES (?)", [None]) - db_connection.commit() - cursor.execute("SELECT col FROM #pytest_varcharmax WHERE col IS NULL") - assert cursor.fetchone()[0] is None + db_connection.autocommit = False + rollback_str = "ROLLBACK" * 2000 + cursor.execute("INSERT INTO #pytest_varcharmax VALUES (?)", [rollback_str]) + db_connection.rollback() + cursor.execute("SELECT COUNT(*) FROM #pytest_varcharmax WHERE col = ?", [rollback_str]) + assert cursor.fetchone()[0] == 0 finally: + db_connection.autocommit = True # reset state cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") db_connection.commit() - -def test_varcharmax_transaction_rollback(cursor, db_connection): - """Test that inserting a large VARCHAR(MAX) within a transaction that is rolled back +def test_nvarcharmax_transaction_rollback(cursor, db_connection): + """Test that inserting a large NVARCHAR(MAX) within a transaction that is rolled back does not persist the data, ensuring transactional integrity.""" try: - cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") - cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") + cursor.execute("DROP TABLE IF EXISTS #pytest_nvarcharmax") + cursor.execute("CREATE TABLE #pytest_nvarcharmax (col NVARCHAR(MAX))") db_connection.commit() db_connection.autocommit = False rollback_str = "ROLLBACK" * 2000 - cursor.execute("INSERT INTO #pytest_varcharmax VALUES (?)", [rollback_str]) + cursor.execute("INSERT INTO 
#pytest_nvarcharmax VALUES (?)", [rollback_str]) db_connection.rollback() - cursor.execute("SELECT COUNT(*) FROM #pytest_varcharmax WHERE col = ?", [rollback_str]) + cursor.execute("SELECT COUNT(*) FROM #pytest_nvarcharmax WHERE col = ?", [rollback_str]) assert cursor.fetchone()[0] == 0 finally: - db_connection.autocommit = True # reset state - cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") + db_connection.autocommit = True + cursor.execute("DROP TABLE IF EXISTS #pytest_nvarcharmax") db_connection.commit() @@ -6675,6 +6352,259 @@ def test_only_null_and_empty_binary(cursor, db_connection): drop_table_if_exists(cursor, "#pytest_null_empty_binary") db_connection.commit() +# ---------------------- VARCHAR(MAX) ---------------------- + +def test_varcharmax_short_fetch(cursor, db_connection): + """Small VARCHAR(MAX), fetchone/fetchall/fetchmany.""" + try: + cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") + cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") + db_connection.commit() + + values = ["hello", "world"] + for val in values: + cursor.execute("INSERT INTO #pytest_varcharmax VALUES (?)", [val]) + db_connection.commit() + + # fetchone + cursor.execute("SELECT col FROM #pytest_varcharmax ORDER BY col") + row1 = cursor.fetchone()[0] + row2 = cursor.fetchone()[0] + assert {row1, row2} == set(values) + assert cursor.fetchone() is None + + # fetchall + cursor.execute("SELECT col FROM #pytest_varcharmax ORDER BY col") + all_rows = [r[0] for r in cursor.fetchall()] + assert set(all_rows) == set(values) + + # fetchmany + cursor.execute("SELECT col FROM #pytest_varcharmax ORDER BY col") + many = [r[0] for r in cursor.fetchmany(1)] + assert many[0] in values + finally: + cursor.execute("DROP TABLE IF EXISTS #pytest_varcharmax") + db_connection.commit() + + +def test_varcharmax_empty_string(cursor, db_connection): + """Empty string in VARCHAR(MAX).""" + try: + cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") + 
db_connection.commit() + cursor.execute("INSERT INTO #pytest_varcharmax VALUES (?)", [""]) + db_connection.commit() + + cursor.execute("SELECT col FROM #pytest_varcharmax") + assert cursor.fetchone()[0] == "" + finally: + cursor.execute("DROP TABLE #pytest_varcharmax") + db_connection.commit() + + +def test_varcharmax_null(cursor, db_connection): + """NULL in VARCHAR(MAX).""" + try: + cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") + db_connection.commit() + cursor.execute("INSERT INTO #pytest_varcharmax VALUES (?)", [None]) + db_connection.commit() + + cursor.execute("SELECT col FROM #pytest_varcharmax") + assert cursor.fetchone()[0] is None + finally: + cursor.execute("DROP TABLE #pytest_varcharmax") + db_connection.commit() + + +def test_varcharmax_boundary(cursor, db_connection): + """Boundary at 8000 (inline limit).""" + try: + boundary_str = "X" * 8000 + cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") + db_connection.commit() + cursor.execute("INSERT INTO #pytest_varcharmax VALUES (?)", [boundary_str]) + db_connection.commit() + + cursor.execute("SELECT col FROM #pytest_varcharmax") + assert cursor.fetchone()[0] == boundary_str + finally: + cursor.execute("DROP TABLE #pytest_varcharmax") + db_connection.commit() + + +def test_varcharmax_streaming(cursor, db_connection): + """Streaming fetch > 8k with all fetch modes.""" + try: + values = ["Y" * 8100, "Z" * 10000] + cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") + db_connection.commit() + for v in values: + cursor.execute("INSERT INTO #pytest_varcharmax VALUES (?)", [v]) + db_connection.commit() + + # --- fetchall --- + cursor.execute("SELECT col FROM #pytest_varcharmax ORDER BY LEN(col)") + rows = [r[0] for r in cursor.fetchall()] + assert rows == sorted(values, key=len) + + # --- fetchone --- + cursor.execute("SELECT col FROM #pytest_varcharmax ORDER BY LEN(col)") + r1 = cursor.fetchone()[0] + r2 = cursor.fetchone()[0] + assert {r1, r2} == 
set(values) + assert cursor.fetchone() is None + + # --- fetchmany --- + cursor.execute("SELECT col FROM #pytest_varcharmax ORDER BY LEN(col)") + batch = [r[0] for r in cursor.fetchmany(1)] + assert batch[0] in values + finally: + cursor.execute("DROP TABLE #pytest_varcharmax") + db_connection.commit() + + +def test_varcharmax_large(cursor, db_connection): + """Very large VARCHAR(MAX).""" + try: + large_str = "L" * 100_000 + cursor.execute("CREATE TABLE #pytest_varcharmax (col VARCHAR(MAX))") + db_connection.commit() + cursor.execute("INSERT INTO #pytest_varcharmax VALUES (?)", [large_str]) + db_connection.commit() + + cursor.execute("SELECT col FROM #pytest_varcharmax") + assert cursor.fetchone()[0] == large_str + finally: + cursor.execute("DROP TABLE #pytest_varcharmax") + db_connection.commit() + + +# ---------------------- NVARCHAR(MAX) ---------------------- + +def test_nvarcharmax_short_fetch(cursor, db_connection): + """Small NVARCHAR(MAX), unicode, fetch modes.""" + try: + values = ["hello", "world_ß"] + cursor.execute("CREATE TABLE #pytest_nvarcharmax (col NVARCHAR(MAX))") + db_connection.commit() + for v in values: + cursor.execute("INSERT INTO #pytest_nvarcharmax VALUES (?)", [v]) + db_connection.commit() + + # fetchone + cursor.execute("SELECT col FROM #pytest_nvarcharmax ORDER BY col") + r1 = cursor.fetchone()[0] + r2 = cursor.fetchone()[0] + assert {r1, r2} == set(values) + assert cursor.fetchone() is None + + # fetchall + cursor.execute("SELECT col FROM #pytest_nvarcharmax ORDER BY col") + all_rows = [r[0] for r in cursor.fetchall()] + assert set(all_rows) == set(values) + + # fetchmany + cursor.execute("SELECT col FROM #pytest_nvarcharmax ORDER BY col") + many = [r[0] for r in cursor.fetchmany(1)] + assert many[0] in values + finally: + cursor.execute("DROP TABLE #pytest_nvarcharmax") + db_connection.commit() + + +def test_nvarcharmax_empty_string(cursor, db_connection): + """Empty string in NVARCHAR(MAX).""" + try: + cursor.execute("CREATE TABLE 
#pytest_nvarcharmax (col NVARCHAR(MAX))") + db_connection.commit() + cursor.execute("INSERT INTO #pytest_nvarcharmax VALUES (?)", [""]) + db_connection.commit() + + cursor.execute("SELECT col FROM #pytest_nvarcharmax") + assert cursor.fetchone()[0] == "" + finally: + cursor.execute("DROP TABLE #pytest_nvarcharmax") + db_connection.commit() + + +def test_nvarcharmax_null(cursor, db_connection): + """NULL in NVARCHAR(MAX).""" + try: + cursor.execute("CREATE TABLE #pytest_nvarcharmax (col NVARCHAR(MAX))") + db_connection.commit() + cursor.execute("INSERT INTO #pytest_nvarcharmax VALUES (?)", [None]) + db_connection.commit() + + cursor.execute("SELECT col FROM #pytest_nvarcharmax") + assert cursor.fetchone()[0] is None + finally: + cursor.execute("DROP TABLE #pytest_nvarcharmax") + db_connection.commit() + + +def test_nvarcharmax_boundary(cursor, db_connection): + """Boundary at 4000 characters (inline limit).""" + try: + boundary_str = "X" * 4000 + cursor.execute("CREATE TABLE #pytest_nvarcharmax (col NVARCHAR(MAX))") + db_connection.commit() + cursor.execute("INSERT INTO #pytest_nvarcharmax VALUES (?)", [boundary_str]) + db_connection.commit() + + cursor.execute("SELECT col FROM #pytest_nvarcharmax") + assert cursor.fetchone()[0] == boundary_str + finally: + cursor.execute("DROP TABLE #pytest_nvarcharmax") + db_connection.commit() + + +def test_nvarcharmax_streaming(cursor, db_connection): + """Streaming fetch > 4k unicode with all fetch modes.""" + try: + values = ["Ω" * 4100, "漢" * 5000] + cursor.execute("CREATE TABLE #pytest_nvarcharmax (col NVARCHAR(MAX))") + db_connection.commit() + for v in values: + cursor.execute("INSERT INTO #pytest_nvarcharmax VALUES (?)", [v]) + db_connection.commit() + + # --- fetchall --- + cursor.execute("SELECT col FROM #pytest_nvarcharmax ORDER BY LEN(col)") + rows = [r[0] for r in cursor.fetchall()] + assert rows == sorted(values, key=len) + + # --- fetchone --- + cursor.execute("SELECT col FROM #pytest_nvarcharmax ORDER BY 
LEN(col)") + r1 = cursor.fetchone()[0] + r2 = cursor.fetchone()[0] + assert {r1, r2} == set(values) + assert cursor.fetchone() is None + + # --- fetchmany --- + cursor.execute("SELECT col FROM #pytest_nvarcharmax ORDER BY LEN(col)") + batch = [r[0] for r in cursor.fetchmany(1)] + assert batch[0] in values + finally: + cursor.execute("DROP TABLE #pytest_nvarcharmax") + db_connection.commit() + + +def test_nvarcharmax_large(cursor, db_connection): + """Very large NVARCHAR(MAX).""" + try: + large_str = "漢" * 50_000 + cursor.execute("CREATE TABLE #pytest_nvarcharmax (col NVARCHAR(MAX))") + db_connection.commit() + cursor.execute("INSERT INTO #pytest_nvarcharmax VALUES (?)", [large_str]) + db_connection.commit() + + cursor.execute("SELECT col FROM #pytest_nvarcharmax") + assert cursor.fetchone()[0] == large_str + finally: + cursor.execute("DROP TABLE #pytest_nvarcharmax") + db_connection.commit() + def test_close(db_connection): """Test closing the cursor""" try: @@ -6684,4 +6614,4 @@ def test_close(db_connection): except Exception as e: pytest.fail(f"Cursor close test failed: {e}") finally: - cursor = db_connection.cursor() \ No newline at end of file + cursor = db_connection.cursor() From 6441534ed3acb79c06c414a776caef4b5d16ee3e Mon Sep 17 00:00:00 2001 From: gargsaumya Date: Mon, 15 Sep 2025 10:26:34 +0530 Subject: [PATCH 09/11] mac test --- mssql_python/pybind/ddbc_bindings.cpp | 5 ++- mssql_python/pybind/ddbc_bindings.h | 65 ++++++++++++++++++++++----- 2 files changed, 59 insertions(+), 11 deletions(-) diff --git a/mssql_python/pybind/ddbc_bindings.cpp b/mssql_python/pybind/ddbc_bindings.cpp index d0d5be140..28c6b926c 100644 --- a/mssql_python/pybind/ddbc_bindings.cpp +++ b/mssql_python/pybind/ddbc_bindings.cpp @@ -1860,6 +1860,8 @@ SQLRETURN SQLGetData_wrap(SqlHandlePtr StatementHandle, SQLUSMALLINT colCount, p row.append(py::none()); continue; } + LOG("Fetching column {}: size={}, type={}", i, columnSize, dataType); + switch (dataType) { case SQL_CHAR: @@ -1919,7 
+1921,8 @@ SQLRETURN SQLGetData_wrap(SqlHandlePtr StatementHandle, SQLUSMALLINT colCount, p case SQL_WCHAR: case SQL_WVARCHAR: case SQL_WLONGVARCHAR: { - if (columnSize == SQL_NO_TOTAL || columnSize == 0 || columnSize > 4000) { + if (columnSize == SQL_NO_TOTAL || columnSize > 4000) { + std::cout << "Column size: " << columnSize << std::endl; LOG("Streaming LOB for column {} (NVARCHAR)", i); row.append(FetchLobColumnData(hStmt, i, SQL_C_WCHAR, true, false)); } else { diff --git a/mssql_python/pybind/ddbc_bindings.h b/mssql_python/pybind/ddbc_bindings.h index 81e429527..8010fb20c 100644 --- a/mssql_python/pybind/ddbc_bindings.h +++ b/mssql_python/pybind/ddbc_bindings.h @@ -70,9 +70,55 @@ inline bool IsValidUnicodeScalar(uint32_t cp) { !(cp >= UNICODE_SURROGATE_HIGH_START && cp <= UNICODE_SURROGATE_LOW_END); } +// inline std::wstring SQLWCHARToWString(const SQLWCHAR* sqlwStr, size_t length = SQL_NTS) { +// if (!sqlwStr) return std::wstring(); + +// if (length == SQL_NTS) { +// size_t i = 0; +// while (sqlwStr[i] != 0) ++i; +// length = i; +// } +// std::wstring result; +// result.reserve(length); + +// if constexpr (sizeof(SQLWCHAR) == 2) { +// // Decode UTF-16 to UTF-32 (with surrogate pair handling) +// for (size_t i = 0; i < length; ++i) { +// uint16_t wc = static_cast(sqlwStr[i]); +// // Check if this is a high surrogate (U+D800–U+DBFF) +// if (wc >= UNICODE_SURROGATE_HIGH_START && wc <= UNICODE_SURROGATE_HIGH_END && i + 1 < length) { +// uint16_t low = static_cast(sqlwStr[i + 1]); +// // Check if the next code unit is a low surrogate (U+DC00–U+DFFF) +// if (low >= UNICODE_SURROGATE_LOW_START && low <= UNICODE_SURROGATE_LOW_END) { +// // Combine surrogate pair into a single code point +// uint32_t cp = (((wc - UNICODE_SURROGATE_HIGH_START) << 10) | (low - UNICODE_SURROGATE_LOW_START)) + 0x10000; +// result.push_back(static_cast(cp)); +// ++i; // Skip the low surrogate +// continue; +// } +// } +// // If valid scalar then append, else append replacement char 
(U+FFFD) +// if (IsValidUnicodeScalar(wc)) { +// result.push_back(static_cast(wc)); +// } else { +// result.push_back(static_cast(UNICODE_REPLACEMENT_CHAR)); +// } +// } +// } else { +// // SQLWCHAR is UTF-32, so just copy with validation +// for (size_t i = 0; i < length; ++i) { +// uint32_t cp = static_cast(sqlwStr[i]); +// if (IsValidUnicodeScalar(cp)) { +// result.push_back(static_cast(cp)); +// } else { +// result.push_back(static_cast(UNICODE_REPLACEMENT_CHAR)); +// } +// } +// } +// return result; +// } inline std::wstring SQLWCHARToWString(const SQLWCHAR* sqlwStr, size_t length = SQL_NTS) { if (!sqlwStr) return std::wstring(); - if (length == SQL_NTS) { size_t i = 0; while (sqlwStr[i] != 0) ++i; @@ -80,29 +126,28 @@ inline std::wstring SQLWCHARToWString(const SQLWCHAR* sqlwStr, size_t length = S } std::wstring result; result.reserve(length); - if constexpr (sizeof(SQLWCHAR) == 2) { - // Decode UTF-16 to UTF-32 (with surrogate pair handling) - for (size_t i = 0; i < length; ++i) { + for (size_t i = 0; i < length; ) { // Use a manual increment to handle skipping uint16_t wc = static_cast(sqlwStr[i]); - // Check if this is a high surrogate (U+D800–U+DBFF) - if (wc >= UNICODE_SURROGATE_HIGH_START && wc <= UNICODE_SURROGATE_HIGH_END && i + 1 < length) { + // Check for high surrogate and valid low surrogate + if (wc >= UNICODE_SURROGATE_HIGH_START && wc <= UNICODE_SURROGATE_HIGH_END && (i + 1 < length)) { uint16_t low = static_cast(sqlwStr[i + 1]); - // Check if the next code unit is a low surrogate (U+DC00–U+DFFF) if (low >= UNICODE_SURROGATE_LOW_START && low <= UNICODE_SURROGATE_LOW_END) { - // Combine surrogate pair into a single code point + // Combine into a single code point uint32_t cp = (((wc - UNICODE_SURROGATE_HIGH_START) << 10) | (low - UNICODE_SURROGATE_LOW_START)) + 0x10000; result.push_back(static_cast(cp)); - ++i; // Skip the low surrogate + i += 2; // Move past both surrogates continue; } } - // If valid scalar then append, else append replacement 
char (U+FFFD) + // If we reach here, it's not a valid surrogate pair or is a BMP character. + // Check if it's a valid scalar and append, otherwise append replacement char. if (IsValidUnicodeScalar(wc)) { result.push_back(static_cast(wc)); } else { result.push_back(static_cast(UNICODE_REPLACEMENT_CHAR)); } + ++i; // Move to the next code unit } } else { // SQLWCHAR is UTF-32, so just copy with validation From 1773c671a336ba6671a482674a1466c998298669 Mon Sep 17 00:00:00 2001 From: gargsaumya Date: Mon, 15 Sep 2025 11:03:39 +0530 Subject: [PATCH 10/11] mac test --- eng/pipelines/pr-validation-pipeline.yml | 2616 +++++++++++----------- main.py | 52 +- mssql_python/pybind/ddbc_bindings.h | 47 +- 3 files changed, 1400 insertions(+), 1315 deletions(-) diff --git a/eng/pipelines/pr-validation-pipeline.yml b/eng/pipelines/pr-validation-pipeline.yml index 6621b10df..8da044f21 100644 --- a/eng/pipelines/pr-validation-pipeline.yml +++ b/eng/pipelines/pr-validation-pipeline.yml @@ -7,76 +7,76 @@ trigger: - main jobs: -- job: PytestOnWindows - displayName: 'Windows x64' - pool: - vmImage: 'windows-latest' +# - job: PytestOnWindows +# displayName: 'Windows x64' +# pool: +# vmImage: 'windows-latest' - steps: - - task: UsePythonVersion@0 - inputs: - versionSpec: '3.13' - addToPath: true - githubToken: $(GITHUB_TOKEN) - displayName: 'Use Python 3.13' - - - script: | - python -m pip install --upgrade pip - pip install -r requirements.txt - displayName: 'Install dependencies' - - # Start LocalDB instance - - powershell: | - sqllocaldb create MSSQLLocalDB - sqllocaldb start MSSQLLocalDB - displayName: 'Start LocalDB instance' - - # Create database and user - - powershell: | - sqlcmd -S "(localdb)\MSSQLLocalDB" -Q "CREATE DATABASE TestDB" - sqlcmd -S "(localdb)\MSSQLLocalDB" -Q "CREATE LOGIN testuser WITH PASSWORD = '$(DB_PASSWORD)'" - sqlcmd -S "(localdb)\MSSQLLocalDB" -d TestDB -Q "CREATE USER testuser FOR LOGIN testuser" - sqlcmd -S "(localdb)\MSSQLLocalDB" -d TestDB -Q "ALTER ROLE 
db_owner ADD MEMBER testuser" - displayName: 'Setup database and user' - env: - DB_PASSWORD: $(DB_PASSWORD) - - - script: | - cd mssql_python\pybind - build.bat x64 - displayName: 'Build .pyd file' - - - script: | - python -m pytest -v --junitxml=test-results.xml --cov=. --cov-report=xml --capture=tee-sys --cache-clear - displayName: 'Run tests with coverage' - env: - DB_CONNECTION_STRING: 'Server=(localdb)\MSSQLLocalDB;Database=TestDB;Uid=testuser;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes' - - - task: PublishBuildArtifacts@1 - inputs: - PathtoPublish: 'mssql_python/ddbc_bindings.cp313-amd64.pyd' - ArtifactName: 'ddbc_bindings' - publishLocation: 'Container' - displayName: 'Publish pyd file as artifact' - - - task: PublishBuildArtifacts@1 - inputs: - PathtoPublish: 'mssql_python/ddbc_bindings.cp313-amd64.pdb' - ArtifactName: 'ddbc_bindings' - publishLocation: 'Container' - displayName: 'Publish pdb file as artifact' - - - task: PublishTestResults@2 - condition: succeededOrFailed() - inputs: - testResultsFiles: '**/test-results.xml' - testRunTitle: 'Publish test results' - - - task: PublishCodeCoverageResults@1 - inputs: - codeCoverageTool: 'Cobertura' - summaryFileLocation: 'coverage.xml' - displayName: 'Publish code coverage results' +# steps: +# - task: UsePythonVersion@0 +# inputs: +# versionSpec: '3.13' +# addToPath: true +# githubToken: $(GITHUB_TOKEN) +# displayName: 'Use Python 3.13' + +# - script: | +# python -m pip install --upgrade pip +# pip install -r requirements.txt +# displayName: 'Install dependencies' + +# # Start LocalDB instance +# - powershell: | +# sqllocaldb create MSSQLLocalDB +# sqllocaldb start MSSQLLocalDB +# displayName: 'Start LocalDB instance' + +# # Create database and user +# - powershell: | +# sqlcmd -S "(localdb)\MSSQLLocalDB" -Q "CREATE DATABASE TestDB" +# sqlcmd -S "(localdb)\MSSQLLocalDB" -Q "CREATE LOGIN testuser WITH PASSWORD = '$(DB_PASSWORD)'" +# sqlcmd -S "(localdb)\MSSQLLocalDB" -d TestDB -Q "CREATE USER testuser FOR 
LOGIN testuser" +# sqlcmd -S "(localdb)\MSSQLLocalDB" -d TestDB -Q "ALTER ROLE db_owner ADD MEMBER testuser" +# displayName: 'Setup database and user' +# env: +# DB_PASSWORD: $(DB_PASSWORD) + +# - script: | +# cd mssql_python\pybind +# build.bat x64 +# displayName: 'Build .pyd file' + +# - script: | +# python -m pytest -v --junitxml=test-results.xml --cov=. --cov-report=xml --capture=tee-sys --cache-clear +# displayName: 'Run tests with coverage' +# env: +# DB_CONNECTION_STRING: 'Server=(localdb)\MSSQLLocalDB;Database=TestDB;Uid=testuser;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes' + +# - task: PublishBuildArtifacts@1 +# inputs: +# PathtoPublish: 'mssql_python/ddbc_bindings.cp313-amd64.pyd' +# ArtifactName: 'ddbc_bindings' +# publishLocation: 'Container' +# displayName: 'Publish pyd file as artifact' + +# - task: PublishBuildArtifacts@1 +# inputs: +# PathtoPublish: 'mssql_python/ddbc_bindings.cp313-amd64.pdb' +# ArtifactName: 'ddbc_bindings' +# publishLocation: 'Container' +# displayName: 'Publish pdb file as artifact' + +# - task: PublishTestResults@2 +# condition: succeededOrFailed() +# inputs: +# testResultsFiles: '**/test-results.xml' +# testRunTitle: 'Publish test results' + +# - task: PublishCodeCoverageResults@1 +# inputs: +# codeCoverageTool: 'Cobertura' +# summaryFileLocation: 'coverage.xml' +# displayName: 'Publish code coverage results' - job: PytestOnMacOS displayName: 'macOS x86_64' @@ -149,6 +149,8 @@ jobs: - script: | echo "Build successful, running tests now" + + python main.py python -m pytest -v --junitxml=test-results.xml --cov=. 
--cov-report=xml --capture=tee-sys --cache-clear displayName: 'Run pytest with coverage' env: @@ -161,1319 +163,1319 @@ jobs: testResultsFiles: '**/test-results.xml' testRunTitle: 'Publish pytest results on macOS' -- job: PytestOnLinux - displayName: 'Linux x86_64' - pool: - vmImage: 'ubuntu-latest' - - strategy: - matrix: - Ubuntu: - dockerImage: 'ubuntu:22.04' - distroName: 'Ubuntu' - Debian: - dockerImage: 'debian:12' - distroName: 'Debian' - - steps: - - script: | - # Create a Docker container for testing - docker run -d --name test-container-$(distroName) \ - -v $(Build.SourcesDirectory):/workspace \ - -w /workspace \ - --network bridge \ - $(dockerImage) \ - tail -f /dev/null - displayName: 'Create $(distroName) container' - - - script: | - # Start SQL Server container - docker run -d --name sqlserver-$(distroName) \ - -e ACCEPT_EULA=Y \ - -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ - -p 1433:1433 \ - mcr.microsoft.com/mssql/server:2022-latest +# - job: PytestOnLinux +# displayName: 'Linux x86_64' +# pool: +# vmImage: 'ubuntu-latest' + +# strategy: +# matrix: +# Ubuntu: +# dockerImage: 'ubuntu:22.04' +# distroName: 'Ubuntu' +# Debian: +# dockerImage: 'debian:12' +# distroName: 'Debian' + +# steps: +# - script: | +# # Create a Docker container for testing +# docker run -d --name test-container-$(distroName) \ +# -v $(Build.SourcesDirectory):/workspace \ +# -w /workspace \ +# --network bridge \ +# $(dockerImage) \ +# tail -f /dev/null +# displayName: 'Create $(distroName) container' + +# - script: | +# # Start SQL Server container +# docker run -d --name sqlserver-$(distroName) \ +# -e ACCEPT_EULA=Y \ +# -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ +# -p 1433:1433 \ +# mcr.microsoft.com/mssql/server:2022-latest - # Wait for SQL Server to be ready - echo "Waiting for SQL Server to start..." 
- for i in {1..60}; do - if docker exec sqlserver-$(distroName) \ - /opt/mssql-tools18/bin/sqlcmd \ - -S localhost \ - -U SA \ - -P "$(DB_PASSWORD)" \ - -C -Q "SELECT 1" >/dev/null 2>&1; then - echo "SQL Server is ready!" - break - fi - echo "Waiting... ($i/60)" - sleep 2 - done +# # Wait for SQL Server to be ready +# echo "Waiting for SQL Server to start..." +# for i in {1..60}; do +# if docker exec sqlserver-$(distroName) \ +# /opt/mssql-tools18/bin/sqlcmd \ +# -S localhost \ +# -U SA \ +# -P "$(DB_PASSWORD)" \ +# -C -Q "SELECT 1" >/dev/null 2>&1; then +# echo "SQL Server is ready!" +# break +# fi +# echo "Waiting... ($i/60)" +# sleep 2 +# done - # Create test database - docker exec sqlserver-$(distroName) \ - /opt/mssql-tools18/bin/sqlcmd \ - -S localhost \ - -U SA \ - -P "$(DB_PASSWORD)" \ - -C -Q "CREATE DATABASE TestDB" - displayName: 'Start SQL Server container for $(distroName)' - env: - DB_PASSWORD: $(DB_PASSWORD) - - - script: | - # Install dependencies in the container - if [ "$(distroName)" = "Ubuntu" ]; then - docker exec test-container-$(distroName) bash -c " - export DEBIAN_FRONTEND=noninteractive - export TZ=UTC - ln -snf /usr/share/zoneinfo/\$TZ /etc/localtime && echo \$TZ > /etc/timezone - apt-get update && - apt-get install -y python3 python3-pip python3-venv python3-full cmake curl wget gnupg software-properties-common build-essential python3-dev pybind11-dev - " - else - # Debian - docker exec test-container-$(distroName) bash -c " - export DEBIAN_FRONTEND=noninteractive - export TZ=UTC - ln -snf /usr/share/zoneinfo/\$TZ /etc/localtime && echo \$TZ > /etc/timezone - apt-get update && - apt-get install -y python3 python3-pip python3-venv python3-full cmake curl wget gnupg software-properties-common build-essential python3-dev pybind11-dev - " - fi - displayName: 'Install basic dependencies in $(distroName) container' - - - script: | - # Install ODBC driver in the container - docker exec test-container-$(distroName) bash -c " - export 
DEBIAN_FRONTEND=noninteractive +# # Create test database +# docker exec sqlserver-$(distroName) \ +# /opt/mssql-tools18/bin/sqlcmd \ +# -S localhost \ +# -U SA \ +# -P "$(DB_PASSWORD)" \ +# -C -Q "CREATE DATABASE TestDB" +# displayName: 'Start SQL Server container for $(distroName)' +# env: +# DB_PASSWORD: $(DB_PASSWORD) + +# - script: | +# # Install dependencies in the container +# if [ "$(distroName)" = "Ubuntu" ]; then +# docker exec test-container-$(distroName) bash -c " +# export DEBIAN_FRONTEND=noninteractive +# export TZ=UTC +# ln -snf /usr/share/zoneinfo/\$TZ /etc/localtime && echo \$TZ > /etc/timezone +# apt-get update && +# apt-get install -y python3 python3-pip python3-venv python3-full cmake curl wget gnupg software-properties-common build-essential python3-dev pybind11-dev +# " +# else +# # Debian +# docker exec test-container-$(distroName) bash -c " +# export DEBIAN_FRONTEND=noninteractive +# export TZ=UTC +# ln -snf /usr/share/zoneinfo/\$TZ /etc/localtime && echo \$TZ > /etc/timezone +# apt-get update && +# apt-get install -y python3 python3-pip python3-venv python3-full cmake curl wget gnupg software-properties-common build-essential python3-dev pybind11-dev +# " +# fi +# displayName: 'Install basic dependencies in $(distroName) container' + +# - script: | +# # Install ODBC driver in the container +# docker exec test-container-$(distroName) bash -c " +# export DEBIAN_FRONTEND=noninteractive - # Download the package to configure the Microsoft repo - if [ '$(distroName)' = 'Ubuntu' ]; then - curl -sSL -O https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb - else - # Debian 12 - curl -sSL -O https://packages.microsoft.com/config/debian/12/packages-microsoft-prod.deb - fi +# # Download the package to configure the Microsoft repo +# if [ '$(distroName)' = 'Ubuntu' ]; then +# curl -sSL -O https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb +# else +# # Debian 12 +# curl -sSL -O 
https://packages.microsoft.com/config/debian/12/packages-microsoft-prod.deb +# fi - # Install the package - dpkg -i packages-microsoft-prod.deb || true - rm packages-microsoft-prod.deb +# # Install the package +# dpkg -i packages-microsoft-prod.deb || true +# rm packages-microsoft-prod.deb - # Update package list - apt-get update +# # Update package list +# apt-get update - # Install the driver - ACCEPT_EULA=Y apt-get install -y msodbcsql18 - # optional: for bcp and sqlcmd - ACCEPT_EULA=Y apt-get install -y mssql-tools18 - # optional: for unixODBC development headers - apt-get install -y unixodbc-dev - " - displayName: 'Install ODBC Driver in $(distroName) container' - - - script: | - # Install Python dependencies in the container using virtual environment - docker exec test-container-$(distroName) bash -c " - # Create a virtual environment - python3 -m venv /opt/venv - source /opt/venv/bin/activate +# # Install the driver +# ACCEPT_EULA=Y apt-get install -y msodbcsql18 +# # optional: for bcp and sqlcmd +# ACCEPT_EULA=Y apt-get install -y mssql-tools18 +# # optional: for unixODBC development headers +# apt-get install -y unixodbc-dev +# " +# displayName: 'Install ODBC Driver in $(distroName) container' + +# - script: | +# # Install Python dependencies in the container using virtual environment +# docker exec test-container-$(distroName) bash -c " +# # Create a virtual environment +# python3 -m venv /opt/venv +# source /opt/venv/bin/activate - # Install dependencies in the virtual environment - python -m pip install --upgrade pip - python -m pip install -r requirements.txt +# # Install dependencies in the virtual environment +# python -m pip install --upgrade pip +# python -m pip install -r requirements.txt - # Make the virtual environment globally available - echo 'source /opt/venv/bin/activate' >> ~/.bashrc - " - displayName: 'Install Python dependencies in $(distroName) container' - - - script: | - # Build pybind bindings in the container - docker exec 
test-container-$(distroName) bash -c " - source /opt/venv/bin/activate - cd mssql_python/pybind - chmod +x build.sh - ./build.sh - " - displayName: 'Build pybind bindings (.so) in $(distroName) container' - - - script: | - # Uninstall ODBC Driver before running tests - docker exec test-container-$(distroName) bash -c " - export DEBIAN_FRONTEND=noninteractive - apt-get remove --purge -y msodbcsql18 mssql-tools18 unixodbc-dev - rm -f /usr/bin/sqlcmd - rm -f /usr/bin/bcp - rm -rf /opt/microsoft/msodbcsql - rm -f /lib/x86_64-linux-gnu/libodbcinst.so.2 - odbcinst -u -d -n 'ODBC Driver 18 for SQL Server' || true - echo 'Uninstalled ODBC Driver and cleaned up libraries' - echo 'Verifying x86_64 debian_ubuntu driver library signatures:' - ldd mssql_python/libs/linux/debian_ubuntu/x86_64/lib/libmsodbcsql-18.5.so.1.1 - " - displayName: 'Uninstall ODBC Driver before running tests in $(distroName) container' - - - script: | - # Run tests in the container - # Get SQL Server container IP - SQLSERVER_IP=$(docker inspect sqlserver-$(distroName) --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') - echo "SQL Server IP: $SQLSERVER_IP" +# # Make the virtual environment globally available +# echo 'source /opt/venv/bin/activate' >> ~/.bashrc +# " +# displayName: 'Install Python dependencies in $(distroName) container' + +# - script: | +# # Build pybind bindings in the container +# docker exec test-container-$(distroName) bash -c " +# source /opt/venv/bin/activate +# cd mssql_python/pybind +# chmod +x build.sh +# ./build.sh +# " +# displayName: 'Build pybind bindings (.so) in $(distroName) container' + +# - script: | +# # Uninstall ODBC Driver before running tests +# docker exec test-container-$(distroName) bash -c " +# export DEBIAN_FRONTEND=noninteractive +# apt-get remove --purge -y msodbcsql18 mssql-tools18 unixodbc-dev +# rm -f /usr/bin/sqlcmd +# rm -f /usr/bin/bcp +# rm -rf /opt/microsoft/msodbcsql +# rm -f /lib/x86_64-linux-gnu/libodbcinst.so.2 +# odbcinst -u -d 
-n 'ODBC Driver 18 for SQL Server' || true +# echo 'Uninstalled ODBC Driver and cleaned up libraries' +# echo 'Verifying x86_64 debian_ubuntu driver library signatures:' +# ldd mssql_python/libs/linux/debian_ubuntu/x86_64/lib/libmsodbcsql-18.5.so.1.1 +# " +# displayName: 'Uninstall ODBC Driver before running tests in $(distroName) container' + +# - script: | +# # Run tests in the container +# # Get SQL Server container IP +# SQLSERVER_IP=$(docker inspect sqlserver-$(distroName) --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') +# echo "SQL Server IP: $SQLSERVER_IP" - docker exec \ - -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ - -e DB_PASSWORD="$(DB_PASSWORD)" \ - test-container-$(distroName) bash -c " - source /opt/venv/bin/activate - echo 'Build successful, running tests now on $(distroName)' - echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' - python -m pytest -v --junitxml=test-results-$(distroName).xml --cov=. 
--cov-report=xml:coverage-$(distroName).xml --capture=tee-sys --cache-clear - " - displayName: 'Run pytest with coverage in $(distroName) container' - env: - DB_PASSWORD: $(DB_PASSWORD) - - - script: | - # Copy test results from container to host - docker cp test-container-$(distroName):/workspace/test-results-$(distroName).xml $(Build.SourcesDirectory)/ - docker cp test-container-$(distroName):/workspace/coverage-$(distroName).xml $(Build.SourcesDirectory)/ - displayName: 'Copy test results from $(distroName) container' - condition: always() - - - script: | - # Clean up containers - docker stop test-container-$(distroName) || true - docker rm test-container-$(distroName) || true - docker stop sqlserver-$(distroName) || true - docker rm sqlserver-$(distroName) || true - displayName: 'Clean up $(distroName) containers' - condition: always() - - - task: PublishTestResults@2 - condition: succeededOrFailed() - inputs: - testResultsFiles: '**/test-results-$(distroName).xml' - testRunTitle: 'Publish pytest results on $(distroName)' - -- job: PytestOnLinux_ARM64 - displayName: 'Linux ARM64' - pool: - vmImage: 'ubuntu-latest' - - strategy: - matrix: - Ubuntu_ARM64: - dockerImage: 'ubuntu:22.04' - distroName: 'Ubuntu' - archName: 'arm64' - Debian_ARM64: - dockerImage: 'debian:12' - distroName: 'Debian' - archName: 'arm64' - - steps: - - script: | - # Set up Docker buildx for multi-architecture support - docker run --rm --privileged multiarch/qemu-user-static --reset -p yes - docker buildx create --name multiarch --driver docker-container --use - docker buildx inspect --bootstrap - displayName: 'Setup Docker buildx for ARM64 emulation' - - - script: | - # Create a Docker container for testing on ARM64 - docker run -d --name test-container-$(distroName)-$(archName) \ - --platform linux/arm64 \ - -v $(Build.SourcesDirectory):/workspace \ - -w /workspace \ - --network bridge \ - $(dockerImage) \ - tail -f /dev/null - displayName: 'Create $(distroName) ARM64 container' - - - 
script: | - # Start SQL Server container (x86_64 - SQL Server doesn't support ARM64) - docker run -d --name sqlserver-$(distroName)-$(archName) \ - --platform linux/amd64 \ - -e ACCEPT_EULA=Y \ - -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ - -p 1433:1433 \ - mcr.microsoft.com/mssql/server:2022-latest +# docker exec \ +# -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ +# -e DB_PASSWORD="$(DB_PASSWORD)" \ +# test-container-$(distroName) bash -c " +# source /opt/venv/bin/activate +# echo 'Build successful, running tests now on $(distroName)' +# echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' +# python -m pytest -v --junitxml=test-results-$(distroName).xml --cov=. --cov-report=xml:coverage-$(distroName).xml --capture=tee-sys --cache-clear +# " +# displayName: 'Run pytest with coverage in $(distroName) container' +# env: +# DB_PASSWORD: $(DB_PASSWORD) + +# - script: | +# # Copy test results from container to host +# docker cp test-container-$(distroName):/workspace/test-results-$(distroName).xml $(Build.SourcesDirectory)/ +# docker cp test-container-$(distroName):/workspace/coverage-$(distroName).xml $(Build.SourcesDirectory)/ +# displayName: 'Copy test results from $(distroName) container' +# condition: always() + +# - script: | +# # Clean up containers +# docker stop test-container-$(distroName) || true +# docker rm test-container-$(distroName) || true +# docker stop sqlserver-$(distroName) || true +# docker rm sqlserver-$(distroName) || true +# displayName: 'Clean up $(distroName) containers' +# condition: always() + +# - task: PublishTestResults@2 +# condition: succeededOrFailed() +# inputs: +# testResultsFiles: '**/test-results-$(distroName).xml' +# testRunTitle: 'Publish pytest results on $(distroName)' + +# - job: PytestOnLinux_ARM64 +# displayName: 'Linux ARM64' 
+# pool: +# vmImage: 'ubuntu-latest' + +# strategy: +# matrix: +# Ubuntu_ARM64: +# dockerImage: 'ubuntu:22.04' +# distroName: 'Ubuntu' +# archName: 'arm64' +# Debian_ARM64: +# dockerImage: 'debian:12' +# distroName: 'Debian' +# archName: 'arm64' + +# steps: +# - script: | +# # Set up Docker buildx for multi-architecture support +# docker run --rm --privileged multiarch/qemu-user-static --reset -p yes +# docker buildx create --name multiarch --driver docker-container --use +# docker buildx inspect --bootstrap +# displayName: 'Setup Docker buildx for ARM64 emulation' + +# - script: | +# # Create a Docker container for testing on ARM64 +# docker run -d --name test-container-$(distroName)-$(archName) \ +# --platform linux/arm64 \ +# -v $(Build.SourcesDirectory):/workspace \ +# -w /workspace \ +# --network bridge \ +# $(dockerImage) \ +# tail -f /dev/null +# displayName: 'Create $(distroName) ARM64 container' + +# - script: | +# # Start SQL Server container (x86_64 - SQL Server doesn't support ARM64) +# docker run -d --name sqlserver-$(distroName)-$(archName) \ +# --platform linux/amd64 \ +# -e ACCEPT_EULA=Y \ +# -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ +# -p 1433:1433 \ +# mcr.microsoft.com/mssql/server:2022-latest - # Wait for SQL Server to be ready - echo "Waiting for SQL Server to start..." - for i in {1..60}; do - if docker exec sqlserver-$(distroName)-$(archName) \ - /opt/mssql-tools18/bin/sqlcmd \ - -S localhost \ - -U SA \ - -P "$(DB_PASSWORD)" \ - -C -Q "SELECT 1" >/dev/null 2>&1; then - echo "SQL Server is ready!" - break - fi - echo "Waiting... ($i/60)" - sleep 2 - done +# # Wait for SQL Server to be ready +# echo "Waiting for SQL Server to start..." +# for i in {1..60}; do +# if docker exec sqlserver-$(distroName)-$(archName) \ +# /opt/mssql-tools18/bin/sqlcmd \ +# -S localhost \ +# -U SA \ +# -P "$(DB_PASSWORD)" \ +# -C -Q "SELECT 1" >/dev/null 2>&1; then +# echo "SQL Server is ready!" +# break +# fi +# echo "Waiting... 
($i/60)" +# sleep 2 +# done - # Create test database - docker exec sqlserver-$(distroName)-$(archName) \ - /opt/mssql-tools18/bin/sqlcmd \ - -S localhost \ - -U SA \ - -P "$(DB_PASSWORD)" \ - -C -Q "CREATE DATABASE TestDB" - displayName: 'Start SQL Server container for $(distroName) ARM64' - env: - DB_PASSWORD: $(DB_PASSWORD) - - - script: | - # Install dependencies in the ARM64 container - if [ "$(distroName)" = "Ubuntu" ]; then - docker exec test-container-$(distroName)-$(archName) bash -c " - export DEBIAN_FRONTEND=noninteractive - export TZ=UTC - ln -snf /usr/share/zoneinfo/\$TZ /etc/localtime && echo \$TZ > /etc/timezone - apt-get update && - apt-get install -y python3 python3-pip python3-venv python3-full cmake curl wget gnupg software-properties-common build-essential python3-dev pybind11-dev - # Verify architecture - uname -m - dpkg --print-architecture - " - else - # Debian ARM64 - docker exec test-container-$(distroName)-$(archName) bash -c " - export DEBIAN_FRONTEND=noninteractive - export TZ=UTC - ln -snf /usr/share/zoneinfo/\$TZ /etc/localtime && echo \$TZ > /etc/timezone - apt-get update && - apt-get install -y python3 python3-pip python3-venv python3-full cmake curl wget gnupg software-properties-common build-essential python3-dev pybind11-dev - # Verify architecture - uname -m - dpkg --print-architecture - " - fi - displayName: 'Install basic dependencies in $(distroName) ARM64 container' - - - script: | - # Install ODBC driver in the ARM64 container - docker exec test-container-$(distroName)-$(archName) bash -c " - export DEBIAN_FRONTEND=noninteractive +# # Create test database +# docker exec sqlserver-$(distroName)-$(archName) \ +# /opt/mssql-tools18/bin/sqlcmd \ +# -S localhost \ +# -U SA \ +# -P "$(DB_PASSWORD)" \ +# -C -Q "CREATE DATABASE TestDB" +# displayName: 'Start SQL Server container for $(distroName) ARM64' +# env: +# DB_PASSWORD: $(DB_PASSWORD) + +# - script: | +# # Install dependencies in the ARM64 container +# if [ "$(distroName)" = 
"Ubuntu" ]; then +# docker exec test-container-$(distroName)-$(archName) bash -c " +# export DEBIAN_FRONTEND=noninteractive +# export TZ=UTC +# ln -snf /usr/share/zoneinfo/\$TZ /etc/localtime && echo \$TZ > /etc/timezone +# apt-get update && +# apt-get install -y python3 python3-pip python3-venv python3-full cmake curl wget gnupg software-properties-common build-essential python3-dev pybind11-dev +# # Verify architecture +# uname -m +# dpkg --print-architecture +# " +# else +# # Debian ARM64 +# docker exec test-container-$(distroName)-$(archName) bash -c " +# export DEBIAN_FRONTEND=noninteractive +# export TZ=UTC +# ln -snf /usr/share/zoneinfo/\$TZ /etc/localtime && echo \$TZ > /etc/timezone +# apt-get update && +# apt-get install -y python3 python3-pip python3-venv python3-full cmake curl wget gnupg software-properties-common build-essential python3-dev pybind11-dev +# # Verify architecture +# uname -m +# dpkg --print-architecture +# " +# fi +# displayName: 'Install basic dependencies in $(distroName) ARM64 container' + +# - script: | +# # Install ODBC driver in the ARM64 container +# docker exec test-container-$(distroName)-$(archName) bash -c " +# export DEBIAN_FRONTEND=noninteractive - # Download the package to configure the Microsoft repo - if [ '$(distroName)' = 'Ubuntu' ]; then - curl -sSL -O https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb - else - # Debian 12 - curl -sSL -O https://packages.microsoft.com/config/debian/12/packages-microsoft-prod.deb - fi +# # Download the package to configure the Microsoft repo +# if [ '$(distroName)' = 'Ubuntu' ]; then +# curl -sSL -O https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb +# else +# # Debian 12 +# curl -sSL -O https://packages.microsoft.com/config/debian/12/packages-microsoft-prod.deb +# fi - # Install the package - dpkg -i packages-microsoft-prod.deb || true - rm packages-microsoft-prod.deb +# # Install the package +# dpkg -i 
packages-microsoft-prod.deb || true +# rm packages-microsoft-prod.deb - # Update package list - apt-get update +# # Update package list +# apt-get update - # Install the driver (ARM64 version) - ACCEPT_EULA=Y apt-get install -y msodbcsql18 - # optional: for bcp and sqlcmd - ACCEPT_EULA=Y apt-get install -y mssql-tools18 - # optional: for unixODBC development headers - apt-get install -y unixodbc-dev - " - displayName: 'Install ODBC Driver in $(distroName) ARM64 container' - - - script: | - # Install Python dependencies in the ARM64 container using virtual environment - docker exec test-container-$(distroName)-$(archName) bash -c " - # Create a virtual environment - python3 -m venv /opt/venv - source /opt/venv/bin/activate +# # Install the driver (ARM64 version) +# ACCEPT_EULA=Y apt-get install -y msodbcsql18 +# # optional: for bcp and sqlcmd +# ACCEPT_EULA=Y apt-get install -y mssql-tools18 +# # optional: for unixODBC development headers +# apt-get install -y unixodbc-dev +# " +# displayName: 'Install ODBC Driver in $(distroName) ARM64 container' + +# - script: | +# # Install Python dependencies in the ARM64 container using virtual environment +# docker exec test-container-$(distroName)-$(archName) bash -c " +# # Create a virtual environment +# python3 -m venv /opt/venv +# source /opt/venv/bin/activate - # Install dependencies in the virtual environment - python -m pip install --upgrade pip - python -m pip install -r requirements.txt +# # Install dependencies in the virtual environment +# python -m pip install --upgrade pip +# python -m pip install -r requirements.txt - # Make the virtual environment globally available - echo 'source /opt/venv/bin/activate' >> ~/.bashrc - " - displayName: 'Install Python dependencies in $(distroName) ARM64 container' - - - script: | - # Build pybind bindings in the ARM64 container - docker exec test-container-$(distroName)-$(archName) bash -c " - source /opt/venv/bin/activate - cd mssql_python/pybind - chmod +x build.sh - 
./build.sh - " - displayName: 'Build pybind bindings (.so) in $(distroName) ARM64 container' - - - script: | - # Uninstall ODBC Driver before running tests - docker exec test-container-$(distroName)-$(archName) bash -c " - export DEBIAN_FRONTEND=noninteractive - apt-get remove --purge -y msodbcsql18 mssql-tools18 unixodbc-dev - rm -f /usr/bin/sqlcmd - rm -f /usr/bin/bcp - rm -rf /opt/microsoft/msodbcsql - rm -f /lib/aarch64-linux-gnu/libodbcinst.so.2 - odbcinst -u -d -n 'ODBC Driver 11 for SQL Server' || true - echo 'Uninstalled ODBC Driver and cleaned up libraries' - echo 'Verifying arm64 debian_ubuntu driver library signatures:' - ldd mssql_python/libs/linux/debian_ubuntu/arm64/lib/libmsodbcsql-18.5.so.1.1 - " - displayName: 'Uninstall ODBC Driver before running tests in $(distroName) ARM64 container' - - - script: | - # Run tests in the ARM64 container - # Get SQL Server container IP - SQLSERVER_IP=$(docker inspect sqlserver-$(distroName)-$(archName) --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') - echo "SQL Server IP: $SQLSERVER_IP" +# # Make the virtual environment globally available +# echo 'source /opt/venv/bin/activate' >> ~/.bashrc +# " +# displayName: 'Install Python dependencies in $(distroName) ARM64 container' + +# - script: | +# # Build pybind bindings in the ARM64 container +# docker exec test-container-$(distroName)-$(archName) bash -c " +# source /opt/venv/bin/activate +# cd mssql_python/pybind +# chmod +x build.sh +# ./build.sh +# " +# displayName: 'Build pybind bindings (.so) in $(distroName) ARM64 container' + +# - script: | +# # Uninstall ODBC Driver before running tests +# docker exec test-container-$(distroName)-$(archName) bash -c " +# export DEBIAN_FRONTEND=noninteractive +# apt-get remove --purge -y msodbcsql18 mssql-tools18 unixodbc-dev +# rm -f /usr/bin/sqlcmd +# rm -f /usr/bin/bcp +# rm -rf /opt/microsoft/msodbcsql +# rm -f /lib/aarch64-linux-gnu/libodbcinst.so.2 +# odbcinst -u -d -n 'ODBC Driver 11 for SQL Server' 
|| true +# echo 'Uninstalled ODBC Driver and cleaned up libraries' +# echo 'Verifying arm64 debian_ubuntu driver library signatures:' +# ldd mssql_python/libs/linux/debian_ubuntu/arm64/lib/libmsodbcsql-18.5.so.1.1 +# " +# displayName: 'Uninstall ODBC Driver before running tests in $(distroName) ARM64 container' + +# - script: | +# # Run tests in the ARM64 container +# # Get SQL Server container IP +# SQLSERVER_IP=$(docker inspect sqlserver-$(distroName)-$(archName) --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') +# echo "SQL Server IP: $SQLSERVER_IP" - docker exec \ - -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ - -e DB_PASSWORD="$(DB_PASSWORD)" \ - test-container-$(distroName)-$(archName) bash -c " - source /opt/venv/bin/activate - echo 'Build successful, running tests now on $(distroName) ARM64' - echo 'Architecture:' \$(uname -m) - echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' - python main.py - python -m pytest -v --junitxml=test-results-$(distroName)-$(archName).xml --cov=. 
--cov-report=xml:coverage-$(distroName)-$(archName).xml --capture=tee-sys --cache-clear - " - displayName: 'Run pytest with coverage in $(distroName) ARM64 container' - env: - DB_PASSWORD: $(DB_PASSWORD) - - - script: | - # Copy test results from container to host - docker cp test-container-$(distroName)-$(archName):/workspace/test-results-$(distroName)-$(archName).xml $(Build.SourcesDirectory)/ - docker cp test-container-$(distroName)-$(archName):/workspace/coverage-$(distroName)-$(archName).xml $(Build.SourcesDirectory)/ - displayName: 'Copy test results from $(distroName) ARM64 container' - condition: always() - - - script: | - # Clean up containers - docker stop test-container-$(distroName)-$(archName) || true - docker rm test-container-$(distroName)-$(archName) || true - docker stop sqlserver-$(distroName)-$(archName) || true - docker rm sqlserver-$(distroName)-$(archName) || true - displayName: 'Clean up $(distroName) ARM64 containers' - condition: always() - - - task: PublishTestResults@2 - condition: succeededOrFailed() - inputs: - testResultsFiles: '**/test-results-$(distroName)-$(archName).xml' - testRunTitle: 'Publish pytest results on $(distroName) ARM64' - -- job: PytestOnLinux_RHEL9 - displayName: 'Linux RedHat x86_64' - pool: - vmImage: 'ubuntu-latest' - - steps: - - script: | - # Create a Docker container for testing - docker run -d --name test-container-rhel9 \ - -v $(Build.SourcesDirectory):/workspace \ - -w /workspace \ - --network bridge \ - redhat/ubi9:latest \ - tail -f /dev/null - displayName: 'Create RHEL 9 container' - - - script: | - # Start SQL Server container - docker run -d --name sqlserver-rhel9 \ - -e ACCEPT_EULA=Y \ - -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ - -p 1433:1433 \ - mcr.microsoft.com/mssql/server:2022-latest +# docker exec \ +# -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ +# -e DB_PASSWORD="$(DB_PASSWORD)" \ +# 
test-container-$(distroName)-$(archName) bash -c " +# source /opt/venv/bin/activate +# echo 'Build successful, running tests now on $(distroName) ARM64' +# echo 'Architecture:' \$(uname -m) +# echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' +# python main.py +# python -m pytest -v --junitxml=test-results-$(distroName)-$(archName).xml --cov=. --cov-report=xml:coverage-$(distroName)-$(archName).xml --capture=tee-sys --cache-clear +# " +# displayName: 'Run pytest with coverage in $(distroName) ARM64 container' +# env: +# DB_PASSWORD: $(DB_PASSWORD) + +# - script: | +# # Copy test results from container to host +# docker cp test-container-$(distroName)-$(archName):/workspace/test-results-$(distroName)-$(archName).xml $(Build.SourcesDirectory)/ +# docker cp test-container-$(distroName)-$(archName):/workspace/coverage-$(distroName)-$(archName).xml $(Build.SourcesDirectory)/ +# displayName: 'Copy test results from $(distroName) ARM64 container' +# condition: always() + +# - script: | +# # Clean up containers +# docker stop test-container-$(distroName)-$(archName) || true +# docker rm test-container-$(distroName)-$(archName) || true +# docker stop sqlserver-$(distroName)-$(archName) || true +# docker rm sqlserver-$(distroName)-$(archName) || true +# displayName: 'Clean up $(distroName) ARM64 containers' +# condition: always() + +# - task: PublishTestResults@2 +# condition: succeededOrFailed() +# inputs: +# testResultsFiles: '**/test-results-$(distroName)-$(archName).xml' +# testRunTitle: 'Publish pytest results on $(distroName) ARM64' + +# - job: PytestOnLinux_RHEL9 +# displayName: 'Linux RedHat x86_64' +# pool: +# vmImage: 'ubuntu-latest' + +# steps: +# - script: | +# # Create a Docker container for testing +# docker run -d --name test-container-rhel9 \ +# -v $(Build.SourcesDirectory):/workspace \ +# -w /workspace \ +# --network bridge \ +# redhat/ubi9:latest \ +# tail -f 
/dev/null +# displayName: 'Create RHEL 9 container' + +# - script: | +# # Start SQL Server container +# docker run -d --name sqlserver-rhel9 \ +# -e ACCEPT_EULA=Y \ +# -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ +# -p 1433:1433 \ +# mcr.microsoft.com/mssql/server:2022-latest - # Wait for SQL Server to be ready - echo "Waiting for SQL Server to start..." - for i in {1..60}; do - if docker exec sqlserver-rhel9 \ - /opt/mssql-tools18/bin/sqlcmd \ - -S localhost \ - -U SA \ - -P "$(DB_PASSWORD)" \ - -C -Q "SELECT 1" >/dev/null 2>&1; then - echo "SQL Server is ready!" - break - fi - echo "Waiting... ($i/60)" - sleep 2 - done +# # Wait for SQL Server to be ready +# echo "Waiting for SQL Server to start..." +# for i in {1..60}; do +# if docker exec sqlserver-rhel9 \ +# /opt/mssql-tools18/bin/sqlcmd \ +# -S localhost \ +# -U SA \ +# -P "$(DB_PASSWORD)" \ +# -C -Q "SELECT 1" >/dev/null 2>&1; then +# echo "SQL Server is ready!" +# break +# fi +# echo "Waiting... ($i/60)" +# sleep 2 +# done - # Create test database - docker exec sqlserver-rhel9 \ - /opt/mssql-tools18/bin/sqlcmd \ - -S localhost \ - -U SA \ - -P "$(DB_PASSWORD)" \ - -C -Q "CREATE DATABASE TestDB" - displayName: 'Start SQL Server container for RHEL 9' - env: - DB_PASSWORD: $(DB_PASSWORD) - - - script: | - # Install dependencies in the RHEL 9 container - docker exec test-container-rhel9 bash -c " - # Enable CodeReady Builder repository for additional packages - dnf update -y - dnf install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm - subscription-manager repos --enable codeready-builder-for-rhel-9-$(arch)-rpms || dnf config-manager --set-enabled ubi-9-codeready-builder +# # Create test database +# docker exec sqlserver-rhel9 \ +# /opt/mssql-tools18/bin/sqlcmd \ +# -S localhost \ +# -U SA \ +# -P "$(DB_PASSWORD)" \ +# -C -Q "CREATE DATABASE TestDB" +# displayName: 'Start SQL Server container for RHEL 9' +# env: +# DB_PASSWORD: $(DB_PASSWORD) + +# - script: | +# # Install dependencies in 
the RHEL 9 container +# docker exec test-container-rhel9 bash -c " +# # Enable CodeReady Builder repository for additional packages +# dnf update -y +# dnf install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm +# subscription-manager repos --enable codeready-builder-for-rhel-9-$(arch)-rpms || dnf config-manager --set-enabled ubi-9-codeready-builder - # Install Python 3.9 (available in RHEL 9 UBI) and development tools - dnf install -y python3 python3-pip python3-devel cmake curl wget gnupg2 glibc-devel kernel-headers - dnf install -y python3-libs python3-debug - dnf install -y gcc gcc-c++ make binutils - dnf install -y cmake - # If that doesn't work, try installing from different repositories - if ! which gcc; then - echo 'Trying alternative gcc installation...' - dnf --enablerepo=ubi-9-codeready-builder install -y gcc gcc-c++ - fi - # Verify installation - python3 --version - which gcc && which g++ - gcc --version - g++ --version - " - displayName: 'Install basic dependencies in RHEL 9 container' - - - script: | - # Verify compiler installation and set environment for RHEL 9 - docker exec test-container-rhel9 bash -c " - # Verify compilers are available - which gcc || echo 'GCC not found' - which g++ || echo 'G++ not found' - gcc --version || echo 'GCC version check failed' - g++ --version || echo 'G++ version check failed' +# # Install Python 3.9 (available in RHEL 9 UBI) and development tools +# dnf install -y python3 python3-pip python3-devel cmake curl wget gnupg2 glibc-devel kernel-headers +# dnf install -y python3-libs python3-debug +# dnf install -y gcc gcc-c++ make binutils +# dnf install -y cmake +# # If that doesn't work, try installing from different repositories +# if ! which gcc; then +# echo 'Trying alternative gcc installation...' 
+# dnf --enablerepo=ubi-9-codeready-builder install -y gcc gcc-c++ +# fi +# # Verify installation +# python3 --version +# which gcc && which g++ +# gcc --version +# g++ --version +# " +# displayName: 'Install basic dependencies in RHEL 9 container' + +# - script: | +# # Verify compiler installation and set environment for RHEL 9 +# docker exec test-container-rhel9 bash -c " +# # Verify compilers are available +# which gcc || echo 'GCC not found' +# which g++ || echo 'G++ not found' +# gcc --version || echo 'GCC version check failed' +# g++ --version || echo 'G++ version check failed' - # Set compiler environment variables - export CC=/usr/bin/gcc - export CXX=/usr/bin/g++ - echo 'CC set to:' \$CC - echo 'CXX set to:' \$CXX +# # Set compiler environment variables +# export CC=/usr/bin/gcc +# export CXX=/usr/bin/g++ +# echo 'CC set to:' \$CC +# echo 'CXX set to:' \$CXX - # Create a wrapper script to preserve environment - cat > /workspace/setup_env.sh << 'EOF' - #!/bin/bash - export CC=/usr/bin/gcc - export CXX=/usr/bin/g++ - export PATH=/usr/bin:\$PATH - exec \"\$@\" - EOF - chmod +x /workspace/setup_env.sh - " - displayName: 'Verify and configure compilers in RHEL 9 container' - - - script: | - # Install ODBC driver in the RHEL 9 container - docker exec test-container-rhel9 bash -c " - # Add Microsoft repository for RHEL 9 - curl -sSL -o /etc/yum.repos.d/msprod.repo https://packages.microsoft.com/config/rhel/9/prod.repo +# # Create a wrapper script to preserve environment +# cat > /workspace/setup_env.sh << 'EOF' +# #!/bin/bash +# export CC=/usr/bin/gcc +# export CXX=/usr/bin/g++ +# export PATH=/usr/bin:\$PATH +# exec \"\$@\" +# EOF +# chmod +x /workspace/setup_env.sh +# " +# displayName: 'Verify and configure compilers in RHEL 9 container' + +# - script: | +# # Install ODBC driver in the RHEL 9 container +# docker exec test-container-rhel9 bash -c " +# # Add Microsoft repository for RHEL 9 +# curl -sSL -o /etc/yum.repos.d/msprod.repo 
https://packages.microsoft.com/config/rhel/9/prod.repo - # Install the driver - ACCEPT_EULA=Y dnf install -y msodbcsql18 - # optional: for bcp and sqlcmd - ACCEPT_EULA=Y dnf install -y mssql-tools18 - # optional: for unixODBC development headers - dnf install -y unixODBC-devel - " - displayName: 'Install ODBC Driver in RHEL 9 container' - - - script: | - # Install Python dependencies in the container using virtual environment - docker exec test-container-rhel9 bash -c " - # Create a virtual environment with Python 3.9 - python3 -m venv myvenv - source myvenv/bin/activate - - # Install dependencies in the virtual environment - python -m pip install --upgrade pip - python -m pip install -r requirements.txt +# # Install the driver +# ACCEPT_EULA=Y dnf install -y msodbcsql18 +# # optional: for bcp and sqlcmd +# ACCEPT_EULA=Y dnf install -y mssql-tools18 +# # optional: for unixODBC development headers +# dnf install -y unixODBC-devel +# " +# displayName: 'Install ODBC Driver in RHEL 9 container' + +# - script: | +# # Install Python dependencies in the container using virtual environment +# docker exec test-container-rhel9 bash -c " +# # Create a virtual environment with Python 3.9 +# python3 -m venv myvenv +# source myvenv/bin/activate + +# # Install dependencies in the virtual environment +# python -m pip install --upgrade pip +# python -m pip install -r requirements.txt - # Make the virtual environment globally available - echo 'source myvenv/bin/activate' >> ~/.bashrc - " - displayName: 'Install Python dependencies in RHEL 9 container' - - - script: | - # Build pybind bindings in the container - docker exec test-container-rhel9 bash -c " - source myvenv/bin/activate - ls /usr/include/python3.9 - # Set compiler environment variables - export CC=/usr/bin/gcc - export CXX=/usr/bin/g++ - - cd mssql_python/pybind - chmod +x build.sh - ./build.sh - " - displayName: 'Build pybind bindings (.so) in RHEL 9 container' - - - script: | - # Uninstall ODBC Driver before running 
tests - docker exec test-container-rhel9 bash -c " - dnf remove -y msodbcsql18 mssql-tools18 unixODBC-devel - rm -f /usr/bin/sqlcmd - rm -f /usr/bin/bcp - rm -rf /opt/microsoft/msodbcsql - rm -f /lib64/libodbcinst.so.2 - odbcinst -u -d -n 'ODBC Driver 11 for SQL Server' || true - echo 'Uninstalled ODBC Driver and cleaned up libraries' - echo 'Verifying x86_64 rhel driver library signatures:' - ldd mssql_python/libs/linux/rhel/x86_64/lib/libmsodbcsql-18.5.so.1.1 - " - displayName: 'Uninstall ODBC Driver before running tests in RHEL 9 container' - - - script: | - # Run tests in the container - # Get SQL Server container IP - SQLSERVER_IP=$(docker inspect sqlserver-rhel9 --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') - echo "SQL Server IP: $SQLSERVER_IP" +# # Make the virtual environment globally available +# echo 'source myvenv/bin/activate' >> ~/.bashrc +# " +# displayName: 'Install Python dependencies in RHEL 9 container' + +# - script: | +# # Build pybind bindings in the container +# docker exec test-container-rhel9 bash -c " +# source myvenv/bin/activate +# ls /usr/include/python3.9 +# # Set compiler environment variables +# export CC=/usr/bin/gcc +# export CXX=/usr/bin/g++ + +# cd mssql_python/pybind +# chmod +x build.sh +# ./build.sh +# " +# displayName: 'Build pybind bindings (.so) in RHEL 9 container' + +# - script: | +# # Uninstall ODBC Driver before running tests +# docker exec test-container-rhel9 bash -c " +# dnf remove -y msodbcsql18 mssql-tools18 unixODBC-devel +# rm -f /usr/bin/sqlcmd +# rm -f /usr/bin/bcp +# rm -rf /opt/microsoft/msodbcsql +# rm -f /lib64/libodbcinst.so.2 +# odbcinst -u -d -n 'ODBC Driver 11 for SQL Server' || true +# echo 'Uninstalled ODBC Driver and cleaned up libraries' +# echo 'Verifying x86_64 rhel driver library signatures:' +# ldd mssql_python/libs/linux/rhel/x86_64/lib/libmsodbcsql-18.5.so.1.1 +# " +# displayName: 'Uninstall ODBC Driver before running tests in RHEL 9 container' + +# - script: | +# # Run 
tests in the container +# # Get SQL Server container IP +# SQLSERVER_IP=$(docker inspect sqlserver-rhel9 --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') +# echo "SQL Server IP: $SQLSERVER_IP" - docker exec \ - -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ - -e DB_PASSWORD="$(DB_PASSWORD)" \ - test-container-rhel9 bash -c " - source myvenv/bin/activate - echo 'Build successful, running tests now on RHEL 9' - echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' - python main.py - python -m pytest -v --junitxml=test-results-rhel9.xml --cov=. --cov-report=xml:coverage-rhel9.xml --capture=tee-sys --cache-clear - " - displayName: 'Run pytest with coverage in RHEL 9 container' - env: - DB_PASSWORD: $(DB_PASSWORD) - - - script: | - # Copy test results from container to host - docker cp test-container-rhel9:/workspace/test-results-rhel9.xml $(Build.SourcesDirectory)/ - docker cp test-container-rhel9:/workspace/coverage-rhel9.xml $(Build.SourcesDirectory)/ - displayName: 'Copy test results from RHEL 9 container' - condition: always() - - - script: | - # Clean up containers - docker stop test-container-rhel9 || true - docker rm test-container-rhel9 || true - docker stop sqlserver-rhel9 || true - docker rm sqlserver-rhel9 || true - displayName: 'Clean up RHEL 9 containers' - condition: always() - - - task: PublishTestResults@2 - condition: succeededOrFailed() - inputs: - testResultsFiles: '**/test-results-rhel9.xml' - testRunTitle: 'Publish pytest results on RHEL 9' - -- job: PytestOnLinux_RHEL9_ARM64 - displayName: 'Linux RedHat ARM64' - pool: - vmImage: 'ubuntu-latest' - - steps: - - script: | - # Set up Docker buildx for multi-architecture support - docker run --rm --privileged multiarch/qemu-user-static --reset -p yes - docker buildx create --name 
multiarch --driver docker-container --use - docker buildx inspect --bootstrap - displayName: 'Setup Docker buildx for ARM64 emulation' - - - script: | - # Create a Docker container for testing on ARM64 - docker run -d --name test-container-rhel9-arm64 \ - --platform linux/arm64 \ - -v $(Build.SourcesDirectory):/workspace \ - -w /workspace \ - --network bridge \ - redhat/ubi9:latest \ - tail -f /dev/null - displayName: 'Create RHEL 9 ARM64 container' - - - script: | - # Start SQL Server container (x86_64 - SQL Server doesn't support ARM64) - docker run -d --name sqlserver-rhel9-arm64 \ - --platform linux/amd64 \ - -e ACCEPT_EULA=Y \ - -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ - -p 1433:1433 \ - mcr.microsoft.com/mssql/server:2022-latest +# docker exec \ +# -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ +# -e DB_PASSWORD="$(DB_PASSWORD)" \ +# test-container-rhel9 bash -c " +# source myvenv/bin/activate +# echo 'Build successful, running tests now on RHEL 9' +# echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' +# python main.py +# python -m pytest -v --junitxml=test-results-rhel9.xml --cov=. 
--cov-report=xml:coverage-rhel9.xml --capture=tee-sys --cache-clear +# " +# displayName: 'Run pytest with coverage in RHEL 9 container' +# env: +# DB_PASSWORD: $(DB_PASSWORD) + +# - script: | +# # Copy test results from container to host +# docker cp test-container-rhel9:/workspace/test-results-rhel9.xml $(Build.SourcesDirectory)/ +# docker cp test-container-rhel9:/workspace/coverage-rhel9.xml $(Build.SourcesDirectory)/ +# displayName: 'Copy test results from RHEL 9 container' +# condition: always() + +# - script: | +# # Clean up containers +# docker stop test-container-rhel9 || true +# docker rm test-container-rhel9 || true +# docker stop sqlserver-rhel9 || true +# docker rm sqlserver-rhel9 || true +# displayName: 'Clean up RHEL 9 containers' +# condition: always() + +# - task: PublishTestResults@2 +# condition: succeededOrFailed() +# inputs: +# testResultsFiles: '**/test-results-rhel9.xml' +# testRunTitle: 'Publish pytest results on RHEL 9' + +# - job: PytestOnLinux_RHEL9_ARM64 +# displayName: 'Linux RedHat ARM64' +# pool: +# vmImage: 'ubuntu-latest' + +# steps: +# - script: | +# # Set up Docker buildx for multi-architecture support +# docker run --rm --privileged multiarch/qemu-user-static --reset -p yes +# docker buildx create --name multiarch --driver docker-container --use +# docker buildx inspect --bootstrap +# displayName: 'Setup Docker buildx for ARM64 emulation' + +# - script: | +# # Create a Docker container for testing on ARM64 +# docker run -d --name test-container-rhel9-arm64 \ +# --platform linux/arm64 \ +# -v $(Build.SourcesDirectory):/workspace \ +# -w /workspace \ +# --network bridge \ +# redhat/ubi9:latest \ +# tail -f /dev/null +# displayName: 'Create RHEL 9 ARM64 container' + +# - script: | +# # Start SQL Server container (x86_64 - SQL Server doesn't support ARM64) +# docker run -d --name sqlserver-rhel9-arm64 \ +# --platform linux/amd64 \ +# -e ACCEPT_EULA=Y \ +# -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ +# -p 1433:1433 \ +# 
mcr.microsoft.com/mssql/server:2022-latest - # Wait for SQL Server to be ready - echo "Waiting for SQL Server to start..." - for i in {1..60}; do - if docker exec sqlserver-rhel9-arm64 \ - /opt/mssql-tools18/bin/sqlcmd \ - -S localhost \ - -U SA \ - -P "$(DB_PASSWORD)" \ - -C -Q "SELECT 1" >/dev/null 2>&1; then - echo "SQL Server is ready!" - break - fi - echo "Waiting... ($i/60)" - sleep 2 - done +# # Wait for SQL Server to be ready +# echo "Waiting for SQL Server to start..." +# for i in {1..60}; do +# if docker exec sqlserver-rhel9-arm64 \ +# /opt/mssql-tools18/bin/sqlcmd \ +# -S localhost \ +# -U SA \ +# -P "$(DB_PASSWORD)" \ +# -C -Q "SELECT 1" >/dev/null 2>&1; then +# echo "SQL Server is ready!" +# break +# fi +# echo "Waiting... ($i/60)" +# sleep 2 +# done - # Create test database - docker exec sqlserver-rhel9-arm64 \ - /opt/mssql-tools18/bin/sqlcmd \ - -S localhost \ - -U SA \ - -P "$(DB_PASSWORD)" \ - -C -Q "CREATE DATABASE TestDB" - displayName: 'Start SQL Server container for RHEL 9 ARM64' - env: - DB_PASSWORD: $(DB_PASSWORD) - - - script: | - # Install dependencies in the RHEL 9 ARM64 container - docker exec test-container-rhel9-arm64 bash -c " - # Enable CodeReady Builder repository for additional packages - dnf update -y - dnf install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm - subscription-manager repos --enable codeready-builder-for-rhel-9-$(arch)-rpms || dnf config-manager --set-enabled ubi-9-codeready-builder +# # Create test database +# docker exec sqlserver-rhel9-arm64 \ +# /opt/mssql-tools18/bin/sqlcmd \ +# -S localhost \ +# -U SA \ +# -P "$(DB_PASSWORD)" \ +# -C -Q "CREATE DATABASE TestDB" +# displayName: 'Start SQL Server container for RHEL 9 ARM64' +# env: +# DB_PASSWORD: $(DB_PASSWORD) + +# - script: | +# # Install dependencies in the RHEL 9 ARM64 container +# docker exec test-container-rhel9-arm64 bash -c " +# # Enable CodeReady Builder repository for additional packages +# dnf update -y +# dnf install -y 
https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm +# subscription-manager repos --enable codeready-builder-for-rhel-9-$(arch)-rpms || dnf config-manager --set-enabled ubi-9-codeready-builder - # Install Python 3.9 (available in RHEL 9 UBI) and development tools - dnf install -y python3 python3-pip python3-devel cmake curl wget gnupg2 glibc-devel kernel-headers - dnf install -y python3-libs python3-debug - dnf install -y gcc gcc-c++ make binutils - dnf install -y cmake - # If that doesn't work, try installing from different repositories - if ! which gcc; then - echo 'Trying alternative gcc installation...' - dnf --enablerepo=ubi-9-codeready-builder install -y gcc gcc-c++ - fi - # Verify installation and architecture - python3 --version - which gcc && which g++ - gcc --version - g++ --version - uname -m - echo 'Architecture:' \$(uname -m) - " - displayName: 'Install basic dependencies in RHEL 9 ARM64 container' - - - script: | - # Verify compiler installation and set environment for RHEL 9 ARM64 - docker exec test-container-rhel9-arm64 bash -c " - # Verify compilers are available - which gcc || echo 'GCC not found' - which g++ || echo 'G++ not found' - gcc --version || echo 'GCC version check failed' - g++ --version || echo 'G++ version check failed' +# # Install Python 3.9 (available in RHEL 9 UBI) and development tools +# dnf install -y python3 python3-pip python3-devel cmake curl wget gnupg2 glibc-devel kernel-headers +# dnf install -y python3-libs python3-debug +# dnf install -y gcc gcc-c++ make binutils +# dnf install -y cmake +# # If that doesn't work, try installing from different repositories +# if ! which gcc; then +# echo 'Trying alternative gcc installation...' 
+# dnf --enablerepo=ubi-9-codeready-builder install -y gcc gcc-c++ +# fi +# # Verify installation and architecture +# python3 --version +# which gcc && which g++ +# gcc --version +# g++ --version +# uname -m +# echo 'Architecture:' \$(uname -m) +# " +# displayName: 'Install basic dependencies in RHEL 9 ARM64 container' + +# - script: | +# # Verify compiler installation and set environment for RHEL 9 ARM64 +# docker exec test-container-rhel9-arm64 bash -c " +# # Verify compilers are available +# which gcc || echo 'GCC not found' +# which g++ || echo 'G++ not found' +# gcc --version || echo 'GCC version check failed' +# g++ --version || echo 'G++ version check failed' - # Set compiler environment variables - export CC=/usr/bin/gcc - export CXX=/usr/bin/g++ - echo 'CC set to:' \$CC - echo 'CXX set to:' \$CXX - echo 'Running on architecture:' \$(uname -m) +# # Set compiler environment variables +# export CC=/usr/bin/gcc +# export CXX=/usr/bin/g++ +# echo 'CC set to:' \$CC +# echo 'CXX set to:' \$CXX +# echo 'Running on architecture:' \$(uname -m) - # Create a wrapper script to preserve environment - cat > /workspace/setup_env.sh << 'EOF' - #!/bin/bash - export CC=/usr/bin/gcc - export CXX=/usr/bin/g++ - export PATH=/usr/bin:\$PATH - exec \"\$@\" - EOF - chmod +x /workspace/setup_env.sh - " - displayName: 'Verify and configure compilers in RHEL 9 ARM64 container' - - - script: | - # Install ODBC driver in the RHEL 9 ARM64 container - docker exec test-container-rhel9-arm64 bash -c " - # Add Microsoft repository for RHEL 9 - curl -sSL -o /etc/yum.repos.d/msprod.repo https://packages.microsoft.com/config/rhel/9/prod.repo +# # Create a wrapper script to preserve environment +# cat > /workspace/setup_env.sh << 'EOF' +# #!/bin/bash +# export CC=/usr/bin/gcc +# export CXX=/usr/bin/g++ +# export PATH=/usr/bin:\$PATH +# exec \"\$@\" +# EOF +# chmod +x /workspace/setup_env.sh +# " +# displayName: 'Verify and configure compilers in RHEL 9 ARM64 container' + +# - script: | +# # 
Install ODBC driver in the RHEL 9 ARM64 container +# docker exec test-container-rhel9-arm64 bash -c " +# # Add Microsoft repository for RHEL 9 +# curl -sSL -o /etc/yum.repos.d/msprod.repo https://packages.microsoft.com/config/rhel/9/prod.repo - # Install the driver (ARM64 version) - ACCEPT_EULA=Y dnf install -y msodbcsql18 - # optional: for bcp and sqlcmd - ACCEPT_EULA=Y dnf install -y mssql-tools18 - # optional: for unixODBC development headers - dnf install -y unixODBC-devel - " - displayName: 'Install ODBC Driver in RHEL 9 ARM64 container' - - - script: | - # Install Python dependencies in the container using virtual environment - docker exec test-container-rhel9-arm64 bash -c " - # Create a virtual environment with Python 3.9 - python3 -m venv myvenv - source myvenv/bin/activate - - # Install dependencies in the virtual environment - python -m pip install --upgrade pip - python -m pip install -r requirements.txt +# # Install the driver (ARM64 version) +# ACCEPT_EULA=Y dnf install -y msodbcsql18 +# # optional: for bcp and sqlcmd +# ACCEPT_EULA=Y dnf install -y mssql-tools18 +# # optional: for unixODBC development headers +# dnf install -y unixODBC-devel +# " +# displayName: 'Install ODBC Driver in RHEL 9 ARM64 container' + +# - script: | +# # Install Python dependencies in the container using virtual environment +# docker exec test-container-rhel9-arm64 bash -c " +# # Create a virtual environment with Python 3.9 +# python3 -m venv myvenv +# source myvenv/bin/activate + +# # Install dependencies in the virtual environment +# python -m pip install --upgrade pip +# python -m pip install -r requirements.txt - # Make the virtual environment globally available - echo 'source myvenv/bin/activate' >> ~/.bashrc - " - displayName: 'Install Python dependencies in RHEL 9 ARM64 container' - - - script: | - # Build pybind bindings in the ARM64 container - docker exec test-container-rhel9-arm64 bash -c " - source myvenv/bin/activate - ls /usr/include/python3.9 - # Set compiler 
environment variables - export CC=/usr/bin/gcc - export CXX=/usr/bin/g++ - - cd mssql_python/pybind - chmod +x build.sh - ./build.sh - " - displayName: 'Build pybind bindings (.so) in RHEL 9 ARM64 container' - - - script: | - # Uninstall ODBC Driver before running tests - docker exec test-container-rhel9-arm64 bash -c " - dnf remove -y msodbcsql18 mssql-tools18 unixODBC-devel - rm -f /usr/bin/sqlcmd - rm -f /usr/bin/bcp - rm -rf /opt/microsoft/msodbcsql - rm -f /lib64/libodbcinst.so.2 - odbcinst -u -d -n 'ODBC Driver 18 for SQL Server' || true - echo 'Uninstalled ODBC Driver and cleaned up libraries' - echo 'Verifying arm64 rhel driver library signatures:' - ldd mssql_python/libs/linux/rhel/arm64/lib/libmsodbcsql-18.5.so.1.1 - " - displayName: 'Uninstall ODBC Driver before running tests in RHEL 9 ARM64 container' - - - script: | - # Run tests in the ARM64 container - # Get SQL Server container IP - SQLSERVER_IP=$(docker inspect sqlserver-rhel9-arm64 --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') - echo "SQL Server IP: $SQLSERVER_IP" +# # Make the virtual environment globally available +# echo 'source myvenv/bin/activate' >> ~/.bashrc +# " +# displayName: 'Install Python dependencies in RHEL 9 ARM64 container' + +# - script: | +# # Build pybind bindings in the ARM64 container +# docker exec test-container-rhel9-arm64 bash -c " +# source myvenv/bin/activate +# ls /usr/include/python3.9 +# # Set compiler environment variables +# export CC=/usr/bin/gcc +# export CXX=/usr/bin/g++ + +# cd mssql_python/pybind +# chmod +x build.sh +# ./build.sh +# " +# displayName: 'Build pybind bindings (.so) in RHEL 9 ARM64 container' + +# - script: | +# # Uninstall ODBC Driver before running tests +# docker exec test-container-rhel9-arm64 bash -c " +# dnf remove -y msodbcsql18 mssql-tools18 unixODBC-devel +# rm -f /usr/bin/sqlcmd +# rm -f /usr/bin/bcp +# rm -rf /opt/microsoft/msodbcsql +# rm -f /lib64/libodbcinst.so.2 +# odbcinst -u -d -n 'ODBC Driver 18 for SQL 
Server' || true +# echo 'Uninstalled ODBC Driver and cleaned up libraries' +# echo 'Verifying arm64 rhel driver library signatures:' +# ldd mssql_python/libs/linux/rhel/arm64/lib/libmsodbcsql-18.5.so.1.1 +# " +# displayName: 'Uninstall ODBC Driver before running tests in RHEL 9 ARM64 container' + +# - script: | +# # Run tests in the ARM64 container +# # Get SQL Server container IP +# SQLSERVER_IP=$(docker inspect sqlserver-rhel9-arm64 --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') +# echo "SQL Server IP: $SQLSERVER_IP" - docker exec \ - -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ - -e DB_PASSWORD="$(DB_PASSWORD)" \ - test-container-rhel9-arm64 bash -c " - source myvenv/bin/activate - echo 'Build successful, running tests now on RHEL 9 ARM64' - echo 'Architecture:' \$(uname -m) - echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' - python -m pytest -v --junitxml=test-results-rhel9-arm64.xml --cov=. 
--cov-report=xml:coverage-rhel9-arm64.xml --capture=tee-sys --cache-clear - " - displayName: 'Run pytest with coverage in RHEL 9 ARM64 container' - env: - DB_PASSWORD: $(DB_PASSWORD) - - - script: | - # Copy test results from container to host - docker cp test-container-rhel9-arm64:/workspace/test-results-rhel9-arm64.xml $(Build.SourcesDirectory)/ - docker cp test-container-rhel9-arm64:/workspace/coverage-rhel9-arm64.xml $(Build.SourcesDirectory)/ - displayName: 'Copy test results from RHEL 9 ARM64 container' - condition: always() - - - script: | - # Clean up containers - docker stop test-container-rhel9-arm64 || true - docker rm test-container-rhel9-arm64 || true - docker stop sqlserver-rhel9-arm64 || true - docker rm sqlserver-rhel9-arm64 || true - displayName: 'Clean up RHEL 9 ARM64 containers' - condition: always() - - - task: PublishTestResults@2 - condition: succeededOrFailed() - inputs: - testResultsFiles: '**/test-results-rhel9-arm64.xml' - testRunTitle: 'Publish pytest results on RHEL 9 ARM64' - -- job: PytestOnLinux_Alpine - displayName: 'Linux Alpine x86_64' - pool: - vmImage: 'ubuntu-latest' - - steps: - - script: | - # Set up Docker buildx for multi-architecture support - docker run --rm --privileged multiarch/qemu-user-static --reset -p yes - docker buildx create --name multiarch --driver docker-container --use - docker buildx inspect --bootstrap - displayName: 'Setup Docker buildx for multi-architecture support' - - - script: | - # Create a Docker container for testing on x86_64 - docker run -d --name test-container-alpine \ - --platform linux/amd64 \ - -v $(Build.SourcesDirectory):/workspace \ - -w /workspace \ - --network bridge \ - alpine:latest \ - tail -f /dev/null - displayName: 'Create Alpine x86_64 container' - - - script: | - # Start SQL Server container (x86_64) - docker run -d --name sqlserver-alpine \ - --platform linux/amd64 \ - -e ACCEPT_EULA=Y \ - -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ - -p 1433:1433 \ - 
mcr.microsoft.com/mssql/server:2022-latest +# docker exec \ +# -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ +# -e DB_PASSWORD="$(DB_PASSWORD)" \ +# test-container-rhel9-arm64 bash -c " +# source myvenv/bin/activate +# echo 'Build successful, running tests now on RHEL 9 ARM64' +# echo 'Architecture:' \$(uname -m) +# echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' +# python -m pytest -v --junitxml=test-results-rhel9-arm64.xml --cov=. --cov-report=xml:coverage-rhel9-arm64.xml --capture=tee-sys --cache-clear +# " +# displayName: 'Run pytest with coverage in RHEL 9 ARM64 container' +# env: +# DB_PASSWORD: $(DB_PASSWORD) + +# - script: | +# # Copy test results from container to host +# docker cp test-container-rhel9-arm64:/workspace/test-results-rhel9-arm64.xml $(Build.SourcesDirectory)/ +# docker cp test-container-rhel9-arm64:/workspace/coverage-rhel9-arm64.xml $(Build.SourcesDirectory)/ +# displayName: 'Copy test results from RHEL 9 ARM64 container' +# condition: always() + +# - script: | +# # Clean up containers +# docker stop test-container-rhel9-arm64 || true +# docker rm test-container-rhel9-arm64 || true +# docker stop sqlserver-rhel9-arm64 || true +# docker rm sqlserver-rhel9-arm64 || true +# displayName: 'Clean up RHEL 9 ARM64 containers' +# condition: always() + +# - task: PublishTestResults@2 +# condition: succeededOrFailed() +# inputs: +# testResultsFiles: '**/test-results-rhel9-arm64.xml' +# testRunTitle: 'Publish pytest results on RHEL 9 ARM64' + +# - job: PytestOnLinux_Alpine +# displayName: 'Linux Alpine x86_64' +# pool: +# vmImage: 'ubuntu-latest' + +# steps: +# - script: | +# # Set up Docker buildx for multi-architecture support +# docker run --rm --privileged multiarch/qemu-user-static --reset -p yes +# docker buildx create --name multiarch 
--driver docker-container --use +# docker buildx inspect --bootstrap +# displayName: 'Setup Docker buildx for multi-architecture support' + +# - script: | +# # Create a Docker container for testing on x86_64 +# docker run -d --name test-container-alpine \ +# --platform linux/amd64 \ +# -v $(Build.SourcesDirectory):/workspace \ +# -w /workspace \ +# --network bridge \ +# alpine:latest \ +# tail -f /dev/null +# displayName: 'Create Alpine x86_64 container' + +# - script: | +# # Start SQL Server container (x86_64) +# docker run -d --name sqlserver-alpine \ +# --platform linux/amd64 \ +# -e ACCEPT_EULA=Y \ +# -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ +# -p 1433:1433 \ +# mcr.microsoft.com/mssql/server:2022-latest - # Wait for SQL Server to be ready - echo "Waiting for SQL Server to start..." - for i in {1..60}; do - if docker exec sqlserver-alpine \ - /opt/mssql-tools18/bin/sqlcmd \ - -S localhost \ - -U SA \ - -P "$(DB_PASSWORD)" \ - -C -Q "SELECT 1" >/dev/null 2>&1; then - echo "SQL Server is ready!" - break - fi - echo "Waiting... ($i/60)" - sleep 2 - done +# # Wait for SQL Server to be ready +# echo "Waiting for SQL Server to start..." +# for i in {1..60}; do +# if docker exec sqlserver-alpine \ +# /opt/mssql-tools18/bin/sqlcmd \ +# -S localhost \ +# -U SA \ +# -P "$(DB_PASSWORD)" \ +# -C -Q "SELECT 1" >/dev/null 2>&1; then +# echo "SQL Server is ready!" +# break +# fi +# echo "Waiting... 
($i/60)" +# sleep 2 +# done - # Create test database - docker exec sqlserver-alpine \ - /opt/mssql-tools18/bin/sqlcmd \ - -S localhost \ - -U SA \ - -P "$(DB_PASSWORD)" \ - -C -Q "CREATE DATABASE TestDB" - displayName: 'Start SQL Server container for Alpine x86_64' - env: - DB_PASSWORD: $(DB_PASSWORD) - - - script: | - # Install dependencies in the Alpine x86_64 container - docker exec test-container-alpine sh -c " - # Update package index - apk update +# # Create test database +# docker exec sqlserver-alpine \ +# /opt/mssql-tools18/bin/sqlcmd \ +# -S localhost \ +# -U SA \ +# -P "$(DB_PASSWORD)" \ +# -C -Q "CREATE DATABASE TestDB" +# displayName: 'Start SQL Server container for Alpine x86_64' +# env: +# DB_PASSWORD: $(DB_PASSWORD) + +# - script: | +# # Install dependencies in the Alpine x86_64 container +# docker exec test-container-alpine sh -c " +# # Update package index +# apk update - # Install build tools and system dependencies - apk add --no-cache \ - build-base \ - cmake \ - clang \ - git \ - bash \ - wget \ - curl \ - gnupg \ - unixodbc \ - unixodbc-dev \ - libffi-dev \ - openssl-dev \ - zlib-dev \ - py3-pip \ - python3-dev \ - patchelf +# # Install build tools and system dependencies +# apk add --no-cache \ +# build-base \ +# cmake \ +# clang \ +# git \ +# bash \ +# wget \ +# curl \ +# gnupg \ +# unixodbc \ +# unixodbc-dev \ +# libffi-dev \ +# openssl-dev \ +# zlib-dev \ +# py3-pip \ +# python3-dev \ +# patchelf - # Create symlinks for Python compatibility - ln -sf python3 /usr/bin/python || true - ln -sf pip3 /usr/bin/pip || true +# # Create symlinks for Python compatibility +# ln -sf python3 /usr/bin/python || true +# ln -sf pip3 /usr/bin/pip || true - # Verify installation and architecture - uname -m - python --version - which cmake - " - displayName: 'Install basic dependencies in Alpine x86_64 container' - - - script: | - # Install ODBC driver in the Alpine x86_64 container - docker exec test-container-alpine bash -c " - # Detect architecture for 
ODBC driver download - case \$(uname -m) in - x86_64) architecture='amd64' ;; - arm64|aarch64) architecture='arm64' ;; - *) architecture='unsupported' ;; - esac +# # Verify installation and architecture +# uname -m +# python --version +# which cmake +# " +# displayName: 'Install basic dependencies in Alpine x86_64 container' + +# - script: | +# # Install ODBC driver in the Alpine x86_64 container +# docker exec test-container-alpine bash -c " +# # Detect architecture for ODBC driver download +# case \$(uname -m) in +# x86_64) architecture='amd64' ;; +# arm64|aarch64) architecture='arm64' ;; +# *) architecture='unsupported' ;; +# esac - if [[ 'unsupported' == '\$architecture' ]]; then - echo 'Alpine architecture \$(uname -m) is not currently supported.' - exit 1 - fi +# if [[ 'unsupported' == '\$architecture' ]]; then +# echo 'Alpine architecture \$(uname -m) is not currently supported.' +# exit 1 +# fi - echo 'Detected architecture: '\$architecture +# echo 'Detected architecture: '\$architecture - # Download the packages - curl -O https://download.microsoft.com/download/fae28b9a-d880-42fd-9b98-d779f0fdd77f/msodbcsql18_18.5.1.1-1_\$architecture.apk - curl -O https://download.microsoft.com/download/7/6d/76de322a-d860-4894-9945-f0cc5d6a45f8/mssql-tools18_18.4.1.1-1_\$architecture.apk +# # Download the packages +# curl -O https://download.microsoft.com/download/fae28b9a-d880-42fd-9b98-d779f0fdd77f/msodbcsql18_18.5.1.1-1_\$architecture.apk +# curl -O https://download.microsoft.com/download/7/6d/76de322a-d860-4894-9945-f0cc5d6a45f8/mssql-tools18_18.4.1.1-1_\$architecture.apk - # Download signatures for verification - curl -O https://download.microsoft.com/download/fae28b9a-d880-42fd-9b98-d779f0fdd77f/msodbcsql18_18.5.1.1-1_\$architecture.sig - curl -O https://download.microsoft.com/download/7/6d/76de322a-d860-4894-9945-f0cc5d6a45f8/mssql-tools18_18.4.1.1-1_\$architecture.sig +# # Download signatures for verification +# curl -O 
https://download.microsoft.com/download/fae28b9a-d880-42fd-9b98-d779f0fdd77f/msodbcsql18_18.5.1.1-1_\$architecture.sig +# curl -O https://download.microsoft.com/download/7/6d/76de322a-d860-4894-9945-f0cc5d6a45f8/mssql-tools18_18.4.1.1-1_\$architecture.sig - # Import Microsoft GPG key and verify packages - curl https://packages.microsoft.com/keys/microsoft.asc | gpg --import - - gpg --verify msodbcsql18_18.5.1.1-1_\$architecture.sig msodbcsql18_18.5.1.1-1_\$architecture.apk - gpg --verify mssql-tools18_18.4.1.1-1_\$architecture.sig mssql-tools18_18.4.1.1-1_\$architecture.apk +# # Import Microsoft GPG key and verify packages +# curl https://packages.microsoft.com/keys/microsoft.asc | gpg --import - +# gpg --verify msodbcsql18_18.5.1.1-1_\$architecture.sig msodbcsql18_18.5.1.1-1_\$architecture.apk +# gpg --verify mssql-tools18_18.4.1.1-1_\$architecture.sig mssql-tools18_18.4.1.1-1_\$architecture.apk - # Install the packages - apk add --allow-untrusted msodbcsql18_18.5.1.1-1_\$architecture.apk - apk add --allow-untrusted mssql-tools18_18.4.1.1-1_\$architecture.apk +# # Install the packages +# apk add --allow-untrusted msodbcsql18_18.5.1.1-1_\$architecture.apk +# apk add --allow-untrusted mssql-tools18_18.4.1.1-1_\$architecture.apk - # Cleanup - rm -f msodbcsql18_18.5.1.1-1_\$architecture.* mssql-tools18_18.4.1.1-1_\$architecture.* +# # Cleanup +# rm -f msodbcsql18_18.5.1.1-1_\$architecture.* mssql-tools18_18.4.1.1-1_\$architecture.* - # Add mssql-tools to PATH - export PATH=\"\$PATH:/opt/mssql-tools18/bin\" - echo 'export PATH=\"\$PATH:/opt/mssql-tools18/bin\"' >> ~/.bashrc - " - displayName: 'Install ODBC Driver in Alpine x86_64 container' - - - script: | - # Install Python dependencies in the Alpine x86_64 container using virtual environment - docker exec test-container-alpine bash -c " - # Create virtual environment - python -m venv /workspace/venv +# # Add mssql-tools to PATH +# export PATH=\"\$PATH:/opt/mssql-tools18/bin\" +# echo 'export 
PATH=\"\$PATH:/opt/mssql-tools18/bin\"' >> ~/.bashrc +# " +# displayName: 'Install ODBC Driver in Alpine x86_64 container' + +# - script: | +# # Install Python dependencies in the Alpine x86_64 container using virtual environment +# docker exec test-container-alpine bash -c " +# # Create virtual environment +# python -m venv /workspace/venv - # Activate virtual environment and install dependencies - source /workspace/venv/bin/activate +# # Activate virtual environment and install dependencies +# source /workspace/venv/bin/activate - # Upgrade pip and install dependencies - python -m pip install --upgrade pip - python -m pip install -r requirements.txt +# # Upgrade pip and install dependencies +# python -m pip install --upgrade pip +# python -m pip install -r requirements.txt - # Verify virtual environment is active - which python - which pip - " - displayName: 'Install Python dependencies in Alpine x86_64 container' - - - script: | - # Build pybind bindings in the Alpine x86_64 container - docker exec test-container-alpine bash -c " - # Activate virtual environment - source /workspace/venv/bin/activate +# # Verify virtual environment is active +# which python +# which pip +# " +# displayName: 'Install Python dependencies in Alpine x86_64 container' + +# - script: | +# # Build pybind bindings in the Alpine x86_64 container +# docker exec test-container-alpine bash -c " +# # Activate virtual environment +# source /workspace/venv/bin/activate - cd mssql_python/pybind - chmod +x build.sh - ./build.sh - " - displayName: 'Build pybind bindings (.so) in Alpine x86_64 container' - - - script: | - # Uninstall ODBC Driver before running tests to use bundled libraries - docker exec test-container-alpine bash -c " - # Remove system ODBC installation - apk del msodbcsql18 mssql-tools18 unixodbc-dev || echo 'ODBC packages not installed via apk' - rm -f /usr/bin/sqlcmd - rm -f /usr/bin/bcp - rm -rf /opt/microsoft/msodbcsql18 - rm -f /usr/lib/libodbcinst.so.2 - odbcinst -u -d -n 
'ODBC Driver 18 for SQL Server' || true - echo 'Uninstalled system ODBC Driver and cleaned up libraries' - echo 'Verifying x86_64 alpine driver library signatures:' - ldd mssql_python/libs/linux/alpine/x86_64/lib/libmsodbcsql-18.5.so.1.1 || echo 'Driver library not found' - " - displayName: 'Uninstall system ODBC Driver before running tests in Alpine x86_64 container' - - - script: | - # Run tests in the Alpine x86_64 container - # Get SQL Server container IP - SQLSERVER_IP=$(docker inspect sqlserver-alpine --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') - echo "SQL Server IP: $SQLSERVER_IP" +# cd mssql_python/pybind +# chmod +x build.sh +# ./build.sh +# " +# displayName: 'Build pybind bindings (.so) in Alpine x86_64 container' + +# - script: | +# # Uninstall ODBC Driver before running tests to use bundled libraries +# docker exec test-container-alpine bash -c " +# # Remove system ODBC installation +# apk del msodbcsql18 mssql-tools18 unixodbc-dev || echo 'ODBC packages not installed via apk' +# rm -f /usr/bin/sqlcmd +# rm -f /usr/bin/bcp +# rm -rf /opt/microsoft/msodbcsql18 +# rm -f /usr/lib/libodbcinst.so.2 +# odbcinst -u -d -n 'ODBC Driver 18 for SQL Server' || true +# echo 'Uninstalled system ODBC Driver and cleaned up libraries' +# echo 'Verifying x86_64 alpine driver library signatures:' +# ldd mssql_python/libs/linux/alpine/x86_64/lib/libmsodbcsql-18.5.so.1.1 || echo 'Driver library not found' +# " +# displayName: 'Uninstall system ODBC Driver before running tests in Alpine x86_64 container' + +# - script: | +# # Run tests in the Alpine x86_64 container +# # Get SQL Server container IP +# SQLSERVER_IP=$(docker inspect sqlserver-alpine --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') +# echo "SQL Server IP: $SQLSERVER_IP" - docker exec \ - -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ - -e 
DB_PASSWORD="$(DB_PASSWORD)" \ - test-container-alpine bash -c " - echo 'Build successful, running tests now on Alpine x86_64' - echo 'Architecture:' \$(uname -m) - echo 'Alpine version:' \$(cat /etc/alpine-release) - echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' +# docker exec \ +# -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ +# -e DB_PASSWORD="$(DB_PASSWORD)" \ +# test-container-alpine bash -c " +# echo 'Build successful, running tests now on Alpine x86_64' +# echo 'Architecture:' \$(uname -m) +# echo 'Alpine version:' \$(cat /etc/alpine-release) +# echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' - # Activate virtual environment - source /workspace/venv/bin/activate +# # Activate virtual environment +# source /workspace/venv/bin/activate - # Test basic Python import first - python -c 'import mssql_python; print(\"mssql_python imported successfully\")' +# # Test basic Python import first +# python -c 'import mssql_python; print(\"mssql_python imported successfully\")' - # Run main.py if it exists - if [ -f main.py ]; then - echo 'Running main.py...' - python main.py - fi +# # Run main.py if it exists +# if [ -f main.py ]; then +# echo 'Running main.py...' +# python main.py +# fi - # Run pytest - python -m pytest -v --junitxml=test-results-alpine.xml --cov=. 
--cov-report=xml:coverage-alpine.xml --capture=tee-sys --cache-clear - " - displayName: 'Run pytest with coverage in Alpine x86_64 container' - env: - DB_PASSWORD: $(DB_PASSWORD) - - - script: | - # Copy test results from container to host - docker cp test-container-alpine:/workspace/test-results-alpine.xml $(Build.SourcesDirectory)/ || echo 'Failed to copy test results' - docker cp test-container-alpine:/workspace/coverage-alpine.xml $(Build.SourcesDirectory)/ || echo 'Failed to copy coverage results' - displayName: 'Copy test results from Alpine x86_64 container' - condition: always() - - - script: | - # Clean up containers - docker stop test-container-alpine || true - docker rm test-container-alpine || true - docker stop sqlserver-alpine || true - docker rm sqlserver-alpine || true - displayName: 'Clean up Alpine x86_64 containers' - condition: always() - - - task: PublishTestResults@2 - condition: succeededOrFailed() - inputs: - testResultsFiles: '**/test-results-alpine.xml' - testRunTitle: 'Publish pytest results on Alpine x86_64' - -- job: PytestOnLinux_Alpine_ARM64 - displayName: 'Linux Alpine ARM64' - pool: - vmImage: 'ubuntu-latest' - - steps: - - script: | - # Set up Docker buildx for multi-architecture support - docker run --rm --privileged multiarch/qemu-user-static --reset -p yes - docker buildx create --name multiarch --driver docker-container --use - docker buildx inspect --bootstrap - displayName: 'Setup Docker buildx for ARM64 emulation' - - - script: | - # Create a Docker container for testing on ARM64 - docker run -d --name test-container-alpine-arm64 \ - --platform linux/arm64 \ - -v $(Build.SourcesDirectory):/workspace \ - -w /workspace \ - --network bridge \ - alpine:latest \ - tail -f /dev/null - displayName: 'Create Alpine ARM64 container' - - - script: | - # Start SQL Server container (x86_64 - SQL Server doesn't support ARM64) - docker run -d --name sqlserver-alpine-arm64 \ - --platform linux/amd64 \ - -e ACCEPT_EULA=Y \ - -e 
MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ - -p 1433:1433 \ - mcr.microsoft.com/mssql/server:2022-latest +# # Run pytest +# python -m pytest -v --junitxml=test-results-alpine.xml --cov=. --cov-report=xml:coverage-alpine.xml --capture=tee-sys --cache-clear +# " +# displayName: 'Run pytest with coverage in Alpine x86_64 container' +# env: +# DB_PASSWORD: $(DB_PASSWORD) + +# - script: | +# # Copy test results from container to host +# docker cp test-container-alpine:/workspace/test-results-alpine.xml $(Build.SourcesDirectory)/ || echo 'Failed to copy test results' +# docker cp test-container-alpine:/workspace/coverage-alpine.xml $(Build.SourcesDirectory)/ || echo 'Failed to copy coverage results' +# displayName: 'Copy test results from Alpine x86_64 container' +# condition: always() + +# - script: | +# # Clean up containers +# docker stop test-container-alpine || true +# docker rm test-container-alpine || true +# docker stop sqlserver-alpine || true +# docker rm sqlserver-alpine || true +# displayName: 'Clean up Alpine x86_64 containers' +# condition: always() + +# - task: PublishTestResults@2 +# condition: succeededOrFailed() +# inputs: +# testResultsFiles: '**/test-results-alpine.xml' +# testRunTitle: 'Publish pytest results on Alpine x86_64' + +# - job: PytestOnLinux_Alpine_ARM64 +# displayName: 'Linux Alpine ARM64' +# pool: +# vmImage: 'ubuntu-latest' + +# steps: +# - script: | +# # Set up Docker buildx for multi-architecture support +# docker run --rm --privileged multiarch/qemu-user-static --reset -p yes +# docker buildx create --name multiarch --driver docker-container --use +# docker buildx inspect --bootstrap +# displayName: 'Setup Docker buildx for ARM64 emulation' + +# - script: | +# # Create a Docker container for testing on ARM64 +# docker run -d --name test-container-alpine-arm64 \ +# --platform linux/arm64 \ +# -v $(Build.SourcesDirectory):/workspace \ +# -w /workspace \ +# --network bridge \ +# alpine:latest \ +# tail -f /dev/null +# displayName: 'Create 
Alpine ARM64 container' + +# - script: | +# # Start SQL Server container (x86_64 - SQL Server doesn't support ARM64) +# docker run -d --name sqlserver-alpine-arm64 \ +# --platform linux/amd64 \ +# -e ACCEPT_EULA=Y \ +# -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ +# -p 1433:1433 \ +# mcr.microsoft.com/mssql/server:2022-latest - # Wait for SQL Server to be ready - echo "Waiting for SQL Server to start..." - for i in {1..60}; do - if docker exec sqlserver-alpine-arm64 \ - /opt/mssql-tools18/bin/sqlcmd \ - -S localhost \ - -U SA \ - -P "$(DB_PASSWORD)" \ - -C -Q "SELECT 1" >/dev/null 2>&1; then - echo "SQL Server is ready!" - break - fi - echo "Waiting... ($i/60)" - sleep 2 - done +# # Wait for SQL Server to be ready +# echo "Waiting for SQL Server to start..." +# for i in {1..60}; do +# if docker exec sqlserver-alpine-arm64 \ +# /opt/mssql-tools18/bin/sqlcmd \ +# -S localhost \ +# -U SA \ +# -P "$(DB_PASSWORD)" \ +# -C -Q "SELECT 1" >/dev/null 2>&1; then +# echo "SQL Server is ready!" +# break +# fi +# echo "Waiting... 
($i/60)" +# sleep 2 +# done - # Create test database - docker exec sqlserver-alpine-arm64 \ - /opt/mssql-tools18/bin/sqlcmd \ - -S localhost \ - -U SA \ - -P "$(DB_PASSWORD)" \ - -C -Q "CREATE DATABASE TestDB" - displayName: 'Start SQL Server container for Alpine ARM64' - env: - DB_PASSWORD: $(DB_PASSWORD) - - - script: | - # Install dependencies in the Alpine ARM64 container - docker exec test-container-alpine-arm64 sh -c " - # Update package index - apk update +# # Create test database +# docker exec sqlserver-alpine-arm64 \ +# /opt/mssql-tools18/bin/sqlcmd \ +# -S localhost \ +# -U SA \ +# -P "$(DB_PASSWORD)" \ +# -C -Q "CREATE DATABASE TestDB" +# displayName: 'Start SQL Server container for Alpine ARM64' +# env: +# DB_PASSWORD: $(DB_PASSWORD) + +# - script: | +# # Install dependencies in the Alpine ARM64 container +# docker exec test-container-alpine-arm64 sh -c " +# # Update package index +# apk update - # Install build tools and system dependencies - apk add --no-cache \ - build-base \ - cmake \ - clang \ - git \ - bash \ - wget \ - curl \ - gnupg \ - unixodbc \ - unixodbc-dev \ - libffi-dev \ - openssl-dev \ - zlib-dev \ - py3-pip \ - python3-dev \ - patchelf +# # Install build tools and system dependencies +# apk add --no-cache \ +# build-base \ +# cmake \ +# clang \ +# git \ +# bash \ +# wget \ +# curl \ +# gnupg \ +# unixodbc \ +# unixodbc-dev \ +# libffi-dev \ +# openssl-dev \ +# zlib-dev \ +# py3-pip \ +# python3-dev \ +# patchelf - # Create symlinks for Python compatibility - ln -sf python3 /usr/bin/python || true - ln -sf pip3 /usr/bin/pip || true +# # Create symlinks for Python compatibility +# ln -sf python3 /usr/bin/python || true +# ln -sf pip3 /usr/bin/pip || true - # Verify installation and architecture - uname -m - python --version - which cmake - " - displayName: 'Install basic dependencies in Alpine ARM64 container' - - - script: | - # Install ODBC driver in the Alpine ARM64 container - docker exec test-container-alpine-arm64 bash -c " - # 
Detect architecture for ODBC driver download - case \$(uname -m) in - x86_64) architecture='amd64' ;; - arm64|aarch64) architecture='arm64' ;; - *) architecture='unsupported' ;; - esac +# # Verify installation and architecture +# uname -m +# python --version +# which cmake +# " +# displayName: 'Install basic dependencies in Alpine ARM64 container' + +# - script: | +# # Install ODBC driver in the Alpine ARM64 container +# docker exec test-container-alpine-arm64 bash -c " +# # Detect architecture for ODBC driver download +# case \$(uname -m) in +# x86_64) architecture='amd64' ;; +# arm64|aarch64) architecture='arm64' ;; +# *) architecture='unsupported' ;; +# esac - if [[ 'unsupported' == '\$architecture' ]]; then - echo 'Alpine architecture \$(uname -m) is not currently supported.' - exit 1 - fi +# if [[ 'unsupported' == '\$architecture' ]]; then +# echo 'Alpine architecture \$(uname -m) is not currently supported.' +# exit 1 +# fi - echo 'Detected architecture: '\$architecture +# echo 'Detected architecture: '\$architecture - # Download the packages - curl -O https://download.microsoft.com/download/fae28b9a-d880-42fd-9b98-d779f0fdd77f/msodbcsql18_18.5.1.1-1_\$architecture.apk - curl -O https://download.microsoft.com/download/7/6d/76de322a-d860-4894-9945-f0cc5d6a45f8/mssql-tools18_18.4.1.1-1_\$architecture.apk +# # Download the packages +# curl -O https://download.microsoft.com/download/fae28b9a-d880-42fd-9b98-d779f0fdd77f/msodbcsql18_18.5.1.1-1_\$architecture.apk +# curl -O https://download.microsoft.com/download/7/6d/76de322a-d860-4894-9945-f0cc5d6a45f8/mssql-tools18_18.4.1.1-1_\$architecture.apk - # Download signatures for verification - curl -O https://download.microsoft.com/download/fae28b9a-d880-42fd-9b98-d779f0fdd77f/msodbcsql18_18.5.1.1-1_\$architecture.sig - curl -O https://download.microsoft.com/download/7/6d/76de322a-d860-4894-9945-f0cc5d6a45f8/mssql-tools18_18.4.1.1-1_\$architecture.sig +# # Download signatures for verification +# curl -O 
https://download.microsoft.com/download/fae28b9a-d880-42fd-9b98-d779f0fdd77f/msodbcsql18_18.5.1.1-1_\$architecture.sig +# curl -O https://download.microsoft.com/download/7/6d/76de322a-d860-4894-9945-f0cc5d6a45f8/mssql-tools18_18.4.1.1-1_\$architecture.sig - # Import Microsoft GPG key and verify packages - curl https://packages.microsoft.com/keys/microsoft.asc | gpg --import - - gpg --verify msodbcsql18_18.5.1.1-1_\$architecture.sig msodbcsql18_18.5.1.1-1_\$architecture.apk - gpg --verify mssql-tools18_18.4.1.1-1_\$architecture.sig mssql-tools18_18.4.1.1-1_\$architecture.apk +# # Import Microsoft GPG key and verify packages +# curl https://packages.microsoft.com/keys/microsoft.asc | gpg --import - +# gpg --verify msodbcsql18_18.5.1.1-1_\$architecture.sig msodbcsql18_18.5.1.1-1_\$architecture.apk +# gpg --verify mssql-tools18_18.4.1.1-1_\$architecture.sig mssql-tools18_18.4.1.1-1_\$architecture.apk - # Install the packages - apk add --allow-untrusted msodbcsql18_18.5.1.1-1_\$architecture.apk - apk add --allow-untrusted mssql-tools18_18.4.1.1-1_\$architecture.apk +# # Install the packages +# apk add --allow-untrusted msodbcsql18_18.5.1.1-1_\$architecture.apk +# apk add --allow-untrusted mssql-tools18_18.4.1.1-1_\$architecture.apk - # Cleanup - rm -f msodbcsql18_18.5.1.1-1_\$architecture.* mssql-tools18_18.4.1.1-1_\$architecture.* +# # Cleanup +# rm -f msodbcsql18_18.5.1.1-1_\$architecture.* mssql-tools18_18.4.1.1-1_\$architecture.* - # Add mssql-tools to PATH - export PATH=\"\$PATH:/opt/mssql-tools18/bin\" - echo 'export PATH=\"\$PATH:/opt/mssql-tools18/bin\"' >> ~/.bashrc - " - displayName: 'Install ODBC Driver in Alpine ARM64 container' - - - script: | - # Install Python dependencies in the Alpine ARM64 container using virtual environment - docker exec test-container-alpine-arm64 bash -c " - # Create virtual environment - python -m venv /workspace/venv +# # Add mssql-tools to PATH +# export PATH=\"\$PATH:/opt/mssql-tools18/bin\" +# echo 'export 
PATH=\"\$PATH:/opt/mssql-tools18/bin\"' >> ~/.bashrc +# " +# displayName: 'Install ODBC Driver in Alpine ARM64 container' + +# - script: | +# # Install Python dependencies in the Alpine ARM64 container using virtual environment +# docker exec test-container-alpine-arm64 bash -c " +# # Create virtual environment +# python -m venv /workspace/venv - # Activate virtual environment and install dependencies - source /workspace/venv/bin/activate +# # Activate virtual environment and install dependencies +# source /workspace/venv/bin/activate - # Upgrade pip and install dependencies - python -m pip install --upgrade pip - python -m pip install -r requirements.txt +# # Upgrade pip and install dependencies +# python -m pip install --upgrade pip +# python -m pip install -r requirements.txt - # Verify virtual environment is active - which python - which pip - " - displayName: 'Install Python dependencies in Alpine ARM64 container' - - - script: | - # Build pybind bindings in the Alpine ARM64 container - docker exec test-container-alpine-arm64 bash -c " - # Activate virtual environment - source /workspace/venv/bin/activate +# # Verify virtual environment is active +# which python +# which pip +# " +# displayName: 'Install Python dependencies in Alpine ARM64 container' + +# - script: | +# # Build pybind bindings in the Alpine ARM64 container +# docker exec test-container-alpine-arm64 bash -c " +# # Activate virtual environment +# source /workspace/venv/bin/activate - cd mssql_python/pybind - chmod +x build.sh - ./build.sh - " - displayName: 'Build pybind bindings (.so) in Alpine ARM64 container' - - - script: | - # Uninstall ODBC Driver before running tests to use bundled libraries - docker exec test-container-alpine-arm64 bash -c " - # Remove system ODBC installation - apk del msodbcsql18 mssql-tools18 unixodbc-dev || echo 'ODBC packages not installed via apk' - rm -f /usr/bin/sqlcmd - rm -f /usr/bin/bcp - rm -rf /opt/microsoft/msodbcsql18 - rm -f /usr/lib/libodbcinst.so.2 - 
odbcinst -u -d -n 'ODBC Driver 18 for SQL Server' || true - echo 'Uninstalled system ODBC Driver and cleaned up libraries' - echo 'Verifying arm64 alpine driver library signatures:' - ldd mssql_python/libs/linux/alpine/arm64/lib/libmsodbcsql-18.5.so.1.1 || echo 'Driver library not found' - " - displayName: 'Uninstall system ODBC Driver before running tests in Alpine ARM64 container' - - - script: | - # Run tests in the Alpine ARM64 container - # Get SQL Server container IP - SQLSERVER_IP=$(docker inspect sqlserver-alpine-arm64 --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') - echo "SQL Server IP: $SQLSERVER_IP" +# cd mssql_python/pybind +# chmod +x build.sh +# ./build.sh +# " +# displayName: 'Build pybind bindings (.so) in Alpine ARM64 container' + +# - script: | +# # Uninstall ODBC Driver before running tests to use bundled libraries +# docker exec test-container-alpine-arm64 bash -c " +# # Remove system ODBC installation +# apk del msodbcsql18 mssql-tools18 unixodbc-dev || echo 'ODBC packages not installed via apk' +# rm -f /usr/bin/sqlcmd +# rm -f /usr/bin/bcp +# rm -rf /opt/microsoft/msodbcsql18 +# rm -f /usr/lib/libodbcinst.so.2 +# odbcinst -u -d -n 'ODBC Driver 18 for SQL Server' || true +# echo 'Uninstalled system ODBC Driver and cleaned up libraries' +# echo 'Verifying arm64 alpine driver library signatures:' +# ldd mssql_python/libs/linux/alpine/arm64/lib/libmsodbcsql-18.5.so.1.1 || echo 'Driver library not found' +# " +# displayName: 'Uninstall system ODBC Driver before running tests in Alpine ARM64 container' + +# - script: | +# # Run tests in the Alpine ARM64 container +# # Get SQL Server container IP +# SQLSERVER_IP=$(docker inspect sqlserver-alpine-arm64 --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') +# echo "SQL Server IP: $SQLSERVER_IP" - docker exec \ - -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ 
- -e DB_PASSWORD="$(DB_PASSWORD)" \ - test-container-alpine-arm64 bash -c " - echo 'Build successful, running tests now on Alpine ARM64' - echo 'Architecture:' \$(uname -m) - echo 'Alpine version:' \$(cat /etc/alpine-release) - echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' +# docker exec \ +# -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ +# -e DB_PASSWORD="$(DB_PASSWORD)" \ +# test-container-alpine-arm64 bash -c " +# echo 'Build successful, running tests now on Alpine ARM64' +# echo 'Architecture:' \$(uname -m) +# echo 'Alpine version:' \$(cat /etc/alpine-release) +# echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' - # Activate virtual environment - source /workspace/venv/bin/activate +# # Activate virtual environment +# source /workspace/venv/bin/activate - # Test basic Python import first - python -c 'import mssql_python; print(\"mssql_python imported successfully\")' +# # Test basic Python import first +# python -c 'import mssql_python; print(\"mssql_python imported successfully\")' - # Run main.py if it exists - if [ -f main.py ]; then - echo 'Running main.py...' - python main.py - fi +# # Run main.py if it exists +# if [ -f main.py ]; then +# echo 'Running main.py...' +# python main.py +# fi - # Run pytest - python -m pytest -v --junitxml=test-results-alpine-arm64.xml --cov=. 
--cov-report=xml:coverage-alpine-arm64.xml --capture=tee-sys --cache-clear - " - displayName: 'Run pytest with coverage in Alpine ARM64 container' - env: - DB_PASSWORD: $(DB_PASSWORD) - - - script: | - # Copy test results from container to host - docker cp test-container-alpine-arm64:/workspace/test-results-alpine-arm64.xml $(Build.SourcesDirectory)/ || echo 'Failed to copy test results' - docker cp test-container-alpine-arm64:/workspace/coverage-alpine-arm64.xml $(Build.SourcesDirectory)/ || echo 'Failed to copy coverage results' - displayName: 'Copy test results from Alpine ARM64 container' - condition: always() - - - script: | - # Clean up containers - docker stop test-container-alpine-arm64 || true - docker rm test-container-alpine-arm64 || true - docker stop sqlserver-alpine-arm64 || true - docker rm sqlserver-alpine-arm64 || true - displayName: 'Clean up Alpine ARM64 containers' - condition: always() - - - task: PublishTestResults@2 - condition: succeededOrFailed() - inputs: - testResultsFiles: '**/test-results-alpine-arm64.xml' - testRunTitle: 'Publish pytest results on Alpine ARM64' +# # Run pytest +# python -m pytest -v --junitxml=test-results-alpine-arm64.xml --cov=. 
--cov-report=xml:coverage-alpine-arm64.xml --capture=tee-sys --cache-clear +# " +# displayName: 'Run pytest with coverage in Alpine ARM64 container' +# env: +# DB_PASSWORD: $(DB_PASSWORD) + +# - script: | +# # Copy test results from container to host +# docker cp test-container-alpine-arm64:/workspace/test-results-alpine-arm64.xml $(Build.SourcesDirectory)/ || echo 'Failed to copy test results' +# docker cp test-container-alpine-arm64:/workspace/coverage-alpine-arm64.xml $(Build.SourcesDirectory)/ || echo 'Failed to copy coverage results' +# displayName: 'Copy test results from Alpine ARM64 container' +# condition: always() + +# - script: | +# # Clean up containers +# docker stop test-container-alpine-arm64 || true +# docker rm test-container-alpine-arm64 || true +# docker stop sqlserver-alpine-arm64 || true +# docker rm sqlserver-alpine-arm64 || true +# displayName: 'Clean up Alpine ARM64 containers' +# condition: always() + +# - task: PublishTestResults@2 +# condition: succeededOrFailed() +# inputs: +# testResultsFiles: '**/test-results-alpine-arm64.xml' +# testRunTitle: 'Publish pytest results on Alpine ARM64' diff --git a/main.py b/main.py index b45b88d73..0a4370c14 100644 --- a/main.py +++ b/main.py @@ -1,3 +1,4 @@ +import pytest from mssql_python import connect from mssql_python import setup_logging import os @@ -5,17 +6,56 @@ setup_logging('stdout') -conn_str = os.getenv("DB_CONNECTION_STRING") +# conn_str = os.getenv("DB_CONNECTION_STRING") +conn_str = "Server=Saumya;DATABASE=master;UID=sa;PWD=HappyPass1234;Trust_Connection=yes;TrustServerCertificate=yes;" + conn = connect(conn_str) +cursor = conn.cursor() + +test_inputs = [ +"Hello 😄", +"Flags 🇮🇳🇺🇸", +"Family 👨‍👩‍👧‍👦", +"Skin tone 👍🏽", +"Brain 🧠", +"Ice 🧊", +"Melting face 🫠", +"Accented éüñç", +"Chinese: 中文", +"Japanese: 日本語", +"Hello 🚀 World", +"admin🔒user", +"1🚀' OR '1'='1", +] + +cursor.execute(""" +CREATE TABLE #pytest_emoji_test ( +id INT IDENTITY PRIMARY KEY, +content NVARCHAR(MAX) +); +""") 
+conn.commit() +for text in test_inputs: + try: + cursor.execute("INSERT INTO #pytest_emoji_test (content) OUTPUT INSERTED.id VALUES (?)", [text]) + inserted_id = cursor.fetchone()[0] + cursor.execute("SELECT content FROM #pytest_emoji_test WHERE id = ?", [inserted_id]) + result = cursor.fetchone() + assert result is not None, f"No row returned for ID {inserted_id}" + assert result[0] == text, f"Mismatch! Sent: {text}, Got: {result[0]}" + print(f"Test passed for input: {repr(text)}") + + except Exception as e: + print(f"Error for input {repr(text)}: {e}") # conn.autocommit = True -cursor = conn.cursor() -cursor.execute("SELECT database_id, name from sys.databases;") -rows = cursor.fetchall() -for row in rows: - print(f"Database ID: {row[0]}, Name: {row[1]}") +# cursor.execute("SELECT database_id, name from sys.databases;") +# rows = cursor.fetchall() + +# for row in rows: +# print(f"Database ID: {row[0]}, Name: {row[1]}") cursor.close() conn.close() \ No newline at end of file diff --git a/mssql_python/pybind/ddbc_bindings.h b/mssql_python/pybind/ddbc_bindings.h index 8010fb20c..4922830e7 100644 --- a/mssql_python/pybind/ddbc_bindings.h +++ b/mssql_python/pybind/ddbc_bindings.h @@ -401,8 +401,25 @@ struct ErrorInfo { }; ErrorInfo SQLCheckError_Wrap(SQLSMALLINT handleType, SqlHandlePtr handle, SQLRETURN retcode); +// inline std::string WideToUTF8(const std::wstring& wstr) { +// if (wstr.empty()) return {}; +// #if defined(_WIN32) +// int size_needed = WideCharToMultiByte(CP_UTF8, 0, wstr.data(), static_cast(wstr.size()), nullptr, 0, nullptr, nullptr); +// if (size_needed == 0) return {}; +// std::string result(size_needed, 0); +// int converted = WideCharToMultiByte(CP_UTF8, 0, wstr.data(), static_cast(wstr.size()), result.data(), size_needed, nullptr, nullptr); +// if (converted == 0) return {}; +// return result; +// #else +// std::wstring_convert> converter; +// return converter.to_bytes(wstr); +// #endif +// } + + inline std::string WideToUTF8(const 
std::wstring& wstr) { if (wstr.empty()) return {}; + #if defined(_WIN32) int size_needed = WideCharToMultiByte(CP_UTF8, 0, wstr.data(), static_cast(wstr.size()), nullptr, 0, nullptr, nullptr); if (size_needed == 0) return {}; @@ -411,8 +428,34 @@ inline std::string WideToUTF8(const std::wstring& wstr) { if (converted == 0) return {}; return result; #else - std::wstring_convert> converter; - return converter.to_bytes(wstr); + // Manual UTF-32 to UTF-8 conversion for macOS/Linux + std::string utf8_string; + utf8_string.reserve(wstr.size() * 4); // Reserve enough space for worst case (4 bytes per character) + + for (wchar_t wc : wstr) { + uint32_t code_point = static_cast(wc); + + if (code_point <= 0x7F) { + // 1-byte UTF-8 sequence for ASCII characters + utf8_string += static_cast(code_point); + } else if (code_point <= 0x7FF) { + // 2-byte UTF-8 sequence + utf8_string += static_cast(0xC0 | ((code_point >> 6) & 0x1F)); + utf8_string += static_cast(0x80 | (code_point & 0x3F)); + } else if (code_point <= 0xFFFF) { + // 3-byte UTF-8 sequence + utf8_string += static_cast(0xE0 | ((code_point >> 12) & 0x0F)); + utf8_string += static_cast(0x80 | ((code_point >> 6) & 0x3F)); + utf8_string += static_cast(0x80 | (code_point & 0x3F)); + } else if (code_point <= 0x10FFFF) { + // 4-byte UTF-8 sequence for characters like emojis (e.g., U+1F604) + utf8_string += static_cast(0xF0 | ((code_point >> 18) & 0x07)); + utf8_string += static_cast(0x80 | ((code_point >> 12) & 0x3F)); + utf8_string += static_cast(0x80 | ((code_point >> 6) & 0x3F)); + utf8_string += static_cast(0x80 | (code_point & 0x3F)); + } + } + return utf8_string; #endif } From a6db45fa3cd6a249c9e44a84a1a072eac99f0a9a Mon Sep 17 00:00:00 2001 From: gargsaumya Date: Mon, 15 Sep 2025 11:42:29 +0530 Subject: [PATCH 11/11] mac test --- eng/pipelines/pr-validation-pipeline.yml | 2616 +++++++++++----------- main.py | 52 +- mssql_python/pybind/ddbc_bindings.cpp | 3 - mssql_python/pybind/ddbc_bindings.h | 63 - 4 files changed, 
1313 insertions(+), 1421 deletions(-) diff --git a/eng/pipelines/pr-validation-pipeline.yml b/eng/pipelines/pr-validation-pipeline.yml index 8da044f21..6621b10df 100644 --- a/eng/pipelines/pr-validation-pipeline.yml +++ b/eng/pipelines/pr-validation-pipeline.yml @@ -7,76 +7,76 @@ trigger: - main jobs: -# - job: PytestOnWindows -# displayName: 'Windows x64' -# pool: -# vmImage: 'windows-latest' +- job: PytestOnWindows + displayName: 'Windows x64' + pool: + vmImage: 'windows-latest' -# steps: -# - task: UsePythonVersion@0 -# inputs: -# versionSpec: '3.13' -# addToPath: true -# githubToken: $(GITHUB_TOKEN) -# displayName: 'Use Python 3.13' - -# - script: | -# python -m pip install --upgrade pip -# pip install -r requirements.txt -# displayName: 'Install dependencies' - -# # Start LocalDB instance -# - powershell: | -# sqllocaldb create MSSQLLocalDB -# sqllocaldb start MSSQLLocalDB -# displayName: 'Start LocalDB instance' - -# # Create database and user -# - powershell: | -# sqlcmd -S "(localdb)\MSSQLLocalDB" -Q "CREATE DATABASE TestDB" -# sqlcmd -S "(localdb)\MSSQLLocalDB" -Q "CREATE LOGIN testuser WITH PASSWORD = '$(DB_PASSWORD)'" -# sqlcmd -S "(localdb)\MSSQLLocalDB" -d TestDB -Q "CREATE USER testuser FOR LOGIN testuser" -# sqlcmd -S "(localdb)\MSSQLLocalDB" -d TestDB -Q "ALTER ROLE db_owner ADD MEMBER testuser" -# displayName: 'Setup database and user' -# env: -# DB_PASSWORD: $(DB_PASSWORD) - -# - script: | -# cd mssql_python\pybind -# build.bat x64 -# displayName: 'Build .pyd file' - -# - script: | -# python -m pytest -v --junitxml=test-results.xml --cov=. 
--cov-report=xml --capture=tee-sys --cache-clear -# displayName: 'Run tests with coverage' -# env: -# DB_CONNECTION_STRING: 'Server=(localdb)\MSSQLLocalDB;Database=TestDB;Uid=testuser;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes' - -# - task: PublishBuildArtifacts@1 -# inputs: -# PathtoPublish: 'mssql_python/ddbc_bindings.cp313-amd64.pyd' -# ArtifactName: 'ddbc_bindings' -# publishLocation: 'Container' -# displayName: 'Publish pyd file as artifact' - -# - task: PublishBuildArtifacts@1 -# inputs: -# PathtoPublish: 'mssql_python/ddbc_bindings.cp313-amd64.pdb' -# ArtifactName: 'ddbc_bindings' -# publishLocation: 'Container' -# displayName: 'Publish pdb file as artifact' - -# - task: PublishTestResults@2 -# condition: succeededOrFailed() -# inputs: -# testResultsFiles: '**/test-results.xml' -# testRunTitle: 'Publish test results' - -# - task: PublishCodeCoverageResults@1 -# inputs: -# codeCoverageTool: 'Cobertura' -# summaryFileLocation: 'coverage.xml' -# displayName: 'Publish code coverage results' + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.13' + addToPath: true + githubToken: $(GITHUB_TOKEN) + displayName: 'Use Python 3.13' + + - script: | + python -m pip install --upgrade pip + pip install -r requirements.txt + displayName: 'Install dependencies' + + # Start LocalDB instance + - powershell: | + sqllocaldb create MSSQLLocalDB + sqllocaldb start MSSQLLocalDB + displayName: 'Start LocalDB instance' + + # Create database and user + - powershell: | + sqlcmd -S "(localdb)\MSSQLLocalDB" -Q "CREATE DATABASE TestDB" + sqlcmd -S "(localdb)\MSSQLLocalDB" -Q "CREATE LOGIN testuser WITH PASSWORD = '$(DB_PASSWORD)'" + sqlcmd -S "(localdb)\MSSQLLocalDB" -d TestDB -Q "CREATE USER testuser FOR LOGIN testuser" + sqlcmd -S "(localdb)\MSSQLLocalDB" -d TestDB -Q "ALTER ROLE db_owner ADD MEMBER testuser" + displayName: 'Setup database and user' + env: + DB_PASSWORD: $(DB_PASSWORD) + + - script: | + cd mssql_python\pybind + build.bat x64 + displayName: 'Build .pyd 
file' + + - script: | + python -m pytest -v --junitxml=test-results.xml --cov=. --cov-report=xml --capture=tee-sys --cache-clear + displayName: 'Run tests with coverage' + env: + DB_CONNECTION_STRING: 'Server=(localdb)\MSSQLLocalDB;Database=TestDB;Uid=testuser;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes' + + - task: PublishBuildArtifacts@1 + inputs: + PathtoPublish: 'mssql_python/ddbc_bindings.cp313-amd64.pyd' + ArtifactName: 'ddbc_bindings' + publishLocation: 'Container' + displayName: 'Publish pyd file as artifact' + + - task: PublishBuildArtifacts@1 + inputs: + PathtoPublish: 'mssql_python/ddbc_bindings.cp313-amd64.pdb' + ArtifactName: 'ddbc_bindings' + publishLocation: 'Container' + displayName: 'Publish pdb file as artifact' + + - task: PublishTestResults@2 + condition: succeededOrFailed() + inputs: + testResultsFiles: '**/test-results.xml' + testRunTitle: 'Publish test results' + + - task: PublishCodeCoverageResults@1 + inputs: + codeCoverageTool: 'Cobertura' + summaryFileLocation: 'coverage.xml' + displayName: 'Publish code coverage results' - job: PytestOnMacOS displayName: 'macOS x86_64' @@ -149,8 +149,6 @@ jobs: - script: | echo "Build successful, running tests now" - - python main.py python -m pytest -v --junitxml=test-results.xml --cov=. 
--cov-report=xml --capture=tee-sys --cache-clear displayName: 'Run pytest with coverage' env: @@ -163,1319 +161,1319 @@ jobs: testResultsFiles: '**/test-results.xml' testRunTitle: 'Publish pytest results on macOS' -# - job: PytestOnLinux -# displayName: 'Linux x86_64' -# pool: -# vmImage: 'ubuntu-latest' - -# strategy: -# matrix: -# Ubuntu: -# dockerImage: 'ubuntu:22.04' -# distroName: 'Ubuntu' -# Debian: -# dockerImage: 'debian:12' -# distroName: 'Debian' - -# steps: -# - script: | -# # Create a Docker container for testing -# docker run -d --name test-container-$(distroName) \ -# -v $(Build.SourcesDirectory):/workspace \ -# -w /workspace \ -# --network bridge \ -# $(dockerImage) \ -# tail -f /dev/null -# displayName: 'Create $(distroName) container' - -# - script: | -# # Start SQL Server container -# docker run -d --name sqlserver-$(distroName) \ -# -e ACCEPT_EULA=Y \ -# -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ -# -p 1433:1433 \ -# mcr.microsoft.com/mssql/server:2022-latest +- job: PytestOnLinux + displayName: 'Linux x86_64' + pool: + vmImage: 'ubuntu-latest' + + strategy: + matrix: + Ubuntu: + dockerImage: 'ubuntu:22.04' + distroName: 'Ubuntu' + Debian: + dockerImage: 'debian:12' + distroName: 'Debian' + + steps: + - script: | + # Create a Docker container for testing + docker run -d --name test-container-$(distroName) \ + -v $(Build.SourcesDirectory):/workspace \ + -w /workspace \ + --network bridge \ + $(dockerImage) \ + tail -f /dev/null + displayName: 'Create $(distroName) container' + + - script: | + # Start SQL Server container + docker run -d --name sqlserver-$(distroName) \ + -e ACCEPT_EULA=Y \ + -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ + -p 1433:1433 \ + mcr.microsoft.com/mssql/server:2022-latest -# # Wait for SQL Server to be ready -# echo "Waiting for SQL Server to start..." 
-# for i in {1..60}; do -# if docker exec sqlserver-$(distroName) \ -# /opt/mssql-tools18/bin/sqlcmd \ -# -S localhost \ -# -U SA \ -# -P "$(DB_PASSWORD)" \ -# -C -Q "SELECT 1" >/dev/null 2>&1; then -# echo "SQL Server is ready!" -# break -# fi -# echo "Waiting... ($i/60)" -# sleep 2 -# done + # Wait for SQL Server to be ready + echo "Waiting for SQL Server to start..." + for i in {1..60}; do + if docker exec sqlserver-$(distroName) \ + /opt/mssql-tools18/bin/sqlcmd \ + -S localhost \ + -U SA \ + -P "$(DB_PASSWORD)" \ + -C -Q "SELECT 1" >/dev/null 2>&1; then + echo "SQL Server is ready!" + break + fi + echo "Waiting... ($i/60)" + sleep 2 + done -# # Create test database -# docker exec sqlserver-$(distroName) \ -# /opt/mssql-tools18/bin/sqlcmd \ -# -S localhost \ -# -U SA \ -# -P "$(DB_PASSWORD)" \ -# -C -Q "CREATE DATABASE TestDB" -# displayName: 'Start SQL Server container for $(distroName)' -# env: -# DB_PASSWORD: $(DB_PASSWORD) - -# - script: | -# # Install dependencies in the container -# if [ "$(distroName)" = "Ubuntu" ]; then -# docker exec test-container-$(distroName) bash -c " -# export DEBIAN_FRONTEND=noninteractive -# export TZ=UTC -# ln -snf /usr/share/zoneinfo/\$TZ /etc/localtime && echo \$TZ > /etc/timezone -# apt-get update && -# apt-get install -y python3 python3-pip python3-venv python3-full cmake curl wget gnupg software-properties-common build-essential python3-dev pybind11-dev -# " -# else -# # Debian -# docker exec test-container-$(distroName) bash -c " -# export DEBIAN_FRONTEND=noninteractive -# export TZ=UTC -# ln -snf /usr/share/zoneinfo/\$TZ /etc/localtime && echo \$TZ > /etc/timezone -# apt-get update && -# apt-get install -y python3 python3-pip python3-venv python3-full cmake curl wget gnupg software-properties-common build-essential python3-dev pybind11-dev -# " -# fi -# displayName: 'Install basic dependencies in $(distroName) container' - -# - script: | -# # Install ODBC driver in the container -# docker exec 
test-container-$(distroName) bash -c " -# export DEBIAN_FRONTEND=noninteractive + # Create test database + docker exec sqlserver-$(distroName) \ + /opt/mssql-tools18/bin/sqlcmd \ + -S localhost \ + -U SA \ + -P "$(DB_PASSWORD)" \ + -C -Q "CREATE DATABASE TestDB" + displayName: 'Start SQL Server container for $(distroName)' + env: + DB_PASSWORD: $(DB_PASSWORD) + + - script: | + # Install dependencies in the container + if [ "$(distroName)" = "Ubuntu" ]; then + docker exec test-container-$(distroName) bash -c " + export DEBIAN_FRONTEND=noninteractive + export TZ=UTC + ln -snf /usr/share/zoneinfo/\$TZ /etc/localtime && echo \$TZ > /etc/timezone + apt-get update && + apt-get install -y python3 python3-pip python3-venv python3-full cmake curl wget gnupg software-properties-common build-essential python3-dev pybind11-dev + " + else + # Debian + docker exec test-container-$(distroName) bash -c " + export DEBIAN_FRONTEND=noninteractive + export TZ=UTC + ln -snf /usr/share/zoneinfo/\$TZ /etc/localtime && echo \$TZ > /etc/timezone + apt-get update && + apt-get install -y python3 python3-pip python3-venv python3-full cmake curl wget gnupg software-properties-common build-essential python3-dev pybind11-dev + " + fi + displayName: 'Install basic dependencies in $(distroName) container' + + - script: | + # Install ODBC driver in the container + docker exec test-container-$(distroName) bash -c " + export DEBIAN_FRONTEND=noninteractive -# # Download the package to configure the Microsoft repo -# if [ '$(distroName)' = 'Ubuntu' ]; then -# curl -sSL -O https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb -# else -# # Debian 12 -# curl -sSL -O https://packages.microsoft.com/config/debian/12/packages-microsoft-prod.deb -# fi + # Download the package to configure the Microsoft repo + if [ '$(distroName)' = 'Ubuntu' ]; then + curl -sSL -O https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb + else + # Debian 12 + curl -sSL -O 
https://packages.microsoft.com/config/debian/12/packages-microsoft-prod.deb + fi -# # Install the package -# dpkg -i packages-microsoft-prod.deb || true -# rm packages-microsoft-prod.deb + # Install the package + dpkg -i packages-microsoft-prod.deb || true + rm packages-microsoft-prod.deb -# # Update package list -# apt-get update + # Update package list + apt-get update -# # Install the driver -# ACCEPT_EULA=Y apt-get install -y msodbcsql18 -# # optional: for bcp and sqlcmd -# ACCEPT_EULA=Y apt-get install -y mssql-tools18 -# # optional: for unixODBC development headers -# apt-get install -y unixodbc-dev -# " -# displayName: 'Install ODBC Driver in $(distroName) container' - -# - script: | -# # Install Python dependencies in the container using virtual environment -# docker exec test-container-$(distroName) bash -c " -# # Create a virtual environment -# python3 -m venv /opt/venv -# source /opt/venv/bin/activate + # Install the driver + ACCEPT_EULA=Y apt-get install -y msodbcsql18 + # optional: for bcp and sqlcmd + ACCEPT_EULA=Y apt-get install -y mssql-tools18 + # optional: for unixODBC development headers + apt-get install -y unixodbc-dev + " + displayName: 'Install ODBC Driver in $(distroName) container' + + - script: | + # Install Python dependencies in the container using virtual environment + docker exec test-container-$(distroName) bash -c " + # Create a virtual environment + python3 -m venv /opt/venv + source /opt/venv/bin/activate -# # Install dependencies in the virtual environment -# python -m pip install --upgrade pip -# python -m pip install -r requirements.txt + # Install dependencies in the virtual environment + python -m pip install --upgrade pip + python -m pip install -r requirements.txt -# # Make the virtual environment globally available -# echo 'source /opt/venv/bin/activate' >> ~/.bashrc -# " -# displayName: 'Install Python dependencies in $(distroName) container' - -# - script: | -# # Build pybind bindings in the container -# docker exec 
test-container-$(distroName) bash -c " -# source /opt/venv/bin/activate -# cd mssql_python/pybind -# chmod +x build.sh -# ./build.sh -# " -# displayName: 'Build pybind bindings (.so) in $(distroName) container' - -# - script: | -# # Uninstall ODBC Driver before running tests -# docker exec test-container-$(distroName) bash -c " -# export DEBIAN_FRONTEND=noninteractive -# apt-get remove --purge -y msodbcsql18 mssql-tools18 unixodbc-dev -# rm -f /usr/bin/sqlcmd -# rm -f /usr/bin/bcp -# rm -rf /opt/microsoft/msodbcsql -# rm -f /lib/x86_64-linux-gnu/libodbcinst.so.2 -# odbcinst -u -d -n 'ODBC Driver 18 for SQL Server' || true -# echo 'Uninstalled ODBC Driver and cleaned up libraries' -# echo 'Verifying x86_64 debian_ubuntu driver library signatures:' -# ldd mssql_python/libs/linux/debian_ubuntu/x86_64/lib/libmsodbcsql-18.5.so.1.1 -# " -# displayName: 'Uninstall ODBC Driver before running tests in $(distroName) container' - -# - script: | -# # Run tests in the container -# # Get SQL Server container IP -# SQLSERVER_IP=$(docker inspect sqlserver-$(distroName) --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') -# echo "SQL Server IP: $SQLSERVER_IP" + # Make the virtual environment globally available + echo 'source /opt/venv/bin/activate' >> ~/.bashrc + " + displayName: 'Install Python dependencies in $(distroName) container' + + - script: | + # Build pybind bindings in the container + docker exec test-container-$(distroName) bash -c " + source /opt/venv/bin/activate + cd mssql_python/pybind + chmod +x build.sh + ./build.sh + " + displayName: 'Build pybind bindings (.so) in $(distroName) container' + + - script: | + # Uninstall ODBC Driver before running tests + docker exec test-container-$(distroName) bash -c " + export DEBIAN_FRONTEND=noninteractive + apt-get remove --purge -y msodbcsql18 mssql-tools18 unixodbc-dev + rm -f /usr/bin/sqlcmd + rm -f /usr/bin/bcp + rm -rf /opt/microsoft/msodbcsql + rm -f /lib/x86_64-linux-gnu/libodbcinst.so.2 + odbcinst -u 
-d -n 'ODBC Driver 18 for SQL Server' || true + echo 'Uninstalled ODBC Driver and cleaned up libraries' + echo 'Verifying x86_64 debian_ubuntu driver library signatures:' + ldd mssql_python/libs/linux/debian_ubuntu/x86_64/lib/libmsodbcsql-18.5.so.1.1 + " + displayName: 'Uninstall ODBC Driver before running tests in $(distroName) container' + + - script: | + # Run tests in the container + # Get SQL Server container IP + SQLSERVER_IP=$(docker inspect sqlserver-$(distroName) --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') + echo "SQL Server IP: $SQLSERVER_IP" -# docker exec \ -# -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ -# -e DB_PASSWORD="$(DB_PASSWORD)" \ -# test-container-$(distroName) bash -c " -# source /opt/venv/bin/activate -# echo 'Build successful, running tests now on $(distroName)' -# echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' -# python -m pytest -v --junitxml=test-results-$(distroName).xml --cov=. 
--cov-report=xml:coverage-$(distroName).xml --capture=tee-sys --cache-clear -# " -# displayName: 'Run pytest with coverage in $(distroName) container' -# env: -# DB_PASSWORD: $(DB_PASSWORD) - -# - script: | -# # Copy test results from container to host -# docker cp test-container-$(distroName):/workspace/test-results-$(distroName).xml $(Build.SourcesDirectory)/ -# docker cp test-container-$(distroName):/workspace/coverage-$(distroName).xml $(Build.SourcesDirectory)/ -# displayName: 'Copy test results from $(distroName) container' -# condition: always() - -# - script: | -# # Clean up containers -# docker stop test-container-$(distroName) || true -# docker rm test-container-$(distroName) || true -# docker stop sqlserver-$(distroName) || true -# docker rm sqlserver-$(distroName) || true -# displayName: 'Clean up $(distroName) containers' -# condition: always() - -# - task: PublishTestResults@2 -# condition: succeededOrFailed() -# inputs: -# testResultsFiles: '**/test-results-$(distroName).xml' -# testRunTitle: 'Publish pytest results on $(distroName)' - -# - job: PytestOnLinux_ARM64 -# displayName: 'Linux ARM64' -# pool: -# vmImage: 'ubuntu-latest' - -# strategy: -# matrix: -# Ubuntu_ARM64: -# dockerImage: 'ubuntu:22.04' -# distroName: 'Ubuntu' -# archName: 'arm64' -# Debian_ARM64: -# dockerImage: 'debian:12' -# distroName: 'Debian' -# archName: 'arm64' - -# steps: -# - script: | -# # Set up Docker buildx for multi-architecture support -# docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -# docker buildx create --name multiarch --driver docker-container --use -# docker buildx inspect --bootstrap -# displayName: 'Setup Docker buildx for ARM64 emulation' - -# - script: | -# # Create a Docker container for testing on ARM64 -# docker run -d --name test-container-$(distroName)-$(archName) \ -# --platform linux/arm64 \ -# -v $(Build.SourcesDirectory):/workspace \ -# -w /workspace \ -# --network bridge \ -# $(dockerImage) \ -# tail -f /dev/null -# 
displayName: 'Create $(distroName) ARM64 container' - -# - script: | -# # Start SQL Server container (x86_64 - SQL Server doesn't support ARM64) -# docker run -d --name sqlserver-$(distroName)-$(archName) \ -# --platform linux/amd64 \ -# -e ACCEPT_EULA=Y \ -# -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ -# -p 1433:1433 \ -# mcr.microsoft.com/mssql/server:2022-latest + docker exec \ + -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ + -e DB_PASSWORD="$(DB_PASSWORD)" \ + test-container-$(distroName) bash -c " + source /opt/venv/bin/activate + echo 'Build successful, running tests now on $(distroName)' + echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' + python -m pytest -v --junitxml=test-results-$(distroName).xml --cov=. --cov-report=xml:coverage-$(distroName).xml --capture=tee-sys --cache-clear + " + displayName: 'Run pytest with coverage in $(distroName) container' + env: + DB_PASSWORD: $(DB_PASSWORD) + + - script: | + # Copy test results from container to host + docker cp test-container-$(distroName):/workspace/test-results-$(distroName).xml $(Build.SourcesDirectory)/ + docker cp test-container-$(distroName):/workspace/coverage-$(distroName).xml $(Build.SourcesDirectory)/ + displayName: 'Copy test results from $(distroName) container' + condition: always() + + - script: | + # Clean up containers + docker stop test-container-$(distroName) || true + docker rm test-container-$(distroName) || true + docker stop sqlserver-$(distroName) || true + docker rm sqlserver-$(distroName) || true + displayName: 'Clean up $(distroName) containers' + condition: always() + + - task: PublishTestResults@2 + condition: succeededOrFailed() + inputs: + testResultsFiles: '**/test-results-$(distroName).xml' + testRunTitle: 'Publish pytest results on $(distroName)' + +- job: 
PytestOnLinux_ARM64 + displayName: 'Linux ARM64' + pool: + vmImage: 'ubuntu-latest' + + strategy: + matrix: + Ubuntu_ARM64: + dockerImage: 'ubuntu:22.04' + distroName: 'Ubuntu' + archName: 'arm64' + Debian_ARM64: + dockerImage: 'debian:12' + distroName: 'Debian' + archName: 'arm64' + + steps: + - script: | + # Set up Docker buildx for multi-architecture support + docker run --rm --privileged multiarch/qemu-user-static --reset -p yes + docker buildx create --name multiarch --driver docker-container --use + docker buildx inspect --bootstrap + displayName: 'Setup Docker buildx for ARM64 emulation' + + - script: | + # Create a Docker container for testing on ARM64 + docker run -d --name test-container-$(distroName)-$(archName) \ + --platform linux/arm64 \ + -v $(Build.SourcesDirectory):/workspace \ + -w /workspace \ + --network bridge \ + $(dockerImage) \ + tail -f /dev/null + displayName: 'Create $(distroName) ARM64 container' + + - script: | + # Start SQL Server container (x86_64 - SQL Server doesn't support ARM64) + docker run -d --name sqlserver-$(distroName)-$(archName) \ + --platform linux/amd64 \ + -e ACCEPT_EULA=Y \ + -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ + -p 1433:1433 \ + mcr.microsoft.com/mssql/server:2022-latest -# # Wait for SQL Server to be ready -# echo "Waiting for SQL Server to start..." -# for i in {1..60}; do -# if docker exec sqlserver-$(distroName)-$(archName) \ -# /opt/mssql-tools18/bin/sqlcmd \ -# -S localhost \ -# -U SA \ -# -P "$(DB_PASSWORD)" \ -# -C -Q "SELECT 1" >/dev/null 2>&1; then -# echo "SQL Server is ready!" -# break -# fi -# echo "Waiting... ($i/60)" -# sleep 2 -# done + # Wait for SQL Server to be ready + echo "Waiting for SQL Server to start..." + for i in {1..60}; do + if docker exec sqlserver-$(distroName)-$(archName) \ + /opt/mssql-tools18/bin/sqlcmd \ + -S localhost \ + -U SA \ + -P "$(DB_PASSWORD)" \ + -C -Q "SELECT 1" >/dev/null 2>&1; then + echo "SQL Server is ready!" + break + fi + echo "Waiting... 
($i/60)" + sleep 2 + done -# # Create test database -# docker exec sqlserver-$(distroName)-$(archName) \ -# /opt/mssql-tools18/bin/sqlcmd \ -# -S localhost \ -# -U SA \ -# -P "$(DB_PASSWORD)" \ -# -C -Q "CREATE DATABASE TestDB" -# displayName: 'Start SQL Server container for $(distroName) ARM64' -# env: -# DB_PASSWORD: $(DB_PASSWORD) - -# - script: | -# # Install dependencies in the ARM64 container -# if [ "$(distroName)" = "Ubuntu" ]; then -# docker exec test-container-$(distroName)-$(archName) bash -c " -# export DEBIAN_FRONTEND=noninteractive -# export TZ=UTC -# ln -snf /usr/share/zoneinfo/\$TZ /etc/localtime && echo \$TZ > /etc/timezone -# apt-get update && -# apt-get install -y python3 python3-pip python3-venv python3-full cmake curl wget gnupg software-properties-common build-essential python3-dev pybind11-dev -# # Verify architecture -# uname -m -# dpkg --print-architecture -# " -# else -# # Debian ARM64 -# docker exec test-container-$(distroName)-$(archName) bash -c " -# export DEBIAN_FRONTEND=noninteractive -# export TZ=UTC -# ln -snf /usr/share/zoneinfo/\$TZ /etc/localtime && echo \$TZ > /etc/timezone -# apt-get update && -# apt-get install -y python3 python3-pip python3-venv python3-full cmake curl wget gnupg software-properties-common build-essential python3-dev pybind11-dev -# # Verify architecture -# uname -m -# dpkg --print-architecture -# " -# fi -# displayName: 'Install basic dependencies in $(distroName) ARM64 container' - -# - script: | -# # Install ODBC driver in the ARM64 container -# docker exec test-container-$(distroName)-$(archName) bash -c " -# export DEBIAN_FRONTEND=noninteractive + # Create test database + docker exec sqlserver-$(distroName)-$(archName) \ + /opt/mssql-tools18/bin/sqlcmd \ + -S localhost \ + -U SA \ + -P "$(DB_PASSWORD)" \ + -C -Q "CREATE DATABASE TestDB" + displayName: 'Start SQL Server container for $(distroName) ARM64' + env: + DB_PASSWORD: $(DB_PASSWORD) + + - script: | + # Install dependencies in the ARM64 container 
+ if [ "$(distroName)" = "Ubuntu" ]; then + docker exec test-container-$(distroName)-$(archName) bash -c " + export DEBIAN_FRONTEND=noninteractive + export TZ=UTC + ln -snf /usr/share/zoneinfo/\$TZ /etc/localtime && echo \$TZ > /etc/timezone + apt-get update && + apt-get install -y python3 python3-pip python3-venv python3-full cmake curl wget gnupg software-properties-common build-essential python3-dev pybind11-dev + # Verify architecture + uname -m + dpkg --print-architecture + " + else + # Debian ARM64 + docker exec test-container-$(distroName)-$(archName) bash -c " + export DEBIAN_FRONTEND=noninteractive + export TZ=UTC + ln -snf /usr/share/zoneinfo/\$TZ /etc/localtime && echo \$TZ > /etc/timezone + apt-get update && + apt-get install -y python3 python3-pip python3-venv python3-full cmake curl wget gnupg software-properties-common build-essential python3-dev pybind11-dev + # Verify architecture + uname -m + dpkg --print-architecture + " + fi + displayName: 'Install basic dependencies in $(distroName) ARM64 container' + + - script: | + # Install ODBC driver in the ARM64 container + docker exec test-container-$(distroName)-$(archName) bash -c " + export DEBIAN_FRONTEND=noninteractive -# # Download the package to configure the Microsoft repo -# if [ '$(distroName)' = 'Ubuntu' ]; then -# curl -sSL -O https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb -# else -# # Debian 12 -# curl -sSL -O https://packages.microsoft.com/config/debian/12/packages-microsoft-prod.deb -# fi + # Download the package to configure the Microsoft repo + if [ '$(distroName)' = 'Ubuntu' ]; then + curl -sSL -O https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb + else + # Debian 12 + curl -sSL -O https://packages.microsoft.com/config/debian/12/packages-microsoft-prod.deb + fi -# # Install the package -# dpkg -i packages-microsoft-prod.deb || true -# rm packages-microsoft-prod.deb + # Install the package + dpkg -i packages-microsoft-prod.deb 
|| true + rm packages-microsoft-prod.deb -# # Update package list -# apt-get update + # Update package list + apt-get update -# # Install the driver (ARM64 version) -# ACCEPT_EULA=Y apt-get install -y msodbcsql18 -# # optional: for bcp and sqlcmd -# ACCEPT_EULA=Y apt-get install -y mssql-tools18 -# # optional: for unixODBC development headers -# apt-get install -y unixodbc-dev -# " -# displayName: 'Install ODBC Driver in $(distroName) ARM64 container' - -# - script: | -# # Install Python dependencies in the ARM64 container using virtual environment -# docker exec test-container-$(distroName)-$(archName) bash -c " -# # Create a virtual environment -# python3 -m venv /opt/venv -# source /opt/venv/bin/activate + # Install the driver (ARM64 version) + ACCEPT_EULA=Y apt-get install -y msodbcsql18 + # optional: for bcp and sqlcmd + ACCEPT_EULA=Y apt-get install -y mssql-tools18 + # optional: for unixODBC development headers + apt-get install -y unixodbc-dev + " + displayName: 'Install ODBC Driver in $(distroName) ARM64 container' + + - script: | + # Install Python dependencies in the ARM64 container using virtual environment + docker exec test-container-$(distroName)-$(archName) bash -c " + # Create a virtual environment + python3 -m venv /opt/venv + source /opt/venv/bin/activate -# # Install dependencies in the virtual environment -# python -m pip install --upgrade pip -# python -m pip install -r requirements.txt + # Install dependencies in the virtual environment + python -m pip install --upgrade pip + python -m pip install -r requirements.txt -# # Make the virtual environment globally available -# echo 'source /opt/venv/bin/activate' >> ~/.bashrc -# " -# displayName: 'Install Python dependencies in $(distroName) ARM64 container' - -# - script: | -# # Build pybind bindings in the ARM64 container -# docker exec test-container-$(distroName)-$(archName) bash -c " -# source /opt/venv/bin/activate -# cd mssql_python/pybind -# chmod +x build.sh -# ./build.sh -# " -# 
displayName: 'Build pybind bindings (.so) in $(distroName) ARM64 container'
-
-#   - script: |
-#       # Uninstall ODBC Driver before running tests
-#       docker exec test-container-$(distroName)-$(archName) bash -c "
-#         export DEBIAN_FRONTEND=noninteractive
-#         apt-get remove --purge -y msodbcsql18 mssql-tools18 unixodbc-dev
-#         rm -f /usr/bin/sqlcmd
-#         rm -f /usr/bin/bcp
-#         rm -rf /opt/microsoft/msodbcsql
-#         rm -f /lib/aarch64-linux-gnu/libodbcinst.so.2
-#         odbcinst -u -d -n 'ODBC Driver 11 for SQL Server' || true
-#         echo 'Uninstalled ODBC Driver and cleaned up libraries'
-#         echo 'Verifying arm64 debian_ubuntu driver library signatures:'
-#         ldd mssql_python/libs/linux/debian_ubuntu/arm64/lib/libmsodbcsql-18.5.so.1.1
-#       "
-#     displayName: 'Uninstall ODBC Driver before running tests in $(distroName) ARM64 container'
-
-#   - script: |
-#       # Run tests in the ARM64 container
-#       # Get SQL Server container IP
-#       SQLSERVER_IP=$(docker inspect sqlserver-$(distroName)-$(archName) --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}')
-#       echo "SQL Server IP: $SQLSERVER_IP"
+        # Make the virtual environment globally available
+        echo 'source /opt/venv/bin/activate' >> ~/.bashrc
+      "
+    displayName: 'Install Python dependencies in $(distroName) ARM64 container'
+
+  - script: |
+      # Build pybind bindings in the ARM64 container
+      docker exec test-container-$(distroName)-$(archName) bash -c "
+        source /opt/venv/bin/activate
+        cd mssql_python/pybind
+        chmod +x build.sh
+        ./build.sh
+      "
+    displayName: 'Build pybind bindings (.so) in $(distroName) ARM64 container'
+
+  - script: |
+      # Uninstall ODBC Driver before running tests
+      docker exec test-container-$(distroName)-$(archName) bash -c "
+        export DEBIAN_FRONTEND=noninteractive
+        apt-get remove --purge -y msodbcsql18 mssql-tools18 unixodbc-dev
+        rm -f /usr/bin/sqlcmd
+        rm -f /usr/bin/bcp
+        rm -rf /opt/microsoft/msodbcsql
+        rm -f /lib/aarch64-linux-gnu/libodbcinst.so.2
+        odbcinst -u -d -n 'ODBC Driver 18 for SQL Server' || true
+        echo 
'Uninstalled ODBC Driver and cleaned up libraries' + echo 'Verifying arm64 debian_ubuntu driver library signatures:' + ldd mssql_python/libs/linux/debian_ubuntu/arm64/lib/libmsodbcsql-18.5.so.1.1 + " + displayName: 'Uninstall ODBC Driver before running tests in $(distroName) ARM64 container' + + - script: | + # Run tests in the ARM64 container + # Get SQL Server container IP + SQLSERVER_IP=$(docker inspect sqlserver-$(distroName)-$(archName) --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') + echo "SQL Server IP: $SQLSERVER_IP" -# docker exec \ -# -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ -# -e DB_PASSWORD="$(DB_PASSWORD)" \ -# test-container-$(distroName)-$(archName) bash -c " -# source /opt/venv/bin/activate -# echo 'Build successful, running tests now on $(distroName) ARM64' -# echo 'Architecture:' \$(uname -m) -# echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' -# python main.py -# python -m pytest -v --junitxml=test-results-$(distroName)-$(archName).xml --cov=. 
--cov-report=xml:coverage-$(distroName)-$(archName).xml --capture=tee-sys --cache-clear -# " -# displayName: 'Run pytest with coverage in $(distroName) ARM64 container' -# env: -# DB_PASSWORD: $(DB_PASSWORD) - -# - script: | -# # Copy test results from container to host -# docker cp test-container-$(distroName)-$(archName):/workspace/test-results-$(distroName)-$(archName).xml $(Build.SourcesDirectory)/ -# docker cp test-container-$(distroName)-$(archName):/workspace/coverage-$(distroName)-$(archName).xml $(Build.SourcesDirectory)/ -# displayName: 'Copy test results from $(distroName) ARM64 container' -# condition: always() - -# - script: | -# # Clean up containers -# docker stop test-container-$(distroName)-$(archName) || true -# docker rm test-container-$(distroName)-$(archName) || true -# docker stop sqlserver-$(distroName)-$(archName) || true -# docker rm sqlserver-$(distroName)-$(archName) || true -# displayName: 'Clean up $(distroName) ARM64 containers' -# condition: always() - -# - task: PublishTestResults@2 -# condition: succeededOrFailed() -# inputs: -# testResultsFiles: '**/test-results-$(distroName)-$(archName).xml' -# testRunTitle: 'Publish pytest results on $(distroName) ARM64' - -# - job: PytestOnLinux_RHEL9 -# displayName: 'Linux RedHat x86_64' -# pool: -# vmImage: 'ubuntu-latest' - -# steps: -# - script: | -# # Create a Docker container for testing -# docker run -d --name test-container-rhel9 \ -# -v $(Build.SourcesDirectory):/workspace \ -# -w /workspace \ -# --network bridge \ -# redhat/ubi9:latest \ -# tail -f /dev/null -# displayName: 'Create RHEL 9 container' - -# - script: | -# # Start SQL Server container -# docker run -d --name sqlserver-rhel9 \ -# -e ACCEPT_EULA=Y \ -# -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ -# -p 1433:1433 \ -# mcr.microsoft.com/mssql/server:2022-latest + docker exec \ + -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ 
+ -e DB_PASSWORD="$(DB_PASSWORD)" \ + test-container-$(distroName)-$(archName) bash -c " + source /opt/venv/bin/activate + echo 'Build successful, running tests now on $(distroName) ARM64' + echo 'Architecture:' \$(uname -m) + echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' + python main.py + python -m pytest -v --junitxml=test-results-$(distroName)-$(archName).xml --cov=. --cov-report=xml:coverage-$(distroName)-$(archName).xml --capture=tee-sys --cache-clear + " + displayName: 'Run pytest with coverage in $(distroName) ARM64 container' + env: + DB_PASSWORD: $(DB_PASSWORD) + + - script: | + # Copy test results from container to host + docker cp test-container-$(distroName)-$(archName):/workspace/test-results-$(distroName)-$(archName).xml $(Build.SourcesDirectory)/ + docker cp test-container-$(distroName)-$(archName):/workspace/coverage-$(distroName)-$(archName).xml $(Build.SourcesDirectory)/ + displayName: 'Copy test results from $(distroName) ARM64 container' + condition: always() + + - script: | + # Clean up containers + docker stop test-container-$(distroName)-$(archName) || true + docker rm test-container-$(distroName)-$(archName) || true + docker stop sqlserver-$(distroName)-$(archName) || true + docker rm sqlserver-$(distroName)-$(archName) || true + displayName: 'Clean up $(distroName) ARM64 containers' + condition: always() + + - task: PublishTestResults@2 + condition: succeededOrFailed() + inputs: + testResultsFiles: '**/test-results-$(distroName)-$(archName).xml' + testRunTitle: 'Publish pytest results on $(distroName) ARM64' + +- job: PytestOnLinux_RHEL9 + displayName: 'Linux RedHat x86_64' + pool: + vmImage: 'ubuntu-latest' + + steps: + - script: | + # Create a Docker container for testing + docker run -d --name test-container-rhel9 \ + -v $(Build.SourcesDirectory):/workspace \ + -w /workspace \ + --network bridge \ + redhat/ubi9:latest \ + tail -f 
/dev/null + displayName: 'Create RHEL 9 container' + + - script: | + # Start SQL Server container + docker run -d --name sqlserver-rhel9 \ + -e ACCEPT_EULA=Y \ + -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ + -p 1433:1433 \ + mcr.microsoft.com/mssql/server:2022-latest -# # Wait for SQL Server to be ready -# echo "Waiting for SQL Server to start..." -# for i in {1..60}; do -# if docker exec sqlserver-rhel9 \ -# /opt/mssql-tools18/bin/sqlcmd \ -# -S localhost \ -# -U SA \ -# -P "$(DB_PASSWORD)" \ -# -C -Q "SELECT 1" >/dev/null 2>&1; then -# echo "SQL Server is ready!" -# break -# fi -# echo "Waiting... ($i/60)" -# sleep 2 -# done + # Wait for SQL Server to be ready + echo "Waiting for SQL Server to start..." + for i in {1..60}; do + if docker exec sqlserver-rhel9 \ + /opt/mssql-tools18/bin/sqlcmd \ + -S localhost \ + -U SA \ + -P "$(DB_PASSWORD)" \ + -C -Q "SELECT 1" >/dev/null 2>&1; then + echo "SQL Server is ready!" + break + fi + echo "Waiting... ($i/60)" + sleep 2 + done -# # Create test database -# docker exec sqlserver-rhel9 \ -# /opt/mssql-tools18/bin/sqlcmd \ -# -S localhost \ -# -U SA \ -# -P "$(DB_PASSWORD)" \ -# -C -Q "CREATE DATABASE TestDB" -# displayName: 'Start SQL Server container for RHEL 9' -# env: -# DB_PASSWORD: $(DB_PASSWORD) - -# - script: | -# # Install dependencies in the RHEL 9 container -# docker exec test-container-rhel9 bash -c " -# # Enable CodeReady Builder repository for additional packages -# dnf update -y -# dnf install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm -# subscription-manager repos --enable codeready-builder-for-rhel-9-$(arch)-rpms || dnf config-manager --set-enabled ubi-9-codeready-builder + # Create test database + docker exec sqlserver-rhel9 \ + /opt/mssql-tools18/bin/sqlcmd \ + -S localhost \ + -U SA \ + -P "$(DB_PASSWORD)" \ + -C -Q "CREATE DATABASE TestDB" + displayName: 'Start SQL Server container for RHEL 9' + env: + DB_PASSWORD: $(DB_PASSWORD) + + - script: | + # Install dependencies in the 
RHEL 9 container + docker exec test-container-rhel9 bash -c " + # Enable CodeReady Builder repository for additional packages + dnf update -y + dnf install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm + subscription-manager repos --enable codeready-builder-for-rhel-9-$(arch)-rpms || dnf config-manager --set-enabled ubi-9-codeready-builder -# # Install Python 3.9 (available in RHEL 9 UBI) and development tools -# dnf install -y python3 python3-pip python3-devel cmake curl wget gnupg2 glibc-devel kernel-headers -# dnf install -y python3-libs python3-debug -# dnf install -y gcc gcc-c++ make binutils -# dnf install -y cmake -# # If that doesn't work, try installing from different repositories -# if ! which gcc; then -# echo 'Trying alternative gcc installation...' -# dnf --enablerepo=ubi-9-codeready-builder install -y gcc gcc-c++ -# fi -# # Verify installation -# python3 --version -# which gcc && which g++ -# gcc --version -# g++ --version -# " -# displayName: 'Install basic dependencies in RHEL 9 container' - -# - script: | -# # Verify compiler installation and set environment for RHEL 9 -# docker exec test-container-rhel9 bash -c " -# # Verify compilers are available -# which gcc || echo 'GCC not found' -# which g++ || echo 'G++ not found' -# gcc --version || echo 'GCC version check failed' -# g++ --version || echo 'G++ version check failed' + # Install Python 3.9 (available in RHEL 9 UBI) and development tools + dnf install -y python3 python3-pip python3-devel cmake curl wget gnupg2 glibc-devel kernel-headers + dnf install -y python3-libs python3-debug + dnf install -y gcc gcc-c++ make binutils + dnf install -y cmake + # If that doesn't work, try installing from different repositories + if ! which gcc; then + echo 'Trying alternative gcc installation...' 
+ dnf --enablerepo=ubi-9-codeready-builder install -y gcc gcc-c++ + fi + # Verify installation + python3 --version + which gcc && which g++ + gcc --version + g++ --version + " + displayName: 'Install basic dependencies in RHEL 9 container' + + - script: | + # Verify compiler installation and set environment for RHEL 9 + docker exec test-container-rhel9 bash -c " + # Verify compilers are available + which gcc || echo 'GCC not found' + which g++ || echo 'G++ not found' + gcc --version || echo 'GCC version check failed' + g++ --version || echo 'G++ version check failed' -# # Set compiler environment variables -# export CC=/usr/bin/gcc -# export CXX=/usr/bin/g++ -# echo 'CC set to:' \$CC -# echo 'CXX set to:' \$CXX + # Set compiler environment variables + export CC=/usr/bin/gcc + export CXX=/usr/bin/g++ + echo 'CC set to:' \$CC + echo 'CXX set to:' \$CXX -# # Create a wrapper script to preserve environment -# cat > /workspace/setup_env.sh << 'EOF' -# #!/bin/bash -# export CC=/usr/bin/gcc -# export CXX=/usr/bin/g++ -# export PATH=/usr/bin:\$PATH -# exec \"\$@\" -# EOF -# chmod +x /workspace/setup_env.sh -# " -# displayName: 'Verify and configure compilers in RHEL 9 container' - -# - script: | -# # Install ODBC driver in the RHEL 9 container -# docker exec test-container-rhel9 bash -c " -# # Add Microsoft repository for RHEL 9 -# curl -sSL -o /etc/yum.repos.d/msprod.repo https://packages.microsoft.com/config/rhel/9/prod.repo + # Create a wrapper script to preserve environment + cat > /workspace/setup_env.sh << 'EOF' + #!/bin/bash + export CC=/usr/bin/gcc + export CXX=/usr/bin/g++ + export PATH=/usr/bin:\$PATH + exec \"\$@\" + EOF + chmod +x /workspace/setup_env.sh + " + displayName: 'Verify and configure compilers in RHEL 9 container' + + - script: | + # Install ODBC driver in the RHEL 9 container + docker exec test-container-rhel9 bash -c " + # Add Microsoft repository for RHEL 9 + curl -sSL -o /etc/yum.repos.d/msprod.repo 
https://packages.microsoft.com/config/rhel/9/prod.repo -# # Install the driver -# ACCEPT_EULA=Y dnf install -y msodbcsql18 -# # optional: for bcp and sqlcmd -# ACCEPT_EULA=Y dnf install -y mssql-tools18 -# # optional: for unixODBC development headers -# dnf install -y unixODBC-devel -# " -# displayName: 'Install ODBC Driver in RHEL 9 container' - -# - script: | -# # Install Python dependencies in the container using virtual environment -# docker exec test-container-rhel9 bash -c " -# # Create a virtual environment with Python 3.9 -# python3 -m venv myvenv -# source myvenv/bin/activate - -# # Install dependencies in the virtual environment -# python -m pip install --upgrade pip -# python -m pip install -r requirements.txt + # Install the driver + ACCEPT_EULA=Y dnf install -y msodbcsql18 + # optional: for bcp and sqlcmd + ACCEPT_EULA=Y dnf install -y mssql-tools18 + # optional: for unixODBC development headers + dnf install -y unixODBC-devel + " + displayName: 'Install ODBC Driver in RHEL 9 container' + + - script: | + # Install Python dependencies in the container using virtual environment + docker exec test-container-rhel9 bash -c " + # Create a virtual environment with Python 3.9 + python3 -m venv myvenv + source myvenv/bin/activate + + # Install dependencies in the virtual environment + python -m pip install --upgrade pip + python -m pip install -r requirements.txt -# # Make the virtual environment globally available -# echo 'source myvenv/bin/activate' >> ~/.bashrc -# " -# displayName: 'Install Python dependencies in RHEL 9 container' - -# - script: | -# # Build pybind bindings in the container -# docker exec test-container-rhel9 bash -c " -# source myvenv/bin/activate -# ls /usr/include/python3.9 -# # Set compiler environment variables -# export CC=/usr/bin/gcc -# export CXX=/usr/bin/g++ - -# cd mssql_python/pybind -# chmod +x build.sh -# ./build.sh -# " -# displayName: 'Build pybind bindings (.so) in RHEL 9 container' - -# - script: | -# # Uninstall ODBC 
Driver before running tests -# docker exec test-container-rhel9 bash -c " -# dnf remove -y msodbcsql18 mssql-tools18 unixODBC-devel -# rm -f /usr/bin/sqlcmd -# rm -f /usr/bin/bcp -# rm -rf /opt/microsoft/msodbcsql -# rm -f /lib64/libodbcinst.so.2 -# odbcinst -u -d -n 'ODBC Driver 11 for SQL Server' || true -# echo 'Uninstalled ODBC Driver and cleaned up libraries' -# echo 'Verifying x86_64 rhel driver library signatures:' -# ldd mssql_python/libs/linux/rhel/x86_64/lib/libmsodbcsql-18.5.so.1.1 -# " -# displayName: 'Uninstall ODBC Driver before running tests in RHEL 9 container' - -# - script: | -# # Run tests in the container -# # Get SQL Server container IP -# SQLSERVER_IP=$(docker inspect sqlserver-rhel9 --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') -# echo "SQL Server IP: $SQLSERVER_IP" + # Make the virtual environment globally available + echo 'source myvenv/bin/activate' >> ~/.bashrc + " + displayName: 'Install Python dependencies in RHEL 9 container' + + - script: | + # Build pybind bindings in the container + docker exec test-container-rhel9 bash -c " + source myvenv/bin/activate + ls /usr/include/python3.9 + # Set compiler environment variables + export CC=/usr/bin/gcc + export CXX=/usr/bin/g++ + + cd mssql_python/pybind + chmod +x build.sh + ./build.sh + " + displayName: 'Build pybind bindings (.so) in RHEL 9 container' + + - script: | + # Uninstall ODBC Driver before running tests + docker exec test-container-rhel9 bash -c " + dnf remove -y msodbcsql18 mssql-tools18 unixODBC-devel + rm -f /usr/bin/sqlcmd + rm -f /usr/bin/bcp + rm -rf /opt/microsoft/msodbcsql + rm -f /lib64/libodbcinst.so.2 + odbcinst -u -d -n 'ODBC Driver 11 for SQL Server' || true + echo 'Uninstalled ODBC Driver and cleaned up libraries' + echo 'Verifying x86_64 rhel driver library signatures:' + ldd mssql_python/libs/linux/rhel/x86_64/lib/libmsodbcsql-18.5.so.1.1 + " + displayName: 'Uninstall ODBC Driver before running tests in RHEL 9 container' + + - script: | + # 
Run tests in the container + # Get SQL Server container IP + SQLSERVER_IP=$(docker inspect sqlserver-rhel9 --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') + echo "SQL Server IP: $SQLSERVER_IP" -# docker exec \ -# -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ -# -e DB_PASSWORD="$(DB_PASSWORD)" \ -# test-container-rhel9 bash -c " -# source myvenv/bin/activate -# echo 'Build successful, running tests now on RHEL 9' -# echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' -# python main.py -# python -m pytest -v --junitxml=test-results-rhel9.xml --cov=. --cov-report=xml:coverage-rhel9.xml --capture=tee-sys --cache-clear -# " -# displayName: 'Run pytest with coverage in RHEL 9 container' -# env: -# DB_PASSWORD: $(DB_PASSWORD) - -# - script: | -# # Copy test results from container to host -# docker cp test-container-rhel9:/workspace/test-results-rhel9.xml $(Build.SourcesDirectory)/ -# docker cp test-container-rhel9:/workspace/coverage-rhel9.xml $(Build.SourcesDirectory)/ -# displayName: 'Copy test results from RHEL 9 container' -# condition: always() - -# - script: | -# # Clean up containers -# docker stop test-container-rhel9 || true -# docker rm test-container-rhel9 || true -# docker stop sqlserver-rhel9 || true -# docker rm sqlserver-rhel9 || true -# displayName: 'Clean up RHEL 9 containers' -# condition: always() - -# - task: PublishTestResults@2 -# condition: succeededOrFailed() -# inputs: -# testResultsFiles: '**/test-results-rhel9.xml' -# testRunTitle: 'Publish pytest results on RHEL 9' - -# - job: PytestOnLinux_RHEL9_ARM64 -# displayName: 'Linux RedHat ARM64' -# pool: -# vmImage: 'ubuntu-latest' - -# steps: -# - script: | -# # Set up Docker buildx for multi-architecture support -# docker run --rm --privileged multiarch/qemu-user-static --reset 
-p yes -# docker buildx create --name multiarch --driver docker-container --use -# docker buildx inspect --bootstrap -# displayName: 'Setup Docker buildx for ARM64 emulation' - -# - script: | -# # Create a Docker container for testing on ARM64 -# docker run -d --name test-container-rhel9-arm64 \ -# --platform linux/arm64 \ -# -v $(Build.SourcesDirectory):/workspace \ -# -w /workspace \ -# --network bridge \ -# redhat/ubi9:latest \ -# tail -f /dev/null -# displayName: 'Create RHEL 9 ARM64 container' - -# - script: | -# # Start SQL Server container (x86_64 - SQL Server doesn't support ARM64) -# docker run -d --name sqlserver-rhel9-arm64 \ -# --platform linux/amd64 \ -# -e ACCEPT_EULA=Y \ -# -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ -# -p 1433:1433 \ -# mcr.microsoft.com/mssql/server:2022-latest + docker exec \ + -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ + -e DB_PASSWORD="$(DB_PASSWORD)" \ + test-container-rhel9 bash -c " + source myvenv/bin/activate + echo 'Build successful, running tests now on RHEL 9' + echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' + python main.py + python -m pytest -v --junitxml=test-results-rhel9.xml --cov=. 
--cov-report=xml:coverage-rhel9.xml --capture=tee-sys --cache-clear + " + displayName: 'Run pytest with coverage in RHEL 9 container' + env: + DB_PASSWORD: $(DB_PASSWORD) + + - script: | + # Copy test results from container to host + docker cp test-container-rhel9:/workspace/test-results-rhel9.xml $(Build.SourcesDirectory)/ + docker cp test-container-rhel9:/workspace/coverage-rhel9.xml $(Build.SourcesDirectory)/ + displayName: 'Copy test results from RHEL 9 container' + condition: always() + + - script: | + # Clean up containers + docker stop test-container-rhel9 || true + docker rm test-container-rhel9 || true + docker stop sqlserver-rhel9 || true + docker rm sqlserver-rhel9 || true + displayName: 'Clean up RHEL 9 containers' + condition: always() + + - task: PublishTestResults@2 + condition: succeededOrFailed() + inputs: + testResultsFiles: '**/test-results-rhel9.xml' + testRunTitle: 'Publish pytest results on RHEL 9' + +- job: PytestOnLinux_RHEL9_ARM64 + displayName: 'Linux RedHat ARM64' + pool: + vmImage: 'ubuntu-latest' + + steps: + - script: | + # Set up Docker buildx for multi-architecture support + docker run --rm --privileged multiarch/qemu-user-static --reset -p yes + docker buildx create --name multiarch --driver docker-container --use + docker buildx inspect --bootstrap + displayName: 'Setup Docker buildx for ARM64 emulation' + + - script: | + # Create a Docker container for testing on ARM64 + docker run -d --name test-container-rhel9-arm64 \ + --platform linux/arm64 \ + -v $(Build.SourcesDirectory):/workspace \ + -w /workspace \ + --network bridge \ + redhat/ubi9:latest \ + tail -f /dev/null + displayName: 'Create RHEL 9 ARM64 container' + + - script: | + # Start SQL Server container (x86_64 - SQL Server doesn't support ARM64) + docker run -d --name sqlserver-rhel9-arm64 \ + --platform linux/amd64 \ + -e ACCEPT_EULA=Y \ + -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ + -p 1433:1433 \ + mcr.microsoft.com/mssql/server:2022-latest -# # Wait for SQL Server to be 
ready -# echo "Waiting for SQL Server to start..." -# for i in {1..60}; do -# if docker exec sqlserver-rhel9-arm64 \ -# /opt/mssql-tools18/bin/sqlcmd \ -# -S localhost \ -# -U SA \ -# -P "$(DB_PASSWORD)" \ -# -C -Q "SELECT 1" >/dev/null 2>&1; then -# echo "SQL Server is ready!" -# break -# fi -# echo "Waiting... ($i/60)" -# sleep 2 -# done + # Wait for SQL Server to be ready + echo "Waiting for SQL Server to start..." + for i in {1..60}; do + if docker exec sqlserver-rhel9-arm64 \ + /opt/mssql-tools18/bin/sqlcmd \ + -S localhost \ + -U SA \ + -P "$(DB_PASSWORD)" \ + -C -Q "SELECT 1" >/dev/null 2>&1; then + echo "SQL Server is ready!" + break + fi + echo "Waiting... ($i/60)" + sleep 2 + done -# # Create test database -# docker exec sqlserver-rhel9-arm64 \ -# /opt/mssql-tools18/bin/sqlcmd \ -# -S localhost \ -# -U SA \ -# -P "$(DB_PASSWORD)" \ -# -C -Q "CREATE DATABASE TestDB" -# displayName: 'Start SQL Server container for RHEL 9 ARM64' -# env: -# DB_PASSWORD: $(DB_PASSWORD) - -# - script: | -# # Install dependencies in the RHEL 9 ARM64 container -# docker exec test-container-rhel9-arm64 bash -c " -# # Enable CodeReady Builder repository for additional packages -# dnf update -y -# dnf install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm -# subscription-manager repos --enable codeready-builder-for-rhel-9-$(arch)-rpms || dnf config-manager --set-enabled ubi-9-codeready-builder + # Create test database + docker exec sqlserver-rhel9-arm64 \ + /opt/mssql-tools18/bin/sqlcmd \ + -S localhost \ + -U SA \ + -P "$(DB_PASSWORD)" \ + -C -Q "CREATE DATABASE TestDB" + displayName: 'Start SQL Server container for RHEL 9 ARM64' + env: + DB_PASSWORD: $(DB_PASSWORD) + + - script: | + # Install dependencies in the RHEL 9 ARM64 container + docker exec test-container-rhel9-arm64 bash -c " + # Enable CodeReady Builder repository for additional packages + dnf update -y + dnf install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm + 
subscription-manager repos --enable codeready-builder-for-rhel-9-$(arch)-rpms || dnf config-manager --set-enabled ubi-9-codeready-builder -# # Install Python 3.9 (available in RHEL 9 UBI) and development tools -# dnf install -y python3 python3-pip python3-devel cmake curl wget gnupg2 glibc-devel kernel-headers -# dnf install -y python3-libs python3-debug -# dnf install -y gcc gcc-c++ make binutils -# dnf install -y cmake -# # If that doesn't work, try installing from different repositories -# if ! which gcc; then -# echo 'Trying alternative gcc installation...' -# dnf --enablerepo=ubi-9-codeready-builder install -y gcc gcc-c++ -# fi -# # Verify installation and architecture -# python3 --version -# which gcc && which g++ -# gcc --version -# g++ --version -# uname -m -# echo 'Architecture:' \$(uname -m) -# " -# displayName: 'Install basic dependencies in RHEL 9 ARM64 container' - -# - script: | -# # Verify compiler installation and set environment for RHEL 9 ARM64 -# docker exec test-container-rhel9-arm64 bash -c " -# # Verify compilers are available -# which gcc || echo 'GCC not found' -# which g++ || echo 'G++ not found' -# gcc --version || echo 'GCC version check failed' -# g++ --version || echo 'G++ version check failed' + # Install Python 3.9 (available in RHEL 9 UBI) and development tools + dnf install -y python3 python3-pip python3-devel cmake curl wget gnupg2 glibc-devel kernel-headers + dnf install -y python3-libs python3-debug + dnf install -y gcc gcc-c++ make binutils + dnf install -y cmake + # If that doesn't work, try installing from different repositories + if ! which gcc; then + echo 'Trying alternative gcc installation...' 
+ dnf --enablerepo=ubi-9-codeready-builder install -y gcc gcc-c++ + fi + # Verify installation and architecture + python3 --version + which gcc && which g++ + gcc --version + g++ --version + uname -m + echo 'Architecture:' \$(uname -m) + " + displayName: 'Install basic dependencies in RHEL 9 ARM64 container' + + - script: | + # Verify compiler installation and set environment for RHEL 9 ARM64 + docker exec test-container-rhel9-arm64 bash -c " + # Verify compilers are available + which gcc || echo 'GCC not found' + which g++ || echo 'G++ not found' + gcc --version || echo 'GCC version check failed' + g++ --version || echo 'G++ version check failed' -# # Set compiler environment variables -# export CC=/usr/bin/gcc -# export CXX=/usr/bin/g++ -# echo 'CC set to:' \$CC -# echo 'CXX set to:' \$CXX -# echo 'Running on architecture:' \$(uname -m) + # Set compiler environment variables + export CC=/usr/bin/gcc + export CXX=/usr/bin/g++ + echo 'CC set to:' \$CC + echo 'CXX set to:' \$CXX + echo 'Running on architecture:' \$(uname -m) -# # Create a wrapper script to preserve environment -# cat > /workspace/setup_env.sh << 'EOF' -# #!/bin/bash -# export CC=/usr/bin/gcc -# export CXX=/usr/bin/g++ -# export PATH=/usr/bin:\$PATH -# exec \"\$@\" -# EOF -# chmod +x /workspace/setup_env.sh -# " -# displayName: 'Verify and configure compilers in RHEL 9 ARM64 container' - -# - script: | -# # Install ODBC driver in the RHEL 9 ARM64 container -# docker exec test-container-rhel9-arm64 bash -c " -# # Add Microsoft repository for RHEL 9 -# curl -sSL -o /etc/yum.repos.d/msprod.repo https://packages.microsoft.com/config/rhel/9/prod.repo + # Create a wrapper script to preserve environment + cat > /workspace/setup_env.sh << 'EOF' + #!/bin/bash + export CC=/usr/bin/gcc + export CXX=/usr/bin/g++ + export PATH=/usr/bin:\$PATH + exec \"\$@\" + EOF + chmod +x /workspace/setup_env.sh + " + displayName: 'Verify and configure compilers in RHEL 9 ARM64 container' + + - script: | + # Install ODBC driver 
in the RHEL 9 ARM64 container + docker exec test-container-rhel9-arm64 bash -c " + # Add Microsoft repository for RHEL 9 + curl -sSL -o /etc/yum.repos.d/msprod.repo https://packages.microsoft.com/config/rhel/9/prod.repo -# # Install the driver (ARM64 version) -# ACCEPT_EULA=Y dnf install -y msodbcsql18 -# # optional: for bcp and sqlcmd -# ACCEPT_EULA=Y dnf install -y mssql-tools18 -# # optional: for unixODBC development headers -# dnf install -y unixODBC-devel -# " -# displayName: 'Install ODBC Driver in RHEL 9 ARM64 container' - -# - script: | -# # Install Python dependencies in the container using virtual environment -# docker exec test-container-rhel9-arm64 bash -c " -# # Create a virtual environment with Python 3.9 -# python3 -m venv myvenv -# source myvenv/bin/activate - -# # Install dependencies in the virtual environment -# python -m pip install --upgrade pip -# python -m pip install -r requirements.txt + # Install the driver (ARM64 version) + ACCEPT_EULA=Y dnf install -y msodbcsql18 + # optional: for bcp and sqlcmd + ACCEPT_EULA=Y dnf install -y mssql-tools18 + # optional: for unixODBC development headers + dnf install -y unixODBC-devel + " + displayName: 'Install ODBC Driver in RHEL 9 ARM64 container' + + - script: | + # Install Python dependencies in the container using virtual environment + docker exec test-container-rhel9-arm64 bash -c " + # Create a virtual environment with Python 3.9 + python3 -m venv myvenv + source myvenv/bin/activate + + # Install dependencies in the virtual environment + python -m pip install --upgrade pip + python -m pip install -r requirements.txt -# # Make the virtual environment globally available -# echo 'source myvenv/bin/activate' >> ~/.bashrc -# " -# displayName: 'Install Python dependencies in RHEL 9 ARM64 container' - -# - script: | -# # Build pybind bindings in the ARM64 container -# docker exec test-container-rhel9-arm64 bash -c " -# source myvenv/bin/activate -# ls /usr/include/python3.9 -# # Set compiler environment 
variables -# export CC=/usr/bin/gcc -# export CXX=/usr/bin/g++ - -# cd mssql_python/pybind -# chmod +x build.sh -# ./build.sh -# " -# displayName: 'Build pybind bindings (.so) in RHEL 9 ARM64 container' - -# - script: | -# # Uninstall ODBC Driver before running tests -# docker exec test-container-rhel9-arm64 bash -c " -# dnf remove -y msodbcsql18 mssql-tools18 unixODBC-devel -# rm -f /usr/bin/sqlcmd -# rm -f /usr/bin/bcp -# rm -rf /opt/microsoft/msodbcsql -# rm -f /lib64/libodbcinst.so.2 -# odbcinst -u -d -n 'ODBC Driver 18 for SQL Server' || true -# echo 'Uninstalled ODBC Driver and cleaned up libraries' -# echo 'Verifying arm64 rhel driver library signatures:' -# ldd mssql_python/libs/linux/rhel/arm64/lib/libmsodbcsql-18.5.so.1.1 -# " -# displayName: 'Uninstall ODBC Driver before running tests in RHEL 9 ARM64 container' - -# - script: | -# # Run tests in the ARM64 container -# # Get SQL Server container IP -# SQLSERVER_IP=$(docker inspect sqlserver-rhel9-arm64 --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') -# echo "SQL Server IP: $SQLSERVER_IP" + # Make the virtual environment globally available + echo 'source myvenv/bin/activate' >> ~/.bashrc + " + displayName: 'Install Python dependencies in RHEL 9 ARM64 container' + + - script: | + # Build pybind bindings in the ARM64 container + docker exec test-container-rhel9-arm64 bash -c " + source myvenv/bin/activate + ls /usr/include/python3.9 + # Set compiler environment variables + export CC=/usr/bin/gcc + export CXX=/usr/bin/g++ + + cd mssql_python/pybind + chmod +x build.sh + ./build.sh + " + displayName: 'Build pybind bindings (.so) in RHEL 9 ARM64 container' + + - script: | + # Uninstall ODBC Driver before running tests + docker exec test-container-rhel9-arm64 bash -c " + dnf remove -y msodbcsql18 mssql-tools18 unixODBC-devel + rm -f /usr/bin/sqlcmd + rm -f /usr/bin/bcp + rm -rf /opt/microsoft/msodbcsql + rm -f /lib64/libodbcinst.so.2 + odbcinst -u -d -n 'ODBC Driver 18 for SQL Server' || 
true + echo 'Uninstalled ODBC Driver and cleaned up libraries' + echo 'Verifying arm64 rhel driver library signatures:' + ldd mssql_python/libs/linux/rhel/arm64/lib/libmsodbcsql-18.5.so.1.1 + " + displayName: 'Uninstall ODBC Driver before running tests in RHEL 9 ARM64 container' + + - script: | + # Run tests in the ARM64 container + # Get SQL Server container IP + SQLSERVER_IP=$(docker inspect sqlserver-rhel9-arm64 --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') + echo "SQL Server IP: $SQLSERVER_IP" -# docker exec \ -# -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ -# -e DB_PASSWORD="$(DB_PASSWORD)" \ -# test-container-rhel9-arm64 bash -c " -# source myvenv/bin/activate -# echo 'Build successful, running tests now on RHEL 9 ARM64' -# echo 'Architecture:' \$(uname -m) -# echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' -# python -m pytest -v --junitxml=test-results-rhel9-arm64.xml --cov=. 
--cov-report=xml:coverage-rhel9-arm64.xml --capture=tee-sys --cache-clear -# " -# displayName: 'Run pytest with coverage in RHEL 9 ARM64 container' -# env: -# DB_PASSWORD: $(DB_PASSWORD) - -# - script: | -# # Copy test results from container to host -# docker cp test-container-rhel9-arm64:/workspace/test-results-rhel9-arm64.xml $(Build.SourcesDirectory)/ -# docker cp test-container-rhel9-arm64:/workspace/coverage-rhel9-arm64.xml $(Build.SourcesDirectory)/ -# displayName: 'Copy test results from RHEL 9 ARM64 container' -# condition: always() - -# - script: | -# # Clean up containers -# docker stop test-container-rhel9-arm64 || true -# docker rm test-container-rhel9-arm64 || true -# docker stop sqlserver-rhel9-arm64 || true -# docker rm sqlserver-rhel9-arm64 || true -# displayName: 'Clean up RHEL 9 ARM64 containers' -# condition: always() - -# - task: PublishTestResults@2 -# condition: succeededOrFailed() -# inputs: -# testResultsFiles: '**/test-results-rhel9-arm64.xml' -# testRunTitle: 'Publish pytest results on RHEL 9 ARM64' - -# - job: PytestOnLinux_Alpine -# displayName: 'Linux Alpine x86_64' -# pool: -# vmImage: 'ubuntu-latest' - -# steps: -# - script: | -# # Set up Docker buildx for multi-architecture support -# docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -# docker buildx create --name multiarch --driver docker-container --use -# docker buildx inspect --bootstrap -# displayName: 'Setup Docker buildx for multi-architecture support' - -# - script: | -# # Create a Docker container for testing on x86_64 -# docker run -d --name test-container-alpine \ -# --platform linux/amd64 \ -# -v $(Build.SourcesDirectory):/workspace \ -# -w /workspace \ -# --network bridge \ -# alpine:latest \ -# tail -f /dev/null -# displayName: 'Create Alpine x86_64 container' - -# - script: | -# # Start SQL Server container (x86_64) -# docker run -d --name sqlserver-alpine \ -# --platform linux/amd64 \ -# -e ACCEPT_EULA=Y \ -# -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" 
\ -# -p 1433:1433 \ -# mcr.microsoft.com/mssql/server:2022-latest + docker exec \ + -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ + -e DB_PASSWORD="$(DB_PASSWORD)" \ + test-container-rhel9-arm64 bash -c " + source myvenv/bin/activate + echo 'Build successful, running tests now on RHEL 9 ARM64' + echo 'Architecture:' \$(uname -m) + echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' + python -m pytest -v --junitxml=test-results-rhel9-arm64.xml --cov=. --cov-report=xml:coverage-rhel9-arm64.xml --capture=tee-sys --cache-clear + " + displayName: 'Run pytest with coverage in RHEL 9 ARM64 container' + env: + DB_PASSWORD: $(DB_PASSWORD) + + - script: | + # Copy test results from container to host + docker cp test-container-rhel9-arm64:/workspace/test-results-rhel9-arm64.xml $(Build.SourcesDirectory)/ + docker cp test-container-rhel9-arm64:/workspace/coverage-rhel9-arm64.xml $(Build.SourcesDirectory)/ + displayName: 'Copy test results from RHEL 9 ARM64 container' + condition: always() + + - script: | + # Clean up containers + docker stop test-container-rhel9-arm64 || true + docker rm test-container-rhel9-arm64 || true + docker stop sqlserver-rhel9-arm64 || true + docker rm sqlserver-rhel9-arm64 || true + displayName: 'Clean up RHEL 9 ARM64 containers' + condition: always() + + - task: PublishTestResults@2 + condition: succeededOrFailed() + inputs: + testResultsFiles: '**/test-results-rhel9-arm64.xml' + testRunTitle: 'Publish pytest results on RHEL 9 ARM64' + +- job: PytestOnLinux_Alpine + displayName: 'Linux Alpine x86_64' + pool: + vmImage: 'ubuntu-latest' + + steps: + - script: | + # Set up Docker buildx for multi-architecture support + docker run --rm --privileged multiarch/qemu-user-static --reset -p yes + docker buildx create --name multiarch --driver 
docker-container --use + docker buildx inspect --bootstrap + displayName: 'Setup Docker buildx for multi-architecture support' + + - script: | + # Create a Docker container for testing on x86_64 + docker run -d --name test-container-alpine \ + --platform linux/amd64 \ + -v $(Build.SourcesDirectory):/workspace \ + -w /workspace \ + --network bridge \ + alpine:latest \ + tail -f /dev/null + displayName: 'Create Alpine x86_64 container' + + - script: | + # Start SQL Server container (x86_64) + docker run -d --name sqlserver-alpine \ + --platform linux/amd64 \ + -e ACCEPT_EULA=Y \ + -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ + -p 1433:1433 \ + mcr.microsoft.com/mssql/server:2022-latest -# # Wait for SQL Server to be ready -# echo "Waiting for SQL Server to start..." -# for i in {1..60}; do -# if docker exec sqlserver-alpine \ -# /opt/mssql-tools18/bin/sqlcmd \ -# -S localhost \ -# -U SA \ -# -P "$(DB_PASSWORD)" \ -# -C -Q "SELECT 1" >/dev/null 2>&1; then -# echo "SQL Server is ready!" -# break -# fi -# echo "Waiting... ($i/60)" -# sleep 2 -# done + # Wait for SQL Server to be ready + echo "Waiting for SQL Server to start..." + for i in {1..60}; do + if docker exec sqlserver-alpine \ + /opt/mssql-tools18/bin/sqlcmd \ + -S localhost \ + -U SA \ + -P "$(DB_PASSWORD)" \ + -C -Q "SELECT 1" >/dev/null 2>&1; then + echo "SQL Server is ready!" + break + fi + echo "Waiting... 
($i/60)" + sleep 2 + done -# # Create test database -# docker exec sqlserver-alpine \ -# /opt/mssql-tools18/bin/sqlcmd \ -# -S localhost \ -# -U SA \ -# -P "$(DB_PASSWORD)" \ -# -C -Q "CREATE DATABASE TestDB" -# displayName: 'Start SQL Server container for Alpine x86_64' -# env: -# DB_PASSWORD: $(DB_PASSWORD) - -# - script: | -# # Install dependencies in the Alpine x86_64 container -# docker exec test-container-alpine sh -c " -# # Update package index -# apk update + # Create test database + docker exec sqlserver-alpine \ + /opt/mssql-tools18/bin/sqlcmd \ + -S localhost \ + -U SA \ + -P "$(DB_PASSWORD)" \ + -C -Q "CREATE DATABASE TestDB" + displayName: 'Start SQL Server container for Alpine x86_64' + env: + DB_PASSWORD: $(DB_PASSWORD) + + - script: | + # Install dependencies in the Alpine x86_64 container + docker exec test-container-alpine sh -c " + # Update package index + apk update -# # Install build tools and system dependencies -# apk add --no-cache \ -# build-base \ -# cmake \ -# clang \ -# git \ -# bash \ -# wget \ -# curl \ -# gnupg \ -# unixodbc \ -# unixodbc-dev \ -# libffi-dev \ -# openssl-dev \ -# zlib-dev \ -# py3-pip \ -# python3-dev \ -# patchelf + # Install build tools and system dependencies + apk add --no-cache \ + build-base \ + cmake \ + clang \ + git \ + bash \ + wget \ + curl \ + gnupg \ + unixodbc \ + unixodbc-dev \ + libffi-dev \ + openssl-dev \ + zlib-dev \ + py3-pip \ + python3-dev \ + patchelf -# # Create symlinks for Python compatibility -# ln -sf python3 /usr/bin/python || true -# ln -sf pip3 /usr/bin/pip || true + # Create symlinks for Python compatibility + ln -sf python3 /usr/bin/python || true + ln -sf pip3 /usr/bin/pip || true -# # Verify installation and architecture -# uname -m -# python --version -# which cmake -# " -# displayName: 'Install basic dependencies in Alpine x86_64 container' - -# - script: | -# # Install ODBC driver in the Alpine x86_64 container -# docker exec test-container-alpine bash -c " -# # Detect 
architecture for ODBC driver download -# case \$(uname -m) in -# x86_64) architecture='amd64' ;; -# arm64|aarch64) architecture='arm64' ;; -# *) architecture='unsupported' ;; -# esac + # Verify installation and architecture + uname -m + python --version + which cmake + " + displayName: 'Install basic dependencies in Alpine x86_64 container' + + - script: | + # Install ODBC driver in the Alpine x86_64 container + docker exec test-container-alpine bash -c " + # Detect architecture for ODBC driver download + case \$(uname -m) in + x86_64) architecture='amd64' ;; + arm64|aarch64) architecture='arm64' ;; + *) architecture='unsupported' ;; + esac -# if [[ 'unsupported' == '\$architecture' ]]; then -# echo 'Alpine architecture \$(uname -m) is not currently supported.' -# exit 1 -# fi + if [[ 'unsupported' == '\$architecture' ]]; then + echo 'Alpine architecture \$(uname -m) is not currently supported.' + exit 1 + fi -# echo 'Detected architecture: '\$architecture + echo 'Detected architecture: '\$architecture -# # Download the packages -# curl -O https://download.microsoft.com/download/fae28b9a-d880-42fd-9b98-d779f0fdd77f/msodbcsql18_18.5.1.1-1_\$architecture.apk -# curl -O https://download.microsoft.com/download/7/6d/76de322a-d860-4894-9945-f0cc5d6a45f8/mssql-tools18_18.4.1.1-1_\$architecture.apk + # Download the packages + curl -O https://download.microsoft.com/download/fae28b9a-d880-42fd-9b98-d779f0fdd77f/msodbcsql18_18.5.1.1-1_\$architecture.apk + curl -O https://download.microsoft.com/download/7/6d/76de322a-d860-4894-9945-f0cc5d6a45f8/mssql-tools18_18.4.1.1-1_\$architecture.apk -# # Download signatures for verification -# curl -O https://download.microsoft.com/download/fae28b9a-d880-42fd-9b98-d779f0fdd77f/msodbcsql18_18.5.1.1-1_\$architecture.sig -# curl -O https://download.microsoft.com/download/7/6d/76de322a-d860-4894-9945-f0cc5d6a45f8/mssql-tools18_18.4.1.1-1_\$architecture.sig + # Download signatures for verification + curl -O 
https://download.microsoft.com/download/fae28b9a-d880-42fd-9b98-d779f0fdd77f/msodbcsql18_18.5.1.1-1_\$architecture.sig + curl -O https://download.microsoft.com/download/7/6d/76de322a-d860-4894-9945-f0cc5d6a45f8/mssql-tools18_18.4.1.1-1_\$architecture.sig -# # Import Microsoft GPG key and verify packages -# curl https://packages.microsoft.com/keys/microsoft.asc | gpg --import - -# gpg --verify msodbcsql18_18.5.1.1-1_\$architecture.sig msodbcsql18_18.5.1.1-1_\$architecture.apk -# gpg --verify mssql-tools18_18.4.1.1-1_\$architecture.sig mssql-tools18_18.4.1.1-1_\$architecture.apk + # Import Microsoft GPG key and verify packages + curl https://packages.microsoft.com/keys/microsoft.asc | gpg --import - + gpg --verify msodbcsql18_18.5.1.1-1_\$architecture.sig msodbcsql18_18.5.1.1-1_\$architecture.apk + gpg --verify mssql-tools18_18.4.1.1-1_\$architecture.sig mssql-tools18_18.4.1.1-1_\$architecture.apk -# # Install the packages -# apk add --allow-untrusted msodbcsql18_18.5.1.1-1_\$architecture.apk -# apk add --allow-untrusted mssql-tools18_18.4.1.1-1_\$architecture.apk + # Install the packages + apk add --allow-untrusted msodbcsql18_18.5.1.1-1_\$architecture.apk + apk add --allow-untrusted mssql-tools18_18.4.1.1-1_\$architecture.apk -# # Cleanup -# rm -f msodbcsql18_18.5.1.1-1_\$architecture.* mssql-tools18_18.4.1.1-1_\$architecture.* + # Cleanup + rm -f msodbcsql18_18.5.1.1-1_\$architecture.* mssql-tools18_18.4.1.1-1_\$architecture.* -# # Add mssql-tools to PATH -# export PATH=\"\$PATH:/opt/mssql-tools18/bin\" -# echo 'export PATH=\"\$PATH:/opt/mssql-tools18/bin\"' >> ~/.bashrc -# " -# displayName: 'Install ODBC Driver in Alpine x86_64 container' - -# - script: | -# # Install Python dependencies in the Alpine x86_64 container using virtual environment -# docker exec test-container-alpine bash -c " -# # Create virtual environment -# python -m venv /workspace/venv + # Add mssql-tools to PATH + export PATH=\"\$PATH:/opt/mssql-tools18/bin\" + echo 'export 
PATH=\"\$PATH:/opt/mssql-tools18/bin\"' >> ~/.bashrc + " + displayName: 'Install ODBC Driver in Alpine x86_64 container' + + - script: | + # Install Python dependencies in the Alpine x86_64 container using virtual environment + docker exec test-container-alpine bash -c " + # Create virtual environment + python -m venv /workspace/venv -# # Activate virtual environment and install dependencies -# source /workspace/venv/bin/activate + # Activate virtual environment and install dependencies + source /workspace/venv/bin/activate -# # Upgrade pip and install dependencies -# python -m pip install --upgrade pip -# python -m pip install -r requirements.txt + # Upgrade pip and install dependencies + python -m pip install --upgrade pip + python -m pip install -r requirements.txt -# # Verify virtual environment is active -# which python -# which pip -# " -# displayName: 'Install Python dependencies in Alpine x86_64 container' - -# - script: | -# # Build pybind bindings in the Alpine x86_64 container -# docker exec test-container-alpine bash -c " -# # Activate virtual environment -# source /workspace/venv/bin/activate + # Verify virtual environment is active + which python + which pip + " + displayName: 'Install Python dependencies in Alpine x86_64 container' + + - script: | + # Build pybind bindings in the Alpine x86_64 container + docker exec test-container-alpine bash -c " + # Activate virtual environment + source /workspace/venv/bin/activate -# cd mssql_python/pybind -# chmod +x build.sh -# ./build.sh -# " -# displayName: 'Build pybind bindings (.so) in Alpine x86_64 container' - -# - script: | -# # Uninstall ODBC Driver before running tests to use bundled libraries -# docker exec test-container-alpine bash -c " -# # Remove system ODBC installation -# apk del msodbcsql18 mssql-tools18 unixodbc-dev || echo 'ODBC packages not installed via apk' -# rm -f /usr/bin/sqlcmd -# rm -f /usr/bin/bcp -# rm -rf /opt/microsoft/msodbcsql18 -# rm -f /usr/lib/libodbcinst.so.2 -# odbcinst -u 
-d -n 'ODBC Driver 18 for SQL Server' || true -# echo 'Uninstalled system ODBC Driver and cleaned up libraries' -# echo 'Verifying x86_64 alpine driver library signatures:' -# ldd mssql_python/libs/linux/alpine/x86_64/lib/libmsodbcsql-18.5.so.1.1 || echo 'Driver library not found' -# " -# displayName: 'Uninstall system ODBC Driver before running tests in Alpine x86_64 container' - -# - script: | -# # Run tests in the Alpine x86_64 container -# # Get SQL Server container IP -# SQLSERVER_IP=$(docker inspect sqlserver-alpine --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') -# echo "SQL Server IP: $SQLSERVER_IP" + cd mssql_python/pybind + chmod +x build.sh + ./build.sh + " + displayName: 'Build pybind bindings (.so) in Alpine x86_64 container' + + - script: | + # Uninstall ODBC Driver before running tests to use bundled libraries + docker exec test-container-alpine bash -c " + # Remove system ODBC installation + apk del msodbcsql18 mssql-tools18 unixodbc-dev || echo 'ODBC packages not installed via apk' + rm -f /usr/bin/sqlcmd + rm -f /usr/bin/bcp + rm -rf /opt/microsoft/msodbcsql18 + rm -f /usr/lib/libodbcinst.so.2 + odbcinst -u -d -n 'ODBC Driver 18 for SQL Server' || true + echo 'Uninstalled system ODBC Driver and cleaned up libraries' + echo 'Verifying x86_64 alpine driver library signatures:' + ldd mssql_python/libs/linux/alpine/x86_64/lib/libmsodbcsql-18.5.so.1.1 || echo 'Driver library not found' + " + displayName: 'Uninstall system ODBC Driver before running tests in Alpine x86_64 container' + + - script: | + # Run tests in the Alpine x86_64 container + # Get SQL Server container IP + SQLSERVER_IP=$(docker inspect sqlserver-alpine --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') + echo "SQL Server IP: $SQLSERVER_IP" -# docker exec \ -# -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ -# -e DB_PASSWORD="$(DB_PASSWORD)" 
\ -# test-container-alpine bash -c " -# echo 'Build successful, running tests now on Alpine x86_64' -# echo 'Architecture:' \$(uname -m) -# echo 'Alpine version:' \$(cat /etc/alpine-release) -# echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' + docker exec \ + -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ + -e DB_PASSWORD="$(DB_PASSWORD)" \ + test-container-alpine bash -c " + echo 'Build successful, running tests now on Alpine x86_64' + echo 'Architecture:' \$(uname -m) + echo 'Alpine version:' \$(cat /etc/alpine-release) + echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' -# # Activate virtual environment -# source /workspace/venv/bin/activate + # Activate virtual environment + source /workspace/venv/bin/activate -# # Test basic Python import first -# python -c 'import mssql_python; print(\"mssql_python imported successfully\")' + # Test basic Python import first + python -c 'import mssql_python; print(\"mssql_python imported successfully\")' -# # Run main.py if it exists -# if [ -f main.py ]; then -# echo 'Running main.py...' -# python main.py -# fi + # Run main.py if it exists + if [ -f main.py ]; then + echo 'Running main.py...' + python main.py + fi -# # Run pytest -# python -m pytest -v --junitxml=test-results-alpine.xml --cov=. 
--cov-report=xml:coverage-alpine.xml --capture=tee-sys --cache-clear -# " -# displayName: 'Run pytest with coverage in Alpine x86_64 container' -# env: -# DB_PASSWORD: $(DB_PASSWORD) - -# - script: | -# # Copy test results from container to host -# docker cp test-container-alpine:/workspace/test-results-alpine.xml $(Build.SourcesDirectory)/ || echo 'Failed to copy test results' -# docker cp test-container-alpine:/workspace/coverage-alpine.xml $(Build.SourcesDirectory)/ || echo 'Failed to copy coverage results' -# displayName: 'Copy test results from Alpine x86_64 container' -# condition: always() - -# - script: | -# # Clean up containers -# docker stop test-container-alpine || true -# docker rm test-container-alpine || true -# docker stop sqlserver-alpine || true -# docker rm sqlserver-alpine || true -# displayName: 'Clean up Alpine x86_64 containers' -# condition: always() - -# - task: PublishTestResults@2 -# condition: succeededOrFailed() -# inputs: -# testResultsFiles: '**/test-results-alpine.xml' -# testRunTitle: 'Publish pytest results on Alpine x86_64' - -# - job: PytestOnLinux_Alpine_ARM64 -# displayName: 'Linux Alpine ARM64' -# pool: -# vmImage: 'ubuntu-latest' - -# steps: -# - script: | -# # Set up Docker buildx for multi-architecture support -# docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -# docker buildx create --name multiarch --driver docker-container --use -# docker buildx inspect --bootstrap -# displayName: 'Setup Docker buildx for ARM64 emulation' - -# - script: | -# # Create a Docker container for testing on ARM64 -# docker run -d --name test-container-alpine-arm64 \ -# --platform linux/arm64 \ -# -v $(Build.SourcesDirectory):/workspace \ -# -w /workspace \ -# --network bridge \ -# alpine:latest \ -# tail -f /dev/null -# displayName: 'Create Alpine ARM64 container' - -# - script: | -# # Start SQL Server container (x86_64 - SQL Server doesn't support ARM64) -# docker run -d --name sqlserver-alpine-arm64 \ -# --platform 
linux/amd64 \ -# -e ACCEPT_EULA=Y \ -# -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ -# -p 1433:1433 \ -# mcr.microsoft.com/mssql/server:2022-latest + # Run pytest + python -m pytest -v --junitxml=test-results-alpine.xml --cov=. --cov-report=xml:coverage-alpine.xml --capture=tee-sys --cache-clear + " + displayName: 'Run pytest with coverage in Alpine x86_64 container' + env: + DB_PASSWORD: $(DB_PASSWORD) + + - script: | + # Copy test results from container to host + docker cp test-container-alpine:/workspace/test-results-alpine.xml $(Build.SourcesDirectory)/ || echo 'Failed to copy test results' + docker cp test-container-alpine:/workspace/coverage-alpine.xml $(Build.SourcesDirectory)/ || echo 'Failed to copy coverage results' + displayName: 'Copy test results from Alpine x86_64 container' + condition: always() + + - script: | + # Clean up containers + docker stop test-container-alpine || true + docker rm test-container-alpine || true + docker stop sqlserver-alpine || true + docker rm sqlserver-alpine || true + displayName: 'Clean up Alpine x86_64 containers' + condition: always() + + - task: PublishTestResults@2 + condition: succeededOrFailed() + inputs: + testResultsFiles: '**/test-results-alpine.xml' + testRunTitle: 'Publish pytest results on Alpine x86_64' + +- job: PytestOnLinux_Alpine_ARM64 + displayName: 'Linux Alpine ARM64' + pool: + vmImage: 'ubuntu-latest' + + steps: + - script: | + # Set up Docker buildx for multi-architecture support + docker run --rm --privileged multiarch/qemu-user-static --reset -p yes + docker buildx create --name multiarch --driver docker-container --use + docker buildx inspect --bootstrap + displayName: 'Setup Docker buildx for ARM64 emulation' + + - script: | + # Create a Docker container for testing on ARM64 + docker run -d --name test-container-alpine-arm64 \ + --platform linux/arm64 \ + -v $(Build.SourcesDirectory):/workspace \ + -w /workspace \ + --network bridge \ + alpine:latest \ + tail -f /dev/null + displayName: 'Create Alpine 
ARM64 container' + + - script: | + # Start SQL Server container (x86_64 - SQL Server doesn't support ARM64) + docker run -d --name sqlserver-alpine-arm64 \ + --platform linux/amd64 \ + -e ACCEPT_EULA=Y \ + -e MSSQL_SA_PASSWORD="$(DB_PASSWORD)" \ + -p 1433:1433 \ + mcr.microsoft.com/mssql/server:2022-latest -# # Wait for SQL Server to be ready -# echo "Waiting for SQL Server to start..." -# for i in {1..60}; do -# if docker exec sqlserver-alpine-arm64 \ -# /opt/mssql-tools18/bin/sqlcmd \ -# -S localhost \ -# -U SA \ -# -P "$(DB_PASSWORD)" \ -# -C -Q "SELECT 1" >/dev/null 2>&1; then -# echo "SQL Server is ready!" -# break -# fi -# echo "Waiting... ($i/60)" -# sleep 2 -# done + # Wait for SQL Server to be ready + echo "Waiting for SQL Server to start..." + for i in {1..60}; do + if docker exec sqlserver-alpine-arm64 \ + /opt/mssql-tools18/bin/sqlcmd \ + -S localhost \ + -U SA \ + -P "$(DB_PASSWORD)" \ + -C -Q "SELECT 1" >/dev/null 2>&1; then + echo "SQL Server is ready!" + break + fi + echo "Waiting... 
($i/60)" + sleep 2 + done -# # Create test database -# docker exec sqlserver-alpine-arm64 \ -# /opt/mssql-tools18/bin/sqlcmd \ -# -S localhost \ -# -U SA \ -# -P "$(DB_PASSWORD)" \ -# -C -Q "CREATE DATABASE TestDB" -# displayName: 'Start SQL Server container for Alpine ARM64' -# env: -# DB_PASSWORD: $(DB_PASSWORD) - -# - script: | -# # Install dependencies in the Alpine ARM64 container -# docker exec test-container-alpine-arm64 sh -c " -# # Update package index -# apk update + # Create test database + docker exec sqlserver-alpine-arm64 \ + /opt/mssql-tools18/bin/sqlcmd \ + -S localhost \ + -U SA \ + -P "$(DB_PASSWORD)" \ + -C -Q "CREATE DATABASE TestDB" + displayName: 'Start SQL Server container for Alpine ARM64' + env: + DB_PASSWORD: $(DB_PASSWORD) + + - script: | + # Install dependencies in the Alpine ARM64 container + docker exec test-container-alpine-arm64 sh -c " + # Update package index + apk update -# # Install build tools and system dependencies -# apk add --no-cache \ -# build-base \ -# cmake \ -# clang \ -# git \ -# bash \ -# wget \ -# curl \ -# gnupg \ -# unixodbc \ -# unixodbc-dev \ -# libffi-dev \ -# openssl-dev \ -# zlib-dev \ -# py3-pip \ -# python3-dev \ -# patchelf + # Install build tools and system dependencies + apk add --no-cache \ + build-base \ + cmake \ + clang \ + git \ + bash \ + wget \ + curl \ + gnupg \ + unixodbc \ + unixodbc-dev \ + libffi-dev \ + openssl-dev \ + zlib-dev \ + py3-pip \ + python3-dev \ + patchelf -# # Create symlinks for Python compatibility -# ln -sf python3 /usr/bin/python || true -# ln -sf pip3 /usr/bin/pip || true + # Create symlinks for Python compatibility + ln -sf python3 /usr/bin/python || true + ln -sf pip3 /usr/bin/pip || true -# # Verify installation and architecture -# uname -m -# python --version -# which cmake -# " -# displayName: 'Install basic dependencies in Alpine ARM64 container' - -# - script: | -# # Install ODBC driver in the Alpine ARM64 container -# docker exec test-container-alpine-arm64 bash -c " 
-# # Detect architecture for ODBC driver download -# case \$(uname -m) in -# x86_64) architecture='amd64' ;; -# arm64|aarch64) architecture='arm64' ;; -# *) architecture='unsupported' ;; -# esac + # Verify installation and architecture + uname -m + python --version + which cmake + " + displayName: 'Install basic dependencies in Alpine ARM64 container' + + - script: | + # Install ODBC driver in the Alpine ARM64 container + docker exec test-container-alpine-arm64 bash -c " + # Detect architecture for ODBC driver download + case \$(uname -m) in + x86_64) architecture='amd64' ;; + arm64|aarch64) architecture='arm64' ;; + *) architecture='unsupported' ;; + esac -# if [[ 'unsupported' == '\$architecture' ]]; then -# echo 'Alpine architecture \$(uname -m) is not currently supported.' -# exit 1 -# fi + if [[ 'unsupported' == '\$architecture' ]]; then + echo 'Alpine architecture \$(uname -m) is not currently supported.' + exit 1 + fi -# echo 'Detected architecture: '\$architecture + echo 'Detected architecture: '\$architecture -# # Download the packages -# curl -O https://download.microsoft.com/download/fae28b9a-d880-42fd-9b98-d779f0fdd77f/msodbcsql18_18.5.1.1-1_\$architecture.apk -# curl -O https://download.microsoft.com/download/7/6d/76de322a-d860-4894-9945-f0cc5d6a45f8/mssql-tools18_18.4.1.1-1_\$architecture.apk + # Download the packages + curl -O https://download.microsoft.com/download/fae28b9a-d880-42fd-9b98-d779f0fdd77f/msodbcsql18_18.5.1.1-1_\$architecture.apk + curl -O https://download.microsoft.com/download/7/6d/76de322a-d860-4894-9945-f0cc5d6a45f8/mssql-tools18_18.4.1.1-1_\$architecture.apk -# # Download signatures for verification -# curl -O https://download.microsoft.com/download/fae28b9a-d880-42fd-9b98-d779f0fdd77f/msodbcsql18_18.5.1.1-1_\$architecture.sig -# curl -O https://download.microsoft.com/download/7/6d/76de322a-d860-4894-9945-f0cc5d6a45f8/mssql-tools18_18.4.1.1-1_\$architecture.sig + # Download signatures for verification + curl -O 
https://download.microsoft.com/download/fae28b9a-d880-42fd-9b98-d779f0fdd77f/msodbcsql18_18.5.1.1-1_\$architecture.sig + curl -O https://download.microsoft.com/download/7/6d/76de322a-d860-4894-9945-f0cc5d6a45f8/mssql-tools18_18.4.1.1-1_\$architecture.sig -# # Import Microsoft GPG key and verify packages -# curl https://packages.microsoft.com/keys/microsoft.asc | gpg --import - -# gpg --verify msodbcsql18_18.5.1.1-1_\$architecture.sig msodbcsql18_18.5.1.1-1_\$architecture.apk -# gpg --verify mssql-tools18_18.4.1.1-1_\$architecture.sig mssql-tools18_18.4.1.1-1_\$architecture.apk + # Import Microsoft GPG key and verify packages + curl https://packages.microsoft.com/keys/microsoft.asc | gpg --import - + gpg --verify msodbcsql18_18.5.1.1-1_\$architecture.sig msodbcsql18_18.5.1.1-1_\$architecture.apk + gpg --verify mssql-tools18_18.4.1.1-1_\$architecture.sig mssql-tools18_18.4.1.1-1_\$architecture.apk -# # Install the packages -# apk add --allow-untrusted msodbcsql18_18.5.1.1-1_\$architecture.apk -# apk add --allow-untrusted mssql-tools18_18.4.1.1-1_\$architecture.apk + # Install the packages + apk add --allow-untrusted msodbcsql18_18.5.1.1-1_\$architecture.apk + apk add --allow-untrusted mssql-tools18_18.4.1.1-1_\$architecture.apk -# # Cleanup -# rm -f msodbcsql18_18.5.1.1-1_\$architecture.* mssql-tools18_18.4.1.1-1_\$architecture.* + # Cleanup + rm -f msodbcsql18_18.5.1.1-1_\$architecture.* mssql-tools18_18.4.1.1-1_\$architecture.* -# # Add mssql-tools to PATH -# export PATH=\"\$PATH:/opt/mssql-tools18/bin\" -# echo 'export PATH=\"\$PATH:/opt/mssql-tools18/bin\"' >> ~/.bashrc -# " -# displayName: 'Install ODBC Driver in Alpine ARM64 container' - -# - script: | -# # Install Python dependencies in the Alpine ARM64 container using virtual environment -# docker exec test-container-alpine-arm64 bash -c " -# # Create virtual environment -# python -m venv /workspace/venv + # Add mssql-tools to PATH + export PATH=\"\$PATH:/opt/mssql-tools18/bin\" + echo 'export 
PATH=\"\$PATH:/opt/mssql-tools18/bin\"' >> ~/.bashrc + " + displayName: 'Install ODBC Driver in Alpine ARM64 container' + + - script: | + # Install Python dependencies in the Alpine ARM64 container using virtual environment + docker exec test-container-alpine-arm64 bash -c " + # Create virtual environment + python -m venv /workspace/venv -# # Activate virtual environment and install dependencies -# source /workspace/venv/bin/activate + # Activate virtual environment and install dependencies + source /workspace/venv/bin/activate -# # Upgrade pip and install dependencies -# python -m pip install --upgrade pip -# python -m pip install -r requirements.txt + # Upgrade pip and install dependencies + python -m pip install --upgrade pip + python -m pip install -r requirements.txt -# # Verify virtual environment is active -# which python -# which pip -# " -# displayName: 'Install Python dependencies in Alpine ARM64 container' - -# - script: | -# # Build pybind bindings in the Alpine ARM64 container -# docker exec test-container-alpine-arm64 bash -c " -# # Activate virtual environment -# source /workspace/venv/bin/activate + # Verify virtual environment is active + which python + which pip + " + displayName: 'Install Python dependencies in Alpine ARM64 container' + + - script: | + # Build pybind bindings in the Alpine ARM64 container + docker exec test-container-alpine-arm64 bash -c " + # Activate virtual environment + source /workspace/venv/bin/activate -# cd mssql_python/pybind -# chmod +x build.sh -# ./build.sh -# " -# displayName: 'Build pybind bindings (.so) in Alpine ARM64 container' - -# - script: | -# # Uninstall ODBC Driver before running tests to use bundled libraries -# docker exec test-container-alpine-arm64 bash -c " -# # Remove system ODBC installation -# apk del msodbcsql18 mssql-tools18 unixodbc-dev || echo 'ODBC packages not installed via apk' -# rm -f /usr/bin/sqlcmd -# rm -f /usr/bin/bcp -# rm -rf /opt/microsoft/msodbcsql18 -# rm -f 
/usr/lib/libodbcinst.so.2 -# odbcinst -u -d -n 'ODBC Driver 18 for SQL Server' || true -# echo 'Uninstalled system ODBC Driver and cleaned up libraries' -# echo 'Verifying arm64 alpine driver library signatures:' -# ldd mssql_python/libs/linux/alpine/arm64/lib/libmsodbcsql-18.5.so.1.1 || echo 'Driver library not found' -# " -# displayName: 'Uninstall system ODBC Driver before running tests in Alpine ARM64 container' - -# - script: | -# # Run tests in the Alpine ARM64 container -# # Get SQL Server container IP -# SQLSERVER_IP=$(docker inspect sqlserver-alpine-arm64 --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') -# echo "SQL Server IP: $SQLSERVER_IP" + cd mssql_python/pybind + chmod +x build.sh + ./build.sh + " + displayName: 'Build pybind bindings (.so) in Alpine ARM64 container' + + - script: | + # Uninstall ODBC Driver before running tests to use bundled libraries + docker exec test-container-alpine-arm64 bash -c " + # Remove system ODBC installation + apk del msodbcsql18 mssql-tools18 unixodbc-dev || echo 'ODBC packages not installed via apk' + rm -f /usr/bin/sqlcmd + rm -f /usr/bin/bcp + rm -rf /opt/microsoft/msodbcsql18 + rm -f /usr/lib/libodbcinst.so.2 + odbcinst -u -d -n 'ODBC Driver 18 for SQL Server' || true + echo 'Uninstalled system ODBC Driver and cleaned up libraries' + echo 'Verifying arm64 alpine driver library signatures:' + ldd mssql_python/libs/linux/alpine/arm64/lib/libmsodbcsql-18.5.so.1.1 || echo 'Driver library not found' + " + displayName: 'Uninstall system ODBC Driver before running tests in Alpine ARM64 container' + + - script: | + # Run tests in the Alpine ARM64 container + # Get SQL Server container IP + SQLSERVER_IP=$(docker inspect sqlserver-alpine-arm64 --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}') + echo "SQL Server IP: $SQLSERVER_IP" -# docker exec \ -# -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL 
Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ -# -e DB_PASSWORD="$(DB_PASSWORD)" \ -# test-container-alpine-arm64 bash -c " -# echo 'Build successful, running tests now on Alpine ARM64' -# echo 'Architecture:' \$(uname -m) -# echo 'Alpine version:' \$(cat /etc/alpine-release) -# echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' + docker exec \ + -e DB_CONNECTION_STRING="Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=$(DB_PASSWORD);TrustServerCertificate=yes" \ + -e DB_PASSWORD="$(DB_PASSWORD)" \ + test-container-alpine-arm64 bash -c " + echo 'Build successful, running tests now on Alpine ARM64' + echo 'Architecture:' \$(uname -m) + echo 'Alpine version:' \$(cat /etc/alpine-release) + echo 'Using connection string: Driver=ODBC Driver 18 for SQL Server;Server=$SQLSERVER_IP;Database=TestDB;Uid=SA;Pwd=***;TrustServerCertificate=yes' -# # Activate virtual environment -# source /workspace/venv/bin/activate + # Activate virtual environment + source /workspace/venv/bin/activate -# # Test basic Python import first -# python -c 'import mssql_python; print(\"mssql_python imported successfully\")' + # Test basic Python import first + python -c 'import mssql_python; print(\"mssql_python imported successfully\")' -# # Run main.py if it exists -# if [ -f main.py ]; then -# echo 'Running main.py...' -# python main.py -# fi + # Run main.py if it exists + if [ -f main.py ]; then + echo 'Running main.py...' + python main.py + fi -# # Run pytest -# python -m pytest -v --junitxml=test-results-alpine-arm64.xml --cov=. 
--cov-report=xml:coverage-alpine-arm64.xml --capture=tee-sys --cache-clear -# " -# displayName: 'Run pytest with coverage in Alpine ARM64 container' -# env: -# DB_PASSWORD: $(DB_PASSWORD) - -# - script: | -# # Copy test results from container to host -# docker cp test-container-alpine-arm64:/workspace/test-results-alpine-arm64.xml $(Build.SourcesDirectory)/ || echo 'Failed to copy test results' -# docker cp test-container-alpine-arm64:/workspace/coverage-alpine-arm64.xml $(Build.SourcesDirectory)/ || echo 'Failed to copy coverage results' -# displayName: 'Copy test results from Alpine ARM64 container' -# condition: always() - -# - script: | -# # Clean up containers -# docker stop test-container-alpine-arm64 || true -# docker rm test-container-alpine-arm64 || true -# docker stop sqlserver-alpine-arm64 || true -# docker rm sqlserver-alpine-arm64 || true -# displayName: 'Clean up Alpine ARM64 containers' -# condition: always() - -# - task: PublishTestResults@2 -# condition: succeededOrFailed() -# inputs: -# testResultsFiles: '**/test-results-alpine-arm64.xml' -# testRunTitle: 'Publish pytest results on Alpine ARM64' + # Run pytest + python -m pytest -v --junitxml=test-results-alpine-arm64.xml --cov=. 
--cov-report=xml:coverage-alpine-arm64.xml --capture=tee-sys --cache-clear + " + displayName: 'Run pytest with coverage in Alpine ARM64 container' + env: + DB_PASSWORD: $(DB_PASSWORD) + + - script: | + # Copy test results from container to host + docker cp test-container-alpine-arm64:/workspace/test-results-alpine-arm64.xml $(Build.SourcesDirectory)/ || echo 'Failed to copy test results' + docker cp test-container-alpine-arm64:/workspace/coverage-alpine-arm64.xml $(Build.SourcesDirectory)/ || echo 'Failed to copy coverage results' + displayName: 'Copy test results from Alpine ARM64 container' + condition: always() + + - script: | + # Clean up containers + docker stop test-container-alpine-arm64 || true + docker rm test-container-alpine-arm64 || true + docker stop sqlserver-alpine-arm64 || true + docker rm sqlserver-alpine-arm64 || true + displayName: 'Clean up Alpine ARM64 containers' + condition: always() + + - task: PublishTestResults@2 + condition: succeededOrFailed() + inputs: + testResultsFiles: '**/test-results-alpine-arm64.xml' + testRunTitle: 'Publish pytest results on Alpine ARM64' diff --git a/main.py b/main.py index 0a4370c14..b45b88d73 100644 --- a/main.py +++ b/main.py @@ -1,4 +1,3 @@ -import pytest from mssql_python import connect from mssql_python import setup_logging import os @@ -6,56 +5,17 @@ setup_logging('stdout') -# conn_str = os.getenv("DB_CONNECTION_STRING") -conn_str = "Server=Saumya;DATABASE=master;UID=sa;PWD=HappyPass1234;Trust_Connection=yes;TrustServerCertificate=yes;" - +conn_str = os.getenv("DB_CONNECTION_STRING") conn = connect(conn_str) -cursor = conn.cursor() - -test_inputs = [ -"Hello 😄", -"Flags 🇮🇳🇺🇸", -"Family 👨‍👩‍👧‍👦", -"Skin tone 👍🏽", -"Brain 🧠", -"Ice 🧊", -"Melting face 🫠", -"Accented éüñç", -"Chinese: 中文", -"Japanese: 日本語", -"Hello 🚀 World", -"admin🔒user", -"1🚀' OR '1'='1", -] - -cursor.execute(""" -CREATE TABLE #pytest_emoji_test ( -id INT IDENTITY PRIMARY KEY, -content NVARCHAR(MAX) -); -""") -conn.commit() -for text in 
test_inputs: - try: - cursor.execute("INSERT INTO #pytest_emoji_test (content) OUTPUT INSERTED.id VALUES (?)", [text]) - inserted_id = cursor.fetchone()[0] - cursor.execute("SELECT content FROM #pytest_emoji_test WHERE id = ?", [inserted_id]) - result = cursor.fetchone() - assert result is not None, f"No row returned for ID {inserted_id}" - assert result[0] == text, f"Mismatch! Sent: {text}, Got: {result[0]}" - print(f"Test passed for input: {repr(text)}") - - except Exception as e: - print(f"Error for input {repr(text)}: {e}") # conn.autocommit = True +cursor = conn.cursor() +cursor.execute("SELECT database_id, name from sys.databases;") +rows = cursor.fetchall() -# cursor.execute("SELECT database_id, name from sys.databases;") -# rows = cursor.fetchall() - -# for row in rows: -# print(f"Database ID: {row[0]}, Name: {row[1]}") +for row in rows: + print(f"Database ID: {row[0]}, Name: {row[1]}") cursor.close() conn.close() \ No newline at end of file diff --git a/mssql_python/pybind/ddbc_bindings.cpp b/mssql_python/pybind/ddbc_bindings.cpp index 28c6b926c..789d3863a 100644 --- a/mssql_python/pybind/ddbc_bindings.cpp +++ b/mssql_python/pybind/ddbc_bindings.cpp @@ -1860,8 +1860,6 @@ SQLRETURN SQLGetData_wrap(SqlHandlePtr StatementHandle, SQLUSMALLINT colCount, p row.append(py::none()); continue; } - LOG("Fetching column {}: size={}, type={}", i, columnSize, dataType); - switch (dataType) { case SQL_CHAR: @@ -1922,7 +1920,6 @@ SQLRETURN SQLGetData_wrap(SqlHandlePtr StatementHandle, SQLUSMALLINT colCount, p case SQL_WVARCHAR: case SQL_WLONGVARCHAR: { if (columnSize == SQL_NO_TOTAL || columnSize > 4000) { - std::cout << "Column size: " << columnSize << std::endl; LOG("Streaming LOB for column {} (NVARCHAR)", i); row.append(FetchLobColumnData(hStmt, i, SQL_C_WCHAR, true, false)); } else { diff --git a/mssql_python/pybind/ddbc_bindings.h b/mssql_python/pybind/ddbc_bindings.h index 4922830e7..fe4e84003 100644 --- a/mssql_python/pybind/ddbc_bindings.h +++ 
b/mssql_python/pybind/ddbc_bindings.h @@ -70,53 +70,6 @@ inline bool IsValidUnicodeScalar(uint32_t cp) { !(cp >= UNICODE_SURROGATE_HIGH_START && cp <= UNICODE_SURROGATE_LOW_END); } -// inline std::wstring SQLWCHARToWString(const SQLWCHAR* sqlwStr, size_t length = SQL_NTS) { -// if (!sqlwStr) return std::wstring(); - -// if (length == SQL_NTS) { -// size_t i = 0; -// while (sqlwStr[i] != 0) ++i; -// length = i; -// } -// std::wstring result; -// result.reserve(length); - -// if constexpr (sizeof(SQLWCHAR) == 2) { -// // Decode UTF-16 to UTF-32 (with surrogate pair handling) -// for (size_t i = 0; i < length; ++i) { -// uint16_t wc = static_cast(sqlwStr[i]); -// // Check if this is a high surrogate (U+D800–U+DBFF) -// if (wc >= UNICODE_SURROGATE_HIGH_START && wc <= UNICODE_SURROGATE_HIGH_END && i + 1 < length) { -// uint16_t low = static_cast(sqlwStr[i + 1]); -// // Check if the next code unit is a low surrogate (U+DC00–U+DFFF) -// if (low >= UNICODE_SURROGATE_LOW_START && low <= UNICODE_SURROGATE_LOW_END) { -// // Combine surrogate pair into a single code point -// uint32_t cp = (((wc - UNICODE_SURROGATE_HIGH_START) << 10) | (low - UNICODE_SURROGATE_LOW_START)) + 0x10000; -// result.push_back(static_cast(cp)); -// ++i; // Skip the low surrogate -// continue; -// } -// } -// // If valid scalar then append, else append replacement char (U+FFFD) -// if (IsValidUnicodeScalar(wc)) { -// result.push_back(static_cast(wc)); -// } else { -// result.push_back(static_cast(UNICODE_REPLACEMENT_CHAR)); -// } -// } -// } else { -// // SQLWCHAR is UTF-32, so just copy with validation -// for (size_t i = 0; i < length; ++i) { -// uint32_t cp = static_cast(sqlwStr[i]); -// if (IsValidUnicodeScalar(cp)) { -// result.push_back(static_cast(cp)); -// } else { -// result.push_back(static_cast(UNICODE_REPLACEMENT_CHAR)); -// } -// } -// } -// return result; -// } inline std::wstring SQLWCHARToWString(const SQLWCHAR* sqlwStr, size_t length = SQL_NTS) { if (!sqlwStr) return std::wstring(); 
if (length == SQL_NTS) { @@ -401,22 +354,6 @@ struct ErrorInfo { }; ErrorInfo SQLCheckError_Wrap(SQLSMALLINT handleType, SqlHandlePtr handle, SQLRETURN retcode); -// inline std::string WideToUTF8(const std::wstring& wstr) { -// if (wstr.empty()) return {}; -// #if defined(_WIN32) -// int size_needed = WideCharToMultiByte(CP_UTF8, 0, wstr.data(), static_cast(wstr.size()), nullptr, 0, nullptr, nullptr); -// if (size_needed == 0) return {}; -// std::string result(size_needed, 0); -// int converted = WideCharToMultiByte(CP_UTF8, 0, wstr.data(), static_cast(wstr.size()), result.data(), size_needed, nullptr, nullptr); -// if (converted == 0) return {}; -// return result; -// #else -// std::wstring_convert> converter; -// return converter.to_bytes(wstr); -// #endif -// } - - inline std::string WideToUTF8(const std::wstring& wstr) { if (wstr.empty()) return {};