diff --git a/.github/workflows/Docker.yml b/.github/workflows/Docker.yml
deleted file mode 100644
index c6427b732..000000000
--- a/.github/workflows/Docker.yml
+++ /dev/null
@@ -1,100 +0,0 @@
-name: Build and Test - Docker
-
-on:
- schedule:
- - cron: '0 0 * * 1'
- push:
- branches: [ master ]
- pull_request:
- branches: [ master ]
- release:
- types:
- - created
-
-concurrency:
- group: docker-${{ github.head_ref }}
- cancel-in-progress: true
-
-env:
- CLICKHOUSE_SERVER_IMAGE: "clickhouse/clickhouse-server:21.3"
-
-defaults:
- run:
- shell: bash
-
-jobs:
- build_and_test:
-
- strategy:
- fail-fast: false
- matrix:
- # Has to be lowercase for ./test/docker/Dockerfile to work
- odbc_provider:
- - unixodbc
-# - iodbc
- base_os:
-# - centos
- - ubuntu
-
- runs-on: ubuntu-22.04
-
- steps:
-
- - name: Clone the repo
- uses: actions/checkout@v2
- with:
- path: .
- submodules: true
-
- - name: Install dependencies - Docker
- run: |
- sudo apt remove -y docker docker-engine docker.io containerd runc
- sudo apt install -y apt-transport-https ca-certificates curl gnupg lsb-release
- curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
- echo "deb [arch=amd64 signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
- sudo apt update -q
- sudo apt install docker-ce docker-ce-cli containerd.io
-
- - name: Build docker clickhouse_odbc_tester image
- run: |
- ls -lah .
- docker buildx build . -f ./test/docker/Dockerfile --build-arg ODBC_PROVIDER=${{ matrix.odbc_provider }} --build-arg BASE_OS=${{ matrix.base_os }} -t clickhouse_odbc_tester:${{ matrix.odbc_provider }}-${{ matrix.base_os }}
-
- - name: Create packages
- run: |
- docker run --entrypoint /bin/bash -v $(realpath ${{ github.workspace }}/packages):/packages clickhouse_odbc_tester:${{ matrix.odbc_provider }}-${{ matrix.base_os }} -c 'cd clickhouse-odbc-build && ls -lah && ninja package ||: ; mv clickhouse-odbc-* /packages && ls -lah /packages'
-
- - name: List artifacts
- run: |
- echo REF: ${{ github.ref }}
- ls -lahR ${{ github.workspace }}/packages/
-
- - name: Upload the artifacts
- uses: actions/upload-artifact@v3
- with:
- name: clickhouse-odbc-${{ matrix.base_os }}-${{ matrix.odbc_provider }}
- path: ${{ github.workspace }}/packages/*tar.gz*
-
- - name: Upload artifacts as release assets
- if: ${{ github.event_name == 'release' }}
- uses: svenstaro/upload-release-action@v2
- with:
- repo_token: ${{ secrets.GITHUB_TOKEN }}
- file: ${{ github.workspace }}/packages/clickhouse-odbc-*
- overwrite: true
- tag: ${{ github.ref }}
- file_glob: true
-
- # - name: Test - Run unit tests
- # run: docker run --network=host clickhouse_odbc_tester:${{ matrix.odbc_provider }} -R '.*-ut.*'
-
- # - name: Test - Start ClickHouse server in background
- # run: |
- # docker pull ${CLICKHOUSE_SERVER_IMAGE}
- # docker run -d --name clickhouse ${CLICKHOUSE_SERVER_IMAGE}
- # docker ps -a
- # docker stats -a --no-stream
-
- # - name: Test - Run integration test
- # # Run all tests except those that were run in "Test - unit tests" step, also run tests sequentially
- # run: docker run --network=host clickhouse_odbc_tester:${{ matrix.odbc_provider }} -E '.*-ut.*' -VV --debug -j 1
diff --git a/.github/workflows/Linux.yml b/.github/workflows/Linux.yml
index 5e9b3c4ce..5b6e3f83d 100644
--- a/.github/workflows/Linux.yml
+++ b/.github/workflows/Linux.yml
@@ -15,9 +15,6 @@ concurrency:
group: linux-${{ github.head_ref }}
cancel-in-progress: true
-env:
- CLICKHOUSE_SERVER_IMAGE: "clickhouse/clickhouse-server:21.3"
-
defaults:
run:
shell: bash
@@ -54,7 +51,7 @@ jobs:
mkdir -p ${{ github.workspace }}/package
- name: Clone the repo
- uses: actions/checkout@v2
+ uses: actions/checkout@v4
with:
path: source
submodules: true
@@ -64,17 +61,7 @@ jobs:
sudo apt update -q
sudo apt remove -y php* node* mysql* mssql-tools
sudo apt upgrade -y
- sudo apt install -y build-essential git cmake docker perl libdbi-perl libdbd-odbc-perl python-is-python3 python3 python3-pip python3-pyodbc python3-setuptools libpoco-dev libssl-dev libicu-dev
- pip3 install --user 'testflows==1.6.56'
-
- - name: Install dependencies - Docker
- run: |
- sudo apt remove -y docker docker-engine docker.io containerd runc
- sudo apt install -y apt-transport-https ca-certificates curl gnupg lsb-release
- curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
- echo "deb [arch=amd64 signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
- sudo apt update -q
- sudo apt install docker-ce docker-ce-cli containerd.io
+ sudo apt install -y build-essential git cmake docker perl libdbi-perl libdbd-odbc-perl python-is-python3 python3 python3-pip python3-setuptools libpoco-dev libssl-dev libicu-dev
- name: Install dependencies - UnixODBC
if: ${{ matrix.odbc_provider == 'UnixODBC' }}
@@ -95,6 +82,12 @@ jobs:
- name: Install dependencies - Cleanup
run: sudo apt autoremove -y
+ - name: Start ClickHouse in Docker
+ uses: hoverkraft-tech/compose-action@v2.0.1
+ with:
+ compose-file: source/test/docker-compose.yml
+ down-flags: --volumes
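+          # the flags are passed to "docker compose down", which the action runs when the job finishes;
+          # --volumes also removes the volumes created by the stack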
+
- name: Configure
run: >
CC=${{ fromJSON('{"Clang": "clang", "GCC": "gcc"}')[matrix.compiler] }}
@@ -107,7 +100,7 @@ jobs:
-DTEST_DSN_LIST="ClickHouse DSN (ANSI);ClickHouse DSN (Unicode);ClickHouse DSN (ANSI, RBWNAT)"
- name: Build
- run: cmake --build ${{ github.workspace }}/build --config ${{ matrix.build_type }}
+ run: cmake --build ${{ github.workspace }}/build --config ${{ matrix.build_type }} --parallel $(nproc)
- name: Package
run: cmake --build ${{ github.workspace }}/build --config ${{ matrix.build_type }} --target package
@@ -122,94 +115,96 @@ jobs:
# However, these binaries are uploaded to be available in GH's 'Actions', just in case.
- name: Upload the artifacts
if: ${{ matrix.compiler == 'GCC' && matrix.odbc_provider == 'UnixODBC' && matrix.build_type == 'Release' && matrix.runtime_link == 'dynamic-runtime' && matrix.third_parties == 'bundled-third-parties' }}
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: clickhouse-odbc-linux-${{ matrix.compiler }}-${{ matrix.odbc_provider }}-${{ matrix.build_type }}
path: ${{ github.workspace }}/build/clickhouse-odbc-*
- - name: Test - Run unit tests
+ - name: Test - Run C++ unit tests
working-directory: ${{ github.workspace }}/build
run: ctest --output-on-failure --build-config ${{ matrix.build_type }} -R '.*-ut.*'
- - name: Test - Start ClickHouse server in background
+ - name: Prepare ODBC ini configs
run: |
- docker pull ${CLICKHOUSE_SERVER_IMAGE}
- docker run -d --name clickhouse ${CLICKHOUSE_SERVER_IMAGE}
- docker ps -a
- docker stats -a --no-stream
-
- - name: Test - Run integration tests
- working-directory: ${{ github.workspace }}/build
- run: |
- export CLICKHOUSE_SERVER_IP=$(docker inspect -f '{{ .NetworkSettings.IPAddress }}' clickhouse)
-
- export ODBCSYSINI=${{ github.workspace }}/run
- export ODBCINSTINI=.odbcinst.ini
- export ODBCINI=$ODBCSYSINI/.odbc.ini
- if [[ "${{ matrix.odbc_provider }}" == "iODBC" ]]; then
- # Full path to a custom odbcinst.ini in ODBCINSTINI for iODBC.
- export ODBCINSTINI=$ODBCSYSINI/$ODBCINSTINI
- fi
-
- cat > $ODBCSYSINI/.odbcinst.ini <<-EOF
+ echo "Preparing ODBC ini configs"
+ cat > ${{ github.workspace }}/run/.odbcinst.ini <<-EOF
[ODBC]
Trace = 1
TraceFile = ${{ github.workspace }}/run/odbc-driver-manager-trace.log
Debug = 1
DebugFile = ${{ github.workspace }}/run/odbc-driver-manager-debug.log
-
+
[ODBC Drivers]
ClickHouse ODBC Driver (ANSI) = Installed
ClickHouse ODBC Driver (Unicode) = Installed
-
+
[ClickHouse ODBC Driver (ANSI)]
Driver = ${{ github.workspace }}/build/driver/libclickhouseodbc.so
Setup = ${{ github.workspace }}/build/driver/libclickhouseodbc.so
UsageCount = 1
-
+
[ClickHouse ODBC Driver (Unicode)]
Driver = ${{ github.workspace }}/build/driver/libclickhouseodbcw.so
Setup = ${{ github.workspace }}/build/driver/libclickhouseodbcw.so
UsageCount = 1
EOF
-
- cat > $ODBCSYSINI/.odbc.ini <<-EOF
+
+ cat > ${{ github.workspace }}/run/.odbc.ini <<-EOF
[ODBC]
Trace = 1
TraceFile = ${{ github.workspace }}/run/odbc-driver-manager-trace.log
Debug = 1
DebugFile = ${{ github.workspace }}/run/odbc-driver-manager-debug.log
-
+
[ODBC Data Sources]
ClickHouse DSN (ANSI) = ClickHouse ODBC Driver (ANSI)
ClickHouse DSN (Unicode) = ClickHouse ODBC Driver (Unicode)
ClickHouse DSN (ANSI, RBWNAT) = ClickHouse ODBC Driver (ANSI)
-
+
[ClickHouse DSN (ANSI)]
Driver = ClickHouse ODBC Driver (ANSI)
Description = Test DSN for ClickHouse ODBC Driver (ANSI)
-              Url = http://${CLICKHOUSE_SERVER_IP}
+              Url = http://localhost:8123
DriverLog = yes
DriverLogFile = ${{ github.workspace }}/run/clickhouse-odbc-driver.log
-
+
[ClickHouse DSN (Unicode)]
Driver = ClickHouse ODBC Driver (Unicode)
Description = Test DSN for ClickHouse ODBC Driver (Unicode)
- Url = http://${CLICKHOUSE_SERVER_IP}
+ Url = http://localhost:8123
DriverLog = yes
DriverLogFile = ${{ github.workspace }}/run/clickhouse-odbc-driver-w.log
-
+
[ClickHouse DSN (ANSI, RBWNAT)]
Driver = ClickHouse ODBC Driver (ANSI)
Description = Test DSN for ClickHouse ODBC Driver (ANSI) that uses RowBinaryWithNamesAndTypes as data source communication default format
- Url = http://${CLICKHOUSE_SERVER_IP}/query?default_format=RowBinaryWithNamesAndTypes
+ Url = http://localhost:8123/query?default_format=RowBinaryWithNamesAndTypes
DriverLog = yes
DriverLogFile = ${{ github.workspace }}/run/clickhouse-odbc-driver.log
EOF
+      # Run all tests except those that were run in the "Test - Run C++ unit tests" step.
+ - name: Test - Run C++ integration tests
+ working-directory: ${{ github.workspace }}/build
+ run: |
+ export ODBCSYSINI=
+ export ODBCINSTINI="${{ github.workspace }}/run/.odbcinst.ini"
+ export ODBCINI="${{ github.workspace }}/run/.odbc.ini"
if [[ "${{ matrix.odbc_provider }}" == "iODBC" ]]; then
- export GTEST_FILTER="-PerformanceTest.*"
+ export GTEST_FILTER="-PerformanceTest.*"
fi
-
- # Run all tests except those that were run in "Test - unit tests" step.
ctest --output-on-failure --build-config ${{ matrix.build_type }} -E '.*-ut.*'
+
+ - name: Prepare Python dependencies
+ working-directory: source/test
+ run: |
+ pip install -r requirements.txt
+
+ # An empty `ODBCSYSINI` is required in this case to run properly with custom `ODBCINI`/`ODBCINSTINI` paths
+ - name: Test - Run Python e2e tests
+ working-directory: source/test
+ run: |
+ export ODBCSYSINI=
+ export ODBCINSTINI="${{ github.workspace }}/run/.odbcinst.ini"
+ export ODBCINI="${{ github.workspace }}/run/.odbc.ini"
+ pytest --log-level=DEBUG -v
diff --git a/CMakeLists.txt b/CMakeLists.txt
index e999a6a69..a844fc319 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -199,9 +199,6 @@ if (ipo_supported)
endif ()
add_subdirectory (driver)
-if (CH_ODBC_ENABLE_TESTING)
- add_subdirectory (test)
-endif ()
if (CH_ODBC_ENABLE_INSTALL)
if (NOT WIN32)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 000000000..85386d1df
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,102 @@
+## Python e2e tests
+
+The e2e tests use [pyodbc](https://pypi.org/project/pyodbc/) and [pytest](https://docs.pytest.org/en/latest/index.html).
+
+The tests use `ClickHouse DSN (ANSI)` by default. If required, the DSN can be changed via the `DSN` environment variable.
+
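+For example, the following is expected to run the whole suite against the Unicode DSN defined in the samples below:
+
+```sh
+DSN="ClickHouse DSN (Unicode)" pytest
+```
+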
+### Prerequisites
+
+- Build the driver from [sources](./README.md#building-from-sources).
+- Create the [required INI files](./README.md#configuration-unixodbc). You can also have a look at how the config files are [generated by the CI workflows](.github/workflows/Linux.yml).
+
+### Sample ini config files
+
+Sample `~/.odbc.ini` config (can be used as is):
+
+```
+[ODBC Data Sources]
+ClickHouse DSN (ANSI) = ClickHouse ODBC Driver (ANSI)
+ClickHouse DSN (Unicode) = ClickHouse ODBC Driver (Unicode)
+
+[ClickHouse DSN (ANSI)]
+Driver = ClickHouse ODBC Driver (ANSI)
+Description = DSN (localhost) for ClickHouse ODBC Driver (ANSI)
+Url = http://localhost:8123
+
+[ClickHouse DSN (Unicode)]
+Driver = ClickHouse ODBC Driver (Unicode)
+Description = DSN (localhost) for ClickHouse ODBC Driver (Unicode)
+Url = http://localhost:8123
+```
+
+Sample `~/.odbcinst.ini` config (don't forget to replace `/absolute/path/to/clickhouse-odbc`):
+
+```
+[ODBC Drivers]
+ClickHouse ODBC Driver (ANSI) = Installed
+ClickHouse ODBC Driver (Unicode) = Installed
+
+[ClickHouse ODBC Driver (ANSI)]
+Description = ODBC Driver (ANSI) for ClickHouse
+Driver = /absolute/path/to/clickhouse-odbc/build/driver/libclickhouseodbc.so
+Setup = /absolute/path/to/clickhouse-odbc/build/driver/libclickhouseodbc.so
+UsageCount = 1
+
+[ClickHouse ODBC Driver (Unicode)]
+Description = ODBC Driver (Unicode) for ClickHouse
+Driver = /absolute/path/to/clickhouse-odbc/build/driver/libclickhouseodbcw.so
+Setup = /absolute/path/to/clickhouse-odbc/build/driver/libclickhouseodbcw.so
+UsageCount = 1
+```
+
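+With both files in place, the ANSI DSN can be sanity-checked even before running the test suites, assuming unixODBC's `isql` tool is available:
+
+```sh
+echo "SELECT 1" | isql "ClickHouse DSN (ANSI)" -b
+```
+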
+### Custom ini config files location (optional)
+
+Instead of placing the config files in the default locations (`~/.odbc.ini` and `~/.odbcinst.ini`), you can create them elsewhere.
+However, in that case, for the tests to run properly (this applies to both the Python e2e tests and the C++ integration tests), the following environment variables must be set to point to the configuration files:
+
+```
+export ODBCSYSINI=
+export ODBCINSTINI="/absolute/path/to/.odbcinst.ini"
+export ODBCINI="/absolute/path/to/.odbc.ini"
+```
+
+**IMPORTANT**: note that the `ODBCSYSINI` variable is deliberately set to an empty string.
+
+Please refer to the [CI workflows](.github/workflows/Linux.yml) for more examples of such usage.
+
+### Running the Python e2e tests from the CLI
+
+With a [virtual environment](https://docs.python.org/3/library/venv.html) set up for the project (recommended), install the dependencies:
+
+```sh
+cd test
+pip install -r requirements.txt
+```
+
+Start the ClickHouse server in Docker, if it is not already running locally:
+
+```sh
+docker-compose up -d
+```
+
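+Once the container is up, server readiness can be verified via the HTTP interface exposed on port 8123; it should respond with `Ok.`:
+
+```sh
+curl http://localhost:8123/ping
+```
+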
+Run the tests:
+
+```sh
+pytest
+```
+
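+A subset of the tests can be selected with pytest's standard keyword filter (the pattern below is just an illustration):
+
+```sh
+pytest -k "datetime"
+```
+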
+If debug logs (queries, parameters, etc.) are required, run the tests as follows:
+
+```sh
+pytest --log-level=DEBUG -v
+```
+
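+For reference, a minimal e2e test against this setup could look like the following sketch (hypothetical, not a test from the suite):
+
+```python
+import os
+
+import pyodbc
+
+
+def test_select_one():
+    # The suite resolves the DSN the same way: "ClickHouse DSN (ANSI)" unless overridden via DSN
+    dsn = os.getenv("DSN", "ClickHouse DSN (ANSI)")
+    connection = pyodbc.connect(f"DSN={dsn}")
+    try:
+        cursor = connection.cursor()
+        cursor.execute("SELECT 1")
+        assert cursor.fetchone()[0] == 1
+    finally:
+        connection.close()
+```
+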
+### PyCharm setup
+
+Verified with PyCharm 2024.1 and Python 3.12.
+
+* Open the `test` directory in PyCharm as a new project
+* Create a new `venv` interpreter using PyCharm
+* Install all dependencies from the `requirements.txt` file
+
+Now, you should be able to run the tests from PyCharm itself.
diff --git a/driver/test/datetime_it.cpp b/driver/test/datetime_it.cpp
index 0f4059c4a..0eeb117a8 100755
--- a/driver/test/datetime_it.cpp
+++ b/driver/test/datetime_it.cpp
@@ -84,7 +84,7 @@ TEST_P(DateTime, GetData) {
{
SQLLEN sql_type = SQL_TYPE_NULL;
ODBC_CALL_ON_STMT_THROW(hstmt, SQLColAttribute(hstmt, 1, SQL_DESC_TYPE, NULL, 0, NULL, &sql_type));
- EXPECT_EQ(sql_type, params.expected_sql_type);
+ EXPECT_EQ(sql_type, params.expected_sql_type) << "expected: " << params.expected_str_val;
}
{
@@ -100,7 +100,7 @@ TEST_P(DateTime, GetData) {
&col_ind
));
- EXPECT_EQ(toUTF8(col), params.expected_str_val);
+        EXPECT_EQ(toUTF8(col), params.expected_str_val) << "expected: " << params.expected_str_val;
}
if (params.format != "RowBinaryWithNamesAndTypes" || params.expected_sql_type == SQL_TYPE_DATE) {
@@ -116,7 +116,7 @@ TEST_P(DateTime, GetData) {
&col_ind
));
- EXPECT_EQ(col, expected_date_val);
+        EXPECT_EQ(col, expected_date_val) << "expected: " << params.expected_str_val;
}
if (params.format != "RowBinaryWithNamesAndTypes") {
@@ -132,7 +132,7 @@ TEST_P(DateTime, GetData) {
&col_ind
));
- EXPECT_EQ(col, expected_time_val);
+        EXPECT_EQ(col, expected_time_val) << "expected: " << params.expected_str_val;
}
if (params.format != "RowBinaryWithNamesAndTypes" || params.expected_sql_type != SQL_TYPE_DATE) {
@@ -148,7 +148,7 @@ TEST_P(DateTime, GetData) {
&col_ind
));
- EXPECT_EQ(col, params.expected_timestamp_val);
+        EXPECT_EQ(col, params.expected_timestamp_val) << "expected: " << params.expected_str_val;
}
}
@@ -170,53 +170,53 @@ INSTANTIATE_TEST_SUITE_P(
MiscellaneousTest,
DateTime,
::testing::Values(
- DateTimeParams{"Date", "ODBCDriver2", "Europe/Moscow",
+ DateTimeParams{"Date", "ODBCDriver2", "UTC",
"toDate('2020-03-25')", SQL_TYPE_DATE,
"2020-03-25", SQL_TIMESTAMP_STRUCT{2020, 3, 25, 0, 0, 0, 0}
},
- DateTimeParams{"DateTime", "ODBCDriver2", "Europe/Moscow",
+ DateTimeParams{"DateTime", "ODBCDriver2", "UTC",
"toDateTime('2020-03-25 12:11:22')", SQL_TYPE_TIMESTAMP,
"2020-03-25 12:11:22", SQL_TIMESTAMP_STRUCT{2020, 3, 25, 12, 11, 22, 0}
},
- DateTimeParams{"DateTime_TZ", "ODBCDriver2", "Europe/Moscow",
+ DateTimeParams{"DateTime_TZ", "ODBCDriver2", "UTC",
"toDateTime('2020-03-25 12:11:22', 'Asia/Kathmandu')", SQL_TYPE_TIMESTAMP,
"2020-03-25 12:11:22", SQL_TIMESTAMP_STRUCT{2020, 3, 25, 12, 11, 22, 0}
},
- DateTimeParams{"DateTime64_0", "ODBCDriver2", "Europe/Moscow",
+ DateTimeParams{"DateTime64_0", "ODBCDriver2", "UTC",
"toDateTime64('2020-03-25 12:11:22.123456789', 0)", SQL_TYPE_TIMESTAMP,
"2020-03-25 12:11:22", SQL_TIMESTAMP_STRUCT{2020, 3, 25, 12, 11, 22, 0}
},
- DateTimeParams{"DateTime64_4", "ODBCDriver2", "Europe/Moscow",
+ DateTimeParams{"DateTime64_4", "ODBCDriver2", "UTC",
"toDateTime64('2020-03-25 12:11:22.123456789', 4)", SQL_TYPE_TIMESTAMP,
"2020-03-25 12:11:22.1234", SQL_TIMESTAMP_STRUCT{2020, 3, 25, 12, 11, 22, 123400000}
},
- DateTimeParams{"DateTime64_9", "ODBCDriver2", "Europe/Moscow",
+ DateTimeParams{"DateTime64_9", "ODBCDriver2", "UTC",
"toDateTime64('2020-03-25 12:11:22.123456789', 9)", SQL_TYPE_TIMESTAMP,
"2020-03-25 12:11:22.123456789", SQL_TIMESTAMP_STRUCT{2020, 3, 25, 12, 11, 22, 123456789}
},
- DateTimeParams{"DateTime64_9_TZ", "ODBCDriver2", "Europe/Moscow",
+ DateTimeParams{"DateTime64_9_TZ", "ODBCDriver2", "UTC",
"toDateTime64('2020-03-25 12:11:22.123456789', 9, 'Asia/Kathmandu')", SQL_TYPE_TIMESTAMP,
"2020-03-25 12:11:22.123456789", SQL_TIMESTAMP_STRUCT{2020, 3, 25, 12, 11, 22, 123456789}
},
// TODO: remove this once the formats behave identically.
- DateTimeParams{"Date", "RowBinaryWithNamesAndTypes", "Europe/Moscow",
+ DateTimeParams{"Date", "RowBinaryWithNamesAndTypes", "UTC",
"toDate('2020-03-25')", SQL_TYPE_DATE,
"2020-03-25", SQL_TIMESTAMP_STRUCT{2020, 3, 25, 0, 0, 0, 0}
},
- DateTimeParams{"DateTime_TZ", "RowBinaryWithNamesAndTypes", "Europe/Moscow",
+ DateTimeParams{"DateTime_TZ", "RowBinaryWithNamesAndTypes", "UTC",
"toDateTime('2020-03-25 12:11:22', 'Asia/Kathmandu')", SQL_TYPE_TIMESTAMP,
- "2020-03-25 09:26:22", SQL_TIMESTAMP_STRUCT{2020, 3, 25, 9, 26, 22, 0}
+ "2020-03-25 06:26:22", SQL_TIMESTAMP_STRUCT{2020, 3, 25, 6, 26, 22, 0}
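+            // 12:11:22 in Asia/Kathmandu (UTC+05:45) is 06:26:22 in UTC, hence the expected values here and below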
},
- DateTimeParams{"DateTime64_9_TZ", "RowBinaryWithNamesAndTypes", "Europe/Moscow",
+ DateTimeParams{"DateTime64_9_TZ", "RowBinaryWithNamesAndTypes", "UTC",
"toDateTime64('2020-03-25 12:11:22.123456789', 9, 'Asia/Kathmandu')", SQL_TYPE_TIMESTAMP,
- "2020-03-25 09:26:22.123456789", SQL_TIMESTAMP_STRUCT{2020, 3, 25, 9, 26, 22, 123456789}
+ "2020-03-25 06:26:22.123456789", SQL_TIMESTAMP_STRUCT{2020, 3, 25, 6, 26, 22, 123456789}
}/*,
// TODO: uncomment once the target ClickHouse server is 21.4+
- DateTimeParams{"DateTime64_9_TZ_pre_epoch", "RowBinaryWithNamesAndTypes", "Europe/Moscow",
+ DateTimeParams{"DateTime64_9_TZ_pre_epoch", "RowBinaryWithNamesAndTypes", "UTC",
"toDateTime64('1955-03-25 12:11:22.123456789', 9, 'Asia/Kathmandu')", SQL_TYPE_TIMESTAMP,
"1955-03-25 09:26:22.123456789", SQL_TIMESTAMP_STRUCT{1955, 3, 25, 9, 26, 22, 123456789}
}
diff --git a/driver/test/statement_parameters_it.cpp b/driver/test/statement_parameters_it.cpp
index cfc26916b..c35b6d351 100755
--- a/driver/test/statement_parameters_it.cpp
+++ b/driver/test/statement_parameters_it.cpp
@@ -361,8 +361,8 @@ INSTANTIATE_TEST_SUITE_P(TypeConversion, ParameterColumnRoundTripDecimalAsString
"-12345",
"12345.6789",
"-12345.6789",
- "12345.000000000000",
- "12345.001002003000",
+ "12345",
+ "12345.001002003",
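+        // trailing zeros in the fractional part are now expected to be trimmed in the round-tripped string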
"100000000000000000",
"-100000000000000000",
"1.00000000000000001",
diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt
deleted file mode 100644
index 12a59c580..000000000
--- a/test/CMakeLists.txt
+++ /dev/null
@@ -1,156 +0,0 @@
-find_program(PERL perl)
-find_program(PYTHON2 python2)
-find_program(PYTHON3 python3)
-find_program(SQLCMD sqlcmd)
-
-set(PERL_USES_MDAC 0)
-set(PERL_USES_UNIXODBC 0)
-set(PERL_USES_IODBC 0)
-
-if(PERL)
- execute_process(
- COMMAND ${PERL} ${CMAKE_CURRENT_SOURCE_DIR}/detect_driver_manager.pl
- OUTPUT_VARIABLE _perl_output
- OUTPUT_STRIP_TRAILING_WHITESPACE
-# ERROR_QUIET
- )
- message(STATUS "ODBC Driver Manager used by Perl's DBD::ODBC: ${_perl_output}")
- if(_perl_output MATCHES "Microsoft")
- set(PERL_USES_MDAC 1)
- endif()
- if(_perl_output MATCHES "unixODBC")
- set(PERL_USES_UNIXODBC 1)
- endif()
- if(_perl_output MATCHES "iODBC")
- set(PERL_USES_IODBC 1)
- endif()
-endif()
-
-set(PYTHON2_USES_MDAC 0)
-set(PYTHON2_USES_UNIXODBC 0)
-set(PYTHON2_USES_IODBC 0)
-
-if(PYTHON2)
- execute_process(
- COMMAND ${PYTHON2} ${CMAKE_CURRENT_SOURCE_DIR}/detect_driver_manager.py
- OUTPUT_VARIABLE _python_output
- OUTPUT_STRIP_TRAILING_WHITESPACE
-# ERROR_QUIET
- )
- message(STATUS "ODBC Driver Manager used by Python 2's pyodbc: ${_python_output}")
- if(_python_output MATCHES "Microsoft")
- set(PYTHON2_USES_MDAC 1)
- endif()
- if(_python_output MATCHES "unixODBC")
- set(PYTHON2_USES_UNIXODBC 1)
- endif()
- if(_python_output MATCHES "iODBC")
- set(PYTHON2_USES_IODBC 1)
- endif()
-endif()
-
-set(PYTHON3_USES_MDAC 0)
-set(PYTHON3_USES_UNIXODBC 0)
-set(PYTHON3_USES_IODBC 0)
-
-if(PYTHON3)
- execute_process(
- COMMAND ${PYTHON3} ${CMAKE_CURRENT_SOURCE_DIR}/detect_driver_manager.py
- OUTPUT_VARIABLE _python_output
- OUTPUT_STRIP_TRAILING_WHITESPACE
-# ERROR_QUIET
- )
- message(STATUS "ODBC Driver Manager used by Python 3's pyodbc: ${_python_output}")
- if(_python_output MATCHES "Microsoft")
- set(PYTHON3_USES_MDAC 1)
- endif()
- if(_python_output MATCHES "unixODBC")
- set(PYTHON3_USES_UNIXODBC 1)
- endif()
- if(_python_output MATCHES "iODBC")
- set(PYTHON3_USES_IODBC 1)
- endif()
-endif()
-
-message(STATUS "Testing with:\n\t"
- "TEST_DSN_LIST=${TEST_DSN_LIST}\n\t"
- "PERL=${PERL}\n\t"
- "PERL_USES_MDAC=${PERL_USES_MDAC}\n\t"
- "PERL_USES_UNIXODBC=${PERL_USES_UNIXODBC}\n\t"
- "PERL_USES_IODBC=${PERL_USES_IODBC}\n\t"
- "PYTHON2=${PYTHON2}\n\t"
- "PYTHON2_USES_MDAC=${PYTHON2_USES_MDAC}\n\t"
- "PYTHON2_USES_UNIXODBC=${PYTHON2_USES_UNIXODBC}\n\t"
- "PYTHON2_USES_IODBC=${PYTHON2_USES_IODBC}\n\t"
- "PYTHON3=${PYTHON3}\n\t"
- "PYTHON3_USES_MDAC=${PYTHON3_USES_MDAC}\n\t"
- "PYTHON3_USES_UNIXODBC=${PYTHON3_USES_UNIXODBC}\n\t"
- "PYTHON3_USES_IODBC=${PYTHON3_USES_IODBC}\n\t"
- "ODBC_UNIXODBC_ISQL=${ODBC_UNIXODBC_ISQL}\n\t"
- "ODBC_UNIXODBC_IUSQL=${ODBC_UNIXODBC_IUSQL}\n\t"
- "ODBC_IODBC_IODBCTEST=${ODBC_IODBC_IODBCTEST}\n\t"
- "ODBC_IODBC_IODBCTESTW=${ODBC_IODBC_IODBCTESTW}\n\t"
- "SQLCMD=${SQLCMD}"
-)
-
-set(SQL_QUERY "123456+456789" CACHE STRING "")
-set(SQL_ANSWER "580245" CACHE STRING "")
-
-set(test_dsn_counter 0)
-foreach (test_dsn ${TEST_DSN_LIST})
- if (test_dsn)
- if(PERL AND (
- ("${ODBC_PROVIDER}" STREQUAL "MDAC" AND PERL_USES_MDAC) OR
- ("${ODBC_PROVIDER}" STREQUAL "UnixODBC" AND PERL_USES_UNIXODBC) OR
- ("${ODBC_PROVIDER}" STREQUAL "iODBC" AND PERL_USES_IODBC)
- ))
- add_test(NAME "test.pl-dsn-${test_dsn_counter}" COMMAND ${PERL} ${CMAKE_CURRENT_SOURCE_DIR}/test.pl "${test_dsn}")
- endif()
-
- if(PYTHON2 AND (
- ("${ODBC_PROVIDER}" STREQUAL "MDAC" AND PYTHON2_USES_MDAC) OR
- ("${ODBC_PROVIDER}" STREQUAL "UnixODBC" AND PYTHON2_USES_UNIXODBC) OR
- ("${ODBC_PROVIDER}" STREQUAL "iODBC" AND PYTHON2_USES_IODBC)
- ))
- add_test(NAME "test.py-2-dsn-${test_dsn_counter}" COMMAND ${PYTHON2} ${CMAKE_CURRENT_SOURCE_DIR}/test.py "${test_dsn}")
- endif()
-
- if(PYTHON3 AND (
- ("${ODBC_PROVIDER}" STREQUAL "MDAC" AND PYTHON3_USES_MDAC) OR
- ("${ODBC_PROVIDER}" STREQUAL "UnixODBC" AND PYTHON3_USES_UNIXODBC) OR
- ("${ODBC_PROVIDER}" STREQUAL "iODBC" AND PYTHON3_USES_IODBC)
- ))
- add_test(NAME "test.py-3-dsn-${test_dsn_counter}" COMMAND ${PYTHON3} ${CMAKE_CURRENT_SOURCE_DIR}/test.py "${test_dsn}")
- if(NOT test_dsn MATCHES "RBWNAT") # TODO: a shaky way of detecting a RowBinaryWithNamesAndTypes-enabled DSN and disabling parametrized-regression.py for it, until fixed.
- add_test(NAME "parametrized-regression.py-3-dsn-${test_dsn_counter}" COMMAND ${CMAKE_COMMAND} -E env "DSN=${test_dsn}" ${PYTHON3} ${CMAKE_CURRENT_SOURCE_DIR}/parameterized/regression.py)
- endif()
- endif()
-
- if(ODBC_UNIXODBC_ISQL AND "${ODBC_PROVIDER}" STREQUAL "UnixODBC")
- add_test(NAME "isql-dsn-${test_dsn_counter}" COMMAND sh -c "echo select ${SQL_QUERY} | ${ODBC_UNIXODBC_ISQL} '${test_dsn}' | grep ${SQL_ANSWER}")
- add_test(NAME "test.sh-isql-dsn-${test_dsn_counter}" COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/test.sh ${ODBC_UNIXODBC_ISQL} \"${test_dsn}\" -v -b)
- endif()
-
- if(ODBC_UNIXODBC_IUSQL AND "${ODBC_PROVIDER}" STREQUAL "UnixODBC")
- add_test(NAME "iusql-dsn-${test_dsn_counter}" COMMAND sh -c "echo select ${SQL_QUERY} | ${ODBC_UNIXODBC_IUSQL} '${test_dsn}' | grep ${SQL_ANSWER}")
- add_test(NAME "test.sh-iusql-dsn-${test_dsn_counter}" COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/test.sh ${ODBC_UNIXODBC_IUSQL} \"${test_dsn}\" -v -b)
- endif()
-
- if(ODBC_IODBC_IODBCTEST AND "${ODBC_PROVIDER}" STREQUAL "iODBC")
- add_test(NAME "iodbctest-dsn-${test_dsn_counter}" COMMAND sh -c "echo select ${SQL_QUERY} | ${ODBC_IODBC_IODBCTEST} 'DSN=${test_dsn}' | grep ${SQL_ANSWER}")
- add_test(NAME "test.sh-iodbctest-dsn-${test_dsn_counter}" COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/test.sh ${ODBC_IODBC_IODBCTEST} \"DSN=${test_dsn}\")
- endif()
-
- if(ODBC_IODBC_IODBCTESTW AND "${ODBC_PROVIDER}" STREQUAL "iODBC")
- add_test(NAME "iodbctestw-dsn-${test_dsn_counter}" COMMAND sh -c "echo select ${SQL_QUERY} | ${ODBC_IODBC_IODBCTESTW} 'DSN=${test_dsn}' | grep ${SQL_ANSWER}")
- add_test(NAME "test.sh-iodbctestw-dsn-${test_dsn_counter}" COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/test.sh ${ODBC_IODBC_IODBCTESTW} \"DSN=${test_dsn}\")
- endif()
-
- math (EXPR test_dsn_counter "${test_dsn_counter}+1")
- endif ()
-endforeach ()
-
-if(SQLCMD)
- # MS SQL server need change server in file:
- add_test(NAME "sqlcmd" COMMAND ${SQLCMD} -i ${CMAKE_CURRENT_SOURCE_DIR}/mssql.linked.server.sql)
-endif()
diff --git a/test/detect_driver_manager.py b/test/detect_driver_manager.py
index e5cc677b6..60e487b92 100755
--- a/test/detect_driver_manager.py
+++ b/test/detect_driver_manager.py
@@ -7,8 +7,8 @@
try:
connection = pyodbc.connect("DSN=__nonexistent_dsn__")
except pyodbc.Error as error:
- result = re.search(r"\[([^\[\]]+)\]\[Driver Manager\]", str(error))
- if (result and len(result.groups()) >= 1):
+ result = re.search(r"\[([^\[\]]+)]\[Driver Manager]", str(error))
+ if result and len(result.groups()) >= 1:
print(result.group(1))
else:
raise
diff --git a/test/docker-compose.yml b/test/docker-compose.yml
new file mode 100644
index 000000000..a7082c91c
--- /dev/null
+++ b/test/docker-compose.yml
@@ -0,0 +1,20 @@
+services:
+ clickhouse:
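+    # The image tag defaults to 24.3-alpine; export CLICKHOUSE_VERSION to override it, e.g. CLICKHOUSE_VERSION=24.8-alpine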
+ image: 'clickhouse/clickhouse-server:${CLICKHOUSE_VERSION-24.3-alpine}'
+ container_name: 'clickhouse-odbc-clickhouse-server'
+ ports:
+ - '8123:8123'
+ - '9000:9000'
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ volumes:
+ - './docker-compose/config.xml:/etc/clickhouse-server/config.xml'
+ - './docker-compose/users.xml:/etc/clickhouse-server/users.xml'
+ networks:
+ - clickhouse-odbc
+
+networks:
+ clickhouse-odbc:
+ driver: bridge
diff --git a/test/docker-compose/config.xml b/test/docker-compose/config.xml
new file mode 100644
index 000000000..3ef3abd52
--- /dev/null
+++ b/test/docker-compose/config.xml
@@ -0,0 +1,35 @@
+<?xml version="1.0"?>
+<!-- Minimal ClickHouse server configuration for the ODBC test suite -->
+<clickhouse>
+    <http_port>8123</http_port>
+    <tcp_port>9000</tcp_port>
+
+    <users_config>users.xml</users_config>
+    <default_profile>default</default_profile>
+    <default_database>default</default_database>
+
+    <mark_cache_size>5368709120</mark_cache_size>
+
+    <path>/var/lib/clickhouse/</path>
+    <tmp_path>/var/lib/clickhouse/tmp/</tmp_path>
+    <user_files_path>/var/lib/clickhouse/user_files/</user_files_path>
+    <access_control_path>/var/lib/clickhouse/access/</access_control_path>
+    <keep_alive_timeout>3</keep_alive_timeout>
+
+    <logger>
+        <level>debug</level>
+        <log>/var/log/clickhouse-server/clickhouse-server.log</log>
+        <errorlog>/var/log/clickhouse-server/clickhouse-server.err.log</errorlog>
+        <size>1000M</size>
+        <count>10</count>
+        <console>1</console>
+    </logger>
+
+    <query_log>
+        <database>system</database>
+
+        <partition_by>toYYYYMM(event_date)</partition_by>
+        <flush_interval_milliseconds>1000</flush_interval_milliseconds>
+    </query_log>
+
+</clickhouse>
diff --git a/test/docker-compose/users.xml b/test/docker-compose/users.xml
new file mode 100644
index 000000000..611885366
--- /dev/null
+++ b/test/docker-compose/users.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0"?>
+
+<clickhouse>
+    <profiles>
+        <default>
+            <load_balancing>random</load_balancing>
+        </default>
+    </profiles>
+
+    <users>
+        <default>
+            <password></password>
+            <networks>
+                <ip>::/0</ip>
+            </networks>
+            <profile>default</profile>
+            <quota>default</quota>
+            <access_management>1</access_management>
+        </default>
+    </users>
+
+    <quotas>
+        <default>
+            <interval>
+                <duration>3600</duration>
+                <queries>0</queries>
+                <errors>0</errors>
+                <result_rows>0</result_rows>
+                <read_rows>0</read_rows>
+                <execution_time>0</execution_time>
+            </interval>
+        </default>
+    </quotas>
+</clickhouse>
diff --git a/test/docker/Dockerfile b/test/docker/Dockerfile
index 0e68fa956..b484525b3 100644
--- a/test/docker/Dockerfile
+++ b/test/docker/Dockerfile
@@ -30,9 +30,6 @@ RUN apt-get update -y \
libssl-dev \
libicu-dev
-RUN pip3 install --user \
- 'testflows==1.6.56'
-
#
#FROM centos:7 as clickhouse_odbc_tester_base_centos
#RUN yum makecache
@@ -109,8 +106,7 @@ ENV LOG_DIR=/var/log/ch-odbc
ARG BIN_DIR=/clickhouse-odbc-build
# We need to install it after odbc provider, since we need a 'sql.h' to build it
-RUN pip3 install --user \
- 'pyodbc>=4.0.0'
+RUN pip3 install --user -r requirements.txt
RUN mkdir -p ${BIN_DIR} \
&& echo $CMAKE_ODBC_PROVIDER \
diff --git a/test/parameterized/parameterized/datatypes.py b/test/parameterized/parameterized/datatypes.py
deleted file mode 100755
index cc698e4e0..000000000
--- a/test/parameterized/parameterized/datatypes.py
+++ /dev/null
@@ -1,519 +0,0 @@
-import datetime
-import decimal
-import uuid
-
-from testflows.core import TestFeature, TestScenario
-from testflows.core import Requirements, Feature, Scenario, Given, When, Then, TE
-from testflows.asserts import error
-from requirements.QA_SRS003_ParameterizedQueries import *
-from utils import Logs, PyODBCConnection
-
-class Null(object):
- """NULL data type"""
- def __repr__(self):
- return 'NULL'
-
-NULL = Null()
-
-def check_datatype(connection, datatype, values, nullable=False, quote=False, repr=str, encoding="utf-8", expected=None):
- """Check support for a data type.
- """
- if expected is None:
- expected = dict()
-
- if nullable:
- datatype = f"Nullable({datatype})"
- values.append(NULL)
-
- if expected:
- expected["all"] = expected['all'].rsplit("]", 1)[0] + ", (None, )]"
- expected[NULL] = "[(None, )]"
-
- with Given("PyODBC connection"):
- with Given(f"parameters", description=f"""
- values {values}
- expected data {expected}
- """, format_description=False):
-
- with Given(f"table with a column of data type {datatype}"):
- connection.query("DROP TABLE IF EXISTS ps", fetch=False)
- connection.query(f"CREATE TABLE ps (v {datatype}) ENGINE = Memory", fetch=False)
- try:
- connection.connection.setencoding(encoding=encoding)
- for v in values:
- with When(f"I insert value {repr(v)}", flags=TE, format_name=False):
- # connection.query("INSERT INTO ps VALUES (?)", [v], fetch=False)
- if quote:
- connection.query(f"INSERT INTO ps VALUES ('{repr(v)}')", fetch=False)
- else:
- connection.query(f"INSERT INTO ps VALUES ({repr(v)})", fetch=False)
-
- with When("I select all values", flags=TE):
- rows = connection.query("SELECT * FROM ps ORDER BY v")
- if expected.get("all") is not None:
- with Then(f"the result is {expected.get('all')}", flags=TE, format_name=False):
- assert repr(rows) == expected.get("all"), error("result did not match")
-
- with When(f"I have values {repr(values)}", format_name=False):
- for v in values:
- if v is NULL:
- # comparing to NULL is not valid in SQL
- continue
- with When(f"I select value {repr(v)}", flags=TE, format_name=False):
- rows = connection.query("SELECT * FROM ps WHERE v = ? ORDER BY v", [v])
- if expected.get(v) is not None:
- with Then(f"the result is {repr(expected.get(v))}", flags=TE, format_name=False):
- assert repr(rows) == expected.get(v), error("result did not match")
- finally:
- connection.connection.setencoding(encoding=connection.encoding)
- connection.query("DROP TABLE ps", fetch=False)
-
-@TestScenario
-def sanity_check(self, connection):
- """Check connection to the database.
- """
- with Given("PyODBC connection"):
- with When("I do 'SELECT 1'"):
- rows = connection.query("SELECT 1")
-
- result = "[(1, )]"
- with Then(f"the result is {result}", format_name=False):
- assert repr(rows) == result, error("result dit not match")
-
-@TestScenario
-@Requirements(RQ_SRS_003_ParameterizedQueries_DataType_Select_Int8("1.0"))
-def Int8(self, connection, nullable=False):
- """Verify support for Int8 data type."""
- check_datatype(connection, "Int8", [-128, 0, 127], expected={
- "all": "[(-128, ), (0, ), (127, )]",
- -128: "[(-128, )]",
- 0: "[(0, )]",
- 127: "[(127, )]"
- }, nullable=nullable)
-
-@TestScenario
-@Requirements(RQ_SRS_003_ParameterizedQueries_DataType_Select_Int16("1.0"))
-def Int16(self, connection, nullable=False):
- """Verify support for Int16 data type."""
- check_datatype(connection, "Int16", [-32768, 0, 32767], expected={
- "all": "[(-32768, ), (0, ), (32767, )]",
- -32768: "[(-32768, )]",
- 0: "[(0, )]",
- 32767: "[(32767, )]"
- }, nullable=nullable)
-
-@TestScenario
-@Requirements(RQ_SRS_003_ParameterizedQueries_DataType_Select_Int32("1.0"))
-def Int32(self, connection, nullable=False):
- """Verify support for Int32 data type."""
- check_datatype(connection, "Int32", [-2147483648, 0, 2147483647], expected={
- "all": "[(-2147483648, ), (0, ), (2147483647, )]",
- -2147483648: "[(-2147483648, )]",
- 0: "[(0, )]",
- 2147483647: "[(2147483647, )]"
- }, nullable=nullable)
-
-@TestScenario
-@Requirements(RQ_SRS_003_ParameterizedQueries_DataType_Select_Int64("1.0"))
-def Int64(self, connection, nullable=False):
- """Verify support for Int64 data type."""
- check_datatype(connection, "Int64", [-9223372036854775808, 0, 9223372036854775807], expected={
- "all": "[(-9223372036854775808, ), (0, ), (9223372036854775807, )]",
- -9223372036854775808: "[(-9223372036854775808, )]",
- 0: "[(0, )]",
- 9223372036854775807: "[(9223372036854775807, )]"
- }, nullable=nullable)
-
-@TestScenario
-@Requirements(RQ_SRS_003_ParameterizedQueries_DataType_Select_UInt8("1.0"))
-def UInt8(self, connection, nullable=False):
- """Verify support for UInt8 data type."""
- check_datatype(connection, "UInt8", [0, 255], expected={
- "all": "[(0, ), (255, )]",
- 0: "[(0, )]",
- 255: "[(255, )]"
- }, nullable=nullable)
-
-@TestScenario
-@Requirements(RQ_SRS_003_ParameterizedQueries_DataType_Select_UInt16("1.0"))
-def UInt16(self, connection, nullable=False):
- """Verify support for UInt16 data type."""
- check_datatype(connection, "UInt16", [0, 65535], expected={
- "all": "[(0, ), (65535, )]",
- 0: "[(0, )]",
- 65535: "[(65535, )]"
- }, nullable=nullable)
-
-@TestScenario
-@Requirements(RQ_SRS_003_ParameterizedQueries_DataType_Select_UInt32("1.0"))
-def UInt32(self, connection, nullable=False):
- """Verify support for UInt32 data type."""
- check_datatype(connection, "UInt32", [0, 4294967295], expected={
- "all": "[(0, ), (4294967295, )]",
- 0: "[(0, )]",
- 4294967295: "[(4294967295, )]"
- }, nullable=nullable)
-
-@TestScenario
-@Requirements(RQ_SRS_003_ParameterizedQueries_DataType_Select_UInt64("1.0"))
-def UInt64(self, connection, nullable=False):
- """Verify support for UInt64 data type."""
- check_datatype(connection, "UInt64", [0, 18446744073709551615], expected={
- "all": "[(0, ), (18446744073709551615, )]",
- 0: "[(0, )]",
- 18446744073709551615: "[(18446744073709551615, )]"
- }, nullable=nullable)
-
-@TestScenario
-@Requirements(
- RQ_SRS_003_ParameterizedQueries_DataType_Select_Float32("1.0"),
- RQ_SRS_003_ParameterizedQueries_DataType_Select_Float32_Inf("1.0"),
- RQ_SRS_003_ParameterizedQueries_DataType_Select_Float32_NaN("1.0")
-)
-def Float32(self, connection, nullable=False):
- """Verify support for Float32 data type."""
- check_datatype(connection, "Float32", [-1, 0, float("inf"), float("-inf"), float("nan"), 13.26], expected={
- "all": "[(-inf, ), (-1.0, ), (0.0, ), (13.26, ), (inf, ), (nan, )]",
- 0: "[(0.0, )]",
- -1: "[(-1.0, )]",
- 13.26: "[(13.26, )]",
- float("inf"): "[(inf, )]",
- float("-inf"): "[(-inf, )]",
- float("nan"): "[(nan, )]"
- }, nullable=nullable)
-
-@TestScenario
-@Requirements(
- RQ_SRS_003_ParameterizedQueries_DataType_Select_Float64("1.0"),
- RQ_SRS_003_ParameterizedQueries_DataType_Select_Float64_Inf("1.0"),
- RQ_SRS_003_ParameterizedQueries_DataType_Select_Float64_NaN("1.0")
-)
-def Float64(self, connection, nullable=False):
- """Verify support for Float64 data type."""
- check_datatype(connection, "Float64", [-1, 0, float("inf"), 13.26, float("-inf"), float("nan")], expected={
- "all": "[(-inf, ), (-1.0, ), (0.0, ), (13.26, ), (inf, ), (nan, )]",
- 0: "[(0.0, )]",
- -1: "[(-1.0, )]",
- 13.26: "[(13.26, )]",
- float("inf"): "[(inf, )]",
- float("-inf"): "[(-inf, )]",
- float("nan"): "[(nan, )]"
- }, nullable=nullable)
-
-@TestScenario
-@Requirements(RQ_SRS_003_ParameterizedQueries_DataType_Select_Decimal32("1.0"))
-def Decimal32(self, connection, nullable=False):
- """Verify support for Decimal32 data type."""
- expected = {
- "all": "[(Decimal('-99999.9999'), ), (Decimal('10.1234'), ), (Decimal('99999.9999'), )]",
- decimal.Decimal('-99999.9999'): "[(Decimal('-99999.9999'), )]",
- decimal.Decimal('10.1234'): "[(Decimal('10.1234'), )]",
- decimal.Decimal('99999.9999'): "[(Decimal('99999.9999'), )]"
- }
-
- check_datatype(connection, "Decimal32(4)", [
- decimal.Decimal('-99999.9999'),
- decimal.Decimal('10.1234'),
- decimal.Decimal('99999.9999')
- ], expected=expected, nullable=nullable)
-
-@TestScenario
-@Requirements(RQ_SRS_003_ParameterizedQueries_DataType_Select_Decimal64("1.0"))
-def Decimal64(self, connection, nullable=False):
- """Verify support for Decimal64 data type."""
- expected = {
- "all": "[(Decimal('-99999999999999.9999'), ), (Decimal('10.1234'), ), (Decimal('99999999999999.9999'), )]",
- decimal.Decimal('-99999999999999.9999'): "[(Decimal('-99999999999999.9999'), )]",
- decimal.Decimal('10.1234'): "[(Decimal('10.1234'), )]",
- decimal.Decimal('99999999999999.9999'): "[(Decimal('99999999999999.9999'), )]"
- }
-
- check_datatype(connection, "Decimal64(4)", [
- decimal.Decimal('-99999999999999.9999'),
- decimal.Decimal('10.1234'),
- decimal.Decimal('99999999999999.9999')
- ], expected=expected, nullable=nullable)
-
-@TestScenario
-@Requirements(RQ_SRS_003_ParameterizedQueries_DataType_Select_Decimal128("1.0"))
-def Decimal128(self, connection, nullable=False):
- """Verfiy support for Decimal128 data type."""
- expected = {
- "all": "[(Decimal('-9999999999999999999999999999999999.9999'), ), (Decimal('10.1234'), ), (Decimal('9999999999999999999999999999999999.9999'), )]",
- decimal.Decimal('-9999999999999999999999999999999999.9999'): "[(Decimal('-9999999999999999999999999999999999.9999'), )]",
- decimal.Decimal('10.1234'): "[(Decimal('10.1234'), )]",
- decimal.Decimal('9999999999999999999999999999999999.9999'): "[(Decimal('9999999999999999999999999999999999.9999'), )]"
- }
-
- check_datatype(connection, "Decimal128(4)", [
- decimal.Decimal('-9999999999999999999999999999999999.9999'),
- decimal.Decimal('10.1234'),
- decimal.Decimal('9999999999999999999999999999999999.9999')
- ], expected=expected, nullable=nullable)
-
-@TestScenario
-@Requirements(RQ_SRS_003_ParameterizedQueries_DataType_Select_String("1.0"))
-def String(self, connection, nullable=False):
- """Verify support for String data type."""
-
- with Scenario("empty",
- description="Check empty string.",
- flags=TE,
- requirements=[RQ_SRS_003_ParameterizedQueries_DataType_Select_String_Empty("1.0")]):
-
- with Scenario("utf-8", flags=TE, description="UTF-8 encoding"):
- values = ["", b''.decode("utf-8")]
- expected = {
- "all": f"[('{values[0]}', ), ('{values[1]}', )]",
- values[0]: f"[('{values[0]}', ), ('{values[1]}', )]",
- values[1]: f"[('{values[0]}', ), ('{values[1]}', )]"
- }
- check_datatype(connection, "String", values=values, expected=expected,
- encoding="utf-8", quote=True, nullable=nullable)
-
- with Scenario("ascii", flags=TE, description="ASCII encoding."):
- values = ["", b''.decode("ascii")]
- expected = {
- "all": f"[('{values[0]}', ), ('{values[1]}', )]",
- values[0]: f"[('{values[0]}', ), ('{values[1]}', )]",
- values[1]: f"[('{values[0]}', ), ('{values[1]}', )]"
- }
- check_datatype(connection, "String", values=values, expected=expected,
- encoding="ascii", quote=True, nullable=nullable)
-
- with Scenario("utf8",
- flags=TE,
- requirements=[RQ_SRS_003_ParameterizedQueries_DataType_Select_String_UTF8("1.0")],
- description="Check UTF-8 encoding."
- ):
- values = [
- "hello",
- (b'\xe5\x8d\xb0\xe5\x88\xb7\xe5\x8e\x82\xe6\x8b\xbf\xe8\xb5\xb7'
- b'\xe4\xb8\x80\xe4\xb8\xaa\xe6\xa0\xb7\xe6\x9d\xbf\xe9\x97\xb4'
- b'\xef\xbc\x8c\xe7\x84\xb6\xe5\x90\x8e\xe5\xb0\x86\xe5\x85\xb6'
- b'\xe6\x89\x93\xe6\x8b\xbc\xe6\x88\x90\xe6\xa0\xb7\xe6\x9c\xac'
- b'\xe3\x80\x82 \xe5\xae\x83\xe4\xb8\x8d\xe4\xbb\x85\xe7\x94\x9f'
- b'\xe5\xad\x98\xe4\xba\x86\xe4\xba\x94\xe4\xb8\xaa\xe4\xb8\x96'
- b'\xe7\xba\xaa\xef\xbc\x8c\xe8\x80\x8c\xe4\xb8\x94\xe5\x9c\xa8'
- b'\xe7\x94\xb5\xe5\xad\x90\xe6\x8e\x92\xe7\x89\x88\xe6\x96\xb9'
- b'\xe9\x9d\xa2\xe4\xb9\x9f\xe5\x8f\x96\xe5\xbe\x97\xe4\xba\x86'
- b'\xe9\xa3\x9e\xe8\xb7\x83\xef\xbc\x8c\xe4\xbd\x86\xe5\x9f\xba'
- b'\xe6\x9c\xac\xe4\xb8\x8a\xe6\xb2\xa1\xe6\x9c\x89\xe6\x94\xb9'
- b'\xe5\x8f\x98\xe3\x80\x82 \xe5\xae\x83\xe5\x9c\xa81960\xe5\xb9'
- b'\xb4\xe4\xbb\xa3\xe9\x9a\x8f\xe7\x9d\x80Letraset\xe5\xba\x8a'
- b'\xe5\x8d\x95\xe7\x9a\x84\xe5\x8f\x91\xe5\xb8\x83\xe8\x80\x8c'
- b'\xe6\x99\xae\xe5\x8f\x8a\xef\xbc\x8c\xe5\x85\xb6\xe4\xb8\xad'
- b'\xe5\x8c\x85\xe5\x90\xabLerem Ipsum\xe6\xae\xb5\xe8\x90\xbd'
- b'\xe7\xad\x89').decode("utf-8")
- ]
- expected = {
- "all": f"[('{values[0]}', ), ('{values[1]}', )]",
- values[0]: f"[('{values[0]}', )]",
- values[1]: f"[('{values[1]}', )]"
- }
- check_datatype(connection, "String", values=values, expected=expected,
- encoding="utf-8", quote=True, nullable=nullable)
-
- with Scenario("ascii",
- flags=TE,
- requirements=[RQ_SRS_003_ParameterizedQueries_DataType_Select_String_ASCII("1.0")],
- description="Check ASCII encoding."
- ):
- values = [
- "hello",
- r' !"#$%%&()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[]^_`abcdefghijklmnopqrstuvwxyz{|}~'
- ]
- expected = {
- "all": f"[('{values[1]}', ), ('{values[0]}', )]",
- values[0]: f"[('{values[0]}', )]",
- values[1]: f"[('{values[1]}', )]"
- }
- check_datatype(connection, "String", values=values, expected=expected,
- encoding="ascii", quote=True, nullable=nullable)
-
- with Scenario("binary",
- flags=TE,
- requirements=[RQ_SRS_003_ParameterizedQueries_DataType_Select_String_Binary("1.0")],
- description="Check binary data."
- ):
- values = [
- "\x00\x01\x02\0x03\x00\x00\xFF"
- ]
- expected = {
- "all": f"[('{values[0]}', )]",
- values[0]: f"[('{values[0]}', )]",
- }
- check_datatype(connection, "String", values=values, expected=expected, encoding="ascii", quote=False, nullable=nullable)
-
-@TestScenario
-@Requirements(RQ_SRS_003_ParameterizedQueries_DataType_Select_FixedString("1.0"))
-def FixedString(self, connection, nullable=False):
- """Verify support for FixedString data type."""
- with Scenario("utf8", flags=TE, description="UTF-8 encoding"):
- values = [
- "",
- "hello",
- (b'\xe5\x8d\xb0\xe5\x88\xb7\xe5\x8e\x82\xe6\x8b\xbf\xe8\xb5\xb7').decode("utf-8")
- ]
- expected = {
- "all": f"[('\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00', ), ('hello\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00', ), ('{values[2]}\\x00', )]",
- values[0]: "[('\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00', )]",
- values[1]: "[('hello\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00', )]",
- values[2]: f"[('{values[2]}\\x00', )]"
- }
- check_datatype(connection, "FixedString(16)", values=values, expected=expected,
- encoding="utf-8", quote=True, nullable=nullable)
-
- with Scenario("ascii", flags=TE, description="ASCII encoding."):
- values = [
- "",
- "hello",
- "ABCDEFGHIJKLMN"
- ]
- expected = {
- "all": "[('\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00', ), ('ABCDEFGHIJKLMN\\x00\\x00', ), ('hello\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00', )]",
- values[0]: "[('\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00', )]",
- values[1]: "[('hello\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00', )]",
- values[2]: "[('ABCDEFGHIJKLMN\\x00\\x00', )]"
- }
- check_datatype(connection, "FixedString(16)", values=values, expected=expected,
- encoding="ascii", quote=True, nullable=nullable)
-
-@TestScenario
-@Requirements(RQ_SRS_003_ParameterizedQueries_DataType_Select_Date("1.0"))
-def Date(self, connection, nullable=False):
- """Verify support for Date date type."""
- values = [
- datetime.date(1970, 3, 3),
- datetime.date(2000, 12, 31),
- datetime.date(2024, 5, 5)
- ]
- expected = {
- "all": "[(datetime.date(1970, 3, 3), ), (datetime.date(2000, 12, 31), ), (datetime.date(2024, 5, 5), )]",
- values[0]: "[(datetime.date(1970, 3, 3), )]",
- values[1]: "[(datetime.date(2000, 12, 31), )]",
- values[2]: "[(datetime.date(2024, 5, 5), )]"
- }
- check_datatype(connection, "Date", values=values, expected=expected, quote=True, nullable=nullable)
-
-@TestScenario
-@Requirements(RQ_SRS_003_ParameterizedQueries_DataType_Select_DateTime("1.0"))
-def DateTime(self, connection, nullable=False):
- """Verify support for DateTime data type."""
- values = [
- datetime.datetime(1970, 3, 3, 0, 0, 0),
- datetime.datetime(2000, 12, 31, 23, 59, 59),
- datetime.datetime(2024, 5, 5, 13, 31, 32)
- ]
- expected = {
- "all": "[(datetime.datetime(1970, 3, 3, 0, 0), ), (datetime.datetime(2000, 12, 31, 23, 59, 59), ), (datetime.datetime(2024, 5, 5, 13, 31, 32), )]",
- values[0]: "[(datetime.datetime(1970, 3, 3, 0, 0), )]",
- values[1]: "[(datetime.datetime(2000, 12, 31, 23, 59, 59), )]",
- values[2]: "[(datetime.datetime(2024, 5, 5, 13, 31, 32), )]"
- }
- check_datatype(connection, "DateTime", values=values, expected=expected, quote=True, nullable=nullable)
-
-@TestScenario
-@Requirements(RQ_SRS_003_ParameterizedQueries_DataType_Select_Enum("1.0"))
-def Enum(self, connection, nullable=False):
- """Verify support for Enum data type."""
- with Scenario("utf8", flags=TE, description="UTF-8 encoding"):
- key0 = b'\xe5\x8d\xb0'.decode('utf-8')
- key1 = b'\xe5\x88\xb7'.decode('utf-8')
- check_datatype(connection, f"Enum('{key0}' = 1, '{key1}' = 2)", [key0, key1], expected={
- "all": f"[('{key0}', ), ('{key1}', )]",
- key0: f"[('{key0}', )]",
- key1: f"[('{key1}', )]"
- }, encoding="utf-8", quote=True, nullable=nullable)
-
- with Scenario("ascii", flags=TE, description="ASCII encoding"):
- check_datatype(connection, "Enum('hello' = 1, 'world' = 2)", ["hello", "world"], expected={
- "all": "[('hello', ), ('world', )]",
- "hello": "[('hello', )]",
- "world": "[('world', )]"
- }, encoding="ascii", quote=True, nullable=nullable)
-
-@TestScenario
-@Requirements(RQ_SRS_003_ParameterizedQueries_DataType_Select_UUID("1.0"))
-def UUID(self, connection, nullable=False):
- """Verify support for UUID data type."""
- uuid0 = "417ddc5d-e556-4d27-95dd-a34d84e46a50"
- uuid1 = "417ddc5d-e556-4d27-95dd-a34d84e46a51"
- uuid2 = uuid.UUID('1dc3c592-f333-11e9-bedd-2477034de0ec')
-
- values = [uuid0, uuid1, uuid2]
- expected = {
- "all": f"[('{uuid0}', ), ('{uuid1}', ), ('{uuid2}', )]",
- uuid0: f"[('{uuid0}', )]",
- uuid1: f"[('{uuid1}', )]",
- uuid2: f"[('{uuid2}', )]"
- }
- check_datatype(connection, "UUID", values=values, expected=expected, quote=True, nullable=nullable)
-
-@TestScenario
-@Requirements(RQ_SRS_003_ParameterizedQueries_DataType_Select_IPv4("1.0"))
-def IPv4(self, connection, nullable=False):
- """Verify support for IPv4 data type."""
- ipv40 = "116.106.34.242"
- ipv41 = "116.253.40.133"
-
- values = [ipv40, ipv41]
- expected = {
- "all": f"[('{ipv40}', ), ('{ipv41}', )]",
- ipv40: f"[('{ipv40}', )]",
- ipv41: f"[('{ipv41}', )]"
- }
- check_datatype(connection, "IPv4", values=values, expected=expected, quote=True, nullable=nullable)
-
-@TestScenario
-@Requirements(RQ_SRS_003_ParameterizedQueries_DataType_Select_IPv6("1.0"))
-def IPv6(self, connection, nullable=False):
- """Verify support for IPv6 data type."""
- ipv60 = "2001:44c8:129:2632:33:0:252:2"
- ipv61 = "2a02:e980:1e::1"
-
- values = [ipv60, ipv61]
- expected = {
- "all": f"[('{ipv60}', ), ('{ipv61}', )]",
- ipv60: f"[('{ipv60}', )]",
- ipv61: f"[('{ipv61}', )]"
- }
- check_datatype(connection, "IPv6", values=values, expected=expected, quote=True, nullable=nullable)
-
-@TestFeature
-@Requirements(RQ_SRS_003_ParameterizedQueries_DataType_Select_Nullable("1.0"))
-def nullable(self):
- """Check support for Nullable data types."""
- Feature(test=datatypes)(nullable=True)
-
-@TestFeature
-@Requirements(RQ_SRS_003_ParameterizedQueries_DataTypes("1.0"))
-def datatypes(self, nullable=False):
- """Check clickhouse-odbc driver support for parameterized
- queries with various data types using pyodbc connector.
- """
- with Logs() as logs, PyODBCConnection(logs=logs) as connection:
- args = {"connection": connection, "nullable": nullable}
-
- Scenario("Sanity check", run=sanity_check, args={"connection": connection})
- Scenario("Int8", run=Int8, args=args, flags=TE)
- Scenario("Int16", run=Int16, args=args, flags=TE)
- Scenario("Int32", run=Int32, args=args, flags=TE)
- Scenario("Int64", run=Int64, args=args, flags=TE)
- Scenario("UInt8", run=UInt8, args=args, flags=TE)
- Scenario("UInt16", run=UInt16, args=args, flags=TE)
- Scenario("UInt32", run=UInt32, args=args, flags=TE)
- Scenario("UInt64", run=UInt64, args=args, flags=TE)
- Scenario("Float32", run=Float32, args=args, flags=TE)
- Scenario("Float64", run=Float64, args=args, flags=TE)
- Scenario("Decimal32", run=Decimal32, args=args, flags=TE)
- Scenario("Decimal64", run=Decimal64, args=args, flags=TE)
- Scenario("Decimal128", run=Decimal128, args=args, flags=TE)
- Scenario("String", run=String, args=args, flags=TE)
- Scenario("FixedString", run=FixedString, args=args, flags=TE)
- Scenario("Date", run=Date, args=args, flags=TE)
- Scenario("DateTime", run=DateTime, args=args, flags=TE)
- Scenario("Enum", run=Enum, args=args, flags=TE)
- Scenario("UUID", run=UUID, args=args, flags=TE)
- Scenario("IPv4", run=IPv4, args=args, flags=TE)
- Scenario("IPv6", run=IPv6, args=args, flags=TE)
diff --git a/test/parameterized/parameterized/funcvalues.py b/test/parameterized/parameterized/funcvalues.py
deleted file mode 100644
index 4993829a8..000000000
--- a/test/parameterized/parameterized/funcvalues.py
+++ /dev/null
@@ -1,65 +0,0 @@
-import datetime
-
-from testflows.core import TestFeature, TestScenario
-from testflows.core import Scenario, Given, When, Then
-from testflows.core import Requirements, Name, TE
-from testflows.asserts import error
-from utils import Logs, PyODBCConnection
-
-from requirements.QA_SRS003_ParameterizedQueries import *
-
-@TestScenario
-def isNull(self, connection):
- """Verify support for isNull function."""
- values = [
- "hello", b'\xe5\x8d\xb0'.decode('utf-8'),
- -1, 0, 255,
- 1.0, 0.0, -1.0,
- datetime.date(2000, 12, 31), datetime.datetime(2000, 12, 31, 23, 59, 59),
- ]
- with Given("PyODBC connection"):
- for value in values:
- query = "SELECT isNull(?)"
- with When(f"I run '{query}' with {repr(value)} parameter"):
- rows = connection.query(query, [value])
- expected = "[(0, )]"
- with Then(f"the result is {expected}", flags=TE):
- assert repr(rows) == expected, error("result did not match")
-
-@TestScenario
-@Requirements(RQ_SRS_003_ParameterizedQueries_DataType_Select_Nullable_NULL("1.0"))
-def Null(self, connection):
- """Verify support for handling NULL value."""
- with Given("PyODBC connection"):
- query = "SELECT isNull(?)"
- with When(f"I run '{query}' with [None] parameter", flags=TE):
- rows = connection.query(query, [None])
- expected = "[(1, )]"
- with Then(f"the result is {expected}", flags=TE):
- assert repr(rows) == expected, error("result did not match")
-
- query = "SELECT arrayReduce('count', [?, ?])"
- with When(f"I run '{query}' with [None, None] parameter", flags=TE):
- rows = connection.query(query, [None, None])
- expected = "[(0, )]"
- with Then(f"the result is {expected}", flags=TE):
- assert repr(rows) == expected, error("result did not match")
-
- query = "SELECT arrayReduce('count', [1, ?, ?])"
- with When(f"I run '{query}' with [1, None, None])", flags=TE):
- rows = connection.query(query, [1, None, None])
- expected = "[(1, )]"
- with Then(f"the result is {expected}", flags=TE):
- assert repr(rows) == expected, error("result did not match")
-
-@TestFeature
-@Name("functions and values")
-def funcvalues(self, nullable=False):
- """Check clickhouse-odbc driver support for parameterized
- queries with functions and values using pyodbc connector.
- """
- with Logs() as logs, PyODBCConnection(logs=logs) as connection:
- args = {"connection": connection}
-
- Scenario("isNull", run=isNull, args=args, flags=TE)
- Scenario("Null", run=Null, args=args, flags=TE)
diff --git a/test/parameterized/parameterized/sanity.py b/test/parameterized/parameterized/sanity.py
deleted file mode 100755
index 7227cda4b..000000000
--- a/test/parameterized/parameterized/sanity.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/usr/bin/env python3
-import datetime
-import decimal
-
-from testflows.core import TestScenario, Given, When, Then
-from testflows.core import TE
-from utils import PyODBCConnection
-
-@TestScenario
-def sanity(self):
- """clickhouse-odbc driver sanity suite to check support of parameterized
- queries using pyodbc connector.
- """
- with PyODBCConnection() as conn:
- with Given("PyODBC connection"):
- def query(query, *args, **kwargs):
- """Execute a query and check that it does not
- raise an exception.
- """
- with When(f"I execute '{query}'", flags=TE):
- with Then("it works"):
- conn.query(query, *args, **kwargs)
-
- with When("I want to do sanity check"):
- query("SELECT 1")
-
- table_schema = (
- "CREATE TABLE ps (i UInt8, ni Nullable(UInt8), s String, d Date, dt DateTime, "
- "f Float32, dc Decimal32(3), fs FixedString(8)) ENGINE = Memory"
- )
-
- with Given("table", description=f"Table schema {table_schema}", format_description=False):
- query("DROP TABLE IF EXISTS ps", fetch=False)
- query(table_schema, fetch=False)
- try:
- with When("I want to insert a couple of rows"):
- query("INSERT INTO ps VALUES (1, NULL, 'Hello, world', '2005-05-05', '2005-05-05 05:05:05', "
- "1.333, 10.123, 'fstring0')", fetch=False)
- query("INSERT INTO ps VALUES (2, NULL, 'test', '2019-05-25', '2019-05-25 15:00:00', "
- "1.433, 11.124, 'fstring1')", fetch=False)
- query("SELECT * FROM ps")
-
- with When("I want to select using parameter of type UInt8", flags=TE):
- query("SELECT * FROM ps WHERE i = ? ORDER BY i, s, d", [1])
-
- with When("I want to select using parameter of type Nullable(UInt8)", flags=TE):
- query("SELECT * FROM ps WHERE ni = ? ORDER BY i, s, d", [None])
-
- with When("I want to select using parameter of type String", flags=TE):
- query("SELECT * FROM ps WHERE s = ? ORDER BY i, s, d", ["Hello, world"])
-
- with When("I want to select using parameter of type Date", flags=TE):
- query("SELECT * FROM ps WHERE d = ? ORDER BY i, s, d", [datetime.date(2019,5,25)])
-
- with When("I want to select using parameter of type DateTime", flags=TE):
- query("SELECT * FROM ps WHERE dt = ? ORDER BY i, s, d", [datetime.datetime(2005, 5, 5, 5, 5, 5)])
-
- with When("I want to select using parameter of type Float32", flags=TE):
- query("SELECT * FROM ps WHERE f = ? ORDER BY i, s, d", [1.333])
-
- with When("I want to select using parameter of type Decimal32(3)", flags=TE):
- query("SELECT * FROM ps WHERE dc = ? ORDER BY i, s, d", [decimal.Decimal('10.123')])
-
- with When("I want to select using parameter of type FixedString(8)", flags=TE):
- query("SELECT * FROM ps WHERE fs = ? ORDER BY i, s, d", [u"fstring0"])
-
- with When("I want to select using parameters of type UInt8 and String", flags=TE):
- query("SELECT * FROM ps WHERE i = ? and s = ? ORDER BY i, s, d", [2, "test"])
-
- with When("I want to select using parameters of type UInt8, String, and Date", flags=TE):
- query("SELECT * FROM ps WHERE i = ? and s = ? and d = ? ORDER BY i, s, d",
- [2, "test", datetime.date(2019,5,25)])
- finally:
- query("DROP TABLE ps", fetch=False)
diff --git a/test/parameterized/regression.py b/test/parameterized/regression.py
deleted file mode 100755
index e6a3eb7a9..000000000
--- a/test/parameterized/regression.py
+++ /dev/null
@@ -1,123 +0,0 @@
-#!/usr/bin/env python3
-import os
-
-from testflows.core import TestModule, TestFeature, Module, Feature, Scenario, Requirements
-from testflows.core import Name, Fail, Error, load
-from testflows.core import main, TE
-from requirements.QA_SRS003_ParameterizedQueries import *
-
-@TestFeature
-@Requirements(
- RQ_SRS_003_ParameterizedQueries("1.0"),
- RQ_SRS_003_ParameterizedQueries_pyodbc("1.0"),
- RQ_SRS_003_ParameterizedQueries_Syntax_Select_Parameters("1.0")
-)
-def parameterized(self):
- """Test suite for clickhouse-odbc support of parameterized queries.
- """
- dsn = os.getenv("DSN", "ClickHouse DSN (ANSI)")
- with Feature(f"{dsn}", flags=TE):
- Scenario(run=load("parameterized.sanity", test="sanity"), flags=TE)
- Feature(run=load("parameterized.datatypes", test="datatypes"), flags=TE)
- Feature(run=load("parameterized.datatypes", test="nullable"), flags=TE)
- Feature(run=load("parameterized.funcvalues", test="funcvalues"), flags=TE)
-
-@TestModule
-def regression(self):
- """The regression module for clickhouse-odbc driver.
- """
- Feature(run=parameterized, flags=TE)
-
-if main():
- xfails = {
- "/regression/parameterized/:/sanity/PyODBC connection/table/I want to select using parameter of type Nullable:":
- [(Fail, "Nullable type still not supported")],
-
- "/regression/parameterized/:/sanity/PyODBC connection/table/I want to select using parameter of type Decimal:":
- [(Fail, "Decimal type still not supported")],
-
- "/regression/parameterized/*/datatypes/Int64/"
- "PyODBC connection/parameters/table with a column of data type Int64/"
- "*/I select value -9223372036854775808/*":
- [(Fail, "Int64 large negative value not supported")],
-
- "/regression/parameterized/*/datatypes/Float32/"
- "PyODBC connection/parameters/table with a column of data type Float32/"
- "*/I select value 13.26/*":
- [(Fail, "Selecting Float32 values is not supported")],
-
- "/regression/parameterized/*/datatypes/Float:/"
- "PyODBC connection/parameters/table with a column of data type Float:/"
- "*/I select value nan":
- [(Fail, "Selecting value nan is not supported")],
-
- "/regression/parameterized/*/datatypes/FixedString/:/"
- "PyODBC connection/parameters/table with a column of data type FixedString:/"
- "I have values:/I select value:":
- [(Fail, "Selecting FixedString is not supported due to lack of toFixedString conversion")],
-
- "*/I select value 18446744073709551615":
- [
- (Error, "UInt64 large value not supported"),
- (Fail, "UInt64 large value not supported")
- ],
-
- "*/I select value 9999999999999999999999999999999999.9999":
- [(Fail, "Decimal128 internal overflow")],
-
- "*/I select value -9999999999999999999999999999999999.9999":
- [(Fail, "Decimal128 internal overflow")],
-
- "*/I run 'SELECT isNull(?)' with '印' parameter":
- [(Fail, "Unexpected Unicode string truncation - should be fixed")],
-
- "/regression/parameterized/*/datatypes/IPv4":
- [(Fail, "IPv4 is not supported")],
-
- "/regression/parameterized/*/datatypes/IPv6":
- [(Fail, "IPv6 is not supported")],
-
- "/regression/parameterized/*/datatypes/UUID"
- "/PyODBC connection/parameters/table with a column of data type UUID/"
- "I have values */I select value *":
-            [(Fail, "UUID value selection is not supported due to incorrect type conversion to UInt128")],
-
- "/regression/parameterized/*/datatypes/String/binary":
- [
- (Error, "Test procedure is not correct"),
- (Fail, "Test procedure is not correct")
- ],
-
- "/regression/parameterized/:/nullable/datatypes/:":
- [
- (Error, "Nullables are not supported"),
- (Fail, "Nullables are not supported")
- ],
-
- "/regression/parameterized/:/nullable/datatypes/String/empty/utf-8":
- [ (Fail, "Known failure")],
- "/regression/parameterized/:/nullable/datatypes/String/empty/ascii":
- [ (Fail, "Known failure")],
- "/regression/parameterized/:/nullable/datatypes/String/utf8":
- [ (Fail, "Known failure")],
- "/regression/parameterized/:/nullable/datatypes/String/ascii":
- [ (Fail, "Known failure")],
- "/regression/parameterized/:/nullable/datatypes/FixedString/utf8":
- [ (Fail, "Known failure")],
- "/regression/parameterized/:/nullable/datatypes/FixedString/ascii":
- [ (Fail, "Known failure")],
- "/regression/parameterized/:/nullable/datatypes/Enum/utf8":
- [ (Fail, "Known failure")],
- "/regression/parameterized/:/nullable/datatypes/Enum/ascii":
- [ (Fail, "Known failure")],
- "/regression/parameterized/:/functions and values/Null":
- [ (Fail, "Known failure")],
- "/regression/parameterized/:/datatypes/String/utf8":
- [ (Fail, "Known failure")],
- "/regression/parameterized/:/datatypes/FixedString/utf8":
- [ (Fail, "Known failure")],
- "/regression/parameterized/:/datatypes/Enum/utf8":
- [ (Fail, "Known failure")]
- }
-
- Module(run=regression, xfails=xfails)
diff --git a/test/parameterized/requirements/QA_SRS003_ParameterizedQueries.py b/test/parameterized/requirements/QA_SRS003_ParameterizedQueries.py
deleted file mode 100644
index be523e03c..000000000
--- a/test/parameterized/requirements/QA_SRS003_ParameterizedQueries.py
+++ /dev/null
@@ -1,579 +0,0 @@
-# These are auto generated requirements from an SRS document.
-# Do not edit by hand but re-generate instead
-# using "tfs requirement generate" command.
-#
-from testflows.core import Requirement
-
-RQ_SRS_003_ParameterizedQueries = Requirement(
- name='RQ.SRS-003.ParameterizedQueries',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support parameterized queries as described in [SQL Statement Parameters].\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataTypes = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataTypes',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'The ODBC driver SHALL support using parameters for all applicable [ClickHouse] data types.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_pyodbc = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.pyodbc',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support executing parameterized queries using [pyodbc] connector.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_unixODBC_isql = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.unixODBC.isql',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support executing parameterized queries using [isql] connector\n'
- 'from [unixODBC] project.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_unixODBC_iusql = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.unixODBC.iusql',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support executing parameterized queries using [iusql] connector\n'
- 'from [unixODBC] project.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_iODBC_iodbctest = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.iODBC.iodbctest',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support executing parameterized queries using [iodbctest] connector\n'
- 'from [iODBC] project.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_iODBC_iodbctestw = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.iODBC.iodbctestw',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support executing parameterized queries using [iodbctestw] connector\n'
- 'from [iODBC] project.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_Syntax_Select_Parameters = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.Syntax.Select.Parameters',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support parameters in the `SELECT` statement using the syntax\n'
- 'SELECT PartID, Description, Price FROM Parts WHERE PartID = ? AND Description = ? AND Price = ? \n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_Int8 = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.Int8',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns \n'
- 'with `Int8` data type having ranges `[-128 : 127]`.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_Int16 = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.Int16',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns \n'
- 'with `Int16` data type having ranges `[-32768 : 32767]`.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_Int32 = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.Int32',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `Int32` data type having ranges `[-2147483648 : 2147483647]`.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_Int64 = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.Int64',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `Int64` data type having ranges `[-9223372036854775808 : 9223372036854775807]`.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_UInt8 = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.UInt8',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `UInt8` data type having ranges `[0 : 255]`.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_UInt16 = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.UInt16',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `UInt16` data type having ranges `[0 : 65535]`.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_UInt32 = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.UInt32',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `UInt32` data type having ranges `[0 : 4294967295]`.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_UInt64 = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.UInt64',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `UInt64` data type having ranges `[0 : 18446744073709551615]`.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_Float32 = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.Float32',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `Float32` data type.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_Float32_Inf = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.Float32.Inf',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns \n'
- 'with `Float32` data type having value \n'
- '`Inf` (positive infinity) and `-Inf` (negative infinity).\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_Float32_NaN = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.Float32.NaN',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns \n'
-        'with `Float32` data type having value `NaN` (not a number).\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_Float64 = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.Float64',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `Float64` data type.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_Float64_Inf = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.Float64.Inf',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `Float64` data type having value \n'
- '`Inf` (positive infinity) and `-Inf` (negative infinity).\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_Float64_NaN = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.Float64.NaN',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `Float64` data type having value `NaN` (not a number).\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_Decimal32 = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.Decimal32',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `Decimal32` data type having ranges\n'
-        '`[-(1 * 10^(9 - S) - (1 / (10^S))) : 1 * 10^(9 - S) - (1 / (10^S))]`\n'
- '* **P** precision. Valid range: [ 1 : 38 ]. \n'
- 'Determines how many decimal digits number can have (including fraction).\n'
- '* **S** scale. Valid range: [ 0 : P ]. \n'
- 'Determines how many decimal digits fraction can have.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_Decimal64 = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.Decimal64',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns \n'
- 'with `Decimal64` data type having ranges\n'
-        '`[-(1 * 10^(18 - S) - (1 / (10^S))) : 1 * 10^(18 - S) - (1 / (10^S))]`\n'
- '* **P** precision. Valid range: [ 1 : 38 ]. \n'
- 'Determines how many decimal digits number can have (including fraction).\n'
- '* **S** scale. Valid range: [ 0 : P ]. \n'
- 'Determines how many decimal digits fraction can have.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_Decimal128 = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.Decimal128',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `Decimal128` data type having ranges\n'
-        '`[-(1 * 10^(38 - S) - (1 / (10^S))) : 1 * 10^(38 - S) - (1 / (10^S))]`\n'
- '* **P** precision. Valid range: [ 1 : 38 ]. \n'
- 'Determines how many decimal digits number can have (including fraction).\n'
- '* **S** scale. Valid range: [ 0 : P ]. \n'
- 'Determines how many decimal digits fraction can have.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_String = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.String',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `String` data type.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_String_ASCII = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.String.ASCII',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `String` data type containing ASCII encoded strings.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_String_UTF8 = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.String.UTF8',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `String` data type containing UTF-8 encoded strings.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_String_Unicode = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.String.Unicode',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `String` data type containing Unicode encoded strings.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_String_Binary = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.String.Binary',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `String` data type containing binary data.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_String_Empty = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.String.Empty',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `String` data type containing empty value.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_FixedString = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.FixedString',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `FixedString` data type.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_Date = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.Date',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `Date` data type.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_DateTime = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.DateTime',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `DateTime` data type.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_Enum = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.Enum',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `Enum` data type.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_UUID = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.UUID',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `UUID` data type and treat them like strings.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_IPv6 = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.IPv6',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `IPv6` data type and treat them like strings.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_IPv4 = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.IPv4',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `IPv4` data type and treat them like strings.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_Nullable = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.Nullable',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by the columns\n'
- 'with `Nullable` data type.\n'
- ),
- link=None
- )
-
-RQ_SRS_003_ParameterizedQueries_DataType_Select_Nullable_NULL = Requirement(
- name='RQ.SRS-003.ParameterizedQueries.DataType.Select.Nullable.NULL',
- version='1.0',
- priority=None,
- group=None,
- type=None,
- uid=None,
- description=(
- 'ODBC driver SHALL support using parameters for selecting columns and filtering by columns\n'
- 'with `Nullable` data type containing `NULL` value.\n'
- ),
- link=None
- )
diff --git a/test/parameterized/requirements/__init__.py b/test/parameterized/requirements/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/test/parameterized/utils/utils.py b/test/parameterized/utils/utils.py
deleted file mode 100644
index 49b8bfab9..000000000
--- a/test/parameterized/utils/utils.py
+++ /dev/null
@@ -1,87 +0,0 @@
-import os
-import time
-import pyodbc
-
-import testflows.settings as settings
-
-from contextlib import contextmanager
-from testflows.core import note, exception, fail
-from testflows.connect import Shell
-
-clickhouse_log_path = os.getenv("CLICKHOUSE_LOG", "/var/log/clickhouse-server/clickhouse-server.log")
-odbc_driver_trace_log_path = os.getenv("ODBC_DRIVER_TRACE_LOG", "/tmp/clickhouse-odbc-driver-trace.log")
-odbc_driver_w_trace_log_path = os.getenv("ODBC_DRIVER_W_TRACE_LOG", "/tmp/clickhouse-odbc-driver-w-trace.log")
-odbc_manager_trace_log_path = os.getenv("ODBC_MANAGER_TRACE_LOG", "/tmp/odbc-driver-manager-trace.log")
-
-@contextmanager
-def Logs():
- """ClickHouse and ODBC driver logs context manager.
- """
- class _Logs:
- def __init__(self, *args):
- self.logs = args
-
- def read(self, timeout=None):
- for l in self.logs:
- l.readlines(timeout=timeout)
-
- if not settings.debug:
- yield None
- else:
- with Shell(name="clickhouse-server.log") as bash0, \
- Shell(name="odbc-driver-trace.log") as bash1, \
- Shell(name="odbc-driver-w-trace.log") as bash2, \
- Shell(name="odbc-manager-trace.log") as bash3:
-
- bash1(f"touch {odbc_driver_trace_log_path}")
- bash2(f"touch {odbc_driver_w_trace_log_path}")
- bash3(f"touch {odbc_manager_trace_log_path}")
-
- with bash0(f"tail -f {clickhouse_log_path}", asyncronous=True, name="") as clickhouse_log, \
- bash1(f"tail -f {odbc_driver_trace_log_path}", asyncronous=True, name="") as odbc_driver_log, \
- bash2(f"tail -f {odbc_driver_w_trace_log_path}", asyncronous=True, name="") as odbc_driver_w_log, \
- bash3(f"tail -f {odbc_manager_trace_log_path}", asyncronous=True, name="") as odbc_manager_log:
- logs = _Logs(clickhouse_log, odbc_driver_log, odbc_driver_w_log, odbc_manager_log)
- logs.read()
- yield logs
-
-
-@contextmanager
-def PyODBCConnection(encoding="utf-8", logs=None):
- """PyODBC connector context manager.
- """
- dsn = os.getenv("DSN", "ClickHouse DSN (ANSI)")
-    note(f"Using DSN={dsn}")
- connection = pyodbc.connect(f"DSN={dsn};")
- try:
- class _Connection():
- def __init__(self, connection, encoding, logs=None):
- self.connection = connection
- self.logs = logs
- self.encoding = encoding
- self.connection.setencoding(encoding=self.encoding)
- if self.logs:
- self.logs.read()
-
- def query(self, q, params=[], fetch=True):
- try:
- note(f"query: {q}")
- cursor = self.connection.cursor()
- cursor.execute(q, *params)
- if fetch:
- rows = cursor.fetchall()
- for row in rows:
- note(row)
- return rows
- except pyodbc.Error as exc:
- exception()
- fail(str(exc))
- finally:
- if self.logs and settings.debug:
- # sleep 0.5 sec to let messages to be written to the logs
- time.sleep(0.5)
- self.logs.read(timeout=0.1)
-
- yield _Connection(connection, encoding, logs=logs)
- finally:
- connection.close()
diff --git a/test/pytest.ini b/test/pytest.ini
new file mode 100644
index 000000000..f211becc8
--- /dev/null
+++ b/test/pytest.ini
@@ -0,0 +1,7 @@
+[pytest]
+minversion = 8.0
+testpaths =
+ src
+filterwarnings =
+ ignore:There is no current event loop
+log_cli = true
diff --git a/test/requirements.txt b/test/requirements.txt
new file mode 100644
index 000000000..fd87f7b4d
--- /dev/null
+++ b/test/requirements.txt
@@ -0,0 +1,2 @@
+pyodbc==5.1.0
+pytest==8.3.3
diff --git a/test/parameterized/parameterized/__init__.py b/test/src/e2e/__init__.py
similarity index 100%
rename from test/parameterized/parameterized/__init__.py
rename to test/src/e2e/__init__.py
diff --git a/test/src/e2e/test_datatypes.py b/test/src/e2e/test_datatypes.py
new file mode 100755
index 000000000..fea5295b6
--- /dev/null
+++ b/test/src/e2e/test_datatypes.py
@@ -0,0 +1,499 @@
+import datetime
+import decimal
+import math
+import uuid
+
+import pytest
+
+from util import pyodbc_connection, create_table, rows_as_values
+
+
+# FIXME: None is converted to an empty string (probably by PyODBC itself?)
+# After the fix, Nullable test cases should be re-added
+# Sample error: Attempt to read after eof: while converting '' to UInt8. (ATTEMPT_TO_READ_AFTER_EOF)
+#
+# TODO:
+# Bool
+# (U)Int128
+# (U)Int256
+# Decimal256
+# DateTime64
+# Array
+# Tuple
+# Map
+# LowCardinality
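+#
+# A possible shape for a Nullable case to re-add once the conversion issue
+# above is fixed (untested sketch; table and column names are illustrative):
+#
+#   def test_nullable_uint8(self):
+#       table_name = "odbc_test_data_types_nullable_uint8"
+#       with (pyodbc_connection() as conn,
+#             create_table(conn, table_name, "n Nullable(UInt8)")):
+#           conn.insert(table_name, "(NULL), (0), (255)")
+#           rows = conn.query(f"SELECT * FROM {table_name} WHERE isNull(n)")
+#           assert len(rows) == 1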
+class TestDataTypes:
+ def test_int8(self):
+ table_name = "odbc_test_data_types_int8"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, "i Int8")):
+ values = [-128, 0, 127]
+ conn.insert(table_name, "(-128), (0), (127)")
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE i = ?", [value])
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [value]
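+                # cursor_description[0] describes the first result column:
+                # index 0 is the column name, index 1 the Python type that
+                # pyodbc mapped the ClickHouse type to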
+ assert rows[0].cursor_description[0][0] == "i"
+ assert rows[0].cursor_description[0][1] == int
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 3
+ assert rows_as_values(rows) == values
+
+ def test_int16(self):
+ table_name = "odbc_test_data_types_int16"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, "i Int16")):
+ values = [-32768, 0, 32767]
+ conn.insert(table_name, "(-32768), (0), (32767)")
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE i = ?", [value])
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [value]
+ assert rows[0].cursor_description[0][0] == "i"
+ assert rows[0].cursor_description[0][1] == int
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 3
+ assert rows_as_values(rows) == values
+
+ def test_int32(self):
+ table_name = "odbc_test_data_types_int32"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, "i Int32")):
+ values = [-2147483648, 0, 2147483647]
+ conn.insert(table_name, "(-2147483648), (0), (2147483647)")
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE i = ?", [value])
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [value]
+ assert rows[0].cursor_description[0][0] == "i"
+ assert rows[0].cursor_description[0][1] == int
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 3
+ assert rows_as_values(rows) == values
+
+ def test_int64(self):
+ table_name = "odbc_test_data_types_int64"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, "i Int64")):
+ values = [-9223372036854775808, 0, 9223372036854775807]
+ conn.insert(table_name, "(-9223372036854775808), (0), (9223372036854775807)")
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE i = ?", [value])
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [value]
+ assert rows[0].cursor_description[0][0] == "i"
+ assert rows[0].cursor_description[0][1] == int
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 3
+ assert rows_as_values(rows) == values
+
+ def test_uint8(self):
+ table_name = "odbc_test_data_types_uint8"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, "i UInt8")):
+ values = [0, 255]
+ conn.insert(table_name, "(0), (255)")
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE i = ?", [value])
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [value]
+ assert rows[0].cursor_description[0][0] == "i"
+ assert rows[0].cursor_description[0][1] == int
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 2
+ assert rows_as_values(rows) == values
+
+ def test_uint16(self):
+ table_name = "odbc_test_data_types_uint16"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, "i UInt16")):
+ values = [0, 65535]
+ conn.insert(table_name, "(0), (65535)")
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE i = ?", [value])
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [value]
+ assert rows[0].cursor_description[0][0] == "i"
+ assert rows[0].cursor_description[0][1] == int
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 2
+ assert rows_as_values(rows) == values
+
+ def test_uint32(self):
+ table_name = "odbc_test_data_types_uint32"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, "i UInt32")):
+ values = [0, 4294967295]
+ conn.insert(table_name, "(0), (4294967295)")
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE i = ?", [value])
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [value]
+ assert rows[0].cursor_description[0][0] == "i"
+ assert rows[0].cursor_description[0][1] == int
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 2
+ assert rows_as_values(rows) == values
+
+ def test_uint64(self):
+ table_name = "odbc_test_data_types_uint64"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, "i UInt64")):
+ values = [0, 18446744073709551615]
+ conn.insert(table_name, "(0), (18446744073709551615)")
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE i = ?",
+                                  [str(value)])  # UInt64 max overflows the signed 64-bit integer binding, so bind as a string
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [value]
+ assert rows[0].cursor_description[0][0] == "i"
+ assert rows[0].cursor_description[0][1] == int
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 2
+ assert rows_as_values(rows) == values
+
+ @pytest.mark.parametrize("ch_type", ["Float32", "Float64"])
+ def test_float(self, ch_type):
+ table_name = f"odbc_test_data_types_{ch_type.lower()}"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, f"f {ch_type}")):
+ values = [-1.0, 0.0, float("inf"), float("-inf"), 13.26] # NaN handled separately
+ conn.insert(table_name, "(-1), (0), (inf), (-inf), (nan), (13.26)")
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE f = ?",
+                                  [str(value)])  # bind as a string to avoid float round-trip precision issues
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [value]
+ assert rows[0].cursor_description[0][0] == "f"
+ assert rows[0].cursor_description[0][1] == float
+
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE isNaN(f)")
+ assert len(rows) == 1
+ assert math.isnan(rows_as_values(rows)[0])
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 6
+ result_values = rows_as_values(rows)
+ assert result_values[0] == -1.0
+ assert result_values[1] == 0.0
+ assert result_values[2] == float("inf")
+ assert result_values[3] == float("-inf")
+ assert math.isnan(result_values[4])
+ assert result_values[5] == 13.26
+
+ def test_decimal32(self):
+ table_name = "odbc_test_data_types_decimal32"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, "d Decimal32(4)")):
+ values = [decimal.Decimal("-99999.9999"),
+ decimal.Decimal("10.1234"),
+ decimal.Decimal("99999.9999")]
+ conn.insert(table_name, "(-99999.9999), (10.1234), (99999.9999)")
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE d = ?", [value])
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [value]
+ assert rows[0].cursor_description[0][0] == "d"
+ assert rows[0].cursor_description[0][1] == decimal.Decimal
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 3
+ assert rows_as_values(rows) == values
+
+ def test_decimal64(self):
+ table_name = "odbc_test_data_types_decimal64"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, "d Decimal64(4)")):
+ values = [decimal.Decimal("-99999999999999.9999"),
+ decimal.Decimal("10.1234"),
+ decimal.Decimal("99999999999999.9999")]
+ conn.insert(table_name, "(-99999999999999.9999), (10.1234), (99999999999999.9999)")
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE d = ?", [value])
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [value]
+ assert rows[0].cursor_description[0][0] == "d"
+ assert rows[0].cursor_description[0][1] == decimal.Decimal
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 3
+ assert rows_as_values(rows) == values
+
+ def test_decimal128(self):
+ table_name = "odbc_test_data_types_decimal128"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, "d Decimal128(4)")):
+ values = [decimal.Decimal("-9999999999999999999999999999999999.9999"),
+ decimal.Decimal("10.1234"),
+ decimal.Decimal("9999999999999999999999999999999999.9999")]
+ conn.insert(table_name, "(-9999999999999999999999999999999999.9999), (10.1234), (9999999999999999999999999999999999.9999)")
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE d = ?", [value])
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [value]
+ assert rows[0].cursor_description[0][0] == "d"
+ assert rows[0].cursor_description[0][1] == decimal.Decimal
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 3
+ assert rows_as_values(rows) == values
+
+ def test_string(self):
+ table_name = "odbc_test_data_types_string"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, "s String")):
+ values = ["", "hello", "world", "hello, world"]
+ conn.insert(table_name, "(''), ('hello'), ('world'), ('hello, world')")
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE s = ?", [value])
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [value]
+ assert rows[0].cursor_description[0][0] == "s"
+ assert rows[0].cursor_description[0][1] == str
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 4
+ assert rows_as_values(rows) == values
+
+ def test_string_utf8_and_binary(self):
+ table_name = "odbc_test_data_types_string_utf8"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, "s String")):
+ utf8_string1 = "¶"
+ utf8_string2 = (b'\xe5\x8d\xb0\xe5\x88\xb7\xe5\x8e\x82\xe6\x8b\xbf\xe8\xb5\xb7'
+ b'\xe4\xb8\x80\xe4\xb8\xaa\xe6\xa0\xb7\xe6\x9d\xbf\xe9\x97\xb4'
+ b'\xef\xbc\x8c\xe7\x84\xb6\xe5\x90\x8e\xe5\xb0\x86\xe5\x85\xb6'
+ b'\xe6\x89\x93\xe6\x8b\xbc\xe6\x88\x90\xe6\xa0\xb7\xe6\x9c\xac'
+ b'\xe3\x80\x82 \xe5\xae\x83\xe4\xb8\x8d\xe4\xbb\x85\xe7\x94\x9f'
+ b'\xe5\xad\x98\xe4\xba\x86\xe4\xba\x94\xe4\xb8\xaa\xe4\xb8\x96'
+ b'\xe7\xba\xaa\xef\xbc\x8c\xe8\x80\x8c\xe4\xb8\x94\xe5\x9c\xa8'
+ b'\xe7\x94\xb5\xe5\xad\x90\xe6\x8e\x92\xe7\x89\x88\xe6\x96\xb9'
+ b'\xe9\x9d\xa2\xe4\xb9\x9f\xe5\x8f\x96\xe5\xbe\x97\xe4\xba\x86'
+ b'\xe9\xa3\x9e\xe8\xb7\x83\xef\xbc\x8c\xe4\xbd\x86\xe5\x9f\xba'
+ b'\xe6\x9c\xac\xe4\xb8\x8a\xe6\xb2\xa1\xe6\x9c\x89\xe6\x94\xb9'
+ b'\xe5\x8f\x98\xe3\x80\x82 \xe5\xae\x83\xe5\x9c\xa81960\xe5\xb9'
+ b'\xb4\xe4\xbb\xa3\xe9\x9a\x8f\xe7\x9d\x80Letraset\xe5\xba\x8a'
+ b'\xe5\x8d\x95\xe7\x9a\x84\xe5\x8f\x91\xe5\xb8\x83\xe8\x80\x8c'
+ b'\xe6\x99\xae\xe5\x8f\x8a\xef\xbc\x8c\xe5\x85\xb6\xe4\xb8\xad'
+ b'\xe5\x8c\x85\xe5\x90\xabLerem Ipsum\xe6\xae\xb5\xe8\x90\xbd'
+ b'\xe7\xad\x89').decode("utf-8")
+            binary_string = "\x00\x01\x02\x03\x00\x00\xFF"
+ values = [
+ "hello",
+ utf8_string1,
+ utf8_string2,
+ binary_string
+ ]
+            insert_values = ','.join(f"('{v}')" for v in values)
+ conn.insert(table_name, insert_values)
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE s = ?", [value])
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [value]
+ assert rows[0].cursor_description[0][0] == "s"
+ assert rows[0].cursor_description[0][1] == str
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 4
+ assert rows_as_values(rows) == values
+
+ def test_fixed_string(self):
+ table_name = "odbc_test_data_types_fixed_string"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, "s FixedString(6)")):
+ values = ["hello\x00", "world\x00", "hellow"]
+ conn.insert(table_name, "('hello'), ('world'), ('hellow')")
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE s = ?", [value])
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [value]
+ assert rows[0].cursor_description[0][0] == "s"
+ assert rows[0].cursor_description[0][1] == str
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 3
+ assert rows_as_values(rows) == values
+
+ def test_fixed_string_utf8(self):
+ table_name = "odbc_test_data_types_fixed_string_utf8"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, "s FixedString(6)")):
+ values = ["h¶\x00\x00\x00", "w¶¶\x00", "hellow"] # ¶ = 2 bytes
+ conn.insert(table_name, "('h¶'), ('w¶¶'), ('hellow')")
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE s = ?", [value])
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [value]
+ assert rows[0].cursor_description[0][0] == "s"
+ assert rows[0].cursor_description[0][1] == str
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 3
+ assert rows_as_values(rows) == values
+
+ def test_date(self):
+ table_name = "odbc_test_data_types_date"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, "d Date")):
+ values = [
+ datetime.date(1970, 1, 1),
+ datetime.date(2000, 12, 31),
+ datetime.date(2020, 1, 1),
+ datetime.date(2149, 6, 6)]
+ conn.insert(table_name, "('1970-01-01'), ('2000-12-31'), ('2020-01-01'), ('2149-06-06')")
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE d = ?", [value])
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [value]
+ assert rows[0].cursor_description[0][0] == "d"
+ assert rows[0].cursor_description[0][1] == datetime.date
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 4
+ assert rows_as_values(rows) == values
+
+ def test_datetime(self):
+ table_name = "odbc_test_data_types_datetime"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, "dt DateTime")):
+ values = [
+                # FIXME: the Unix time 0 assertion fails if the server timezone is
+                # not UTC, even with SETTINGS session_timezone='UTC';
+                # could be a driver bug
+ # datetime.datetime(1970, 1, 1, 0, 0, 0),
+ datetime.datetime(2000, 12, 31, 23, 59, 59),
+ datetime.datetime(2020, 1, 1, 1, 1, 1),
+ datetime.datetime(2106, 2, 7, 6, 28, 15)]
+ conn.insert(table_name, "('2000-12-31 23:59:59'), ('2020-01-01 01:01:01'), ('2106-02-07 06:28:15')")
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE dt = ?", [value])
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [value]
+ assert rows[0].cursor_description[0][0] == "dt"
+ assert rows[0].cursor_description[0][1] == datetime.datetime
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 3
+ assert rows_as_values(rows) == values
+
+ def test_enum8(self):
+ table_name = "odbc_test_data_types_enum8"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, "e Enum8('hello' = -128, '¶' = 42, 'world' = 127)")):
+ values = ["hello", "¶", "world"]
+ conn.insert(table_name, "('hello'), ('¶'), ('world')")
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE e = ?", [value])
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [value]
+ assert rows[0].cursor_description[0][0] == "e"
+ assert rows[0].cursor_description[0][1] == str
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 3
+ assert rows_as_values(rows) == values
+
+ def test_enum16(self):
+ table_name = "odbc_test_data_types_enum16"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, "e Enum16('hello' = -32768, '¶' = 42, 'world' = 32767)")):
+ values = ["hello", "¶", "world"]
+ conn.insert(table_name, "('hello'), ('¶'), ('world')")
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE e = ?", [value])
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [value]
+ assert rows[0].cursor_description[0][0] == "e"
+ assert rows[0].cursor_description[0][1] == str
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 3
+ assert rows_as_values(rows) == values
+
+ def test_uuid(self):
+ table_name = "odbc_test_data_types_uuid"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, "u UUID")):
+ uuid0 = "417ddc5d-e556-4d27-95dd-a34d84e46a50"
+ uuid1 = "417ddc5d-e556-4d27-95dd-a34d84e46a51"
+ uuid2 = uuid.UUID('1dc3c592-f333-11e9-bedd-2477034de0ec')
+ values = [uuid0, uuid1, uuid2]
+            conn.insert(table_name, f"('{uuid0}'), ('{uuid1}'), ('{str(uuid2)}')")
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE u = ?", [value])
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [str(value)]
+ assert rows[0].cursor_description[0][0] == "u"
+ assert rows[0].cursor_description[0][1] == str
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 3
+ assert rows_as_values(rows) == list(map(str, values))
+
+ def test_ipv4(self):
+ table_name = "odbc_test_data_types_ipv4"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, "i IPv4")):
+ ipv40 = "116.106.34.242"
+ ipv41 = "116.253.40.133"
+ values = [ipv40, ipv41]
+ conn.insert(table_name, f"('{ipv40}'), ('{ipv41}')")
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE i = ?", [value])
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [value]
+ assert rows[0].cursor_description[0][0] == "i"
+ assert rows[0].cursor_description[0][1] == str
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 2
+ assert rows_as_values(rows) == values
+
+ def test_ipv6(self):
+ table_name = "odbc_test_data_types_ipv6"
+ with (pyodbc_connection() as conn,
+ create_table(conn, table_name, "i IPv6")):
+ ipv60 = "2001:db8:85a3::8a2e:370:7334"
+ ipv61 = "2001:db8:85a3::8a2e:370:7335"
+ values = [ipv60, ipv61]
+ conn.insert(table_name, f"('{ipv60}'), ('{ipv61}')")
+
+ for value in values:
+ rows = conn.query(f"SELECT * FROM {table_name} WHERE i = ?", [value])
+ assert len(rows) == 1
+ assert rows_as_values(rows) == [value]
+ assert rows[0].cursor_description[0][0] == "i"
+ assert rows[0].cursor_description[0][1] == str
+
+ rows = conn.query(f"SELECT * FROM {table_name}")
+ assert len(rows) == 2
+ assert rows_as_values(rows) == values
diff --git a/test/src/e2e/test_funcvalues.py b/test/src/e2e/test_funcvalues.py
new file mode 100644
index 000000000..a55b34cc4
--- /dev/null
+++ b/test/src/e2e/test_funcvalues.py
@@ -0,0 +1,42 @@
+import datetime
+
+from util import pyodbc_connection
+
+
+class TestFuncValues:
+ def test_is_null_false(self):
+ with pyodbc_connection() as conn:
+ values = [
+ "hello",
+ b'\xe5\x8d\xb0'.decode('utf-8'),
+ -1,
+ 0,
+ 255,
+ 1.0,
+ 0.0,
+ -1.0,
+ datetime.date(2000, 12, 31),
+ datetime.datetime(2000, 12, 31, 23, 59, 59),
+ ]
+ for value in values:
+ rows = conn.query("SELECT isNull(?)", [value])
+ assert repr(rows) == "[(0,)]", f"result did not match for value {value}"
+
+ def test_is_null_true(self):
+ with pyodbc_connection() as conn:
+ rows = conn.query("SELECT isNull(?)", [None])
+ assert repr(rows) == "[(1,)]"
+
+ def test_array_reduce_null(self):
+ with pyodbc_connection() as conn:
+ rows = conn.query("SELECT arrayReduce('count', [?, ?])", [None, None])
+ assert repr(rows) == "[(0,)]"
+
+ # FIXME:
+ # Fails with a NO_COMMON_TYPE error
+ # Rendered query:
+ # SELECT arrayReduce('count', [1, _CAST(NULL, 'LowCardinality(Nullable(String))'), _CAST(NULL, 'LowCardinality(Nullable(String))')])
+ # def test_array_reduce_not_null(self):
+ # with pyodbc_connection() as conn:
+ # rows = conn.query("SELECT arrayReduce('count', [1, ?, ?])", [None, None])
+ # assert repr(rows) == "[(1,)]"
diff --git a/test/src/e2e/test_sanity.py b/test/src/e2e/test_sanity.py
new file mode 100755
index 000000000..47431a836
--- /dev/null
+++ b/test/src/e2e/test_sanity.py
@@ -0,0 +1,99 @@
+import datetime
+from decimal import Decimal
+
+import pytest
+
+from util import pyodbc_connection, create_table
+
+TABLE_NAME = "test_sanity_simple_data_types"
+TABLE_SCHEMA = "i UInt8, ni Nullable(UInt8), s String, d Date, dt DateTime, f Float32, dc Decimal32(3), fs FixedString(8)"
+
+VALUES_ROW1 = "(1, NULL, 'Hello, world', '2005-05-05', '2005-05-05 05:05:05', 1.5, 10.123, 'fstring0')"
+PYODBC_ROW1 = [1, None, 'Hello, world', datetime.date(2005, 5, 5), datetime.datetime(2005, 5, 5, 5, 5, 5), 1.5,
+ Decimal('10.123'), 'fstring0']
+
+VALUES_ROW2 = "(2, NULL, 'test', '2019-05-25', '2019-05-25 15:00:00', 1.433, 11.124, 'fstring1')"
+PYODBC_ROW2 = [2, None, 'test', datetime.date(2019, 5, 25), datetime.datetime(2019, 5, 25, 15, 0), 1.433, Decimal('11.124'), 'fstring1']
+
+
+class TestSanity:
+ @pytest.fixture(scope='class')
+ def conn(self):
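+        # scope='class': a single connection and pre-populated table are shared
+        # by every test in this class; the tests below only read from it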
+ with (pyodbc_connection() as conn,
+ create_table(conn, TABLE_NAME, TABLE_SCHEMA)):
+ conn.insert(TABLE_NAME, VALUES_ROW1)
+ conn.insert(TABLE_NAME, VALUES_ROW2)
+ yield conn
+
+ def test_simple_query(self):
+ with pyodbc_connection() as conn:
+            result = conn.query("SELECT 1")
+ assert len(result) == 1
+ assert result[0].cursor_description[0][0] == "1"
+ assert result[0].cursor_description[0][1] == int
+
+ def test_uint8_param(self, conn):
+ result = conn.query(f"SELECT * FROM {TABLE_NAME} WHERE i = ? ORDER BY i, s, d",
+ [1])
+ assert len(result) == 1
+ assert list(result[0]) == PYODBC_ROW1
+
+ # FIXME: None is converted to an empty string (probably by PyODBC itself?)
+ # Rendered query: SELECT * FROM test_sanity_simple_data_types WHERE ni = _CAST(NULL, 'Nullable(String)') ORDER BY i ASC, s ASC, d ASC
+ # Attempt to read after eof: while converting '' to UInt8. (ATTEMPT_TO_READ_AFTER_EOF)
+ # def test_nullable_uint8_param(self, conn):
+    #     result = conn.query(f"SELECT * FROM {TABLE_NAME} WHERE ni = ? ORDER BY i, s, d",
+ # [None])
+ # assert len(result) == 1
+ # assert list(result[0]) == PYODBC_ROW1
+
+ def test_string_param(self, conn):
+ result = conn.query(f"SELECT * FROM {TABLE_NAME} WHERE s = ? ORDER BY i, s, d",
+ ["Hello, world"])
+ assert len(result) == 1
+ assert list(result[0]) == PYODBC_ROW1
+
+ def test_date_param(self, conn):
+ result = conn.query(f"SELECT * FROM {TABLE_NAME} WHERE d = ? ORDER BY i, s, d",
+ [datetime.date(2005, 5, 5)])
+ assert len(result) == 1
+ assert list(result[0]) == PYODBC_ROW1
+
+ def test_datetime_param(self, conn):
+ result = conn.query(f"SELECT * FROM {TABLE_NAME} WHERE dt = ? ORDER BY i, s, d",
+ [datetime.datetime(2005, 5, 5, 5, 5, 5)])
+ assert len(result) == 1
+ assert list(result[0]) == PYODBC_ROW1
+
+ def test_float32_param(self, conn):
+ result = conn.query(f"SELECT * FROM {TABLE_NAME} WHERE f = ? ORDER BY i, s, d",
+ [1.5])
+ assert len(result) == 1
+ assert list(result[0]) == PYODBC_ROW1
+
+ def test_decimal32_param(self, conn):
+ result = conn.query(f"SELECT * FROM {TABLE_NAME} WHERE dc = ? ORDER BY i, s, d",
+ [Decimal('10.123')])
+ assert len(result) == 1
+ assert list(result[0]) == PYODBC_ROW1
+
+ def test_fixed_string_param(self, conn):
+ result = conn.query(f"SELECT * FROM {TABLE_NAME} WHERE fs = ? ORDER BY i, s, d",
+ ["fstring0"])
+ assert len(result) == 1
+ assert list(result[0]) == PYODBC_ROW1
+
+ def test_uint8_and_string_params(self, conn):
+ result = conn.query(f"SELECT * FROM {TABLE_NAME} WHERE i = ? and s = ? ORDER BY i, s, d",
+ [2, "test"])
+ assert len(result) == 1
+ assert list(result[0]) == PYODBC_ROW2
+
+ def test_uint8_string_and_date_params(self, conn):
+ result = conn.query(f"SELECT * FROM {TABLE_NAME} WHERE i = ? and s = ? and d = ? ORDER BY i, s, d",
+ [2, "test", datetime.date(2019, 5, 25)])
+ assert len(result) == 1
+ assert list(result[0]) == PYODBC_ROW2
diff --git a/test/parameterized/utils/__init__.py b/test/src/util/__init__.py
similarity index 52%
rename from test/parameterized/utils/__init__.py
rename to test/src/util/__init__.py
index 16281fe0b..c50350c52 100644
--- a/test/parameterized/utils/__init__.py
+++ b/test/src/util/__init__.py
@@ -1 +1,2 @@
from .utils import *
+from .env import *
diff --git a/test/src/util/env.py b/test/src/util/env.py
new file mode 100644
index 000000000..a13309dd3
--- /dev/null
+++ b/test/src/util/env.py
@@ -0,0 +1,20 @@
+import logging
+import os
+
+LOGGER = logging.getLogger(__name__)
+DEFAULT_DSN = "ClickHouse DSN (ANSI)"
+
+
+def read_dsn_from_env():
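+    # Prefer the DSN environment variable; otherwise fall back to the default ANSI DSN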
+ env_dsn = os.getenv("DSN")
+ if env_dsn is None:
+ LOGGER.info(f"Setting DSN to default value: {DEFAULT_DSN}")
+ return DEFAULT_DSN
+ return env_dsn
+
+
+DSN = read_dsn_from_env()
+CLICKHOUSE_LOG_PATH = os.getenv("CLICKHOUSE_LOG", "/var/log/clickhouse-server/clickhouse-server.log")
+ODBC_DRIVER_TRACE_LOG_PATH = os.getenv("ODBC_DRIVER_TRACE_LOG", "/tmp/clickhouse-odbc-driver-trace.log")
+ODBC_DRIVER_W_TRACE_LOG_PATH = os.getenv("ODBC_DRIVER_W_TRACE_LOG", "/tmp/clickhouse-odbc-driver-w-trace.log")
+ODBC_MANAGER_TRACE_LOG_PATH = os.getenv("ODBC_MANAGER_TRACE_LOG", "/tmp/odbc-driver-manager-trace.log")
diff --git a/test/src/util/utils.py b/test/src/util/utils.py
new file mode 100644
index 000000000..ff7a30c2e
--- /dev/null
+++ b/test/src/util/utils.py
@@ -0,0 +1,113 @@
+import logging
+import os
+from contextlib import contextmanager
+
+import pyodbc
+
+LOGGER = logging.getLogger(__name__)
+
+
+@contextmanager
+def shell_logs():
+ """ClickHouse and ODBC driver logs context manager.
+ """
+
+ # rewrite without testflows.Shell if needed
+ #
+ # class _Logs:
+ # def __init__(self, *args):
+ # self.logs = args
+ #
+ # def read(self, timeout=None):
+ # for log in self.logs:
+ # log.readlines(timeout=timeout)
+ #
+ # if not settings.debug:
+ # yield None
+ # else:
+ # with Shell(name="clickhouse-server.log") as bash0, \
+ # Shell(name="odbc-driver-trace.log") as bash1, \
+ # Shell(name="odbc-driver-w-trace.log") as bash2, \
+ # Shell(name="odbc-manager-trace.log") as bash3:
+ #
+ # bash1(f"touch {ODBC_DRIVER_TRACE_LOG_PATH}")
+ # bash2(f"touch {ODBC_DRIVER_W_TRACE_LOG_PATH}")
+ # bash3(f"touch {ODBC_MANAGER_TRACE_LOG_PATH}")
+ #
+ # with bash0(f"tail -f {CLICKHOUSE_LOG_PATH}", asyncronous=True, name="") as clickhouse_log, \
+ # bash1(f"tail -f {ODBC_DRIVER_TRACE_LOG_PATH}", asyncronous=True, name="") as odbc_driver_log, \
+ # bash2(f"tail -f {ODBC_DRIVER_W_TRACE_LOG_PATH}", asyncronous=True, name="") as odbc_driver_w_log, \
+ # bash3(f"tail -f {ODBC_MANAGER_TRACE_LOG_PATH}", asyncronous=True, name="") as odbc_manager_log:
+ # logs = _Logs(clickhouse_log, odbc_driver_log, odbc_driver_w_log, odbc_manager_log)
+ # logs.read()
+    #             yield logs
+
+    # Until the log tailing above is reimplemented, yield None so this
+    # context manager can still be entered (it must remain a generator)
+    yield None
+
+
+class PyODBCConnection:
+ def __init__(self, connection: pyodbc.Connection, encoding, logs=None):
+ self.connection = connection
+ self.logs = logs
+ self.encoding = encoding
+ # encoding/decoding fix is required for iODBC (UTF-16 by default)
+ self.connection.setencoding(encoding=self.encoding)
+ self.connection.setdecoding(pyodbc.SQL_CHAR, encoding=self.encoding, ctype=pyodbc.SQL_CHAR)
+ if self.logs:
+ self.logs.read()
+
+    def query(self, q, params=None, fetch=True):
+        if params is None:
+            params = []
+        LOGGER.debug(f"Query: {q}")
+        if params:
+            LOGGER.debug(f"Params: {params}")
+        cursor = self.connection.cursor()
+        # self.connection.setencoding(encoding=self.encoding)
+        cursor.execute(q, *params)
+        if fetch:
+            # self.connection.setencoding(encoding="utf-16")
+            rows = cursor.fetchall()
+            for row in rows:
+                LOGGER.debug(f"Row: {row}")
+            return rows
+        # Log tailing is disabled until shell_logs() is reimplemented:
+        # if self.logs and settings.debug:
+        #     # sleep 0.5 sec to let messages be written to the logs
+        #     time.sleep(0.5)
+        #     self.logs.read(timeout=0.1)
+
+ def insert(self, table_name: str, values: str):
+ stmt = f"INSERT INTO {table_name} VALUES {values}"
+ self.query(stmt, fetch=False)
+
+
+@contextmanager
+def pyodbc_connection(encoding="utf-8", logs=None):
+ dsn = os.getenv("DSN", "ClickHouse DSN (ANSI)")
+    LOGGER.debug(f"Using DSN={dsn}")
+ connection = None
+ try:
+ connection = pyodbc.connect(f"DSN={dsn};")
+ yield PyODBCConnection(connection, encoding, logs=logs)
+ except Exception as e:
+ LOGGER.error(f"Error: {e}")
+ raise
+ finally:
+ if connection:
+ connection.close()
+
+
+@contextmanager
+def create_table(connection: PyODBCConnection, table_name: str, schema: str):
+ ddl = f"CREATE OR REPLACE TABLE {table_name} ({schema}) ENGINE = Memory"
+ connection.query(ddl, fetch=False)
+ yield
+    # The table is intentionally not dropped; leftovers help debugging. Uncomment to clean up:
+ # connection.query(f"DROP TABLE IF EXISTS {table_name}", fetch=False)
+
+
+def rows_as_values(rows: list[pyodbc.Row]) -> list:
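+    # Flatten single-column rows into a plain list of values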
+    return [row[0] for row in rows]
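+
+
+# A typical call pattern for these helpers, as an illustrative sketch only
+# (the table name "odbc_example" is made up):
+#
+#   with (pyodbc_connection() as conn,
+#         create_table(conn, "odbc_example", "i UInt8")):
+#       conn.insert("odbc_example", "(1), (2)")
+#       rows = conn.query("SELECT * FROM odbc_example ORDER BY i")
+#       assert rows_as_values(rows) == [1, 2]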
diff --git a/test/test.py b/test/test.py
index 4ddea6c9e..1080cb1ca 100755
--- a/test/test.py
+++ b/test/test.py
@@ -16,6 +16,7 @@
is_python_3 = (sys.version_info.major == 3)
is_windows = (os.name == 'nt')
+
def main():
dsn = 'ClickHouse DSN (ANSI)'
@@ -27,12 +28,12 @@ def main():
query(connection, "select * from system.build_options")
query(connection,
- "SELECT *, (CASE WHEN (number == 1) THEN 'o' WHEN (number == 2) THEN 'two long string' WHEN (number == 3) THEN 'r' WHEN (number == 4) THEN NULL ELSE '-' END) FROM system.numbers LIMIT 6")
+ "SELECT *, (CASE WHEN (number == 1) THEN 'o' WHEN (number == 2) THEN 'two long string' WHEN (number == 3) THEN 'r' WHEN (number == 4) THEN NULL ELSE '-' END) FROM system.numbers LIMIT 6")
# TODO query("SELECT 1, 'string', NULL")
if is_python_3:
query(connection, u"SELECT 'абвгдеёжзийклмнопрстуфхцчшщъыьэюяАБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ'")
query(connection,
- "SELECT -127,-128,-129,126,127,128,255,256,257,-32767,-32768,-32769,32766,32767,32768,65535,65536,65537,-2147483647,-2147483648,-2147483649,2147483646,2147483647,2147483648,4294967295,4294967296,4294967297,-9223372036854775807,-9223372036854775808,-9223372036854775809,9223372036854775806,9223372036854775807,9223372036854775808,18446744073709551615,18446744073709551616,18446744073709551617")
+ "SELECT -127,-128,-129,126,127,128,255,256,257,-32767,-32768,-32769,32766,32767,32768,65535,65536,65537,-2147483647,-2147483648,-2147483649,2147483646,2147483647,2147483648,4294967295,4294967296,4294967297,-9223372036854775807,-9223372036854775808,-9223372036854775809,9223372036854775806,9223372036854775807,9223372036854775808,18446744073709551615,18446744073709551616,18446744073709551617")
query(connection, "SELECT 2147483647, 2147483648, 2147483647+1, 2147483647+10, 4294967295")
query(connection, "SELECT * FROM system.contributors ORDER BY name LIMIT 10")
@@ -84,6 +85,7 @@ def getConnection(connectionString):
return connection
+
def query(connection, q):
print("{} :".format(q))
cursor = connection.cursor()
@@ -92,5 +94,6 @@ def query(connection, q):
for row in rows:
print(row)
+
if __name__ == '__main__':
main()