Fix flaky tests
fabriziomello committed Sep 26, 2024
1 parent 4d49e7d commit a5aff22
Showing 3 changed files with 23 additions and 16 deletions.
13 changes: 12 additions & 1 deletion .github/gh_matrix_builder.py
@@ -313,7 +313,18 @@ def macos_config(overrides):
"coverage": False,
"installcheck_args": f'TESTS="{" ".join(list(tests) * 20)}"',
"name": "Flaky Check Debug",
"pg": PG14_LATEST,
"pg": PG16_LATEST,
"pginstallcheck": False,
}
)
)
m["include"].append(
build_debug_config(
{
"coverage": False,
"installcheck_args": f'TESTS="{" ".join(list(tests) * 20)}"',
"name": "Flaky Check Debug",
"pg": PG17_LATEST,
"pginstallcheck": False,
}
)
14 changes: 5 additions & 9 deletions tsl/test/shared/expected/constraint_exclusion_prepared.out
@@ -18,21 +18,17 @@ SELECT CASE WHEN current_setting('server_version_num')::int/10000 >= 14 THEN set
 -- get EXPLAIN output for all variations
 \set PREFIX 'EXPLAIN (analyze, costs off, timing off, summary off)'
 \set PREFIX_VERBOSE 'EXPLAIN (analyze, costs off, timing off, summary off, verbose)'
-set work_mem to '64MB';
--- disable incremental sort here to make plans comparable to PG < 13
-SELECT CASE WHEN current_setting('server_version_num')::int/10000 >= 13 THEN set_config('enable_incremental_sort','off',false) ELSE 'off' END;
- case
- off
-(1 row)
-
+SET work_mem TO '64MB';
 -- In the following test cases, we test that certain indexes are used. By using the
 -- timescaledb.enable_decompression_sorted_merge optimization, we are pushing a sort node
 -- below the DecompressChunk node, which operates on the batches. This could lead to flaky
 -- tests because the input data is small and PostgreSQL switches from IndexScans to
 -- SequentialScans. Disable the optimization for the following tests to ensure we have
 -- stable query plans in all CI environments.
-SET timescaledb.enable_decompression_sorted_merge = 0;
-set max_parallel_workers_per_gather to 0;
+SET timescaledb.enable_decompression_sorted_merge TO 0;
+SET max_parallel_workers_per_gather TO 0;
+-- disable incremental sort to avoid flaky results
+SET enable_incremental_sort TO off;
 \set TEST_TABLE 'metrics'
 \ir :TEST_QUERY_NAME
 -- This file and its contents are licensed under the Timescale License.
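For context on the incremental-sort change in this file: the removed lines gated the setting behind a server-version check (the enable_incremental_sort GUC only exists on PostgreSQL 13 and later), while the replacement sets it directly, presumably because every version the test now runs on has the GUC. A side-by-side sketch of the two forms, taken from the hunk above:

-- Old, version-gated form (removed): only calls set_config() on PG >= 13,
-- where enable_incremental_sort exists; older servers just return 'off'.
SELECT CASE WHEN current_setting('server_version_num')::int/10000 >= 13
            THEN set_config('enable_incremental_sort', 'off', false)
            ELSE 'off' END;

-- New form (added): a plain SET, with a comment stating the intent.
-- disable incremental sort to avoid flaky results
SET enable_incremental_sort TO off;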
12 changes: 6 additions & 6 deletions tsl/test/shared/sql/constraint_exclusion_prepared.sql
@@ -18,20 +18,20 @@ SELECT CASE WHEN current_setting('server_version_num')::int/10000 >= 14 THEN set
 \set PREFIX 'EXPLAIN (analyze, costs off, timing off, summary off)'
 \set PREFIX_VERBOSE 'EXPLAIN (analyze, costs off, timing off, summary off, verbose)'
 
-set work_mem to '64MB';
--- disable incremental sort here to make plans comparable to PG < 13
-SELECT CASE WHEN current_setting('server_version_num')::int/10000 >= 13 THEN set_config('enable_incremental_sort','off',false) ELSE 'off' END;
-
+SET work_mem TO '64MB';
+
 -- In the following test cases, we test that certain indexes are used. By using the
 -- timescaledb.enable_decompression_sorted_merge optimization, we are pushing a sort node
 -- below the DecompressChunk node, which operates on the batches. This could lead to flaky
 -- tests because the input data is small and PostgreSQL switches from IndexScans to
 -- SequentialScans. Disable the optimization for the following tests to ensure we have
 -- stable query plans in all CI environments.
-SET timescaledb.enable_decompression_sorted_merge = 0;
+SET timescaledb.enable_decompression_sorted_merge TO 0;
+SET max_parallel_workers_per_gather TO 0;
 
+-- disable incremental sort to avoid flaky results
+SET enable_incremental_sort TO off;
 
-set max_parallel_workers_per_gather to 0;
 \set TEST_TABLE 'metrics'
 \ir :TEST_QUERY_NAME
 \set TEST_TABLE 'metrics_space'
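Taken together, the preamble these two files converge on pins the planner settings that were causing plan churn: a fixed work_mem, the decompression sorted-merge optimization off, parallel workers off, and incremental sort off. A minimal sketch of how such a preamble stabilizes an EXPLAIN-based check follows; the settings are copied from the test above, while the prepared statement, column name, and predicate are hypothetical stand-ins, not part of the commit:

-- Plan-stabilizing settings from the shared test preamble.
SET work_mem TO '64MB';
SET timescaledb.enable_decompression_sorted_merge TO 0;
SET max_parallel_workers_per_gather TO 0;
SET enable_incremental_sort TO off;

-- Hypothetical plan check: with the settings pinned, the plan shape recorded
-- in the expected output (e.g. index scan vs. sequential scan) should not
-- flip between CI runs on small test data.
PREPARE ce_check AS SELECT count(*) FROM metrics WHERE device_id = 1;
EXPLAIN (analyze, costs off, timing off, summary off) EXECUTE ce_check;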
