Allow using experimental functions in caggs

Current implementation of caggs can't find a bucketing function
if it's declared in the experimental schema. This patch fixes it.

Also the patch adds `debug_notice` test to IGNORE list on AppVeyor.
The corresponding test generates an extra "DEBUG: rehashing catalog
cache" message which is not critical. It seems to be stable on Linux.
This commit is contained in:
Aleksander Alekseev 2021-06-25 16:10:10 +03:00 committed by Aleksander Alekseev
parent 06433f6228
commit a1a789be8e
10 changed files with 215 additions and 26 deletions

View File

@ -203,7 +203,7 @@ test_script:
# killer. Therefore, we need to ignore the results of the
# remote_connection and remote_txn tests.
docker exec -e IGNORES="bgw_db_scheduler compression_algos continuous_aggs_bgw ordered_append_join-12 remote_connection remote_txn " -e SKIPS="bgw_db_scheduler" -e TEST_TABLESPACE1_PREFIX="C:\Users\$env:UserName\Documents\tablespace1\" -e TEST_TABLESPACE2_PREFIX="C:\Users\$env:UserName\Documents\tablespace2\" -e TEST_SPINWAIT_ITERS=10000 -e USER=postgres -it pgregress /bin/bash -c "cd /timescaledb/build && make -k regresschecklocal-t regresschecklocal-shared"
docker exec -e IGNORES="bgw_db_scheduler compression_algos continuous_aggs_bgw debug_notice ordered_append_join-12 remote_connection remote_txn " -e SKIPS="bgw_db_scheduler" -e TEST_TABLESPACE1_PREFIX="C:\Users\$env:UserName\Documents\tablespace1\" -e TEST_TABLESPACE2_PREFIX="C:\Users\$env:UserName\Documents\tablespace2\" -e TEST_SPINWAIT_ITERS=10000 -e USER=postgres -it pgregress /bin/bash -c "cd /timescaledb/build && make -k regresschecklocal-t regresschecklocal-shared"
if( -not $? -or -not $TESTS1 ) { exit 1 }

View File

@ -1093,4 +1093,4 @@ int64
ts_continuous_agg_max_bucket_width(const ContinuousAgg *agg)
{
	/* Return the bucket width stored in the cagg's catalog data. */
	return agg->data.bucket_width;
}

View File

@ -31,6 +31,13 @@
#define TS_UPDATE_SCRIPT_CONFIG_VAR "timescaledb.update_script_stage"
#define POST_UPDATE "post"
/*
* The name of the experimental schema.
*
* Call ts_extension_schema_name() or ts_experimental_schema_name() for
* consistency. Don't use this macro directly.
*/
#define TS_EXPERIMENTAL_SCHEMA_NAME "timescaledb_experimental"
static Oid extension_proxy_oid = InvalidOid;
/*
@ -202,6 +209,12 @@ ts_extension_schema_name(void)
return get_namespace_name(ts_extension_schema_oid());
}
/*
 * Return the name of the schema that holds the extension's experimental
 * objects (TS_EXPERIMENTAL_SCHEMA_NAME). Callers should use this accessor
 * rather than referencing the macro directly.
 */
const char *
ts_experimental_schema_name(void)
{
	return TS_EXPERIMENTAL_SCHEMA_NAME;
}
/*
* Called upon all Relcache invalidate events.
* Returns whether or not to invalidate the entire extension.

View File

@ -15,6 +15,7 @@ extern void ts_extension_check_version(const char *so_version);
extern void ts_extension_check_server_version(void);
extern Oid ts_extension_schema_oid(void);
extern TSDLLEXPORT char *ts_extension_schema_name(void);
extern const char *ts_experimental_schema_name(void);
extern const char *ts_extension_get_so_name(void);
extern TSDLLEXPORT const char *ts_extension_get_version(void);

View File

@ -176,7 +176,7 @@ typedef struct FuncEntry
/* Information about functions that we put in the cache */
static FuncInfo funcinfo[] = {
{
.is_timescaledb_func = true,
.origin = ORIGIN_TIMESCALE,
.is_bucketing_func = true,
.funcname = "time_bucket",
.nargs = 2,
@ -185,7 +185,7 @@ static FuncInfo funcinfo[] = {
.sort_transform = time_bucket_sort_transform,
},
{
.is_timescaledb_func = true,
.origin = ORIGIN_TIMESCALE,
.is_bucketing_func = true,
.funcname = "time_bucket",
.nargs = 3,
@ -194,7 +194,7 @@ static FuncInfo funcinfo[] = {
.sort_transform = time_bucket_sort_transform,
},
{
.is_timescaledb_func = true,
.origin = ORIGIN_TIMESCALE,
.is_bucketing_func = true,
.funcname = "time_bucket",
.nargs = 2,
@ -203,7 +203,7 @@ static FuncInfo funcinfo[] = {
.sort_transform = time_bucket_sort_transform,
},
{
.is_timescaledb_func = true,
.origin = ORIGIN_TIMESCALE,
.is_bucketing_func = true,
.funcname = "time_bucket",
.nargs = 3,
@ -212,7 +212,7 @@ static FuncInfo funcinfo[] = {
.sort_transform = time_bucket_sort_transform,
},
{
.is_timescaledb_func = true,
.origin = ORIGIN_TIMESCALE,
.is_bucketing_func = true,
.funcname = "time_bucket",
.nargs = 2,
@ -221,7 +221,7 @@ static FuncInfo funcinfo[] = {
.sort_transform = time_bucket_sort_transform,
},
{
.is_timescaledb_func = true,
.origin = ORIGIN_TIMESCALE,
.is_bucketing_func = true,
.funcname = "time_bucket",
.nargs = 3,
@ -230,7 +230,7 @@ static FuncInfo funcinfo[] = {
.sort_transform = time_bucket_sort_transform,
},
{
.is_timescaledb_func = true,
.origin = ORIGIN_TIMESCALE,
.is_bucketing_func = true,
.funcname = "time_bucket",
.nargs = 2,
@ -239,7 +239,7 @@ static FuncInfo funcinfo[] = {
.sort_transform = time_bucket_sort_transform,
},
{
.is_timescaledb_func = true,
.origin = ORIGIN_TIMESCALE,
.is_bucketing_func = true,
.funcname = "time_bucket",
.nargs = 3,
@ -248,7 +248,7 @@ static FuncInfo funcinfo[] = {
.sort_transform = time_bucket_sort_transform,
},
{
.is_timescaledb_func = true,
.origin = ORIGIN_TIMESCALE,
.is_bucketing_func = true,
.funcname = "time_bucket",
.nargs = 2,
@ -257,7 +257,7 @@ static FuncInfo funcinfo[] = {
.sort_transform = time_bucket_sort_transform,
},
{
.is_timescaledb_func = true,
.origin = ORIGIN_TIMESCALE,
.is_bucketing_func = true,
.funcname = "time_bucket",
.nargs = 3,
@ -266,7 +266,7 @@ static FuncInfo funcinfo[] = {
.sort_transform = time_bucket_sort_transform,
},
{
.is_timescaledb_func = true,
.origin = ORIGIN_TIMESCALE,
.is_bucketing_func = true,
.funcname = "time_bucket",
.nargs = 2,
@ -275,7 +275,7 @@ static FuncInfo funcinfo[] = {
.sort_transform = time_bucket_sort_transform,
},
{
.is_timescaledb_func = true,
.origin = ORIGIN_TIMESCALE,
.is_bucketing_func = true,
.funcname = "time_bucket",
.nargs = 3,
@ -283,9 +283,44 @@ static FuncInfo funcinfo[] = {
.group_estimate = time_bucket_group_estimate,
.sort_transform = time_bucket_sort_transform,
},
{
.is_timescaledb_func = true,
.origin = ORIGIN_TIMESCALE_EXPERIMENTAL,
.is_bucketing_func = true,
.funcname = "time_bucket_ng",
.nargs = 2,
.arg_types = { INTERVALOID, DATEOID },
.group_estimate = time_bucket_group_estimate,
.sort_transform = time_bucket_sort_transform,
},
{
.origin = ORIGIN_TIMESCALE_EXPERIMENTAL,
.is_bucketing_func = true,
.funcname = "time_bucket_ng",
.nargs = 3,
.arg_types = { INTERVALOID, DATEOID, DATEOID },
.group_estimate = time_bucket_group_estimate,
.sort_transform = time_bucket_sort_transform,
},
{
.origin = ORIGIN_TIMESCALE_EXPERIMENTAL,
.is_bucketing_func = true,
.funcname = "time_bucket_ng",
.nargs = 2,
.arg_types = { INTERVALOID, TIMESTAMPOID },
.group_estimate = time_bucket_group_estimate,
.sort_transform = time_bucket_sort_transform,
},
{
.origin = ORIGIN_TIMESCALE_EXPERIMENTAL,
.is_bucketing_func = true,
.funcname = "time_bucket_ng",
.nargs = 3,
.arg_types = { INTERVALOID, TIMESTAMPOID, TIMESTAMPOID },
.group_estimate = time_bucket_group_estimate,
.sort_transform = time_bucket_sort_transform,
},
{
.origin = ORIGIN_TIMESCALE,
.is_bucketing_func = true,
.funcname = "time_bucket_gapfill",
.nargs = 4,
@ -294,7 +329,7 @@ static FuncInfo funcinfo[] = {
.sort_transform = time_bucket_gapfill_sort_transform,
},
{
.is_timescaledb_func = true,
.origin = ORIGIN_TIMESCALE,
.is_bucketing_func = true,
.funcname = "time_bucket_gapfill",
.nargs = 4,
@ -303,7 +338,7 @@ static FuncInfo funcinfo[] = {
.sort_transform = time_bucket_gapfill_sort_transform,
},
{
.is_timescaledb_func = true,
.origin = ORIGIN_TIMESCALE,
.is_bucketing_func = true,
.funcname = "time_bucket_gapfill",
.nargs = 4,
@ -312,7 +347,7 @@ static FuncInfo funcinfo[] = {
.sort_transform = time_bucket_gapfill_sort_transform,
},
{
.is_timescaledb_func = true,
.origin = ORIGIN_TIMESCALE,
.is_bucketing_func = true,
.funcname = "time_bucket_gapfill",
.nargs = 4,
@ -321,7 +356,7 @@ static FuncInfo funcinfo[] = {
.sort_transform = time_bucket_gapfill_sort_transform,
},
{
.is_timescaledb_func = true,
.origin = ORIGIN_TIMESCALE,
.is_bucketing_func = true,
.funcname = "time_bucket_gapfill",
.nargs = 4,
@ -330,7 +365,7 @@ static FuncInfo funcinfo[] = {
.sort_transform = time_bucket_gapfill_sort_transform,
},
{
.is_timescaledb_func = true,
.origin = ORIGIN_TIMESCALE,
.is_bucketing_func = true,
.funcname = "time_bucket_gapfill",
.nargs = 4,
@ -340,7 +375,7 @@ static FuncInfo funcinfo[] = {
},
{
.is_timescaledb_func = false,
.origin = ORIGIN_POSTGRES,
.is_bucketing_func = true,
.funcname = "date_trunc",
.nargs = 2,
@ -349,7 +384,7 @@ static FuncInfo funcinfo[] = {
.sort_transform = date_trunc_sort_transform,
},
{
.is_timescaledb_func = false,
.origin = ORIGIN_POSTGRES,
.is_bucketing_func = true,
.funcname = "date_trunc",
.nargs = 2,
@ -379,6 +414,7 @@ initialize_func_info()
.hcxt = CacheMemoryContext,
};
Oid extension_nsp = ts_extension_schema_oid();
Oid experimental_nsp = get_namespace_oid(ts_experimental_schema_name(), false);
Oid pg_nsp = get_namespace_oid("pg_catalog", false);
HeapTuple tuple;
Relation rel;
@ -391,12 +427,21 @@ initialize_func_info()
for (i = 0; i < _MAX_CACHE_FUNCTIONS; i++)
{
FuncInfo *finfo = &funcinfo[i];
Oid namespaceoid = finfo->is_timescaledb_func ? extension_nsp : pg_nsp;
Oid namespaceoid = pg_nsp;
oidvector *paramtypes = buildoidvector(finfo->arg_types, finfo->nargs);
FuncEntry *fentry;
bool hash_found;
Oid funcid;
if (finfo->origin == ORIGIN_TIMESCALE)
{
namespaceoid = extension_nsp;
}
else if (finfo->origin == ORIGIN_TIMESCALE_EXPERIMENTAL)
{
namespaceoid = experimental_nsp;
}
tuple = SearchSysCache3(PROCNAMEARGSNSP,
PointerGetDatum(finfo->funcname),
PointerGetDatum(paramtypes),

View File

@ -16,10 +16,28 @@
typedef Expr *(*sort_transform_func)(FuncExpr *func);
typedef double (*group_estimate_func)(PlannerInfo *root, FuncExpr *expr, double path_rows);
/* Describes the function origin, i.e. which schema it must be looked up in. */
typedef enum
{
	/*
	 * Function is provided by PostgreSQL.
	 */
	ORIGIN_POSTGRES = 0,
	/*
	 * Function is provided by TimescaleDB.
	 */
	ORIGIN_TIMESCALE = 1,
	/*
	 * Function is provided by TimescaleDB and is experimental.
	 * It should be looked for in the experimental schema.
	 */
	ORIGIN_TIMESCALE_EXPERIMENTAL = 2,
} FuncOrigin;
typedef struct FuncInfo
{
const char *funcname;
bool is_timescaledb_func;
FuncOrigin origin;
bool is_bucketing_func;
int nargs;
Oid arg_types[FUNC_CACHE_MAX_FUNC_ARGS];

View File

@ -1069,9 +1069,18 @@ get_partialize_funcexpr(Aggref *agg)
/*
 * Check whether the given function can be used as a bucketing function
 * in a continuous aggregate.
 *
 * Only TimescaleDB-provided bucketing functions — regular or experimental —
 * with exactly two arguments are accepted.
 */
static bool
is_valid_bucketing_function(Oid funcid)
{
	bool is_timescale;
	FuncInfo *finfo = ts_func_cache_get_bucketing_func(funcid);

	if (finfo == NULL)
	{
		return false;
	}

	is_timescale =
		(finfo->origin == ORIGIN_TIMESCALE) || (finfo->origin == ORIGIN_TIMESCALE_EXPERIMENTAL);

	return is_timescale && (finfo->nargs == 2);
}
/*initialize MatTableColumnInfo */

View File

@ -0,0 +1,55 @@
-- This file and its contents are licensed under the Timescale License.
-- Please see the included NOTICE for copyright information and
-- LICENSE-TIMESCALE for a copy of the license.
-- Make sure experimental immutable function with 2 arguments can be used in caggs.
-- Functions with 3 arguments and/or stable functions are currently not supported in caggs.
CREATE TABLE conditions(
day DATE NOT NULL,
city text NOT NULL,
temperature INT NOT NULL);
SELECT create_hypertable(
'conditions', 'day',
chunk_time_interval => INTERVAL '1 day'
);
create_hypertable
-------------------------
(1,public,conditions,t)
(1 row)
INSERT INTO conditions (day, city, temperature) VALUES
('2021-06-14', 'Moscow', 26),
('2021-06-15', 'Moscow', 22),
('2021-06-16', 'Moscow', 24),
('2021-06-17', 'Moscow', 24),
('2021-06-18', 'Moscow', 27),
('2021-06-19', 'Moscow', 28),
('2021-06-20', 'Moscow', 30),
('2021-06-21', 'Moscow', 31),
('2021-06-22', 'Moscow', 34),
('2021-06-23', 'Moscow', 34),
('2021-06-24', 'Moscow', 34),
('2021-06-25', 'Moscow', 32),
('2021-06-26', 'Moscow', 32),
('2021-06-27', 'Moscow', 31);
CREATE MATERIALIZED VIEW conditions_summary_weekly
WITH (timescaledb.continuous) AS
SELECT city,
timescaledb_experimental.time_bucket_ng('7 days', day) AS bucket,
MIN(temperature),
MAX(temperature)
FROM conditions
GROUP BY city, bucket;
NOTICE: refreshing continuous aggregate "conditions_summary_weekly"
SELECT to_char(bucket, 'YYYY-MM-DD'), city, min, max
FROM conditions_summary_weekly
ORDER BY bucket;
to_char | city | min | max
------------+--------+-----+-----
2021-06-12 | Moscow | 22 | 27
2021-06-19 | Moscow | 28 | 34
2021-06-26 | Moscow | 31 | 32
(3 rows)
DROP TABLE conditions CASCADE;
NOTICE: drop cascades to 3 other objects
NOTICE: drop cascades to 2 other objects

View File

@ -10,6 +10,7 @@ set(TEST_FILES
continuous_aggs_policy.sql
continuous_aggs_refresh.sql
continuous_aggs_watermark.sql
continuous_aggs_experimental.sql
dist_views.sql
move.sql
partialize_finalize.sql

View File

@ -0,0 +1,47 @@
-- This file and its contents are licensed under the Timescale License.
-- Please see the included NOTICE for copyright information and
-- LICENSE-TIMESCALE for a copy of the license.
-- Make sure experimental immutable function with 2 arguments can be used in caggs.
-- Functions with 3 arguments and/or stable functions are currently not supported in caggs.
CREATE TABLE conditions(
day DATE NOT NULL,
city text NOT NULL,
temperature INT NOT NULL);
SELECT create_hypertable(
'conditions', 'day',
chunk_time_interval => INTERVAL '1 day'
);
INSERT INTO conditions (day, city, temperature) VALUES
('2021-06-14', 'Moscow', 26),
('2021-06-15', 'Moscow', 22),
('2021-06-16', 'Moscow', 24),
('2021-06-17', 'Moscow', 24),
('2021-06-18', 'Moscow', 27),
('2021-06-19', 'Moscow', 28),
('2021-06-20', 'Moscow', 30),
('2021-06-21', 'Moscow', 31),
('2021-06-22', 'Moscow', 34),
('2021-06-23', 'Moscow', 34),
('2021-06-24', 'Moscow', 34),
('2021-06-25', 'Moscow', 32),
('2021-06-26', 'Moscow', 32),
('2021-06-27', 'Moscow', 31);
CREATE MATERIALIZED VIEW conditions_summary_weekly
WITH (timescaledb.continuous) AS
SELECT city,
timescaledb_experimental.time_bucket_ng('7 days', day) AS bucket,
MIN(temperature),
MAX(temperature)
FROM conditions
GROUP BY city, bucket;
SELECT to_char(bucket, 'YYYY-MM-DD'), city, min, max
FROM conditions_summary_weekly
ORDER BY bucket;
DROP TABLE conditions CASCADE;