diff --git a/appveyor.yml b/appveyor.yml
index 3cfefa584..524435f9b 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -203,7 +203,7 @@ test_script:
       # killer. Therefore, we need to ignore the results of the
       # remote_connection and remote_txn tests.
-      docker exec -e IGNORES="bgw_db_scheduler compression_algos continuous_aggs_bgw ordered_append_join-12 remote_connection remote_txn " -e SKIPS="bgw_db_scheduler" -e TEST_TABLESPACE1_PREFIX="C:\Users\$env:UserName\Documents\tablespace1\" -e TEST_TABLESPACE2_PREFIX="C:\Users\$env:UserName\Documents\tablespace2\" -e TEST_SPINWAIT_ITERS=10000 -e USER=postgres -it pgregress /bin/bash -c "cd /timescaledb/build && make -k regresschecklocal-t regresschecklocal-shared"
+      docker exec -e IGNORES="bgw_db_scheduler compression_algos continuous_aggs_bgw debug_notice ordered_append_join-12 remote_connection remote_txn " -e SKIPS="bgw_db_scheduler" -e TEST_TABLESPACE1_PREFIX="C:\Users\$env:UserName\Documents\tablespace1\" -e TEST_TABLESPACE2_PREFIX="C:\Users\$env:UserName\Documents\tablespace2\" -e TEST_SPINWAIT_ITERS=10000 -e USER=postgres -it pgregress /bin/bash -c "cd /timescaledb/build && make -k regresschecklocal-t regresschecklocal-shared"
       if( -not $? -or -not $TESTS1 ) { exit 1 }
diff --git a/src/continuous_agg.c b/src/continuous_agg.c
index 37e1dad54..16f906ce4 100644
--- a/src/continuous_agg.c
+++ b/src/continuous_agg.c
@@ -1093,4 +1093,4 @@ int64
 ts_continuous_agg_max_bucket_width(const ContinuousAgg *agg)
 {
     return agg->data.bucket_width;
-}
\ No newline at end of file
+}
diff --git a/src/extension.c b/src/extension.c
index 055ae9705..4752e1c73 100644
--- a/src/extension.c
+++ b/src/extension.c
@@ -31,6 +31,13 @@
 #define TS_UPDATE_SCRIPT_CONFIG_VAR "timescaledb.update_script_stage"
 #define POST_UPDATE "post"
 
+/*
+ * The name of the experimental schema.
+ *
+ * Call ts_extension_schema_name() or ts_experimental_schema_name() for
+ * consistency. Don't use this macro directly.
+ */
+#define TS_EXPERIMENTAL_SCHEMA_NAME "timescaledb_experimental"
 static Oid extension_proxy_oid = InvalidOid;
 
 /*
@@ -202,6 +209,12 @@
     return get_namespace_name(ts_extension_schema_oid());
 }
 
+const char *
+ts_experimental_schema_name(void)
+{
+    return TS_EXPERIMENTAL_SCHEMA_NAME;
+}
+
 /*
  * Called upon all Relcache invalidate events.
  * Returns whether or not to invalidate the entire extension.
diff --git a/src/extension.h b/src/extension.h
index 6b89de635..89ca84e0f 100644
--- a/src/extension.h
+++ b/src/extension.h
@@ -15,6 +15,7 @@
 extern void ts_extension_check_version(const char *so_version);
 extern void ts_extension_check_server_version(void);
 extern Oid ts_extension_schema_oid(void);
 extern TSDLLEXPORT char *ts_extension_schema_name(void);
+extern const char *ts_experimental_schema_name(void);
 extern const char *ts_extension_get_so_name(void);
 extern TSDLLEXPORT const char *ts_extension_get_version(void);
diff --git a/src/func_cache.c b/src/func_cache.c
index eec895f99..4ef940c18 100644
--- a/src/func_cache.c
+++ b/src/func_cache.c
@@ -176,7 +176,7 @@ typedef struct FuncEntry
 /* Information about functions that we put in the cache */
 static FuncInfo funcinfo[] = {
     {
-        .is_timescaledb_func = true,
+        .origin = ORIGIN_TIMESCALE,
         .is_bucketing_func = true,
         .funcname = "time_bucket",
         .nargs = 2,
@@ -185,7 +185,7 @@ static FuncInfo funcinfo[] = {
         .sort_transform = time_bucket_sort_transform,
     },
     {
-        .is_timescaledb_func = true,
+        .origin = ORIGIN_TIMESCALE,
         .is_bucketing_func = true,
         .funcname = "time_bucket",
         .nargs = 3,
@@ -194,7 +194,7 @@ static FuncInfo funcinfo[] = {
         .sort_transform = time_bucket_sort_transform,
     },
     {
-        .is_timescaledb_func = true,
+        .origin = ORIGIN_TIMESCALE,
         .is_bucketing_func = true,
         .funcname = "time_bucket",
         .nargs = 2,
@@ -203,7 +203,7 @@ static FuncInfo funcinfo[] = {
         .sort_transform = time_bucket_sort_transform,
     },
     {
-        .is_timescaledb_func = true,
+        .origin = ORIGIN_TIMESCALE,
         .is_bucketing_func = true,
         .funcname = "time_bucket",
         .nargs = 3,
@@ -212,7 +212,7 @@ static FuncInfo funcinfo[] = {
         .sort_transform = time_bucket_sort_transform,
     },
     {
-        .is_timescaledb_func = true,
+        .origin = ORIGIN_TIMESCALE,
         .is_bucketing_func = true,
         .funcname = "time_bucket",
         .nargs = 2,
@@ -221,7 +221,7 @@ static FuncInfo funcinfo[] = {
         .sort_transform = time_bucket_sort_transform,
     },
     {
-        .is_timescaledb_func = true,
+        .origin = ORIGIN_TIMESCALE,
         .is_bucketing_func = true,
         .funcname = "time_bucket",
         .nargs = 3,
@@ -230,7 +230,7 @@ static FuncInfo funcinfo[] = {
         .sort_transform = time_bucket_sort_transform,
     },
     {
-        .is_timescaledb_func = true,
+        .origin = ORIGIN_TIMESCALE,
         .is_bucketing_func = true,
         .funcname = "time_bucket",
         .nargs = 2,
@@ -239,7 +239,7 @@ static FuncInfo funcinfo[] = {
         .sort_transform = time_bucket_sort_transform,
     },
     {
-        .is_timescaledb_func = true,
+        .origin = ORIGIN_TIMESCALE,
         .is_bucketing_func = true,
         .funcname = "time_bucket",
         .nargs = 3,
@@ -248,7 +248,7 @@ static FuncInfo funcinfo[] = {
         .sort_transform = time_bucket_sort_transform,
     },
     {
-        .is_timescaledb_func = true,
+        .origin = ORIGIN_TIMESCALE,
         .is_bucketing_func = true,
         .funcname = "time_bucket",
         .nargs = 2,
@@ -257,7 +257,7 @@ static FuncInfo funcinfo[] = {
         .sort_transform = time_bucket_sort_transform,
     },
     {
-        .is_timescaledb_func = true,
+        .origin = ORIGIN_TIMESCALE,
         .is_bucketing_func = true,
         .funcname = "time_bucket",
         .nargs = 3,
@@ -266,7 +266,7 @@ static FuncInfo funcinfo[] = {
         .sort_transform = time_bucket_sort_transform,
     },
     {
-        .is_timescaledb_func = true,
+        .origin = ORIGIN_TIMESCALE,
         .is_bucketing_func = true,
         .funcname = "time_bucket",
         .nargs = 2,
@@ -275,7 +275,7 @@ static FuncInfo funcinfo[] = {
         .sort_transform = time_bucket_sort_transform,
     },
     {
-        .is_timescaledb_func = true,
+        .origin = ORIGIN_TIMESCALE,
         .is_bucketing_func = true,
         .funcname = "time_bucket",
         .nargs = 3,
@@ -283,9 +283,44 @@ static FuncInfo funcinfo[] = {
         .group_estimate = time_bucket_group_estimate,
         .sort_transform = time_bucket_sort_transform,
     },
-    {
-        .is_timescaledb_func = true,
+    {
+        .origin = ORIGIN_TIMESCALE_EXPERIMENTAL,
+        .is_bucketing_func = true,
+        .funcname = "time_bucket_ng",
+        .nargs = 2,
+        .arg_types = { INTERVALOID, DATEOID },
+        .group_estimate = time_bucket_group_estimate,
+        .sort_transform = time_bucket_sort_transform,
+    },
+    {
+        .origin = ORIGIN_TIMESCALE_EXPERIMENTAL,
+        .is_bucketing_func = true,
+        .funcname = "time_bucket_ng",
+        .nargs = 3,
+        .arg_types = { INTERVALOID, DATEOID, DATEOID },
+        .group_estimate = time_bucket_group_estimate,
+        .sort_transform = time_bucket_sort_transform,
+    },
+    {
+        .origin = ORIGIN_TIMESCALE_EXPERIMENTAL,
+        .is_bucketing_func = true,
+        .funcname = "time_bucket_ng",
+        .nargs = 2,
+        .arg_types = { INTERVALOID, TIMESTAMPOID },
+        .group_estimate = time_bucket_group_estimate,
+        .sort_transform = time_bucket_sort_transform,
+    },
+    {
+        .origin = ORIGIN_TIMESCALE_EXPERIMENTAL,
+        .is_bucketing_func = true,
+        .funcname = "time_bucket_ng",
+        .nargs = 3,
+        .arg_types = { INTERVALOID, TIMESTAMPOID, TIMESTAMPOID },
+        .group_estimate = time_bucket_group_estimate,
+        .sort_transform = time_bucket_sort_transform,
+    },
+    {
+        .origin = ORIGIN_TIMESCALE,
         .is_bucketing_func = true,
         .funcname = "time_bucket_gapfill",
         .nargs = 4,
@@ -294,7 +329,7 @@ static FuncInfo funcinfo[] = {
         .sort_transform = time_bucket_gapfill_sort_transform,
     },
     {
-        .is_timescaledb_func = true,
+        .origin = ORIGIN_TIMESCALE,
         .is_bucketing_func = true,
         .funcname = "time_bucket_gapfill",
         .nargs = 4,
@@ -303,7 +338,7 @@ static FuncInfo funcinfo[] = {
         .sort_transform = time_bucket_gapfill_sort_transform,
     },
     {
-        .is_timescaledb_func = true,
+        .origin = ORIGIN_TIMESCALE,
         .is_bucketing_func = true,
         .funcname = "time_bucket_gapfill",
         .nargs = 4,
@@ -312,7 +347,7 @@ static FuncInfo funcinfo[] = {
         .sort_transform = time_bucket_gapfill_sort_transform,
     },
     {
-        .is_timescaledb_func = true,
+        .origin = ORIGIN_TIMESCALE,
         .is_bucketing_func = true,
         .funcname = "time_bucket_gapfill",
         .nargs = 4,
@@ -321,7 +356,7 @@ static FuncInfo funcinfo[] = {
         .sort_transform = time_bucket_gapfill_sort_transform,
     },
     {
-        .is_timescaledb_func = true,
+        .origin = ORIGIN_TIMESCALE,
         .is_bucketing_func = true,
         .funcname = "time_bucket_gapfill",
         .nargs = 4,
@@ -330,7 +365,7 @@ static FuncInfo funcinfo[] = {
         .sort_transform = time_bucket_gapfill_sort_transform,
     },
     {
-        .is_timescaledb_func = true,
+        .origin = ORIGIN_TIMESCALE,
         .is_bucketing_func = true,
         .funcname = "time_bucket_gapfill",
         .nargs = 4,
@@ -340,7 +375,7 @@ static FuncInfo funcinfo[] = {
         .sort_transform = time_bucket_gapfill_sort_transform,
     },
     {
-        .is_timescaledb_func = false,
+        .origin = ORIGIN_POSTGRES,
         .is_bucketing_func = true,
         .funcname = "date_trunc",
         .nargs = 2,
@@ -349,7 +384,7 @@ static FuncInfo funcinfo[] = {
         .sort_transform = date_trunc_sort_transform,
     },
     {
-        .is_timescaledb_func = false,
+        .origin = ORIGIN_POSTGRES,
         .is_bucketing_func = true,
         .funcname = "date_trunc",
         .nargs = 2,
@@ -379,6 +414,7 @@ initialize_func_info()
         .hcxt = CacheMemoryContext,
     };
     Oid extension_nsp = ts_extension_schema_oid();
+    Oid experimental_nsp = get_namespace_oid(ts_experimental_schema_name(), false);
     Oid pg_nsp = get_namespace_oid("pg_catalog", false);
     HeapTuple tuple;
     Relation rel;
@@ -391,12 +427,21 @@ initialize_func_info()
     for (i = 0; i < _MAX_CACHE_FUNCTIONS; i++)
     {
         FuncInfo *finfo = &funcinfo[i];
-        Oid namespaceoid = finfo->is_timescaledb_func ? extension_nsp : pg_nsp;
+        Oid namespaceoid = pg_nsp;
         oidvector *paramtypes = buildoidvector(finfo->arg_types, finfo->nargs);
         FuncEntry *fentry;
         bool hash_found;
         Oid funcid;
 
+        if (finfo->origin == ORIGIN_TIMESCALE)
+        {
+            namespaceoid = extension_nsp;
+        }
+        else if (finfo->origin == ORIGIN_TIMESCALE_EXPERIMENTAL)
+        {
+            namespaceoid = experimental_nsp;
+        }
+
         tuple = SearchSysCache3(PROCNAMEARGSNSP,
                                 PointerGetDatum(finfo->funcname),
                                 PointerGetDatum(paramtypes),
diff --git a/src/func_cache.h b/src/func_cache.h
index 07c8d9b62..d82affcd6 100644
--- a/src/func_cache.h
+++ b/src/func_cache.h
@@ -16,10 +16,28 @@
 typedef Expr *(*sort_transform_func)(FuncExpr *func);
 typedef double (*group_estimate_func)(PlannerInfo *root, FuncExpr *expr, double path_rows);
 
+/* Describes the function origin */
+typedef enum
+{
+    /*
+     * Function is provided by PostgreSQL.
+     */
+    ORIGIN_POSTGRES = 0,
+    /*
+     * Function is provided by TimescaleDB.
+     */
+    ORIGIN_TIMESCALE = 1,
+    /*
+     * Function is provided by TimescaleDB and is experimental.
+     * It should be looked up in the experimental schema.
+     */
+    ORIGIN_TIMESCALE_EXPERIMENTAL = 2,
+} FuncOrigin;
+
 typedef struct FuncInfo
 {
     const char *funcname;
-    bool is_timescaledb_func;
+    FuncOrigin origin;
     bool is_bucketing_func;
     int nargs;
     Oid arg_types[FUNC_CACHE_MAX_FUNC_ARGS];
diff --git a/tsl/src/continuous_aggs/create.c b/tsl/src/continuous_aggs/create.c
index 058d12868..a4ddcefc8 100644
--- a/tsl/src/continuous_aggs/create.c
+++ b/tsl/src/continuous_aggs/create.c
@@ -1069,9 +1069,18 @@ get_partialize_funcexpr(Aggref *agg)
 static bool
 is_valid_bucketing_function(Oid funcid)
 {
+    bool is_timescale;
     FuncInfo *finfo = ts_func_cache_get_bucketing_func(funcid);
 
-    return finfo != NULL && finfo->is_timescaledb_func && finfo->nargs == 2;
+    if (finfo == NULL)
+    {
+        return false;
+    }
+
+    is_timescale =
+        (finfo->origin == ORIGIN_TIMESCALE) || (finfo->origin == ORIGIN_TIMESCALE_EXPERIMENTAL);
+
+    return is_timescale && (finfo->nargs == 2);
 }
 
 /*initialize MatTableColumnInfo */
diff --git a/tsl/test/expected/continuous_aggs_experimental.out b/tsl/test/expected/continuous_aggs_experimental.out
new file mode 100644
index 000000000..2d954ffef
--- /dev/null
+++ b/tsl/test/expected/continuous_aggs_experimental.out
@@ -0,0 +1,55 @@
+-- This file and its contents are licensed under the Timescale License.
+-- Please see the included NOTICE for copyright information and
+-- LICENSE-TIMESCALE for a copy of the license.
+-- Make sure an experimental immutable function with 2 arguments can be used in caggs.
+-- Functions with 3 arguments and/or stable functions are currently not supported in caggs.
+CREATE TABLE conditions(
+  day DATE NOT NULL,
+  city text NOT NULL,
+  temperature INT NOT NULL);
+SELECT create_hypertable(
+  'conditions', 'day',
+  chunk_time_interval => INTERVAL '1 day'
+);
+    create_hypertable
+-------------------------
+ (1,public,conditions,t)
+(1 row)
+
+INSERT INTO conditions (day, city, temperature) VALUES
+  ('2021-06-14', 'Moscow', 26),
+  ('2021-06-15', 'Moscow', 22),
+  ('2021-06-16', 'Moscow', 24),
+  ('2021-06-17', 'Moscow', 24),
+  ('2021-06-18', 'Moscow', 27),
+  ('2021-06-19', 'Moscow', 28),
+  ('2021-06-20', 'Moscow', 30),
+  ('2021-06-21', 'Moscow', 31),
+  ('2021-06-22', 'Moscow', 34),
+  ('2021-06-23', 'Moscow', 34),
+  ('2021-06-24', 'Moscow', 34),
+  ('2021-06-25', 'Moscow', 32),
+  ('2021-06-26', 'Moscow', 32),
+  ('2021-06-27', 'Moscow', 31);
+CREATE MATERIALIZED VIEW conditions_summary_weekly
+WITH (timescaledb.continuous) AS
+SELECT city,
+       timescaledb_experimental.time_bucket_ng('7 days', day) AS bucket,
+       MIN(temperature),
+       MAX(temperature)
+FROM conditions
+GROUP BY city, bucket;
+NOTICE:  refreshing continuous aggregate "conditions_summary_weekly"
+SELECT to_char(bucket, 'YYYY-MM-DD'), city, min, max
+FROM conditions_summary_weekly
+ORDER BY bucket;
+  to_char   |  city  | min | max
+------------+--------+-----+-----
+ 2021-06-12 | Moscow |  22 |  27
+ 2021-06-19 | Moscow |  28 |  34
+ 2021-06-26 | Moscow |  31 |  32
+(3 rows)
+
+DROP TABLE conditions CASCADE;
+NOTICE:  drop cascades to 3 other objects
+NOTICE:  drop cascades to 2 other objects
diff --git a/tsl/test/sql/CMakeLists.txt b/tsl/test/sql/CMakeLists.txt
index ebf06847a..95f2a1991 100644
--- a/tsl/test/sql/CMakeLists.txt
+++ b/tsl/test/sql/CMakeLists.txt
@@ -10,6 +10,7 @@ set(TEST_FILES
   continuous_aggs_policy.sql
   continuous_aggs_refresh.sql
   continuous_aggs_watermark.sql
+  continuous_aggs_experimental.sql
   dist_views.sql
   move.sql
   partialize_finalize.sql
diff --git a/tsl/test/sql/continuous_aggs_experimental.sql b/tsl/test/sql/continuous_aggs_experimental.sql
new file mode 100644
index 000000000..91bdac2d9
--- /dev/null
+++ b/tsl/test/sql/continuous_aggs_experimental.sql
@@ -0,0 +1,47 @@
+-- This file and its contents are licensed under the Timescale License.
+-- Please see the included NOTICE for copyright information and
+-- LICENSE-TIMESCALE for a copy of the license.
+
+-- Make sure an experimental immutable function with 2 arguments can be used in caggs.
+-- Functions with 3 arguments and/or stable functions are currently not supported in caggs.
+
+CREATE TABLE conditions(
+  day DATE NOT NULL,
+  city text NOT NULL,
+  temperature INT NOT NULL);
+
+SELECT create_hypertable(
+  'conditions', 'day',
+  chunk_time_interval => INTERVAL '1 day'
+);
+
+INSERT INTO conditions (day, city, temperature) VALUES
+  ('2021-06-14', 'Moscow', 26),
+  ('2021-06-15', 'Moscow', 22),
+  ('2021-06-16', 'Moscow', 24),
+  ('2021-06-17', 'Moscow', 24),
+  ('2021-06-18', 'Moscow', 27),
+  ('2021-06-19', 'Moscow', 28),
+  ('2021-06-20', 'Moscow', 30),
+  ('2021-06-21', 'Moscow', 31),
+  ('2021-06-22', 'Moscow', 34),
+  ('2021-06-23', 'Moscow', 34),
+  ('2021-06-24', 'Moscow', 34),
+  ('2021-06-25', 'Moscow', 32),
+  ('2021-06-26', 'Moscow', 32),
+  ('2021-06-27', 'Moscow', 31);
+
+CREATE MATERIALIZED VIEW conditions_summary_weekly
+WITH (timescaledb.continuous) AS
+SELECT city,
+       timescaledb_experimental.time_bucket_ng('7 days', day) AS bucket,
+       MIN(temperature),
+       MAX(temperature)
+FROM conditions
+GROUP BY city, bucket;
+
+SELECT to_char(bucket, 'YYYY-MM-DD'), city, min, max
+FROM conditions_summary_weekly
+ORDER BY bucket;
+
+DROP TABLE conditions CASCADE;
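
For reference, a minimal standalone sketch (not part of the patch) of calling the experimental function that this change registers in the function cache. The 3-argument call follows the (INTERVAL, DATE, DATE) entry added to funcinfo[]; treating the third argument as an origin date is an assumption for illustration and is not exercised by the regression test above, which only covers the 2-argument immutable form supported in caggs.

-- Illustration only, assuming the extension is installed.
-- 2-argument form: the variant the cagg test above exercises.
SELECT timescaledb_experimental.time_bucket_ng('7 days', DATE '2021-06-15');
-- 3-argument form, matching the (INTERVAL, DATE, DATE) signature registered in
-- func_cache.c; the third argument is assumed to be an origin date (hypothetical
-- usage, not covered by this diff's test).
SELECT timescaledb_experimental.time_bucket_ng('7 days', DATE '2021-06-15', DATE '2021-06-01');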