Add ability to partition by a date type

This commit is contained in:
Matvey Arye 2017-08-23 11:09:53 -04:00 committed by Matvey Arye
parent 48e0a61131
commit d2561cc4fd
13 changed files with 281 additions and 75 deletions

View File

@ -91,7 +91,7 @@ BEGIN
WHERE attrelid = main_table AND attname = time_column_name;
-- Timestamp types can use default value, integral should be an error if NULL
IF time_type IN ('TIMESTAMP', 'TIMESTAMPTZ') AND chunk_time_interval IS NULL THEN
IF time_type IN ('TIMESTAMP', 'TIMESTAMPTZ', 'DATE') AND chunk_time_interval IS NULL THEN
chunk_time_interval_actual := _timescaledb_internal.interval_to_usec('1 month');
ELSIF time_type IN ('SMALLINT', 'INTEGER', 'BIGINT') AND chunk_time_interval IS NULL THEN
RAISE EXCEPTION 'chunk_time_interval needs to be explicitly set for types SMALLINT, INTEGER, and BIGINT'

View File

@ -119,10 +119,16 @@ BEGIN
IF num_slices IS NULL THEN
-- Open dimension
IF column_type NOT IN ('BIGINT', 'INTEGER', 'SMALLINT', 'TIMESTAMP', 'TIMESTAMPTZ') THEN
IF column_type NOT IN ('BIGINT', 'INTEGER', 'SMALLINT', 'DATE', 'TIMESTAMP', 'TIMESTAMPTZ') THEN
RAISE EXCEPTION 'illegal type for column "%": %', column_name, column_type
USING ERRCODE = 'IO102';
END IF;
IF column_type = 'DATE'::regtype AND
(interval_length <= 0 OR interval_length % _timescaledb_internal.interval_to_usec('1 day') != 0)
THEN
RAISE EXCEPTION 'The interval for a hypertable with a DATE time column must be at least one day and given in multiples of days'
USING ERRCODE = 'IO102';
END IF;
partitioning_func := NULL;
partitioning_func_schema := NULL;
aligned = TRUE;

View File

@ -100,6 +100,12 @@ BEGIN
dimension_row.column_name, dimension_slice_row.range_start, dimension_slice_row.range_end);
ELSE
--TODO: only works with time for now
IF _timescaledb_internal.time_literal_sql(dimension_slice_row.range_start, dimension_row.column_type) =
_timescaledb_internal.time_literal_sql(dimension_slice_row.range_end, dimension_row.column_type) THEN
RAISE 'Time based constraints have the same start and end values for column "%": %',
dimension_row.column_name,
_timescaledb_internal.time_literal_sql(dimension_slice_row.range_end, dimension_row.column_type);
END IF;
return format(
$$
%1$I >= %2$s AND %1$I < %3$s

View File

@ -35,6 +35,8 @@ BEGIN
WHEN 'TIMESTAMP'::regtype, 'TIMESTAMPTZ'::regtype THEN
-- assume time_value is in microsec
RETURN format('%2$s %1$L', _timescaledb_internal.to_timestamp(time_value), column_type); -- microseconds
WHEN 'DATE'::regtype THEN
RETURN format('%L', timezone('UTC',_timescaledb_internal.to_timestamp(time_value))::date);
END CASE;
END
$BODY$;

View File

@ -87,6 +87,33 @@ transform_timestamp_cast(FuncExpr *func)
return (Expr *) copyObject(first);
}
static Expr *
transform_timestamptz_cast(FuncExpr *func)
{
	/*
	 * Transform a cast to timestamptz (from date, timestamp, or abstime)
	 * into its bare argument. Only the single-argument form of the cast
	 * is handled, which avoids versions with explicit timezone
	 * specifiers:
	 *
	 *   timestamptz(var) => var
	 *
	 * proof: timestamptz(time1) > timestamptz(time2) iff time1 > time2
	 */
	Expr	   *arg;

	if (list_length(func->args) != 1)
		return (Expr *) func;

	arg = sort_transform_expr(linitial(func->args));

	/* only strip the cast when it wraps a plain column reference */
	if (!IsA(arg, Var))
		return (Expr *) func;

	return (Expr *) copyObject(arg);
}
static inline Expr *
transform_time_op_const_interval(OpExpr *op)
@ -102,7 +129,8 @@ transform_time_op_const_interval(OpExpr *op)
Oid right = exprType((Node *) lsecond(op->args));
if ((left == TIMESTAMPOID && right == INTERVALOID) ||
(left == TIMESTAMPTZOID && right == INTERVALOID))
(left == TIMESTAMPTZOID && right == INTERVALOID) ||
(left == DATEOID && right == INTERVALOID))
{
char *name = get_opname(op->opno);
@ -215,13 +243,17 @@ sort_transform_expr(Expr *orig_expr)
return transform_time_bucket(func);
if (strncmp(func_name, "timestamp", NAMEDATALEN) == 0)
return transform_timestamp_cast(func);
if (strncmp(func_name, "timestamptz", NAMEDATALEN) == 0)
return transform_timestamptz_cast(func);
}
if (IsA(orig_expr, OpExpr))
{
OpExpr *op = (OpExpr *) orig_expr;
Oid type_first = exprType((Node *) linitial(op->args));
if (type_first == TIMESTAMPOID || type_first == TIMESTAMPTZOID)
if (type_first == TIMESTAMPOID ||
type_first == TIMESTAMPTZOID ||
type_first == DATEOID)
{
return transform_time_op_const_interval(op);
}

View File

@ -167,32 +167,14 @@ time_value_to_internal(Datum time_val, Oid type)
return DatumGetInt64(res);
}
elog(ERROR, "unkown time type oid '%d'", type);
}
char *
internal_time_to_column_literal_sql(int64 internal_time, Oid type)
{
char *sql = palloc(100 * sizeof(char));
/* ok to waste a little space */
if (type == INT8OID || type == INT4OID || type == INT8OID)
if (type == DATEOID)
{
snprintf(sql, 100, "%ld", internal_time);
return sql;
}
/* todo avoid these function calls */
if (type == TIMESTAMPOID)
{
snprintf(sql, 100, "_timescaledb_internal.to_timestamp(%ld)::TIMESTAMP", internal_time);
return sql;
}
if (type == TIMESTAMPTZOID)
{
snprintf(sql, 100, "_timescaledb_internal.to_timestamp(%ld)", internal_time);
return sql;
Datum tz = DirectFunctionCall1(date_timestamptz, time_val);
Datum res = DirectFunctionCall1(pg_timestamp_to_unix_microseconds, tz);
return DatumGetInt64(res);
}
elog(ERROR, "unkown time type oid '%d'", type);
}

View File

@ -15,7 +15,6 @@ extern Datum timestamp_bucket(PG_FUNCTION_ARGS);
* Convert a column value into the internal time representation.
*/
extern int64 time_value_to_internal(Datum time_val, Oid type);
extern char *internal_time_to_column_literal_sql(int64 internal_time, Oid type);
#if 0
#define CACHE1_elog(a,b) elog(a,b)

View File

@ -421,16 +421,33 @@ INSERT INTO many_partitions_test
SELECT to_timestamp(ser), ser, ser::text FROM generate_series(101,200) ser;
INSERT INTO many_partitions_test
SELECT to_timestamp(ser), ser, (ser-201)::text FROM generate_series(201,300) ser;
SELECT * FROM many_partitions_test ORDER BY time DESC LIMIT 2;
SELECT * FROM many_partitions_test ORDER BY time DESC LIMIT 2;
time | temp | device
--------------------------+------+--------
Wed Dec 31 16:05:00 1969 | 300 | 99
Wed Dec 31 16:04:59 1969 | 299 | 98
(2 rows)
SELECT count(*) FROM many_partitions_test;
SELECT count(*) FROM many_partitions_test;
count
-------
300
(1 row)
CREATE TABLE date_col_test(time date, temp float8, device text NOT NULL);
SELECT create_hypertable('date_col_test', 'time', 'device', 1000);
create_hypertable
-------------------
(1 row)
INSERT INTO date_col_test
VALUES ('2001-02-01', 98, 'dev1'),
('2001-03-02', 98, 'dev1');
SELECT * FROM date_col_test WHERE time > '2001-01-01';
time | temp | device
------------+------+--------
02-01-2001 | 98 | dev1
03-02-2001 | 98 | dev1
(2 rows)

View File

@ -54,6 +54,23 @@ SELECT * FROM create_hypertable('"public"."hyper_1_int"'::regclass, 'time'::name
INSERT INTO hyper_1_int SELECT ser, ser, ser+10000, sqrt(ser::numeric) FROM generate_series(0,10000) ser;
INSERT INTO hyper_1_int SELECT ser, ser, ser+10000, sqrt(ser::numeric) FROM generate_series(10001,20000) ser;
CREATE TABLE PUBLIC.hyper_1_date (
time date NOT NULL,
series_0 DOUBLE PRECISION NULL,
series_1 DOUBLE PRECISION NULL,
series_2 DOUBLE PRECISION NULL
);
CREATE INDEX "time_plain_date" ON PUBLIC.hyper_1_date (time DESC, series_0);
SELECT * FROM create_hypertable('"public"."hyper_1_date"'::regclass, 'time'::name, number_partitions => 1, chunk_time_interval=>86400000000, create_default_indexes=>FALSE);
create_hypertable
-------------------
(1 row)
INSERT INTO hyper_1_date SELECT to_timestamp(ser)::date, ser, ser+10000, sqrt(ser::numeric) FROM generate_series(0,10000) ser;
INSERT INTO hyper_1_date SELECT to_timestamp(ser)::date, ser, ser+10000, sqrt(ser::numeric) FROM generate_series(10001,20000) ser;
--below needed to create enough unique dates to trigger an index scan
INSERT INTO hyper_1_date SELECT to_timestamp(ser*100)::date, ser, ser+10000, sqrt(ser::numeric) FROM generate_series(10001,20000) ser;
CREATE TABLE PUBLIC.plain_table (
time TIMESTAMPTZ NOT NULL,
series_0 DOUBLE PRECISION NULL,
@ -96,6 +113,32 @@ EXPLAIN (costs off) SELECT date_trunc('minute', time) t, avg(series_0), min(seri
-> Index Scan using "1-time_plain" on _hyper_1_1_chunk
(8 rows)
EXPLAIN (costs off) SELECT date_trunc('minute', time) t, avg(series_0), min(series_1), avg(series_2)
FROM hyper_1_date GROUP BY t ORDER BY t DESC limit 2;
QUERY PLAN
------------------------------------------------------------------------------------------------------------------
Limit
-> GroupAggregate
Group Key: (date_trunc('minute'::text, (hyper_1_date."time")::timestamp with time zone))
-> Result
-> Merge Append
Sort Key: (date_trunc('minute'::text, (hyper_1_date."time")::timestamp with time zone)) DESC
-> Index Scan using time_plain_date on hyper_1_date
-> Index Scan using "6-time_plain_date" on _hyper_4_6_chunk
-> Index Scan using "7-time_plain_date" on _hyper_4_7_chunk
-> Index Scan using "8-time_plain_date" on _hyper_4_8_chunk
-> Index Scan using "9-time_plain_date" on _hyper_4_9_chunk
-> Index Scan using "10-time_plain_date" on _hyper_4_10_chunk
-> Index Scan using "11-time_plain_date" on _hyper_4_11_chunk
-> Index Scan using "12-time_plain_date" on _hyper_4_12_chunk
-> Index Scan using "13-time_plain_date" on _hyper_4_13_chunk
-> Index Scan using "14-time_plain_date" on _hyper_4_14_chunk
-> Index Scan using "15-time_plain_date" on _hyper_4_15_chunk
-> Index Scan using "16-time_plain_date" on _hyper_4_16_chunk
-> Index Scan using "17-time_plain_date" on _hyper_4_17_chunk
-> Index Scan using "18-time_plain_date" on _hyper_4_18_chunk
(20 rows)
--the minute and second results should be diff
SELECT date_trunc('minute', time) t, avg(series_0), min(series_1), avg(series_2) FROM hyper_1 GROUP BY t ORDER BY t DESC limit 2;
t | avg | min | avg
@ -149,8 +192,8 @@ DROP INDEX "time_plain";
CREATE INDEX "time_trunc" ON PUBLIC.hyper_1 (date_trunc('minute', time));
ANALYZE;
EXPLAIN (costs off) SELECT date_trunc('minute', time) t, avg(series_0), min(series_1), avg(series_2) FROM hyper_1 GROUP BY t ORDER BY t DESC limit 2;
QUERY PLAN
--------------------------------------------------------------------------------------
QUERY PLAN
---------------------------------------------------------------------------------------
Limit
-> GroupAggregate
Group Key: (date_trunc('minute'::text, hyper_1."time"))
@ -158,7 +201,7 @@ EXPLAIN (costs off) SELECT date_trunc('minute', time) t, avg(series_0), min(seri
-> Merge Append
Sort Key: (date_trunc('minute'::text, hyper_1."time")) DESC
-> Index Scan Backward using time_trunc on hyper_1
-> Index Scan Backward using "6-time_trunc" on _hyper_1_1_chunk
-> Index Scan Backward using "19-time_trunc" on _hyper_1_1_chunk
(8 rows)
SELECT date_trunc('minute', time) t, avg(series_0), min(series_1), avg(series_2) FROM hyper_1 GROUP BY t ORDER BY t DESC limit 2;
@ -172,8 +215,8 @@ SELECT date_trunc('minute', time) t, avg(series_0), min(series_1), avg(series_2)
CREATE INDEX "time_plain" ON PUBLIC.hyper_1 (time DESC, series_0);
ANALYZE;
EXPLAIN (costs off) SELECT date_trunc('minute', time) t, avg(series_0), min(series_1), avg(series_2) FROM hyper_1 GROUP BY t ORDER BY t DESC limit 2;
QUERY PLAN
--------------------------------------------------------------------------------------
QUERY PLAN
---------------------------------------------------------------------------------------
Limit
-> GroupAggregate
Group Key: (date_trunc('minute'::text, hyper_1."time"))
@ -181,7 +224,7 @@ EXPLAIN (costs off) SELECT date_trunc('minute', time) t, avg(series_0), min(seri
-> Merge Append
Sort Key: (date_trunc('minute'::text, hyper_1."time")) DESC
-> Index Scan Backward using time_trunc on hyper_1
-> Index Scan Backward using "6-time_trunc" on _hyper_1_1_chunk
-> Index Scan Backward using "19-time_trunc" on _hyper_1_1_chunk
(8 rows)
SELECT date_trunc('minute', time) t, avg(series_0), min(series_1), avg(series_2) FROM hyper_1 GROUP BY t ORDER BY t DESC limit 2;
@ -202,7 +245,7 @@ FROM hyper_1 GROUP BY t ORDER BY t DESC limit 2;
-> Merge Append
Sort Key: (time_bucket('@ 1 min'::interval, hyper_1."time")) DESC
-> Index Scan using time_plain on hyper_1
-> Index Scan using "7-time_plain" on _hyper_1_1_chunk
-> Index Scan using "20-time_plain" on _hyper_1_1_chunk
(8 rows)
SELECT time_bucket('1 minute', time) t, avg(series_0), min(series_1), avg(series_2)
@ -224,7 +267,7 @@ FROM hyper_1 GROUP BY t ORDER BY t DESC limit 2;
-> Merge Append
Sort Key: ((time_bucket('@ 1 min'::interval, (hyper_1."time" - '@ 30 secs'::interval)) + '@ 30 secs'::interval)) DESC
-> Index Scan using time_plain on hyper_1
-> Index Scan using "7-time_plain" on _hyper_1_1_chunk
-> Index Scan using "20-time_plain" on _hyper_1_1_chunk
(8 rows)
SELECT time_bucket('1 minute', time, INTERVAL '30 seconds') t, avg(series_0), min(series_1), avg(series_2)
@ -246,7 +289,7 @@ FROM hyper_1 GROUP BY t ORDER BY t DESC limit 2;
-> Merge Append
Sort Key: (time_bucket('@ 1 min'::interval, (hyper_1."time" - '@ 30 secs'::interval))) DESC
-> Index Scan using time_plain on hyper_1
-> Index Scan using "7-time_plain" on _hyper_1_1_chunk
-> Index Scan using "20-time_plain" on _hyper_1_1_chunk
(8 rows)
SELECT time_bucket('1 minute', time - INTERVAL '30 seconds') t, avg(series_0), min(series_1), avg(series_2)
@ -268,7 +311,7 @@ FROM hyper_1 GROUP BY t ORDER BY t DESC limit 2;
-> Merge Append
Sort Key: ((time_bucket('@ 1 min'::interval, (hyper_1."time" - '@ 30 secs'::interval)) + '@ 30 secs'::interval)) DESC
-> Index Scan using time_plain on hyper_1
-> Index Scan using "7-time_plain" on _hyper_1_1_chunk
-> Index Scan using "20-time_plain" on _hyper_1_1_chunk
(8 rows)
SELECT time_bucket('1 minute', time - INTERVAL '30 seconds') + INTERVAL '30 seconds' t, avg(series_0), min(series_1), avg(series_2)

View File

@ -55,6 +55,23 @@ SELECT * FROM create_hypertable('"public"."hyper_1_int"'::regclass, 'time'::name
INSERT INTO hyper_1_int SELECT ser, ser, ser+10000, sqrt(ser::numeric) FROM generate_series(0,10000) ser;
INSERT INTO hyper_1_int SELECT ser, ser, ser+10000, sqrt(ser::numeric) FROM generate_series(10001,20000) ser;
CREATE TABLE PUBLIC.hyper_1_date (
time date NOT NULL,
series_0 DOUBLE PRECISION NULL,
series_1 DOUBLE PRECISION NULL,
series_2 DOUBLE PRECISION NULL
);
CREATE INDEX "time_plain_date" ON PUBLIC.hyper_1_date (time DESC, series_0);
SELECT * FROM create_hypertable('"public"."hyper_1_date"'::regclass, 'time'::name, number_partitions => 1, chunk_time_interval=>86400000000, create_default_indexes=>FALSE);
create_hypertable
-------------------
(1 row)
INSERT INTO hyper_1_date SELECT to_timestamp(ser)::date, ser, ser+10000, sqrt(ser::numeric) FROM generate_series(0,10000) ser;
INSERT INTO hyper_1_date SELECT to_timestamp(ser)::date, ser, ser+10000, sqrt(ser::numeric) FROM generate_series(10001,20000) ser;
--below needed to create enough unique dates to trigger an index scan
INSERT INTO hyper_1_date SELECT to_timestamp(ser*100)::date, ser, ser+10000, sqrt(ser::numeric) FROM generate_series(10001,20000) ser;
CREATE TABLE PUBLIC.plain_table (
time TIMESTAMPTZ NOT NULL,
series_0 DOUBLE PRECISION NULL,
@ -98,6 +115,33 @@ EXPLAIN (costs off) SELECT date_trunc('minute', time) t, avg(series_0), min(seri
-> Seq Scan on _hyper_1_1_chunk
(9 rows)
EXPLAIN (costs off) SELECT date_trunc('minute', time) t, avg(series_0), min(series_1), avg(series_2)
FROM hyper_1_date GROUP BY t ORDER BY t DESC limit 2;
QUERY PLAN
------------------------------------------------------------------------------------------------------
Limit
-> Sort
Sort Key: (date_trunc('minute'::text, (hyper_1_date."time")::timestamp with time zone)) DESC
-> HashAggregate
Group Key: date_trunc('minute'::text, (hyper_1_date."time")::timestamp with time zone)
-> Result
-> Append
-> Seq Scan on hyper_1_date
-> Seq Scan on _hyper_4_6_chunk
-> Seq Scan on _hyper_4_7_chunk
-> Seq Scan on _hyper_4_8_chunk
-> Seq Scan on _hyper_4_9_chunk
-> Seq Scan on _hyper_4_10_chunk
-> Seq Scan on _hyper_4_11_chunk
-> Seq Scan on _hyper_4_12_chunk
-> Seq Scan on _hyper_4_13_chunk
-> Seq Scan on _hyper_4_14_chunk
-> Seq Scan on _hyper_4_15_chunk
-> Seq Scan on _hyper_4_16_chunk
-> Seq Scan on _hyper_4_17_chunk
-> Seq Scan on _hyper_4_18_chunk
(21 rows)
--the minute and second results should be diff
SELECT date_trunc('minute', time) t, avg(series_0), min(series_1), avg(series_2) FROM hyper_1 GROUP BY t ORDER BY t DESC limit 2;
t | avg | min | avg
@ -155,8 +199,8 @@ DROP INDEX "time_plain";
CREATE INDEX "time_trunc" ON PUBLIC.hyper_1 (date_trunc('minute', time));
ANALYZE;
EXPLAIN (costs off) SELECT date_trunc('minute', time) t, avg(series_0), min(series_1), avg(series_2) FROM hyper_1 GROUP BY t ORDER BY t DESC limit 2;
QUERY PLAN
--------------------------------------------------------------------------------------
QUERY PLAN
---------------------------------------------------------------------------------------
Limit
-> GroupAggregate
Group Key: (date_trunc('minute'::text, hyper_1."time"))
@ -164,7 +208,7 @@ EXPLAIN (costs off) SELECT date_trunc('minute', time) t, avg(series_0), min(seri
-> Merge Append
Sort Key: (date_trunc('minute'::text, hyper_1."time")) DESC
-> Index Scan Backward using time_trunc on hyper_1
-> Index Scan Backward using "6-time_trunc" on _hyper_1_1_chunk
-> Index Scan Backward using "19-time_trunc" on _hyper_1_1_chunk
(8 rows)
SELECT date_trunc('minute', time) t, avg(series_0), min(series_1), avg(series_2) FROM hyper_1 GROUP BY t ORDER BY t DESC limit 2;
@ -178,8 +222,8 @@ SELECT date_trunc('minute', time) t, avg(series_0), min(series_1), avg(series_2)
CREATE INDEX "time_plain" ON PUBLIC.hyper_1 (time DESC, series_0);
ANALYZE;
EXPLAIN (costs off) SELECT date_trunc('minute', time) t, avg(series_0), min(series_1), avg(series_2) FROM hyper_1 GROUP BY t ORDER BY t DESC limit 2;
QUERY PLAN
--------------------------------------------------------------------------------------
QUERY PLAN
---------------------------------------------------------------------------------------
Limit
-> GroupAggregate
Group Key: (date_trunc('minute'::text, hyper_1."time"))
@ -187,7 +231,7 @@ EXPLAIN (costs off) SELECT date_trunc('minute', time) t, avg(series_0), min(seri
-> Merge Append
Sort Key: (date_trunc('minute'::text, hyper_1."time")) DESC
-> Index Scan Backward using time_trunc on hyper_1
-> Index Scan Backward using "6-time_trunc" on _hyper_1_1_chunk
-> Index Scan Backward using "19-time_trunc" on _hyper_1_1_chunk
(8 rows)
SELECT date_trunc('minute', time) t, avg(series_0), min(series_1), avg(series_2) FROM hyper_1 GROUP BY t ORDER BY t DESC limit 2;

View File

@ -2,13 +2,13 @@
\! diff ../results/sql_query_results_optimized.out ../results/sql_query_results_unoptimized.out
10a11
> SET timescaledb.disable_optimizations= 'on';
87,88c88,89
104,105c105,106
< QUERY PLAN
< ---------------------------------------------------------------------------------
---
> QUERY PLAN
> ---------------------------------------------------------------------------
92,97c93,99
109,114c110,116
< -> Result
< -> Merge Append
< Sort Key: (date_trunc('minute'::text, hyper_1."time")) DESC
@ -23,13 +23,62 @@
> -> Seq Scan on hyper_1
> -> Seq Scan on _hyper_1_1_chunk
> (9 rows)
122,123c124,125
118,119c120,121
< QUERY PLAN
< ------------------------------------------------------------------------------------------------------------------
---
> QUERY PLAN
> ------------------------------------------------------------------------------------------------------
121,140c123,143
< -> GroupAggregate
< Group Key: (date_trunc('minute'::text, (hyper_1_date."time")::timestamp with time zone))
< -> Result
< -> Merge Append
< Sort Key: (date_trunc('minute'::text, (hyper_1_date."time")::timestamp with time zone)) DESC
< -> Index Scan using time_plain_date on hyper_1_date
< -> Index Scan using "6-time_plain_date" on _hyper_4_6_chunk
< -> Index Scan using "7-time_plain_date" on _hyper_4_7_chunk
< -> Index Scan using "8-time_plain_date" on _hyper_4_8_chunk
< -> Index Scan using "9-time_plain_date" on _hyper_4_9_chunk
< -> Index Scan using "10-time_plain_date" on _hyper_4_10_chunk
< -> Index Scan using "11-time_plain_date" on _hyper_4_11_chunk
< -> Index Scan using "12-time_plain_date" on _hyper_4_12_chunk
< -> Index Scan using "13-time_plain_date" on _hyper_4_13_chunk
< -> Index Scan using "14-time_plain_date" on _hyper_4_14_chunk
< -> Index Scan using "15-time_plain_date" on _hyper_4_15_chunk
< -> Index Scan using "16-time_plain_date" on _hyper_4_16_chunk
< -> Index Scan using "17-time_plain_date" on _hyper_4_17_chunk
< -> Index Scan using "18-time_plain_date" on _hyper_4_18_chunk
< (20 rows)
---
> -> Sort
> Sort Key: (date_trunc('minute'::text, (hyper_1_date."time")::timestamp with time zone)) DESC
> -> HashAggregate
> Group Key: date_trunc('minute'::text, (hyper_1_date."time")::timestamp with time zone)
> -> Result
> -> Append
> -> Seq Scan on hyper_1_date
> -> Seq Scan on _hyper_4_6_chunk
> -> Seq Scan on _hyper_4_7_chunk
> -> Seq Scan on _hyper_4_8_chunk
> -> Seq Scan on _hyper_4_9_chunk
> -> Seq Scan on _hyper_4_10_chunk
> -> Seq Scan on _hyper_4_11_chunk
> -> Seq Scan on _hyper_4_12_chunk
> -> Seq Scan on _hyper_4_13_chunk
> -> Seq Scan on _hyper_4_14_chunk
> -> Seq Scan on _hyper_4_15_chunk
> -> Seq Scan on _hyper_4_16_chunk
> -> Seq Scan on _hyper_4_17_chunk
> -> Seq Scan on _hyper_4_18_chunk
> (21 rows)
165,166c168,169
< QUERY PLAN
< -----------------------------------------------------------------------------------------------------------
---
> QUERY PLAN
> -----------------------------------------------------------------------------------------------------------------------
125,133c127,139
168,176c171,183
< -> GroupAggregate
< Group Key: (date_trunc('minute'::text, hyper_1."time"))
< -> Custom Scan (ConstraintAwareAppend)
@ -53,18 +102,18 @@
> -> Bitmap Index Scan on "1-time_plain"
> Index Cond: ("time" < 'Wed Dec 31 16:15:00 1969 PST'::timestamp with time zone)
> (13 rows)
196,197c202,203
239,240c246,247
< QUERY PLAN
< ---------------------------------------------------------------------------------------
---
> QUERY PLAN
> ---------------------------------------------------------------------------------
201,206c207,213
244,249c251,257
< -> Result
< -> Merge Append
< Sort Key: (time_bucket('@ 1 min'::interval, hyper_1."time")) DESC
< -> Index Scan using time_plain on hyper_1
< -> Index Scan using "7-time_plain" on _hyper_1_1_chunk
< -> Index Scan using "20-time_plain" on _hyper_1_1_chunk
< (8 rows)
---
> -> Sort
@ -74,18 +123,18 @@
> -> Seq Scan on hyper_1
> -> Seq Scan on _hyper_1_1_chunk
> (9 rows)
218,219c225,226
261,262c269,270
< QUERY PLAN
< -------------------------------------------------------------------------------------------------------------------------------------------
---
> QUERY PLAN
> -------------------------------------------------------------------------------------------------------------------------------------
223,228c230,236
266,271c274,280
< -> Result
< -> Merge Append
< Sort Key: ((time_bucket('@ 1 min'::interval, (hyper_1."time" - '@ 30 secs'::interval)) + '@ 30 secs'::interval)) DESC
< -> Index Scan using time_plain on hyper_1
< -> Index Scan using "7-time_plain" on _hyper_1_1_chunk
< -> Index Scan using "20-time_plain" on _hyper_1_1_chunk
< (8 rows)
---
> -> Sort
@ -95,18 +144,18 @@
> -> Seq Scan on hyper_1
> -> Seq Scan on _hyper_1_1_chunk
> (9 rows)
240,241c248,249
283,284c292,293
< QUERY PLAN
< -----------------------------------------------------------------------------------------------------------------
---
> QUERY PLAN
> -----------------------------------------------------------------------------------------------------------
245,250c253,259
288,293c297,303
< -> Result
< -> Merge Append
< Sort Key: (time_bucket('@ 1 min'::interval, (hyper_1."time" - '@ 30 secs'::interval))) DESC
< -> Index Scan using time_plain on hyper_1
< -> Index Scan using "7-time_plain" on _hyper_1_1_chunk
< -> Index Scan using "20-time_plain" on _hyper_1_1_chunk
< (8 rows)
---
> -> Sort
@ -116,18 +165,18 @@
> -> Seq Scan on hyper_1
> -> Seq Scan on _hyper_1_1_chunk
> (9 rows)
262,263c271,272
305,306c315,316
< QUERY PLAN
< -------------------------------------------------------------------------------------------------------------------------------------------
---
> QUERY PLAN
> -------------------------------------------------------------------------------------------------------------------------------------
267,272c276,282
310,315c320,326
< -> Result
< -> Merge Append
< Sort Key: ((time_bucket('@ 1 min'::interval, (hyper_1."time" - '@ 30 secs'::interval)) + '@ 30 secs'::interval)) DESC
< -> Index Scan using time_plain on hyper_1
< -> Index Scan using "7-time_plain" on _hyper_1_1_chunk
< -> Index Scan using "20-time_plain" on _hyper_1_1_chunk
< (8 rows)
---
> -> Sort
@ -137,13 +186,13 @@
> -> Seq Scan on hyper_1
> -> Seq Scan on _hyper_1_1_chunk
> (9 rows)
284,285c294,295
327,328c338,339
< QUERY PLAN
< ------------------------------------------------------------------------------------------
---
> QUERY PLAN
> ------------------------------------------------------------------------------------
289,294c299,305
332,337c343,349
< -> Result
< -> Merge Append
< Sort Key: (time_bucket('@ 1 min'::interval, hyper_1_tz."time")) DESC
@ -158,13 +207,13 @@
> -> Seq Scan on hyper_1_tz
> -> Seq Scan on _hyper_2_2_chunk
> (9 rows)
306,307c317,318
349,350c361,362
< QUERY PLAN
< -------------------------------------------------------------------------------------------------------------------------
---
> QUERY PLAN
> -------------------------------------------------------------------------------------------------------------------
311,316c322,328
354,359c366,372
< -> Result
< -> Merge Append
< Sort Key: (time_bucket('@ 1 min'::interval, (hyper_1_tz."time")::timestamp without time zone)) DESC
@ -179,13 +228,13 @@
> -> Seq Scan on hyper_1_tz
> -> Seq Scan on _hyper_2_2_chunk
> (9 rows)
328,329c340,341
371,372c384,385
< QUERY PLAN
< ---------------------------------------------------------------------------------
---
> QUERY PLAN
> -----------------------------------------------------------------
333,340c345,353
376,383c389,397
< -> Result
< -> Merge Append
< Sort Key: (((hyper_1_int."time" / 10) * 10)) DESC
@ -204,13 +253,13 @@
> -> Seq Scan on _hyper_3_4_chunk
> -> Seq Scan on _hyper_3_5_chunk
> (11 rows)
352,353c365,366
395,396c409,410
< QUERY PLAN
< -----------------------------------------------------------------------------------
---
> QUERY PLAN
> -----------------------------------------------------------------------------
357,364c370,378
400,407c414,422
< -> Result
< -> Merge Append
< Sort Key: (((((hyper_1_int."time" - 2) / 10) * 10) + 2)) DESC
@ -229,13 +278,13 @@
> -> Seq Scan on _hyper_3_4_chunk
> -> Seq Scan on _hyper_3_5_chunk
> (11 rows)
417,418c431,432
460,461c475,476
< QUERY PLAN
< -----------------------------------------------------------------------------------------------
---
> QUERY PLAN
> -----------------------------------------------------------------------------------------------------------
420,424c434,442
463,467c478,486
< -> GroupAggregate
< Group Key: date_trunc('minute'::text, "time")
< -> Index Scan using time_plain_plain_table on plain_table

View File

@ -34,6 +34,20 @@ SELECT * FROM create_hypertable('"public"."hyper_1_int"'::regclass, 'time'::name
INSERT INTO hyper_1_int SELECT ser, ser, ser+10000, sqrt(ser::numeric) FROM generate_series(0,10000) ser;
INSERT INTO hyper_1_int SELECT ser, ser, ser+10000, sqrt(ser::numeric) FROM generate_series(10001,20000) ser;
CREATE TABLE PUBLIC.hyper_1_date (
time date NOT NULL,
series_0 DOUBLE PRECISION NULL,
series_1 DOUBLE PRECISION NULL,
series_2 DOUBLE PRECISION NULL
);
CREATE INDEX "time_plain_date" ON PUBLIC.hyper_1_date (time DESC, series_0);
SELECT * FROM create_hypertable('"public"."hyper_1_date"'::regclass, 'time'::name, number_partitions => 1, chunk_time_interval=>86400000000, create_default_indexes=>FALSE);
INSERT INTO hyper_1_date SELECT to_timestamp(ser)::date, ser, ser+10000, sqrt(ser::numeric) FROM generate_series(0,10000) ser;
INSERT INTO hyper_1_date SELECT to_timestamp(ser)::date, ser, ser+10000, sqrt(ser::numeric) FROM generate_series(10001,20000) ser;
--below needed to create enough unique dates to trigger an index scan
INSERT INTO hyper_1_date SELECT to_timestamp(ser*100)::date, ser, ser+10000, sqrt(ser::numeric) FROM generate_series(10001,20000) ser;
CREATE TABLE PUBLIC.plain_table (
time TIMESTAMPTZ NOT NULL,
series_0 DOUBLE PRECISION NULL,
@ -55,6 +69,10 @@ SELECT * FROM hyper_1 ORDER BY "time" DESC limit 2;
--aggregates use MergeAppend only in optimized
EXPLAIN (costs off) SELECT date_trunc('minute', time) t, avg(series_0), min(series_1), avg(series_2) FROM hyper_1 GROUP BY t ORDER BY t DESC limit 2;
EXPLAIN (costs off) SELECT date_trunc('minute', time) t, avg(series_0), min(series_1), avg(series_2)
FROM hyper_1_date GROUP BY t ORDER BY t DESC limit 2;
--the minute and second results should be diff
SELECT date_trunc('minute', time) t, avg(series_0), min(series_1), avg(series_2) FROM hyper_1 GROUP BY t ORDER BY t DESC limit 2;
SELECT date_trunc('second', time) t, avg(series_0), min(series_1), avg(series_2) FROM hyper_1 GROUP BY t ORDER BY t DESC limit 2;

View File

@ -42,5 +42,13 @@ INSERT INTO many_partitions_test
INSERT INTO many_partitions_test
SELECT to_timestamp(ser), ser, (ser-201)::text FROM generate_series(201,300) ser;
SELECT * FROM many_partitions_test ORDER BY time DESC LIMIT 2;
SELECT count(*) FROM many_partitions_test;
SELECT * FROM many_partitions_test ORDER BY time DESC LIMIT 2;
SELECT count(*) FROM many_partitions_test;
CREATE TABLE date_col_test(time date, temp float8, device text NOT NULL);
SELECT create_hypertable('date_col_test', 'time', 'device', 1000);
INSERT INTO date_col_test
VALUES ('2001-02-01', 98, 'dev1'),
('2001-03-02', 98, 'dev1');
SELECT * FROM date_col_test WHERE time > '2001-01-01';