Block updates/deletes on compressed chunks
This commit is contained in:
parent edd3999553
commit 909b0ece78
@@ -2417,7 +2417,7 @@ ts_chunk_dml_blocker(PG_FUNCTION_ARGS)
 		elog(ERROR, "dml_blocker: not called by trigger manager");
 	ereport(ERROR,
 			(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
-			 errmsg("insert/update/delete not permitted on this chunk \"%s\"", relname),
+			 errmsg("insert/update/delete not permitted on chunk \"%s\"", relname),
 			 errhint("Make sure the chunk is not compressed.")));
 
 	PG_RETURN_NULL();
@@ -48,7 +48,7 @@ static Plan *
 adjust_childscan(PlannerInfo *root, Plan *plan, Path *path, List *pathkeys, List *tlist,
 				 AttrNumber *sortColIdx)
 {
-	AppendRelInfo *appinfo = ts_get_appendrelinfo(root, path->parent->relid);
+	AppendRelInfo *appinfo = ts_get_appendrelinfo(root, path->parent->relid, false);
 	int childSortCols;
 	Oid *sortOperators;
 	Oid *collations;
@@ -113,7 +113,8 @@ ts_chunk_append_plan_create(PlannerInfo *root, RelOptInfo *rel, CustomPath *path
 		if (child_path->parent->reloptkind == RELOPT_OTHER_MEMBER_REL)
 		{
 			/* if this is an append child we need to adjust targetlist references */
-			AppendRelInfo *appinfo = ts_get_appendrelinfo(root, child_path->parent->relid);
+			AppendRelInfo *appinfo =
+				ts_get_appendrelinfo(root, child_path->parent->relid, false);
 
 			child_plan->targetlist =
 				(List *) adjust_appendrel_attrs_compat(root, (Node *) tlist, appinfo);
@@ -241,7 +242,7 @@ ts_chunk_append_plan_create(PlannerInfo *root, RelOptInfo *rel, CustomPath *path
 	{
 		List *chunk_clauses = NIL;
 		ListCell *lc;
-		AppendRelInfo *appinfo = ts_get_appendrelinfo(root, scan->scanrelid);
+		AppendRelInfo *appinfo = ts_get_appendrelinfo(root, scan->scanrelid, false);
 
 		foreach (lc, clauses)
 		{
@@ -447,7 +447,7 @@ constraint_aware_append_plan_create(PlannerInfo *root, RelOptInfo *rel, CustomPa
 	List *chunk_clauses = NIL;
 	ListCell *lc;
 	Index scanrelid = ((Scan *) plan)->scanrelid;
-	AppendRelInfo *appinfo = ts_get_appendrelinfo(root, scanrelid);
+	AppendRelInfo *appinfo = ts_get_appendrelinfo(root, scanrelid, false);
 
 	foreach (lc, clauses)
 	{
@@ -45,8 +45,8 @@ typedef struct CrossModuleFunctions
 	Datum (*remove_reorder_policy)(PG_FUNCTION_ARGS);
 	Datum (*remove_compress_chunks_policy)(PG_FUNCTION_ARGS);
 	void (*create_upper_paths_hook)(PlannerInfo *, UpperRelationKind, RelOptInfo *, RelOptInfo *);
-	void (*set_rel_pathlist_hook)(PlannerInfo *, RelOptInfo *, Index, RangeTblEntry *,
-								  Hypertable *);
+	void (*set_rel_pathlist_hook)(PlannerInfo *, RelOptInfo *, Index, RangeTblEntry *, Hypertable *,
+								  bool isdml);
 	PGFunction gapfill_marker;
 	PGFunction gapfill_int16_time_bucket;
 	PGFunction gapfill_int32_time_bucket;
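Editor's note: the cross-module function table is how the Apache-licensed core reaches TSL-only planner code, so the new isdml flag has to be threaded through this struct before the TSL side can react to it. A minimal sketch of the dispatch site, mirroring the call added to src/planner.c later in this commit (illustration only, not a verbatim excerpt):

	/* Core planner calling into the TSL module through the function table.
	 * is_htdml is assumed to come from is_hypertable_chunk_dml() (added in
	 * src/planner.c below); the hook pointer can be NULL when the TSL
	 * library is not loaded, hence the guard. */
	if (ts_cm_functions->set_rel_pathlist_hook != NULL)
		ts_cm_functions->set_rel_pathlist_hook(root, rel, rti, rte, ht, is_htdml);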
src/planner.c (100 changed lines)
@@ -312,40 +312,36 @@ get_parentoid(PlannerInfo *root, Index rti)
 	return 0;
 }
 
-static void
-timescaledb_set_rel_pathlist(PlannerInfo *root, RelOptInfo *rel, Index rti, RangeTblEntry *rte)
+/* is this a hypertable's chunk involved in DML
+ : used only for updates and deletes for compression now */
+static bool
+is_hypertable_chunk_dml(PlannerInfo *root, RelOptInfo *rel, Index rti, RangeTblEntry *rte)
 {
-	Hypertable *ht;
-	Cache *hcache;
-	Oid ht_reloid = rte->relid;
-
-	if (prev_set_rel_pathlist_hook != NULL)
-		(*prev_set_rel_pathlist_hook)(root, rel, rti, rte);
-
-	if (!ts_extension_is_loaded() || IS_DUMMY_REL(rel) || !OidIsValid(rte->relid))
-		return;
-
-	/* quick abort if only optimizing hypertables */
-	if (!ts_guc_optimize_non_hypertables &&
-		!(is_append_parent(rel, rte) || is_append_child(rel, rte)))
-		return;
-
-	hcache = ts_hypertable_cache_pin();
-
-	/*
-	 * if this is an append child we use the parent relid to
-	 * check if its a hypertable
-	 */
-	if (is_append_child(rel, rte))
-		ht_reloid = get_parentoid(root, rti);
-
-	ht = ts_hypertable_cache_get_entry(hcache, ht_reloid);
-
-	if (ts_cm_functions->set_rel_pathlist_hook != NULL)
-		ts_cm_functions->set_rel_pathlist_hook(root, rel, rti, rte, ht);
+	if (root->parse->commandType == CMD_UPDATE || root->parse->commandType == CMD_DELETE)
+	{
+		Oid parent_oid;
+		AppendRelInfo *appinfo = ts_get_appendrelinfo(root, rti, true);
+		if (!appinfo)
+			return false;
+		parent_oid = appinfo->parent_reloid;
+		if (parent_oid != InvalidOid && rte->relid != parent_oid)
+		{
+			Cache *hcache = ts_hypertable_cache_pin();
+			Hypertable *parent_ht = ts_hypertable_cache_get_entry(hcache, parent_oid);
+			ts_cache_release(hcache);
+			if (parent_ht)
+				return true;
+		}
+	}
+	return false;
+}
+
+static void
+timescaledb_set_rel_pathlist_query(PlannerInfo *root, RelOptInfo *rel, Index rti,
+								   RangeTblEntry *rte, Hypertable *ht)
 {
 	if (!should_optimize_query(ht))
-		goto out_release;
+		return;
 
 	if (ts_guc_optimize_non_hypertables)
 	{
@@ -423,8 +419,48 @@ timescaledb_set_rel_pathlist(PlannerInfo *root, RelOptInfo *rel, Index rti, Rang
 			}
 		}
 	}
 	return;
+}
 
-out_release:
+static void
+timescaledb_set_rel_pathlist(PlannerInfo *root, RelOptInfo *rel, Index rti, RangeTblEntry *rte)
+{
+	Hypertable *ht;
+	Cache *hcache;
+	Oid ht_reloid = rte->relid;
+	bool is_htdml;
+
+	if (prev_set_rel_pathlist_hook != NULL)
+		(*prev_set_rel_pathlist_hook)(root, rel, rti, rte);
+
+	if (!ts_extension_is_loaded() || IS_DUMMY_REL(rel) || !OidIsValid(rte->relid))
+		return;
+
+	/* do we have a DML transformation here */
+	is_htdml = is_hypertable_chunk_dml(root, rel, rti, rte);
+
+	/* quick abort if only optimizing hypertables */
+	if (!ts_guc_optimize_non_hypertables &&
+		!(is_append_parent(rel, rte) || is_append_child(rel, rte) || is_htdml))
+		return;
+
+	hcache = ts_hypertable_cache_pin();
+
+	/*
+	 * if this is an append child or DML we use the parent relid to
+	 * check if its a hypertable
+	 */
+	if (is_append_child(rel, rte) || is_htdml)
+		ht_reloid = get_parentoid(root, rti);
+
+	ht = ts_hypertable_cache_get_entry(hcache, ht_reloid);
+
+	if (ts_cm_functions->set_rel_pathlist_hook != NULL)
+		ts_cm_functions->set_rel_pathlist_hook(root, rel, rti, rte, ht, is_htdml);
+	if (!is_htdml)
+	{
+		timescaledb_set_rel_pathlist_query(root, rel, rti, rte, ht);
+	}
 	ts_cache_release(hcache);
 }
+
src/utils.c (13 changed lines)
@@ -576,7 +576,7 @@ ts_get_cast_func(Oid source, Oid target)
 }
 
 AppendRelInfo *
-ts_get_appendrelinfo(PlannerInfo *root, Index rti)
+ts_get_appendrelinfo(PlannerInfo *root, Index rti, bool missing_ok)
 {
 	ListCell *lc;
 #if PG11_GE
@@ -585,10 +585,11 @@ ts_get_appendrelinfo(PlannerInfo *root, Index rti)
 	{
 		if (root->append_rel_array[rti])
 			return root->append_rel_array[rti];
-		else
+		if (!missing_ok)
 			ereport(ERROR,
 					(errcode(ERRCODE_INTERNAL_ERROR),
 					 errmsg("no appendrelinfo found for index %d", rti)));
+		return NULL;
 	}
 #endif
 
@@ -598,9 +599,11 @@ ts_get_appendrelinfo(PlannerInfo *root, Index rti)
 		if (appinfo->child_relid == rti)
 			return appinfo;
 	}
-	ereport(ERROR,
-			(errcode(ERRCODE_INTERNAL_ERROR), errmsg("no appendrelinfo found for index %d", rti)));
-	pg_unreachable();
+	if (!missing_ok)
+		ereport(ERROR,
+				(errcode(ERRCODE_INTERNAL_ERROR),
+				 errmsg("no appendrelinfo found for index %d", rti)));
+	return NULL;
 }
 
 Expr *
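Editor's note: the net effect of the new missing_ok argument is two calling conventions. A short sketch of both, mirroring call sites that appear elsewhere in this diff (illustrative only):

	/* Planner paths that require a hypertable parent keep the strict
	 * behavior: missing_ok = false still raises ERRCODE_INTERNAL_ERROR
	 * when no AppendRelInfo exists for the index. */
	AppendRelInfo *appinfo = ts_get_appendrelinfo(root, chunk_rel->relid, false);

	/* DML detection can tolerate a missing entry: missing_ok = true now
	 * returns NULL instead of erroring, and the caller simply backs off. */
	AppendRelInfo *maybe_appinfo = ts_get_appendrelinfo(root, rti, true);
	if (maybe_appinfo == NULL)
		return false; /* not an append child, so not a hypertable chunk */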
@@ -77,7 +77,8 @@ extern Oid ts_get_cast_func(Oid source, Oid target);
 extern void *ts_create_struct_from_tuple(HeapTuple tuple, MemoryContext mctx, size_t alloc_size,
 										 size_t copy_size);
 
-extern TSDLLEXPORT AppendRelInfo *ts_get_appendrelinfo(PlannerInfo *root, Index rti);
+extern TSDLLEXPORT AppendRelInfo *ts_get_appendrelinfo(PlannerInfo *root, Index rti,
+													   bool missing_ok);
 
 extern TSDLLEXPORT Expr *ts_find_em_expr_for_rel(EquivalenceClass *ec, RelOptInfo *rel);
@@ -124,7 +124,7 @@ chunk_dml_blocker_trigger_add(Oid relid)
 	Oid schemaid = get_rel_namespace(relid);
 	char *schema = get_namespace_name(schemaid);
 
-	/* stmt triggers are blocked on hypertable chunks */
+	/* stmt triggers are blocked on hypertable chunks */
 	CreateTrigStmt stmt = {
 		.type = T_CreateTrigStmt,
 		.row = true,
@@ -71,7 +71,7 @@ build_compressioninfo(PlannerInfo *root, Hypertable *ht, RelOptInfo *chunk_rel)
 	info->chunk_rel = chunk_rel;
 	info->chunk_rte = planner_rt_fetch(chunk_rel->relid, root);
 
-	appinfo = ts_get_appendrelinfo(root, chunk_rel->relid);
+	appinfo = ts_get_appendrelinfo(root, chunk_rel->relid, false);
 	info->ht_rte = planner_rt_fetch(appinfo->parent_relid, root);
 	info->hypertable_id = ht->fd.id;
 
@@ -115,7 +115,7 @@ ts_decompress_chunk_generate_paths(PlannerInfo *root, RelOptInfo *chunk_rel, Hyp
 	 */
 	int parallel_workers = 1;
 
-	AppendRelInfo *chunk_info = ts_get_appendrelinfo(root, chunk_rel->relid);
+	AppendRelInfo *chunk_info = ts_get_appendrelinfo(root, chunk_rel->relid, false);
 	Assert(chunk_info != NULL);
 	Assert(chunk_info->parent_reloid == ht->main_table_relid);
 	ht_index = chunk_info->parent_relid;
@@ -1 +1,2 @@
 add_subdirectory(gapfill)
+add_subdirectory(compress_dml)
tsl/src/nodes/compress_dml/CMakeLists.txt (new file, 4 lines)
@@ -0,0 +1,4 @@
+set(SOURCES
+	${CMAKE_CURRENT_SOURCE_DIR}/compress_dml.c
+)
+target_sources(${TSL_LIBRARY_NAME} PRIVATE ${SOURCES})
tsl/src/nodes/compress_dml/compress_dml.c (new file, 134 lines)
@@ -0,0 +1,134 @@
+/*
+ * This file and its contents are licensed under the Timescale License.
+ * Please see the included NOTICE for copyright information and
+ * LICENSE-TIMESCALE for a copy of the license.
+ */
+
+#include <postgres.h>
+#include <nodes/extensible.h>
+#include <optimizer/pathnode.h>
+#include <optimizer/paths.h>
+
+#include "compat.h"
+#include "chunk.h"
+#include "hypertable.h"
+#include "hypertable_compression.h"
+#include "compress_dml.h"
+#include "utils.h"
+
+/*Path, Plan and State node for processing dml on compressed chunks
+ * For now, this just blocks updates/deletes on compressed chunks
+ * since trigger based approach does not work
+ */
+
+static Path *compress_chunk_dml_path_create(Path *subpath, Oid chunk_relid);
+static Plan *compress_chunk_dml_plan_create(PlannerInfo *root, RelOptInfo *relopt,
+											CustomPath *best_path, List *tlist, List *clauses,
+											List *custom_plans);
+static Node *compress_chunk_dml_state_create(CustomScan *scan);
+
+static void compress_chunk_dml_begin(CustomScanState *node, EState *estate, int eflags);
+static TupleTableSlot *compress_chunk_dml_exec(CustomScanState *node);
+static void compress_chunk_dml_end(CustomScanState *node);
+
+static CustomPathMethods compress_chunk_dml_path_methods = {
+	.CustomName = "CompressChunkDml",
+	.PlanCustomPath = compress_chunk_dml_plan_create,
+};
+
+static CustomScanMethods compress_chunk_dml_plan_methods = {
+	.CustomName = "CompressChunkDml",
+	.CreateCustomScanState = compress_chunk_dml_state_create,
+};
+
+static CustomExecMethods compress_chunk_dml_state_methods = {
+	.CustomName = COMPRESS_CHUNK_DML_STATE_NAME,
+	.BeginCustomScan = compress_chunk_dml_begin,
+	.EndCustomScan = compress_chunk_dml_end,
+	.ExecCustomScan = compress_chunk_dml_exec,
+};
+
+static void
+compress_chunk_dml_begin(CustomScanState *node, EState *estate, int eflags)
+{
+	CustomScan *cscan = castNode(CustomScan, node->ss.ps.plan);
+	Plan *subplan = linitial(cscan->custom_plans);
+	node->custom_ps = list_make1(ExecInitNode(subplan, estate, eflags));
+}
+
+/* we cannot update/delete rows if we have a compressed chunk. so
+ * throw an error. Note this subplan will return 0 tuples as the chunk is empty
+ * and all rows are saved in the compressed chunk.
+ */
+static TupleTableSlot *
+compress_chunk_dml_exec(CustomScanState *node)
+{
+	CompressChunkDmlState *state = (CompressChunkDmlState *) node;
+	Oid chunk_relid = state->chunk_relid;
+	elog(ERROR,
+		 "cannot update/delete rows from chunk \"%s\" as it is compressed",
+		 get_rel_name(chunk_relid));
+	return NULL;
+}
+
+static void
+compress_chunk_dml_end(CustomScanState *node)
+{
+	// CompressChunkDmlState *state = (CompressChunkDmlState *) node;
+	PlanState *substate = linitial(node->custom_ps);
+	ExecEndNode(substate);
+}
+
+static Path *
+compress_chunk_dml_path_create(Path *subpath, Oid chunk_relid)
+{
+	CompressChunkDmlPath *path = (CompressChunkDmlPath *) palloc0(sizeof(CompressChunkDmlPath));
+
+	memcpy(&path->cpath.path, subpath, sizeof(Path));
+	path->cpath.path.type = T_CustomPath;
+	path->cpath.path.pathtype = T_CustomScan;
+	path->cpath.path.parent = subpath->parent;
+	path->cpath.path.pathtarget = subpath->pathtarget;
+	// path->cpath.path.param_info = subpath->param_info;
+	path->cpath.methods = &compress_chunk_dml_path_methods;
+	path->cpath.custom_paths = list_make1(subpath);
+	path->chunk_relid = chunk_relid;
+
+	return &path->cpath.path;
+}
+
+static Plan *
+compress_chunk_dml_plan_create(PlannerInfo *root, RelOptInfo *relopt, CustomPath *best_path,
+							   List *tlist, List *clauses, List *custom_plans)
+{
+	CompressChunkDmlPath *cdpath = (CompressChunkDmlPath *) best_path;
+	CustomScan *cscan = makeNode(CustomScan);
+
+	Assert(list_length(custom_plans) == 1);
+
+	cscan->methods = &compress_chunk_dml_plan_methods;
+	cscan->custom_plans = custom_plans;
+	cscan->scan.scanrelid = relopt->relid;
+	cscan->scan.plan.targetlist = tlist;
+	cscan->custom_scan_tlist = NIL;
+	cscan->custom_private = list_make1_oid(cdpath->chunk_relid);
+	return &cscan->scan.plan;
+}
+
+static Node *
+compress_chunk_dml_state_create(CustomScan *scan)
+{
+	CompressChunkDmlState *state;
+
+	state = (CompressChunkDmlState *) newNode(sizeof(CompressChunkDmlState), T_CustomScanState);
+	state->chunk_relid = linitial_oid(scan->custom_private);
+	state->cscan_state.methods = &compress_chunk_dml_state_methods;
+	return (Node *) state;
+}
+
+Path *
+compress_chunk_dml_generate_paths(Path *subpath, Chunk *chunk)
+{
+	Assert(chunk->fd.compressed_chunk_id > 0);
+	return compress_chunk_dml_path_create(subpath, chunk->table_id);
+}
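Editor's note: how this node ends up in a plan — the TSL planner hook (tsl/src/planner.c, further down in this commit) wraps every path on a compressed chunk that is the target of an UPDATE/DELETE, so execution reaches compress_chunk_dml_exec and fails before any row is touched. A condensed sketch of that wiring, assuming rel is the chunk's RelOptInfo and chunk its catalog entry (illustration only):

	/* Wrap each existing path on the compressed chunk with the
	 * CompressChunkDml custom path; the error then fires at executor
	 * startup of the wrapped subplan rather than via a trigger. */
	ListCell *lc;
	foreach (lc, rel->pathlist)
	{
		Path **pathptr = (Path **) &lfirst(lc);
		*pathptr = compress_chunk_dml_generate_paths(*pathptr, chunk);
	}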
tsl/src/nodes/compress_dml/compress_dml.h (new file, 30 lines)
@@ -0,0 +1,30 @@
+/*
+ * This file and its contents are licensed under the Timescale License.
+ * Please see the included NOTICE for copyright information and
+ * LICENSE-TIMESCALE for a copy of the license.
+ */
+#ifndef TIMESCALEDB_COMPRESS_CHUNK_DML_H
+#define TIMESCALEDB_COMPRESS_CHUNK_DML_H
+
+#include <postgres.h>
+#include <nodes/execnodes.h>
+#include <foreign/fdwapi.h>
+
+#include "hypertable.h"
+
+typedef struct CompressChunkDmlPath
+{
+	CustomPath cpath;
+	Oid chunk_relid;
+} CompressChunkDmlPath;
+
+typedef struct CompressChunkDmlState
+{
+	CustomScanState cscan_state;
+	Oid chunk_relid;
+} CompressChunkDmlState;
+
+Path *compress_chunk_dml_generate_paths(Path *subpath, Chunk *chunk);
+
+#define COMPRESS_CHUNK_DML_STATE_NAME "CompressChunkDmlState"
+#endif
@@ -8,9 +8,11 @@
 
 #include "planner.h"
 #include "nodes/gapfill/planner.h"
-#include "decompress_chunk/decompress_chunk.h"
+#include "nodes/compress_dml/compress_dml.h"
+#include "chunk.h"
+#include "decompress_chunk/decompress_chunk.h"
 #include "hypertable.h"
 #include "hypertable_compression.h"
 #include "guc.h"
 
 void
@@ -28,14 +30,37 @@ tsl_create_upper_paths_hook(PlannerInfo *root, UpperRelationKind stage, RelOptIn
 
 void
 tsl_set_rel_pathlist_hook(PlannerInfo *root, RelOptInfo *rel, Index rti, RangeTblEntry *rte,
-						  Hypertable *ht)
+						  Hypertable *ht, bool isdml)
 {
-	if (ts_guc_enable_transparent_decompression && ht != NULL &&
-		rel->reloptkind == RELOPT_OTHER_MEMBER_REL && ht->fd.compressed_hypertable_id > 0)
+	if (isdml)
 	{
-		Chunk *chunk = ts_chunk_get_by_relid(rte->relid, 0, true);
+		if (ht != NULL && TS_HYPERTABLE_HAS_COMPRESSION_ON(ht))
+		{
+			ListCell *lc;
+			/* is this a chunk under compressed hypertable ? */
+			AppendRelInfo *appinfo = ts_get_appendrelinfo(root, rti, false);
+			Oid parent_oid = appinfo->parent_reloid;
+			Chunk *chunk = ts_chunk_get_by_relid(rte->relid, 0, true);
+			Assert(parent_oid == ht->main_table_relid && (parent_oid == chunk->hypertable_relid));
+			if (chunk->fd.compressed_chunk_id > 0)
+			{
+				foreach (lc, rel->pathlist)
+				{
+					Path **pathptr = (Path **) &lfirst(lc);
+					*pathptr = compress_chunk_dml_generate_paths(*pathptr, chunk);
+				}
+			}
+		}
+	}
+	else
+	{
+		if (ts_guc_enable_transparent_decompression && ht != NULL &&
+			rel->reloptkind == RELOPT_OTHER_MEMBER_REL && ht->fd.compressed_hypertable_id > 0)
+		{
+			Chunk *chunk = ts_chunk_get_by_relid(rte->relid, 0, true);
 
-		if (chunk->fd.compressed_chunk_id > 0)
-			ts_decompress_chunk_generate_paths(root, rel, ht, chunk);
+			if (chunk->fd.compressed_chunk_id > 0)
+				ts_decompress_chunk_generate_paths(root, rel, ht, chunk);
+		}
 	}
 }
@@ -10,6 +10,7 @@
 #include "hypertable.h"
 
 void tsl_create_upper_paths_hook(PlannerInfo *, UpperRelationKind, RelOptInfo *, RelOptInfo *);
-void tsl_set_rel_pathlist_hook(PlannerInfo *, RelOptInfo *, Index, RangeTblEntry *, Hypertable *);
+void tsl_set_rel_pathlist_hook(PlannerInfo *, RelOptInfo *, Index, RangeTblEntry *, Hypertable *,
+							   bool isdml);
 
 #endif /* TIMESCALEDB_TSL_PLANNER_H */
@@ -32,6 +32,7 @@ NOTICE: adding not-null constraint to column "a"
  foo
 (1 row)
 
+create unique index foo_uniq ON foo (a, b);
 insert into foo values( 3 , 16 , 20, 11);
 insert into foo values( 10 , 10 , 20, 120);
 insert into foo values( 20 , 11 , 20, 13);
@@ -75,9 +76,9 @@ select * from timescaledb_information.compressed_chunk_size;
 hypertable_name          | foo
 chunk_name               | _timescaledb_internal._hyper_1_2_chunk
 uncompressed_heap_bytes  | 8192 bytes
-uncompressed_index_bytes | 16 kB
+uncompressed_index_bytes | 32 kB
 uncompressed_toast_bytes | 0 bytes
-uncompressed_total_bytes | 24 kB
+uncompressed_total_bytes | 40 kB
 compressed_heap_bytes    | 8192 bytes
 compressed_index_bytes   | 0 bytes
 compressed_toast_bytes   | 8192 bytes
@@ -98,7 +99,7 @@ chunk_id | 1
 compressed_chunk_id     | 6
 uncompressed_heap_size  | 8192
 uncompressed_toast_size | 0
-uncompressed_index_size | 16384
+uncompressed_index_size | 32768
 compressed_heap_size    | 8192
 compressed_toast_size   | 8192
 compressed_index_size   | 0
@@ -107,7 +108,7 @@ chunk_id | 2
 compressed_chunk_id     | 5
 uncompressed_heap_size  | 8192
 uncompressed_toast_size | 0
-uncompressed_index_size | 16384
+uncompressed_index_size | 32768
 compressed_heap_size    | 8192
 compressed_toast_size   | 8192
 compressed_index_size   | 0
@@ -129,10 +130,58 @@ select compress_chunk( '_timescaledb_internal._hyper_1_2_chunk');
 ERROR: chunk is already compressed
 --TEST2a try DML on a compressed chunk
 insert into foo values( 11 , 10 , 20, 120);
-ERROR: insert/update/delete not permitted on this chunk "_hyper_1_2_chunk"
+ERROR: insert/update/delete not permitted on chunk "_hyper_1_2_chunk"
 update foo set b =20 where a = 10;
+ERROR: cannot update/delete rows from chunk "_hyper_1_2_chunk" as it is compressed
 delete from foo where a = 10;
---TEST2b decompress the chunk and try DML
+ERROR: cannot update/delete rows from chunk "_hyper_1_2_chunk" as it is compressed
+--TEST2b try complex DML on compressed chunk
+create table foo_join ( a integer, newval integer);
+select table_name from create_hypertable('foo_join', 'a', chunk_time_interval=> 10);
+NOTICE: adding not-null constraint to column "a"
+ table_name
+------------
+ foo_join
+(1 row)
+
+insert into foo_join select generate_series(0,40, 10), 111;
+create table foo_join2 ( a integer, newval integer);
+select table_name from create_hypertable('foo_join2', 'a', chunk_time_interval=> 10);
+NOTICE: adding not-null constraint to column "a"
+ table_name
+------------
+ foo_join2
+(1 row)
+
+insert into foo_join select generate_series(0,40, 10), 222;
+update foo
+set b = newval
+from foo_join where foo.a = foo_join.a;
+ERROR: cannot update/delete rows from chunk "_hyper_1_1_chunk" as it is compressed
+update foo
+set b = newval
+from foo_join where foo.a = foo_join.a and foo_join.a > 10;
+ERROR: cannot update/delete rows from chunk "_hyper_1_1_chunk" as it is compressed
+--here the chunk gets excluded , so succeeds --
+update foo
+set b = newval
+from foo_join where foo.a = foo_join.a and foo.a > 20;
+update foo
+set b = (select f1.newval from foo_join f1 left join lateral (select newval as newval2 from foo_join2 f2 where f1.a= f2.a ) subq on true limit 1);
+ERROR: cannot update/delete rows from chunk "_hyper_1_1_chunk" as it is compressed
+--upsert test --
+insert into foo values(10, 12, 12, 12)
+on conflict( a, b)
+do update set b = excluded.b;
+ERROR: insert/update/delete not permitted on chunk "_hyper_1_2_chunk"
+--TEST2c dml directly on the chunk NOTE update/deletes don't get blocked (TODO)
+insert into _timescaledb_internal._hyper_1_2_chunk values(10, 12, 12, 12);
+ERROR: insert/update/delete not permitted on chunk "_hyper_1_2_chunk"
+update _timescaledb_internal._hyper_1_2_chunk
+set b = 12;
+delete from _timescaledb_internal._hyper_1_2_chunk;
+
+--TEST2d decompress the chunk and try DML
 select decompress_chunk( '_timescaledb_internal._hyper_1_2_chunk');
  decompress_chunk
 ------------------
|
||||
select create_hypertable( 'conditions', 'time', chunk_time_interval=> '31days'::interval);
|
||||
create_hypertable
|
||||
-------------------------
|
||||
(3,public,conditions,t)
|
||||
(5,public,conditions,t)
|
||||
(1 row)
|
||||
|
||||
alter table conditions set (timescaledb.compress, timescaledb.compress_segmentby = 'location', timescaledb.compress_orderby = 'time');
|
||||
@ -182,11 +231,11 @@ where ht.id = hc.hypertable_id and ht.table_name like 'conditions' and al.id = h
|
||||
ORDER BY hypertable_id, attname;
|
||||
hypertable_id | attname | compression_algorithm_id | name
|
||||
---------------+-------------+--------------------------+----------------------------------
|
||||
3 | humidity | 3 | COMPRESSION_ALGORITHM_GORILLA
|
||||
3 | location | 0 | COMPRESSION_ALGORITHM_NONE
|
||||
3 | location2 | 2 | COMPRESSION_ALGORITHM_DICTIONARY
|
||||
3 | temperature | 3 | COMPRESSION_ALGORITHM_GORILLA
|
||||
3 | time | 4 | COMPRESSION_ALGORITHM_DELTADELTA
|
||||
5 | humidity | 3 | COMPRESSION_ALGORITHM_GORILLA
|
||||
5 | location | 0 | COMPRESSION_ALGORITHM_NONE
|
||||
5 | location2 | 2 | COMPRESSION_ALGORITHM_DICTIONARY
|
||||
5 | temperature | 3 | COMPRESSION_ALGORITHM_GORILLA
|
||||
5 | time | 4 | COMPRESSION_ALGORITHM_DELTADELTA
|
||||
(5 rows)
|
||||
|
||||
select attname, attstorage, typname from pg_attribute at, pg_class cl , pg_type ty
|
||||
@ -194,17 +243,9 @@ where cl.oid = at.attrelid and at.attnum > 0
|
||||
and cl.relname = '_compressed_hypertable_4'
|
||||
and atttypid = ty.oid
|
||||
order by at.attnum;
|
||||
attname | attstorage | typname
|
||||
-----------------------+------------+----------------------
|
||||
time | e | compressed_data
|
||||
location | x | text
|
||||
location2 | x | compressed_data
|
||||
temperature | e | compressed_data
|
||||
humidity | e | compressed_data
|
||||
_ts_meta_count | p | int4
|
||||
_ts_meta_sequence_num | p | int4
|
||||
_ts_meta_min_max_1 | e | segment_meta_min_max
|
||||
(8 rows)
|
||||
attname | attstorage | typname
|
||||
---------+------------+---------
|
||||
(0 rows)
|
||||
|
||||
SELECT ch1.schema_name|| '.' || ch1.table_name as "CHUNK_NAME", ch1.id "CHUNK_ID"
|
||||
FROM _timescaledb_catalog.chunk ch1, _timescaledb_catalog.hypertable ht where ch1.hypertable_id = ht.id and ht.table_name like 'conditions'
|
||||
@ -218,10 +259,10 @@ SELECT count(*) from :CHUNK_NAME;
|
||||
|
||||
SELECT count(*) as "ORIGINAL_CHUNK_COUNT" from :CHUNK_NAME \gset
|
||||
select tableoid::regclass, count(*) from conditions group by tableoid order by tableoid;
|
||||
tableoid | count
|
||||
----------------------------------------+-------
|
||||
_timescaledb_internal._hyper_3_7_chunk | 42
|
||||
_timescaledb_internal._hyper_3_8_chunk | 20
|
||||
tableoid | count
|
||||
-----------------------------------------+-------
|
||||
_timescaledb_internal._hyper_5_12_chunk | 42
|
||||
_timescaledb_internal._hyper_5_13_chunk | 20
|
||||
(2 rows)
|
||||
|
||||
select compress_chunk(ch1.schema_name|| '.' || ch1.table_name)
|
||||
@ -234,9 +275,9 @@ FROM _timescaledb_catalog.chunk ch1, _timescaledb_catalog.hypertable ht where ch
|
||||
--test that only one chunk was affected
|
||||
--note tables with 0 rows will not show up in here.
|
||||
select tableoid::regclass, count(*) from conditions group by tableoid order by tableoid;
|
||||
tableoid | count
|
||||
----------------------------------------+-------
|
||||
_timescaledb_internal._hyper_3_8_chunk | 20
|
||||
tableoid | count
|
||||
-----------------------------------------+-------
|
||||
_timescaledb_internal._hyper_5_13_chunk | 20
|
||||
(1 row)
|
||||
|
||||
select compress_chunk(ch1.schema_name|| '.' || ch1.table_name)
|
||||
@ -283,9 +324,9 @@ SELECT _ts_meta_sequence_num from :COMPRESSED_CHUNK_NAME;
|
||||
select * from timescaledb_information.compressed_chunk_size
|
||||
where hypertable_name::text like 'conditions'
|
||||
order by hypertable_name, chunk_name;
|
||||
-[ RECORD 1 ]------------+---------------------------------------
|
||||
-[ RECORD 1 ]------------+----------------------------------------
|
||||
hypertable_name | conditions
|
||||
chunk_name | _timescaledb_internal._hyper_3_7_chunk
|
||||
chunk_name | _timescaledb_internal._hyper_5_12_chunk
|
||||
uncompressed_heap_bytes | 8192 bytes
|
||||
uncompressed_index_bytes | 16 kB
|
||||
uncompressed_toast_bytes | 8192 bytes
|
||||
@ -294,9 +335,9 @@ compressed_heap_bytes | 8192 bytes
|
||||
compressed_index_bytes | 0 bytes
|
||||
compressed_toast_bytes | 8192 bytes
|
||||
compressed_total_bytes | 16 kB
|
||||
-[ RECORD 2 ]------------+---------------------------------------
|
||||
-[ RECORD 2 ]------------+----------------------------------------
|
||||
hypertable_name | conditions
|
||||
chunk_name | _timescaledb_internal._hyper_3_8_chunk
|
||||
chunk_name | _timescaledb_internal._hyper_5_13_chunk
|
||||
uncompressed_heap_bytes | 8192 bytes
|
||||
uncompressed_index_bytes | 16 kB
|
||||
uncompressed_toast_bytes | 8192 bytes
|
||||
@ -311,9 +352,9 @@ order by hypertable_name;
|
||||
-[ RECORD 1 ]------------+-----------
|
||||
hypertable_name | foo
|
||||
uncompressed_heap_bytes | 8192 bytes
|
||||
uncompressed_index_bytes | 16 kB
|
||||
uncompressed_index_bytes | 32 kB
|
||||
uncompressed_toast_bytes | 0 bytes
|
||||
uncompressed_total_bytes | 24 kB
|
||||
uncompressed_total_bytes | 40 kB
|
||||
compressed_heap_bytes | 8192 bytes
|
||||
compressed_index_bytes | 0 bytes
|
||||
compressed_toast_bytes | 8192 bytes
|
||||
@ -347,7 +388,7 @@ SELECT count(*), count(*) = :'ORIGINAL_CHUNK_COUNT' from :CHUNK_NAME;
|
||||
--check that the compressed chunk is dropped
|
||||
\set ON_ERROR_STOP 0
|
||||
SELECT count(*) from :COMPRESSED_CHUNK_NAME;
|
||||
ERROR: relation "_timescaledb_internal.compress_hyper_4_9_chunk" does not exist at character 22
|
||||
ERROR: relation "_timescaledb_internal.compress_hyper_6_14_chunk" does not exist at character 22
|
||||
\set ON_ERROR_STOP 1
|
||||
--size information is gone too
|
||||
select count(*) from timescaledb_information.compressed_chunk_size
|
||||
@ -373,7 +414,7 @@ SELECT create_hypertable('plan_inval','time');
|
||||
NOTICE: adding not-null constraint to column "time"
|
||||
create_hypertable
|
||||
-------------------------
|
||||
(5,public,plan_inval,t)
|
||||
(7,public,plan_inval,t)
|
||||
(1 row)
|
||||
|
||||
ALTER TABLE plan_inval SET (timescaledb.compress,timescaledb.compress_orderby='time desc');
|
||||
@ -419,9 +460,9 @@ EXPLAIN (COSTS OFF) EXECUTE prep_plan;
|
||||
----------------------------------------------------------------
|
||||
Aggregate
|
||||
-> Append
|
||||
-> Custom Scan (DecompressChunk) on _hyper_5_11_chunk
|
||||
-> Seq Scan on compress_hyper_6_13_chunk
|
||||
-> Seq Scan on _hyper_5_12_chunk
|
||||
-> Custom Scan (DecompressChunk) on _hyper_7_16_chunk
|
||||
-> Seq Scan on compress_hyper_8_18_chunk
|
||||
-> Seq Scan on _hyper_7_17_chunk
|
||||
(5 rows)
|
||||
|
||||
CREATE TABLE test_collation (
|
||||
@ -435,7 +476,7 @@ CREATE TABLE test_collation (
|
||||
select create_hypertable( 'test_collation', 'time', chunk_time_interval=> '1 day'::interval);
|
||||
create_hypertable
|
||||
-----------------------------
|
||||
(7,public,test_collation,t)
|
||||
(9,public,test_collation,t)
|
||||
(1 row)
|
||||
|
||||
\set ON_ERROR_STOP 0
|
||||
@ -465,27 +506,27 @@ EXPLAIN (costs off) SELECT * FROM test_collation WHERE device_id < 'a';
|
||||
QUERY PLAN
|
||||
----------------------------------------------------------
|
||||
Append
|
||||
-> Custom Scan (DecompressChunk) on _hyper_7_14_chunk
|
||||
-> Seq Scan on compress_hyper_8_24_chunk
|
||||
-> Custom Scan (DecompressChunk) on _hyper_9_19_chunk
|
||||
-> Seq Scan on compress_hyper_10_29_chunk
|
||||
Filter: (device_id < 'a'::text)
|
||||
-> Custom Scan (DecompressChunk) on _hyper_7_15_chunk
|
||||
-> Seq Scan on compress_hyper_8_25_chunk
|
||||
-> Custom Scan (DecompressChunk) on _hyper_9_20_chunk
|
||||
-> Seq Scan on compress_hyper_10_30_chunk
|
||||
Filter: (device_id < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_16_chunk
|
||||
-> Seq Scan on _hyper_9_21_chunk
|
||||
Filter: (device_id < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_17_chunk
|
||||
-> Seq Scan on _hyper_9_22_chunk
|
||||
Filter: (device_id < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_18_chunk
|
||||
-> Seq Scan on _hyper_9_23_chunk
|
||||
Filter: (device_id < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_19_chunk
|
||||
-> Seq Scan on _hyper_9_24_chunk
|
||||
Filter: (device_id < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_20_chunk
|
||||
-> Seq Scan on _hyper_9_25_chunk
|
||||
Filter: (device_id < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_21_chunk
|
||||
-> Seq Scan on _hyper_9_26_chunk
|
||||
Filter: (device_id < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_22_chunk
|
||||
-> Seq Scan on _hyper_9_27_chunk
|
||||
Filter: (device_id < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_23_chunk
|
||||
-> Seq Scan on _hyper_9_28_chunk
|
||||
Filter: (device_id < 'a'::text)
|
||||
(23 rows)
|
||||
|
||||
@ -493,27 +534,27 @@ EXPLAIN (costs off) SELECT * FROM test_collation WHERE device_id < 'a' COLLATE "
|
||||
QUERY PLAN
|
||||
---------------------------------------------------------------
|
||||
Append
|
||||
-> Custom Scan (DecompressChunk) on _hyper_7_14_chunk
|
||||
-> Seq Scan on compress_hyper_8_24_chunk
|
||||
-> Custom Scan (DecompressChunk) on _hyper_9_19_chunk
|
||||
-> Seq Scan on compress_hyper_10_29_chunk
|
||||
Filter: (device_id < 'a'::text COLLATE "POSIX")
|
||||
-> Custom Scan (DecompressChunk) on _hyper_7_15_chunk
|
||||
-> Seq Scan on compress_hyper_8_25_chunk
|
||||
-> Custom Scan (DecompressChunk) on _hyper_9_20_chunk
|
||||
-> Seq Scan on compress_hyper_10_30_chunk
|
||||
Filter: (device_id < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_16_chunk
|
||||
-> Seq Scan on _hyper_9_21_chunk
|
||||
Filter: (device_id < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_17_chunk
|
||||
-> Seq Scan on _hyper_9_22_chunk
|
||||
Filter: (device_id < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_18_chunk
|
||||
-> Seq Scan on _hyper_9_23_chunk
|
||||
Filter: (device_id < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_19_chunk
|
||||
-> Seq Scan on _hyper_9_24_chunk
|
||||
Filter: (device_id < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_20_chunk
|
||||
-> Seq Scan on _hyper_9_25_chunk
|
||||
Filter: (device_id < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_21_chunk
|
||||
-> Seq Scan on _hyper_9_26_chunk
|
||||
Filter: (device_id < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_22_chunk
|
||||
-> Seq Scan on _hyper_9_27_chunk
|
||||
Filter: (device_id < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_23_chunk
|
||||
-> Seq Scan on _hyper_9_28_chunk
|
||||
Filter: (device_id < 'a'::text COLLATE "POSIX")
|
||||
(23 rows)
|
||||
|
||||
@ -529,29 +570,29 @@ EXPLAIN (costs off) SELECT * FROM test_collation WHERE val_1 < 'a';
|
||||
QUERY PLAN
|
||||
----------------------------------------------------------------------------------------------------------------
|
||||
Append
|
||||
-> Custom Scan (DecompressChunk) on _hyper_7_14_chunk
|
||||
-> Custom Scan (DecompressChunk) on _hyper_9_19_chunk
|
||||
Filter: (val_1 < 'a'::text)
|
||||
-> Seq Scan on compress_hyper_8_24_chunk
|
||||
-> Seq Scan on compress_hyper_10_29_chunk
|
||||
Filter: (_timescaledb_internal.segment_meta_get_min(_ts_meta_min_max_1, NULL::text) < 'a'::text)
|
||||
-> Custom Scan (DecompressChunk) on _hyper_7_15_chunk
|
||||
-> Custom Scan (DecompressChunk) on _hyper_9_20_chunk
|
||||
Filter: (val_1 < 'a'::text)
|
||||
-> Seq Scan on compress_hyper_8_25_chunk
|
||||
-> Seq Scan on compress_hyper_10_30_chunk
|
||||
Filter: (_timescaledb_internal.segment_meta_get_min(_ts_meta_min_max_1, NULL::text) < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_16_chunk
|
||||
-> Seq Scan on _hyper_9_21_chunk
|
||||
Filter: (val_1 < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_17_chunk
|
||||
-> Seq Scan on _hyper_9_22_chunk
|
||||
Filter: (val_1 < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_18_chunk
|
||||
-> Seq Scan on _hyper_9_23_chunk
|
||||
Filter: (val_1 < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_19_chunk
|
||||
-> Seq Scan on _hyper_9_24_chunk
|
||||
Filter: (val_1 < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_20_chunk
|
||||
-> Seq Scan on _hyper_9_25_chunk
|
||||
Filter: (val_1 < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_21_chunk
|
||||
-> Seq Scan on _hyper_9_26_chunk
|
||||
Filter: (val_1 < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_22_chunk
|
||||
-> Seq Scan on _hyper_9_27_chunk
|
||||
Filter: (val_1 < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_23_chunk
|
||||
-> Seq Scan on _hyper_9_28_chunk
|
||||
Filter: (val_1 < 'a'::text)
|
||||
(25 rows)
|
||||
|
||||
@ -559,29 +600,29 @@ EXPLAIN (costs off) SELECT * FROM test_collation WHERE val_2 < 'a';
|
||||
QUERY PLAN
|
||||
----------------------------------------------------------------------------------------------------------------
|
||||
Append
|
||||
-> Custom Scan (DecompressChunk) on _hyper_7_14_chunk
|
||||
-> Custom Scan (DecompressChunk) on _hyper_9_19_chunk
|
||||
Filter: (val_2 < 'a'::text)
|
||||
-> Seq Scan on compress_hyper_8_24_chunk
|
||||
-> Seq Scan on compress_hyper_10_29_chunk
|
||||
Filter: (_timescaledb_internal.segment_meta_get_min(_ts_meta_min_max_2, NULL::text) < 'a'::text)
|
||||
-> Custom Scan (DecompressChunk) on _hyper_7_15_chunk
|
||||
-> Custom Scan (DecompressChunk) on _hyper_9_20_chunk
|
||||
Filter: (val_2 < 'a'::text)
|
||||
-> Seq Scan on compress_hyper_8_25_chunk
|
||||
-> Seq Scan on compress_hyper_10_30_chunk
|
||||
Filter: (_timescaledb_internal.segment_meta_get_min(_ts_meta_min_max_2, NULL::text) < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_16_chunk
|
||||
-> Seq Scan on _hyper_9_21_chunk
|
||||
Filter: (val_2 < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_17_chunk
|
||||
-> Seq Scan on _hyper_9_22_chunk
|
||||
Filter: (val_2 < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_18_chunk
|
||||
-> Seq Scan on _hyper_9_23_chunk
|
||||
Filter: (val_2 < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_19_chunk
|
||||
-> Seq Scan on _hyper_9_24_chunk
|
||||
Filter: (val_2 < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_20_chunk
|
||||
-> Seq Scan on _hyper_9_25_chunk
|
||||
Filter: (val_2 < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_21_chunk
|
||||
-> Seq Scan on _hyper_9_26_chunk
|
||||
Filter: (val_2 < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_22_chunk
|
||||
-> Seq Scan on _hyper_9_27_chunk
|
||||
Filter: (val_2 < 'a'::text)
|
||||
-> Seq Scan on _hyper_7_23_chunk
|
||||
-> Seq Scan on _hyper_9_28_chunk
|
||||
Filter: (val_2 < 'a'::text)
|
||||
(25 rows)
|
||||
|
||||
@ -589,29 +630,29 @@ EXPLAIN (costs off) SELECT * FROM test_collation WHERE val_1 < 'a' COLLATE "C";
|
||||
QUERY PLAN
|
||||
----------------------------------------------------------------------------------------------------------------------------
|
||||
Append
|
||||
-> Custom Scan (DecompressChunk) on _hyper_7_14_chunk
|
||||
-> Custom Scan (DecompressChunk) on _hyper_9_19_chunk
|
||||
Filter: (val_1 < 'a'::text COLLATE "C")
|
||||
-> Seq Scan on compress_hyper_8_24_chunk
|
||||
-> Seq Scan on compress_hyper_10_29_chunk
|
||||
Filter: (_timescaledb_internal.segment_meta_get_min(_ts_meta_min_max_1, NULL::text) < 'a'::text COLLATE "C")
|
||||
-> Custom Scan (DecompressChunk) on _hyper_7_15_chunk
|
||||
-> Custom Scan (DecompressChunk) on _hyper_9_20_chunk
|
||||
Filter: (val_1 < 'a'::text COLLATE "C")
|
||||
-> Seq Scan on compress_hyper_8_25_chunk
|
||||
-> Seq Scan on compress_hyper_10_30_chunk
|
||||
Filter: (_timescaledb_internal.segment_meta_get_min(_ts_meta_min_max_1, NULL::text) < 'a'::text COLLATE "C")
|
||||
-> Seq Scan on _hyper_7_16_chunk
|
||||
-> Seq Scan on _hyper_9_21_chunk
|
||||
Filter: (val_1 < 'a'::text COLLATE "C")
|
||||
-> Seq Scan on _hyper_7_17_chunk
|
||||
-> Seq Scan on _hyper_9_22_chunk
|
||||
Filter: (val_1 < 'a'::text COLLATE "C")
|
||||
-> Seq Scan on _hyper_7_18_chunk
|
||||
-> Seq Scan on _hyper_9_23_chunk
|
||||
Filter: (val_1 < 'a'::text COLLATE "C")
|
||||
-> Seq Scan on _hyper_7_19_chunk
|
||||
-> Seq Scan on _hyper_9_24_chunk
|
||||
Filter: (val_1 < 'a'::text COLLATE "C")
|
||||
-> Seq Scan on _hyper_7_20_chunk
|
||||
-> Seq Scan on _hyper_9_25_chunk
|
||||
Filter: (val_1 < 'a'::text COLLATE "C")
|
||||
-> Seq Scan on _hyper_7_21_chunk
|
||||
-> Seq Scan on _hyper_9_26_chunk
|
||||
Filter: (val_1 < 'a'::text COLLATE "C")
|
||||
-> Seq Scan on _hyper_7_22_chunk
|
||||
-> Seq Scan on _hyper_9_27_chunk
|
||||
Filter: (val_1 < 'a'::text COLLATE "C")
|
||||
-> Seq Scan on _hyper_7_23_chunk
|
||||
-> Seq Scan on _hyper_9_28_chunk
|
||||
Filter: (val_1 < 'a'::text COLLATE "C")
|
||||
(25 rows)
|
||||
|
||||
@ -619,29 +660,29 @@ EXPLAIN (costs off) SELECT * FROM test_collation WHERE val_2 < 'a' COLLATE "POSI
|
||||
QUERY PLAN
|
||||
--------------------------------------------------------------------------------------------------------------------------------
|
||||
Append
|
||||
-> Custom Scan (DecompressChunk) on _hyper_7_14_chunk
|
||||
-> Custom Scan (DecompressChunk) on _hyper_9_19_chunk
|
||||
Filter: (val_2 < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on compress_hyper_8_24_chunk
|
||||
-> Seq Scan on compress_hyper_10_29_chunk
|
||||
Filter: (_timescaledb_internal.segment_meta_get_min(_ts_meta_min_max_2, NULL::text) < 'a'::text COLLATE "POSIX")
|
||||
-> Custom Scan (DecompressChunk) on _hyper_7_15_chunk
|
||||
-> Custom Scan (DecompressChunk) on _hyper_9_20_chunk
|
||||
Filter: (val_2 < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on compress_hyper_8_25_chunk
|
||||
-> Seq Scan on compress_hyper_10_30_chunk
|
||||
Filter: (_timescaledb_internal.segment_meta_get_min(_ts_meta_min_max_2, NULL::text) < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_16_chunk
|
||||
-> Seq Scan on _hyper_9_21_chunk
|
||||
Filter: (val_2 < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_17_chunk
|
||||
-> Seq Scan on _hyper_9_22_chunk
|
||||
Filter: (val_2 < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_18_chunk
|
||||
-> Seq Scan on _hyper_9_23_chunk
|
||||
Filter: (val_2 < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_19_chunk
|
||||
-> Seq Scan on _hyper_9_24_chunk
|
||||
Filter: (val_2 < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_20_chunk
|
||||
-> Seq Scan on _hyper_9_25_chunk
|
||||
Filter: (val_2 < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_21_chunk
|
||||
-> Seq Scan on _hyper_9_26_chunk
|
||||
Filter: (val_2 < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_22_chunk
|
||||
-> Seq Scan on _hyper_9_27_chunk
|
||||
Filter: (val_2 < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_23_chunk
|
||||
-> Seq Scan on _hyper_9_28_chunk
|
||||
Filter: (val_2 < 'a'::text COLLATE "POSIX")
|
||||
(25 rows)
|
||||
|
||||
@ -650,27 +691,27 @@ EXPLAIN (costs off) SELECT * FROM test_collation WHERE val_1 < 'a' COLLATE "POSI
|
||||
QUERY PLAN
|
||||
----------------------------------------------------------
|
||||
Append
|
||||
-> Custom Scan (DecompressChunk) on _hyper_7_14_chunk
|
||||
-> Custom Scan (DecompressChunk) on _hyper_9_19_chunk
|
||||
Filter: (val_1 < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on compress_hyper_8_24_chunk
|
||||
-> Custom Scan (DecompressChunk) on _hyper_7_15_chunk
|
||||
-> Seq Scan on compress_hyper_10_29_chunk
|
||||
-> Custom Scan (DecompressChunk) on _hyper_9_20_chunk
|
||||
Filter: (val_1 < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on compress_hyper_8_25_chunk
|
||||
-> Seq Scan on _hyper_7_16_chunk
|
||||
-> Seq Scan on compress_hyper_10_30_chunk
|
||||
-> Seq Scan on _hyper_9_21_chunk
|
||||
Filter: (val_1 < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_17_chunk
|
||||
-> Seq Scan on _hyper_9_22_chunk
|
||||
Filter: (val_1 < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_18_chunk
|
||||
-> Seq Scan on _hyper_9_23_chunk
|
||||
Filter: (val_1 < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_19_chunk
|
||||
-> Seq Scan on _hyper_9_24_chunk
|
||||
Filter: (val_1 < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_20_chunk
|
||||
-> Seq Scan on _hyper_9_25_chunk
|
||||
Filter: (val_1 < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_21_chunk
|
||||
-> Seq Scan on _hyper_9_26_chunk
|
||||
Filter: (val_1 < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_22_chunk
|
||||
-> Seq Scan on _hyper_9_27_chunk
|
||||
Filter: (val_1 < 'a'::text COLLATE "POSIX")
|
||||
-> Seq Scan on _hyper_7_23_chunk
|
||||
-> Seq Scan on _hyper_9_28_chunk
|
||||
Filter: (val_1 < 'a'::text COLLATE "POSIX")
|
||||
(23 rows)
|
||||
|
||||
@ -678,27 +719,27 @@ EXPLAIN (costs off) SELECT * FROM test_collation WHERE val_2 < 'a' COLLATE "C";
|
||||
QUERY PLAN
|
||||
----------------------------------------------------------
|
||||
Append
|
||||
-> Custom Scan (DecompressChunk) on _hyper_7_14_chunk
|
||||
-> Custom Scan (DecompressChunk) on _hyper_9_19_chunk
|
||||
Filter: (val_2 < 'a'::text COLLATE "C")
|
||||
-> Seq Scan on compress_hyper_8_24_chunk
|
||||
-> Custom Scan (DecompressChunk) on _hyper_7_15_chunk
|
||||
-> Seq Scan on compress_hyper_10_29_chunk
|
||||
-> Custom Scan (DecompressChunk) on _hyper_9_20_chunk
|
||||
Filter: (val_2 < 'a'::text COLLATE "C")
|
||||
-> Seq Scan on compress_hyper_8_25_chunk
|
||||
-> Seq Scan on _hyper_7_16_chunk
|
||||
-> Seq Scan on compress_hyper_10_30_chunk
|
||||
-> Seq Scan on _hyper_9_21_chunk
|
||||
Filter: (val_2 < 'a'::text COLLATE "C")
|
||||
-> Seq Scan on _hyper_7_17_chunk
|
||||
-> Seq Scan on _hyper_9_22_chunk
|
||||
Filter: (val_2 < 'a'::text COLLATE "C")
|
||||
-> Seq Scan on _hyper_7_18_chunk
|
||||
-> Seq Scan on _hyper_9_23_chunk
|
||||
Filter: (val_2 < 'a'::text COLLATE "C")
|
||||
-> Seq Scan on _hyper_7_19_chunk
|
||||
-> Seq Scan on _hyper_9_24_chunk
|
||||
Filter: (val_2 < 'a'::text COLLATE "C")
|
||||
-> Seq Scan on _hyper_7_20_chunk
|
||||
-> Seq Scan on _hyper_9_25_chunk
|
||||
Filter: (val_2 < 'a'::text COLLATE "C")
|
||||
-> Seq Scan on _hyper_7_21_chunk
|
||||
-> Seq Scan on _hyper_9_26_chunk
|
||||
Filter: (val_2 < 'a'::text COLLATE "C")
|
||||
-> Seq Scan on _hyper_7_22_chunk
|
||||
-> Seq Scan on _hyper_9_27_chunk
|
||||
Filter: (val_2 < 'a'::text COLLATE "C")
|
||||
-> Seq Scan on _hyper_7_23_chunk
|
||||
-> Seq Scan on _hyper_9_28_chunk
|
||||
Filter: (val_2 < 'a'::text COLLATE "C")
|
||||
(23 rows)
|
||||
|
||||
|
@@ -10,6 +10,7 @@ SET timescaledb.enable_transparent_decompression to OFF;
 --basic test with count
 create table foo (a integer, b integer, c integer, d integer);
 select table_name from create_hypertable('foo', 'a', chunk_time_interval=> 10);
+create unique index foo_uniq ON foo (a, b);
 
 insert into foo values( 3 , 16 , 20, 11);
 insert into foo values( 10 , 10 , 20, 120);
@@ -48,7 +49,38 @@ insert into foo values( 11 , 10 , 20, 120);
 update foo set b =20 where a = 10;
 delete from foo where a = 10;
 
---TEST2b decompress the chunk and try DML
+--TEST2b try complex DML on compressed chunk
+create table foo_join ( a integer, newval integer);
+select table_name from create_hypertable('foo_join', 'a', chunk_time_interval=> 10);
+insert into foo_join select generate_series(0,40, 10), 111;
+create table foo_join2 ( a integer, newval integer);
+select table_name from create_hypertable('foo_join2', 'a', chunk_time_interval=> 10);
+insert into foo_join select generate_series(0,40, 10), 222;
+update foo
+set b = newval
+from foo_join where foo.a = foo_join.a;
+update foo
+set b = newval
+from foo_join where foo.a = foo_join.a and foo_join.a > 10;
+--here the chunk gets excluded , so succeeds --
+update foo
+set b = newval
+from foo_join where foo.a = foo_join.a and foo.a > 20;
+update foo
+set b = (select f1.newval from foo_join f1 left join lateral (select newval as newval2 from foo_join2 f2 where f1.a= f2.a ) subq on true limit 1);
+
+--upsert test --
+insert into foo values(10, 12, 12, 12)
+on conflict( a, b)
+do update set b = excluded.b;
+
+--TEST2c dml directly on the chunk NOTE update/deletes don't get blocked (TODO)
+insert into _timescaledb_internal._hyper_1_2_chunk values(10, 12, 12, 12);
+update _timescaledb_internal._hyper_1_2_chunk
+set b = 12;
+delete from _timescaledb_internal._hyper_1_2_chunk;
+
+--TEST2d decompress the chunk and try DML
 select decompress_chunk( '_timescaledb_internal._hyper_1_2_chunk');
 insert into foo values( 11 , 10 , 20, 120);
 update foo set b =20 where a = 10;