Mirror of https://github.com/timescale/timescaledb.git (synced 2025-05-23 22:41:34 +08:00)
Prepare C code for compiling on Windows
For all exported functions the macro PGDLLEXPORT needs to be prepended. Additionally, `open` is a macro on Windows, so identifiers with that name had to be renamed. A few other small changes were made to keep Visual Studio's compiler happy and to get rid of warnings (e.g., adding return statements after elog).
parent 097db3d589
commit b57e2bf1f4
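Before the hunks themselves, here is a minimal standalone sketch of the portability patterns this commit applies, assuming only the standard PostgreSQL extension headers; the names my_func, MyVec, and MyOps are hypothetical and do not appear in the commit:

/* Hypothetical illustration of the Windows-portability patterns used below. */
#include "postgres.h"
#include "fmgr.h"

PG_MODULE_MAGIC;

/* 1. Exported functions are declared with PGDLLEXPORT so MSVC exports them
 *    from the DLL and does not complain about a missing prototype. */
PGDLLEXPORT Datum my_func(PG_FUNCTION_ARGS);
PG_FUNCTION_INFO_V1(my_func);

/* 2. Zero-length arrays ([0]) are a GCC extension; Visual Studio needs
 *    FLEXIBLE_ARRAY_MEMBER (provided by PostgreSQL's c.h). */
typedef struct MyVec
{
	int16		num_items;
	int32		items[FLEXIBLE_ARRAY_MEMBER];	/* was: items[0] */
} MyVec;

/* 3. Per the commit message, "open" is a macro on Windows, so members
 *    named open/close are renamed (here to openheap/closeheap). */
typedef struct MyOps
{
	void		(*openheap) (void *ctx);	/* was: (*open) */
	void		(*closeheap) (void *ctx);	/* was: (*close) */
} MyOps;

Datum
my_func(PG_FUNCTION_ARGS)
{
	int32		state = PG_GETARG_INT32(0);

	switch (state)
	{
		case 0:
			PG_RETURN_INT32(state);
		default:
			elog(ERROR, "unknown state: %d", state);
			/* 4. elog(ERROR) never returns, but Visual Studio cannot see
			 *    that, so a trailing return silences the "not all control
			 *    paths return a value" warning. */
			PG_RETURN_INT32(-1);
	}
}

The same four changes recur throughout the hunks that follow.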
@@ -15,6 +15,15 @@
  * SELECT first(metric, time), last(metric, time) FROM metric GROUP BY hostname.
  */

+PGDLLEXPORT Datum first_sfunc(PG_FUNCTION_ARGS);
+PGDLLEXPORT Datum first_combinefunc(PG_FUNCTION_ARGS);
+PGDLLEXPORT Datum last_sfunc(PG_FUNCTION_ARGS);
+PGDLLEXPORT Datum last_combinefunc(PG_FUNCTION_ARGS);
+PGDLLEXPORT Datum bookend_finalfunc(PG_FUNCTION_ARGS);
+PGDLLEXPORT Datum bookend_serializefunc(PG_FUNCTION_ARGS);
+PGDLLEXPORT Datum bookend_deserializefunc(PG_FUNCTION_ARGS);
+
 PG_FUNCTION_INFO_V1(first_sfunc);
 PG_FUNCTION_INFO_V1(first_combinefunc);
 PG_FUNCTION_INFO_V1(last_sfunc);
@@ -369,7 +378,6 @@ bookend_combinefunc(MemoryContext aggcontext, InternalCmpAggStore *state1, Inter
 	PG_RETURN_POINTER(state1);
 }

-
 /* first(internal internal_state, anyelement value, "any" comparison_element) */
 Datum
 first_sfunc(PG_FUNCTION_ARGS)
@@ -8,7 +8,7 @@
 typedef struct CacheQuery
 {
 	void *result;
-	void *data[0];
+	void *data;
 } CacheQuery;

 typedef struct CacheStats
@@ -60,6 +60,7 @@ inval_cache_callback(Datum arg, Oid relid)
 	hypertable_cache_invalidate_callback();
 }

+PGDLLEXPORT Datum invalidate_relcache_trigger(PG_FUNCTION_ARGS);
 PG_FUNCTION_INFO_V1(invalidate_relcache_trigger);

 /*
@@ -101,6 +102,7 @@ invalidate_relcache_trigger(PG_FUNCTION_ARGS)
 	return PointerGetDatum(trigdata->tg_trigtuple);
 }

+PGDLLEXPORT Datum invalidate_relcache(PG_FUNCTION_ARGS);
 PG_FUNCTION_INFO_V1(invalidate_relcache);

 /*
@@ -36,7 +36,7 @@ typedef struct Chunk
 	Hypercube *cube;
 	int16 capacity;
 	int16 num_constraints;
-	ChunkConstraint constraints[0];
+	ChunkConstraint constraints[FLEXIBLE_ARRAY_MEMBER];
 } Chunk;

 #define CHUNK_SIZE(num_constraints) \
@@ -16,7 +16,7 @@ typedef struct ChunkConstraint
 typedef struct ChunkConstraintVec
 {
 	int16 num_constraints;
-	ChunkConstraint constraints[0];
+	ChunkConstraint constraints[FLEXIBLE_ARRAY_MEMBER];
 } ChunkConstraintVec;

 typedef struct Chunk Chunk;
@@ -3,6 +3,7 @@

 /* Old functions that are no longer used but are needed for compatibiliy when
  * updating the extension. */
+PGDLLEXPORT Datum insert_main_table_trigger(PG_FUNCTION_ARGS);
 PG_FUNCTION_INFO_V1(insert_main_table_trigger);

 Datum
@@ -12,6 +13,7 @@ insert_main_table_trigger(PG_FUNCTION_ARGS)
 	PG_RETURN_NULL();
 }

+PGDLLEXPORT Datum insert_main_table_trigger_after(PG_FUNCTION_ARGS);
 PG_FUNCTION_INFO_V1(insert_main_table_trigger_after);

 Datum
@@ -21,6 +23,7 @@ insert_main_table_trigger_after(PG_FUNCTION_ARGS)
 	PG_RETURN_NULL();
 }

+PGDLLEXPORT Datum ddl_is_change_owner(PG_FUNCTION_ARGS);
 PG_FUNCTION_INFO_V1(ddl_is_change_owner);

 Datum
@@ -30,6 +33,7 @@ ddl_is_change_owner(PG_FUNCTION_ARGS)
 	PG_RETURN_NULL();
 }

+PGDLLEXPORT Datum ddl_change_owner_to(PG_FUNCTION_ARGS);
 PG_FUNCTION_INFO_V1(ddl_change_owner_to);

 Datum
@@ -149,6 +149,7 @@ calculate_open_range_default(Dimension *dim, int64 value)
 	return dimension_slice_create(dim->fd.id, range_start, range_end);
 }

+PGDLLEXPORT Datum dimension_calculate_open_range_default(PG_FUNCTION_ARGS);
 PG_FUNCTION_INFO_V1(dimension_calculate_open_range_default);

 /*
@@ -194,6 +195,7 @@ calculate_closed_range_default(Dimension *dim, int64 value)
 	return dimension_slice_create(dim->fd.id, range_start, range_end);
 }

+PGDLLEXPORT Datum dimension_calculate_closed_range_default(PG_FUNCTION_ARGS);
 PG_FUNCTION_INFO_V1(dimension_calculate_closed_range_default);

 /*
@@ -42,7 +42,7 @@ typedef struct Hyperspace
 	uint16 capacity;
 	uint16 num_dimensions;
 	/* Open dimensions should be stored before closed dimensions */
-	Dimension dimensions[0];
+	Dimension dimensions[FLEXIBLE_ARRAY_MEMBER];
 } Hyperspace;

 #define HYPERSPACE_SIZE(num_dimensions) \
@@ -56,7 +56,7 @@ typedef struct Point
 	int16 cardinality;
 	uint8 num_coords;
 	/* Open dimension coordinates are stored before the closed coordinates */
-	int64 coordinates[0];
+	int64 coordinates[FLEXIBLE_ARRAY_MEMBER];
 } Point;

 #define POINT_SIZE(cardinality) \
@@ -14,7 +14,7 @@ typedef struct DimensionVec
 	int32 capacity;    /* The capacity of the slices array */
 	int32 num_slices;  /* The current number of slices in slices
 	                    * array */
-	DimensionSlice *slices[0];
+	DimensionSlice *slices[FLEXIBLE_ARRAY_MEMBER];
 } DimensionVec;

 #define DIMENSION_VEC_SIZE(num_slices) \
@@ -5,6 +5,8 @@

 #include "event_trigger.h"

+#define DDL_INFO_NATTS 9
+
 /* Function manager info for the event "pg_event_trigger_ddl_commands", which is
  * used to retrieve information on executed DDL commands in an event
  * trigger. The function manager info is initialized on extension load. */
@@ -42,8 +44,8 @@ event_trigger_ddl_commands(void)
 	{
 		HeapTuple tuple = ExecFetchSlotTuple(slot);
 		CollectedCommand *cmd;
-		Datum values[rsinfo.setDesc->natts];
-		bool nulls[rsinfo.setDesc->natts];
+		Datum values[DDL_INFO_NATTS];
+		bool nulls[DDL_INFO_NATTS];

 		heap_deform_tuple(tuple, rsinfo.setDesc, values, nulls);

@@ -167,6 +167,7 @@ extension_invalidate(Oid relid)
 			return false;
 		default:
 			elog(ERROR, "unknown state: %d", extstate);
+			return false;
 	}
 }

@@ -201,5 +202,6 @@ extension_is_loaded(void)
 			return false;
 		default:
 			elog(ERROR, "unknown state: %d", extstate);
+			return false;
 	}
 }
@@ -21,6 +21,12 @@
  * nbucket+2 buckets accounting for buckets outside the range.
  */

+PGDLLEXPORT Datum hist_sfunc(PG_FUNCTION_ARGS);
+PGDLLEXPORT Datum hist_combinefunc(PG_FUNCTION_ARGS);
+PGDLLEXPORT Datum hist_serializefunc(PG_FUNCTION_ARGS);
+PGDLLEXPORT Datum hist_deserializefunc(PG_FUNCTION_ARGS);
+PGDLLEXPORT Datum hist_finalfunc(PG_FUNCTION_ARGS);
+
 PG_FUNCTION_INFO_V1(hist_sfunc);
 PG_FUNCTION_INFO_V1(hist_combinefunc);
 PG_FUNCTION_INFO_V1(hist_serializefunc);
@@ -15,7 +15,7 @@ typedef struct Hypercube
 	int16 num_slices;  /* actual number of slices (should equal
 	                    * capacity after create) */
 	/* Slices are stored in dimension order */
-	DimensionSlice *slices[0];
+	DimensionSlice *slices[FLEXIBLE_ARRAY_MEMBER];
 } Hypercube;

 #define HYPERCUBE_SIZE(num_dimensions) \
@@ -95,6 +95,7 @@ partitioning_func_apply_tuple(PartitioningInfo *pinfo, HeapTuple tuple, TupleDes
 }

 /* _timescaledb_catalog.get_partition_for_key(key TEXT) RETURNS INT */
+PGDLLEXPORT Datum get_partition_for_key(PG_FUNCTION_ARGS);
 PG_FUNCTION_INFO_V1(get_partition_for_key);

 Datum
@@ -33,11 +33,11 @@ typedef struct InternalScannerCtx
  */
 typedef struct Scanner
 {
-	Relation (*open) (InternalScannerCtx *ctx);
+	Relation (*openheap) (InternalScannerCtx *ctx);
 	ScanDesc (*beginscan) (InternalScannerCtx *ctx);
 	bool (*getnext) (InternalScannerCtx *ctx);
 	void (*endscan) (InternalScannerCtx *ctx);
-	void (*close) (InternalScannerCtx *ctx);
+	void (*closeheap) (InternalScannerCtx *ctx);
 } Scanner;

 /* Functions implementing heap scans */
@@ -127,18 +127,18 @@ index_scanner_close(InternalScannerCtx *ctx)
  */
 static Scanner scanners[] = {
 	[ScannerTypeHeap] = {
-		.open = heap_scanner_open,
+		.openheap = heap_scanner_open,
 		.beginscan = heap_scanner_beginscan,
 		.getnext = heap_scanner_getnext,
 		.endscan = heap_scanner_endscan,
-		.close = heap_scanner_close,
+		.closeheap = heap_scanner_close,
 	},
 	[ScannerTypeIndex] = {
-		.open = index_scanner_open,
+		.openheap = index_scanner_open,
 		.beginscan = index_scanner_beginscan,
 		.getnext = index_scanner_getnext,
 		.endscan = index_scanner_endscan,
-		.close = index_scanner_close,
+		.closeheap = index_scanner_close,
 	}
 };

@@ -159,7 +159,7 @@ scanner_scan(ScannerCtx *ctx)
 		.sctx = ctx,
 	};

-	scanner->open(&ictx);
+	scanner->openheap(&ictx);
 	scanner->beginscan(&ictx);

 	tuple_desc = RelationGetDescr(ictx.tablerel);
@@ -214,7 +214,7 @@ scanner_scan(ScannerCtx *ctx)
 		ctx->postscan(ictx.tinfo.count, ctx->data);

 	scanner->endscan(&ictx);
-	scanner->close(&ictx);
+	scanner->closeheap(&ictx);

 	return ictx.tinfo.count;
 }
@@ -176,6 +176,7 @@ time_value_to_internal(Datum time_val, Oid type)
 	}

 	elog(ERROR, "unkown time type oid '%d'", type);
+	return -1;
 }

 /* Make a RangeVar from a regclass Oid */
12 src/utils.h
@@ -4,12 +4,14 @@
 #include "fmgr.h"
 #include "nodes/primnodes.h"

-extern Datum pg_timestamp_to_microseconds(PG_FUNCTION_ARGS);
-extern Datum pg_microseconds_to_timestamp(PG_FUNCTION_ARGS);
-extern Datum pg_timestamp_to_unix_microseconds(PG_FUNCTION_ARGS);
-extern Datum pg_unix_microseconds_to_timestamp(PG_FUNCTION_ARGS);
+extern PGDLLEXPORT Datum pg_timestamp_to_microseconds(PG_FUNCTION_ARGS);
+extern PGDLLEXPORT Datum pg_microseconds_to_timestamp(PG_FUNCTION_ARGS);
+extern PGDLLEXPORT Datum pg_timestamp_to_unix_microseconds(PG_FUNCTION_ARGS);
+extern PGDLLEXPORT Datum pg_unix_microseconds_to_timestamp(PG_FUNCTION_ARGS);

-extern Datum timestamp_bucket(PG_FUNCTION_ARGS);
+extern PGDLLEXPORT Datum timestamp_bucket(PG_FUNCTION_ARGS);
+extern PGDLLEXPORT Datum timestamptz_bucket(PG_FUNCTION_ARGS);
+extern PGDLLEXPORT Datum date_bucket(PG_FUNCTION_ARGS);

 /*
  * Convert a column value into the internal time representation.
@@ -4,6 +4,8 @@

 const char *git_commit = EXT_GIT_COMMIT;

+PGDLLEXPORT Datum get_git_commit(PG_FUNCTION_ARGS);
+
 PG_FUNCTION_INFO_V1(get_git_commit);

 Datum