From b57e2bf1f4fb0f8d0dcc4a493218be518f3729a3 Mon Sep 17 00:00:00 2001
From: Rob Kiefer
Date: Fri, 22 Sep 2017 14:18:34 +0000
Subject: [PATCH] Prepare C code for compiling on Windows

For all exported functions the macro PGDLLEXPORT needs to be prepended.
Additionally, on Windows `open` is a macro that needed to be renamed.
A few other small changes are done to make Visual Studio's compiler
happy / get rid of warnings (e.g. adding return statements after elog).
---
 src/agg_bookend.c      | 10 +++++++++-
 src/cache.h            |  2 +-
 src/cache_invalidate.c |  2 ++
 src/chunk.h            |  2 +-
 src/chunk_constraint.h |  2 +-
 src/compat.c           |  4 ++++
 src/dimension.c        |  2 ++
 src/dimension.h        |  4 ++--
 src/dimension_vector.h |  2 +-
 src/event_trigger.c    |  6 ++++--
 src/extension.c        |  2 ++
 src/histogram.c        |  6 ++++++
 src/hypercube.h        |  2 +-
 src/partitioning.c     |  1 +
 src/scanner.c          | 16 ++++++++--------
 src/utils.c            |  1 +
 src/utils.h            | 12 +++++++-----
 src/version.c          |  2 ++
 18 files changed, 55 insertions(+), 23 deletions(-)

diff --git a/src/agg_bookend.c b/src/agg_bookend.c
index 6d1f2d7dd..1624f24ed 100644
--- a/src/agg_bookend.c
+++ b/src/agg_bookend.c
@@ -15,6 +15,15 @@
  * SELECT first(metric, time), last(metric, time) FROM metric GROUP BY hostname.
  */
 
+PGDLLEXPORT Datum first_sfunc(PG_FUNCTION_ARGS);
+PGDLLEXPORT Datum first_combinefunc(PG_FUNCTION_ARGS);
+PGDLLEXPORT Datum last_sfunc(PG_FUNCTION_ARGS);
+PGDLLEXPORT Datum last_combinefunc(PG_FUNCTION_ARGS);
+PGDLLEXPORT Datum bookend_finalfunc(PG_FUNCTION_ARGS);
+PGDLLEXPORT Datum bookend_serializefunc(PG_FUNCTION_ARGS);
+PGDLLEXPORT Datum bookend_deserializefunc(PG_FUNCTION_ARGS);
+
+
 PG_FUNCTION_INFO_V1(first_sfunc);
 PG_FUNCTION_INFO_V1(first_combinefunc);
 PG_FUNCTION_INFO_V1(last_sfunc);
@@ -369,7 +378,6 @@ bookend_combinefunc(MemoryContext aggcontext, InternalCmpAggStore *state1, Inter
     PG_RETURN_POINTER(state1);
 }
 
-
 /* first(internal internal_state, anyelement value, "any" comparison_element) */
 Datum
 first_sfunc(PG_FUNCTION_ARGS)
diff --git a/src/cache.h b/src/cache.h
index 6324a86b5..64b5f35af 100644
--- a/src/cache.h
+++ b/src/cache.h
@@ -8,7 +8,7 @@
 typedef struct CacheQuery
 {
     void       *result;
-    void       *data[0];
+    void       *data;
 } CacheQuery;
 
 typedef struct CacheStats
diff --git a/src/cache_invalidate.c b/src/cache_invalidate.c
index 42ad190d6..331f64319 100644
--- a/src/cache_invalidate.c
+++ b/src/cache_invalidate.c
@@ -60,6 +60,7 @@ inval_cache_callback(Datum arg, Oid relid)
     hypertable_cache_invalidate_callback();
 }
 
+PGDLLEXPORT Datum invalidate_relcache_trigger(PG_FUNCTION_ARGS);
 PG_FUNCTION_INFO_V1(invalidate_relcache_trigger);
 
 /*
@@ -101,6 +102,7 @@ invalidate_relcache_trigger(PG_FUNCTION_ARGS)
     return PointerGetDatum(trigdata->tg_trigtuple);
 }
 
+PGDLLEXPORT Datum invalidate_relcache(PG_FUNCTION_ARGS);
 PG_FUNCTION_INFO_V1(invalidate_relcache);
 
 /*
diff --git a/src/chunk.h b/src/chunk.h
index 4f6287741..b917d1594 100644
--- a/src/chunk.h
+++ b/src/chunk.h
@@ -36,7 +36,7 @@ typedef struct Chunk
     Hypercube  *cube;
     int16       capacity;
     int16       num_constraints;
-    ChunkConstraint constraints[0];
+    ChunkConstraint constraints[FLEXIBLE_ARRAY_MEMBER];
 } Chunk;
 
 #define CHUNK_SIZE(num_constraints)                  \
diff --git a/src/chunk_constraint.h b/src/chunk_constraint.h
index b0138a3da..d300a500f 100644
--- a/src/chunk_constraint.h
+++ b/src/chunk_constraint.h
@@ -16,7 +16,7 @@ typedef struct ChunkConstraint
 typedef struct ChunkConstraintVec
 {
     int16       num_constraints;
-    ChunkConstraint constraints[0];
+    ChunkConstraint constraints[FLEXIBLE_ARRAY_MEMBER];
 } ChunkConstraintVec;
 
 typedef struct Chunk Chunk;
diff --git a/src/compat.c b/src/compat.c
index d6fa2faa5..aceb36d57 100644
--- a/src/compat.c
+++ b/src/compat.c
@@ -3,6 +3,7 @@
 /* Old functions that are no longer used but are needed for compatibiliy when
  * updating the extension. */
 
+PGDLLEXPORT Datum insert_main_table_trigger(PG_FUNCTION_ARGS);
 PG_FUNCTION_INFO_V1(insert_main_table_trigger);
 
 Datum
@@ -12,6 +13,7 @@ insert_main_table_trigger(PG_FUNCTION_ARGS)
     PG_RETURN_NULL();
 }
 
+PGDLLEXPORT Datum insert_main_table_trigger_after(PG_FUNCTION_ARGS);
 PG_FUNCTION_INFO_V1(insert_main_table_trigger_after);
 
 Datum
@@ -21,6 +23,7 @@ insert_main_table_trigger_after(PG_FUNCTION_ARGS)
     PG_RETURN_NULL();
 }
 
+PGDLLEXPORT Datum ddl_is_change_owner(PG_FUNCTION_ARGS);
 PG_FUNCTION_INFO_V1(ddl_is_change_owner);
 
 Datum
@@ -30,6 +33,7 @@ ddl_is_change_owner(PG_FUNCTION_ARGS)
     PG_RETURN_NULL();
 }
 
+PGDLLEXPORT Datum ddl_change_owner_to(PG_FUNCTION_ARGS);
 PG_FUNCTION_INFO_V1(ddl_change_owner_to);
 
 Datum
diff --git a/src/dimension.c b/src/dimension.c
index 646554f94..aa36e7938 100644
--- a/src/dimension.c
+++ b/src/dimension.c
@@ -149,6 +149,7 @@ calculate_open_range_default(Dimension *dim, int64 value)
     return dimension_slice_create(dim->fd.id, range_start, range_end);
 }
 
+PGDLLEXPORT Datum dimension_calculate_open_range_default(PG_FUNCTION_ARGS);
 PG_FUNCTION_INFO_V1(dimension_calculate_open_range_default);
 
 /*
@@ -194,6 +195,7 @@ calculate_closed_range_default(Dimension *dim, int64 value)
     return dimension_slice_create(dim->fd.id, range_start, range_end);
 }
 
+PGDLLEXPORT Datum dimension_calculate_closed_range_default(PG_FUNCTION_ARGS);
 PG_FUNCTION_INFO_V1(dimension_calculate_closed_range_default);
 
 /*
diff --git a/src/dimension.h b/src/dimension.h
index 76694681b..02a7cf80f 100644
--- a/src/dimension.h
+++ b/src/dimension.h
@@ -42,7 +42,7 @@ typedef struct Hyperspace
     uint16      capacity;
     uint16      num_dimensions;
     /* Open dimensions should be stored before closed dimensions */
-    Dimension   dimensions[0];
+    Dimension   dimensions[FLEXIBLE_ARRAY_MEMBER];
 } Hyperspace;
 
 #define HYPERSPACE_SIZE(num_dimensions)                  \
@@ -56,7 +56,7 @@ typedef struct Point
     int16       cardinality;
     uint8       num_coords;
     /* Open dimension coordinates are stored before the closed coordinates */
-    int64       coordinates[0];
+    int64       coordinates[FLEXIBLE_ARRAY_MEMBER];
 } Point;
 
 #define POINT_SIZE(cardinality)                  \
diff --git a/src/dimension_vector.h b/src/dimension_vector.h
index 61382f8ee..d85aa7a6b 100644
--- a/src/dimension_vector.h
+++ b/src/dimension_vector.h
@@ -14,7 +14,7 @@ typedef struct DimensionVec
     int32       capacity;       /* The capacity of the slices array */
     int32       num_slices;     /* The current number of slices in slices
                                  * array */
-    DimensionSlice *slices[0];
+    DimensionSlice *slices[FLEXIBLE_ARRAY_MEMBER];
 } DimensionVec;
 
 #define DIMENSION_VEC_SIZE(num_slices)                  \
diff --git a/src/event_trigger.c b/src/event_trigger.c
index cd38b6e26..ff261d32a 100644
--- a/src/event_trigger.c
+++ b/src/event_trigger.c
@@ -5,6 +5,8 @@
 #include "event_trigger.h"
 
+#define DDL_INFO_NATTS 9
+
 /* Function manager info for the event "pg_event_trigger_ddl_commands", which is
  * used to retrieve information on executed DDL commands in an event
  * trigger. The function manager info is initialized on extension load.
  */
@@ -42,8 +44,8 @@ event_trigger_ddl_commands(void)
     {
         HeapTuple   tuple = ExecFetchSlotTuple(slot);
         CollectedCommand *cmd;
-        Datum       values[rsinfo.setDesc->natts];
-        bool        nulls[rsinfo.setDesc->natts];
+        Datum       values[DDL_INFO_NATTS];
+        bool        nulls[DDL_INFO_NATTS];
 
         heap_deform_tuple(tuple, rsinfo.setDesc, values, nulls);
 
diff --git a/src/extension.c b/src/extension.c
index 2f9d1043d..8c2034a53 100644
--- a/src/extension.c
+++ b/src/extension.c
@@ -167,6 +167,7 @@ extension_invalidate(Oid relid)
             return false;
         default:
             elog(ERROR, "unknown state: %d", extstate);
+            return false;
     }
 }
 
@@ -201,5 +202,6 @@ extension_is_loaded(void)
             return false;
         default:
             elog(ERROR, "unknown state: %d", extstate);
+            return false;
     }
 }
diff --git a/src/histogram.c b/src/histogram.c
index 0996bc6bf..ef75143e2 100644
--- a/src/histogram.c
+++ b/src/histogram.c
@@ -21,6 +21,12 @@
  * nbucket+2 buckets accounting for buckets outside the range.
  */
 
+PGDLLEXPORT Datum hist_sfunc(PG_FUNCTION_ARGS);
+PGDLLEXPORT Datum hist_combinefunc(PG_FUNCTION_ARGS);
+PGDLLEXPORT Datum hist_serializefunc(PG_FUNCTION_ARGS);
+PGDLLEXPORT Datum hist_deserializefunc(PG_FUNCTION_ARGS);
+PGDLLEXPORT Datum hist_finalfunc(PG_FUNCTION_ARGS);
+
 PG_FUNCTION_INFO_V1(hist_sfunc);
 PG_FUNCTION_INFO_V1(hist_combinefunc);
 PG_FUNCTION_INFO_V1(hist_serializefunc);
diff --git a/src/hypercube.h b/src/hypercube.h
index 94bc53f38..4b726aede 100644
--- a/src/hypercube.h
+++ b/src/hypercube.h
@@ -15,7 +15,7 @@ typedef struct Hypercube
     int16       num_slices;     /* actual number of slices (should equal
                                  * capacity after create) */
     /* Slices are stored in dimension order */
-    DimensionSlice *slices[0];
+    DimensionSlice *slices[FLEXIBLE_ARRAY_MEMBER];
 } Hypercube;
 
 #define HYPERCUBE_SIZE(num_dimensions)                  \
diff --git a/src/partitioning.c b/src/partitioning.c
index 66851918b..80b468161 100644
--- a/src/partitioning.c
+++ b/src/partitioning.c
@@ -95,6 +95,7 @@ partitioning_func_apply_tuple(PartitioningInfo *pinfo, HeapTuple tuple, TupleDes
 }
 
 /* _timescaledb_catalog.get_partition_for_key(key TEXT) RETURNS INT */
+PGDLLEXPORT Datum get_partition_for_key(PG_FUNCTION_ARGS);
 PG_FUNCTION_INFO_V1(get_partition_for_key);
 
 Datum
diff --git a/src/scanner.c b/src/scanner.c
index 91364154f..9049f8373 100644
--- a/src/scanner.c
+++ b/src/scanner.c
@@ -33,11 +33,11 @@ typedef struct InternalScannerCtx
  */
 typedef struct Scanner
 {
-    Relation    (*open) (InternalScannerCtx *ctx);
+    Relation    (*openheap) (InternalScannerCtx *ctx);
     ScanDesc    (*beginscan) (InternalScannerCtx *ctx);
     bool        (*getnext) (InternalScannerCtx *ctx);
     void        (*endscan) (InternalScannerCtx *ctx);
-    void        (*close) (InternalScannerCtx *ctx);
+    void        (*closeheap) (InternalScannerCtx *ctx);
 } Scanner;
 
 /* Functions implementing heap scans */
@@ -127,18 +127,18 @@ index_scanner_close(InternalScannerCtx *ctx)
  */
 static Scanner scanners[] = {
     [ScannerTypeHeap] = {
-        .open = heap_scanner_open,
+        .openheap = heap_scanner_open,
         .beginscan = heap_scanner_beginscan,
         .getnext = heap_scanner_getnext,
         .endscan = heap_scanner_endscan,
-        .close = heap_scanner_close,
+        .closeheap = heap_scanner_close,
     },
     [ScannerTypeIndex] = {
-        .open = index_scanner_open,
+        .openheap = index_scanner_open,
         .beginscan = index_scanner_beginscan,
         .getnext = index_scanner_getnext,
         .endscan = index_scanner_endscan,
-        .close = index_scanner_close,
+        .closeheap = index_scanner_close,
     }
 };
 
@@ -159,7 +159,7 @@ scanner_scan(ScannerCtx *ctx)
         .sctx = ctx,
     };
 
-    scanner->open(&ictx);
+    scanner->openheap(&ictx);
     scanner->beginscan(&ictx);
 
     tuple_desc = RelationGetDescr(ictx.tablerel);
@@ -214,7 +214,7 @@ scanner_scan(ScannerCtx *ctx)
         ctx->postscan(ictx.tinfo.count, ctx->data);
 
     scanner->endscan(&ictx);
-    scanner->close(&ictx);
+    scanner->closeheap(&ictx);
 
     return ictx.tinfo.count;
 }
diff --git a/src/utils.c b/src/utils.c
index ff5a88725..e8fa3d1be 100644
--- a/src/utils.c
+++ b/src/utils.c
@@ -176,6 +176,7 @@ time_value_to_internal(Datum time_val, Oid type)
     }
 
     elog(ERROR, "unkown time type oid '%d'", type);
+    return -1;
 }
 
 /* Make a RangeVar from a regclass Oid */
diff --git a/src/utils.h b/src/utils.h
index b120d830b..1d97b6a8d 100644
--- a/src/utils.h
+++ b/src/utils.h
@@ -4,12 +4,14 @@
 #include "fmgr.h"
 #include "nodes/primnodes.h"
 
-extern Datum pg_timestamp_to_microseconds(PG_FUNCTION_ARGS);
-extern Datum pg_microseconds_to_timestamp(PG_FUNCTION_ARGS);
-extern Datum pg_timestamp_to_unix_microseconds(PG_FUNCTION_ARGS);
-extern Datum pg_unix_microseconds_to_timestamp(PG_FUNCTION_ARGS);
+extern PGDLLEXPORT Datum pg_timestamp_to_microseconds(PG_FUNCTION_ARGS);
+extern PGDLLEXPORT Datum pg_microseconds_to_timestamp(PG_FUNCTION_ARGS);
+extern PGDLLEXPORT Datum pg_timestamp_to_unix_microseconds(PG_FUNCTION_ARGS);
+extern PGDLLEXPORT Datum pg_unix_microseconds_to_timestamp(PG_FUNCTION_ARGS);
 
-extern Datum timestamp_bucket(PG_FUNCTION_ARGS);
+extern PGDLLEXPORT Datum timestamp_bucket(PG_FUNCTION_ARGS);
+extern PGDLLEXPORT Datum timestamptz_bucket(PG_FUNCTION_ARGS);
+extern PGDLLEXPORT Datum date_bucket(PG_FUNCTION_ARGS);
 
 /*
  * Convert a column value into the internal time representation.
diff --git a/src/version.c b/src/version.c
index e99d0f5f0..a9ef61e42 100644
--- a/src/version.c
+++ b/src/version.c
@@ -4,6 +4,8 @@
 
 const char *git_commit = EXT_GIT_COMMIT;
 
+PGDLLEXPORT Datum get_git_commit(PG_FUNCTION_ARGS);
+
 PG_FUNCTION_INFO_V1(get_git_commit);
 
 Datum
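
For reference, the pattern this patch applies to every SQL-callable function can
be shown in isolation with a minimal sketch (the function example_add_one below
is hypothetical and not part of the TimescaleDB sources): an explicit prototype
carrying PGDLLEXPORT is placed before PG_FUNCTION_INFO_V1 so MSVC puts the
symbol in the DLL export table, and a dummy return follows elog(ERROR, ...) to
silence Visual Studio's missing-return warning. On non-Windows builds
PGDLLEXPORT expands to nothing, so the same source still compiles with GCC/Clang.

    #include <postgres.h>
    #include <fmgr.h>
    #include <limits.h>

    PG_MODULE_MAGIC;

    /* Prototype carries PGDLLEXPORT so the symbol is exported from the DLL. */
    PGDLLEXPORT Datum example_add_one(PG_FUNCTION_ARGS);
    PG_FUNCTION_INFO_V1(example_add_one);

    Datum
    example_add_one(PG_FUNCTION_ARGS)
    {
        int32 arg = PG_GETARG_INT32(0);

        switch (arg)
        {
            case INT_MAX:
                elog(ERROR, "cannot increment INT_MAX");
                /* elog(ERROR) never returns, but Visual Studio's compiler
                 * cannot see that, so the extra return avoids the "not all
                 * control paths return a value" warning -- the same pattern
                 * added above in extension.c and utils.c. */
                return (Datum) 0;
            default:
                PG_RETURN_INT32(arg + 1);
        }
    }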