On Tue, Nov 27, 2018 at 10:24:52AM -0800, Andres Freund wrote:
> And pushed. Justin, thanks again for reporting the bug and then
> narrowing it down to a reproducible test case! Would've been much harder
> to diagnose without that.
>
> I'll look into your comments patch in a bit.
Thanks for implementing and patching it :)
And thanks for remembering the patch, and reminding me.
Here's an updated copy with additional hunks.
Justin
diff --git a/src/backend/executor/execExprInterp.c b/src/backend/executor/execExprInterp.c
index ec4a250..83e4e05 100644
--- a/src/backend/executor/execExprInterp.c
+++ b/src/backend/executor/execExprInterp.c
@@ -1873,7 +1873,7 @@ CheckOpSlotCompatibility(ExprEvalStep *op, TupleTableSlot *slot)
/*
* Should probably fixed at some point, but for now it's easier to allow
- * buffer and heap tuples to be used interchangably.
+ * buffer and heap tuples to be used interchangeably.
*/
if (slot->tts_ops == &TTSOpsBufferHeapTuple &&
op->d.fetch.kind == &TTSOpsHeapTuple)
diff --git a/src/backend/jit/llvm/llvmjit_deform.c b/src/backend/jit/llvm/llvmjit_deform.c
index 4111bf0..ba238f1 100644
--- a/src/backend/jit/llvm/llvmjit_deform.c
+++ b/src/backend/jit/llvm/llvmjit_deform.c
@@ -103,7 +103,7 @@ slot_compile_deform(LLVMJitContext *context, TupleDesc desc,
funcname = llvm_expand_funcname(context, "deform");
/*
- * Check which columns do have to exist, so we don't have to check the
+ * Check which columns have to exist, so we don't have to check the
* rows natts unnecessarily.
*/
for (attnum = 0; attnum < desc->natts; attnum++)
@@ -292,7 +292,7 @@ slot_compile_deform(LLVMJitContext *context, TupleDesc desc,
}
/*
- * Check if's guaranteed the all the desired attributes are available in
+ * Check if it's guaranteed that all the desired attributes are available in
* tuple. If so, we can start deforming. If not, need to make sure to
* fetch the missing columns.
*/
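
(Not part of the patch, just for context: the check described above amounts to
comparing the tuple's physical natts against the highest attribute number we
want. A rough sketch, with made-up variable names:)

    if (HeapTupleHeaderGetNatts(tuple->t_data) < highest_wanted_attnum)
    {
        /* some wanted columns are physically absent from the tuple, so
         * fill them in from the missing-attribute defaults */
        slot_getmissingattrs(slot, HeapTupleHeaderGetNatts(tuple->t_data),
                             highest_wanted_attnum);
    }
    /* ...then deform the attributes that are physically present */
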
@@ -377,7 +377,7 @@ slot_compile_deform(LLVMJitContext *context, TupleDesc desc,
/*
* If this is the first attribute, slot->tts_nvalid was 0. Therefore
- * reset offset to 0 to, it be from a previous execution.
+ * also reset offset to 0, as it may be left over from a previous execution.
*/
if (attnum == 0)
{
@@ -407,7 +407,7 @@ slot_compile_deform(LLVMJitContext *context, TupleDesc desc,
/*
* Check for nulls if necessary. No need to take missing attributes
- * into account, because in case they're present the heaptuple's natts
+ * into account, because if they're present, the heaptuple's natts
* would have indicated that a slot_getmissingattrs() is needed.
*/
if (!att->attnotnull)
@@ -494,13 +494,13 @@ slot_compile_deform(LLVMJitContext *context, TupleDesc desc,
(known_alignment < 0 || known_alignment != TYPEALIGN(alignto, known_alignment)))
{
/*
- * When accessing a varlena field we have to "peek" to see if we
+ * When accessing a varlena field, we have to "peek" to see if we
* are looking at a pad byte or the first byte of a 1-byte-header
* datum. A zero byte must be either a pad byte, or the first
- * byte of a correctly aligned 4-byte length word; in either case
+ * byte of a correctly aligned 4-byte length word; in either case,
* we can align safely. A non-zero byte must be either a 1-byte
* length word, or the first byte of a correctly aligned 4-byte
- * length word; in either case we need not align.
+ * length word; in either case, we need not align.
*/
if (att->attlen == -1)
{
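
(Aside, not part of the patch: this "peek" is the same trick the interpreted
path uses via att_align_pointer() in access/tupmacs.h. A minimal sketch, with
made-up variable names:)

    /* tupdata points at the tuple's data area, off is the current offset */
    if (att->attlen == -1 && tupdata[off] != 0)
        ;                           /* 1-byte header (or already aligned
                                     * 4-byte length word): don't align */
    else
        off = TYPEALIGN(alignto, off);  /* zero byte: pad byte or aligned
                                         * 4-byte length word, safe to align */
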
@@ -594,8 +594,8 @@ slot_compile_deform(LLVMJitContext *context, TupleDesc desc,
else if (att->attnotnull && attguaranteedalign && known_alignment >= 0)
{
/*
- * If the offset to the column was previously known a NOT NULL &
- * fixed width column guarantees that alignment is just the
+ * If the offset to the column was previously known, a NOT NULL &
+ * fixed-width column guarantees that alignment is just the
* previous alignment plus column width.
*/
Assert(att->attlen > 0);
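
(Worked example, for illustration only: if the previous column is known to end
at offset 10 and the next one is a NOT NULL int4 (attlen 4, 4-byte alignment),
the new known offset is TYPEALIGN(4, 10) + 4 = 12 + 4 = 16, a constant that can
be baked into the generated code.)
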
@@ -636,8 +636,8 @@ slot_compile_deform(LLVMJitContext *context, TupleDesc desc,
LLVMBuildGEP(b, v_tts_nulls, &l_attno, 1, ""));
/*
- * Store datum. For byval datums copy the value, extend to Datum's
- * width, and store. For byref types, store pointer to data.
+ * Store datum. For byval datums: copy the value, extend to Datum's
+ * width, and store. For byref types: store pointer to data.
*/
if (att->attbyval)
{
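
(Again, just for context and not part of the patch: this mirrors what
fetchatt() in access/tupmacs.h does on the interpreted path, roughly:)

    if (att->attbyval)
        /* e.g. a 4-byte integer: copy the value, widened to a Datum */
        values[attnum] = Int32GetDatum(*(int32 *) (tp + off));
    else
        /* byref: the Datum is just a pointer into the tuple's data area */
        values[attnum] = PointerGetDatum(tp + off);
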
diff --git a/src/backend/jit/llvm/llvmjit_inline.cpp b/src/backend/jit/llvm/llvmjit_inline.cpp
index b33a321..2ad29be 100644
--- a/src/backend/jit/llvm/llvmjit_inline.cpp
+++ b/src/backend/jit/llvm/llvmjit_inline.cpp
@@ -9,7 +9,7 @@
* for an external function is found - not guaranteed! - the index will then
* be used to judge their instruction count / inline worthiness. After doing
* so for all external functions, all the referenced functions (and
- * prerequisites) will be imorted.
+ * prerequisites) will be imported.
*
* Copyright (c) 2016-2018, PostgreSQL Global Development Group
*