* Portions Copyright (c) 1994, Regents of the University of California
*
* IDENTIFICATION
- * $PostgreSQL: pgsql/src/backend/access/gist/gistget.c,v 1.70 2008/04/10 22:25:25 tgl Exp $
+ * $PostgreSQL: pgsql/src/backend/access/gist/gistget.c,v 1.71 2008/04/13 19:18:14 tgl Exp $
*
*-------------------------------------------------------------------------
*/
static OffsetNumber gistfindnext(IndexScanDesc scan, OffsetNumber n,
ScanDirection dir);
-static int64 gistnext(IndexScanDesc scan, ScanDirection dir,
- ItemPointer tid, TIDBitmap *tbm,
- bool ignore_killed_tuples);
+static int64 gistnext(IndexScanDesc scan, ScanDirection dir, TIDBitmap *tbm);
static bool gistindex_keytest(IndexTuple tuple, IndexScanDesc scan,
OffsetNumber offset);
IndexScanDesc scan = (IndexScanDesc) PG_GETARG_POINTER(0);
ScanDirection dir = (ScanDirection) PG_GETARG_INT32(1);
GISTScanOpaque so;
- ItemPointerData tid;
bool res;
so = (GISTScanOpaque) scan->opaque;
killtuple(scan->indexRelation, so, &(so->curpos));
/*
- * Get the next tuple that matches the search key. If asked to skip killed
- * tuples, continue looping until we find a non-killed tuple that matches
- * the search key.
+ * Get the next tuple that matches the search key.
*/
- res = (gistnext(scan, dir, &tid, NULL, scan->ignore_killed_tuples) > 0) ? true : false;
+ res = (gistnext(scan, dir, NULL) > 0);
PG_RETURN_BOOL(res);
}
TIDBitmap *tbm = (TIDBitmap *) PG_GETARG_POINTER(1);
int64 ntids;
- ntids = gistnext(scan, ForwardScanDirection, NULL, tbm, false);
+ ntids = gistnext(scan, ForwardScanDirection, tbm);
PG_RETURN_INT64(ntids);
}
*
* This function is used by both gistgettuple and gistgetbitmap. When
* invoked from gistgettuple, tbm is null and the next matching tuple
- * is returned in *tid. When invoked from getbitmap, tid is null and
- * all matching tuples are added to tbm. In both cases, the function
- * result is the number of returned tuples.
+ * is returned in scan->xs_ctup.t_self. When invoked from gistgetbitmap,
+ * tbm is non-null and all matching tuples are added to tbm before
+ * returning. In both cases, the function result is the number of
+ * returned tuples.
+ *
+ * If the scan specifies to skip killed tuples, we continue looping until we
+ * find a non-killed tuple that matches the search key.
*/
static int64
-gistnext(IndexScanDesc scan, ScanDirection dir,
- ItemPointer tid, TIDBitmap *tbm,
- bool ignore_killed_tuples)
+gistnext(IndexScanDesc scan, ScanDirection dir, TIDBitmap *tbm)
{
Page p;
OffsetNumber n;
GISTScanOpaque so;
GISTSearchStack *stk;
IndexTuple it;
+ bool recheck;
GISTPageOpaque opaque;
bool resetoffset = false;
int64 ntids = 0;
if (XLogRecPtrIsInvalid(so->stack->lsn) || !XLByteEQ(so->stack->lsn, PageGetLSN(p)))
{
- /* page changed from last visit or visit first time , reset offset */
+ /* first visit or page changed from last visit, reset offset */
so->stack->lsn = PageGetLSN(p);
resetoffset = true;
for (;;)
{
n = gistfindnext(scan, n, dir);
+ /* XXX for the moment, treat all GIST operators as lossy */
+ recheck = true;
if (!OffsetNumberIsValid(n))
{
ItemPointerSet(&(so->curpos),
BufferGetBlockNumber(so->curbuf), n);
- if (!(ignore_killed_tuples && ItemIdIsDead(PageGetItemId(p, n))))
+ if (!(scan->ignore_killed_tuples &&
+ ItemIdIsDead(PageGetItemId(p, n))))
{
it = (IndexTuple) PageGetItem(p, PageGetItemId(p, n));
ntids++;
if (tbm != NULL)
- tbm_add_tuples(tbm, &it->t_tid, 1, false);
+ tbm_add_tuples(tbm, &it->t_tid, 1, recheck);
else
{
- *tid = scan->xs_ctup.t_self = it->t_tid;
+ scan->xs_ctup.t_self = it->t_tid;
+ scan->xs_recheck = recheck;
LockBuffer(so->curbuf, GIST_UNLOCK);
return ntids; /* always 1 */
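
For context (not part of this hunk): once gistnext sets scan->xs_recheck, it falls to the executor's index-scan node to re-verify the original quals against the heap tuple. A minimal sketch of that consumer side, modeled on the IndexNext() fetch loop in nodeIndexscan.c (the exact code there may differ):

	if (scandesc->xs_recheck)
	{
		/*
		 * The index reported a possibly-inexact match (lossy operator),
		 * so re-evaluate the original index quals against the heap tuple
		 * now sitting in the scan slot.
		 */
		econtext->ecxt_scantuple = slot;
		ResetExprContext(econtext);
		if (!ExecQual(node->indexqualorig, econtext, false))
			continue;		/* fails recheck, so ask the index for another */
	}
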
*
*
* IDENTIFICATION
- * $PostgreSQL: pgsql/src/backend/optimizer/plan/createplan.c,v 1.237 2008/01/01 19:45:50 momjian Exp $
+ * $PostgreSQL: pgsql/src/backend/optimizer/plan/createplan.c,v 1.238 2008/04/13 19:18:14 tgl Exp $
*
*-------------------------------------------------------------------------
*/
static SeqScan *create_seqscan_plan(PlannerInfo *root, Path *best_path,
List *tlist, List *scan_clauses);
static IndexScan *create_indexscan_plan(PlannerInfo *root, IndexPath *best_path,
- List *tlist, List *scan_clauses,
- List **nonlossy_clauses);
+ List *tlist, List *scan_clauses);
static BitmapHeapScan *create_bitmap_scan_plan(PlannerInfo *root,
BitmapHeapPath *best_path,
List *tlist, List *scan_clauses);
static Plan *create_bitmap_subplan(PlannerInfo *root, Path *bitmapqual,
- List **qual, List **indexqual);
+ List **qual);
static TidScan *create_tidscan_plan(PlannerInfo *root, TidPath *best_path,
List *tlist, List *scan_clauses);
static SubqueryScan *create_subqueryscan_plan(PlannerInfo *root, Path *best_path,
Plan *outer_plan, Plan *inner_plan);
static void fix_indexqual_references(List *indexquals, IndexPath *index_path,
List **fixed_indexquals,
- List **nonlossy_indexquals,
List **indexstrategy,
List **indexsubtype);
static Node *fix_indexqual_operand(Node *node, IndexOptInfo *index,
plan = (Plan *) create_indexscan_plan(root,
(IndexPath *) best_path,
tlist,
- scan_clauses,
- NULL);
+ scan_clauses);
break;
case T_BitmapHeapScan:
* The indexquals list of the path contains implicitly-ANDed qual conditions.
* The list can be empty --- then no index restrictions will be applied during
* the scan.
- *
- * If nonlossy_clauses isn't NULL, *nonlossy_clauses receives a list of the
- * nonlossy indexquals.
*/
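/*
 * Illustrative example (not part of the patch): for a query like
 *		WHERE x > 1 AND x < 10 AND f(y) = 0
 * planned with an index on x, best_path->indexquals is the implicitly-ANDed
 * list {x > 1, x < 10}, while the non-indexable clause f(y) = 0 is left to
 * the plan's qpqual.
 */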
static IndexScan *
create_indexscan_plan(PlannerInfo *root,
IndexPath *best_path,
List *tlist,
- List *scan_clauses,
- List **nonlossy_clauses)
+ List *scan_clauses)
{
List *indexquals = best_path->indexquals;
Index baserelid = best_path->path.parent->relid;
List *qpqual;
List *stripped_indexquals;
List *fixed_indexquals;
- List *nonlossy_indexquals;
List *indexstrategy;
List *indexsubtype;
ListCell *l;
/*
* The executor needs a copy with the indexkey on the left of each clause
* and with index attr numbers substituted for table ones. This pass also
- * gets strategy info and looks for "lossy" operators.
+ * gets strategy info.
*/
fix_indexqual_references(indexquals, best_path,
&fixed_indexquals,
- &nonlossy_indexquals,
&indexstrategy,
&indexsubtype);
- /* pass back nonlossy quals if caller wants 'em */
- if (nonlossy_clauses)
- *nonlossy_clauses = nonlossy_indexquals;
-
/*
* If this is an innerjoin scan, the indexclauses will contain join
* clauses that are not present in scan_clauses (since the passed-in value
* by the index. All the predicates in the indexquals will be checked
* (either by the index itself, or by nodeIndexscan.c), but if there are
* any "special" operators involved then they must be included in qpqual.
- * Also, any lossy index operators must be rechecked in the qpqual. The
- * upshot is that qpqual must contain scan_clauses minus whatever appears
- * in nonlossy_indexquals.
+ * The upshot is that qpqual must contain scan_clauses minus whatever
+ * appears in indexquals.
*
* In normal cases simple pointer equality checks will be enough to spot
* duplicate RestrictInfos, so we try that first. In some situations
Assert(IsA(rinfo, RestrictInfo));
if (rinfo->pseudoconstant)
continue; /* we may drop pseudoconstants here */
- if (list_member_ptr(nonlossy_indexquals, rinfo))
+ if (list_member_ptr(indexquals, rinfo))
continue;
if (!contain_mutable_functions((Node *) rinfo->clause))
{
List *clausel = list_make1(rinfo->clause);
- if (predicate_implied_by(clausel, nonlossy_indexquals))
+ if (predicate_implied_by(clausel, indexquals))
continue;
if (best_path->indexinfo->indpred)
{
Index baserelid = best_path->path.parent->relid;
Plan *bitmapqualplan;
List *bitmapqualorig;
- List *indexquals;
List *qpqual;
ListCell *l;
BitmapHeapScan *scan_plan;
Assert(baserelid > 0);
Assert(best_path->path.parent->rtekind == RTE_RELATION);
- /* Process the bitmapqual tree into a Plan tree and qual lists */
+ /* Process the bitmapqual tree into a Plan tree and qual list */
bitmapqualplan = create_bitmap_subplan(root, best_path->bitmapqual,
- &bitmapqualorig, &indexquals);
+ &bitmapqualorig);
/* Reduce RestrictInfo list to bare expressions; ignore pseudoconstants */
scan_clauses = extract_actual_clauses(scan_clauses, false);
* The qpqual list must contain all restrictions not automatically handled
* by the index. All the predicates in the indexquals will be checked
* (either by the index itself, or by nodeBitmapHeapscan.c), but if there
- * are any "special" or lossy operators involved then they must be added
- * to qpqual. The upshot is that qpqual must contain scan_clauses minus
- * whatever appears in indexquals.
+ * are any "special" operators involved then they must be added to qpqual.
+ * The upshot is that qpqual must contain scan_clauses minus whatever
+ * appears in bitmapqualorig.
*
* In normal cases simple equal() checks will be enough to spot duplicate
* clauses, so we try that first. In some situations (particularly with
*
* Unlike create_indexscan_plan(), we need take no special thought here
* for partial index predicates; this is because the predicate conditions
- * are already listed in bitmapqualorig and indexquals. Bitmap scans have
- * to do it that way because predicate conditions need to be rechecked if
- * the scan becomes lossy.
+ * are already listed in bitmapqualorig. Bitmap scans have to do it that
+ * way because predicate conditions need to be rechecked if the scan's
+ * bitmap becomes lossy.
*/
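	/*
	 * Illustrative example (not part of the patch): with
	 *		WHERE x < 42 AND y = 1
	 * and a bitmap scan driven by an index on x, bitmapqualorig is {x < 42},
	 * so the loop below keeps only "y = 1" in qpqual; "x < 42" is still
	 * rechecked from bitmapqualorig whenever the bitmap becomes lossy.
	 */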
qpqual = NIL;
foreach(l, scan_clauses)
{
Node *clause = (Node *) lfirst(l);
- if (list_member(indexquals, clause))
+ if (list_member(bitmapqualorig, clause))
continue;
if (!contain_mutable_functions(clause))
{
List *clausel = list_make1(clause);
- if (predicate_implied_by(clausel, indexquals))
+ if (predicate_implied_by(clausel, bitmapqualorig))
continue;
}
qpqual = lappend(qpqual, clause);
qpqual = order_qual_clauses(root, qpqual);
/*
- * When dealing with special or lossy operators, we will at this point
+ * When dealing with special operators, we will at this point
* have duplicate clauses in qpqual and bitmapqualorig. We may as well
* drop 'em from bitmapqualorig, since there's no point in making the
* tests twice.
/*
* Given a bitmapqual tree, generate the Plan tree that implements it
*
- * As byproducts, we also return in *qual and *indexqual the qual lists
- * (in implicit-AND form, without RestrictInfos) describing the original index
- * conditions and the generated indexqual conditions. The latter is made to
- * exclude lossy index operators. Both lists include partial-index predicates,
- * because we have to recheck predicates as well as index conditions if the
- * bitmap scan becomes lossy.
+ * As a byproduct, we also return in *qual a qual list (in implicit-AND
+ * form, without RestrictInfos) describing the generated indexqual
+ * conditions, as needed for rechecking heap tuples in lossy cases.
+ * This list also includes partial-index predicates, because we have to
+ * recheck predicates as well as index conditions if the scan's bitmap
+ * becomes lossy.
*
* Note: if you find yourself changing this, you probably need to change
* make_restrictinfo_from_bitmapqual too.
*/
static Plan *
create_bitmap_subplan(PlannerInfo *root, Path *bitmapqual,
- List **qual, List **indexqual)
+ List **qual)
{
Plan *plan;
BitmapAndPath *apath = (BitmapAndPath *) bitmapqual;
List *subplans = NIL;
List *subquals = NIL;
- List *subindexquals = NIL;
ListCell *l;
/*
{
Plan *subplan;
List *subqual;
- List *subindexqual;
subplan = create_bitmap_subplan(root, (Path *) lfirst(l),
- &subqual, &subindexqual);
+ &subqual);
subplans = lappend(subplans, subplan);
subquals = list_concat_unique(subquals, subqual);
- subindexquals = list_concat_unique(subindexquals, subindexqual);
}
plan = (Plan *) make_bitmap_and(subplans);
plan->startup_cost = apath->path.startup_cost;
clamp_row_est(apath->bitmapselectivity * apath->path.parent->tuples);
plan->plan_width = 0; /* meaningless */
*qual = subquals;
- *indexqual = subindexquals;
}
else if (IsA(bitmapqual, BitmapOrPath))
{
BitmapOrPath *opath = (BitmapOrPath *) bitmapqual;
List *subplans = NIL;
List *subquals = NIL;
- List *subindexquals = NIL;
bool const_true_subqual = false;
- bool const_true_subindexqual = false;
ListCell *l;
/*
{
Plan *subplan;
List *subqual;
- List *subindexqual;
subplan = create_bitmap_subplan(root, (Path *) lfirst(l),
- &subqual, &subindexqual);
+ &subqual);
subplans = lappend(subplans, subplan);
if (subqual == NIL)
const_true_subqual = true;
else if (!const_true_subqual)
subquals = lappend(subquals,
make_ands_explicit(subqual));
- if (subindexqual == NIL)
- const_true_subindexqual = true;
- else if (!const_true_subindexqual)
- subindexquals = lappend(subindexquals,
- make_ands_explicit(subindexqual));
}
/*
*qual = subquals;
else
*qual = list_make1(make_orclause(subquals));
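		/*
		 * Illustrative example (not part of the patch): for child paths with
		 * subquals {x = 1} and {y = 2}, *qual ends up as the single clause
		 * (x = 1 OR y = 2); if any child produced no quals at all, the OR is
		 * trivially true and *qual is left as NIL.
		 */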
- if (const_true_subindexqual)
- *indexqual = NIL;
- else if (list_length(subindexquals) <= 1)
- *indexqual = subindexquals;
- else
- *indexqual = list_make1(make_orclause(subindexquals));
}
else if (IsA(bitmapqual, IndexPath))
{
IndexPath *ipath = (IndexPath *) bitmapqual;
IndexScan *iscan;
- List *nonlossy_clauses;
ListCell *l;
/* Use the regular indexscan plan build machinery... */
- iscan = create_indexscan_plan(root, ipath, NIL, NIL,
- &nonlossy_clauses);
+ iscan = create_indexscan_plan(root, ipath, NIL, NIL);
/* then convert to a bitmap indexscan */
plan = (Plan *) make_bitmap_indexscan(iscan->scan.scanrelid,
iscan->indexid,
clamp_row_est(ipath->indexselectivity * ipath->path.parent->tuples);
plan->plan_width = 0; /* meaningless */
*qual = get_actual_clauses(ipath->indexclauses);
- *indexqual = get_actual_clauses(nonlossy_clauses);
foreach(l, ipath->indexinfo->indpred)
{
Expr *pred = (Expr *) lfirst(l);
* generating redundant conditions.
*/
if (!predicate_implied_by(list_make1(pred), ipath->indexclauses))
- {
*qual = lappend(*qual, pred);
- *indexqual = lappend(*indexqual, pred);
- }
}
}
else
/*
* fix_indexqual_references
* Adjust indexqual clauses to the form the executor's indexqual
- * machinery needs, and check for recheckable (lossy) index conditions.
+ * machinery needs.
*
- * We have five tasks here:
+ * We have four tasks here:
* * Remove RestrictInfo nodes from the input clauses.
* * Index keys must be represented by Var nodes with varattno set to the
* index's attribute number, not the attribute number in the original rel.
* left.
* * We must construct lists of operator strategy numbers and subtypes
* for the top-level operators of each index clause.
- * * We must detect any lossy index operators. The API is that we return
- * a list of the input clauses whose operators are NOT lossy.
*
* fixed_indexquals receives a modified copy of the indexquals list --- the
* original is not changed. Note also that the copy shares no substructure
* with the original; this is needed in case there is a subplan in it (we need
* two separate copies of the subplan tree, or things will go awry).
*
- * nonlossy_indexquals receives a list of the original input clauses (with
- * RestrictInfos) that contain non-lossy operators.
- *
* indexstrategy receives an integer list of strategy numbers.
* indexsubtype receives an OID list of strategy subtypes.
*/
static void
fix_indexqual_references(List *indexquals, IndexPath *index_path,
List **fixed_indexquals,
- List **nonlossy_indexquals,
List **indexstrategy,
List **indexsubtype)
{
ListCell *l;
*fixed_indexquals = NIL;
- *nonlossy_indexquals = NIL;
*indexstrategy = NIL;
*indexsubtype = NIL;
* For each qual clause, commute if needed to put the indexkey operand on
* the left, and then fix its varattno. (We do not need to change the
* other side of the clause.) Then determine the operator's strategy
- * number and subtype number, and check for lossy index behavior.
+ * number and subtype number.
*/
foreach(l, indexquals)
{
int stratno;
Oid stratlefttype;
Oid stratrighttype;
- bool recheck;
bool is_null_op = false;
Assert(IsA(rinfo, RestrictInfo));
/* IS NULL doesn't have a clause_op */
stratno = InvalidStrategy;
stratrighttype = InvalidOid;
- /* We assume it's non-lossy ... might need more work someday */
- recheck = false;
}
else
{
* to get its strategy number and the recheck indicator. This also
* double-checks that we found an operator matching the index.
*/
+ bool recheck;
+
get_op_opfamily_properties(clause_op, opfamily,
&stratno,
&stratlefttype,
*indexstrategy = lappend_int(*indexstrategy, stratno);
*indexsubtype = lappend_oid(*indexsubtype, stratrighttype);
-
- /* If it's not lossy, add to nonlossy_indexquals */
- if (!recheck)
- *nonlossy_indexquals = lappend(*nonlossy_indexquals, rinfo);
}
}
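
To make the surviving behaviour of fix_indexqual_references concrete, here is a hedged sketch of the strategy lookup for a simple btree qual. It is not part of the patch; it assumes clause_op and opfamily are obtained from the surrounding loop exactly as in the code above, and that the lookup still returns the recheck flag (which the added local variable suggests, even though the value is no longer used):

	int		stratno;
	Oid		stratlefttype;
	Oid		stratrighttype;
	bool	recheck;		/* still returned by the lookup, but now unused */

	/*
	 * For a qual originally written as "42 < x" on a btree index over x,
	 * the clause is first commuted to "x > 42" and the Var's varattno is
	 * replaced by the index column number; the lookup below then reports
	 * stratno = BTGreaterStrategyNumber.
	 */
	get_op_opfamily_properties(clause_op, opfamily,
							   &stratno,
							   &stratlefttype,
							   &stratrighttype,
							   &recheck);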