git.ipfire.org Git - thirdparty/postgresql.git/commitdiff
Fix various issues with ALTER TEXT SEARCH CONFIGURATION
author Michael Paquier <michael@paquier.xyz>
Wed, 31 Jan 2024 04:16:50 +0000 (13:16 +0900)
committer Michael Paquier <michael@paquier.xyz>
Wed, 31 Jan 2024 04:16:50 +0000 (13:16 +0900)
This commit addresses a set of issues when changing token type mappings
in a text search configuration when using duplicated token names:
- ADD MAPPING would fail on insertion because of a constraint failure
after inserting the same mapping.
- ALTER MAPPING with an "overridden" configuration failed with "tuple
already updated by self" when the token mappings are removed.
- DROP MAPPING failed with "tuple already updated by self", like
previously, but in a different code path.

The code is refactored so the token names (with their numbers) are
handled as a List with unique members rather than an array with numbers,
ensuring that no duplicates mess up with the catalog inserts, updates
and deletes.  The list is generated by getTokenTypes(), with the same
error handling as previously while duplicated tokens are discarded from
the list used to work on the catalogs.

Regression tests are expanded to cover much more ground for the cases
fixed by this commit, as there was no coverage for the code touched in
this commit.  A bit more is done regarding the fact that a token name
not supported by a configuration's parser should result in an error even
if IF EXISTS is used in a DROP MAPPING clause.  This is implied in the
code but there was no coverage for that, and it was very easy to miss.

These issues exist since at least their introduction in core with
140d4ebcb46e, so backpatch all the way down.

Reported-by: Alexander Lakhin
Author: Tender Wang, Michael Paquier
Discussion: https://postgr.es/m/18310-1eb233c5908189c8@postgresql.org
Backpatch-through: 12

src/backend/commands/tsearchcmds.c
src/test/regress/expected/tsdicts.out
src/test/regress/sql/tsdicts.sql
src/tools/pgindent/typedefs.list

index 11a7f29eafa7bcc14b589cd0d20e81438f64ba1b..a9e92152e6d8a101970096d1c8f14e3c56491243 100644 (file)
 #include "utils/rel.h"
 #include "utils/syscache.h"
 
+/* Single entry of List returned by getTokenTypes() */
+typedef struct
+{
+       int                     num;                    /* token type number */
+       char       *name;                       /* token type name */
+} TSTokenTypeItem;
 
 static void MakeConfigurationMapping(AlterTSConfigurationStmt *stmt,
                                                                         HeapTuple tup, Relation relMap);
@@ -1227,22 +1233,45 @@ AlterTSConfiguration(AlterTSConfigurationStmt *stmt)
 }
 
 /*
- * Translate a list of token type names to an array of token type numbers
+ * Check whether a token type name is a member of a TSTokenTypeItem list.
  */
-static int *
+static bool
+tstoken_list_member(char *token_name, List *tokens)
+{
+       ListCell   *c;
+       bool            found = false;
+
+       foreach(c, tokens)
+       {
+               TSTokenTypeItem *ts = (TSTokenTypeItem *) lfirst(c);
+
+               if (strcmp(token_name, ts->name) == 0)
+               {
+                       found = true;
+                       break;
+               }
+       }
+
+       return found;
+}
+
+/*
+ * Translate a list of token type names to a list of unique TSTokenTypeItem.
+ *
+ * Duplicated entries are removed from tokennames.
+ */
+static List *
 getTokenTypes(Oid prsId, List *tokennames)
 {
        TSParserCacheEntry *prs = lookup_ts_parser_cache(prsId);
        LexDescr   *list;
-       int                *res,
-                               i,
-                               ntoken;
+       List       *result = NIL;
+       int                     ntoken;
        ListCell   *tn;
 
        ntoken = list_length(tokennames);
        if (ntoken == 0)
-               return NULL;
-       res = (int *) palloc(sizeof(int) * ntoken);
+               return NIL;
 
        if (!OidIsValid(prs->lextypeOid))
                elog(ERROR, "method lextype isn't defined for text search parser %u",
@@ -1252,19 +1281,26 @@ getTokenTypes(Oid prsId, List *tokennames)
        list = (LexDescr *) DatumGetPointer(OidFunctionCall1(prs->lextypeOid,
                                                                                                                 (Datum) 0));
 
-       i = 0;
        foreach(tn, tokennames)
        {
                Value      *val = (Value *) lfirst(tn);
                bool            found = false;
                int                     j;
 
+               /* Skip if this token is already in the result */
+               if (tstoken_list_member(strVal(val), result))
+                       continue;
+
                j = 0;
                while (list && list[j].lexid)
                {
                        if (strcmp(strVal(val), list[j].alias) == 0)
                        {
-                               res[i] = list[j].lexid;
+                               TSTokenTypeItem *ts = (TSTokenTypeItem *) palloc0(sizeof(TSTokenTypeItem));
+
+                               ts->num = list[j].lexid;
+                               ts->name = pstrdup(strVal(val));
+                               result = lappend(result, ts);
                                found = true;
                                break;
                        }
@@ -1275,10 +1311,9 @@ getTokenTypes(Oid prsId, List *tokennames)
                                        (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
                                         errmsg("token type \"%s\" does not exist",
                                                        strVal(val))));
-               i++;
        }
 
-       return res;
+       return result;
 }
 
 /*
@@ -1296,8 +1331,7 @@ MakeConfigurationMapping(AlterTSConfigurationStmt *stmt,
        int                     i;
        int                     j;
        Oid                     prsId;
-       int                *tokens,
-                               ntoken;
+       List       *tokens = NIL;
        Oid                *dictIds;
        int                     ndict;
        ListCell   *c;
@@ -1307,15 +1341,16 @@ MakeConfigurationMapping(AlterTSConfigurationStmt *stmt,
        prsId = tsform->cfgparser;
 
        tokens = getTokenTypes(prsId, stmt->tokentype);
-       ntoken = list_length(stmt->tokentype);
 
        if (stmt->override)
        {
                /*
                 * delete maps for tokens if they exist and command was ALTER
                 */
-               for (i = 0; i < ntoken; i++)
+               foreach(c, tokens)
                {
+                       TSTokenTypeItem *ts = (TSTokenTypeItem *) lfirst(c);
+
                        ScanKeyInit(&skey[0],
                                                Anum_pg_ts_config_map_mapcfg,
                                                BTEqualStrategyNumber, F_OIDEQ,
@@ -1323,7 +1358,7 @@ MakeConfigurationMapping(AlterTSConfigurationStmt *stmt,
                        ScanKeyInit(&skey[1],
                                                Anum_pg_ts_config_map_maptokentype,
                                                BTEqualStrategyNumber, F_INT4EQ,
-                                               Int32GetDatum(tokens[i]));
+                                               Int32GetDatum(ts->num));
 
                        scan = systable_beginscan(relMap, TSConfigMapIndexId, true,
                                                                          NULL, 2, skey);
@@ -1378,9 +1413,11 @@ MakeConfigurationMapping(AlterTSConfigurationStmt *stmt,
                        {
                                bool            tokmatch = false;
 
-                               for (j = 0; j < ntoken; j++)
+                               foreach(c, tokens)
                                {
-                                       if (cfgmap->maptokentype == tokens[j])
+                                       TSTokenTypeItem *ts = (TSTokenTypeItem *) lfirst(c);
+
+                                       if (cfgmap->maptokentype == ts->num)
                                        {
                                                tokmatch = true;
                                                break;
@@ -1421,8 +1458,10 @@ MakeConfigurationMapping(AlterTSConfigurationStmt *stmt,
                /*
                 * Insertion of new entries
                 */
-               for (i = 0; i < ntoken; i++)
+               foreach(c, tokens)
                {
+                       TSTokenTypeItem *ts = (TSTokenTypeItem *) lfirst(c);
+
                        for (j = 0; j < ndict; j++)
                        {
                                Datum           values[Natts_pg_ts_config_map];
@@ -1430,7 +1469,7 @@ MakeConfigurationMapping(AlterTSConfigurationStmt *stmt,
 
                                memset(nulls, false, sizeof(nulls));
                                values[Anum_pg_ts_config_map_mapcfg - 1] = ObjectIdGetDatum(cfgId);
-                               values[Anum_pg_ts_config_map_maptokentype - 1] = Int32GetDatum(tokens[i]);
+                               values[Anum_pg_ts_config_map_maptokentype - 1] = Int32GetDatum(ts->num);
                                values[Anum_pg_ts_config_map_mapseqno - 1] = Int32GetDatum(j + 1);
                                values[Anum_pg_ts_config_map_mapdict - 1] = ObjectIdGetDatum(dictIds[j]);
 
@@ -1457,9 +1496,8 @@ DropConfigurationMapping(AlterTSConfigurationStmt *stmt,
        ScanKeyData skey[2];
        SysScanDesc scan;
        HeapTuple       maptup;
-       int                     i;
        Oid                     prsId;
-       int                *tokens;
+       List       *tokens = NIL;
        ListCell   *c;
 
        tsform = (Form_pg_ts_config) GETSTRUCT(tup);
@@ -1468,10 +1506,9 @@ DropConfigurationMapping(AlterTSConfigurationStmt *stmt,
 
        tokens = getTokenTypes(prsId, stmt->tokentype);
 
-       i = 0;
-       foreach(c, stmt->tokentype)
+       foreach(c, tokens)
        {
-               Value      *val = (Value *) lfirst(c);
+               TSTokenTypeItem *ts = (TSTokenTypeItem *) lfirst(c);
                bool            found = false;
 
                ScanKeyInit(&skey[0],
@@ -1481,7 +1518,7 @@ DropConfigurationMapping(AlterTSConfigurationStmt *stmt,
                ScanKeyInit(&skey[1],
                                        Anum_pg_ts_config_map_maptokentype,
                                        BTEqualStrategyNumber, F_INT4EQ,
-                                       Int32GetDatum(tokens[i]));
+                                       Int32GetDatum(ts->num));
 
                scan = systable_beginscan(relMap, TSConfigMapIndexId, true,
                                                                  NULL, 2, skey);
@@ -1501,17 +1538,15 @@ DropConfigurationMapping(AlterTSConfigurationStmt *stmt,
                                ereport(ERROR,
                                                (errcode(ERRCODE_UNDEFINED_OBJECT),
                                                 errmsg("mapping for token type \"%s\" does not exist",
-                                                               strVal(val))));
+                                                               ts->name)));
                        }
                        else
                        {
                                ereport(NOTICE,
                                                (errmsg("mapping for token type \"%s\" does not exist, skipping",
-                                                               strVal(val))));
+                                                               ts->name)));
                        }
                }
-
-               i++;
        }
 
        EventTriggerCollectAlterTSConfig(stmt, cfgId, NULL, 0);
index 5a927be9485aa3cfe11c38e23f325054a9616213..91b5ea617f67d4908407e941ca6fb0a2528893df 100644 (file)
@@ -652,3 +652,37 @@ CREATE TEXT SEARCH DICTIONARY tsdict_case
        "AffFile" = ispell_sample
 );
 ERROR:  unrecognized Ispell parameter: "DictFile"
+-- Test grammar for configurations
+CREATE TEXT SEARCH CONFIGURATION dummy_tst (COPY=english);
+-- Overridden mapping change with duplicated tokens.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  ALTER MAPPING FOR word, word WITH ispell;
+-- Not a token supported by the configuration's parser, fails.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  DROP MAPPING FOR not_a_token, not_a_token;
+ERROR:  token type "not_a_token" does not exist
+-- Not a token supported by the configuration's parser, fails even
+-- with IF EXISTS.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  DROP MAPPING IF EXISTS FOR not_a_token, not_a_token;
+ERROR:  token type "not_a_token" does not exist
+-- Token supported by the configuration's parser, succeeds.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  DROP MAPPING FOR word, word;
+-- No mapping for token supported by the configuration's parser, fails.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  DROP MAPPING FOR word;
+ERROR:  mapping for token type "word" does not exist
+-- Token supported by the configuration's parser, cannot be found,
+-- succeeds with IF EXISTS.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  DROP MAPPING IF EXISTS FOR word, word;
+NOTICE:  mapping for token type "word" does not exist, skipping
+-- Re-add mapping, with duplicated tokens supported by the parser.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  ADD MAPPING FOR word, word WITH ispell;
+-- Not a token supported by the configuration's parser, fails.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  ADD MAPPING FOR not_a_token WITH ispell;
+ERROR:  token type "not_a_token" does not exist
+DROP TEXT SEARCH CONFIGURATION dummy_tst;
index 908e6755018f2639c2e934c40027c01a01785365..5a722bad96ab18f824d49879bfa0a7eace089975 100644 (file)
@@ -238,3 +238,33 @@ CREATE TEXT SEARCH DICTIONARY tsdict_case
        "DictFile" = ispell_sample,
        "AffFile" = ispell_sample
 );
+
+-- Test grammar for configurations
+CREATE TEXT SEARCH CONFIGURATION dummy_tst (COPY=english);
+-- Overridden mapping change with duplicated tokens.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  ALTER MAPPING FOR word, word WITH ispell;
+-- Not a token supported by the configuration's parser, fails.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  DROP MAPPING FOR not_a_token, not_a_token;
+-- Not a token supported by the configuration's parser, fails even
+-- with IF EXISTS.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  DROP MAPPING IF EXISTS FOR not_a_token, not_a_token;
+-- Token supported by the configuration's parser, succeeds.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  DROP MAPPING FOR word, word;
+-- No mapping for token supported by the configuration's parser, fails.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  DROP MAPPING FOR word;
+-- Token supported by the configuration's parser, cannot be found,
+-- succeeds with IF EXISTS.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  DROP MAPPING IF EXISTS FOR word, word;
+-- Re-add mapping, with duplicated tokens supported by the parser.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  ADD MAPPING FOR word, word WITH ispell;
+-- Not a token supported by the configuration's parser, fails.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  ADD MAPPING FOR not_a_token WITH ispell;
+DROP TEXT SEARCH CONFIGURATION dummy_tst;
index 19088ae859c975a65a4dfd4adebfc190f5c1bce2..66c28f0ff4adedfbe1c2cae23454ef2a46e24a1f 100644 (file)
@@ -2380,6 +2380,7 @@ TSQueryParserState
 TSQuerySign
 TSReadPointer
 TSTemplateInfo
+TSTokenTypeItem
 TSTokenTypeStorage
 TSVector
 TSVectorBuildState