diff --git a/src/backend/commands/tsearchcmds.c b/src/backend/commands/tsearchcmds.c
index e06fb32b3d..d3ce548ea1 100644
--- a/src/backend/commands/tsearchcmds.c
+++ b/src/backend/commands/tsearchcmds.c
@@ -48,6 +48,12 @@
 #include "utils/rel.h"
 #include "utils/syscache.h"
 
+/* Single entry of List returned by getTokenTypes() */
+typedef struct
+{
+	int			num;			/* token type number */
+	char	   *name;			/* token type name */
+} TSTokenTypeItem;
 
 static void MakeConfigurationMapping(AlterTSConfigurationStmt *stmt,
 									 HeapTuple tup, Relation relMap);
@@ -1151,22 +1157,45 @@ AlterTSConfiguration(AlterTSConfigurationStmt *stmt)
 }
 
 /*
- * Translate a list of token type names to an array of token type numbers
+ * Check whether a token type name is a member of a TSTokenTypeItem list.
  */
-static int *
+static bool
+tstoken_list_member(char *token_name, List *tokens)
+{
+	ListCell   *c;
+	bool		found = false;
+
+	foreach(c, tokens)
+	{
+		TSTokenTypeItem *ts = (TSTokenTypeItem *) lfirst(c);
+
+		if (strcmp(token_name, ts->name) == 0)
+		{
+			found = true;
+			break;
+		}
+	}
+
+	return found;
+}
+
+/*
+ * Translate a list of token type names to a list of unique TSTokenTypeItem.
+ *
+ * Duplicated entries are removed from tokennames.
+ */
+static List *
 getTokenTypes(Oid prsId, List *tokennames)
 {
 	TSParserCacheEntry *prs = lookup_ts_parser_cache(prsId);
 	LexDescr   *list;
-	int		   *res,
-				i,
-				ntoken;
+	List	   *result = NIL;
+	int			ntoken;
 	ListCell   *tn;
 
 	ntoken = list_length(tokennames);
 	if (ntoken == 0)
-		return NULL;
-	res = (int *) palloc(sizeof(int) * ntoken);
+		return NIL;
 
 	if (!OidIsValid(prs->lextypeOid))
 		elog(ERROR, "method lextype isn't defined for text search parser %u",
@@ -1176,19 +1205,26 @@ getTokenTypes(Oid prsId, List *tokennames)
 
 	list = (LexDescr *) DatumGetPointer(OidFunctionCall1(prs->lextypeOid,
 														 (Datum) 0));
-	i = 0;
 	foreach(tn, tokennames)
 	{
 		Value	   *val = (Value *) lfirst(tn);
 		bool		found = false;
 		int			j;
 
+		/* Skip if this token is already in the result */
+		if (tstoken_list_member(strVal(val), result))
+			continue;
+
 		j = 0;
 		while (list && list[j].lexid)
 		{
 			if (strcmp(strVal(val), list[j].alias) == 0)
 			{
-				res[i] = list[j].lexid;
+				TSTokenTypeItem *ts = (TSTokenTypeItem *) palloc0(sizeof(TSTokenTypeItem));
+
+				ts->num = list[j].lexid;
+				ts->name = pstrdup(strVal(val));
+				result = lappend(result, ts);
 				found = true;
 				break;
 			}
@@ -1199,10 +1235,9 @@
 					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
 					 errmsg("token type \"%s\" does not exist",
 							strVal(val))));
-		i++;
 	}
 
-	return res;
+	return result;
 }
 
 /*
@@ -1220,8 +1255,7 @@ MakeConfigurationMapping(AlterTSConfigurationStmt *stmt,
 	int			i;
 	int			j;
 	Oid			prsId;
-	int		   *tokens,
-				ntoken;
+	List	   *tokens = NIL;
 	Oid		   *dictIds;
 	int			ndict;
 	ListCell   *c;
@@ -1231,15 +1265,16 @@ MakeConfigurationMapping(AlterTSConfigurationStmt *stmt,
 	prsId = tsform->cfgparser;
 
 	tokens = getTokenTypes(prsId, stmt->tokentype);
-	ntoken = list_length(stmt->tokentype);
 
 	if (stmt->override)
 	{
 		/*
 		 * delete maps for tokens if they exist and command was ALTER
 		 */
-		for (i = 0; i < ntoken; i++)
+		foreach(c, tokens)
 		{
+			TSTokenTypeItem *ts = (TSTokenTypeItem *) lfirst(c);
+
 			ScanKeyInit(&skey[0],
 						Anum_pg_ts_config_map_mapcfg,
 						BTEqualStrategyNumber, F_OIDEQ,
@@ -1247,7 +1282,7 @@ MakeConfigurationMapping(AlterTSConfigurationStmt *stmt,
 			ScanKeyInit(&skey[1],
 						Anum_pg_ts_config_map_maptokentype,
 						BTEqualStrategyNumber, F_INT4EQ,
-						Int32GetDatum(tokens[i]));
+						Int32GetDatum(ts->num));
 
 			scan = systable_beginscan(relMap, TSConfigMapIndexId, true,
 									  NULL, 2, skey);
@@ -1302,9 +1337,11 @@ MakeConfigurationMapping(AlterTSConfigurationStmt *stmt,
 				{
 					bool		tokmatch = false;
 
-					for (j = 0; j < ntoken; j++)
+					foreach(c, tokens)
 					{
-						if (cfgmap->maptokentype == tokens[j])
+						TSTokenTypeItem *ts = (TSTokenTypeItem *) lfirst(c);
+
+						if (cfgmap->maptokentype == ts->num)
 						{
 							tokmatch = true;
 							break;
@@ -1345,8 +1382,10 @@ MakeConfigurationMapping(AlterTSConfigurationStmt *stmt,
 	/*
 	 * Insertion of new entries
 	 */
-	for (i = 0; i < ntoken; i++)
+	foreach(c, tokens)
 	{
+		TSTokenTypeItem *ts = (TSTokenTypeItem *) lfirst(c);
+
 		for (j = 0; j < ndict; j++)
 		{
 			Datum		values[Natts_pg_ts_config_map];
@@ -1354,7 +1393,7 @@ MakeConfigurationMapping(AlterTSConfigurationStmt *stmt,
 
 			memset(nulls, false, sizeof(nulls));
 			values[Anum_pg_ts_config_map_mapcfg - 1] = ObjectIdGetDatum(cfgId);
-			values[Anum_pg_ts_config_map_maptokentype - 1] = Int32GetDatum(tokens[i]);
+			values[Anum_pg_ts_config_map_maptokentype - 1] = Int32GetDatum(ts->num);
 			values[Anum_pg_ts_config_map_mapseqno - 1] = Int32GetDatum(j + 1);
 			values[Anum_pg_ts_config_map_mapdict - 1] = ObjectIdGetDatum(dictIds[j]);
 
@@ -1381,9 +1420,8 @@ DropConfigurationMapping(AlterTSConfigurationStmt *stmt,
 	ScanKeyData skey[2];
 	SysScanDesc scan;
 	HeapTuple	maptup;
-	int			i;
 	Oid			prsId;
-	int		   *tokens;
+	List	   *tokens = NIL;
 	ListCell   *c;
 
 	tsform = (Form_pg_ts_config) GETSTRUCT(tup);
@@ -1392,10 +1430,9 @@ DropConfigurationMapping(AlterTSConfigurationStmt *stmt,
 
 	tokens = getTokenTypes(prsId, stmt->tokentype);
 
-	i = 0;
-	foreach(c, stmt->tokentype)
+	foreach(c, tokens)
 	{
-		Value	   *val = (Value *) lfirst(c);
+		TSTokenTypeItem *ts = (TSTokenTypeItem *) lfirst(c);
 		bool		found = false;
 
 		ScanKeyInit(&skey[0],
@@ -1405,7 +1442,7 @@ DropConfigurationMapping(AlterTSConfigurationStmt *stmt,
 		ScanKeyInit(&skey[1],
 					Anum_pg_ts_config_map_maptokentype,
 					BTEqualStrategyNumber, F_INT4EQ,
-					Int32GetDatum(tokens[i]));
+					Int32GetDatum(ts->num));
 
 		scan = systable_beginscan(relMap, TSConfigMapIndexId, true,
 								  NULL, 2, skey);
@@ -1425,17 +1462,15 @@ DropConfigurationMapping(AlterTSConfigurationStmt *stmt,
 				ereport(ERROR,
 						(errcode(ERRCODE_UNDEFINED_OBJECT),
 						 errmsg("mapping for token type \"%s\" does not exist",
-								strVal(val))));
+								ts->name)));
 			}
 			else
 			{
 				ereport(NOTICE,
 						(errmsg("mapping for token type \"%s\" does not exist, skipping",
-								strVal(val))));
+								ts->name)));
 			}
 		}
-
-		i++;
 	}
 
 	EventTriggerCollectAlterTSConfig(stmt, cfgId, NULL, 0);
diff --git a/src/test/regress/expected/tsdicts.out b/src/test/regress/expected/tsdicts.out
index c804293142..4eff85da79 100644
--- a/src/test/regress/expected/tsdicts.out
+++ b/src/test/regress/expected/tsdicts.out
@@ -687,3 +687,37 @@ CREATE TEXT SEARCH DICTIONARY tsdict_case
 	"AffFile" = ispell_sample
 );
 ERROR:  unrecognized Ispell parameter: "DictFile"
+-- Test grammar for configurations
+CREATE TEXT SEARCH CONFIGURATION dummy_tst (COPY=english);
+-- Overridden mapping change with duplicated tokens.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  ALTER MAPPING FOR word, word WITH ispell;
+-- Not a token supported by the configuration's parser, fails.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  DROP MAPPING FOR not_a_token, not_a_token;
+ERROR:  token type "not_a_token" does not exist
+-- Not a token supported by the configuration's parser, fails even
+-- with IF EXISTS.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  DROP MAPPING IF EXISTS FOR not_a_token, not_a_token;
+ERROR:  token type "not_a_token" does not exist
+-- Token supported by the configuration's parser, succeeds.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  DROP MAPPING FOR word, word;
+-- No mapping for token supported by the configuration's parser, fails.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  DROP MAPPING FOR word;
+ERROR:  mapping for token type "word" does not exist
+-- Token supported by the configuration's parser, cannot be found,
+-- succeeds with IF EXISTS.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  DROP MAPPING IF EXISTS FOR word, word;
+NOTICE:  mapping for token type "word" does not exist, skipping
+-- Re-add mapping, with duplicated tokens supported by the parser.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  ADD MAPPING FOR word, word WITH ispell;
+-- Not a token supported by the configuration's parser, fails.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  ADD MAPPING FOR not_a_token WITH ispell;
+ERROR:  token type "not_a_token" does not exist
+DROP TEXT SEARCH CONFIGURATION dummy_tst;
diff --git a/src/test/regress/sql/tsdicts.sql b/src/test/regress/sql/tsdicts.sql
index ddc6c7f445..6a2b00369c 100644
--- a/src/test/regress/sql/tsdicts.sql
+++ b/src/test/regress/sql/tsdicts.sql
@@ -251,3 +251,33 @@ CREATE TEXT SEARCH DICTIONARY tsdict_case
 	"DictFile" = ispell_sample,
 	"AffFile" = ispell_sample
 );
+
+-- Test grammar for configurations
+CREATE TEXT SEARCH CONFIGURATION dummy_tst (COPY=english);
+-- Overridden mapping change with duplicated tokens.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  ALTER MAPPING FOR word, word WITH ispell;
+-- Not a token supported by the configuration's parser, fails.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  DROP MAPPING FOR not_a_token, not_a_token;
+-- Not a token supported by the configuration's parser, fails even
+-- with IF EXISTS.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  DROP MAPPING IF EXISTS FOR not_a_token, not_a_token;
+-- Token supported by the configuration's parser, succeeds.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  DROP MAPPING FOR word, word;
+-- No mapping for token supported by the configuration's parser, fails.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  DROP MAPPING FOR word;
+-- Token supported by the configuration's parser, cannot be found,
+-- succeeds with IF EXISTS.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  DROP MAPPING IF EXISTS FOR word, word;
+-- Re-add mapping, with duplicated tokens supported by the parser.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  ADD MAPPING FOR word, word WITH ispell;
+-- Not a token supported by the configuration's parser, fails.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+  ADD MAPPING FOR not_a_token WITH ispell;
+DROP TEXT SEARCH CONFIGURATION dummy_tst;
diff --git a/src/tools/pgindent/typedefs.list b/src/tools/pgindent/typedefs.list
index 2ba43907bc..fb59bd3be7 100644
--- a/src/tools/pgindent/typedefs.list
+++ b/src/tools/pgindent/typedefs.list
@@ -2594,6 +2594,7 @@ TSQuerySign
 TSReadPointer
 TSTemplateInfo
 TSTernaryValue
+TSTokenTypeItem
 TSTokenTypeStorage
 TSVector
 TSVectorBuildState