#include "utils/rel.h"
#include "utils/syscache.h"
+/* Single entry of List returned by getTokenTypes() */
+typedef struct
+{
+ int num; /* token type number */
+ char *name; /* token type name */
+} TSTokenTypeItem;
static void MakeConfigurationMapping(AlterTSConfigurationStmt *stmt,
HeapTuple tup, Relation relMap);
}
/*
- * Translate a list of token type names to an array of token type numbers
+ * Check whether a token type name is a member of a TSTokenTypeItem list.
*/
-static int *
+static bool
+tstoken_list_member(char *token_name, List *tokens)
+{
+ ListCell *c;
+ bool found = false;
+
+ foreach(c, tokens)
+ {
+ TSTokenTypeItem *ts = (TSTokenTypeItem *) lfirst(c);
+
+ if (strcmp(token_name, ts->name) == 0)
+ {
+ found = true;
+ break;
+ }
+ }
+
+ return found;
+}
+
+/*
+ * Translate a list of token type names to a list of unique TSTokenTypeItem.
+ *
+ * Duplicate entries are removed from tokennames.
+ */
+static List *
getTokenTypes(Oid prsId, List *tokennames)
{
TSParserCacheEntry *prs = lookup_ts_parser_cache(prsId);
LexDescr *list;
- int *res,
- i,
- ntoken;
+ List *result = NIL;
+ int ntoken;
ListCell *tn;
ntoken = list_length(tokennames);
if (ntoken == 0)
- return NULL;
- res = (int *) palloc(sizeof(int) * ntoken);
+ return NIL;
if (!OidIsValid(prs->lextypeOid))
elog(ERROR, "method lextype isn't defined for text search parser %u",
list = (LexDescr *) DatumGetPointer(OidFunctionCall1(prs->lextypeOid,
(Datum) 0));
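+ /* list is an array of LexDescr entries, terminated by one with lexid == 0 */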
- i = 0;
foreach(tn, tokennames)
{
String *val = lfirst_node(String, tn);
bool found = false;
int j;
+ /* Skip if this token is already in the result */
+ if (tstoken_list_member(strVal(val), result))
+ continue;
+
j = 0;
while (list && list[j].lexid)
{
if (strcmp(strVal(val), list[j].alias) == 0)
{
- res[i] = list[j].lexid;
+ TSTokenTypeItem *ts = (TSTokenTypeItem *) palloc0(sizeof(TSTokenTypeItem));
+
+ ts->num = list[j].lexid;
+ ts->name = pstrdup(strVal(val));
+ result = lappend(result, ts);
found = true;
break;
}
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
errmsg("token type \"%s\" does not exist",
strVal(val))));
- i++;
}
- return res;
+ return result;
}
/*
int i;
int j;
Oid prsId;
- int *tokens,
- ntoken;
+ List *tokens = NIL;
+ int ntoken;
Oid *dictIds;
int ndict;
ListCell *c;
prsId = tsform->cfgparser;
tokens = getTokenTypes(prsId, stmt->tokentype);
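+ /* tokens holds unique token types, so each mapping is processed only once */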
- ntoken = list_length(stmt->tokentype);
+ ntoken = list_length(tokens);
if (stmt->override)
{
/*
* delete maps for tokens if they exist and command was ALTER
*/
- for (i = 0; i < ntoken; i++)
+ foreach(c, tokens)
{
+ TSTokenTypeItem *ts = (TSTokenTypeItem *) lfirst(c);
+
ScanKeyInit(&skey[0],
Anum_pg_ts_config_map_mapcfg,
BTEqualStrategyNumber, F_OIDEQ,
ScanKeyInit(&skey[1],
Anum_pg_ts_config_map_maptokentype,
BTEqualStrategyNumber, F_INT4EQ,
- Int32GetDatum(tokens[i]));
+ Int32GetDatum(ts->num));
scan = systable_beginscan(relMap, TSConfigMapIndexId, true,
NULL, 2, skey);
{
bool tokmatch = false;
- for (j = 0; j < ntoken; j++)
+ foreach(c, tokens)
{
- if (cfgmap->maptokentype == tokens[j])
+ TSTokenTypeItem *ts = (TSTokenTypeItem *) lfirst(c);
+
+ if (cfgmap->maptokentype == ts->num)
{
tokmatch = true;
break;
/*
* Insertion of new entries
*/
- for (i = 0; i < ntoken; i++)
+ foreach(c, tokens)
{
+ TSTokenTypeItem *ts = (TSTokenTypeItem *) lfirst(c);
+
for (j = 0; j < ndict; j++)
{
ExecClearTuple(slot[slotCount]);
slot[slotCount]->tts_tupleDescriptor->natts * sizeof(bool));
slot[slotCount]->tts_values[Anum_pg_ts_config_map_mapcfg - 1] = ObjectIdGetDatum(cfgId);
- slot[slotCount]->tts_values[Anum_pg_ts_config_map_maptokentype - 1] = Int32GetDatum(tokens[i]);
+ slot[slotCount]->tts_values[Anum_pg_ts_config_map_maptokentype - 1] = Int32GetDatum(ts->num);
slot[slotCount]->tts_values[Anum_pg_ts_config_map_mapseqno - 1] = Int32GetDatum(j + 1);
slot[slotCount]->tts_values[Anum_pg_ts_config_map_mapdict - 1] = ObjectIdGetDatum(dictIds[j]);
ScanKeyData skey[2];
SysScanDesc scan;
HeapTuple maptup;
- int i;
Oid prsId;
- int *tokens;
+ List *tokens = NIL;
ListCell *c;
tsform = (Form_pg_ts_config) GETSTRUCT(tup);
tokens = getTokenTypes(prsId, stmt->tokentype);
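+ /* tokens holds unique token types, so each is reported at most once below */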
- i = 0;
- foreach(c, stmt->tokentype)
+ foreach(c, tokens)
{
- String *val = lfirst_node(String, c);
+ TSTokenTypeItem *ts = (TSTokenTypeItem *) lfirst(c);
bool found = false;
ScanKeyInit(&skey[0],
ScanKeyInit(&skey[1],
Anum_pg_ts_config_map_maptokentype,
BTEqualStrategyNumber, F_INT4EQ,
- Int32GetDatum(tokens[i]));
+ Int32GetDatum(ts->num));
scan = systable_beginscan(relMap, TSConfigMapIndexId, true,
NULL, 2, skey);
ereport(ERROR,
(errcode(ERRCODE_UNDEFINED_OBJECT),
errmsg("mapping for token type \"%s\" does not exist",
- strVal(val))));
+ ts->name)));
}
else
{
ereport(NOTICE,
(errmsg("mapping for token type \"%s\" does not exist, skipping",
- strVal(val))));
+ ts->name)));
}
}
-
- i++;
}
EventTriggerCollectAlterTSConfig(stmt, cfgId, NULL, 0);
"AffFile" = ispell_sample
);
ERROR: unrecognized Ispell parameter: "DictFile"
+-- Test grammar for configurations
+CREATE TEXT SEARCH CONFIGURATION dummy_tst (COPY=english);
+-- Overridden mapping change with duplicated tokens.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+ ALTER MAPPING FOR word, word WITH ispell;
+-- Not a token supported by the configuration's parser, fails.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+ DROP MAPPING FOR not_a_token, not_a_token;
+ERROR: token type "not_a_token" does not exist
+-- Not a token supported by the configuration's parser, fails even
+-- with IF EXISTS.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+ DROP MAPPING IF EXISTS FOR not_a_token, not_a_token;
+ERROR: token type "not_a_token" does not exist
+-- Token supported by the configuration's parser, succeeds.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+ DROP MAPPING FOR word, word;
+-- No mapping for token supported by the configuration's parser, fails.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+ DROP MAPPING FOR word;
+ERROR: mapping for token type "word" does not exist
+-- Token supported by the configuration's parser but with no mapping,
+-- succeeds with IF EXISTS.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+ DROP MAPPING IF EXISTS FOR word, word;
+NOTICE: mapping for token type "word" does not exist, skipping
+-- Re-add mapping, with duplicated tokens supported by the parser.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+ ADD MAPPING FOR word, word WITH ispell;
+-- Not a token supported by the configuration's parser, fails.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+ ADD MAPPING FOR not_a_token WITH ispell;
+ERROR: token type "not_a_token" does not exist
+DROP TEXT SEARCH CONFIGURATION dummy_tst;
"DictFile" = ispell_sample,
"AffFile" = ispell_sample
);
+
+-- Test grammar for configurations
+CREATE TEXT SEARCH CONFIGURATION dummy_tst (COPY=english);
+-- Overridden mapping change with duplicated tokens.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+ ALTER MAPPING FOR word, word WITH ispell;
+-- Not a token supported by the configuration's parser, fails.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+ DROP MAPPING FOR not_a_token, not_a_token;
+-- Not a token supported by the configuration's parser, fails even
+-- with IF EXISTS.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+ DROP MAPPING IF EXISTS FOR not_a_token, not_a_token;
+-- Token supported by the configuration's parser, succeeds.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+ DROP MAPPING FOR word, word;
+-- No mapping for token supported by the configuration's parser, fails.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+ DROP MAPPING FOR word;
+-- Token supported by the configuration's parser but with no mapping,
+-- succeeds with IF EXISTS.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+ DROP MAPPING IF EXISTS FOR word, word;
+-- Re-add mapping, with duplicated tokens supported by the parser.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+ ADD MAPPING FOR word, word WITH ispell;
+-- Not a token supported by the configuration's parser, fails.
+ALTER TEXT SEARCH CONFIGURATION dummy_tst
+ ADD MAPPING FOR not_a_token WITH ispell;
+DROP TEXT SEARCH CONFIGURATION dummy_tst;