Changes so that fts5 can handle tokens with embedded '\0' bytes.

FossilOrigin-Name: c027c092c4af53bd6ae3cc6e2b4439167d9eeb0f9de549b6a2c2a72a67ee886c
dan 2023-09-30 18:13:35 +00:00
parent f046b82324
commit a35ae44150
7 changed files with 345 additions and 43 deletions
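For context on the change described above: fts5's in-memory hash previously treated terms as nul-terminated C strings, so any token containing an embedded 0x00 byte was silently truncated at that byte. A minimal stand-alone C sketch of the failure mode (illustrative only, not part of the patch):

#include <stdio.h>
#include <string.h>

int main(void){
  /* An 11-byte token of the form "folded-text \0 original-text" - the kind
  ** of token emitted by the "origintext" test tokenizer added below. */
  static const char aToken[] = "hello\0Hello";
  int nToken = (int)sizeof(aToken) - 1;

  /* strlen() stops at the embedded 0x00 and sees only 5 of the 11 bytes. */
  printf("strlen(aToken) = %d\n", (int)strlen(aToken));        /* 5  */
  printf("nToken         = %d\n", nToken);                     /* 11 */

  /* Carrying an explicit byte count, as the patched hash code does,
  ** preserves the whole token. */
  printf("full match     = %d\n",
         memcmp(aToken, "hello\0Hello", nToken)==0);           /* 1 */
  return 0;
}

The diffs below replace the remaining strlen()-based term-length calculations in fts5_hash.c and fts5_index.c with explicit byte counts carried alongside each term pointer.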

ext/fts5/fts5Int.h

@ -645,6 +645,7 @@ void sqlite3Fts5HashScanNext(Fts5Hash*);
int sqlite3Fts5HashScanEof(Fts5Hash*);
void sqlite3Fts5HashScanEntry(Fts5Hash *,
const char **pzTerm, /* OUT: term (nul-terminated) */
int *pnTerm, /* OUT: Size of term in bytes */
const u8 **ppDoclist, /* OUT: pointer to doclist */
int *pnDoclist /* OUT: size of doclist in bytes */
);

ext/fts5/fts5_hash.c

@ -36,10 +36,15 @@ struct Fts5Hash {
/*
** Each entry in the hash table is represented by an object of the
** following type. Each object, its key (a nul-terminated string) and
** its current data are stored in a single memory allocation. The
** key immediately follows the object in memory. The position list
** data immediately follows the key data in memory.
** following type. Each object, its key, and its current data are stored
** in a single memory allocation. The key immediately follows the object
** in memory. The position list data immediately follows the key data
** in memory.
**
** The key is Fts5HashEntry.nKey bytes in size. It consists of a single
** byte identifying the index (either the main term index or a prefix-index),
** followed by the term data. For example: "0token". There is no
** nul-terminator - in this case nKey=6.
**
** The data that follows the key is in a similar, but not identical format
** to the doclist data stored in the database. It is:
@ -174,8 +179,7 @@ static int fts5HashResize(Fts5Hash *pHash){
unsigned int iHash;
Fts5HashEntry *p = apOld[i];
apOld[i] = p->pHashNext;
iHash = fts5HashKey(nNew, (u8*)fts5EntryKey(p),
(int)strlen(fts5EntryKey(p)));
iHash = fts5HashKey(nNew, (u8*)fts5EntryKey(p), p->nKey);
p->pHashNext = apNew[iHash];
apNew[iHash] = p;
}
@ -259,7 +263,7 @@ int sqlite3Fts5HashWrite(
for(p=pHash->aSlot[iHash]; p; p=p->pHashNext){
char *zKey = fts5EntryKey(p);
if( zKey[0]==bByte
&& p->nKey==nToken
&& p->nKey==nToken+1
&& memcmp(&zKey[1], pToken, nToken)==0
){
break;
@ -289,9 +293,9 @@ int sqlite3Fts5HashWrite(
zKey[0] = bByte;
memcpy(&zKey[1], pToken, nToken);
assert( iHash==fts5HashKey(pHash->nSlot, (u8*)zKey, nToken+1) );
p->nKey = nToken;
p->nKey = nToken+1;
zKey[nToken+1] = '\0';
p->nData = nToken+1 + 1 + sizeof(Fts5HashEntry);
p->nData = nToken+1 + sizeof(Fts5HashEntry);
p->pHashNext = pHash->aSlot[iHash];
pHash->aSlot[iHash] = p;
pHash->nEntry++;
@ -408,12 +412,17 @@ static Fts5HashEntry *fts5HashEntryMerge(
*ppOut = p1;
p1 = 0;
}else{
int i = 0;
char *zKey1 = fts5EntryKey(p1);
char *zKey2 = fts5EntryKey(p2);
while( zKey1[i]==zKey2[i] ) i++;
int nMin = MIN(p1->nKey, p2->nKey);
if( ((u8)zKey1[i])>((u8)zKey2[i]) ){
int cmp = memcmp(zKey1, zKey2, nMin);
if( cmp==0 ){
cmp = p1->nKey - p2->nKey;
}
assert( cmp!=0 );
if( cmp>0 ){
/* p2 is smaller */
*ppOut = p2;
ppOut = &p2->pScanNext;
@ -457,7 +466,7 @@ static int fts5HashEntrySort(
Fts5HashEntry *pIter;
for(pIter=pHash->aSlot[iSlot]; pIter; pIter=pIter->pHashNext){
if( pTerm==0
|| (pIter->nKey+1>=nTerm && 0==memcmp(fts5EntryKey(pIter), pTerm, nTerm))
|| (pIter->nKey>=nTerm && 0==memcmp(fts5EntryKey(pIter), pTerm, nTerm))
){
Fts5HashEntry *pEntry = pIter;
pEntry->pScanNext = 0;
@ -496,12 +505,11 @@ int sqlite3Fts5HashQuery(
for(p=pHash->aSlot[iHash]; p; p=p->pHashNext){
zKey = fts5EntryKey(p);
assert( p->nKey+1==(int)strlen(zKey) );
if( nTerm==p->nKey+1 && memcmp(zKey, pTerm, nTerm)==0 ) break;
if( nTerm==p->nKey && memcmp(zKey, pTerm, nTerm)==0 ) break;
}
if( p ){
int nHashPre = sizeof(Fts5HashEntry) + nTerm + 1;
int nHashPre = sizeof(Fts5HashEntry) + nTerm;
int nList = p->nData - nHashPre;
u8 *pRet = (u8*)(*ppOut = sqlite3_malloc64(nPre + nList + 10));
if( pRet ){
@ -562,19 +570,22 @@ int sqlite3Fts5HashScanEof(Fts5Hash *p){
void sqlite3Fts5HashScanEntry(
Fts5Hash *pHash,
const char **pzTerm, /* OUT: term (nul-terminated) */
int *pnTerm, /* OUT: Size of term in bytes */
const u8 **ppDoclist, /* OUT: pointer to doclist */
int *pnDoclist /* OUT: size of doclist in bytes */
){
Fts5HashEntry *p;
if( (p = pHash->pScan) ){
char *zKey = fts5EntryKey(p);
int nTerm = (int)strlen(zKey);
int nTerm = p->nKey;
fts5HashAddPoslistSize(pHash, p, 0);
*pzTerm = zKey;
*ppDoclist = (const u8*)&zKey[nTerm+1];
*pnDoclist = p->nData - (sizeof(Fts5HashEntry) + nTerm + 1);
*pnTerm = nTerm;
*ppDoclist = (const u8*)&zKey[nTerm];
*pnDoclist = p->nData - (sizeof(Fts5HashEntry) + nTerm);
}else{
*pzTerm = 0;
*pnTerm = 0;
*ppDoclist = 0;
*pnDoclist = 0;
}
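The updated header comment in this file describes the new key layout: one index byte followed by the raw term bytes, with Fts5HashEntry.nKey counting both and no reliance on a nul terminator. Below is a simplified model of that layout and of the memcmp()-based lookup and ordering now used in sqlite3Fts5HashWrite() and fts5HashEntryMerge(); the Entry type and helper names are illustrative, not the real fts5 structures.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Simplified stand-in for Fts5HashEntry. The key - one index byte followed
** by the term bytes, nKey bytes in total (e.g. nKey==6 for "0token") -
** immediately follows the struct in memory. */
typedef struct Entry Entry;
struct Entry {
  int nKey;                      /* Size of the key in bytes */
};
#define entryKey(p) ((char*)&(p)[1])

/* Allocate an entry for term pToken/nToken in the index named by bByte. */
static Entry *entryNew(char bByte, const char *pToken, int nToken){
  Entry *p = malloc(sizeof(Entry) + 1 + nToken);
  if( p ){
    p->nKey = nToken + 1;
    entryKey(p)[0] = bByte;
    memcpy(&entryKey(p)[1], pToken, nToken);
  }
  return p;
}

/* Equality test mirroring the new check in sqlite3Fts5HashWrite(): compare
** byte counts and key bytes with memcmp() instead of strlen()/strcmp(). */
static int entryMatches(Entry *p, char bByte, const char *pToken, int nToken){
  char *zKey = entryKey(p);
  return zKey[0]==bByte
      && p->nKey==nToken+1
      && memcmp(&zKey[1], pToken, nToken)==0;
}

/* Ordering mirroring the new fts5HashEntryMerge() comparison: memcmp() over
** the shorter key, with the shorter key sorting first on a tie. */
static int entryCompare(Entry *p1, Entry *p2){
  int nMin = p1->nKey<p2->nKey ? p1->nKey : p2->nKey;
  int cmp = memcmp(entryKey(p1), entryKey(p2), nMin);
  return cmp ? cmp : (p1->nKey - p2->nKey);
}

int main(void){
  Entry *pA = entryNew('0', "tok\0en", 6);   /* term with an embedded 0x00 */
  Entry *pB = entryNew('0', "token", 5);
  if( pA && pB ){
    printf("match A = %d\n", entryMatches(pA, '0', "tok\0en", 6));  /* 1 */
    printf("A < B   = %d\n", entryCompare(pA, pB)<0);               /* 1 */
  }
  free(pA);
  free(pB);
  return 0;
}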

ext/fts5/fts5_index.c

@ -2177,15 +2177,16 @@ static void fts5SegIterNext_None(
}else{
const u8 *pList = 0;
const char *zTerm = 0;
int nTerm = 0;
int nList;
sqlite3Fts5HashScanNext(p->pHash);
sqlite3Fts5HashScanEntry(p->pHash, &zTerm, &pList, &nList);
sqlite3Fts5HashScanEntry(p->pHash, &zTerm, &nTerm, &pList, &nList);
if( pList==0 ) goto next_none_eof;
pIter->pLeaf->p = (u8*)pList;
pIter->pLeaf->nn = nList;
pIter->pLeaf->szLeaf = nList;
pIter->iEndofDoclist = nList;
sqlite3Fts5BufferSet(&p->rc,&pIter->term, (int)strlen(zTerm), (u8*)zTerm);
sqlite3Fts5BufferSet(&p->rc,&pIter->term, nTerm, (u8*)zTerm);
pIter->iLeafOffset = fts5GetVarint(pList, (u64*)&pIter->iRowid);
}
@ -2251,11 +2252,12 @@ static void fts5SegIterNext(
}else if( pIter->pSeg==0 ){
const u8 *pList = 0;
const char *zTerm = 0;
int nTerm = 0;
int nList = 0;
assert( (pIter->flags & FTS5_SEGITER_ONETERM) || pbNewTerm );
if( 0==(pIter->flags & FTS5_SEGITER_ONETERM) ){
sqlite3Fts5HashScanNext(p->pHash);
sqlite3Fts5HashScanEntry(p->pHash, &zTerm, &pList, &nList);
sqlite3Fts5HashScanEntry(p->pHash, &zTerm, &nTerm, &pList, &nList);
}
if( pList==0 ){
fts5DataRelease(pIter->pLeaf);
@ -2265,8 +2267,7 @@ static void fts5SegIterNext(
pIter->pLeaf->nn = nList;
pIter->pLeaf->szLeaf = nList;
pIter->iEndofDoclist = nList+1;
sqlite3Fts5BufferSet(&p->rc, &pIter->term, (int)strlen(zTerm),
(u8*)zTerm);
sqlite3Fts5BufferSet(&p->rc, &pIter->term, nTerm, (u8*)zTerm);
pIter->iLeafOffset = fts5GetVarint(pList, (u64*)&pIter->iRowid);
*pbNewTerm = 1;
}
@ -2711,8 +2712,7 @@ static void fts5SegIterHashInit(
const u8 *pList = 0;
p->rc = sqlite3Fts5HashScanInit(p->pHash, (const char*)pTerm, nTerm);
sqlite3Fts5HashScanEntry(p->pHash, (const char**)&z, &pList, &nList);
n = (z ? (int)strlen((const char*)z) : 0);
sqlite3Fts5HashScanEntry(p->pHash, (const char**)&z, &n, &pList, &nList);
if( pList ){
pLeaf = fts5IdxMalloc(p, sizeof(Fts5Data));
if( pLeaf ){
@ -5313,10 +5313,10 @@ static void fts5FlushSecureDelete(
Fts5Index *p,
Fts5Structure *pStruct,
const char *zTerm,
int nTerm,
i64 iRowid
){
const int f = FTS5INDEX_QUERY_SKIPHASH;
int nTerm = (int)strlen(zTerm);
Fts5Iter *pIter = 0; /* Used to find term instance */
fts5MultiIterNew(p, pStruct, f, 0, (const u8*)zTerm, nTerm, -1, 0, &pIter);
@ -5390,8 +5390,7 @@ static void fts5FlushOneHash(Fts5Index *p){
int nDoclist; /* Size of doclist in bytes */
/* Get the term and doclist for this entry. */
sqlite3Fts5HashScanEntry(pHash, &zTerm, &pDoclist, &nDoclist);
nTerm = (int)strlen(zTerm);
sqlite3Fts5HashScanEntry(pHash, &zTerm, &nTerm, &pDoclist, &nDoclist);
if( bSecureDelete==0 ){
fts5WriteAppendTerm(p, &writer, nTerm, (const u8*)zTerm);
if( p->rc!=SQLITE_OK ) break;
@ -5421,7 +5420,7 @@ static void fts5FlushOneHash(Fts5Index *p){
if( bSecureDelete ){
if( eDetail==FTS5_DETAIL_NONE ){
if( iOff<nDoclist && pDoclist[iOff]==0x00 ){
fts5FlushSecureDelete(p, pStruct, zTerm, iRowid);
fts5FlushSecureDelete(p, pStruct, zTerm, nTerm, iRowid);
iOff++;
if( iOff<nDoclist && pDoclist[iOff]==0x00 ){
iOff++;
@ -5431,7 +5430,7 @@ static void fts5FlushOneHash(Fts5Index *p){
}
}
}else if( (pDoclist[iOff] & 0x01) ){
fts5FlushSecureDelete(p, pStruct, zTerm, iRowid);
fts5FlushSecureDelete(p, pStruct, zTerm, nTerm, iRowid);
if( p->rc!=SQLITE_OK || pDoclist[iOff]==0x01 ){
iOff++;
continue;

ext/fts5/fts5_tcl.c

@ -1117,6 +1117,176 @@ static int SQLITE_TCLAPI f5tRegisterTok(
return TCL_OK;
}
typedef struct OriginTextCtx OriginTextCtx;
struct OriginTextCtx {
sqlite3 *db;
fts5_api *pApi;
};
typedef struct OriginTextTokenizer OriginTextTokenizer;
struct OriginTextTokenizer {
Fts5Tokenizer *pTok; /* Underlying tokenizer object */
fts5_tokenizer tokapi; /* API implementation for pTok */
};
/*
** Delete the OriginTextCtx object indicated by the only argument.
*/
static void f5tOrigintextTokenizerDelete(void *pCtx){
OriginTextCtx *p = (OriginTextCtx*)pCtx;
ckfree(p);
}
static int f5tOrigintextCreate(
void *pCtx,
const char **azArg,
int nArg,
Fts5Tokenizer **ppOut
){
OriginTextCtx *p = (OriginTextCtx*)pCtx;
OriginTextTokenizer *pTok = 0;
void *pTokCtx = 0;
int rc = SQLITE_OK;
pTok = (OriginTextTokenizer*)sqlite3_malloc(sizeof(OriginTextTokenizer));
if( pTok==0 ){
rc = SQLITE_NOMEM;
}else if( nArg<1 ){
rc = SQLITE_ERROR;
}else{
/* Locate the underlying tokenizer */
rc = p->pApi->xFindTokenizer(p->pApi, azArg[0], &pTokCtx, &pTok->tokapi);
}
/* Create the new tokenizer instance */
if( rc==SQLITE_OK ){
rc = pTok->tokapi.xCreate(pTokCtx, &azArg[1], nArg-1, &pTok->pTok);
}
if( rc!=SQLITE_OK ){
sqlite3_free(pTok);
pTok = 0;
}
*ppOut = (Fts5Tokenizer*)pTok;
return rc;
}
static void f5tOrigintextDelete(Fts5Tokenizer *pTokenizer){
OriginTextTokenizer *p = (OriginTextTokenizer*)pTokenizer;
if( p->pTok ){
p->tokapi.xDelete(p->pTok);
}
sqlite3_free(p);
}
typedef struct OriginTextCb OriginTextCb;
struct OriginTextCb {
void *pCtx;
const char *pText;
int nText;
int (*xToken)(void *, int, const char *, int, int, int);
char *aBuf; /* Buffer to use */
int nBuf; /* Allocated size of aBuf[] */
};
static int xOriginToken(
void *pCtx, /* Copy of 2nd argument to xTokenize() */
int tflags, /* Mask of FTS5_TOKEN_* flags */
const char *pToken, /* Pointer to buffer containing token */
int nToken, /* Size of token in bytes */
int iStart, /* Byte offset of token within input text */
int iEnd /* Byte offset of end of token within input */
){
OriginTextCb *p = (OriginTextCb*)pCtx;
int ret = 0;
if( nToken==(iEnd-iStart) && 0==memcmp(pToken, &p->pText[iStart], nToken) ){
/* Token exactly matches document text. Pass it through as is. */
ret = p->xToken(p->pCtx, tflags, pToken, nToken, iStart, iEnd);
}else{
int nReq = nToken + 1 + (iEnd-iStart);
if( nReq>p->nBuf ){
sqlite3_free(p->aBuf);
p->aBuf = sqlite3_malloc(nReq*2);
if( p->aBuf==0 ) return SQLITE_NOMEM;
p->nBuf = nReq*2;
}
memcpy(p->aBuf, pToken, nToken);
p->aBuf[nToken] = '\0';
memcpy(&p->aBuf[nToken+1], &p->pText[iStart], iEnd-iStart);
ret = p->xToken(p->pCtx, tflags, p->aBuf, nReq, iStart, iEnd);
}
return ret;
}
static int f5tOrigintextTokenize(
Fts5Tokenizer *pTokenizer,
void *pCtx,
int flags, /* Mask of FTS5_TOKENIZE_* flags */
const char *pText, int nText,
int (*xToken)(void *, int, const char *, int, int, int)
){
OriginTextTokenizer *p = (OriginTextTokenizer*)pTokenizer;
OriginTextCb cb;
int ret;
memset(&cb, 0, sizeof(cb));
cb.pCtx = pCtx;
cb.pText = pText;
cb.nText = nText;
cb.xToken = xToken;
ret = p->tokapi.xTokenize(p->pTok,(void*)&cb,flags,pText,nText,xOriginToken);
sqlite3_free(cb.aBuf);
return ret;
}
/*
** sqlite3_fts5_register_origintext DB
**
** Description...
*/
static int SQLITE_TCLAPI f5tRegisterOriginText(
void * clientData,
Tcl_Interp *interp,
int objc,
Tcl_Obj *CONST objv[]
){
sqlite3 *db = 0;
fts5_api *pApi = 0;
int rc;
fts5_tokenizer tok = {0, 0, 0};
OriginTextCtx *pCtx = 0;
if( objc!=2 ){
Tcl_WrongNumArgs(interp, 1, objv, "DB");
return TCL_ERROR;
}
if( f5tDbAndApi(interp, objv[1], &db, &pApi) ) return TCL_ERROR;
pCtx = (OriginTextCtx*)ckalloc(sizeof(OriginTextCtx));
pCtx->db = db;
pCtx->pApi = pApi;
tok.xCreate = f5tOrigintextCreate;
tok.xDelete = f5tOrigintextDelete;
tok.xTokenize = f5tOrigintextTokenize;
rc = pApi->xCreateTokenizer(
pApi, "origintext", (void*)pCtx, &tok, f5tOrigintextTokenizerDelete
);
Tcl_ResetResult(interp);
if( rc!=SQLITE_OK ){
Tcl_AppendResult(interp, "error: ", sqlite3_errmsg(db), 0);
return TCL_ERROR;
}
return TCL_OK;
}
/*
** Entry point.
*/
@ -1133,7 +1303,8 @@ int Fts5tcl_Init(Tcl_Interp *interp){
{ "sqlite3_fts5_may_be_corrupt", f5tMayBeCorrupt, 0 },
{ "sqlite3_fts5_token_hash", f5tTokenHash, 0 },
{ "sqlite3_fts5_register_matchinfo", f5tRegisterMatchinfo, 0 },
{ "sqlite3_fts5_register_fts5tokenize", f5tRegisterTok, 0 }
{ "sqlite3_fts5_register_fts5tokenize", f5tRegisterTok, 0 },
{ "sqlite3_fts5_register_origintext",f5tRegisterOriginText, 0 }
};
int i;
F5tTokenizerContext *pContext;
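The xOriginToken() callback above passes a token through unchanged when it matches the document text byte-for-byte; otherwise it emits a single buffer of the form folded-text, 0x00, original-text. A small stand-alone sketch of that construction follows; buildOriginToken() is a hypothetical helper for illustration, not part of the patch.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Build a token of the form "folded \0 original", as xOriginToken() does
** when the normalized token differs from the original document text.
** Returns a malloc'd buffer of *pnOut bytes, or NULL on OOM. */
static char *buildOriginToken(
  const char *pToken, int nToken,    /* Folded token from the tokenizer */
  const char *pOrig, int nOrig,      /* Original text from the document */
  int *pnOut                         /* OUT: size of returned buffer */
){
  int nReq = nToken + 1 + nOrig;     /* folded + 0x00 + original */
  char *aBuf = malloc(nReq);
  if( aBuf ){
    memcpy(aBuf, pToken, nToken);
    aBuf[nToken] = '\0';
    memcpy(&aBuf[nToken+1], pOrig, nOrig);
  }
  *pnOut = aBuf ? nReq : 0;
  return aBuf;
}

int main(void){
  /* "Hello" folded by unicode61 to "hello" yields the 11-byte token
  ** "hello\0Hello"; the fts5vocab query in the test file below shows it
  ** as "hello.Hello" after mapping 0x00 to '.'. */
  int n = 0;
  char *a = buildOriginToken("hello", 5, "Hello", 5, &n);
  if( a ){
    printf("%d bytes, original part = %.*s\n", n, n-6, &a[6]);  /* 11, Hello */
    free(a);
  }
  return 0;
}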

ext/fts5/test/fts5origintext.test

@ -0,0 +1,116 @@
# 2014 Jan 08
#
# The author disclaims copyright to this source code. In place of
# a legal notice, here is a blessing:
#
# May you do good and not evil.
# May you find forgiveness for yourself and forgive others.
# May you share freely, never taking more than you give.
#
#***********************************************************************
#
# Tests focused on terms that contain embedded 0x00 bytes.
#
source [file join [file dirname [info script]] fts5_common.tcl]
set testprefix fts5origintext
# If SQLITE_ENABLE_FTS5 is not defined, omit this file.
ifcapable !fts5 {
finish_test
return
}
sqlite3_fts5_register_origintext db
do_execsql_test 1.0 {
CREATE VIRTUAL TABLE ft USING fts5(x, tokenize="origintext unicode61");
CREATE VIRTUAL TABLE vocab USING fts5vocab(ft, instance);
}
do_execsql_test 1.1 {
INSERT INTO ft VALUES('Hello world');
}
do_execsql_test 1.2 {
INSERT INTO ft(ft) VALUES('integrity-check');
}
proc b {x} { string map [list "\0" "."] $x }
db func b b
do_execsql_test 1.3 {
select b(term) from vocab;
} {
hello.Hello
world
}
#-------------------------------------------------------------------------
reset_db
# Return a random integer between 0 and n-1.
#
proc random {n} {
expr {abs(int(rand()*$n))}
}
proc select_one {list} {
set n [llength $list]
lindex $list [random $n]
}
proc term {} {
set first_letter {
a b c d e f g h i j k l m n o p q r s t u v w x y z
A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
}
set term [select_one $first_letter]
append term [random 100]
}
proc document {} {
set nTerm [expr [random 5] + 5]
set doc ""
for {set ii 0} {$ii < $nTerm} {incr ii} {
lappend doc [term]
}
set doc
}
db func document document
sqlite3_fts5_register_origintext db
do_execsql_test 2.0 {
CREATE VIRTUAL TABLE ft USING fts5(x, tokenize="origintext unicode61");
INSERT INTO ft(ft, rank) VALUES('pgsz', 128);
CREATE VIRTUAL TABLE vocab USING fts5vocab(ft, instance);
}
do_test 2.1 {
for {set ii 0} {$ii < 500} {incr ii} {
execsql { INSERT INTO ft VALUES( document() ) }
}
} {}
do_execsql_test 2.2 {
INSERT INTO ft(ft) VALUES('integrity-check');
}
do_execsql_test 2.3 {
INSERT INTO ft(ft, rank) VALUES('merge', 16);
}
do_execsql_test 2.4 {
INSERT INTO ft(ft) VALUES('integrity-check');
}
do_execsql_test 2.5 {
INSERT INTO ft(ft) VALUES('optimize');
}
proc b {x} { string map [list "\0" "."] $x }
db func b b
#execsql_pp { SELECT b(term) FROM vocab }
finish_test

manifest

@ -1,5 +1,5 @@
C Fix\sJNI\sbinding\sto\scompile\swithout\sSQLITE_ENABLE_PREUPDATE_HOOK.\sAdd\sbuild\soption\sto\sdisable\sall\soptional\sENABLE\sflags.
D 2023-09-30T17:08:29.126
C Changes\sso\sthat\sfts5\scan\shandle\stokens\swith\sembedded\s'\\0'\sbytes.
D 2023-09-30T18:13:35.306
F .fossil-settings/empty-dirs dbb81e8fc0401ac46a1491ab34a7f2c7c0452f2f06b54ebb845d024ca8283ef1
F .fossil-settings/ignore-glob 35175cdfcf539b2318cb04a9901442804be81cd677d8b889fcc9149c21f239ea
F LICENSE.md df5091916dbb40e6e9686186587125e1b2ff51f022cc334e886c19a0e9982724
@ -88,16 +88,16 @@ F ext/fts3/unicode/mkunicode.tcl d5aebf022fa4577ee8cdf27468f0d847879993959101f6d
F ext/fts3/unicode/parseunicode.tcl a981bd6466d12dd17967515801c3ff23f74a281be1a03cf1e6f52a6959fc77eb
F ext/fts5/extract_api_docs.tcl a36e54ec777172ddd3f9a88daf593b00848368e0
F ext/fts5/fts5.h 05501612cc655504c5dce8ba765ab621d50fc478490089beaa0d75e00b23e520
F ext/fts5/fts5Int.h 78a63cc0795186cde5384816a9403a68c65774b35d952e05b81a1b4b158e07c8
F ext/fts5/fts5Int.h 66a38b285e2b860baa29745d8eff27f5b0809268e7820498494d9acfaccf8a5c
F ext/fts5/fts5_aux.c 572d5ec92ba7301df2fea3258576332f2f4d2dfd66d8263afd157d9deceac480
F ext/fts5/fts5_buffer.c 3001fbabb585d6de52947b44b455235072b741038391f830d6b729225eeaf6a5
F ext/fts5/fts5_config.c 054359543566cbff1ba65a188330660a5457299513ac71c53b3a07d934c7b081
F ext/fts5/fts5_expr.c bd3b81ce669c4104e34ffe66570af1999a317b142c15fccb112de9fb0caa57a6
F ext/fts5/fts5_hash.c 65e7707bc8774706574346d18c20218facf87de3599b995963c3e6d6809f203d
F ext/fts5/fts5_index.c a86bcd5637625ce1037649d55974ab8da1fa8d1375cb334aae47ef376642e93b
F ext/fts5/fts5_hash.c 76765856397eff56f526b0640b23a1677d737d35e07bc00e4b4b2e0fc5fda60d
F ext/fts5/fts5_index.c 16d775ecbccf7d3698a03bcae3c3fbee0749df748b93b29d0e82a37e02eaaa94
F ext/fts5/fts5_main.c 799ec88d2309055f6406bddb0bd6ed80148c5da5eb14594c3c5309a6e944d489
F ext/fts5/fts5_storage.c 3c9b41fce41b6410f2e8f82eb035c6a29b2560483f773e6dc98cf3cb2e4ddbb5
F ext/fts5/fts5_tcl.c b1445cbe69908c411df8084a10b2485500ac70a9c747cdc8cda175a3da59d8ae
F ext/fts5/fts5_tcl.c 0d2bb0ff7bf6ee136015be118167f0bd956ddd05a8f02c68bd34299b50648f9f
F ext/fts5/fts5_test_mi.c 08c11ec968148d4cb4119d96d819f8c1f329812c568bac3684f5464be177d3ee
F ext/fts5/fts5_test_tok.c a2bed8edb25f6432e8cdb62aad5916935c19dba8dac2b8324950cfff397e25ff
F ext/fts5/fts5_tokenize.c 5e251efb0f1af99a25ed50010ba6b1ad1250aca5921af1988fdcabe5ebc3cb43
@ -187,6 +187,7 @@ F ext/fts5/test/fts5onepass.test f9b7d9b2c334900c6542a869760290e2ab5382af8fbd618
F ext/fts5/test/fts5optimize.test 36a752d24c818792032e4ff502936fc9cc5ef938721696396fdc79214b2717f1
F ext/fts5/test/fts5optimize2.test 93e742c36b487d8874621360af5b1ce4d39b04fb9e71ce9bc34015c5fc811785
F ext/fts5/test/fts5optimize3.test bf9c91bb927d0fb2b9a06318a217a0419183ac5913842e062c7e0b98ea5d0fca
F ext/fts5/test/fts5origintext.test 9a6edc85ccc4afb10e71d54d98d8170f850272e55b120520f367afbb12526674
F ext/fts5/test/fts5phrase.test 13e5d8e9083077b3d9c74315b3c92ec723cc6eb37c8155e0bfe1bba00559f07b
F ext/fts5/test/fts5plan.test b65cfcca9ddd6fdaa118c61e17aeec8e8433bc5b6bb307abd116514f79c49c5a
F ext/fts5/test/fts5porter.test 8d08010c28527db66bc3feebd2b8767504aaeb9b101a986342fa7833d49d0d15
@ -2122,8 +2123,11 @@ F vsixtest/vsixtest.tcl 6a9a6ab600c25a91a7acc6293828957a386a8a93
F vsixtest/vsixtest.vcxproj.data 2ed517e100c66dc455b492e1a33350c1b20fbcdc
F vsixtest/vsixtest.vcxproj.filters 37e51ffedcdb064aad6ff33b6148725226cd608e
F vsixtest/vsixtest_TemporaryKey.pfx e5b1b036facdb453873e7084e1cae9102ccc67a0
P 5e387275f69ab2d3159b4b67b8cbfc6270410b61e5ac1f988616e8d051f6572e
R 02618be495064fd0c511f49fba8a92b2
U stephan
Z fb900a6927398da79962f35041fce8dc
P c04022b7407f77eaf0175e831ebcd6bbdc0af1cef0d42c5c11102aa8484f24ca
R ee3b13ddf778c77c1640cd7c7844c1f5
T *branch * fts5-token-data
T *sym-fts5-token-data *
T -sym-trunk *
U dan
Z 7d5bd217a552215de3d888e155abaef5
# Remove this line to create a well-formed Fossil manifest.

manifest.uuid

@ -1 +1 @@
c04022b7407f77eaf0175e831ebcd6bbdc0af1cef0d42c5c11102aa8484f24ca
c027c092c4af53bd6ae3cc6e2b4439167d9eeb0f9de549b6a2c2a72a67ee886c