Updated tokenizer for better matching when searching for code snippets (#32261)

This PR improves the accuracy of Gitea's code search. 

Currently, Gitea does not consider statements such as
`console.log("hello")` as hits when the user searches for `log`. The
culprit is how both ES and Bleve tokenize the file contents (in
both cases, `console.log` ends up as a single token).
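
To make the change concrete, the snippet below emulates the new splitting
behavior with a plain regular expression. This is only an illustration of the
tokenization; ES and Bleve apply their own tokenizers server-side, and none of
this code is part of the PR:

```go
package main

import (
	"fmt"
	"regexp"
)

// Mirrors the "[^a-zA-Z0-9]" pattern from the ES settings below:
// any run of non-alphanumeric characters acts as a token boundary.
var nonAlnum = regexp.MustCompile(`[^a-zA-Z0-9]+`)

func main() {
	src := `console.log("hello")`
	// Old behavior: "console.log" was indexed as one token, so a search
	// for "log" found nothing. After splitting: console, log, hello.
	for _, tok := range nonAlnum.Split(src, -1) {
		if tok != "" { // drop empties produced by leading/trailing separators
			fmt.Println(tok)
		}
	}
}
```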

In ES' case, we changed the tokenizer to
[simple_pattern_split](https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-simplepatternsplit-tokenizer.html),
so tokens are runs of letters and digits. In Bleve's case, we switched
to its [letter](https://blevesearch.com/docs/Tokenizers/) tokenizer.
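
For reference, registering such an analyzer with Bleve looks roughly like the
sketch below. The analyzer name `code_analyzer` and the exact filter chain are
simplified assumptions for illustration, not Gitea's actual registration code:

```go
package main

import (
	"fmt"

	"github.com/blevesearch/bleve/v2"
	"github.com/blevesearch/bleve/v2/analysis/analyzer/custom"
	"github.com/blevesearch/bleve/v2/analysis/token/lowercase"
	"github.com/blevesearch/bleve/v2/analysis/tokenizer/letter"
)

func main() {
	m := bleve.NewIndexMapping()
	// "code_analyzer" is a made-up name for this sketch; Gitea's real
	// analyzer chain includes additional token filters.
	err := m.AddCustomAnalyzer("code_analyzer", map[string]interface{}{
		"type":          custom.Name, // "custom"
		"tokenizer":     letter.Name, // emit runs of letters as tokens
		"token_filters": []string{lowercase.Name},
	})
	if err != nil {
		panic(err)
	}
	fmt.Println("analyzer registered: code_analyzer")
}
```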

Resolves #32220

---------

Signed-off-by: Bruno Sofiato <bruno.sofiato@gmail.com>
Bruno Sofiato 2024-11-06 17:51:20 -03:00 committed by GitHub
parent b573512312
commit f64fbd9b74
17 changed files with 83 additions and 12 deletions


@@ -30,7 +30,7 @@ import (
 )
 const (
-	esRepoIndexerLatestVersion = 2
+	esRepoIndexerLatestVersion = 3
 	// multi-match-types, currently only 2 types are used
 	// Reference: https://www.elastic.co/guide/en/elasticsearch/reference/7.0/query-dsl-multi-match-query.html#multi-match-types
 	esMultiMatchTypeBestFields = "best_fields"
@@ -60,6 +60,10 @@ const (
 	"settings": {
 		"analysis": {
 			"analyzer": {
+				"content_analyzer": {
+					"tokenizer": "content_tokenizer",
+					"filter" : ["lowercase"]
+				},
 				"filename_path_analyzer": {
 					"tokenizer": "path_tokenizer"
 				},
@@ -68,6 +72,10 @@ const (
 			}
 		},
 		"tokenizer": {
+			"content_tokenizer": {
+				"type": "simple_pattern_split",
+				"pattern": "[^a-zA-Z0-9]"
+			},
 			"path_tokenizer": {
 				"type": "path_hierarchy",
 				"delimiter": "/"
@@ -104,7 +112,8 @@ const (
 			"content": {
 				"type": "text",
 				"term_vector": "with_positions_offsets",
-				"index": true
+				"index": true,
+				"analyzer": "content_analyzer"
 			},
 			"commit_id": {
 				"type": "keyword",