You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

389 lines
12 KiB

  1. // Copyright 2019 The Gitea Authors. All rights reserved.
  2. // Use of this source code is governed by a MIT-style
  3. // license that can be found in the LICENSE file.
  4. package code
import (
	"fmt"
	"math"
	"os"
	"strconv"
	"strings"
	"time"

	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/modules/analyze"
	"code.gitea.io/gitea/modules/base"
	"code.gitea.io/gitea/modules/charset"
	"code.gitea.io/gitea/modules/git"
	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/timeutil"
	"code.gitea.io/gitea/modules/util"

	"github.com/blevesearch/bleve"
	analyzer_custom "github.com/blevesearch/bleve/analysis/analyzer/custom"
	analyzer_keyword "github.com/blevesearch/bleve/analysis/analyzer/keyword"
	"github.com/blevesearch/bleve/analysis/token/lowercase"
	"github.com/blevesearch/bleve/analysis/token/unicodenorm"
	"github.com/blevesearch/bleve/analysis/tokenizer/unicode"
	"github.com/blevesearch/bleve/index/upsidedown"
	"github.com/blevesearch/bleve/mapping"
	"github.com/blevesearch/bleve/search/query"
	"github.com/ethantkoenig/rupture"
	"github.com/go-enry/go-enry/v2"
)
// unicodeNormalizeName is the name under which the custom Unicode
// normalization token filter is registered on the index mapping.
const unicodeNormalizeName = "unicodeNormalize"

// maxBatchSize is how many index operations a FlushingBatch buffers
// before it flushes them to the underlying bleve index.
const maxBatchSize = 16
  34. // numericEqualityQuery a numeric equality query for the given value and field
  35. func numericEqualityQuery(value int64, field string) *query.NumericRangeQuery {
  36. f := float64(value)
  37. tru := true
  38. q := bleve.NewNumericRangeInclusiveQuery(&f, &f, &tru, &tru)
  39. q.SetField(field)
  40. return q
  41. }
  42. func addUnicodeNormalizeTokenFilter(m *mapping.IndexMappingImpl) error {
  43. return m.AddCustomTokenFilter(unicodeNormalizeName, map[string]interface{}{
  44. "type": unicodenorm.Name,
  45. "form": unicodenorm.NFC,
  46. })
  47. }
  48. // openBleveIndexer open the index at the specified path, checking for metadata
  49. // updates and bleve version updates. If index needs to be created (or
  50. // re-created), returns (nil, nil)
  51. func openBleveIndexer(path string, latestVersion int) (bleve.Index, error) {
  52. _, err := os.Stat(path)
  53. if err != nil && os.IsNotExist(err) {
  54. return nil, nil
  55. } else if err != nil {
  56. return nil, err
  57. }
  58. metadata, err := rupture.ReadIndexMetadata(path)
  59. if err != nil {
  60. return nil, err
  61. }
  62. if metadata.Version < latestVersion {
  63. // the indexer is using a previous version, so we should delete it and
  64. // re-populate
  65. return nil, util.RemoveAll(path)
  66. }
  67. index, err := bleve.Open(path)
  68. if err != nil && err == upsidedown.IncompatibleVersion {
  69. // the indexer was built with a previous version of bleve, so we should
  70. // delete it and re-populate
  71. return nil, util.RemoveAll(path)
  72. } else if err != nil {
  73. return nil, err
  74. }
  75. return index, nil
  76. }
// RepoIndexerData data stored in the repo indexer
type RepoIndexerData struct {
	RepoID    int64     // ID of the repository the file belongs to
	CommitID  string    // commit SHA the indexed content was taken from
	Content   string    // UTF-8 text content that is searched
	Language  string    // programming language detected for the file
	UpdatedAt time.Time // time the document was (re-)indexed
}
// Type returns the document type, for bleve's mapping.Classifier interface.
// It lets the index mapping route RepoIndexerData documents to the
// repoIndexerDocType document mapping.
func (d *RepoIndexerData) Type() string {
	return repoIndexerDocType
}
const (
	// repoIndexerAnalyzer is the name of the custom analyzer used for file content.
	repoIndexerAnalyzer = "repoIndexerAnalyzer"
	// repoIndexerDocType is the bleve document type for RepoIndexerData documents.
	repoIndexerDocType = "repoIndexerDocType"
	// repoIndexerLatestVersion is the current index schema version; indexes
	// on disk with an older version are deleted and rebuilt.
	repoIndexerLatestVersion = 5
)
  94. // createBleveIndexer create a bleve repo indexer if one does not already exist
  95. func createBleveIndexer(path string, latestVersion int) (bleve.Index, error) {
  96. docMapping := bleve.NewDocumentMapping()
  97. numericFieldMapping := bleve.NewNumericFieldMapping()
  98. numericFieldMapping.IncludeInAll = false
  99. docMapping.AddFieldMappingsAt("RepoID", numericFieldMapping)
  100. textFieldMapping := bleve.NewTextFieldMapping()
  101. textFieldMapping.IncludeInAll = false
  102. docMapping.AddFieldMappingsAt("Content", textFieldMapping)
  103. termFieldMapping := bleve.NewTextFieldMapping()
  104. termFieldMapping.IncludeInAll = false
  105. termFieldMapping.Analyzer = analyzer_keyword.Name
  106. docMapping.AddFieldMappingsAt("Language", termFieldMapping)
  107. docMapping.AddFieldMappingsAt("CommitID", termFieldMapping)
  108. timeFieldMapping := bleve.NewDateTimeFieldMapping()
  109. timeFieldMapping.IncludeInAll = false
  110. docMapping.AddFieldMappingsAt("UpdatedAt", timeFieldMapping)
  111. mapping := bleve.NewIndexMapping()
  112. if err := addUnicodeNormalizeTokenFilter(mapping); err != nil {
  113. return nil, err
  114. } else if err := mapping.AddCustomAnalyzer(repoIndexerAnalyzer, map[string]interface{}{
  115. "type": analyzer_custom.Name,
  116. "char_filters": []string{},
  117. "tokenizer": unicode.Name,
  118. "token_filters": []string{unicodeNormalizeName, lowercase.Name},
  119. }); err != nil {
  120. return nil, err
  121. }
  122. mapping.DefaultAnalyzer = repoIndexerAnalyzer
  123. mapping.AddDocumentMapping(repoIndexerDocType, docMapping)
  124. mapping.AddDocumentMapping("_all", bleve.NewDocumentDisabledMapping())
  125. indexer, err := bleve.New(path, mapping)
  126. if err != nil {
  127. return nil, err
  128. }
  129. if err = rupture.WriteIndexMetadata(path, &rupture.IndexMetadata{
  130. Version: latestVersion,
  131. }); err != nil {
  132. return nil, err
  133. }
  134. return indexer, nil
  135. }
var (
	// compile-time assertion that BleveIndexer implements the Indexer interface
	_ Indexer = &BleveIndexer{}
)
// BleveIndexer represents a bleve indexer implementation
type BleveIndexer struct {
	indexDir string      // filesystem path of the on-disk index
	indexer  bleve.Index // underlying bleve index, populated by init()
}
  144. // NewBleveIndexer creates a new bleve local indexer
  145. func NewBleveIndexer(indexDir string) (*BleveIndexer, bool, error) {
  146. indexer := &BleveIndexer{
  147. indexDir: indexDir,
  148. }
  149. created, err := indexer.init()
  150. return indexer, created, err
  151. }
  152. func (b *BleveIndexer) addUpdate(commitSha string, update fileUpdate, repo *models.Repository, batch rupture.FlushingBatch) error {
  153. // Ignore vendored files in code search
  154. if setting.Indexer.ExcludeVendored && enry.IsVendor(update.Filename) {
  155. return nil
  156. }
  157. stdout, err := git.NewCommand("cat-file", "-s", update.BlobSha).
  158. RunInDir(repo.RepoPath())
  159. if err != nil {
  160. return err
  161. }
  162. if size, err := strconv.Atoi(strings.TrimSpace(stdout)); err != nil {
  163. return fmt.Errorf("Misformatted git cat-file output: %v", err)
  164. } else if int64(size) > setting.Indexer.MaxIndexerFileSize {
  165. return b.addDelete(update.Filename, repo, batch)
  166. }
  167. fileContents, err := git.NewCommand("cat-file", "blob", update.BlobSha).
  168. RunInDirBytes(repo.RepoPath())
  169. if err != nil {
  170. return err
  171. } else if !base.IsTextFile(fileContents) {
  172. // FIXME: UTF-16 files will probably fail here
  173. return nil
  174. }
  175. id := filenameIndexerID(repo.ID, update.Filename)
  176. return batch.Index(id, &RepoIndexerData{
  177. RepoID: repo.ID,
  178. CommitID: commitSha,
  179. Content: string(charset.ToUTF8DropErrors(fileContents)),
  180. Language: analyze.GetCodeLanguage(update.Filename, fileContents),
  181. UpdatedAt: time.Now().UTC(),
  182. })
  183. }
  184. func (b *BleveIndexer) addDelete(filename string, repo *models.Repository, batch rupture.FlushingBatch) error {
  185. id := filenameIndexerID(repo.ID, filename)
  186. return batch.Delete(id)
  187. }
  188. // init init the indexer
  189. func (b *BleveIndexer) init() (bool, error) {
  190. var err error
  191. b.indexer, err = openBleveIndexer(b.indexDir, repoIndexerLatestVersion)
  192. if err != nil {
  193. return false, err
  194. }
  195. if b.indexer != nil {
  196. return false, nil
  197. }
  198. b.indexer, err = createBleveIndexer(b.indexDir, repoIndexerLatestVersion)
  199. if err != nil {
  200. return false, err
  201. }
  202. return true, nil
  203. }
  204. // Close close the indexer
  205. func (b *BleveIndexer) Close() {
  206. log.Debug("Closing repo indexer")
  207. if b.indexer != nil {
  208. err := b.indexer.Close()
  209. if err != nil {
  210. log.Error("Error whilst closing the repository indexer: %v", err)
  211. }
  212. }
  213. log.Info("PID: %d Repository Indexer closed", os.Getpid())
  214. }
  215. // Index indexes the data
  216. func (b *BleveIndexer) Index(repo *models.Repository, sha string, changes *repoChanges) error {
  217. batch := rupture.NewFlushingBatch(b.indexer, maxBatchSize)
  218. for _, update := range changes.Updates {
  219. if err := b.addUpdate(sha, update, repo, batch); err != nil {
  220. return err
  221. }
  222. }
  223. for _, filename := range changes.RemovedFilenames {
  224. if err := b.addDelete(filename, repo, batch); err != nil {
  225. return err
  226. }
  227. }
  228. return batch.Flush()
  229. }
  230. // Delete deletes indexes by ids
  231. func (b *BleveIndexer) Delete(repoID int64) error {
  232. query := numericEqualityQuery(repoID, "RepoID")
  233. searchRequest := bleve.NewSearchRequestOptions(query, 2147483647, 0, false)
  234. result, err := b.indexer.Search(searchRequest)
  235. if err != nil {
  236. return err
  237. }
  238. batch := rupture.NewFlushingBatch(b.indexer, maxBatchSize)
  239. for _, hit := range result.Hits {
  240. if err = batch.Delete(hit.ID); err != nil {
  241. return err
  242. }
  243. }
  244. return batch.Flush()
  245. }
  246. // Search searches for files in the specified repo.
  247. // Returns the matching file-paths
  248. func (b *BleveIndexer) Search(repoIDs []int64, language, keyword string, page, pageSize int) (int64, []*SearchResult, []*SearchResultLanguages, error) {
  249. phraseQuery := bleve.NewMatchPhraseQuery(keyword)
  250. phraseQuery.FieldVal = "Content"
  251. phraseQuery.Analyzer = repoIndexerAnalyzer
  252. var indexerQuery query.Query
  253. if len(repoIDs) > 0 {
  254. var repoQueries = make([]query.Query, 0, len(repoIDs))
  255. for _, repoID := range repoIDs {
  256. repoQueries = append(repoQueries, numericEqualityQuery(repoID, "RepoID"))
  257. }
  258. indexerQuery = bleve.NewConjunctionQuery(
  259. bleve.NewDisjunctionQuery(repoQueries...),
  260. phraseQuery,
  261. )
  262. } else {
  263. indexerQuery = phraseQuery
  264. }
  265. // Save for reuse without language filter
  266. facetQuery := indexerQuery
  267. if len(language) > 0 {
  268. languageQuery := bleve.NewMatchQuery(language)
  269. languageQuery.FieldVal = "Language"
  270. languageQuery.Analyzer = analyzer_keyword.Name
  271. indexerQuery = bleve.NewConjunctionQuery(
  272. indexerQuery,
  273. languageQuery,
  274. )
  275. }
  276. from := (page - 1) * pageSize
  277. searchRequest := bleve.NewSearchRequestOptions(indexerQuery, pageSize, from, false)
  278. searchRequest.Fields = []string{"Content", "RepoID", "Language", "CommitID", "UpdatedAt"}
  279. searchRequest.IncludeLocations = true
  280. if len(language) == 0 {
  281. searchRequest.AddFacet("languages", bleve.NewFacetRequest("Language", 10))
  282. }
  283. result, err := b.indexer.Search(searchRequest)
  284. if err != nil {
  285. return 0, nil, nil, err
  286. }
  287. total := int64(result.Total)
  288. searchResults := make([]*SearchResult, len(result.Hits))
  289. for i, hit := range result.Hits {
  290. var startIndex, endIndex int = -1, -1
  291. for _, locations := range hit.Locations["Content"] {
  292. location := locations[0]
  293. locationStart := int(location.Start)
  294. locationEnd := int(location.End)
  295. if startIndex < 0 || locationStart < startIndex {
  296. startIndex = locationStart
  297. }
  298. if endIndex < 0 || locationEnd > endIndex {
  299. endIndex = locationEnd
  300. }
  301. }
  302. language := hit.Fields["Language"].(string)
  303. var updatedUnix timeutil.TimeStamp
  304. if t, err := time.Parse(time.RFC3339, hit.Fields["UpdatedAt"].(string)); err == nil {
  305. updatedUnix = timeutil.TimeStamp(t.Unix())
  306. }
  307. searchResults[i] = &SearchResult{
  308. RepoID: int64(hit.Fields["RepoID"].(float64)),
  309. StartIndex: startIndex,
  310. EndIndex: endIndex,
  311. Filename: filenameOfIndexerID(hit.ID),
  312. Content: hit.Fields["Content"].(string),
  313. CommitID: hit.Fields["CommitID"].(string),
  314. UpdatedUnix: updatedUnix,
  315. Language: language,
  316. Color: enry.GetColor(language),
  317. }
  318. }
  319. searchResultLanguages := make([]*SearchResultLanguages, 0, 10)
  320. if len(language) > 0 {
  321. // Use separate query to go get all language counts
  322. facetRequest := bleve.NewSearchRequestOptions(facetQuery, 1, 0, false)
  323. facetRequest.Fields = []string{"Content", "RepoID", "Language", "CommitID", "UpdatedAt"}
  324. facetRequest.IncludeLocations = true
  325. facetRequest.AddFacet("languages", bleve.NewFacetRequest("Language", 10))
  326. if result, err = b.indexer.Search(facetRequest); err != nil {
  327. return 0, nil, nil, err
  328. }
  329. }
  330. languagesFacet := result.Facets["languages"]
  331. for _, term := range languagesFacet.Terms {
  332. if len(term.Term) == 0 {
  333. continue
  334. }
  335. searchResultLanguages = append(searchResultLanguages, &SearchResultLanguages{
  336. Language: term.Term,
  337. Color: enry.GetColor(term.Term),
  338. Count: term.Count,
  339. })
  340. }
  341. return total, searchResults, searchResultLanguages, nil
  342. }