* Improve issue indexer * Fix new issue sqlite bug * Different test indexer paths for each db * Add integration indexer paths to make cleanfor-closed-social
@ -0,0 +1,143 @@ | |||
// Copyright 2017 The Gitea Authors. All rights reserved. | |||
// Use of this source code is governed by a MIT-style | |||
// license that can be found in the LICENSE file. | |||
package indexer | |||
import (
	"math"
	"os"

	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/setting"

	"github.com/blevesearch/bleve"
	"github.com/blevesearch/bleve/analysis/analyzer/custom"
	"github.com/blevesearch/bleve/analysis/token/lowercase"
	"github.com/blevesearch/bleve/analysis/token/unicodenorm"
	"github.com/blevesearch/bleve/analysis/tokenizer/unicode"
)
// issueIndexer (thread-safe) index for searching issues
var issueIndexer bleve.Index

// IssueIndexerData data stored in the issue indexer
type IssueIndexerData struct {
	RepoID   int64    // repository the issue belongs to; used to scope searches
	Title    string   // issue title, full-text indexed
	Content  string   // issue body, full-text indexed
	Comments []string // comment bodies, full-text indexed
}

// IssueIndexerUpdate an update to the issue indexer
type IssueIndexerUpdate struct {
	IssueID int64             // issue being (re)indexed; becomes the document ID
	Data    *IssueIndexerData // new document contents
}

// issueIndexerAnalyzer is the name under which the custom analyzer
// (unicode tokenizer + NFC normalization + lowercasing) is registered.
const issueIndexerAnalyzer = "issueIndexer"
// InitIssueIndexer initialize issue indexer | |||
func InitIssueIndexer(populateIndexer func() error) { | |||
_, err := os.Stat(setting.Indexer.IssuePath) | |||
if err != nil { | |||
if os.IsNotExist(err) { | |||
if err = createIssueIndexer(); err != nil { | |||
log.Fatal(4, "CreateIssuesIndexer: %v", err) | |||
} | |||
if err = populateIndexer(); err != nil { | |||
log.Fatal(4, "PopulateIssuesIndex: %v", err) | |||
} | |||
} else { | |||
log.Fatal(4, "InitIssuesIndexer: %v", err) | |||
} | |||
} else { | |||
issueIndexer, err = bleve.Open(setting.Indexer.IssuePath) | |||
if err != nil { | |||
log.Error(4, "Unable to open issues indexer (%s)."+ | |||
" If the error is due to incompatible versions, try deleting the indexer files;"+ | |||
" gitea will recreate them with the appropriate version the next time it runs."+ | |||
" Deleting the indexer files will not result in loss of data.", | |||
setting.Indexer.IssuePath) | |||
log.Fatal(4, "InitIssuesIndexer, open index: %v", err) | |||
} | |||
} | |||
} | |||
// createIssueIndexer create an issue indexer if one does not already exist | |||
func createIssueIndexer() error { | |||
mapping := bleve.NewIndexMapping() | |||
docMapping := bleve.NewDocumentMapping() | |||
docMapping.AddFieldMappingsAt("RepoID", bleve.NewNumericFieldMapping()) | |||
textFieldMapping := bleve.NewTextFieldMapping() | |||
docMapping.AddFieldMappingsAt("Title", textFieldMapping) | |||
docMapping.AddFieldMappingsAt("Content", textFieldMapping) | |||
docMapping.AddFieldMappingsAt("Comments", textFieldMapping) | |||
const unicodeNormNFC = "unicodeNormNFC" | |||
if err := mapping.AddCustomTokenFilter(unicodeNormNFC, map[string]interface{}{ | |||
"type": unicodenorm.Name, | |||
"form": unicodenorm.NFC, | |||
}); err != nil { | |||
return err | |||
} else if err = mapping.AddCustomAnalyzer(issueIndexerAnalyzer, map[string]interface{}{ | |||
"type": custom.Name, | |||
"char_filters": []string{}, | |||
"tokenizer": unicode.Name, | |||
"token_filters": []string{unicodeNormNFC, lowercase.Name}, | |||
}); err != nil { | |||
return err | |||
} | |||
mapping.DefaultAnalyzer = issueIndexerAnalyzer | |||
mapping.AddDocumentMapping("issues", docMapping) | |||
var err error | |||
issueIndexer, err = bleve.New(setting.Indexer.IssuePath, mapping) | |||
return err | |||
} | |||
// UpdateIssue update the issue indexer
// Index replaces any existing document stored under the same issue ID.
func UpdateIssue(update IssueIndexerUpdate) error {
	return issueIndexer.Index(indexerID(update.IssueID), update.Data)
}
// BatchUpdateIssues perform a batch update of the issue indexer | |||
func BatchUpdateIssues(updates ...IssueIndexerUpdate) error { | |||
batch := issueIndexer.NewBatch() | |||
for _, update := range updates { | |||
err := batch.Index(indexerID(update.IssueID), update.Data) | |||
if err != nil { | |||
return err | |||
} | |||
} | |||
return issueIndexer.Batch(batch) | |||
} | |||
// SearchIssuesByKeyword searches for issues by given conditions. | |||
// Returns the matching issue IDs | |||
func SearchIssuesByKeyword(repoID int64, keyword string) ([]int64, error) { | |||
indexerQuery := bleve.NewConjunctionQuery( | |||
numericEqualityQuery(repoID, "RepoID"), | |||
bleve.NewDisjunctionQuery( | |||
newMatchPhraseQuery(keyword, "Title", issueIndexerAnalyzer), | |||
newMatchPhraseQuery(keyword, "Content", issueIndexerAnalyzer), | |||
newMatchPhraseQuery(keyword, "Comments", issueIndexerAnalyzer), | |||
)) | |||
search := bleve.NewSearchRequestOptions(indexerQuery, 2147483647, 0, false) | |||
result, err := issueIndexer.Search(search) | |||
if err != nil { | |||
return nil, err | |||
} | |||
issueIDs := make([]int64, len(result.Hits)) | |||
for i, hit := range result.Hits { | |||
issueIDs[i], err = idOfIndexerID(hit.ID) | |||
if err != nil { | |||
return nil, err | |||
} | |||
} | |||
return issueIDs, nil | |||
} |
@ -0,0 +1,145 @@ | |||
// Copyright (c) 2014 Couchbase, Inc. | |||
// | |||
// Licensed under the Apache License, Version 2.0 (the "License"); | |||
// you may not use this file except in compliance with the License. | |||
// You may obtain a copy of the License at | |||
// | |||
// http://www.apache.org/licenses/LICENSE-2.0 | |||
// | |||
// Unless required by applicable law or agreed to in writing, software | |||
// distributed under the License is distributed on an "AS IS" BASIS, | |||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
// See the License for the specific language governing permissions and | |||
// limitations under the License. | |||
package custom | |||
import ( | |||
"fmt" | |||
"github.com/blevesearch/bleve/analysis" | |||
"github.com/blevesearch/bleve/registry" | |||
) | |||
// Name is the name under which this analyzer is registered.
const Name = "custom"

// AnalyzerConstructor builds an analyzer from a configuration map with the
// optional keys "char_filters" and "token_filters" (each a slice of filter
// names) and the required key "tokenizer" (a registered tokenizer name).
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
	var err error
	var charFilters []analysis.CharFilter
	charFiltersValue, ok := config["char_filters"]
	if ok {
		// Accept either []string or []interface{} (the latter arises when
		// the config was decoded from JSON).
		switch charFiltersValue := charFiltersValue.(type) {
		case []string:
			charFilters, err = getCharFilters(charFiltersValue, cache)
			if err != nil {
				return nil, err
			}
		case []interface{}:
			charFiltersNames, err := convertInterfaceSliceToStringSlice(charFiltersValue, "char filter")
			if err != nil {
				return nil, err
			}
			charFilters, err = getCharFilters(charFiltersNames, cache)
			if err != nil {
				return nil, err
			}
		default:
			return nil, fmt.Errorf("unsupported type for char_filters, must be slice")
		}
	}
	// The tokenizer is mandatory and must be given as a string name.
	var tokenizerName string
	tokenizerValue, ok := config["tokenizer"]
	if ok {
		tokenizerName, ok = tokenizerValue.(string)
		if !ok {
			return nil, fmt.Errorf("must specify tokenizer as string")
		}
	} else {
		return nil, fmt.Errorf("must specify tokenizer")
	}
	tokenizer, err := cache.TokenizerNamed(tokenizerName)
	if err != nil {
		return nil, err
	}
	var tokenFilters []analysis.TokenFilter
	tokenFiltersValue, ok := config["token_filters"]
	if ok {
		// Same dual-format handling as char_filters above.
		switch tokenFiltersValue := tokenFiltersValue.(type) {
		case []string:
			tokenFilters, err = getTokenFilters(tokenFiltersValue, cache)
			if err != nil {
				return nil, err
			}
		case []interface{}:
			tokenFiltersNames, err := convertInterfaceSliceToStringSlice(tokenFiltersValue, "token filter")
			if err != nil {
				return nil, err
			}
			tokenFilters, err = getTokenFilters(tokenFiltersNames, cache)
			if err != nil {
				return nil, err
			}
		default:
			return nil, fmt.Errorf("unsupported type for token_filters, must be slice")
		}
	}
	rv := analysis.Analyzer{
		Tokenizer: tokenizer,
	}
	if charFilters != nil {
		rv.CharFilters = charFilters
	}
	if tokenFilters != nil {
		rv.TokenFilters = tokenFilters
	}
	return &rv, nil
}
func init() {
	// Register the custom analyzer so it can be referenced by name.
	registry.RegisterAnalyzer(Name, AnalyzerConstructor)
}
func getCharFilters(charFilterNames []string, cache *registry.Cache) ([]analysis.CharFilter, error) { | |||
charFilters := make([]analysis.CharFilter, len(charFilterNames)) | |||
for i, charFilterName := range charFilterNames { | |||
charFilter, err := cache.CharFilterNamed(charFilterName) | |||
if err != nil { | |||
return nil, err | |||
} | |||
charFilters[i] = charFilter | |||
} | |||
return charFilters, nil | |||
} | |||
func getTokenFilters(tokenFilterNames []string, cache *registry.Cache) ([]analysis.TokenFilter, error) { | |||
tokenFilters := make([]analysis.TokenFilter, len(tokenFilterNames)) | |||
for i, tokenFilterName := range tokenFilterNames { | |||
tokenFilter, err := cache.TokenFilterNamed(tokenFilterName) | |||
if err != nil { | |||
return nil, err | |||
} | |||
tokenFilters[i] = tokenFilter | |||
} | |||
return tokenFilters, nil | |||
} | |||
// convertInterfaceSliceToStringSlice converts a []interface{} whose elements
// are all strings into a []string. objType names the kind of object being
// converted and appears only in the error message.
func convertInterfaceSliceToStringSlice(interfaceSlice []interface{}, objType string) ([]string, error) {
	stringSlice := make([]string, len(interfaceSlice))
	for i, interfaceObj := range interfaceSlice {
		stringObj, ok := interfaceObj.(string)
		if !ok {
			// Use a constant format string: the original passed objType
			// directly to Errorf, which go vet's printf check flags and
			// which would misformat if objType ever contained a % verb.
			return nil, fmt.Errorf("%s name must be a string", objType)
		}
		stringSlice[i] = stringObj
	}
	return stringSlice, nil
}
@ -1,46 +0,0 @@ | |||
// Copyright (c) 2014 Couchbase, Inc. | |||
// | |||
// Licensed under the Apache License, Version 2.0 (the "License"); | |||
// you may not use this file except in compliance with the License. | |||
// You may obtain a copy of the License at | |||
// | |||
// http://www.apache.org/licenses/LICENSE-2.0 | |||
// | |||
// Unless required by applicable law or agreed to in writing, software | |||
// distributed under the License is distributed on an "AS IS" BASIS, | |||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
// See the License for the specific language governing permissions and | |||
// limitations under the License. | |||
package simple | |||
import ( | |||
"github.com/blevesearch/bleve/analysis" | |||
"github.com/blevesearch/bleve/analysis/token/lowercase" | |||
"github.com/blevesearch/bleve/analysis/tokenizer/letter" | |||
"github.com/blevesearch/bleve/registry" | |||
) | |||
// Name is the name under which this analyzer is registered.
const Name = "simple"

// AnalyzerConstructor builds the "simple" analyzer: a letter tokenizer
// followed by a lowercase token filter. The config map is unused.
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
	tokenizer, err := cache.TokenizerNamed(letter.Name)
	if err != nil {
		return nil, err
	}
	toLowerFilter, err := cache.TokenFilterNamed(lowercase.Name)
	if err != nil {
		return nil, err
	}
	rv := analysis.Analyzer{
		Tokenizer: tokenizer,
		TokenFilters: []analysis.TokenFilter{
			toLowerFilter,
		},
	}
	return &rv, nil
}
func init() {
	// Register the simple analyzer so it can be referenced by name.
	registry.RegisterAnalyzer(Name, AnalyzerConstructor)
}
@ -0,0 +1,79 @@ | |||
// Copyright (c) 2014 Couchbase, Inc. | |||
// | |||
// Licensed under the Apache License, Version 2.0 (the "License"); | |||
// you may not use this file except in compliance with the License. | |||
// You may obtain a copy of the License at | |||
// | |||
// http://www.apache.org/licenses/LICENSE-2.0 | |||
// | |||
// Unless required by applicable law or agreed to in writing, software | |||
// distributed under the License is distributed on an "AS IS" BASIS, | |||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
// See the License for the specific language governing permissions and | |||
// limitations under the License. | |||
package unicodenorm | |||
import ( | |||
"fmt" | |||
"github.com/blevesearch/bleve/analysis" | |||
"github.com/blevesearch/bleve/registry" | |||
"golang.org/x/text/unicode/norm" | |||
) | |||
// Name is the name under which this token filter is registered.
const Name = "normalize_unicode"

// Supported Unicode normalization form names.
const NFC = "nfc"
const NFD = "nfd"
const NFKC = "nfkc"
const NFKD = "nfkd"

// forms maps each form name to the corresponding norm.Form.
var forms = map[string]norm.Form{
	NFC:  norm.NFC,
	NFD:  norm.NFD,
	NFKC: norm.NFKC,
	NFKD: norm.NFKD,
}
// UnicodeNormalizeFilter applies a Unicode normalization form to every token.
type UnicodeNormalizeFilter struct {
	form norm.Form
}

// NewUnicodeNormalizeFilter returns a filter for the named normalization
// form ("nfc", "nfd", "nfkc" or "nfkd"); unknown names produce an error.
func NewUnicodeNormalizeFilter(formName string) (*UnicodeNormalizeFilter, error) {
	form, ok := forms[formName]
	if !ok {
		return nil, fmt.Errorf("no form named %s", formName)
	}
	return &UnicodeNormalizeFilter{
		form: form,
	}, nil
}
// MustNewUnicodeNormalizeFilter is like NewUnicodeNormalizeFilter but panics
// on an unknown form name; intended for package-level initialization.
func MustNewUnicodeNormalizeFilter(formName string) *UnicodeNormalizeFilter {
	filter, err := NewUnicodeNormalizeFilter(formName)
	if err != nil {
		panic(err)
	}
	return filter
}
// Filter normalizes each token's term bytes in place and returns the stream.
func (s *UnicodeNormalizeFilter) Filter(input analysis.TokenStream) analysis.TokenStream {
	for _, token := range input {
		token.Term = s.form.Bytes(token.Term)
	}
	return input
}
func UnicodeNormalizeFilterConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.TokenFilter, error) { | |||
formVal, ok := config["form"].(string) | |||
if !ok { | |||
return nil, fmt.Errorf("must specify form") | |||
} | |||
form := formVal | |||
return NewUnicodeNormalizeFilter(form) | |||
} | |||
func init() {
	// Register the filter so it can be referenced by name.
	registry.RegisterTokenFilter(Name, UnicodeNormalizeFilterConstructor)
}
@ -1,76 +0,0 @@ | |||
// Copyright (c) 2016 Couchbase, Inc. | |||
// | |||
// Licensed under the Apache License, Version 2.0 (the "License"); | |||
// you may not use this file except in compliance with the License. | |||
// You may obtain a copy of the License at | |||
// | |||
// http://www.apache.org/licenses/LICENSE-2.0 | |||
// | |||
// Unless required by applicable law or agreed to in writing, software | |||
// distributed under the License is distributed on an "AS IS" BASIS, | |||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
// See the License for the specific language governing permissions and | |||
// limitations under the License. | |||
package character | |||
import ( | |||
"unicode/utf8" | |||
"github.com/blevesearch/bleve/analysis" | |||
) | |||
// IsTokenRune reports whether a rune belongs inside a token.
type IsTokenRune func(r rune) bool

// CharacterTokenizer splits input into maximal runs of runes accepted by a
// predicate.
type CharacterTokenizer struct {
	isTokenRun IsTokenRune
}

// NewCharacterTokenizer returns a tokenizer using f as the token-rune predicate.
func NewCharacterTokenizer(f IsTokenRune) *CharacterTokenizer {
	return &CharacterTokenizer{
		isTokenRun: f,
	}
}
// Tokenize scans input and emits one token for every maximal run of runes
// for which the predicate returns true, recording byte offsets (Start/End)
// and 1-based positions. NOTE(review): decoding stops at the first
// utf8.RuneError, which covers both end-of-input and invalid UTF-8 — a
// literal U+FFFD in the input would also end the scan.
func (c *CharacterTokenizer) Tokenize(input []byte) analysis.TokenStream {
	rv := make(analysis.TokenStream, 0, 1024)

	offset := 0 // byte offset of the rune currently being examined
	start := 0  // byte offset where the current token begins
	end := 0    // byte offset one past the current token's last rune
	count := 0  // number of tokens emitted so far
	for currRune, size := utf8.DecodeRune(input[offset:]); currRune != utf8.RuneError; currRune, size = utf8.DecodeRune(input[offset:]) {
		isToken := c.isTokenRun(currRune)
		if isToken {
			// Extend the current token to include this rune.
			end = offset + size
		} else {
			if end-start > 0 {
				// build token
				rv = append(rv, &analysis.Token{
					Term:     input[start:end],
					Start:    start,
					End:      end,
					Position: count + 1,
					Type:     analysis.AlphaNumeric,
				})
				count++
			}
			// The next token (if any) starts after this separator rune.
			start = offset + size
			end = start
		}
		offset += size
	}
	// if we ended in the middle of a token, finish it
	if end-start > 0 {
		// build token
		rv = append(rv, &analysis.Token{
			Term:     input[start:end],
			Start:    start,
			End:      end,
			Position: count + 1,
			Type:     analysis.AlphaNumeric,
		})
	}
	return rv
}
@ -1,33 +0,0 @@ | |||
// Copyright (c) 2016 Couchbase, Inc. | |||
// | |||
// Licensed under the Apache License, Version 2.0 (the "License"); | |||
// you may not use this file except in compliance with the License. | |||
// You may obtain a copy of the License at | |||
// | |||
// http://www.apache.org/licenses/LICENSE-2.0 | |||
// | |||
// Unless required by applicable law or agreed to in writing, software | |||
// distributed under the License is distributed on an "AS IS" BASIS, | |||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
// See the License for the specific language governing permissions and | |||
// limitations under the License. | |||
package letter | |||
import ( | |||
"unicode" | |||
"github.com/blevesearch/bleve/analysis" | |||
"github.com/blevesearch/bleve/analysis/tokenizer/character" | |||
"github.com/blevesearch/bleve/registry" | |||
) | |||
// Name is the name under which this tokenizer is registered.
const Name = "letter"

// TokenizerConstructor builds a tokenizer that emits maximal runs of Unicode
// letters. The config map is unused.
func TokenizerConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.Tokenizer, error) {
	return character.NewCharacterTokenizer(unicode.IsLetter), nil
}

func init() {
	// Register the tokenizer so it can be referenced by name.
	registry.RegisterTokenizer(Name, TokenizerConstructor)
}
@ -1,23 +0,0 @@ | |||
// Copyright (c) 2014 Couchbase, Inc. | |||
// | |||
// Licensed under the Apache License, Version 2.0 (the "License"); | |||
// you may not use this file except in compliance with the License. | |||
// You may obtain a copy of the License at | |||
// | |||
// http://www.apache.org/licenses/LICENSE-2.0 | |||
// | |||
// Unless required by applicable law or agreed to in writing, software | |||
// distributed under the License is distributed on an "AS IS" BASIS, | |||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
// See the License for the specific language governing permissions and | |||
// limitations under the License. | |||
// +build appengine appenginevm | |||
package bleve | |||
// in the appengine environment we cannot support disk based indexes
// so we do no extra configuration in this method
func initDisk() {
	// intentionally empty: App Engine offers no writable local disk
}
@ -0,0 +1,137 @@ | |||
// Copyright (c) 2017 Couchbase, Inc. | |||
// | |||
// Licensed under the Apache License, Version 2.0 (the "License"); | |||
// you may not use this file except in compliance with the License. | |||
// You may obtain a copy of the License at | |||
// | |||
// http://www.apache.org/licenses/LICENSE-2.0 | |||
// | |||
// Unless required by applicable law or agreed to in writing, software | |||
// distributed under the License is distributed on an "AS IS" BASIS, | |||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
// See the License for the specific language governing permissions and | |||
// limitations under the License. | |||
package document | |||
import ( | |||
"fmt" | |||
"github.com/blevesearch/bleve/analysis" | |||
"github.com/blevesearch/bleve/geo" | |||
"github.com/blevesearch/bleve/numeric" | |||
) | |||
// GeoPrecisionStep is the number of bits between successive shifted terms
// indexed for a geo point (see Analyze).
var GeoPrecisionStep uint = 9

// GeoPointField indexes a lon/lat point as a morton-hashed, prefix-coded
// numeric value.
type GeoPointField struct {
	name              string
	arrayPositions    []uint64
	options           IndexingOptions
	value             numeric.PrefixCoded // prefix-coded morton hash of (lon, lat)
	numPlainTextBytes uint64
}
// Name returns the field name.
func (n *GeoPointField) Name() string {
	return n.name
}

// ArrayPositions returns the field's positions within enclosing arrays.
func (n *GeoPointField) ArrayPositions() []uint64 {
	return n.arrayPositions
}

// Options returns the field's indexing options.
func (n *GeoPointField) Options() IndexingOptions {
	return n.options
}
// Analyze produces the token stream for this field: the full-precision
// prefix-coded value, plus one token per GeoPrecisionStep shift of the
// original integer so coarser precisions are also searchable.
func (n *GeoPointField) Analyze() (int, analysis.TokenFrequencies) {
	tokens := make(analysis.TokenStream, 0)
	tokens = append(tokens, &analysis.Token{
		Start:    0,
		End:      len(n.value),
		Term:     n.value,
		Position: 1,
		Type:     analysis.Numeric,
	})

	original, err := n.value.Int64()
	if err == nil {
		// Add one token for each coarser precision of the value.
		shift := GeoPrecisionStep
		for shift < 64 {
			shiftEncoded, err := numeric.NewPrefixCodedInt64(original, shift)
			if err != nil {
				break
			}
			token := analysis.Token{
				Start:    0,
				End:      len(shiftEncoded),
				Term:     shiftEncoded,
				Position: 1,
				Type:     analysis.Numeric,
			}
			tokens = append(tokens, &token)
			shift += GeoPrecisionStep
		}
	}

	fieldLength := len(tokens)
	tokenFreqs := analysis.TokenFrequency(tokens, n.arrayPositions, n.options.IncludeTermVectors())
	return fieldLength, tokenFreqs
}
// Value returns the raw prefix-coded bytes.
func (n *GeoPointField) Value() []byte {
	return n.value
}

// Lon decodes the longitude from the stored morton hash.
func (n *GeoPointField) Lon() (float64, error) {
	i64, err := n.value.Int64()
	if err != nil {
		return 0.0, err
	}
	return geo.MortonUnhashLon(uint64(i64)), nil
}

// Lat decodes the latitude from the stored morton hash.
func (n *GeoPointField) Lat() (float64, error) {
	i64, err := n.value.Int64()
	if err != nil {
		return 0.0, err
	}
	return geo.MortonUnhashLat(uint64(i64)), nil
}

// GoString implements fmt.GoStringer for debugging output.
func (n *GeoPointField) GoString() string {
	return fmt.Sprintf("&document.GeoPointField{Name:%s, Options: %s, Value: %s}", n.name, n.options, n.value)
}

// NumPlainTextBytes returns the plain-text byte count recorded for this
// field (a fixed placeholder estimate for points built from lon/lat).
func (n *GeoPointField) NumPlainTextBytes() uint64 {
	return n.numPlainTextBytes
}
// NewGeoPointFieldFromBytes builds a field from already prefix-coded bytes.
func NewGeoPointFieldFromBytes(name string, arrayPositions []uint64, value []byte) *GeoPointField {
	return &GeoPointField{
		name:              name,
		arrayPositions:    arrayPositions,
		value:             value,
		options:           DefaultNumericIndexingOptions,
		numPlainTextBytes: uint64(len(value)),
	}
}

// NewGeoPointField builds a field from lon/lat with default numeric options.
func NewGeoPointField(name string, arrayPositions []uint64, lon, lat float64) *GeoPointField {
	return NewGeoPointFieldWithIndexingOptions(name, arrayPositions, lon, lat, DefaultNumericIndexingOptions)
}

// NewGeoPointFieldWithIndexingOptions builds a field from lon/lat, morton
// hashing the point and prefix-coding it at shift 0 (full precision).
func NewGeoPointFieldWithIndexingOptions(name string, arrayPositions []uint64, lon, lat float64, options IndexingOptions) *GeoPointField {
	mhash := geo.MortonHash(lon, lat)
	prefixCoded := numeric.MustNewPrefixCodedInt64(int64(mhash), 0)
	return &GeoPointField{
		name:           name,
		arrayPositions: arrayPositions,
		value:          prefixCoded,
		options:        options,
		// not correct, just a place holder until we revisit how fields are
		// represented and can fix this better
		numPlainTextBytes: uint64(8),
	}
}
@ -0,0 +1,9 @@ | |||
# geo support in bleve | |||
First, all of this geo code is a Go adaptation of the [Lucene 5.3.2 sandbox geo support](https://lucene.apache.org/core/5_3_2/sandbox/org/apache/lucene/util/package-summary.html). | |||
## Notes | |||
- All of the APIs will use float64 for lon/lat values. | |||
- When describing a point in function arguments or return values, we always use the order lon, lat. | |||
- High level APIs will use TopLeft and BottomRight to describe bounding boxes. This may not map cleanly to min/max lon/lat when crossing the dateline. The lower level APIs will use min/max lon/lat and require the higher-level code to split boxes accordingly. |
@ -0,0 +1,170 @@ | |||
// Copyright (c) 2017 Couchbase, Inc. | |||
// | |||
// Licensed under the Apache License, Version 2.0 (the "License"); | |||
// you may not use this file except in compliance with the License. | |||
// You may obtain a copy of the License at | |||
// | |||
// http://www.apache.org/licenses/LICENSE-2.0 | |||
// | |||
// Unless required by applicable law or agreed to in writing, software | |||
// distributed under the License is distributed on an "AS IS" BASIS, | |||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
// See the License for the specific language governing permissions and | |||
// limitations under the License. | |||
package geo | |||
import ( | |||
"fmt" | |||
"math" | |||
"github.com/blevesearch/bleve/numeric" | |||
) | |||
// GeoBits is the number of bits used for a single geo point
// Currently this is 32bits for lon and 32bits for lat
var GeoBits uint = 32

// Valid coordinate ranges in degrees, plus their radian equivalents.
var minLon = -180.0
var minLat = -90.0
var maxLon = 180.0
var maxLat = 90.0
var minLonRad = minLon * degreesToRadian
var minLatRad = minLat * degreesToRadian
var maxLonRad = maxLon * degreesToRadian
var maxLatRad = maxLat * degreesToRadian

// geoTolerance is the slack allowed when comparing two coordinate values.
var geoTolerance = 1E-6

// Scale factors mapping the degree ranges onto the 32-bit integer space.
var lonScale = float64((uint64(0x1)<<GeoBits)-1) / 360.0
var latScale = float64((uint64(0x1)<<GeoBits)-1) / 180.0
// MortonHash computes the morton hash value for the provided geo point
// This point is ordered as lon, lat.
func MortonHash(lon, lat float64) uint64 {
	return numeric.Interleave(scaleLon(lon), scaleLat(lat))
}

// scaleLon maps a longitude in [-180, 180] onto the 32-bit integer range.
func scaleLon(lon float64) uint64 {
	rv := uint64((lon - minLon) * lonScale)
	return rv
}

// scaleLat maps a latitude in [-90, 90] onto the 32-bit integer range.
func scaleLat(lat float64) uint64 {
	rv := uint64((lat - minLat) * latScale)
	return rv
}
// MortonUnhashLon extracts the longitude value from the provided morton hash.
func MortonUnhashLon(hash uint64) float64 {
	return unscaleLon(numeric.Deinterleave(hash))
}

// MortonUnhashLat extracts the latitude value from the provided morton hash.
// The latitude occupies the odd bits, hence the shift before deinterleaving.
func MortonUnhashLat(hash uint64) float64 {
	return unscaleLat(numeric.Deinterleave(hash >> 1))
}

// unscaleLon inverts scaleLon, returning degrees of longitude.
func unscaleLon(lon uint64) float64 {
	return (float64(lon) / lonScale) + minLon
}

// unscaleLat inverts scaleLat, returning degrees of latitude.
func unscaleLat(lat uint64) float64 {
	return (float64(lat) / latScale) + minLat
}
// compareGeo will compare two float values and see if they are the same | |||
// taking into consideration a known geo tolerance. | |||
func compareGeo(a, b float64) float64 { | |||
compare := a - b | |||
if math.Abs(compare) <= geoTolerance { | |||
return 0 | |||
} | |||
return compare | |||
} | |||
// RectIntersects checks whether rectangles a and b intersect; boxes that
// merely touch along an edge count as intersecting.
func RectIntersects(aMinX, aMinY, aMaxX, aMaxY, bMinX, bMinY, bMaxX, bMaxY float64) bool {
	// The boxes intersect unless one lies entirely to one side of the other.
	disjoint := aMaxX < bMinX || aMinX > bMaxX || aMaxY < bMinY || aMinY > bMaxY
	return !disjoint
}
// RectWithin checks whether box a lies entirely within box b (shared edges
// allowed).
func RectWithin(aMinX, aMinY, aMaxX, aMaxY, bMinX, bMinY, bMaxX, bMaxY float64) bool {
	// a escapes b if any edge of a lies outside the corresponding edge of b.
	escapes := aMinX < bMinX || aMinY < bMinY || aMaxX > bMaxX || aMaxY > bMaxY
	return !escapes
}
// BoundingBoxContains checks whether the lon/lat point is within the box
// (inclusive, with geoTolerance slack at the edges via compareGeo).
func BoundingBoxContains(lon, lat, minLon, minLat, maxLon, maxLat float64) bool {
	return compareGeo(lon, minLon) >= 0 && compareGeo(lon, maxLon) <= 0 &&
		compareGeo(lat, minLat) >= 0 && compareGeo(lat, maxLat) <= 0
}
// Conversion factors between degrees and radians.
const degreesToRadian = math.Pi / 180
const radiansToDegrees = 180 / math.Pi

// DegreesToRadians converts an angle in degrees to radians
func DegreesToRadians(d float64) float64 {
	return d * degreesToRadian
}

// RadiansToDegrees converts an angle in radians to degrees
func RadiansToDegrees(r float64) float64 {
	return r * radiansToDegrees
}
// earthMeanRadiusMeters is the mean radius of the earth in meters.
var earthMeanRadiusMeters = 6371008.7714

// RectFromPointDistance computes a bounding box enclosing all points within
// dist meters of the given lon/lat point. Return order is
// minLon, maxLat, maxLon, minLat (top-left then bottom-right corner).
// NOTE(review): near the dateline minLon can exceed maxLon after wrapping;
// callers presumably split such boxes — confirm against callers.
func RectFromPointDistance(lon, lat, dist float64) (float64, float64, float64, float64, error) {
	err := checkLongitude(lon)
	if err != nil {
		return 0, 0, 0, 0, err
	}
	err = checkLatitude(lat)
	if err != nil {
		return 0, 0, 0, 0, err
	}
	radLon := DegreesToRadians(lon)
	radLat := DegreesToRadians(lat)
	// Angular distance, padded by 7cm to stay conservative.
	radDistance := (dist + 7e-2) / earthMeanRadiusMeters
	minLatL := radLat - radDistance
	maxLatL := radLat + radDistance
	var minLonL, maxLonL float64
	if minLatL > minLatRad && maxLatL < maxLatRad {
		// Normal case: the circle does not reach either pole.
		deltaLon := asin(sin(radDistance) / cos(radLat))
		minLonL = radLon - deltaLon
		if minLonL < minLonRad {
			minLonL += 2 * math.Pi
		}
		maxLonL = radLon + deltaLon
		if maxLonL > maxLonRad {
			maxLonL -= 2 * math.Pi
		}
	} else {
		// pole is inside distance
		minLatL = math.Max(minLatL, minLatRad)
		maxLatL = math.Min(maxLatL, maxLatRad)
		minLonL = minLonRad
		maxLonL = maxLonRad
	}
	return RadiansToDegrees(minLonL),
		RadiansToDegrees(maxLatL),
		RadiansToDegrees(maxLonL),
		RadiansToDegrees(minLatL),
		nil
}
// checkLatitude validates that latitude is a number within [minLat, maxLat].
func checkLatitude(latitude float64) error {
	if math.IsNaN(latitude) || latitude < minLat || latitude > maxLat {
		return fmt.Errorf("invalid latitude %f; must be between %f and %f", latitude, minLat, maxLat)
	}
	return nil
}
// checkLongitude validates that longitude is a number within [minLon, maxLon].
func checkLongitude(longitude float64) error {
	if math.IsNaN(longitude) || longitude < minLon || longitude > maxLon {
		return fmt.Errorf("invalid longitude %f; must be between %f and %f", longitude, minLon, maxLon)
	}
	return nil
}
@ -0,0 +1,98 @@ | |||
// Copyright (c) 2017 Couchbase, Inc. | |||
// | |||
// Licensed under the Apache License, Version 2.0 (the "License"); | |||
// you may not use this file except in compliance with the License. | |||
// You may obtain a copy of the License at | |||
// | |||
// http://www.apache.org/licenses/LICENSE-2.0 | |||
// | |||
// Unless required by applicable law or agreed to in writing, software | |||
// distributed under the License is distributed on an "AS IS" BASIS, | |||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
// See the License for the specific language governing permissions and | |||
// limitations under the License. | |||
package geo | |||
import ( | |||
"fmt" | |||
"math" | |||
"strconv" | |||
"strings" | |||
) | |||
// distanceUnit pairs a conversion factor to meters with the suffixes that
// select it when parsing a distance string.
type distanceUnit struct {
	conv     float64
	suffixes []string
}

// Supported units, each with its meters-per-unit factor.
var inch = distanceUnit{0.0254, []string{"in", "inch"}}
var yard = distanceUnit{0.9144, []string{"yd", "yards"}}
var feet = distanceUnit{0.3048, []string{"ft", "feet"}}
var kilom = distanceUnit{1000, []string{"km", "kilometers"}}
var nauticalm = distanceUnit{1852.0, []string{"nm", "nauticalmiles"}}
var millim = distanceUnit{0.001, []string{"mm", "millimeters"}}
var centim = distanceUnit{0.01, []string{"cm", "centimeters"}}
var miles = distanceUnit{1609.344, []string{"mi", "miles"}}
var meters = distanceUnit{1, []string{"m", "meters"}}

// distanceUnits lists all units in the order they are tried when parsing;
// longer/more specific suffixes (e.g. "mm", "cm") come before plain "m".
var distanceUnits = []*distanceUnit{
	&inch, &yard, &feet, &kilom, &nauticalm, &millim, &centim, &miles, &meters,
}
// ParseDistance attempts to parse a distance string and return distance in
// meters. Example formats supported:
// "5in" "5inch" "7yd" "7yards" "9ft" "9feet" "11km" "11kilometers"
// "3nm" "3nauticalmiles" "13mm" "13millimeters" "15cm" "15centimeters"
// "17mi" "17miles" "19m" "19meters"
// If the unit cannot be determined, the entire string is parsed and the
// unit of meters is assumed.
// If the number portion cannot be parsed, 0 and the parse error are returned.
func ParseDistance(d string) (float64, error) {
	for _, unit := range distanceUnits {
		for _, unitSuffix := range unit.suffixes {
			if strings.HasSuffix(d, unitSuffix) {
				// Strip the suffix, parse the numeric prefix, convert to meters.
				parsedNum, err := strconv.ParseFloat(d[0:len(d)-len(unitSuffix)], 64)
				if err != nil {
					return 0, err
				}
				return parsedNum * unit.conv, nil
			}
		}
	}
	// no unit matched, try assuming meters?
	parsedNum, err := strconv.ParseFloat(d, 64)
	if err != nil {
		return 0, err
	}
	return parsedNum, nil
}
// ParseDistanceUnit attempts to parse a distance unit and return the
// multiplier for converting this to meters. If the unit cannot be parsed
// then 0 and the error message is returned.
func ParseDistanceUnit(u string) (float64, error) {
	for _, unit := range distanceUnits {
		for _, unitSuffix := range unit.suffixes {
			if u == unitSuffix {
				return unit.conv, nil
			}
		}
	}
	return 0, fmt.Errorf("unknown distance unit: %s", u)
}
// Haversin computes the distance between two points.
// This implementation uses the sloppy math implementations which trade off
// accuracy for performance. The distance returned is in kilometers
// (assuming earthDiameter, defined elsewhere, yields kilometers — TODO confirm).
func Haversin(lon1, lat1, lon2, lat2 float64) float64 {
	x1 := lat1 * degreesToRadian
	x2 := lat2 * degreesToRadian
	h1 := 1 - cos(x1-x2)
	h2 := 1 - cos((lon1-lon2)*degreesToRadian)
	h := (h1 + cos(x1)*cos(x2)*h2) / 2
	// Use the earth diameter at the mean latitude of the two points.
	avgLat := (x1 + x2) / 2
	diameter := earthDiameter(avgLat)

	return diameter * asin(math.Min(1, math.Sqrt(h)))
}
@ -0,0 +1,140 @@ | |||
// Copyright (c) 2017 Couchbase, Inc. | |||
// | |||
// Licensed under the Apache License, Version 2.0 (the "License"); | |||
// you may not use this file except in compliance with the License. | |||
// You may obtain a copy of the License at | |||
// | |||
// http://www.apache.org/licenses/LICENSE-2.0 | |||
// | |||
// Unless required by applicable law or agreed to in writing, software | |||
// distributed under the License is distributed on an "AS IS" BASIS, | |||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
// See the License for the specific language governing permissions and | |||
// limitations under the License. | |||
package geo | |||
import ( | |||
"reflect" | |||
"strings" | |||
) | |||
// ExtractGeoPoint takes an arbitrary interface{} and tries its best to
// interpret it as a geo point. Supported formats:
// Container:
// slice length 2 (GeoJSON)
//  first element lon, second element lat
// map[string]interface{}
//  exact keys lat and lon or lng
// struct
//  w/exported fields case-insensitive match on lat and lon or lng
// struct
//  satisfying the later and loner or lnger interfaces
//
// in all cases values must be some sort of numeric-like thing: int/uint/float
//
// The checks run in the order above and are not exclusive: a later match
// overwrites an earlier one for the same coordinate.
func ExtractGeoPoint(thing interface{}) (lon, lat float64, success bool) {
	var foundLon, foundLat bool
	thingVal := reflect.ValueOf(thing)
	// NOTE(review): reflect.Value.Type() panics when thing is a nil
	// interface — confirm callers never pass nil.
	thingTyp := thingVal.Type()
	// is it a slice
	if thingVal.IsValid() && thingVal.Kind() == reflect.Slice {
		// must be length 2 (GeoJSON order: lon first, then lat)
		if thingVal.Len() == 2 {
			first := thingVal.Index(0)
			if first.CanInterface() {
				firstVal := first.Interface()
				lon, foundLon = extractNumericVal(firstVal)
			}
			second := thingVal.Index(1)
			if second.CanInterface() {
				secondVal := second.Interface()
				lat, foundLat = extractNumericVal(secondVal)
			}
		}
	}
	// is it a map
	if l, ok := thing.(map[string]interface{}); ok {
		// "lon" takes precedence over "lng" for the longitude key
		if lval, ok := l["lon"]; ok {
			lon, foundLon = extractNumericVal(lval)
		} else if lval, ok := l["lng"]; ok {
			lon, foundLon = extractNumericVal(lval)
		}
		if lval, ok := l["lat"]; ok {
			lat, foundLat = extractNumericVal(lval)
		}
	}
	// now try reflection on struct fields
	if thingVal.IsValid() && thingVal.Kind() == reflect.Struct {
		for i := 0; i < thingVal.NumField(); i++ {
			fieldName := thingTyp.Field(i).Name
			// prefix match, so fields like "Longitude"/"Latitude" qualify too
			if strings.HasPrefix(strings.ToLower(fieldName), "lon") {
				if thingVal.Field(i).CanInterface() {
					fieldVal := thingVal.Field(i).Interface()
					lon, foundLon = extractNumericVal(fieldVal)
				}
			}
			if strings.HasPrefix(strings.ToLower(fieldName), "lng") {
				if thingVal.Field(i).CanInterface() {
					fieldVal := thingVal.Field(i).Interface()
					lon, foundLon = extractNumericVal(fieldVal)
				}
			}
			if strings.HasPrefix(strings.ToLower(fieldName), "lat") {
				if thingVal.Field(i).CanInterface() {
					fieldVal := thingVal.Field(i).Interface()
					lat, foundLat = extractNumericVal(fieldVal)
				}
			}
		}
	}
	// last hope, some interfaces
	// lon
	if l, ok := thing.(loner); ok {
		lon = l.Lon()
		foundLon = true
	} else if l, ok := thing.(lnger); ok {
		lon = l.Lng()
		foundLon = true
	}
	// lat
	if l, ok := thing.(later); ok {
		lat = l.Lat()
		foundLat = true
	}
	return lon, lat, foundLon && foundLat
}
// extractNumericVal extracts a numeric value (if possible) from an arbitrary
// interface{} and returns it as a float64, plus whether extraction succeeded.
// Any int/uint/float kind is accepted; everything else returns (0, false).
func extractNumericVal(v interface{}) (float64, bool) {
	val := reflect.ValueOf(v)
	// BUG FIX: the original called val.Type() unconditionally, which panics
	// when v is a nil interface (e.g. a JSON null inside a location value).
	if !val.IsValid() {
		return 0, false
	}
	switch val.Kind() {
	case reflect.Float32, reflect.Float64:
		return val.Float(), true
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return float64(val.Int()), true
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		return float64(val.Uint()), true
	}
	return 0, false
}
// various support interfaces which can be used to find lat/lon

// loner is satisfied by values that expose a longitude accessor.
type loner interface {
	Lon() float64
}

// later is satisfied by values that expose a latitude accessor.
type later interface {
	Lat() float64
}

// lnger is an alternate longitude accessor spelling, checked only when
// the value does not satisfy loner.
type lnger interface {
	Lng() float64
}
@ -0,0 +1,212 @@ | |||
// Copyright (c) 2017 Couchbase, Inc. | |||
// | |||
// Licensed under the Apache License, Version 2.0 (the "License"); | |||
// you may not use this file except in compliance with the License. | |||
// You may obtain a copy of the License at | |||
// | |||
// http://www.apache.org/licenses/LICENSE-2.0 | |||
// | |||
// Unless required by applicable law or agreed to in writing, software | |||
// distributed under the License is distributed on an "AS IS" BASIS, | |||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
// See the License for the specific language governing permissions and | |||
// limitations under the License. | |||
package geo | |||
import ( | |||
"math" | |||
) | |||
var earthDiameterPerLatitude []float64 | |||
var sinTab []float64 | |||
var cosTab []float64 | |||
var asinTab []float64 | |||
var asinDer1DivF1Tab []float64 | |||
var asinDer2DivF2Tab []float64 | |||
var asinDer3DivF3Tab []float64 | |||
var asinDer4DivF4Tab []float64 | |||
const radiusTabsSize = (1 << 10) + 1 | |||
const radiusDelta = (math.Pi / 2) / (radiusTabsSize - 1) | |||
const radiusIndexer = 1 / radiusDelta | |||
const sinCosTabsSize = (1 << 11) + 1 | |||
const asinTabsSize = (1 << 13) + 1 | |||
const oneDivF2 = 1 / 2.0 | |||
const oneDivF3 = 1 / 6.0 | |||
const oneDivF4 = 1 / 24.0 | |||
// 1.57079632673412561417e+00 first 33 bits of pi/2 | |||
var pio2Hi = math.Float64frombits(0x3FF921FB54400000) | |||
// 6.07710050650619224932e-11 pi/2 - PIO2_HI | |||
var pio2Lo = math.Float64frombits(0x3DD0B4611A626331) | |||
var asinPio2Hi = math.Float64frombits(0x3FF921FB54442D18) // 1.57079632679489655800e+00 | |||
var asinPio2Lo = math.Float64frombits(0x3C91A62633145C07) // 6.12323399573676603587e-17 | |||
var asinPs0 = math.Float64frombits(0x3fc5555555555555) // 1.66666666666666657415e-01 | |||
var asinPs1 = math.Float64frombits(0xbfd4d61203eb6f7d) // -3.25565818622400915405e-01 | |||
var asinPs2 = math.Float64frombits(0x3fc9c1550e884455) // 2.01212532134862925881e-01 | |||
var asinPs3 = math.Float64frombits(0xbfa48228b5688f3b) // -4.00555345006794114027e-02 | |||
var asinPs4 = math.Float64frombits(0x3f49efe07501b288) // 7.91534994289814532176e-04 | |||
var asinPs5 = math.Float64frombits(0x3f023de10dfdf709) // 3.47933107596021167570e-05 | |||
var asinQs1 = math.Float64frombits(0xc0033a271c8a2d4b) // -2.40339491173441421878e+00 | |||
var asinQs2 = math.Float64frombits(0x40002ae59c598ac8) // 2.02094576023350569471e+00 | |||
var asinQs3 = math.Float64frombits(0xbfe6066c1b8d0159) // -6.88283971605453293030e-01 | |||
var asinQs4 = math.Float64frombits(0x3fb3b8c5b12e9282) // 7.70381505559019352791e-02 | |||
var twoPiHi = 4 * pio2Hi | |||
var twoPiLo = 4 * pio2Lo | |||
var sinCosDeltaHi = twoPiHi/sinCosTabsSize - 1 | |||
var sinCosDeltaLo = twoPiLo/sinCosTabsSize - 1 | |||
var sinCosIndexer = 1 / (sinCosDeltaHi + sinCosDeltaLo) | |||
var sinCosMaxValueForIntModulo = ((math.MaxInt64 >> 9) / sinCosIndexer) * 0.99 | |||
var asinMaxValueForTabs = math.Sin(73.0 * degreesToRadian) | |||
var asinDelta = asinMaxValueForTabs / (asinTabsSize - 1) | |||
var asinIndexer = 1 / asinDelta | |||
// init populates the sin/cos, asin, and earth-diameter lookup tables used
// by the sloppy math functions in this file.
func init() {
	// initializes the tables used for the sloppy math functions

	// sin and cos
	sinTab = make([]float64, sinCosTabsSize)
	cosTab = make([]float64, sinCosTabsSize)
	// indexes of the angles pi, 2*pi, pi/2 and 3*pi/2 within the table
	sinCosPiIndex := (sinCosTabsSize - 1) / 2
	sinCosPiMul2Index := 2 * sinCosPiIndex
	sinCosPiMul05Index := sinCosPiIndex / 2
	sinCosPiMul15Index := 3 * sinCosPiIndex / 2
	for i := 0; i < sinCosTabsSize; i++ {
		// angle: in [0,2*PI].
		angle := float64(i)*sinCosDeltaHi + float64(i)*sinCosDeltaLo
		sinAngle := math.Sin(angle)
		cosAngle := math.Cos(angle)
		// For indexes corresponding to null cosine or sine, we make sure the value is zero
		// and not an epsilon. This allows for a much better accuracy for results close to zero.
		// NOTE(review): all four special indexes zero sinAngle only; cosAngle
		// is presumably meant to be zeroed at pi/2 and 3*pi/2 — confirm
		// against the upstream (Lucene SloppyMath) implementation.
		if i == sinCosPiIndex {
			sinAngle = 0.0
		} else if i == sinCosPiMul2Index {
			sinAngle = 0.0
		} else if i == sinCosPiMul05Index {
			sinAngle = 0.0
		} else if i == sinCosPiMul15Index {
			sinAngle = 0.0
		}
		sinTab[i] = sinAngle
		cosTab[i] = cosAngle
	}

	// asin: table of asin values plus four derivative-based correction
	// tables (derivative k divided by k!) for Taylor interpolation.
	asinTab = make([]float64, asinTabsSize)
	asinDer1DivF1Tab = make([]float64, asinTabsSize)
	asinDer2DivF2Tab = make([]float64, asinTabsSize)
	asinDer3DivF3Tab = make([]float64, asinTabsSize)
	asinDer4DivF4Tab = make([]float64, asinTabsSize)
	for i := 0; i < asinTabsSize; i++ {
		// x: in [0,ASIN_MAX_VALUE_FOR_TABS].
		x := float64(i) * asinDelta
		asinTab[i] = math.Asin(x)
		// powers of 1/sqrt(1-x^2), the building block of asin derivatives
		oneMinusXSqInv := 1.0 / (1 - x*x)
		oneMinusXSqInv05 := math.Sqrt(oneMinusXSqInv)
		oneMinusXSqInv15 := oneMinusXSqInv05 * oneMinusXSqInv
		oneMinusXSqInv25 := oneMinusXSqInv15 * oneMinusXSqInv
		oneMinusXSqInv35 := oneMinusXSqInv25 * oneMinusXSqInv
		asinDer1DivF1Tab[i] = oneMinusXSqInv05
		asinDer2DivF2Tab[i] = (x * oneMinusXSqInv15) * oneDivF2
		asinDer3DivF3Tab[i] = ((1 + 2*x*x) * oneMinusXSqInv25) * oneDivF3
		asinDer4DivF4Tab[i] = ((5 + 2*x*(2+x*(5-2*x))) * oneMinusXSqInv35) * oneDivF4
	}

	// earth radius: WGS84 semi-major (a) and semi-minor (b) axes in meters
	a := 6378137.0
	b := 6356752.31420
	a2 := a * a
	b2 := b * b
	earthDiameterPerLatitude = make([]float64, radiusTabsSize)
	// endpoints: equatorial diameter at index 0, polar at the last index (km)
	earthDiameterPerLatitude[0] = 2.0 * a / 1000
	earthDiameterPerLatitude[radiusTabsSize-1] = 2.0 * b / 1000
	for i := 1; i < radiusTabsSize-1; i++ {
		lat := math.Pi * float64(i) / (2*radiusTabsSize - 1)
		// geocentric radius of the WGS84 ellipsoid at this latitude
		one := math.Pow(a2*math.Cos(lat), 2)
		two := math.Pow(b2*math.Sin(lat), 2)
		three := math.Pow(float64(a)*math.Cos(lat), 2)
		four := math.Pow(b*math.Sin(lat), 2)
		radius := math.Sqrt((one + two) / (three + four))
		earthDiameterPerLatitude[i] = 2 * radius / 1000
	}
}
// earthDiameter returns an estimation of the earth's diameter at the specified
// latitude in kilometers. The latitude is expected in radians (callers pass
// radian values); the lookup wraps via Mod so out-of-range inputs still index
// into the table.
func earthDiameter(lat float64) float64 {
	index := math.Mod(math.Abs(lat)*radiusIndexer+0.5, float64(len(earthDiameterPerLatitude)))
	if math.IsNaN(index) {
		// NaN latitude: no meaningful diameter
		return 0
	}
	return earthDiameterPerLatitude[int(index)]
}
var pio2 = math.Pi / 2

// sin is a sloppy math (faster) implementation of math.Sin, expressed via
// the shift identity sin(a) = cos(a - pi/2).
func sin(a float64) float64 {
	return cos(a - pio2)
}
// cos is a sloppy math (faster) implementation of math.Cos.
// It looks up the nearest table entry and applies a 3rd-order Taylor
// correction for the residual delta. Very large inputs (where integer
// modulo of the index would overflow) fall back to math.Cos.
func cos(a float64) float64 {
	// cosine is even, so work with the absolute value
	if a < 0.0 {
		a = -a
	}
	if a > sinCosMaxValueForIntModulo {
		return math.Cos(a)
	}
	// index: possibly outside tables range.
	index := int(a*sinCosIndexer + 0.5)
	delta := (a - float64(index)*sinCosDeltaHi) - float64(index)*sinCosDeltaLo
	// Making sure index is within tables range.
	// Last value of each table is the same than first, so we ignore it (tabs size minus one) for modulo.
	index &= (sinCosTabsSize - 2) // index % (SIN_COS_TABS_SIZE-1)
	indexCos := cosTab[index]
	indexSin := sinTab[index]
	// Taylor expansion around the table point:
	// cos(x+d) ~= cos(x) - d*sin(x) - d^2/2*cos(x) + d^3/6*sin(x) + d^4/24*cos(x)
	return indexCos + delta*(-indexSin+delta*(-indexCos*oneDivF2+delta*(indexSin*oneDivF3+delta*indexCos*oneDivF4)))
}
// asin is a sloppy math (faster) implementation of math.Asin | |||
func asin(a float64) float64 { | |||
var negateResult bool | |||
if a < 0 { | |||
a = -a | |||
negateResult = true | |||
} | |||
if a <= asinMaxValueForTabs { | |||
index := int(a*asinIndexer + 0.5) | |||
delta := a - float64(index)*asinDelta | |||
result := asinTab[index] + delta*(asinDer1DivF1Tab[index]+delta*(asinDer2DivF2Tab[index]+delta*(asinDer3DivF3Tab[index]+delta*asinDer4DivF4Tab[index]))) | |||
if negateResult { | |||
return -result | |||
} | |||
return result | |||
} | |||
// value > ASIN_MAX_VALUE_FOR_TABS, or value is NaN | |||
// This part is derived from fdlibm. | |||
if a < 1 { | |||
t := (1.0 - a) * 0.5 | |||
p := t * (asinPs0 + t*(asinPs1+t*(asinPs2+t*(asinPs3+t*(asinPs4+t+asinPs5))))) | |||
q := 1.0 + t*(asinQs1+t*(asinQs2+t*(asinQs3+t*asinQs4))) | |||
s := math.Sqrt(t) | |||
z := s + s*(p/q) | |||
result := asinPio2Hi - ((z + z) - asinPio2Lo) | |||
if negateResult { | |||
return -result | |||
} | |||
return result | |||
} | |||
// value >= 1.0, or value is NaN | |||
if a == 1.0 { | |||
if negateResult { | |||
return -math.Pi / 2 | |||
} | |||
return math.Pi / 2 | |||
} | |||
return math.NaN() | |||
} |
@ -0,0 +1,43 @@ | |||
package numeric | |||
// interleaveMagic holds the bit-group masks used by Interleave and
// Deinterleave (classic "Interleave bits by Binary Magic Numbers").
// Entries 0-5 select groups of 1, 2, 4, 8, 16 and 32 bits; entry 6 is the
// complement of entry 0 (the odd-bit mask).
var interleaveMagic = []uint64{
	0x5555555555555555, // 0101...
	0x3333333333333333, // 0011...
	0x0F0F0F0F0F0F0F0F, // 00001111...
	0x00FF00FF00FF00FF,
	0x0000FFFF0000FFFF,
	0x00000000FFFFFFFF,
	0xAAAAAAAAAAAAAAAA, // 1010...
}

// interleaveShift lists the shift amounts paired with the masks above.
var interleaveShift = []uint{1, 2, 4, 8, 16}
// Interleave the first 32 bits of each uint64 | |||
// apdated from org.apache.lucene.util.BitUtil | |||
// whcih was adapted from: | |||
// http://graphics.stanford.edu/~seander/bithacks.html#InterleaveBMN | |||
func Interleave(v1, v2 uint64) uint64 { | |||
v1 = (v1 | (v1 << interleaveShift[4])) & interleaveMagic[4] | |||
v1 = (v1 | (v1 << interleaveShift[3])) & interleaveMagic[3] | |||
v1 = (v1 | (v1 << interleaveShift[2])) & interleaveMagic[2] | |||
v1 = (v1 | (v1 << interleaveShift[1])) & interleaveMagic[1] | |||
v1 = (v1 | (v1 << interleaveShift[0])) & interleaveMagic[0] | |||
v2 = (v2 | (v2 << interleaveShift[4])) & interleaveMagic[4] | |||
v2 = (v2 | (v2 << interleaveShift[3])) & interleaveMagic[3] | |||
v2 = (v2 | (v2 << interleaveShift[2])) & interleaveMagic[2] | |||
v2 = (v2 | (v2 << interleaveShift[1])) & interleaveMagic[1] | |||
v2 = (v2 | (v2 << interleaveShift[0])) & interleaveMagic[0] | |||
return (v2 << 1) | v1 | |||
} | |||
// Deinterleave the 32-bit value starting at position 0 | |||
// to get the other 32-bit value, shift it by 1 first | |||
func Deinterleave(b uint64) uint64 { | |||
b &= interleaveMagic[0] | |||
b = (b ^ (b >> interleaveShift[0])) & interleaveMagic[1] | |||
b = (b ^ (b >> interleaveShift[1])) & interleaveMagic[2] | |||
b = (b ^ (b >> interleaveShift[2])) & interleaveMagic[3] | |||
b = (b ^ (b >> interleaveShift[3])) & interleaveMagic[4] | |||
b = (b ^ (b >> interleaveShift[4])) & interleaveMagic[5] | |||
return b | |||
} |
@ -0,0 +1,113 @@ | |||
// Copyright (c) 2017 Couchbase, Inc. | |||
// | |||
// Licensed under the Apache License, Version 2.0 (the "License"); | |||
// you may not use this file except in compliance with the License. | |||
// You may obtain a copy of the License at | |||
// | |||
// http://www.apache.org/licenses/LICENSE-2.0 | |||
// | |||
// Unless required by applicable law or agreed to in writing, software | |||
// distributed under the License is distributed on an "AS IS" BASIS, | |||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
// See the License for the specific language governing permissions and | |||
// limitations under the License. | |||
package query | |||
import ( | |||
"encoding/json" | |||
"fmt" | |||
"github.com/blevesearch/bleve/geo" | |||
"github.com/blevesearch/bleve/index" | |||
"github.com/blevesearch/bleve/mapping" | |||
"github.com/blevesearch/bleve/search" | |||
"github.com/blevesearch/bleve/search/searcher" | |||
) | |||
// GeoBoundingBoxQuery searches the named field for geo points falling
// inside the rectangle described by its corners. Each corner is stored as
// []float64{lon, lat} (GeoJSON ordering).
type GeoBoundingBoxQuery struct {
	TopLeft     []float64 `json:"top_left,omitempty"`
	BottomRight []float64 `json:"bottom_right,omitempty"`
	FieldVal    string    `json:"field,omitempty"`
	BoostVal    *Boost    `json:"boost,omitempty"`
}
func NewGeoBoundingBoxQuery(topLeftLon, topLeftLat, bottomRightLon, bottomRightLat float64) *GeoBoundingBoxQuery { | |||
return &GeoBoundingBoxQuery{ | |||
TopLeft: []float64{topLeftLon, topLeftLat}, | |||
BottomRight: []float64{bottomRightLon, bottomRightLat}, | |||
} | |||
} | |||
// SetBoost sets the boost applied to this query's matches.
func (q *GeoBoundingBoxQuery) SetBoost(b float64) {
	boost := Boost(b)
	q.BoostVal = &boost
}

// Boost returns the effective boost value (BoostVal.Value() — presumably
// the Boost type supplies a default when BoostVal is nil; defined elsewhere
// in this package).
func (q *GeoBoundingBoxQuery) Boost() float64 {
	return q.BoostVal.Value()
}

// SetField sets the document field to search.
func (q *GeoBoundingBoxQuery) SetField(f string) {
	q.FieldVal = f
}

// Field returns the document field this query searches.
func (q *GeoBoundingBoxQuery) Field() string {
	return q.FieldVal
}
// Searcher builds the searcher for this query. When the box crosses the
// international date line (bottom-right lon < top-left lon) it is rewritten
// as the disjunction of two boxes split at +/-180 degrees.
func (q *GeoBoundingBoxQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) {
	field := q.FieldVal
	if q.FieldVal == "" {
		field = m.DefaultSearchField()
	}
	if q.BottomRight[0] < q.TopLeft[0] {
		// cross date line, rewrite as two parts
		leftSearcher, err := searcher.NewGeoBoundingBoxSearcher(i, -180, q.BottomRight[1], q.BottomRight[0], q.TopLeft[1], field, q.BoostVal.Value(), options, true)
		if err != nil {
			return nil, err
		}
		rightSearcher, err := searcher.NewGeoBoundingBoxSearcher(i, q.TopLeft[0], q.BottomRight[1], 180, q.TopLeft[1], field, q.BoostVal.Value(), options, true)
		if err != nil {
			// avoid leaking the already-opened left searcher
			_ = leftSearcher.Close()
			return nil, err
		}
		return searcher.NewDisjunctionSearcher(i, []search.Searcher{leftSearcher, rightSearcher}, 0, options)
	}
	return searcher.NewGeoBoundingBoxSearcher(i, q.TopLeft[0], q.BottomRight[1], q.BottomRight[0], q.TopLeft[1], field, q.BoostVal.Value(), options, true)
}
// Validate performs validation of this query; there are currently no
// invalid states, so it always returns nil.
func (q *GeoBoundingBoxQuery) Validate() error {
	return nil
}
func (q *GeoBoundingBoxQuery) UnmarshalJSON(data []byte) error { | |||
tmp := struct { | |||
TopLeft interface{} `json:"top_left,omitempty"` | |||
BottomRight interface{} `json:"bottom_right,omitempty"` | |||
FieldVal string `json:"field,omitempty"` | |||
BoostVal *Boost `json:"boost,omitempty"` | |||
}{} | |||
err := json.Unmarshal(data, &tmp) | |||
if err != nil { | |||
return err | |||
} | |||
// now use our generic point parsing code from the geo package | |||
lon, lat, found := geo.ExtractGeoPoint(tmp.TopLeft) | |||
if !found { | |||
return fmt.Errorf("geo location top_left not in a valid format") | |||
} | |||
q.TopLeft = []float64{lon, lat} | |||
lon, lat, found = geo.ExtractGeoPoint(tmp.BottomRight) | |||
if !found { | |||
return fmt.Errorf("geo location bottom_right not in a valid format") | |||
} | |||
q.BottomRight = []float64{lon, lat} | |||
q.FieldVal = tmp.FieldVal | |||
q.BoostVal = tmp.BoostVal | |||
return nil | |||
} |
@ -0,0 +1,100 @@ | |||
// Copyright (c) 2017 Couchbase, Inc. | |||
// | |||
// Licensed under the Apache License, Version 2.0 (the "License"); | |||
// you may not use this file except in compliance with the License. | |||
// You may obtain a copy of the License at | |||
// | |||
// http://www.apache.org/licenses/LICENSE-2.0 | |||
// | |||
// Unless required by applicable law or agreed to in writing, software | |||
// distributed under the License is distributed on an "AS IS" BASIS, | |||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
// See the License for the specific language governing permissions and | |||
// limitations under the License. | |||
package query | |||
import ( | |||
"encoding/json" | |||
"fmt" | |||
"github.com/blevesearch/bleve/geo" | |||
"github.com/blevesearch/bleve/index" | |||
"github.com/blevesearch/bleve/mapping" | |||
"github.com/blevesearch/bleve/search" | |||
"github.com/blevesearch/bleve/search/searcher" | |||
) | |||
// GeoDistanceQuery searches the named field for geo points within the
// given distance of a center location. Location is []float64{lon, lat};
// Distance is a string parseable by geo.ParseDistance (e.g. "5km").
type GeoDistanceQuery struct {
	Location []float64 `json:"location,omitempty"`
	Distance string    `json:"distance,omitempty"`
	FieldVal string    `json:"field,omitempty"`
	BoostVal *Boost    `json:"boost,omitempty"`
}
func NewGeoDistanceQuery(lon, lat float64, distance string) *GeoDistanceQuery { | |||
return &GeoDistanceQuery{ | |||
Location: []float64{lon, lat}, | |||
Distance: distance, | |||
} | |||
} | |||
// SetBoost sets the boost applied to this query's matches.
func (q *GeoDistanceQuery) SetBoost(b float64) {
	boost := Boost(b)
	q.BoostVal = &boost
}

// Boost returns the effective boost value (BoostVal.Value() — presumably
// the Boost type supplies a default when BoostVal is nil; defined elsewhere
// in this package).
func (q *GeoDistanceQuery) Boost() float64 {
	return q.BoostVal.Value()
}

// SetField sets the document field to search.
func (q *GeoDistanceQuery) SetField(f string) {
	q.FieldVal = f
}

// Field returns the document field this query searches.
func (q *GeoDistanceQuery) Field() string {
	return q.FieldVal
}
// Searcher builds the searcher for this query. The distance string is
// converted to meters via geo.ParseDistance before constructing the
// point-distance searcher; a parse failure aborts the search.
func (q *GeoDistanceQuery) Searcher(i index.IndexReader, m mapping.IndexMapping,
	options search.SearcherOptions) (search.Searcher, error) {
	field := q.FieldVal
	if q.FieldVal == "" {
		field = m.DefaultSearchField()
	}
	dist, err := geo.ParseDistance(q.Distance)
	if err != nil {
		return nil, err
	}
	return searcher.NewGeoPointDistanceSearcher(i, q.Location[0], q.Location[1],
		dist, field, q.BoostVal.Value(), options)
}
// Validate performs validation of this query; there are currently no
// invalid states, so it always returns nil. (The distance string is only
// checked later, in Searcher.)
func (q *GeoDistanceQuery) Validate() error {
	return nil
}
// UnmarshalJSON decodes the query, accepting any location representation
// understood by geo.ExtractGeoPoint (GeoJSON slice, map, struct, ...).
func (q *GeoDistanceQuery) UnmarshalJSON(data []byte) error {
	// decode location loosely first; its concrete shape varies
	tmp := struct {
		Location interface{} `json:"location,omitempty"`
		Distance string      `json:"distance,omitempty"`
		FieldVal string      `json:"field,omitempty"`
		BoostVal *Boost      `json:"boost,omitempty"`
	}{}
	err := json.Unmarshal(data, &tmp)
	if err != nil {
		return err
	}
	// now use our generic point parsing code from the geo package
	lon, lat, found := geo.ExtractGeoPoint(tmp.Location)
	if !found {
		return fmt.Errorf("geo location not in a valid format")
	}
	q.Location = []float64{lon, lat}
	q.Distance = tmp.Distance
	q.FieldVal = tmp.FieldVal
	q.BoostVal = tmp.BoostVal
	return nil
}
@ -0,0 +1,80 @@ | |||
// Copyright (c) 2014 Couchbase, Inc. | |||
// | |||
// Licensed under the Apache License, Version 2.0 (the "License"); | |||
// you may not use this file except in compliance with the License. | |||
// You may obtain a copy of the License at | |||
// | |||
// http://www.apache.org/licenses/LICENSE-2.0 | |||
// | |||
// Unless required by applicable law or agreed to in writing, software | |||
// distributed under the License is distributed on an "AS IS" BASIS, | |||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
// See the License for the specific language governing permissions and | |||
// limitations under the License. | |||
package query | |||
import ( | |||
"encoding/json" | |||
"fmt" | |||
"github.com/blevesearch/bleve/index" | |||
"github.com/blevesearch/bleve/mapping" | |||
"github.com/blevesearch/bleve/search" | |||
"github.com/blevesearch/bleve/search/searcher" | |||
) | |||
// MultiPhraseQuery is like a phrase query, but each position in the phrase
// may be satisfied by any one of a list of terms
// (see NewMultiPhraseQuery for the full contract).
type MultiPhraseQuery struct {
	Terms    [][]string `json:"terms"`
	Field    string     `json:"field,omitempty"`
	BoostVal *Boost     `json:"boost,omitempty"`
}
// NewMultiPhraseQuery creates a new Query for finding | |||
// term phrases in the index. | |||
// It is like PhraseQuery, but each position in the | |||
// phrase may be satisfied by a list of terms | |||
// as opposed to just one. | |||
// At least one of the terms must exist in the correct | |||
// order, at the correct index offsets, in the | |||
// specified field. Queried field must have been indexed with | |||
// IncludeTermVectors set to true. | |||
func NewMultiPhraseQuery(terms [][]string, field string) *MultiPhraseQuery { | |||
return &MultiPhraseQuery{ | |||
Terms: terms, | |||
Field: field, | |||
} | |||
} | |||
// SetBoost sets the boost applied to this query's matches.
func (q *MultiPhraseQuery) SetBoost(b float64) {
	boost := Boost(b)
	q.BoostVal = &boost
}

// Boost returns the effective boost value (BoostVal.Value() — presumably
// the Boost type supplies a default when BoostVal is nil; defined elsewhere
// in this package).
func (q *MultiPhraseQuery) Boost() float64 {
	return q.BoostVal.Value()
}
// Searcher builds the searcher for this query.
// NOTE(review): q.BoostVal is not passed to NewMultiPhraseSearcher —
// confirm whether boost is intentionally ignored for multi-phrase queries.
func (q *MultiPhraseQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) {
	return searcher.NewMultiPhraseSearcher(i, q.Terms, q.Field, options)
}
func (q *MultiPhraseQuery) Validate() error { | |||
if len(q.Terms) < 1 { | |||
return fmt.Errorf("phrase query must contain at least one term") | |||
} | |||
return nil | |||
} | |||
func (q *MultiPhraseQuery) UnmarshalJSON(data []byte) error { | |||
type _mphraseQuery MultiPhraseQuery | |||
tmp := _mphraseQuery{} | |||
err := json.Unmarshal(data, &tmp) | |||
if err != nil { | |||
return err | |||
} | |||
q.Terms = tmp.Terms | |||
q.Field = tmp.Field | |||
q.BoostVal = tmp.BoostVal | |||
return nil | |||
} |
@ -0,0 +1,95 @@ | |||
// Copyright (c) 2017 Couchbase, Inc. | |||
// | |||
// Licensed under the Apache License, Version 2.0 (the "License"); | |||
// you may not use this file except in compliance with the License. | |||
// You may obtain a copy of the License at | |||
// | |||
// http://www.apache.org/licenses/LICENSE-2.0 | |||
// | |||
// Unless required by applicable law or agreed to in writing, software | |||
// distributed under the License is distributed on an "AS IS" BASIS, | |||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
// See the License for the specific language governing permissions and | |||
// limitations under the License. | |||
package query | |||
import ( | |||
"fmt" | |||
"github.com/blevesearch/bleve/index" | |||
"github.com/blevesearch/bleve/mapping" | |||
"github.com/blevesearch/bleve/search" | |||
"github.com/blevesearch/bleve/search/searcher" | |||
) | |||
// TermRangeQuery searches the named field for terms between Min and Max.
// Empty Min/Max mean an open endpoint; InclusiveMin/InclusiveMax control
// whether the endpoints themselves match (nil defers to the searcher's
// defaults — see NewTermRangeQuery).
type TermRangeQuery struct {
	Min          string `json:"min,omitempty"`
	Max          string `json:"max,omitempty"`
	InclusiveMin *bool  `json:"inclusive_min,omitempty"`
	InclusiveMax *bool  `json:"inclusive_max,omitempty"`
	FieldVal     string `json:"field,omitempty"`
	BoostVal     *Boost `json:"boost,omitempty"`
}
// NewTermRangeQuery creates a new Query for ranges
// of text term values.
// Either, but not both endpoints can be nil.
// The minimum value is inclusive.
// The maximum value is exclusive.
func NewTermRangeQuery(min, max string) *TermRangeQuery {
	// nil inclusivity flags defer to the searcher defaults described above
	return NewTermRangeInclusiveQuery(min, max, nil, nil)
}
// NewTermRangeInclusiveQuery creates a new Query for ranges
// of text term values. (The comment previously said "numeric values" —
// a copy-paste from the numeric range query; this query ranges over terms.)
// Either, but not both endpoints can be nil.
// Control endpoint inclusion with inclusiveMin, inclusiveMax.
func NewTermRangeInclusiveQuery(min, max string, minInclusive, maxInclusive *bool) *TermRangeQuery {
	return &TermRangeQuery{
		Min:          min,
		Max:          max,
		InclusiveMin: minInclusive,
		InclusiveMax: maxInclusive,
	}
}
// SetBoost sets the boost applied to this query's matches.
func (q *TermRangeQuery) SetBoost(b float64) {
	boost := Boost(b)
	q.BoostVal = &boost
}

// Boost returns the effective boost value (BoostVal.Value() — presumably
// the Boost type supplies a default when BoostVal is nil; defined elsewhere
// in this package).
func (q *TermRangeQuery) Boost() float64 {
	return q.BoostVal.Value()
}

// SetField sets the document field to search.
func (q *TermRangeQuery) SetField(f string) {
	q.FieldVal = f
}

// Field returns the document field this query searches.
func (q *TermRangeQuery) Field() string {
	return q.FieldVal
}
func (q *TermRangeQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) { | |||
field := q.FieldVal | |||
if q.FieldVal == "" { | |||
field = m.DefaultSearchField() | |||
} | |||
var minTerm []byte | |||
if q.Min != "" { | |||
minTerm = []byte(q.Min) | |||
} | |||
var maxTerm []byte | |||
if q.Max != "" { | |||
maxTerm = []byte(q.Max) | |||
} | |||
return searcher.NewTermRangeSearcher(i, minTerm, maxTerm, q.InclusiveMin, q.InclusiveMax, field, q.BoostVal.Value(), options) | |||
} | |||
func (q *TermRangeQuery) Validate() error { | |||
if q.Min == "" && q.Min == q.Max { | |||
return fmt.Errorf("term range query must specify min or max") | |||
} | |||
return nil | |||
} |
@ -0,0 +1,88 @@ | |||
// Copyright (c) 2017 Couchbase, Inc. | |||
// | |||
// Licensed under the Apache License, Version 2.0 (the "License"); | |||
// you may not use this file except in compliance with the License. | |||
// You may obtain a copy of the License at | |||
// | |||
// http://www.apache.org/licenses/LICENSE-2.0 | |||
// | |||
// Unless required by applicable law or agreed to in writing, software | |||
// distributed under the License is distributed on an "AS IS" BASIS, | |||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
// See the License for the specific language governing permissions and | |||
// limitations under the License. | |||
package searcher | |||
import ( | |||
"github.com/blevesearch/bleve/index" | |||
"github.com/blevesearch/bleve/search" | |||
) | |||
// FilterFunc defines a function which can filter documents
// returning true means keep the document
// returning false means do not keep the document
type FilterFunc func(d *search.DocumentMatch) bool

// FilteringSearcher wraps any other searcher, but checks any Next/Advance
// call against the supplied FilterFunc
type FilteringSearcher struct {
	child  search.Searcher // underlying searcher producing candidate matches
	accept FilterFunc      // predicate; only accepted matches are returned
}
func NewFilteringSearcher(s search.Searcher, filter FilterFunc) *FilteringSearcher { | |||
return &FilteringSearcher{ | |||
child: s, | |||
accept: filter, | |||
} | |||
} | |||
func (f *FilteringSearcher) Next(ctx *search.SearchContext) (*search.DocumentMatch, error) { | |||
next, err := f.child.Next(ctx) | |||
for next != nil && err == nil { | |||
if f.accept(next) { | |||
return next, nil | |||
} | |||
next, err = f.child.Next(ctx) | |||
} | |||
return nil, err | |||
} | |||
func (f *FilteringSearcher) Advance(ctx *search.SearchContext, ID index.IndexInternalID) (*search.DocumentMatch, error) { | |||
adv, err := f.child.Advance(ctx, ID) | |||
if err != nil { | |||
return nil, err | |||
} | |||
if adv == nil { | |||
return nil, nil | |||
} | |||
if f.accept(adv) { | |||
return adv, nil | |||
} | |||
return f.Next(ctx) | |||
} | |||
// Close releases the underlying child searcher.
func (f *FilteringSearcher) Close() error {
	return f.child.Close()
}

// Weight delegates to the child searcher.
func (f *FilteringSearcher) Weight() float64 {
	return f.child.Weight()
}

// SetQueryNorm delegates to the child searcher.
func (f *FilteringSearcher) SetQueryNorm(n float64) {
	f.child.SetQueryNorm(n)
}

// Count delegates to the child searcher. Note this is the child's count;
// it does not account for documents the filter will reject.
func (f *FilteringSearcher) Count() uint64 {
	return f.child.Count()
}

// Min delegates to the child searcher.
func (f *FilteringSearcher) Min() int {
	return f.child.Min()
}

// DocumentMatchPoolSize delegates to the child searcher.
func (f *FilteringSearcher) DocumentMatchPoolSize() int {
	return f.child.DocumentMatchPoolSize()
}
@ -0,0 +1,173 @@ | |||
// Copyright (c) 2017 Couchbase, Inc. | |||
// | |||
// Licensed under the Apache License, Version 2.0 (the "License"); | |||
// you may not use this file except in compliance with the License. | |||
// You may obtain a copy of the License at | |||
// | |||
// http://www.apache.org/licenses/LICENSE-2.0 | |||
// | |||
// Unless required by applicable law or agreed to in writing, software | |||
// distributed under the License is distributed on an "AS IS" BASIS, | |||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
// See the License for the specific language governing permissions and | |||
// limitations under the License. | |||
package searcher | |||
import ( | |||
"github.com/blevesearch/bleve/document" | |||
"github.com/blevesearch/bleve/geo" | |||
"github.com/blevesearch/bleve/index" | |||
"github.com/blevesearch/bleve/numeric" | |||
"github.com/blevesearch/bleve/search" | |||
) | |||
// NewGeoBoundingBoxSearcher returns a searcher matching points inside the
// lon/lat rectangle. ComputeGeoRange splits the box into terms wholly
// inside it (used as-is) and terms on the boundary (wrapped in a
// FilteringSearcher that re-checks each candidate point exactly). The two
// sets are combined with a disjunction; if neither is non-empty, a
// match-none searcher is returned.
func NewGeoBoundingBoxSearcher(indexReader index.IndexReader, minLon, minLat,
	maxLon, maxLat float64, field string, boost float64,
	options search.SearcherOptions, checkBoundaries bool) (
	search.Searcher, error) {
	// track list of opened searchers, for cleanup on early exit
	var openedSearchers []search.Searcher
	cleanupOpenedSearchers := func() {
		for _, s := range openedSearchers {
			_ = s.Close()
		}
	}

	// do math to produce list of terms needed for this search
	onBoundaryTerms, notOnBoundaryTerms := ComputeGeoRange(0, (geo.GeoBits<<1)-1,
		minLon, minLat, maxLon, maxLat, checkBoundaries)

	var onBoundarySearcher search.Searcher
	if len(onBoundaryTerms) > 0 {
		rawOnBoundarySearcher, err := NewMultiTermSearcherBytes(indexReader,
			onBoundaryTerms, field, boost, options, false)
		if err != nil {
			return nil, err
		}
		// add filter to check points near the boundary
		onBoundarySearcher = NewFilteringSearcher(rawOnBoundarySearcher,
			buildRectFilter(indexReader, field, minLon, minLat, maxLon, maxLat))
		openedSearchers = append(openedSearchers, onBoundarySearcher)
	}

	var notOnBoundarySearcher search.Searcher
	if len(notOnBoundaryTerms) > 0 {
		var err error
		notOnBoundarySearcher, err = NewMultiTermSearcherBytes(indexReader,
			notOnBoundaryTerms, field, boost, options, false)
		if err != nil {
			// close the boundary searcher opened above
			cleanupOpenedSearchers()
			return nil, err
		}
		openedSearchers = append(openedSearchers, notOnBoundarySearcher)
	}

	if onBoundarySearcher != nil && notOnBoundarySearcher != nil {
		// both term sets non-empty: a point matches either way
		rv, err := NewDisjunctionSearcher(indexReader,
			[]search.Searcher{
				onBoundarySearcher,
				notOnBoundarySearcher,
			},
			0, options)
		if err != nil {
			cleanupOpenedSearchers()
			return nil, err
		}
		return rv, nil
	} else if onBoundarySearcher != nil {
		return onBoundarySearcher, nil
	} else if notOnBoundarySearcher != nil {
		return notOnBoundarySearcher, nil
	}
	// no terms at all: nothing can match
	return NewMatchNoneSearcher(indexReader)
}
// geoMaxShift caps how coarse a prefix-coded cell the range computation will
// emit (four precision steps above the finest level).
var geoMaxShift = document.GeoPrecisionStep * 4

// geoDetailLevel is the recursion depth at which relateAndRecurse stops
// subdividing and accepts intersecting cells; derived from the total number
// of interleaved lon/lat bits minus geoMaxShift.
var geoDetailLevel = ((geo.GeoBits << 1) - geoMaxShift) / 2
func ComputeGeoRange(term uint64, shift uint, | |||
sminLon, sminLat, smaxLon, smaxLat float64, | |||
checkBoundaries bool) ( | |||
onBoundary [][]byte, notOnBoundary [][]byte) { | |||
split := term | uint64(0x1)<<shift | |||
var upperMax uint64 | |||
if shift < 63 { | |||
upperMax = term | ((uint64(1) << (shift + 1)) - 1) | |||
} else { | |||
upperMax = 0xffffffffffffffff | |||
} | |||
lowerMax := split - 1 | |||
onBoundary, notOnBoundary = relateAndRecurse(term, lowerMax, shift, | |||
sminLon, sminLat, smaxLon, smaxLat, checkBoundaries) | |||
plusOnBoundary, plusNotOnBoundary := relateAndRecurse(split, upperMax, shift, | |||
sminLon, sminLat, smaxLon, smaxLat, checkBoundaries) | |||
onBoundary = append(onBoundary, plusOnBoundary...) | |||
notOnBoundary = append(notOnBoundary, plusNotOnBoundary...) | |||
return | |||
} | |||
func relateAndRecurse(start, end uint64, res uint, | |||
sminLon, sminLat, smaxLon, smaxLat float64, | |||
checkBoundaries bool) ( | |||
onBoundary [][]byte, notOnBoundary [][]byte) { | |||
minLon := geo.MortonUnhashLon(start) | |||
minLat := geo.MortonUnhashLat(start) | |||
maxLon := geo.MortonUnhashLon(end) | |||
maxLat := geo.MortonUnhashLat(end) | |||
level := ((geo.GeoBits << 1) - res) >> 1 | |||
within := res%document.GeoPrecisionStep == 0 && | |||
geo.RectWithin(minLon, minLat, maxLon, maxLat, | |||
sminLon, sminLat, smaxLon, smaxLat) | |||
if within || (level == geoDetailLevel && | |||
geo.RectIntersects(minLon, minLat, maxLon, maxLat, | |||
sminLon, sminLat, smaxLon, smaxLat)) { | |||
if !within && checkBoundaries { | |||
return [][]byte{ | |||
numeric.MustNewPrefixCodedInt64(int64(start), res), | |||
}, nil | |||
} | |||
return nil, | |||
[][]byte{ | |||
numeric.MustNewPrefixCodedInt64(int64(start), res), | |||
} | |||
} else if level < geoDetailLevel && | |||
geo.RectIntersects(minLon, minLat, maxLon, maxLat, | |||
sminLon, sminLat, smaxLon, smaxLat) { | |||
return ComputeGeoRange(start, res-1, sminLon, sminLat, smaxLon, smaxLat, | |||
checkBoundaries) | |||
} | |||
return nil, nil | |||
} | |||
func buildRectFilter(indexReader index.IndexReader, field string, | |||
minLon, minLat, maxLon, maxLat float64) FilterFunc { | |||
return func(d *search.DocumentMatch) bool { | |||
var lon, lat float64 | |||
var found bool | |||
err := indexReader.DocumentVisitFieldTerms(d.IndexInternalID, | |||
[]string{field}, func(field string, term []byte) { | |||
// only consider the values which are shifted 0 | |||
prefixCoded := numeric.PrefixCoded(term) | |||
shift, err := prefixCoded.Shift() | |||
if err == nil && shift == 0 { | |||
var i64 int64 | |||
i64, err = prefixCoded.Int64() | |||
if err == nil { | |||
lon = geo.MortonUnhashLon(uint64(i64)) | |||
lat = geo.MortonUnhashLat(uint64(i64)) | |||
found = true | |||
} | |||
} | |||
}) | |||
if err == nil && found { | |||
return geo.BoundingBoxContains(lon, lat, | |||
minLon, minLat, maxLon, maxLat) | |||
} | |||
return false | |||
} | |||
} |