branch: master
parent: 9ed53c342a
commit: 2ac6c072ee
@@ -0,0 +1,9 @@
### es

> Copied the ES query-parameter builder code from gitea.drugeyes.vip/pharnexcloud/elastic/v7
> to make it easier to construct query bodies for github.com/elastic/go-elasticsearch/v7.
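A minimal sketch of the intended workflow. The import path of this package is a placeholder, the index and field names are illustrative, and the client calls follow the official go-elasticsearch/v7 README; treat this as an assumption-laden example rather than part of the commit.

```go
package main

import (
	"bytes"
	"encoding/json"
	"log"

	"github.com/elastic/go-elasticsearch/v7"

	es "example.com/yourmodule/es" // placeholder: replace with this package's real import path
)

func main() {
	// Build the query body with the builders added in this commit.
	q := es.NewBoolQuery().
		Must(es.NewMatchQuery("title", "aspirin")).
		Filter(es.NewPrefixQuery("category", "drug"))

	src, err := q.Source()
	if err != nil {
		log.Fatal(err)
	}

	// Wrap it in the standard search envelope and serialize it.
	var buf bytes.Buffer
	if err := json.NewEncoder(&buf).Encode(map[string]interface{}{"query": src}); err != nil {
		log.Fatal(err)
	}

	// Hand the buffer to the official go-elasticsearch v7 client.
	client, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatal(err)
	}
	res, err := client.Search(
		client.Search.WithIndex("medicines"), // illustrative index name
		client.Search.WithBody(&buf),
	)
	if err != nil {
		log.Fatal(err)
	}
	defer res.Body.Close()
	log.Println(res.Status())
}
```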
@@ -0,0 +1,13 @@
// Copyright 2012-present Oliver Eilhard. All rights reserved.
// Use of this source code is governed by a MIT-license.
// See http://olivere.mit-license.org/license.txt for details.

package es

// Query represents the generic query interface. A query's sole purpose
// is to return the source of the query as a JSON-serializable object.
// Returning map[string]interface{} is the norm for queries.
type Query interface {
	// Source returns the JSON-serializable query request.
	Source() (interface{}, error)
}
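As a quick illustration (not part of the commit): any type whose Source method returns a JSON-serializable value satisfies this interface. A hypothetical adapter for a pre-built map could look like this:

```go
// RawQuery is a hypothetical adapter: it wraps an already-built request
// fragment so it can be passed wherever a Query is expected.
type RawQuery struct {
	body map[string]interface{}
}

// Source returns the wrapped fragment unchanged.
func (q RawQuery) Source() (interface{}, error) {
	return q.body, nil
}

// Usage: RawQuery{body: map[string]interface{}{"match_all": map[string]interface{}{}}}
```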
@@ -0,0 +1,160 @@
||||
// Copyright 2012-present Oliver Eilhard. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-license.
|
||||
// See http://olivere.mit-license.org/license.txt for details.
|
||||
|
||||
package es |
||||
|
||||
import ( |
||||
"encoding/json" |
||||
"errors" |
||||
"fmt" |
||||
"strings" |
||||
) |
||||
|
||||
// Script holds all the parameters necessary to compile or find in cache
|
||||
// and then execute a script.
|
||||
//
|
||||
// See https://www.elastic.co/guide/en/elasticsearch/reference/7.0/modules-scripting.html
|
||||
// for details of scripting.
|
||||
type Script struct { |
||||
script string |
||||
typ string |
||||
lang string |
||||
params map[string]interface{} |
||||
} |
||||
|
||||
// NewScript creates and initializes a new Script. By default, it is of
|
||||
// type "inline". Use NewScriptStored for a stored script (where type is "id").
|
||||
func NewScript(script string) *Script { |
||||
return &Script{ |
||||
script: script, |
||||
typ: "inline", |
||||
params: make(map[string]interface{}), |
||||
} |
||||
} |
||||
|
||||
// NewScriptInline creates and initializes a new inline script, i.e. code.
|
||||
func NewScriptInline(script string) *Script { |
||||
return NewScript(script).Type("inline") |
||||
} |
||||
|
||||
// NewScriptStored creates and initializes a new stored script.
|
||||
func NewScriptStored(script string) *Script { |
||||
return NewScript(script).Type("id") |
||||
} |
||||
|
||||
// Script is either the cache key of the script to be compiled/executed
|
||||
// or the actual script source code for inline scripts. For indexed
|
||||
// scripts this is the id used in the request. For file scripts this is
|
||||
// the file name.
|
||||
func (s *Script) Script(script string) *Script { |
||||
s.script = script |
||||
return s |
||||
} |
||||
|
||||
// Type sets the type of script: "inline" or "id".
|
||||
func (s *Script) Type(typ string) *Script { |
||||
s.typ = typ |
||||
return s |
||||
} |
||||
|
||||
// Lang sets the language of the script. The default scripting language
|
||||
// is Painless ("painless").
|
||||
// See https://www.elastic.co/guide/en/elasticsearch/reference/7.0/modules-scripting.html
|
||||
// for details.
|
||||
func (s *Script) Lang(lang string) *Script { |
||||
s.lang = lang |
||||
return s |
||||
} |
||||
|
||||
// Param adds a key/value pair to the parameters that this script will be executed with.
|
||||
func (s *Script) Param(name string, value interface{}) *Script { |
||||
if s.params == nil { |
||||
s.params = make(map[string]interface{}) |
||||
} |
||||
s.params[name] = value |
||||
return s |
||||
} |
||||
|
||||
// Params sets the map of parameters this script will be executed with.
|
||||
func (s *Script) Params(params map[string]interface{}) *Script { |
||||
s.params = params |
||||
return s |
||||
} |
||||
|
||||
// Source returns the JSON serializable data for this Script.
|
||||
func (s *Script) Source() (interface{}, error) { |
||||
if s.typ == "" && s.lang == "" && len(s.params) == 0 { |
||||
return s.script, nil |
||||
} |
||||
source := make(map[string]interface{}) |
||||
// Beginning with 6.0, the type can only be "source" or "id"
|
||||
if s.typ == "" || s.typ == "inline" { |
||||
src, err := s.rawScriptSource(s.script) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
source["source"] = src |
||||
} else { |
||||
source["id"] = s.script |
||||
} |
||||
if s.lang != "" { |
||||
source["lang"] = s.lang |
||||
} |
||||
if len(s.params) > 0 { |
||||
source["params"] = s.params |
||||
} |
||||
return source, nil |
||||
} |
||||
|
||||
// rawScriptSource returns an embeddable script. If it uses a short
|
||||
// script form, e.g. "ctx._source.likes++" (without the quotes), it
|
||||
// is quoted. Otherwise it returns the raw script that will be directly
|
||||
// embedded into the JSON data.
|
||||
func (s *Script) rawScriptSource(script string) (interface{}, error) { |
||||
v := strings.TrimSpace(script) |
||||
if !strings.HasPrefix(v, "{") && !strings.HasPrefix(v, `"`) { |
||||
v = fmt.Sprintf("%q", v) |
||||
} |
||||
raw := json.RawMessage(v) |
||||
return &raw, nil |
||||
} |
||||
|
||||
// -- Script Field --
|
||||
|
||||
// ScriptField is a single script field.
|
||||
type ScriptField struct { |
||||
FieldName string // name of the field
|
||||
|
||||
script *Script |
||||
ignoreFailure *bool // used in e.g. ScriptSource
|
||||
} |
||||
|
||||
// NewScriptField creates and initializes a new ScriptField.
|
||||
func NewScriptField(fieldName string, script *Script) *ScriptField { |
||||
return &ScriptField{FieldName: fieldName, script: script} |
||||
} |
||||
|
||||
// IgnoreFailure indicates whether to ignore failures. It is used
|
||||
// in e.g. ScriptSource.
|
||||
func (f *ScriptField) IgnoreFailure(ignore bool) *ScriptField { |
||||
f.ignoreFailure = &ignore |
||||
return f |
||||
} |
||||
|
||||
// Source returns the serializable JSON for the ScriptField.
|
||||
func (f *ScriptField) Source() (interface{}, error) { |
||||
if f.script == nil { |
||||
return nil, errors.New("ScriptField expects script") |
||||
} |
||||
source := make(map[string]interface{}) |
||||
src, err := f.script.Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
source["script"] = src |
||||
if v := f.ignoreFailure; v != nil { |
||||
source["ignore_failure"] = *v |
||||
} |
||||
return source, nil |
||||
} |
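A brief usage sketch of the Script builder above (assumes it runs in this package with encoding/json and fmt imported; the script body and parameter name are made up):

```go
// Inline Painless script with one parameter.
script := NewScript("doc['price'].value * params.factor").
	Lang("painless").
	Param("factor", 1.2)

src, err := script.Source()
if err != nil {
	// handle error
}
out, _ := json.Marshal(src)
fmt.Println(string(out))
// {"lang":"painless","params":{"factor":1.2},"source":"doc['price'].value * params.factor"}
```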
@@ -0,0 +1,16 @@
// Copyright 2012-present Oliver Eilhard. All rights reserved.
// Use of this source code is governed by a MIT-license.
// See http://olivere.mit-license.org/license.txt for details.

package es

// Aggregations can be seen as a unit-of-work that builds
// analytic information over a set of documents. It is
// (in many senses) the follow-up of facets in Elasticsearch.
// For more details about aggregations, visit:
// https://www.elastic.co/guide/en/elasticsearch/reference/7.0/search-aggregations.html
type Aggregation interface {
	// Source returns a JSON-serializable aggregation that is a fragment
	// of the request sent to Elasticsearch.
	Source() (interface{}, error)
}
@@ -0,0 +1,422 @@
||||
// Copyright 2012-present Oliver Eilhard. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-license.
|
||||
// See http://olivere.mit-license.org/license.txt for details.
|
||||
|
||||
package es |
||||
|
||||
import "fmt" |
||||
|
||||
// TermsAggregation is a multi-bucket value source based aggregation
|
||||
// where buckets are dynamically built - one per unique value.
|
||||
//
|
||||
// See: http://www.elastic.co/guide/en/elasticsearch/reference/7.0/search-aggregations-bucket-terms-aggregation.html
|
||||
type TermsAggregation struct { |
||||
field string |
||||
script *Script |
||||
missing interface{} |
||||
subAggregations map[string]Aggregation |
||||
meta map[string]interface{} |
||||
|
||||
size *int |
||||
shardSize *int |
||||
requiredSize *int |
||||
minDocCount *int |
||||
shardMinDocCount *int |
||||
valueType string |
||||
includeExclude *TermsAggregationIncludeExclude |
||||
executionHint string |
||||
collectionMode string |
||||
showTermDocCountError *bool |
||||
order []TermsOrder |
||||
} |
||||
|
||||
func NewTermsAggregation() *TermsAggregation { |
||||
return &TermsAggregation{ |
||||
subAggregations: make(map[string]Aggregation), |
||||
} |
||||
} |
||||
|
||||
func (a *TermsAggregation) Field(field string) *TermsAggregation { |
||||
a.field = field |
||||
return a |
||||
} |
||||
|
||||
func (a *TermsAggregation) Script(script *Script) *TermsAggregation { |
||||
a.script = script |
||||
return a |
||||
} |
||||
|
||||
// Missing configures the value to use when documents miss a value.
|
||||
func (a *TermsAggregation) Missing(missing interface{}) *TermsAggregation { |
||||
a.missing = missing |
||||
return a |
||||
} |
||||
|
||||
func (a *TermsAggregation) SubAggregation(name string, subAggregation Aggregation) *TermsAggregation { |
||||
a.subAggregations[name] = subAggregation |
||||
return a |
||||
} |
||||
|
||||
// Meta sets the meta data to be included in the aggregation response.
|
||||
func (a *TermsAggregation) Meta(metaData map[string]interface{}) *TermsAggregation { |
||||
a.meta = metaData |
||||
return a |
||||
} |
||||
|
||||
func (a *TermsAggregation) Size(size int) *TermsAggregation { |
||||
a.size = &size |
||||
return a |
||||
} |
||||
|
||||
func (a *TermsAggregation) RequiredSize(requiredSize int) *TermsAggregation { |
||||
a.requiredSize = &requiredSize |
||||
return a |
||||
} |
||||
|
||||
func (a *TermsAggregation) ShardSize(shardSize int) *TermsAggregation { |
||||
a.shardSize = &shardSize |
||||
return a |
||||
} |
||||
|
||||
func (a *TermsAggregation) MinDocCount(minDocCount int) *TermsAggregation { |
||||
a.minDocCount = &minDocCount |
||||
return a |
||||
} |
||||
|
||||
func (a *TermsAggregation) ShardMinDocCount(shardMinDocCount int) *TermsAggregation { |
||||
a.shardMinDocCount = &shardMinDocCount |
||||
return a |
||||
} |
||||
|
||||
func (a *TermsAggregation) Include(regexp string) *TermsAggregation { |
||||
if a.includeExclude == nil { |
||||
a.includeExclude = &TermsAggregationIncludeExclude{} |
||||
} |
||||
a.includeExclude.Include = regexp |
||||
return a |
||||
} |
||||
|
||||
func (a *TermsAggregation) IncludeValues(values ...interface{}) *TermsAggregation { |
||||
if a.includeExclude == nil { |
||||
a.includeExclude = &TermsAggregationIncludeExclude{} |
||||
} |
||||
a.includeExclude.IncludeValues = append(a.includeExclude.IncludeValues, values...) |
||||
return a |
||||
} |
||||
|
||||
func (a *TermsAggregation) Exclude(regexp string) *TermsAggregation { |
||||
if a.includeExclude == nil { |
||||
a.includeExclude = &TermsAggregationIncludeExclude{} |
||||
} |
||||
a.includeExclude.Exclude = regexp |
||||
return a |
||||
} |
||||
|
||||
func (a *TermsAggregation) ExcludeValues(values ...interface{}) *TermsAggregation { |
||||
if a.includeExclude == nil { |
||||
a.includeExclude = &TermsAggregationIncludeExclude{} |
||||
} |
||||
a.includeExclude.ExcludeValues = append(a.includeExclude.ExcludeValues, values...) |
||||
return a |
||||
} |
||||
|
||||
func (a *TermsAggregation) Partition(p int) *TermsAggregation { |
||||
if a.includeExclude == nil { |
||||
a.includeExclude = &TermsAggregationIncludeExclude{} |
||||
} |
||||
a.includeExclude.Partition = p |
||||
return a |
||||
} |
||||
|
||||
func (a *TermsAggregation) NumPartitions(n int) *TermsAggregation { |
||||
if a.includeExclude == nil { |
||||
a.includeExclude = &TermsAggregationIncludeExclude{} |
||||
} |
||||
a.includeExclude.NumPartitions = n |
||||
return a |
||||
} |
||||
|
||||
func (a *TermsAggregation) IncludeExclude(includeExclude *TermsAggregationIncludeExclude) *TermsAggregation { |
||||
a.includeExclude = includeExclude |
||||
return a |
||||
} |
||||
|
||||
// ValueType can be string, long, or double.
|
||||
func (a *TermsAggregation) ValueType(valueType string) *TermsAggregation { |
||||
a.valueType = valueType |
||||
return a |
||||
} |
||||
|
||||
func (a *TermsAggregation) Order(order string, asc bool) *TermsAggregation { |
||||
a.order = append(a.order, TermsOrder{Field: order, Ascending: asc}) |
||||
return a |
||||
} |
||||
|
||||
func (a *TermsAggregation) OrderByCount(asc bool) *TermsAggregation { |
||||
// "order" : { "_count" : "asc" }
|
||||
a.order = append(a.order, TermsOrder{Field: "_count", Ascending: asc}) |
||||
return a |
||||
} |
||||
|
||||
func (a *TermsAggregation) OrderByCountAsc() *TermsAggregation { |
||||
return a.OrderByCount(true) |
||||
} |
||||
|
||||
func (a *TermsAggregation) OrderByCountDesc() *TermsAggregation { |
||||
return a.OrderByCount(false) |
||||
} |
||||
|
||||
// Deprecated: Use OrderByKey instead.
|
||||
func (a *TermsAggregation) OrderByTerm(asc bool) *TermsAggregation { |
||||
// "order" : { "_term" : "asc" }
|
||||
a.order = append(a.order, TermsOrder{Field: "_term", Ascending: asc}) |
||||
return a |
||||
} |
||||
|
||||
// Deprecated: Use OrderByKeyAsc instead.
|
||||
func (a *TermsAggregation) OrderByTermAsc() *TermsAggregation { |
||||
return a.OrderByTerm(true) |
||||
} |
||||
|
||||
// Deprecated: Use OrderByKeyDesc instead.
|
||||
func (a *TermsAggregation) OrderByTermDesc() *TermsAggregation { |
||||
return a.OrderByTerm(false) |
||||
} |
||||
|
||||
func (a *TermsAggregation) OrderByKey(asc bool) *TermsAggregation { |
||||
// "order" : { "_term" : "asc" }
|
||||
a.order = append(a.order, TermsOrder{Field: "_key", Ascending: asc}) |
||||
return a |
||||
} |
||||
|
||||
func (a *TermsAggregation) OrderByKeyAsc() *TermsAggregation { |
||||
return a.OrderByKey(true) |
||||
} |
||||
|
||||
func (a *TermsAggregation) OrderByKeyDesc() *TermsAggregation { |
||||
return a.OrderByKey(false) |
||||
} |
||||
|
||||
// OrderByAggregation creates a bucket ordering strategy which sorts buckets
|
||||
// based on a single-valued metrics sub-aggregation.
||||
func (a *TermsAggregation) OrderByAggregation(aggName string, asc bool) *TermsAggregation { |
||||
// {
|
||||
// "aggs" : {
|
||||
// "genders" : {
|
||||
// "terms" : {
|
||||
// "field" : "gender",
|
||||
// "order" : { "avg_height" : "desc" }
|
||||
// },
|
||||
// "aggs" : {
|
||||
// "avg_height" : { "avg" : { "field" : "height" } }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
a.order = append(a.order, TermsOrder{Field: aggName, Ascending: asc}) |
||||
return a |
||||
} |
||||
|
||||
// OrderByAggregationAndMetric creates a bucket ordering strategy which
|
||||
// sorts buckets based on a multi-valued metrics sub-aggregation.
||||
func (a *TermsAggregation) OrderByAggregationAndMetric(aggName, metric string, asc bool) *TermsAggregation { |
||||
// {
|
||||
// "aggs" : {
|
||||
// "genders" : {
|
||||
// "terms" : {
|
||||
// "field" : "gender",
|
||||
// "order" : { "height_stats.avg" : "desc" }
|
||||
// },
|
||||
// "aggs" : {
|
||||
// "height_stats" : { "stats" : { "field" : "height" } }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
a.order = append(a.order, TermsOrder{Field: aggName + "." + metric, Ascending: asc}) |
||||
return a |
||||
} |
||||
|
||||
func (a *TermsAggregation) ExecutionHint(hint string) *TermsAggregation { |
||||
a.executionHint = hint |
||||
return a |
||||
} |
||||
|
||||
// Collection mode can be depth_first or breadth_first as of 1.4.0.
|
||||
func (a *TermsAggregation) CollectionMode(collectionMode string) *TermsAggregation { |
||||
a.collectionMode = collectionMode |
||||
return a |
||||
} |
||||
|
||||
func (a *TermsAggregation) ShowTermDocCountError(showTermDocCountError bool) *TermsAggregation { |
||||
a.showTermDocCountError = &showTermDocCountError |
||||
return a |
||||
} |
||||
|
||||
func (a *TermsAggregation) Source() (interface{}, error) { |
||||
// Example:
|
||||
// {
|
||||
// "aggs" : {
|
||||
// "genders" : {
|
||||
// "terms" : { "field" : "gender" }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// This method returns only the { "terms" : { "field" : "gender" } } part.
|
||||
|
||||
source := make(map[string]interface{}) |
||||
opts := make(map[string]interface{}) |
||||
source["terms"] = opts |
||||
|
||||
// ValuesSourceAggregationBuilder
|
||||
if a.field != "" { |
||||
opts["field"] = a.field |
||||
} |
||||
if a.script != nil { |
||||
src, err := a.script.Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
opts["script"] = src |
||||
} |
||||
if a.missing != nil { |
||||
opts["missing"] = a.missing |
||||
} |
||||
|
||||
// TermsBuilder
|
||||
if a.size != nil && *a.size >= 0 { |
||||
opts["size"] = *a.size |
||||
} |
||||
if a.shardSize != nil && *a.shardSize >= 0 { |
||||
opts["shard_size"] = *a.shardSize |
||||
} |
||||
if a.requiredSize != nil && *a.requiredSize >= 0 { |
||||
opts["required_size"] = *a.requiredSize |
||||
} |
||||
if a.minDocCount != nil && *a.minDocCount >= 0 { |
||||
opts["min_doc_count"] = *a.minDocCount |
||||
} |
||||
if a.shardMinDocCount != nil && *a.shardMinDocCount >= 0 { |
||||
opts["shard_min_doc_count"] = *a.shardMinDocCount |
||||
} |
||||
if a.showTermDocCountError != nil { |
||||
opts["show_term_doc_count_error"] = *a.showTermDocCountError |
||||
} |
||||
if a.collectionMode != "" { |
||||
opts["collect_mode"] = a.collectionMode |
||||
} |
||||
if a.valueType != "" { |
||||
opts["value_type"] = a.valueType |
||||
} |
||||
if len(a.order) > 0 { |
||||
var orderSlice []interface{} |
||||
for _, order := range a.order { |
||||
src, err := order.Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
orderSlice = append(orderSlice, src) |
||||
} |
||||
opts["order"] = orderSlice |
||||
} |
||||
|
||||
// Include/Exclude
|
||||
if ie := a.includeExclude; ie != nil { |
||||
if err := ie.MergeInto(opts); err != nil { |
||||
return nil, err |
||||
} |
||||
} |
||||
|
||||
if a.executionHint != "" { |
||||
opts["execution_hint"] = a.executionHint |
||||
} |
||||
|
||||
// AggregationBuilder (SubAggregations)
|
||||
if len(a.subAggregations) > 0 { |
||||
aggsMap := make(map[string]interface{}) |
||||
source["aggregations"] = aggsMap |
||||
for name, aggregate := range a.subAggregations { |
||||
src, err := aggregate.Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
aggsMap[name] = src |
||||
} |
||||
} |
||||
|
||||
// Add Meta data if available
|
||||
if len(a.meta) > 0 { |
||||
source["meta"] = a.meta |
||||
} |
||||
|
||||
return source, nil |
||||
} |
||||
|
||||
// TermsAggregationIncludeExclude allows for include/exclude in a TermsAggregation.
|
||||
type TermsAggregationIncludeExclude struct { |
||||
Include string |
||||
Exclude string |
||||
IncludeValues []interface{} |
||||
ExcludeValues []interface{} |
||||
Partition int |
||||
NumPartitions int |
||||
} |
||||
|
||||
// Source returns a JSON serializable struct.
|
||||
func (ie *TermsAggregationIncludeExclude) Source() (interface{}, error) { |
||||
source := make(map[string]interface{}) |
||||
|
||||
// Include
|
||||
if ie.Include != "" { |
||||
source["include"] = ie.Include |
||||
} else if len(ie.IncludeValues) > 0 { |
||||
source["include"] = ie.IncludeValues |
||||
} else if ie.NumPartitions > 0 { |
||||
inc := make(map[string]interface{}) |
||||
inc["partition"] = ie.Partition |
||||
inc["num_partitions"] = ie.NumPartitions |
||||
source["include"] = inc |
||||
} |
||||
|
||||
// Exclude
|
||||
if ie.Exclude != "" { |
||||
source["exclude"] = ie.Exclude |
||||
} else if len(ie.ExcludeValues) > 0 { |
||||
source["exclude"] = ie.ExcludeValues |
||||
} |
||||
|
||||
return source, nil |
||||
} |
||||
|
||||
// MergeInto merges the values of the include/exclude options into source.
|
||||
func (ie *TermsAggregationIncludeExclude) MergeInto(source map[string]interface{}) error { |
||||
values, err := ie.Source() |
||||
if err != nil { |
||||
return err |
||||
} |
||||
mv, ok := values.(map[string]interface{}) |
||||
if !ok { |
||||
return fmt.Errorf("IncludeExclude: expected a map[string]interface{}, got %T", values) |
||||
} |
||||
for k, v := range mv { |
||||
source[k] = v |
||||
} |
||||
return nil |
||||
} |
||||
|
||||
// TermsOrder specifies a single order field for a terms aggregation.
|
||||
type TermsOrder struct { |
||||
Field string |
||||
Ascending bool |
||||
} |
||||
|
||||
// Source returns serializable JSON of the TermsOrder.
|
||||
func (order *TermsOrder) Source() (interface{}, error) { |
||||
source := make(map[string]string) |
||||
if order.Ascending { |
||||
source[order.Field] = "asc" |
||||
} else { |
||||
source[order.Field] = "desc" |
||||
} |
||||
return source, nil |
||||
} |
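A usage sketch combining the builders above; the field and sub-aggregation names are taken from the JSON in the OrderByAggregation comment, and error handling is elided:

```go
// Terms on "gender", ordered by the avg_height sub-aggregation, descending.
agg := NewTermsAggregation().
	Field("gender").
	Size(10).
	OrderByAggregation("avg_height", false).
	SubAggregation("avg_height", NewAvgAggregation().Field("height"))

src, err := agg.Source()
if err != nil {
	// handle error
}
// src marshals to the equivalent of:
// {"aggregations":{"avg_height":{"avg":{"field":"height"}}},
//  "terms":{"field":"gender","order":[{"avg_height":"desc"}],"size":10}}
_ = src
```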
@@ -0,0 +1,112 @@
||||
// Copyright 2012-present Oliver Eilhard. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-license.
|
||||
// See http://olivere.mit-license.org/license.txt for details.
|
||||
|
||||
package es |
||||
|
||||
// AvgAggregation is a single-value metrics aggregation that computes
|
||||
// the average of numeric values that are extracted from the
|
||||
// aggregated documents. These values can be extracted either from
|
||||
// specific numeric fields in the documents, or be generated by
|
||||
// a provided script.
|
||||
//
|
||||
// See: https://www.elastic.co/guide/en/elasticsearch/reference/7.0/search-aggregations-metrics-avg-aggregation.html
|
||||
type AvgAggregation struct { |
||||
field string |
||||
script *Script |
||||
format string |
||||
missing interface{} |
||||
subAggregations map[string]Aggregation |
||||
meta map[string]interface{} |
||||
} |
||||
|
||||
func NewAvgAggregation() *AvgAggregation { |
||||
return &AvgAggregation{ |
||||
subAggregations: make(map[string]Aggregation), |
||||
} |
||||
} |
||||
|
||||
func (a *AvgAggregation) Field(field string) *AvgAggregation { |
||||
a.field = field |
||||
return a |
||||
} |
||||
|
||||
func (a *AvgAggregation) Script(script *Script) *AvgAggregation { |
||||
a.script = script |
||||
return a |
||||
} |
||||
|
||||
func (a *AvgAggregation) Format(format string) *AvgAggregation { |
||||
a.format = format |
||||
return a |
||||
} |
||||
|
||||
func (a *AvgAggregation) Missing(missing interface{}) *AvgAggregation { |
||||
a.missing = missing |
||||
return a |
||||
} |
||||
|
||||
func (a *AvgAggregation) SubAggregation(name string, subAggregation Aggregation) *AvgAggregation { |
||||
a.subAggregations[name] = subAggregation |
||||
return a |
||||
} |
||||
|
||||
// Meta sets the meta data to be included in the aggregation response.
|
||||
func (a *AvgAggregation) Meta(metaData map[string]interface{}) *AvgAggregation { |
||||
a.meta = metaData |
||||
return a |
||||
} |
||||
|
||||
func (a *AvgAggregation) Source() (interface{}, error) { |
||||
// Example:
|
||||
// {
|
||||
// "aggs" : {
|
||||
// "avg_grade" : { "avg" : { "field" : "grade" } }
|
||||
// }
|
||||
// }
|
||||
// This method returns only the { "avg" : { "field" : "grade" } } part.
|
||||
|
||||
source := make(map[string]interface{}) |
||||
opts := make(map[string]interface{}) |
||||
source["avg"] = opts |
||||
|
||||
// ValuesSourceAggregationBuilder
|
||||
if a.field != "" { |
||||
opts["field"] = a.field |
||||
} |
||||
if a.script != nil { |
||||
src, err := a.script.Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
opts["script"] = src |
||||
} |
||||
|
||||
if a.format != "" { |
||||
opts["format"] = a.format |
||||
} |
||||
|
||||
if a.missing != nil { |
||||
opts["missing"] = a.missing |
||||
} |
||||
|
||||
// AggregationBuilder (SubAggregations)
|
||||
if len(a.subAggregations) > 0 { |
||||
aggsMap := make(map[string]interface{}) |
||||
source["aggregations"] = aggsMap |
||||
for name, aggregate := range a.subAggregations { |
||||
src, err := aggregate.Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
aggsMap[name] = src |
||||
} |
||||
} |
||||
|
||||
// Add Meta data if available
|
||||
if len(a.meta) > 0 { |
||||
source["meta"] = a.meta |
||||
} |
||||
|
||||
return source, nil |
||||
} |
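For example (a sketch; "grade" is an illustrative field), the Missing option supplies a substitute value for documents that lack the field:

```go
agg := NewAvgAggregation().Field("grade").Missing(0)
// agg.Source() yields the map equivalent of {"avg":{"field":"grade","missing":0}}
```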
@@ -0,0 +1,127 @@
||||
// Copyright 2012-present Oliver Eilhard. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-license.
|
||||
// See http://olivere.mit-license.org/license.txt for details.
|
||||
|
||||
package es |
||||
|
||||
// CardinalityAggregation is a single-value metrics aggregation that
|
||||
// calculates an approximate count of distinct values.
|
||||
// Values can be extracted either from specific fields in the document
|
||||
// or generated by a script.
|
||||
// See: https://www.elastic.co/guide/en/elasticsearch/reference/7.0/search-aggregations-metrics-cardinality-aggregation.html
|
||||
type CardinalityAggregation struct { |
||||
field string |
||||
script *Script |
||||
format string |
||||
missing interface{} |
||||
subAggregations map[string]Aggregation |
||||
meta map[string]interface{} |
||||
precisionThreshold *int64 |
||||
rehash *bool |
||||
} |
||||
|
||||
func NewCardinalityAggregation() *CardinalityAggregation { |
||||
return &CardinalityAggregation{ |
||||
subAggregations: make(map[string]Aggregation), |
||||
} |
||||
} |
||||
|
||||
func (a *CardinalityAggregation) Field(field string) *CardinalityAggregation { |
||||
a.field = field |
||||
return a |
||||
} |
||||
|
||||
func (a *CardinalityAggregation) Script(script *Script) *CardinalityAggregation { |
||||
a.script = script |
||||
return a |
||||
} |
||||
|
||||
func (a *CardinalityAggregation) Format(format string) *CardinalityAggregation { |
||||
a.format = format |
||||
return a |
||||
} |
||||
|
||||
func (a *CardinalityAggregation) Missing(missing interface{}) *CardinalityAggregation { |
||||
a.missing = missing |
||||
return a |
||||
} |
||||
func (a *CardinalityAggregation) SubAggregation(name string, subAggregation Aggregation) *CardinalityAggregation { |
||||
a.subAggregations[name] = subAggregation |
||||
return a |
||||
} |
||||
|
||||
// Meta sets the meta data to be included in the aggregation response.
|
||||
func (a *CardinalityAggregation) Meta(metaData map[string]interface{}) *CardinalityAggregation { |
||||
a.meta = metaData |
||||
return a |
||||
} |
||||
|
||||
func (a *CardinalityAggregation) PrecisionThreshold(threshold int64) *CardinalityAggregation { |
||||
a.precisionThreshold = &threshold |
||||
return a |
||||
} |
||||
|
||||
func (a *CardinalityAggregation) Rehash(rehash bool) *CardinalityAggregation { |
||||
a.rehash = &rehash |
||||
return a |
||||
} |
||||
|
||||
func (a *CardinalityAggregation) Source() (interface{}, error) { |
||||
// Example:
|
||||
// {
|
||||
// "aggs" : {
|
||||
// "author_count" : {
|
||||
// "cardinality" : { "field" : "author" }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// This method returns only the "cardinality" : { "field" : "author" } part.
|
||||
|
||||
source := make(map[string]interface{}) |
||||
opts := make(map[string]interface{}) |
||||
source["cardinality"] = opts |
||||
|
||||
// ValuesSourceAggregationBuilder
|
||||
if a.field != "" { |
||||
opts["field"] = a.field |
||||
} |
||||
if a.script != nil { |
||||
src, err := a.script.Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
opts["script"] = src |
||||
} |
||||
if a.missing != nil { |
||||
opts["missing"] = a.missing |
||||
} |
||||
if a.format != "" { |
||||
opts["format"] = a.format |
||||
} |
||||
if a.precisionThreshold != nil { |
||||
opts["precision_threshold"] = *a.precisionThreshold |
||||
} |
||||
if a.rehash != nil { |
||||
opts["rehash"] = *a.rehash |
||||
} |
||||
|
||||
// AggregationBuilder (SubAggregations)
|
||||
if len(a.subAggregations) > 0 { |
||||
aggsMap := make(map[string]interface{}) |
||||
source["aggregations"] = aggsMap |
||||
for name, aggregate := range a.subAggregations { |
||||
src, err := aggregate.Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
aggsMap[name] = src |
||||
} |
||||
} |
||||
|
||||
// Add Meta data if available
|
||||
if len(a.meta) > 0 { |
||||
source["meta"] = a.meta |
||||
} |
||||
|
||||
return source, nil |
||||
} |
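A short sketch ("author" as in the comment above); precision_threshold trades memory for accuracy of the approximate count:

```go
agg := NewCardinalityAggregation().
	Field("author").
	PrecisionThreshold(3000)
// agg.Source() yields the map equivalent of
// {"cardinality":{"field":"author","precision_threshold":3000}}
```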
@@ -0,0 +1,108 @@
||||
// Copyright 2012-present Oliver Eilhard. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-license.
|
||||
// See http://olivere.mit-license.org/license.txt for details.
|
||||
|
||||
package es |
||||
|
||||
// MaxAggregation is a single-value metrics aggregation that keeps track and
|
||||
// returns the maximum value among the numeric values extracted from
|
||||
// the aggregated documents. These values can be extracted either from
|
||||
// specific numeric fields in the documents, or be generated by
|
||||
// a provided script.
|
||||
// See: https://www.elastic.co/guide/en/elasticsearch/reference/7.0/search-aggregations-metrics-max-aggregation.html
|
||||
type MaxAggregation struct { |
||||
field string |
||||
script *Script |
||||
format string |
||||
missing interface{} |
||||
subAggregations map[string]Aggregation |
||||
meta map[string]interface{} |
||||
} |
||||
|
||||
func NewMaxAggregation() *MaxAggregation { |
||||
return &MaxAggregation{ |
||||
subAggregations: make(map[string]Aggregation), |
||||
} |
||||
} |
||||
|
||||
func (a *MaxAggregation) Field(field string) *MaxAggregation { |
||||
a.field = field |
||||
return a |
||||
} |
||||
|
||||
func (a *MaxAggregation) Script(script *Script) *MaxAggregation { |
||||
a.script = script |
||||
return a |
||||
} |
||||
|
||||
func (a *MaxAggregation) Format(format string) *MaxAggregation { |
||||
a.format = format |
||||
return a |
||||
} |
||||
|
||||
func (a *MaxAggregation) Missing(missing interface{}) *MaxAggregation { |
||||
a.missing = missing |
||||
return a |
||||
} |
||||
|
||||
func (a *MaxAggregation) SubAggregation(name string, subAggregation Aggregation) *MaxAggregation { |
||||
a.subAggregations[name] = subAggregation |
||||
return a |
||||
} |
||||
|
||||
// Meta sets the meta data to be included in the aggregation response.
|
||||
func (a *MaxAggregation) Meta(metaData map[string]interface{}) *MaxAggregation { |
||||
a.meta = metaData |
||||
return a |
||||
} |
||||
func (a *MaxAggregation) Source() (interface{}, error) { |
||||
// Example:
|
||||
// {
|
||||
// "aggs" : {
|
||||
// "max_price" : { "max" : { "field" : "price" } }
|
||||
// }
|
||||
// }
|
||||
// This method returns only the { "max" : { "field" : "price" } } part.
|
||||
|
||||
source := make(map[string]interface{}) |
||||
opts := make(map[string]interface{}) |
||||
source["max"] = opts |
||||
|
||||
// ValuesSourceAggregationBuilder
|
||||
if a.field != "" { |
||||
opts["field"] = a.field |
||||
} |
||||
if a.script != nil { |
||||
src, err := a.script.Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
opts["script"] = src |
||||
} |
||||
if a.format != "" { |
||||
opts["format"] = a.format |
||||
} |
||||
if a.missing != nil { |
||||
opts["missing"] = a.missing |
||||
} |
||||
|
||||
// AggregationBuilder (SubAggregations)
|
||||
if len(a.subAggregations) > 0 { |
||||
aggsMap := make(map[string]interface{}) |
||||
source["aggregations"] = aggsMap |
||||
for name, aggregate := range a.subAggregations { |
||||
src, err := aggregate.Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
aggsMap[name] = src |
||||
} |
||||
} |
||||
|
||||
// Add Meta data if available
|
||||
if len(a.meta) > 0 { |
||||
source["meta"] = a.meta |
||||
} |
||||
|
||||
return source, nil |
||||
} |
@@ -0,0 +1,109 @@
||||
// Copyright 2012-present Oliver Eilhard. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-license.
|
||||
// See http://olivere.mit-license.org/license.txt for details.
|
||||
|
||||
package es |
||||
|
||||
// MinAggregation is a single-value metrics aggregation that keeps track and
|
||||
// returns the minimum value among numeric values extracted from the
|
||||
// aggregated documents. These values can be extracted either from
|
||||
// specific numeric fields in the documents, or be generated by a
|
||||
// provided script.
|
||||
// See: https://www.elastic.co/guide/en/elasticsearch/reference/7.0/search-aggregations-metrics-min-aggregation.html
|
||||
type MinAggregation struct { |
||||
field string |
||||
script *Script |
||||
format string |
||||
missing interface{} |
||||
subAggregations map[string]Aggregation |
||||
meta map[string]interface{} |
||||
} |
||||
|
||||
func NewMinAggregation() *MinAggregation { |
||||
return &MinAggregation{ |
||||
subAggregations: make(map[string]Aggregation), |
||||
} |
||||
} |
||||
|
||||
func (a *MinAggregation) Field(field string) *MinAggregation { |
||||
a.field = field |
||||
return a |
||||
} |
||||
|
||||
func (a *MinAggregation) Script(script *Script) *MinAggregation { |
||||
a.script = script |
||||
return a |
||||
} |
||||
|
||||
func (a *MinAggregation) Format(format string) *MinAggregation { |
||||
a.format = format |
||||
return a |
||||
} |
||||
|
||||
func (a *MinAggregation) Missing(missing interface{}) *MinAggregation { |
||||
a.missing = missing |
||||
return a |
||||
} |
||||
|
||||
func (a *MinAggregation) SubAggregation(name string, subAggregation Aggregation) *MinAggregation { |
||||
a.subAggregations[name] = subAggregation |
||||
return a |
||||
} |
||||
|
||||
// Meta sets the meta data to be included in the aggregation response.
|
||||
func (a *MinAggregation) Meta(metaData map[string]interface{}) *MinAggregation { |
||||
a.meta = metaData |
||||
return a |
||||
} |
||||
|
||||
func (a *MinAggregation) Source() (interface{}, error) { |
||||
// Example:
|
||||
// {
|
||||
// "aggs" : {
|
||||
// "min_price" : { "min" : { "field" : "price" } }
|
||||
// }
|
||||
// }
|
||||
// This method returns only the { "min" : { "field" : "price" } } part.
|
||||
|
||||
source := make(map[string]interface{}) |
||||
opts := make(map[string]interface{}) |
||||
source["min"] = opts |
||||
|
||||
// ValuesSourceAggregationBuilder
|
||||
if a.field != "" { |
||||
opts["field"] = a.field |
||||
} |
||||
if a.script != nil { |
||||
src, err := a.script.Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
opts["script"] = src |
||||
} |
||||
if a.format != "" { |
||||
opts["format"] = a.format |
||||
} |
||||
if a.missing != nil { |
||||
opts["missing"] = a.missing |
||||
} |
||||
|
||||
// AggregationBuilder (SubAggregations)
|
||||
if len(a.subAggregations) > 0 { |
||||
aggsMap := make(map[string]interface{}) |
||||
source["aggregations"] = aggsMap |
||||
for name, aggregate := range a.subAggregations { |
||||
src, err := aggregate.Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
aggsMap[name] = src |
||||
} |
||||
} |
||||
|
||||
// Add Meta data if available
|
||||
if len(a.meta) > 0 { |
||||
source["meta"] = a.meta |
||||
} |
||||
|
||||
return source, nil |
||||
} |
@@ -0,0 +1,203 @@
||||
// Copyright 2012-present Oliver Eilhard. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-license.
|
||||
// See http://olivere.mit-license.org/license.txt for details.
|
||||
|
||||
package es |
||||
|
||||
import "fmt" |
||||
|
||||
// A bool query matches documents matching boolean
|
||||
// combinations of other queries.
|
||||
// For more details, see:
|
||||
// https://www.elastic.co/guide/en/elasticsearch/reference/7.0/query-dsl-bool-query.html
|
||||
type BoolQuery struct { |
||||
Query |
||||
mustClauses []Query |
||||
mustNotClauses []Query |
||||
filterClauses []Query |
||||
shouldClauses []Query |
||||
boost *float64 |
||||
minimumShouldMatch string |
||||
adjustPureNegative *bool |
||||
queryName string |
||||
} |
||||
|
||||
// Creates a new bool query.
|
||||
func NewBoolQuery() *BoolQuery { |
||||
return &BoolQuery{ |
||||
mustClauses: make([]Query, 0), |
||||
mustNotClauses: make([]Query, 0), |
||||
filterClauses: make([]Query, 0), |
||||
shouldClauses: make([]Query, 0), |
||||
} |
||||
} |
||||
|
||||
func (q *BoolQuery) Must(queries ...Query) *BoolQuery { |
||||
q.mustClauses = append(q.mustClauses, queries...) |
||||
return q |
||||
} |
||||
|
||||
func (q *BoolQuery) MustNot(queries ...Query) *BoolQuery { |
||||
q.mustNotClauses = append(q.mustNotClauses, queries...) |
||||
return q |
||||
} |
||||
|
||||
func (q *BoolQuery) Filter(filters ...Query) *BoolQuery { |
||||
q.filterClauses = append(q.filterClauses, filters...) |
||||
return q |
||||
} |
||||
|
||||
func (q *BoolQuery) Should(queries ...Query) *BoolQuery { |
||||
q.shouldClauses = append(q.shouldClauses, queries...) |
||||
return q |
||||
} |
||||
|
||||
func (q *BoolQuery) Boost(boost float64) *BoolQuery { |
||||
q.boost = &boost |
||||
return q |
||||
} |
||||
|
||||
func (q *BoolQuery) MinimumShouldMatch(minimumShouldMatch string) *BoolQuery { |
||||
q.minimumShouldMatch = minimumShouldMatch |
||||
return q |
||||
} |
||||
|
||||
func (q *BoolQuery) MinimumNumberShouldMatch(minimumNumberShouldMatch int) *BoolQuery { |
||||
q.minimumShouldMatch = fmt.Sprintf("%d", minimumNumberShouldMatch) |
||||
return q |
||||
} |
||||
|
||||
func (q *BoolQuery) AdjustPureNegative(adjustPureNegative bool) *BoolQuery { |
||||
q.adjustPureNegative = &adjustPureNegative |
||||
return q |
||||
} |
||||
|
||||
func (q *BoolQuery) QueryName(queryName string) *BoolQuery { |
||||
q.queryName = queryName |
||||
return q |
||||
} |
||||
|
||||
// Creates the query source for the bool query.
|
||||
func (q *BoolQuery) Source() (interface{}, error) { |
||||
// {
|
||||
// "bool" : {
|
||||
// "must" : {
|
||||
// "term" : { "user" : "kimchy" }
|
||||
// },
|
||||
// "must_not" : {
|
||||
// "range" : {
|
||||
// "age" : { "from" : 10, "to" : 20 }
|
||||
// }
|
||||
// },
|
||||
// "filter" : [
|
||||
// ...
|
||||
// ]
|
||||
// "should" : [
|
||||
// {
|
||||
// "term" : { "tag" : "wow" }
|
||||
// },
|
||||
// {
|
||||
// "term" : { "tag" : "elasticsearch" }
|
||||
// }
|
||||
// ],
|
||||
// "minimum_should_match" : 1,
|
||||
// "boost" : 1.0
|
||||
// }
|
||||
// }
|
||||
|
||||
query := make(map[string]interface{}) |
||||
|
||||
boolClause := make(map[string]interface{}) |
||||
query["bool"] = boolClause |
||||
|
||||
// must
|
||||
if len(q.mustClauses) == 1 { |
||||
src, err := q.mustClauses[0].Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
boolClause["must"] = src |
||||
} else if len(q.mustClauses) > 1 { |
||||
var clauses []interface{} |
||||
for _, subQuery := range q.mustClauses { |
||||
src, err := subQuery.Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
clauses = append(clauses, src) |
||||
} |
||||
boolClause["must"] = clauses |
||||
} |
||||
|
||||
// must_not
|
||||
if len(q.mustNotClauses) == 1 { |
||||
src, err := q.mustNotClauses[0].Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
boolClause["must_not"] = src |
||||
} else if len(q.mustNotClauses) > 1 { |
||||
var clauses []interface{} |
||||
for _, subQuery := range q.mustNotClauses { |
||||
src, err := subQuery.Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
clauses = append(clauses, src) |
||||
} |
||||
boolClause["must_not"] = clauses |
||||
} |
||||
|
||||
// filter
|
||||
if len(q.filterClauses) == 1 { |
||||
src, err := q.filterClauses[0].Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
boolClause["filter"] = src |
||||
} else if len(q.filterClauses) > 1 { |
||||
var clauses []interface{} |
||||
for _, subQuery := range q.filterClauses { |
||||
src, err := subQuery.Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
clauses = append(clauses, src) |
||||
} |
||||
boolClause["filter"] = clauses |
||||
} |
||||
|
||||
// should
|
||||
if len(q.shouldClauses) == 1 { |
||||
src, err := q.shouldClauses[0].Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
boolClause["should"] = src |
||||
} else if len(q.shouldClauses) > 1 { |
||||
var clauses []interface{} |
||||
for _, subQuery := range q.shouldClauses { |
||||
src, err := subQuery.Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
clauses = append(clauses, src) |
||||
} |
||||
boolClause["should"] = clauses |
||||
} |
||||
|
||||
if q.boost != nil { |
||||
boolClause["boost"] = *q.boost |
||||
} |
||||
if q.minimumShouldMatch != "" { |
||||
boolClause["minimum_should_match"] = q.minimumShouldMatch |
||||
} |
||||
if q.adjustPureNegative != nil { |
||||
boolClause["adjust_pure_negative"] = *q.adjustPureNegative |
||||
} |
||||
if q.queryName != "" { |
||||
boolClause["_name"] = q.queryName |
||||
} |
||||
|
||||
return query, nil |
||||
} |
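A sketch of composing the clause types (field values borrowed from the comment above; only builders added in this commit are used). Note the single-clause/array distinction produced by Source():

```go
q := NewBoolQuery().
	Must(NewMatchQuery("user", "kimchy")).
	Should(
		NewMatchPhraseQuery("tag", "wow"),
		NewMatchPhraseQuery("tag", "elasticsearch"),
	).
	MinimumShouldMatch("1").
	Boost(1.0)

src, err := q.Source()
if err != nil {
	// handle error
}
// Because there is exactly one "must" clause it is emitted as an object,
// while the two "should" clauses become an array:
// {"bool":{"must":{"match":{"user":{"query":"kimchy"}}},
//          "should":[{"match_phrase":{"tag":{"query":"wow"}}},
//                    {"match_phrase":{"tag":{"query":"elasticsearch"}}}],
//          "minimum_should_match":"1","boost":1}}
_ = src
```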
@@ -0,0 +1,189 @@
||||
// Copyright 2012-present Oliver Eilhard. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-license.
|
||||
// See http://olivere.mit-license.org/license.txt for details.
|
||||
|
||||
package es |
||||
|
||||
// MatchQuery is a family of queries that accepts text/numerics/dates,
|
||||
// analyzes them, and constructs a query.
|
||||
//
|
||||
// To create a new MatchQuery, use NewMatchQuery. To create specific types
|
||||
// of queries, e.g. a match_phrase query, use NewMatchQuery(...).Type("phrase"),
||||
// or use one of the shortcuts e.g. NewMatchPhraseQuery(...).
|
||||
//
|
||||
// For more details, see
|
||||
// https://www.elastic.co/guide/en/elasticsearch/reference/7.0/query-dsl-match-query.html
|
||||
type MatchQuery struct { |
||||
name string |
||||
text interface{} |
||||
operator string // or / and
|
||||
analyzer string |
||||
boost *float64 |
||||
fuzziness string |
||||
prefixLength *int |
||||
maxExpansions *int |
||||
minimumShouldMatch string |
||||
fuzzyRewrite string |
||||
lenient *bool |
||||
fuzzyTranspositions *bool |
||||
zeroTermsQuery string |
||||
cutoffFrequency *float64 |
||||
queryName string |
||||
} |
||||
|
||||
// NewMatchQuery creates and initializes a new MatchQuery.
|
||||
func NewMatchQuery(name string, text interface{}) *MatchQuery { |
||||
return &MatchQuery{name: name, text: text} |
||||
} |
||||
|
||||
// Operator sets the operator to use when using a boolean query.
|
||||
// Can be "AND" or "OR" (default).
|
||||
func (q *MatchQuery) Operator(operator string) *MatchQuery { |
||||
q.operator = operator |
||||
return q |
||||
} |
||||
|
||||
// Analyzer explicitly sets the analyzer to use. It defaults to use explicit
|
||||
// mapping config for the field, or, if not set, the default search analyzer.
|
||||
func (q *MatchQuery) Analyzer(analyzer string) *MatchQuery { |
||||
q.analyzer = analyzer |
||||
return q |
||||
} |
||||
|
||||
// Fuzziness sets the fuzziness when evaluated to a fuzzy query type.
|
||||
// Defaults to "AUTO".
|
||||
func (q *MatchQuery) Fuzziness(fuzziness string) *MatchQuery { |
||||
q.fuzziness = fuzziness |
||||
return q |
||||
} |
||||
|
||||
// PrefixLength sets the length of the common (non-fuzzy)
// prefix for fuzzy match queries. It must be non-negative.
||||
func (q *MatchQuery) PrefixLength(prefixLength int) *MatchQuery { |
||||
q.prefixLength = &prefixLength |
||||
return q |
||||
} |
||||
|
||||
// MaxExpansions is used with fuzzy or prefix type queries. It specifies
|
||||
// the number of term expansions to use. It defaults to unbounded, so it
// is recommended to set it to a reasonable value for faster execution.
||||
func (q *MatchQuery) MaxExpansions(maxExpansions int) *MatchQuery { |
||||
q.maxExpansions = &maxExpansions |
||||
return q |
||||
} |
||||
|
||||
// CutoffFrequency can be a value in [0..1] (or an absolute number >=1).
|
||||
// It represents the maximum threshold of a term's document frequency to be
||||
// considered a low frequency term.
|
||||
func (q *MatchQuery) CutoffFrequency(cutoff float64) *MatchQuery { |
||||
q.cutoffFrequency = &cutoff |
||||
return q |
||||
} |
||||
|
||||
// MinimumShouldMatch sets the optional minimumShouldMatch value to
|
||||
// apply to the query.
|
||||
func (q *MatchQuery) MinimumShouldMatch(minimumShouldMatch string) *MatchQuery { |
||||
q.minimumShouldMatch = minimumShouldMatch |
||||
return q |
||||
} |
||||
|
||||
// FuzzyRewrite sets the fuzzy_rewrite parameter controlling how the
|
||||
// fuzzy query will get rewritten.
|
||||
func (q *MatchQuery) FuzzyRewrite(fuzzyRewrite string) *MatchQuery { |
||||
q.fuzzyRewrite = fuzzyRewrite |
||||
return q |
||||
} |
||||
|
||||
// FuzzyTranspositions sets whether transpositions are supported in
// fuzzy queries.
//
// The default metric used by fuzzy queries to determine a match is
// the Damerau-Levenshtein distance formula, which supports transpositions.
// Setting this option to false switches to the classic Levenshtein distance.
// If not set, the Damerau-Levenshtein distance metric is used.
||||
func (q *MatchQuery) FuzzyTranspositions(fuzzyTranspositions bool) *MatchQuery { |
||||
q.fuzzyTranspositions = &fuzzyTranspositions |
||||
return q |
||||
} |
||||
|
||||
// Lenient specifies whether format based failures will be ignored.
|
||||
func (q *MatchQuery) Lenient(lenient bool) *MatchQuery { |
||||
q.lenient = &lenient |
||||
return q |
||||
} |
||||
|
||||
// ZeroTermsQuery can be "all" or "none".
|
||||
func (q *MatchQuery) ZeroTermsQuery(zeroTermsQuery string) *MatchQuery { |
||||
q.zeroTermsQuery = zeroTermsQuery |
||||
return q |
||||
} |
||||
|
||||
// Boost sets the boost to apply to this query.
|
||||
func (q *MatchQuery) Boost(boost float64) *MatchQuery { |
||||
q.boost = &boost |
||||
return q |
||||
} |
||||
|
||||
// QueryName sets the query name for the filter that can be used when
|
||||
// searching for matched filters per hit.
|
||||
func (q *MatchQuery) QueryName(queryName string) *MatchQuery { |
||||
q.queryName = queryName |
||||
return q |
||||
} |
||||
|
||||
// Source returns JSON for the match query.
||||
func (q *MatchQuery) Source() (interface{}, error) { |
||||
// {"match":{"name":{"query":"value","type":"boolean/phrase"}}}
|
||||
source := make(map[string]interface{}) |
||||
|
||||
match := make(map[string]interface{}) |
||||
source["match"] = match |
||||
|
||||
query := make(map[string]interface{}) |
||||
match[q.name] = query |
||||
|
||||
query["query"] = q.text |
||||
|
||||
if q.operator != "" { |
||||
query["operator"] = q.operator |
||||
} |
||||
if q.analyzer != "" { |
||||
query["analyzer"] = q.analyzer |
||||
} |
||||
if q.fuzziness != "" { |
||||
query["fuzziness"] = q.fuzziness |
||||
} |
||||
if q.prefixLength != nil { |
||||
query["prefix_length"] = *q.prefixLength |
||||
} |
||||
if q.maxExpansions != nil { |
||||
query["max_expansions"] = *q.maxExpansions |
||||
} |
||||
if q.minimumShouldMatch != "" { |
||||
query["minimum_should_match"] = q.minimumShouldMatch |
||||
} |
||||
if q.fuzzyRewrite != "" { |
||||
query["fuzzy_rewrite"] = q.fuzzyRewrite |
||||
} |
||||
if q.lenient != nil { |
||||
query["lenient"] = *q.lenient |
||||
} |
||||
if q.fuzzyTranspositions != nil { |
||||
query["fuzzy_transpositions"] = *q.fuzzyTranspositions |
||||
} |
||||
if q.zeroTermsQuery != "" { |
||||
query["zero_terms_query"] = q.zeroTermsQuery |
||||
} |
||||
if q.cutoffFrequency != nil { |
||||
query["cutoff_frequency"] = *q.cutoffFrequency |
||||
} |
||||
if q.boost != nil { |
||||
query["boost"] = *q.boost |
||||
} |
||||
if q.queryName != "" { |
||||
query["_name"] = q.queryName |
||||
} |
||||
|
||||
return source, nil |
||||
} |
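A small sketch (illustrative field and value) of a fuzzy match with an explicit operator:

```go
q := NewMatchQuery("title", "aspirin").
	Operator("and").
	Fuzziness("AUTO")
// q.Source() yields the map equivalent of
// {"match":{"title":{"query":"aspirin","operator":"and","fuzziness":"AUTO"}}}
```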
@@ -0,0 +1,89 @@
||||
// Copyright 2012-present Oliver Eilhard. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-license.
|
||||
// See http://olivere.mit-license.org/license.txt for details.
|
||||
|
||||
package es |
||||
|
||||
// MatchPhraseQuery analyzes the text and creates a phrase query out of
|
||||
// the analyzed text.
|
||||
//
|
||||
// For more details, see
|
||||
// https://www.elastic.co/guide/en/elasticsearch/reference/7.0/query-dsl-match-query-phrase.html
|
||||
type MatchPhraseQuery struct { |
||||
name string |
||||
value interface{} |
||||
analyzer string |
||||
slop *int |
||||
boost *float64 |
||||
queryName string |
||||
zeroTermsQuery string |
||||
} |
||||
|
||||
// NewMatchPhraseQuery creates and initializes a new MatchPhraseQuery.
|
||||
func NewMatchPhraseQuery(name string, value interface{}) *MatchPhraseQuery { |
||||
return &MatchPhraseQuery{name: name, value: value} |
||||
} |
||||
|
||||
// Analyzer explicitly sets the analyzer to use. It defaults to use explicit
|
||||
// mapping config for the field, or, if not set, the default search analyzer.
|
||||
func (q *MatchPhraseQuery) Analyzer(analyzer string) *MatchPhraseQuery { |
||||
q.analyzer = analyzer |
||||
return q |
||||
} |
||||
|
||||
// Slop sets the phrase slop if evaluated to a phrase query type.
|
||||
func (q *MatchPhraseQuery) Slop(slop int) *MatchPhraseQuery { |
||||
q.slop = &slop |
||||
return q |
||||
} |
||||
|
||||
// ZeroTermsQuery can be "all" or "none".
|
||||
func (q *MatchPhraseQuery) ZeroTermsQuery(zeroTermsQuery string) *MatchPhraseQuery { |
||||
q.zeroTermsQuery = zeroTermsQuery |
||||
return q |
||||
} |
||||
|
||||
// Boost sets the boost to apply to this query.
|
||||
func (q *MatchPhraseQuery) Boost(boost float64) *MatchPhraseQuery { |
||||
q.boost = &boost |
||||
return q |
||||
} |
||||
|
||||
// QueryName sets the query name for the filter that can be used when
|
||||
// searching for matched filters per hit.
|
||||
func (q *MatchPhraseQuery) QueryName(queryName string) *MatchPhraseQuery { |
||||
q.queryName = queryName |
||||
return q |
||||
} |
||||
|
||||
// Source returns JSON for the match_phrase query.
||||
func (q *MatchPhraseQuery) Source() (interface{}, error) { |
||||
// {"match_phrase":{"name":{"query":"value","analyzer":"my_analyzer"}}}
|
||||
source := make(map[string]interface{}) |
||||
|
||||
match := make(map[string]interface{}) |
||||
source["match_phrase"] = match |
||||
|
||||
query := make(map[string]interface{}) |
||||
match[q.name] = query |
||||
|
||||
query["query"] = q.value |
||||
|
||||
if q.analyzer != "" { |
||||
query["analyzer"] = q.analyzer |
||||
} |
||||
if q.slop != nil { |
||||
query["slop"] = *q.slop |
||||
} |
||||
if q.zeroTermsQuery != "" { |
||||
query["zero_terms_query"] = q.zeroTermsQuery |
||||
} |
||||
if q.boost != nil { |
||||
query["boost"] = *q.boost |
||||
} |
||||
if q.queryName != "" { |
||||
query["_name"] = q.queryName |
||||
} |
||||
|
||||
return source, nil |
||||
} |
@@ -0,0 +1,89 @@
||||
// Copyright 2012-present Oliver Eilhard. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-license.
|
||||
// See http://olivere.mit-license.org/license.txt for details.
|
||||
|
||||
package es |
||||
|
||||
// MatchPhrasePrefixQuery is the same as match_phrase, except that it allows for
|
||||
// prefix matches on the last term in the text.
|
||||
//
|
||||
// For more details, see
|
||||
// https://www.elastic.co/guide/en/elasticsearch/reference/7.0/query-dsl-match-query-phrase-prefix.html
|
||||
type MatchPhrasePrefixQuery struct { |
||||
name string |
||||
value interface{} |
||||
analyzer string |
||||
slop *int |
||||
maxExpansions *int |
||||
boost *float64 |
||||
queryName string |
||||
} |
||||
|
||||
// NewMatchPhrasePrefixQuery creates and initializes a new MatchPhrasePrefixQuery.
|
||||
func NewMatchPhrasePrefixQuery(name string, value interface{}) *MatchPhrasePrefixQuery { |
||||
return &MatchPhrasePrefixQuery{name: name, value: value} |
||||
} |
||||
|
||||
// Analyzer explicitly sets the analyzer to use. It defaults to use explicit
|
||||
// mapping config for the field, or, if not set, the default search analyzer.
|
||||
func (q *MatchPhrasePrefixQuery) Analyzer(analyzer string) *MatchPhrasePrefixQuery { |
||||
q.analyzer = analyzer |
||||
return q |
||||
} |
||||
|
||||
// Slop sets the phrase slop if evaluated to a phrase query type.
|
||||
func (q *MatchPhrasePrefixQuery) Slop(slop int) *MatchPhrasePrefixQuery { |
||||
q.slop = &slop |
||||
return q |
||||
} |
||||
|
||||
// MaxExpansions sets the number of term expansions to use.
|
||||
func (q *MatchPhrasePrefixQuery) MaxExpansions(n int) *MatchPhrasePrefixQuery { |
||||
q.maxExpansions = &n |
||||
return q |
||||
} |
||||
|
||||
// Boost sets the boost to apply to this query.
|
||||
func (q *MatchPhrasePrefixQuery) Boost(boost float64) *MatchPhrasePrefixQuery { |
||||
q.boost = &boost |
||||
return q |
||||
} |
||||
|
||||
// QueryName sets the query name for the filter that can be used when
|
||||
// searching for matched filters per hit.
|
||||
func (q *MatchPhrasePrefixQuery) QueryName(queryName string) *MatchPhrasePrefixQuery { |
||||
q.queryName = queryName |
||||
return q |
||||
} |
||||
|
||||
// Source returns JSON for the match_phrase_prefix query.
||||
func (q *MatchPhrasePrefixQuery) Source() (interface{}, error) { |
||||
// {"match_phrase_prefix":{"name":{"query":"value","max_expansions":10}}}
|
||||
source := make(map[string]interface{}) |
||||
|
||||
match := make(map[string]interface{}) |
||||
source["match_phrase_prefix"] = match |
||||
|
||||
query := make(map[string]interface{}) |
||||
match[q.name] = query |
||||
|
||||
query["query"] = q.value |
||||
|
||||
if q.analyzer != "" { |
||||
query["analyzer"] = q.analyzer |
||||
} |
||||
if q.slop != nil { |
||||
query["slop"] = *q.slop |
||||
} |
||||
if q.maxExpansions != nil { |
||||
query["max_expansions"] = *q.maxExpansions |
||||
} |
||||
if q.boost != nil { |
||||
query["boost"] = *q.boost |
||||
} |
||||
if q.queryName != "" { |
||||
query["_name"] = q.queryName |
||||
} |
||||
|
||||
return source, nil |
||||
} |
@@ -0,0 +1,76 @@
||||
// Copyright 2012-present Oliver Eilhard. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-license.
|
||||
// See http://olivere.mit-license.org/license.txt for details.
|
||||
|
||||
package es |
||||
|
||||
// PrefixQuery matches documents that have fields containing terms
|
||||
// with a specified prefix (not analyzed).
|
||||
//
|
||||
// For more details, see
|
||||
// https://www.elastic.co/guide/en/elasticsearch/reference/7.0/query-dsl-prefix-query.html
|
||||
type PrefixQuery struct { |
||||
name string |
||||
prefix string |
||||
boost *float64 |
||||
rewrite string |
||||
caseInsensitive *bool |
||||
queryName string |
||||
} |
||||
|
||||
// NewPrefixQuery creates and initializes a new PrefixQuery.
|
||||
func NewPrefixQuery(name string, prefix string) *PrefixQuery { |
||||
return &PrefixQuery{name: name, prefix: prefix} |
||||
} |
||||
|
||||
// Boost sets the boost for this query.
|
||||
func (q *PrefixQuery) Boost(boost float64) *PrefixQuery { |
||||
q.boost = &boost |
||||
return q |
||||
} |
||||
|
||||
func (q *PrefixQuery) Rewrite(rewrite string) *PrefixQuery { |
||||
q.rewrite = rewrite |
||||
return q |
||||
} |
||||
|
||||
func (q *PrefixQuery) CaseInsensitive(caseInsensitive bool) *PrefixQuery { |
||||
q.caseInsensitive = &caseInsensitive |
||||
return q |
||||
} |
||||
|
||||
// QueryName sets the query name for the filter that can be used when
|
||||
// searching for matched_filters per hit.
|
||||
func (q *PrefixQuery) QueryName(queryName string) *PrefixQuery { |
||||
q.queryName = queryName |
||||
return q |
||||
} |
||||
|
||||
// Source returns JSON for the query.
|
||||
func (q *PrefixQuery) Source() (interface{}, error) { |
||||
source := make(map[string]interface{}) |
||||
query := make(map[string]interface{}) |
||||
source["prefix"] = query |
||||
|
||||
if q.boost == nil && q.rewrite == "" && q.queryName == "" && q.caseInsensitive == nil { |
||||
query[q.name] = q.prefix |
||||
} else { |
||||
subQuery := make(map[string]interface{}) |
||||
subQuery["value"] = q.prefix |
||||
if q.boost != nil { |
||||
subQuery["boost"] = *q.boost |
||||
} |
||||
if q.rewrite != "" { |
||||
subQuery["rewrite"] = q.rewrite |
||||
} |
||||
if q.caseInsensitive != nil { |
||||
subQuery["case_insensitive"] = *q.caseInsensitive |
||||
} |
||||
if q.queryName != "" { |
||||
subQuery["_name"] = q.queryName |
||||
} |
||||
query[q.name] = subQuery |
||||
} |
||||
|
||||
return source, nil |
||||
} |
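A sketch showing the two output shapes produced by Source() above ("user" is an illustrative field):

```go
// Compact form: no options are set, so the prefix is emitted directly.
q1 := NewPrefixQuery("user", "ki")
// q1.Source() yields {"prefix":{"user":"ki"}}

// Expanded form: setting any option switches to the object representation.
q2 := NewPrefixQuery("user", "ki").CaseInsensitive(true)
// q2.Source() yields {"prefix":{"user":{"value":"ki","case_insensitive":true}}}
```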
@@ -0,0 +1,350 @@
||||
// Copyright 2012-present Oliver Eilhard. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-license.
|
||||
// See http://olivere.mit-license.org/license.txt for details.
|
||||
|
||||
package es |
||||
|
||||
import ( |
||||
"fmt" |
||||
) |
||||
|
||||
// QueryStringQuery uses the query parser in order to parse its content.
|
||||
//
|
||||
// For more details, see
|
||||
// https://www.elastic.co/guide/en/elasticsearch/reference/7.0/query-dsl-query-string-query.html
|
||||
type QueryStringQuery struct { |
||||
queryString string |
||||
defaultField string |
||||
defaultOperator string |
||||
analyzer string |
||||
quoteAnalyzer string |
||||
quoteFieldSuffix string |
||||
allowLeadingWildcard *bool |
||||
lowercaseExpandedTerms *bool // Deprecated: Decision is now made by the analyzer
|
||||
enablePositionIncrements *bool |
||||
analyzeWildcard *bool |
||||
locale string // Deprecated: Decision is now made by the analyzer
|
||||
boost *float64 |
||||
fuzziness string |
||||
fuzzyPrefixLength *int |
||||
fuzzyMaxExpansions *int |
||||
fuzzyRewrite string |
||||
phraseSlop *int |
||||
fields []string |
||||
fieldBoosts map[string]*float64 |
||||
tieBreaker *float64 |
||||
rewrite string |
||||
minimumShouldMatch string |
||||
lenient *bool |
||||
queryName string |
||||
timeZone string |
||||
maxDeterminizedStates *int |
||||
escape *bool |
||||
typ string |
||||
} |
||||
|
||||
// NewQueryStringQuery creates and initializes a new QueryStringQuery.
|
||||
func NewQueryStringQuery(queryString string) *QueryStringQuery { |
||||
return &QueryStringQuery{ |
||||
queryString: queryString, |
||||
fields: make([]string, 0), |
||||
fieldBoosts: make(map[string]*float64), |
||||
} |
||||
} |
||||
|
||||
// DefaultField specifies the field to run against when no prefix field
|
||||
// is specified. Only relevant when not explicitly adding fields the query
|
||||
// string will run against.
|
||||
func (q *QueryStringQuery) DefaultField(defaultField string) *QueryStringQuery { |
||||
q.defaultField = defaultField |
||||
return q |
||||
} |
||||
|
||||
// Field adds a field to run the query string against.
|
||||
func (q *QueryStringQuery) Field(field string) *QueryStringQuery { |
||||
q.fields = append(q.fields, field) |
||||
return q |
||||
} |
||||
|
||||
// Type sets how multiple fields should be combined to build textual part queries,
|
||||
// e.g. "best_fields".
|
||||
func (q *QueryStringQuery) Type(typ string) *QueryStringQuery { |
||||
q.typ = typ |
||||
return q |
||||
} |
||||
|
||||
// FieldWithBoost adds a field to run the query string against with a specific boost.
|
||||
func (q *QueryStringQuery) FieldWithBoost(field string, boost float64) *QueryStringQuery { |
||||
q.fields = append(q.fields, field) |
||||
q.fieldBoosts[field] = &boost |
||||
return q |
||||
} |
||||
|
||||
// TieBreaker is used when more than one field is used with the query string,
|
||||
// and combined queries are using dismax.
|
||||
func (q *QueryStringQuery) TieBreaker(tieBreaker float64) *QueryStringQuery { |
||||
q.tieBreaker = &tieBreaker |
||||
return q |
||||
} |
||||
|
||||
// DefaultOperator sets the boolean operator of the query parser used to
|
||||
// parse the query string.
|
||||
//
|
||||
// In default mode (OR) terms without any modifiers
|
||||
// are considered optional, e.g. "capital of Hungary" is equal to
|
||||
// "capital OR of OR Hungary".
|
||||
//
|
||||
// In AND mode, terms are considered to be in conjunction. The above mentioned
|
||||
// query is then parsed as "capital AND of AND Hungary".
|
||||
func (q *QueryStringQuery) DefaultOperator(operator string) *QueryStringQuery { |
||||
q.defaultOperator = operator |
||||
return q |
||||
} |
||||
|
||||
// Analyzer is an optional analyzer used to analyze the query string.
|
||||
// Note, if a field has search analyzer defined for it, then it will be used
|
||||
// automatically. Defaults to the smart search analyzer.
|
||||
func (q *QueryStringQuery) Analyzer(analyzer string) *QueryStringQuery { |
||||
q.analyzer = analyzer |
||||
return q |
||||
} |
||||
|
||||
// QuoteAnalyzer is an optional analyzer to be used to analyze the query string
|
||||
// for phrase searches. Note, if a field has search analyzer defined for it,
|
||||
// then it will be used automatically. Defaults to the smart search analyzer.
|
||||
func (q *QueryStringQuery) QuoteAnalyzer(quoteAnalyzer string) *QueryStringQuery { |
||||
q.quoteAnalyzer = quoteAnalyzer |
||||
return q |
||||
} |
||||
|
||||
// MaxDeterminizedState protects against too-difficult regular expression queries.
|
||||
func (q *QueryStringQuery) MaxDeterminizedState(maxDeterminizedStates int) *QueryStringQuery { |
||||
q.maxDeterminizedStates = &maxDeterminizedStates |
||||
return q |
||||
} |
||||
|
||||
// AllowLeadingWildcard specifies whether leading wildcards should be allowed
|
||||
// or not (defaults to true).
|
||||
func (q *QueryStringQuery) AllowLeadingWildcard(allowLeadingWildcard bool) *QueryStringQuery { |
||||
q.allowLeadingWildcard = &allowLeadingWildcard |
||||
return q |
||||
} |
||||
|
||||
// LowercaseExpandedTerms indicates whether terms of wildcard, prefix, fuzzy
|
||||
// and range queries are automatically lower-cased or not. Default is true.
|
||||
//
|
||||
// Deprecated: Decision is now made by the analyzer.
|
||||
func (q *QueryStringQuery) LowercaseExpandedTerms(lowercaseExpandedTerms bool) *QueryStringQuery { |
||||
q.lowercaseExpandedTerms = &lowercaseExpandedTerms |
||||
return q |
||||
} |
||||
|
||||
// EnablePositionIncrements indicates whether to enable position increments
|
||||
// in result query. Defaults to true.
|
||||
//
|
||||
// When set, result phrase and multi-phrase queries will be aware of position
|
||||
// increments. Useful when e.g. a StopFilter increases the position increment
|
||||
// of the token that follows an omitted token.
|
||||
func (q *QueryStringQuery) EnablePositionIncrements(enablePositionIncrements bool) *QueryStringQuery { |
||||
q.enablePositionIncrements = &enablePositionIncrements |
||||
return q |
||||
} |
||||
|
||||
// Fuzziness sets the edit distance for fuzzy queries. Default is "AUTO".
|
||||
func (q *QueryStringQuery) Fuzziness(fuzziness string) *QueryStringQuery { |
||||
q.fuzziness = fuzziness |
||||
return q |
||||
} |
||||
|
||||
// FuzzyPrefixLength sets the minimum prefix length for fuzzy queries.
|
||||
// The Elasticsearch default is 0.
|
||||
func (q *QueryStringQuery) FuzzyPrefixLength(fuzzyPrefixLength int) *QueryStringQuery { |
||||
q.fuzzyPrefixLength = &fuzzyPrefixLength |
||||
return q |
||||
} |
||||
|
||||
// FuzzyMaxExpansions sets the maximum number of terms a fuzzy query expands to.
|
||||
func (q *QueryStringQuery) FuzzyMaxExpansions(fuzzyMaxExpansions int) *QueryStringQuery { |
||||
q.fuzzyMaxExpansions = &fuzzyMaxExpansions |
||||
return q |
||||
} |
||||
|
||||
// FuzzyRewrite sets the rewrite method used for fuzzy queries.
|
||||
func (q *QueryStringQuery) FuzzyRewrite(fuzzyRewrite string) *QueryStringQuery { |
||||
q.fuzzyRewrite = fuzzyRewrite |
||||
return q |
||||
} |
||||
|
||||
// PhraseSlop sets the default slop for phrases. If zero, then exact matches
|
||||
// are required. Default value is zero.
|
||||
func (q *QueryStringQuery) PhraseSlop(phraseSlop int) *QueryStringQuery { |
||||
q.phraseSlop = &phraseSlop |
||||
return q |
||||
} |
||||
|
||||
// AnalyzeWildcard indicates whether to enable analysis on wildcard and prefix queries.
|
||||
func (q *QueryStringQuery) AnalyzeWildcard(analyzeWildcard bool) *QueryStringQuery { |
||||
q.analyzeWildcard = &analyzeWildcard |
||||
return q |
||||
} |
||||
|
||||
// Rewrite sets the rewrite method used for multi-term queries generated by the query string, e.g. "constant_score".
|
||||
func (q *QueryStringQuery) Rewrite(rewrite string) *QueryStringQuery { |
||||
q.rewrite = rewrite |
||||
return q |
||||
} |
||||
|
||||
// MinimumShouldMatch sets the minimum number (or percentage) of optional "should" clauses that must match.
|
||||
func (q *QueryStringQuery) MinimumShouldMatch(minimumShouldMatch string) *QueryStringQuery { |
||||
q.minimumShouldMatch = minimumShouldMatch |
||||
return q |
||||
} |
||||
|
||||
// Boost sets the boost for this query.
|
||||
func (q *QueryStringQuery) Boost(boost float64) *QueryStringQuery { |
||||
q.boost = &boost |
||||
return q |
||||
} |
||||
|
||||
// QuoteFieldSuffix is an optional field name suffix to automatically
|
||||
// try and add to the field searched when using quoted text.
|
||||
func (q *QueryStringQuery) QuoteFieldSuffix(quoteFieldSuffix string) *QueryStringQuery { |
||||
q.quoteFieldSuffix = quoteFieldSuffix |
||||
return q |
||||
} |
||||
|
||||
// Lenient indicates whether the query string parser should be lenient
|
||||
// when parsing field values. It defaults to the index setting and if not
|
||||
// set, defaults to false.
|
||||
func (q *QueryStringQuery) Lenient(lenient bool) *QueryStringQuery { |
||||
q.lenient = &lenient |
||||
return q |
||||
} |
||||
|
||||
// QueryName sets the query name for the filter that can be used when
|
||||
// searching for matched_filters per hit.
|
||||
func (q *QueryStringQuery) QueryName(queryName string) *QueryStringQuery { |
||||
q.queryName = queryName |
||||
return q |
||||
} |
||||
|
||||
// Locale specifies the locale to be used for string conversions.
|
||||
//
|
||||
// Deprecated: Decision is now made by the analyzer.
|
||||
func (q *QueryStringQuery) Locale(locale string) *QueryStringQuery { |
||||
q.locale = locale |
||||
return q |
||||
} |
||||
|
||||
// TimeZone can be used to automatically adjust to/from fields using a
|
||||
// timezone. Only used with date fields, of course.
|
||||
func (q *QueryStringQuery) TimeZone(timeZone string) *QueryStringQuery { |
||||
q.timeZone = timeZone |
||||
return q |
||||
} |
||||
|
||||
// Escape sets whether the query string should be escaped before it is parsed.
|
||||
func (q *QueryStringQuery) Escape(escape bool) *QueryStringQuery { |
||||
q.escape = &escape |
||||
return q |
||||
} |
||||
|
||||
// Source returns JSON for the query.
|
||||
func (q *QueryStringQuery) Source() (interface{}, error) { |
||||
source := make(map[string]interface{}) |
||||
query := make(map[string]interface{}) |
||||
source["query_string"] = query |
||||
|
||||
query["query"] = q.queryString |
||||
|
||||
if q.defaultField != "" { |
||||
query["default_field"] = q.defaultField |
||||
} |
||||
|
||||
if len(q.fields) > 0 { |
||||
var fields []string |
||||
for _, field := range q.fields { |
||||
if boost, found := q.fieldBoosts[field]; found { |
||||
if boost != nil { |
||||
fields = append(fields, fmt.Sprintf("%s^%f", field, *boost)) |
||||
} else { |
||||
fields = append(fields, field) |
||||
} |
||||
} else { |
||||
fields = append(fields, field) |
||||
} |
||||
} |
||||
query["fields"] = fields |
||||
} |
||||
|
||||
if q.tieBreaker != nil { |
||||
query["tie_breaker"] = *q.tieBreaker |
||||
} |
||||
if q.defaultOperator != "" { |
||||
query["default_operator"] = q.defaultOperator |
||||
} |
||||
if q.analyzer != "" { |
||||
query["analyzer"] = q.analyzer |
||||
} |
||||
if q.quoteAnalyzer != "" { |
||||
query["quote_analyzer"] = q.quoteAnalyzer |
||||
} |
||||
if q.maxDeterminizedStates != nil { |
||||
query["max_determinized_states"] = *q.maxDeterminizedStates |
||||
} |
||||
if q.allowLeadingWildcard != nil { |
||||
query["allow_leading_wildcard"] = *q.allowLeadingWildcard |
||||
} |
||||
if q.lowercaseExpandedTerms != nil { |
||||
query["lowercase_expanded_terms"] = *q.lowercaseExpandedTerms |
||||
} |
||||
if q.enablePositionIncrements != nil { |
||||
query["enable_position_increments"] = *q.enablePositionIncrements |
||||
} |
||||
if q.fuzziness != "" { |
||||
query["fuzziness"] = q.fuzziness |
||||
} |
||||
if q.boost != nil { |
||||
query["boost"] = *q.boost |
||||
} |
||||
if q.fuzzyPrefixLength != nil { |
||||
query["fuzzy_prefix_length"] = *q.fuzzyPrefixLength |
||||
} |
||||
if q.fuzzyMaxExpansions != nil { |
||||
query["fuzzy_max_expansions"] = *q.fuzzyMaxExpansions |
||||
} |
||||
if q.fuzzyRewrite != "" { |
||||
query["fuzzy_rewrite"] = q.fuzzyRewrite |
||||
} |
||||
if q.phraseSlop != nil { |
||||
query["phrase_slop"] = *q.phraseSlop |
||||
} |
||||
if q.analyzeWildcard != nil { |
||||
query["analyze_wildcard"] = *q.analyzeWildcard |
||||
} |
||||
if q.rewrite != "" { |
||||
query["rewrite"] = q.rewrite |
||||
} |
||||
if q.minimumShouldMatch != "" { |
||||
query["minimum_should_match"] = q.minimumShouldMatch |
||||
} |
||||
if q.quoteFieldSuffix != "" { |
||||
query["quote_field_suffix"] = q.quoteFieldSuffix |
||||
} |
||||
if q.lenient != nil { |
||||
query["lenient"] = *q.lenient |
||||
} |
||||
if q.queryName != "" { |
||||
query["_name"] = q.queryName |
||||
} |
||||
if q.locale != "" { |
||||
query["locale"] = q.locale |
||||
} |
||||
if q.timeZone != "" { |
||||
query["time_zone"] = q.timeZone |
||||
} |
||||
if q.escape != nil { |
||||
query["escape"] = *q.escape |
||||
} |
||||
if q.typ != "" { |
||||
query["type"] = q.typ |
||||
} |
||||
|
||||
return source, nil |
||||
} |
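A small sketch of the body this builder produces; the field names and query text are invented, and the printed JSON is approximate because encoding/json sorts map keys.

```go
package es

import (
	"encoding/json"
	"fmt"
)

func ExampleQueryStringQuery() {
	q := NewQueryStringQuery("golang AND (json OR yaml)").
		Field("title").
		FieldWithBoost("body", 2).
		DefaultOperator("AND")
	src, err := q.Source()
	if err != nil {
		panic(err)
	}
	b, _ := json.Marshal(src)
	fmt.Println(string(b))
	// Roughly:
	// {"query_string":{"default_operator":"AND","fields":["title","body^2.000000"],"query":"golang AND (json OR yaml)"}}
}
```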
@ -0,0 +1,155 @@ |
||||
// Copyright 2012-present Oliver Eilhard. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-license.
|
||||
// See http://olivere.mit-license.org/license.txt for details.
|
||||
|
||||
package es |
||||
|
||||
// RangeQuery matches documents with fields that have terms within a certain range.
|
||||
//
|
||||
// For details, see
|
||||
// https://www.elastic.co/guide/en/elasticsearch/reference/7.0/query-dsl-range-query.html
|
||||
type RangeQuery struct { |
||||
name string |
||||
from interface{} |
||||
to interface{} |
||||
timeZone string |
||||
includeLower bool |
||||
includeUpper bool |
||||
boost *float64 |
||||
queryName string |
||||
format string |
||||
relation string |
||||
} |
||||
|
||||
// NewRangeQuery creates and initializes a new RangeQuery.
|
||||
func NewRangeQuery(name string) *RangeQuery { |
||||
return &RangeQuery{name: name, includeLower: true, includeUpper: true} |
||||
} |
||||
|
||||
// From indicates the from part of the RangeQuery.
|
||||
// Use nil to indicate an unbounded from part.
|
||||
func (q *RangeQuery) From(from interface{}) *RangeQuery { |
||||
q.from = from |
||||
return q |
||||
} |
||||
|
||||
// Gt indicates a greater-than value for the from part.
|
||||
// Use nil to indicate an unbounded from part.
|
||||
func (q *RangeQuery) Gt(from interface{}) *RangeQuery { |
||||
q.from = from |
||||
q.includeLower = false |
||||
return q |
||||
} |
||||
|
||||
// Gte indicates a greater-than-or-equal value for the from part.
|
||||
// Use nil to indicate an unbounded from part.
|
||||
func (q *RangeQuery) Gte(from interface{}) *RangeQuery { |
||||
q.from = from |
||||
q.includeLower = true |
||||
return q |
||||
} |
||||
|
||||
// To indicates the to part of the RangeQuery.
|
||||
// Use nil to indicate an unbounded to part.
|
||||
func (q *RangeQuery) To(to interface{}) *RangeQuery { |
||||
q.to = to |
||||
return q |
||||
} |
||||
|
||||
// Lt indicates a less-than value for the to part.
|
||||
// Use nil to indicate an unbounded to part.
|
||||
func (q *RangeQuery) Lt(to interface{}) *RangeQuery { |
||||
q.to = to |
||||
q.includeUpper = false |
||||
return q |
||||
} |
||||
|
||||
// Lte indicates a less-than-or-equal value for the to part.
|
||||
// Use nil to indicate an unbounded to part.
|
||||
func (q *RangeQuery) Lte(to interface{}) *RangeQuery { |
||||
q.to = to |
||||
q.includeUpper = true |
||||
return q |
||||
} |
||||
|
||||
// IncludeLower indicates whether the lower bound should be included or not.
|
||||
// Defaults to true.
|
||||
func (q *RangeQuery) IncludeLower(includeLower bool) *RangeQuery { |
||||
q.includeLower = includeLower |
||||
return q |
||||
} |
||||
|
||||
// IncludeUpper indicates whether the upper bound should be included or not.
|
||||
// Defaults to true.
|
||||
func (q *RangeQuery) IncludeUpper(includeUpper bool) *RangeQuery { |
||||
q.includeUpper = includeUpper |
||||
return q |
||||
} |
||||
|
||||
// Boost sets the boost for this query.
|
||||
func (q *RangeQuery) Boost(boost float64) *RangeQuery { |
||||
q.boost = &boost |
||||
return q |
||||
} |
||||
|
||||
// QueryName sets the query name for the filter that can be used when
|
||||
// searching for matched_filters per hit.
|
||||
func (q *RangeQuery) QueryName(queryName string) *RangeQuery { |
||||
q.queryName = queryName |
||||
return q |
||||
} |
||||
|
||||
// TimeZone is used for date fields. In that case, we can adjust the
|
||||
// from/to fields using a timezone.
|
||||
func (q *RangeQuery) TimeZone(timeZone string) *RangeQuery { |
||||
q.timeZone = timeZone |
||||
return q |
||||
} |
||||
|
||||
// Format is used for date fields. In that case, we can set the format
|
||||
// to be used instead of the mapper format.
|
||||
func (q *RangeQuery) Format(format string) *RangeQuery { |
||||
q.format = format |
||||
return q |
||||
} |
||||
|
||||
// Relation is used for range fields, which can be one of
|
||||
// "within", "contains", "intersects" (default) and "disjoint".
|
||||
func (q *RangeQuery) Relation(relation string) *RangeQuery { |
||||
q.relation = relation |
||||
return q |
||||
} |
||||
|
||||
// Source returns JSON for the query.
|
||||
func (q *RangeQuery) Source() (interface{}, error) { |
||||
source := make(map[string]interface{}) |
||||
|
||||
rangeQ := make(map[string]interface{}) |
||||
source["range"] = rangeQ |
||||
|
||||
params := make(map[string]interface{}) |
||||
rangeQ[q.name] = params |
||||
|
||||
params["from"] = q.from |
||||
params["to"] = q.to |
||||
if q.timeZone != "" { |
||||
params["time_zone"] = q.timeZone |
||||
} |
||||
if q.format != "" { |
||||
params["format"] = q.format |
||||
} |
||||
if q.relation != "" { |
||||
params["relation"] = q.relation |
||||
} |
||||
if q.boost != nil { |
||||
params["boost"] = *q.boost |
||||
} |
||||
params["include_lower"] = q.includeLower |
||||
params["include_upper"] = q.includeUpper |
||||
|
||||
if q.queryName != "" { |
||||
rangeQ["_name"] = q.queryName |
||||
} |
||||
|
||||
return source, nil |
||||
} |
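A date-range sketch using this builder; the field name and date values are invented, and the output shown is approximate.

```go
package es

import (
	"encoding/json"
	"fmt"
)

func ExampleRangeQuery() {
	q := NewRangeQuery("publish_date").
		Gte("2024-01-01").
		Lt("2025-01-01").
		Format("yyyy-MM-dd")
	src, err := q.Source()
	if err != nil {
		panic(err)
	}
	b, _ := json.Marshal(src)
	fmt.Println(string(b))
	// Roughly:
	// {"range":{"publish_date":{"format":"yyyy-MM-dd","from":"2024-01-01","include_lower":true,"include_upper":false,"to":"2025-01-01"}}}
}
```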
@ -0,0 +1,213 @@ |
||||
// Copyright 2012-present Oliver Eilhard. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-license.
|
||||
// See http://olivere.mit-license.org/license.txt for details.
|
||||
|
||||
package es |
||||
|
||||
// RankFeatureQuery boosts the relevance score of documents based on the
|
||||
// numeric value of a rank_feature or rank_features field.
|
||||
//
|
||||
// The RankFeatureQuery is typically used in the should clause of a BoolQuery
|
||||
// so its relevance scores are added to other scores from the BoolQuery.
|
||||
//
|
||||
// For more details, see:
|
||||
// https://www.elastic.co/guide/en/elasticsearch/reference/7.14/query-dsl-rank-feature-query.html
|
||||
type RankFeatureQuery struct { |
||||
field string |
||||
scoreFunc RankFeatureScoreFunction |
||||
boost *float64 |
||||
queryName string |
||||
} |
||||
|
||||
// NewRankFeatureQuery creates and initializes a new RankFeatureQuery.
|
||||
func NewRankFeatureQuery(field string) *RankFeatureQuery { |
||||
return &RankFeatureQuery{ |
||||
field: field, |
||||
} |
||||
} |
||||
|
||||
// Field name.
|
||||
func (q *RankFeatureQuery) Field(field string) *RankFeatureQuery { |
||||
q.field = field |
||||
return q |
||||
} |
||||
|
||||
// ScoreFunction specifies the score function for the RankFeatureQuery.
|
||||
func (q *RankFeatureQuery) ScoreFunction(f RankFeatureScoreFunction) *RankFeatureQuery { |
||||
q.scoreFunc = f |
||||
return q |
||||
} |
||||
|
||||
// Boost sets the boost for this query.
|
||||
func (q *RankFeatureQuery) Boost(boost float64) *RankFeatureQuery { |
||||
q.boost = &boost |
||||
return q |
||||
} |
||||
|
||||
// QueryName sets the query name for the filter that can be used when
|
||||
// searching for matched_filters per hit.
|
||||
func (q *RankFeatureQuery) QueryName(queryName string) *RankFeatureQuery { |
||||
q.queryName = queryName |
||||
return q |
||||
} |
||||
|
||||
// Source returns the JSON serializable content for this query.
|
||||
func (q *RankFeatureQuery) Source() (interface{}, error) { |
||||
// {
|
||||
// "rank_feature": {
|
||||
// "field": "pagerank",
|
||||
// "saturation": {
|
||||
// "pivot": 8
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
query := make(map[string]interface{}) |
||||
params := make(map[string]interface{}) |
||||
query["rank_feature"] = params |
||||
params["field"] = q.field |
||||
if q.scoreFunc != nil { |
||||
src, err := q.scoreFunc.Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
params[q.scoreFunc.Name()] = src |
||||
} |
||||
if q.boost != nil { |
||||
params["boost"] = *q.boost |
||||
} |
||||
if q.queryName != "" { |
||||
params["_name"] = q.queryName |
||||
} |
||||
|
||||
return query, nil |
||||
} |
||||
|
||||
// -- Score functions --
|
||||
|
||||
// RankFeatureScoreFunction specifies the interface for score functions
|
||||
// in the context of a RankFeatureQuery.
|
||||
type RankFeatureScoreFunction interface { |
||||
Name() string |
||||
Source() (interface{}, error) |
||||
} |
||||
|
||||
// -- Log score function --
|
||||
|
||||
// RankFeatureLogScoreFunction represents a Logarithmic score function for a
|
||||
// RankFeatureQuery.
|
||||
//
|
||||
// See here for details:
|
||||
// https://www.elastic.co/guide/en/elasticsearch/reference/7.14/query-dsl-rank-feature-query.html#rank-feature-query-logarithm
|
||||
type RankFeatureLogScoreFunction struct { |
||||
scalingFactor float64 |
||||
} |
||||
|
||||
// NewRankFeatureLogScoreFunction returns a new RankFeatureLogScoreFunction
|
||||
// with the given scaling factor.
|
||||
func NewRankFeatureLogScoreFunction(scalingFactor float64) *RankFeatureLogScoreFunction { |
||||
return &RankFeatureLogScoreFunction{ |
||||
scalingFactor: scalingFactor, |
||||
} |
||||
} |
||||
|
||||
// Name of the score function.
|
||||
func (f *RankFeatureLogScoreFunction) Name() string { return "log" } |
||||
|
||||
// Source returns a serializable JSON object for building the query.
|
||||
func (f *RankFeatureLogScoreFunction) Source() (interface{}, error) { |
||||
return map[string]interface{}{ |
||||
"scaling_factor": f.scalingFactor, |
||||
}, nil |
||||
} |
||||
|
||||
// -- Saturation score function --
|
||||
|
||||
// RankFeatureSaturationScoreFunction represents a Saturation score function for a
|
||||
// RankFeatureQuery.
|
||||
//
|
||||
// See here for details:
|
||||
// https://www.elastic.co/guide/en/elasticsearch/reference/7.14/query-dsl-rank-feature-query.html#rank-feature-query-saturation
|
||||
type RankFeatureSaturationScoreFunction struct { |
||||
pivot *float64 |
||||
} |
||||
|
||||
// NewRankFeatureSaturationScoreFunction initializes a new
|
||||
// RankFeatureSaturationScoreFunction.
|
||||
func NewRankFeatureSaturationScoreFunction() *RankFeatureSaturationScoreFunction { |
||||
return &RankFeatureSaturationScoreFunction{} |
||||
} |
||||
|
||||
// Pivot specifies the pivot to use.
|
||||
func (f *RankFeatureSaturationScoreFunction) Pivot(pivot float64) *RankFeatureSaturationScoreFunction { |
||||
f.pivot = &pivot |
||||
return f |
||||
} |
||||
|
||||
// Name of the score function.
|
||||
func (f *RankFeatureSaturationScoreFunction) Name() string { return "saturation" } |
||||
|
||||
// Source returns a serializable JSON object for building the query.
|
||||
func (f *RankFeatureSaturationScoreFunction) Source() (interface{}, error) { |
||||
m := make(map[string]interface{}) |
||||
if f.pivot != nil { |
||||
m["pivot"] = *f.pivot |
||||
} |
||||
return m, nil |
||||
} |
||||
|
||||
// -- Sigmoid score function --
|
||||
|
||||
// RankFeatureSigmoidScoreFunction represents a Sigmoid score function for a
|
||||
// RankFeatureQuery.
|
||||
//
|
||||
// See here for details:
|
||||
// https://www.elastic.co/guide/en/elasticsearch/reference/7.14/query-dsl-rank-feature-query.html#rank-feature-query-sigmoid
|
||||
type RankFeatureSigmoidScoreFunction struct { |
||||
pivot float64 |
||||
exponent float64 |
||||
} |
||||
|
||||
// NewRankFeatureSigmoidScoreFunction returns a new RankFeatureSigmoidScoreFunction
|
||||
// with the given pivot and exponent.
|
||||
func NewRankFeatureSigmoidScoreFunction(pivot, exponent float64) *RankFeatureSigmoidScoreFunction { |
||||
return &RankFeatureSigmoidScoreFunction{ |
||||
pivot: pivot, |
||||
exponent: exponent, |
||||
} |
||||
} |
||||
|
||||
// Name of the score function.
|
||||
func (f *RankFeatureSigmoidScoreFunction) Name() string { return "sigmoid" } |
||||
|
||||
// Source returns a serializable JSON object for building the query.
|
||||
func (f *RankFeatureSigmoidScoreFunction) Source() (interface{}, error) { |
||||
return map[string]interface{}{ |
||||
"pivot": f.pivot, |
||||
"exponent": f.exponent, |
||||
}, nil |
||||
} |
||||
|
||||
// -- Linear score function --
|
||||
|
||||
// RankFeatureLinearScoreFunction represents a Linear score function for a
|
||||
// RankFeatureQuery.
|
||||
//
|
||||
// See here for details:
|
||||
// https://www.elastic.co/guide/en/elasticsearch/reference/7.14/query-dsl-rank-feature-query.html#rank-feature-query-linear
|
||||
type RankFeatureLinearScoreFunction struct { |
||||
} |
||||
|
||||
// NewRankFeatureLinearScoreFunction initializes a new
|
||||
// RankFeatureLinearScoreFunction.
|
||||
func NewRankFeatureLinearScoreFunction() *RankFeatureLinearScoreFunction { |
||||
return &RankFeatureLinearScoreFunction{} |
||||
} |
||||
|
||||
// Name of the score function.
|
||||
func (f *RankFeatureLinearScoreFunction) Name() string { return "linear" } |
||||
|
||||
// Source returns a serializable JSON object for building the query.
|
||||
func (f *RankFeatureLinearScoreFunction) Source() (interface{}, error) { |
||||
return map[string]interface{}{}, nil |
||||
} |
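A sketch that mirrors the "pagerank"/saturation example from the comment in Source(); the field name is invented and the printed JSON is approximate.

```go
package es

import (
	"encoding/json"
	"fmt"
)

func ExampleRankFeatureQuery() {
	q := NewRankFeatureQuery("pagerank").
		ScoreFunction(NewRankFeatureSaturationScoreFunction().Pivot(8))
	src, err := q.Source()
	if err != nil {
		panic(err)
	}
	b, _ := json.Marshal(src)
	fmt.Println(string(b))
	// Roughly: {"rank_feature":{"field":"pagerank","saturation":{"pivot":8}}}
}
```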
@ -0,0 +1,27 @@ |
||||
// Copyright 2012-present Oliver Eilhard, John Stanford. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-license.
|
||||
// See http://olivere.mit-license.org/license.txt for details.
|
||||
|
||||
package es |
||||
|
||||
import "encoding/json" |
||||
|
||||
// RawStringQuery can be used to treat a string representation of an ES query
|
||||
// as a Query. Example usage:
|
||||
//
|
||||
// q := RawStringQuery("{\"match_all\":{}}")
|
||||
// db.Search().Query(q).From(1).Size(100).Do()
|
||||
type RawStringQuery string |
||||
|
||||
// NewRawStringQuery initializes a new RawStringQuery.
|
||||
// It is the same as RawStringQuery(q).
|
||||
func NewRawStringQuery(q string) RawStringQuery { |
||||
return RawStringQuery(q) |
||||
} |
||||
|
||||
// Source returns the JSON-decoded body of the raw query string.
|
||||
func (q RawStringQuery) Source() (interface{}, error) { |
||||
var f interface{} |
||||
err := json.Unmarshal([]byte(q), &f) |
||||
return f, err |
||||
} |
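A minimal sketch showing that Source simply round-trips the raw JSON string:

```go
package es

import (
	"encoding/json"
	"fmt"
)

func ExampleRawStringQuery() {
	q := NewRawStringQuery(`{"match_all":{}}`)
	src, err := q.Source()
	if err != nil {
		panic(err)
	}
	b, _ := json.Marshal(src)
	fmt.Println(string(b))
	// Roughly: {"match_all":{}}
}
```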
@ -0,0 +1,91 @@ |
||||
// Copyright 2012-present Oliver Eilhard. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-license.
|
||||
// See http://olivere.mit-license.org/license.txt for details.
|
||||
|
||||
package es |
||||
|
||||
// RegexpQuery allows you to use regular expression term queries.
|
||||
//
|
||||
// For more details, see
|
||||
// https://www.elastic.co/guide/en/elasticsearch/reference/7.0/query-dsl-regexp-query.html
|
||||
type RegexpQuery struct { |
||||
name string |
||||
regexp string |
||||
flags string |
||||
boost *float64 |
||||
rewrite string |
||||
caseInsensitive *bool |
||||
queryName string |
||||
maxDeterminizedStates *int |
||||
} |
||||
|
||||
// NewRegexpQuery creates and initializes a new RegexpQuery.
|
||||
func NewRegexpQuery(name string, regexp string) *RegexpQuery { |
||||
return &RegexpQuery{name: name, regexp: regexp} |
||||
} |
||||
|
||||
// Flags sets the regexp flags.
|
||||
func (q *RegexpQuery) Flags(flags string) *RegexpQuery { |
||||
q.flags = flags |
||||
return q |
||||
} |
||||
|
||||
// MaxDeterminizedStates protects against complex regular expressions.
|
||||
func (q *RegexpQuery) MaxDeterminizedStates(maxDeterminizedStates int) *RegexpQuery { |
||||
q.maxDeterminizedStates = &maxDeterminizedStates |
||||
return q |
||||
} |
||||
|
||||
// Boost sets the boost for this query.
|
||||
func (q *RegexpQuery) Boost(boost float64) *RegexpQuery { |
||||
q.boost = &boost |
||||
return q |
||||
} |
||||
|
||||
// Rewrite sets the method Elasticsearch uses to rewrite this multi-term query, e.g. "constant_score" (the default).
|
||||
func (q *RegexpQuery) Rewrite(rewrite string) *RegexpQuery { |
||||
q.rewrite = rewrite |
||||
return q |
||||
} |
||||
|
||||
// CaseInsensitive allows case-insensitive matching of the regular expression against the indexed field values.
|
||||
func (q *RegexpQuery) CaseInsensitive(caseInsensitive bool) *RegexpQuery { |
||||
q.caseInsensitive = &caseInsensitive |
||||
return q |
||||
} |
||||
|
||||
// QueryName sets the query name for the filter that can be used
|
||||
// when searching for matched_filters per hit
|
||||
func (q *RegexpQuery) QueryName(queryName string) *RegexpQuery { |
||||
q.queryName = queryName |
||||
return q |
||||
} |
||||
|
||||
// Source returns the JSON-serializable query data.
|
||||
func (q *RegexpQuery) Source() (interface{}, error) { |
||||
source := make(map[string]interface{}) |
||||
query := make(map[string]interface{}) |
||||
source["regexp"] = query |
||||
|
||||
x := make(map[string]interface{}) |
||||
x["value"] = q.regexp |
||||
if q.flags != "" { |
||||
x["flags"] = q.flags |
||||
} |
||||
if q.maxDeterminizedStates != nil { |
||||
x["max_determinized_states"] = *q.maxDeterminizedStates |
||||
} |
||||
if q.boost != nil { |
||||
x["boost"] = *q.boost |
||||
} |
||||
if q.rewrite != "" { |
||||
x["rewrite"] = q.rewrite |
||||
} |
||||
if q.caseInsensitive != nil { |
||||
x["case_insensitive"] = *q.caseInsensitive |
||||
} |
||||
if q.queryName != "" { |
||||
x["name"] = q.queryName |
||||
} |
||||
query[q.name] = x |
||||
|
||||
return source, nil |
||||
} |
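A sketch of the generated body; the field, pattern, and flags are invented and the output is approximate.

```go
package es

import (
	"encoding/json"
	"fmt"
)

func ExampleRegexpQuery() {
	q := NewRegexpQuery("user.name", "k.*y").
		Flags("INTERSECTION|COMPLEMENT").
		CaseInsensitive(true)
	src, err := q.Source()
	if err != nil {
		panic(err)
	}
	b, _ := json.Marshal(src)
	fmt.Println(string(b))
	// Roughly: {"regexp":{"user.name":{"case_insensitive":true,"flags":"INTERSECTION|COMPLEMENT","value":"k.*y"}}}
}
```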
@ -0,0 +1,51 @@ |
||||
// Copyright 2012-present Oliver Eilhard. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-license.
|
||||
// See http://olivere.mit-license.org/license.txt for details.
|
||||
|
||||
package es |
||||
|
||||
import "errors" |
||||
|
||||
// ScriptQuery allows scripts to be used as filters.
|
||||
//
|
||||
// For details, see
|
||||
// https://www.elastic.co/guide/en/elasticsearch/reference/7.0/query-dsl-script-query.html
|
||||
type ScriptQuery struct { |
||||
script *Script |
||||
queryName string |
||||
} |
||||
|
||||
// NewScriptQuery creates and initializes a new ScriptQuery.
|
||||
func NewScriptQuery(script *Script) *ScriptQuery { |
||||
return &ScriptQuery{ |
||||
script: script, |
||||
} |
||||
} |
||||
|
||||
// QueryName sets the query name for the filter that can be used
|
||||
// when searching for matched_filters per hit
|
||||
func (q *ScriptQuery) QueryName(queryName string) *ScriptQuery { |
||||
q.queryName = queryName |
||||
return q |
||||
} |
||||
|
||||
// Source returns JSON for the query.
|
||||
func (q *ScriptQuery) Source() (interface{}, error) { |
||||
if q.script == nil { |
||||
return nil, errors.New("ScriptQuery expected a script") |
||||
} |
||||
source := make(map[string]interface{}) |
||||
params := make(map[string]interface{}) |
||||
source["script"] = params |
||||
|
||||
src, err := q.script.Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
params["script"] = src |
||||
|
||||
if q.queryName != "" { |
||||
params["_name"] = q.queryName |
||||
} |
||||
return source, nil |
||||
} |
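A sketch that combines this filter with the Script builder from script.go in the same commit; the script source and parameter are invented, and no exact JSON is claimed because the nesting of the script body depends on Script.Source (not shown here).

```go
package es

import (
	"encoding/json"
	"fmt"
)

func ExampleScriptQuery() {
	script := NewScript("doc['num_likes'].value > params.threshold").
		Param("threshold", 10)
	q := NewScriptQuery(script)
	src, err := q.Source()
	if err != nil {
		panic(err)
	}
	b, _ := json.Marshal(src)
	fmt.Println(string(b)) // the script definition is nested under "script" -> "script"
}
```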
@ -0,0 +1,110 @@ |
||||
// Copyright 2012-present Oliver Eilhard. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-license.
|
||||
// See http://olivere.mit-license.org/license.txt for details.
|
||||
|
||||
package es |
||||
|
||||
import "errors" |
||||
|
||||
// ScriptScoreQuery uses a script to provide a custom score for returned documents.
|
||||
//
|
||||
// A ScriptScoreQuery query is useful if, for example, a scoring function is
|
||||
// expensive and you only need to calculate the score of a filtered set of documents.
|
||||
//
|
||||
// For more details, see
|
||||
// https://www.elastic.co/guide/en/elasticsearch/reference/7.4/query-dsl-script-score-query.html
|
||||
type ScriptScoreQuery struct { |
||||
query Query |
||||
script *Script |
||||
minScore *float64 |
||||
boost *float64 |
||||
queryName string |
||||
} |
||||
|
||||
// NewScriptScoreQuery creates and initializes a new script_score query.
|
||||
func NewScriptScoreQuery(query Query, script *Script) *ScriptScoreQuery { |
||||
return &ScriptScoreQuery{ |
||||
query: query, |
||||
script: script, |
||||
} |
||||
} |
||||
|
||||
// Query to be used in the ScriptScoreQuery.
|
||||
func (q *ScriptScoreQuery) Query(query Query) *ScriptScoreQuery { |
||||
q.query = query |
||||
return q |
||||
} |
||||
|
||||
// Script to calculate the score.
|
||||
func (q *ScriptScoreQuery) Script(script *Script) *ScriptScoreQuery { |
||||
q.script = script |
||||
return q |
||||
} |
||||
|
||||
// MinScore sets the minimum score.
|
||||
func (q *ScriptScoreQuery) MinScore(minScore float64) *ScriptScoreQuery { |
||||
q.minScore = &minScore |
||||
return q |
||||
} |
||||
|
||||
// Boost sets the boost for this query.
|
||||
func (q *ScriptScoreQuery) Boost(boost float64) *ScriptScoreQuery { |
||||
q.boost = &boost |
||||
return q |
||||
} |
||||
|
||||
// QueryName sets the query name for the filter.
|
||||
func (q *ScriptScoreQuery) QueryName(queryName string) *ScriptScoreQuery { |
||||
q.queryName = queryName |
||||
return q |
||||
} |
||||
|
||||
// Source returns JSON for the function score query.
|
||||
func (q *ScriptScoreQuery) Source() (interface{}, error) { |
||||
// {
|
||||
// "script_score" : {
|
||||
// "query" : {
|
||||
// "match" : { "message": "elasticsearch" }
|
||||
// },
|
||||
// "script" : {
|
||||
// "source" : "doc['likes'].value / 10"
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
source := make(map[string]interface{}) |
||||
query := make(map[string]interface{}) |
||||
source["script_score"] = query |
||||
|
||||
if q.query == nil { |
||||
return nil, errors.New("ScriptScoreQuery: Query is missing") |
||||
} |
||||
if q.script == nil { |
||||
return nil, errors.New("ScriptScoreQuery: Script is missing") |
||||
} |
||||
|
||||
if src, err := q.query.Source(); err != nil { |
||||
return nil, err |
||||
} else { |
||||
query["query"] = src |
||||
} |
||||
|
||||
if src, err := q.script.Source(); err != nil { |
||||
return nil, err |
||||
} else { |
||||
query["script"] = src |
||||
} |
||||
|
||||
if v := q.minScore; v != nil { |
||||
query["min_score"] = *v |
||||
} |
||||
|
||||
if v := q.boost; v != nil { |
||||
query["boost"] = *v |
||||
} |
||||
if q.queryName != "" { |
||||
query["_name"] = q.queryName |
||||
} |
||||
|
||||
return source, nil |
||||
} |
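A sketch that mirrors the "likes" example from the comment in Source(); the wrapped query and script are invented, and no exact JSON is claimed since the script rendering depends on Script.Source.

```go
package es

import (
	"encoding/json"
	"fmt"
)

func ExampleScriptScoreQuery() {
	q := NewScriptScoreQuery(
		NewTermQuery("message", "elasticsearch"),
		NewScript("doc['likes'].value / 10"),
	).MinScore(1.0)
	src, err := q.Source()
	if err != nil {
		panic(err)
	}
	b, _ := json.Marshal(src)
	fmt.Println(string(b)) // query, script and min_score are nested under "script_score"
}
```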
@ -0,0 +1,53 @@ |
||||
// Copyright 2012-present Oliver Eilhard. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-license.
|
||||
// See http://olivere.mit-license.org/license.txt for details.
|
||||
|
||||
package es |
||||
|
||||
// SliceQuery allows partitioning the documents into several slices.
|
||||
// It is used e.g. to slice scroll operations in Elasticsearch 5.0 or later.
|
||||
// See https://www.elastic.co/guide/en/elasticsearch/reference/7.0/search-request-scroll.html#sliced-scroll
|
||||
// for details.
|
||||
type SliceQuery struct { |
||||
field string |
||||
id *int |
||||
max *int |
||||
} |
||||
|
||||
// NewSliceQuery creates a new SliceQuery.
|
||||
func NewSliceQuery() *SliceQuery { |
||||
return &SliceQuery{} |
||||
} |
||||
|
||||
// Field is the name of the field to slice against (_id by default in Elasticsearch 7.x).
|
||||
func (s *SliceQuery) Field(field string) *SliceQuery { |
||||
s.field = field |
||||
return s |
||||
} |
||||
|
||||
// Id is the id of the slice.
|
||||
func (s *SliceQuery) Id(id int) *SliceQuery { |
||||
s.id = &id |
||||
return s |
||||
} |
||||
|
||||
// Max is the maximum number of slices.
|
||||
func (s *SliceQuery) Max(max int) *SliceQuery { |
||||
s.max = &max |
||||
return s |
||||
} |
||||
|
||||
// Source returns the JSON body.
|
||||
func (s *SliceQuery) Source() (interface{}, error) { |
||||
m := make(map[string]interface{}) |
||||
if s.field != "" { |
||||
m["field"] = s.field |
||||
} |
||||
if s.id != nil { |
||||
m["id"] = *s.id |
||||
} |
||||
if s.max != nil { |
||||
m["max"] = *s.max |
||||
} |
||||
return m, nil |
||||
} |
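A sketch of the body produced for one slice of a sliced scroll; it would be attached to the search request under "slice", which is outside the scope of this builder.

```go
package es

import (
	"encoding/json"
	"fmt"
)

func ExampleSliceQuery() {
	s := NewSliceQuery().Id(0).Max(2) // first of two slices
	src, err := s.Source()
	if err != nil {
		panic(err)
	}
	b, _ := json.Marshal(src)
	fmt.Println(string(b))
	// Roughly: {"id":0,"max":2}
}
```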
@ -0,0 +1,67 @@ |
||||
// Copyright 2012-present Oliver Eilhard. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-license.
|
||||
// See http://olivere.mit-license.org/license.txt for details.
|
||||
|
||||
package es |
||||
|
||||
// TermQuery finds documents that contain the exact term specified
|
||||
// in the inverted index.
|
||||
//
|
||||
// For details, see
|
||||
// https://www.elastic.co/guide/en/elasticsearch/reference/7.0/query-dsl-term-query.html
|
||||
type TermQuery struct { |
||||
name string |
||||
value interface{} |
||||
boost *float64 |
||||
caseInsensitive *bool |
||||
queryName string |
||||
} |
||||
|
||||
// NewTermQuery creates and initializes a new TermQuery.
|
||||
func NewTermQuery(name string, value interface{}) *TermQuery { |
||||
return &TermQuery{name: name, value: value} |
||||
} |
||||
|
||||
// Boost sets the boost for this query.
|
||||
func (q *TermQuery) Boost(boost float64) *TermQuery { |
||||
q.boost = &boost |
||||
return q |
||||
} |
||||
|
||||
// CaseInsensitive allows case-insensitive matching of the value against the indexed field values.
|
||||
func (q *TermQuery) CaseInsensitive(caseInsensitive bool) *TermQuery { |
||||
q.caseInsensitive = &caseInsensitive |
||||
return q |
||||
} |
||||
|
||||
// QueryName sets the query name for the filter that can be used
|
||||
// when searching for matched_filters per hit
|
||||
func (q *TermQuery) QueryName(queryName string) *TermQuery { |
||||
q.queryName = queryName |
||||
return q |
||||
} |
||||
|
||||
// Source returns JSON for the query.
|
||||
func (q *TermQuery) Source() (interface{}, error) { |
||||
// {"term":{"name":"value"}}
|
||||
source := make(map[string]interface{}) |
||||
tq := make(map[string]interface{}) |
||||
source["term"] = tq |
||||
|
||||
if q.boost == nil && q.caseInsensitive == nil && q.queryName == "" { |
||||
tq[q.name] = q.value |
||||
} else { |
||||
subQ := make(map[string]interface{}) |
||||
subQ["value"] = q.value |
||||
if q.boost != nil { |
||||
subQ["boost"] = *q.boost |
||||
} |
||||
if q.caseInsensitive != nil { |
||||
subQ["case_insensitive"] = *q.caseInsensitive |
||||
} |
||||
if q.queryName != "" { |
||||
subQ["_name"] = q.queryName |
||||
} |
||||
tq[q.name] = subQ |
||||
} |
||||
return source, nil |
||||
} |
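A sketch showing how setting an option switches the body from the short form to the expanded form; field and value are invented.

```go
package es

import (
	"encoding/json"
	"fmt"
)

func ExampleTermQuery() {
	q := NewTermQuery("user.id", "kimchy").Boost(1.5)
	src, err := q.Source()
	if err != nil {
		panic(err)
	}
	b, _ := json.Marshal(src)
	fmt.Println(string(b))
	// Roughly: {"term":{"user.id":{"boost":1.5,"value":"kimchy"}}}
	// Without Boost the short form {"term":{"user.id":"kimchy"}} is emitted instead.
}
```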
@ -0,0 +1,88 @@ |
||||
// Copyright 2012-present Oliver Eilhard. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-license.
|
||||
// See http://olivere.mit-license.org/license.txt for details.
|
||||
|
||||
package es |
||||
|
||||
// TermsQuery filters documents that have fields that match any
|
||||
// of the provided terms (not analyzed).
|
||||
//
|
||||
// For more details, see
|
||||
// https://www.elastic.co/guide/en/elasticsearch/reference/7.0/query-dsl-terms-query.html
|
||||
type TermsQuery struct { |
||||
name string |
||||
values []interface{} |
||||
termsLookup *TermsLookup |
||||
queryName string |
||||
boost *float64 |
||||
} |
||||
|
||||
// NewTermsQuery creates and initializes a new TermsQuery.
|
||||
func NewTermsQuery(name string, values ...interface{}) *TermsQuery { |
||||
q := &TermsQuery{ |
||||
name: name, |
||||
values: make([]interface{}, 0), |
||||
} |
||||
if len(values) > 0 { |
||||
q.values = append(q.values, values...) |
||||
} |
||||
return q |
||||
} |
||||
|
||||
// NewTermsQueryFromStrings creates and initializes a new TermsQuery
|
||||
// from strings.
|
||||
func NewTermsQueryFromStrings(name string, values ...string) *TermsQuery { |
||||
q := &TermsQuery{ |
||||
name: name, |
||||
values: make([]interface{}, 0), |
||||
} |
||||
for _, v := range values { |
||||
q.values = append(q.values, v) |
||||
} |
||||
return q |
||||
} |
||||
|
||||
// TermsLookup adds terms lookup details to the query.
|
||||
func (q *TermsQuery) TermsLookup(lookup *TermsLookup) *TermsQuery { |
||||
q.termsLookup = lookup |
||||
return q |
||||
} |
||||
|
||||
// Boost sets the boost for this query.
|
||||
func (q *TermsQuery) Boost(boost float64) *TermsQuery { |
||||
q.boost = &boost |
||||
return q |
||||
} |
||||
|
||||
// QueryName sets the query name for the filter that can be used
|
||||
// when searching for matched_filters per hit
|
||||
func (q *TermsQuery) QueryName(queryName string) *TermsQuery { |
||||
q.queryName = queryName |
||||
return q |
||||
} |
||||
|
||||
// Source creates the query source for the terms query.
|
||||
func (q *TermsQuery) Source() (interface{}, error) { |
||||
// {"terms":{"name":["value1","value2"]}}
|
||||
source := make(map[string]interface{}) |
||||
params := make(map[string]interface{}) |
||||
source["terms"] = params |
||||
|
||||
if q.termsLookup != nil { |
||||
src, err := q.termsLookup.Source() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
params[q.name] = src |
||||
} else { |
||||
params[q.name] = q.values |
||||
if q.boost != nil { |
||||
params["boost"] = *q.boost |
||||
} |
||||
if q.queryName != "" { |
||||
params["_name"] = q.queryName |
||||
} |
||||
} |
||||
|
||||
return source, nil |
||||
} |
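A sketch of the inline-values form; field and values are invented.

```go
package es

import (
	"encoding/json"
	"fmt"
)

func ExampleTermsQuery() {
	q := NewTermsQuery("tags", "go", "elasticsearch")
	src, err := q.Source()
	if err != nil {
		panic(err)
	}
	b, _ := json.Marshal(src)
	fmt.Println(string(b))
	// Roughly: {"terms":{"tags":["go","elasticsearch"]}}
}
```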
@ -0,0 +1,76 @@ |
||||
// Copyright 2012-present Oliver Eilhard. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-license.
|
||||
// See http://olivere.mit-license.org/license.txt for details.
|
||||
|
||||
package es |
||||
|
||||
// TermsLookup encapsulates the parameters needed to fetch terms.
|
||||
//
|
||||
// For more details, see
|
||||
// https://www.elastic.co/guide/en/elasticsearch/reference/7.0/query-dsl-terms-query.html#query-dsl-terms-lookup.
|
||||
type TermsLookup struct { |
||||
index string |
||||
typ string |
||||
id string |
||||
path string |
||||
routing string |
||||
} |
||||
|
||||
// NewTermsLookup creates and initializes a new TermsLookup.
|
||||
func NewTermsLookup() *TermsLookup { |
||||
t := &TermsLookup{} |
||||
return t |
||||
} |
||||
|
||||
// Index name.
|
||||
func (t *TermsLookup) Index(index string) *TermsLookup { |
||||
t.index = index |
||||
return t |
||||
} |
||||
|
||||
// Type name.
|
||||
//
|
||||
// Deprecated: Types are in the process of being removed.
|
||||
func (t *TermsLookup) Type(typ string) *TermsLookup { |
||||
t.typ = typ |
||||
return t |
||||
} |
||||
|
||||
// Id to look up.
|
||||
func (t *TermsLookup) Id(id string) *TermsLookup { |
||||
t.id = id |
||||
return t |
||||
} |
||||
|
||||
// Path to use for lookup.
|
||||
func (t *TermsLookup) Path(path string) *TermsLookup { |
||||
t.path = path |
||||
return t |
||||
} |
||||
|
||||
// Routing value.
|
||||
func (t *TermsLookup) Routing(routing string) *TermsLookup { |
||||
t.routing = routing |
||||
return t |
||||
} |
||||
|
||||
// Source creates the JSON source of the builder.
|
||||
func (t *TermsLookup) Source() (interface{}, error) { |
||||
src := make(map[string]interface{}) |
||||
if t.index != "" { |
||||
src["index"] = t.index |
||||
} |
||||
if t.typ != "" { |
||||
src["type"] = t.typ |
||||
} |
||||
if t.id != "" { |
||||
src["id"] = t.id |
||||
} |
||||
if t.path != "" { |
||||
src["path"] = t.path |
||||
} |
||||
if t.routing != "" { |
||||
src["routing"] = t.routing |
||||
} |
||||
return src, nil |
||||
} |
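A sketch of the terms-lookup form, combining this builder with TermsQuery above; index, id, and path are invented and the output is approximate.

```go
package es

import (
	"encoding/json"
	"fmt"
)

func ExampleTermsLookup() {
	lookup := NewTermsLookup().Index("users").Id("2").Path("followers")
	q := NewTermsQuery("user.id").TermsLookup(lookup)
	src, err := q.Source()
	if err != nil {
		panic(err)
	}
	b, _ := json.Marshal(src)
	fmt.Println(string(b))
	// Roughly: {"terms":{"user.id":{"id":"2","index":"users","path":"followers"}}}
}
```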