From d0ac1dd451b33fa10015d2119e726bbdd4a9fcd1 Mon Sep 17 00:00:00 2001
From: Anton Sattarov <sattarov@perx.ru>
Date: Fri, 14 Apr 2023 12:25:49 +0300
Subject: [PATCH] sip

---
 pkg/data/data.go                              | 294 +++++++++++
 pkg/data/data_test.go                         | 374 ++++++++++++++
 pkg/filter/filter.go                          | 410 +++++++++++++++
 pkg/filter/filter_test.go                     | 479 ++++++++++++++++++
 pkg/items/expr_test.go                        |  22 +-
 pkg/items/item.go                             |   2 +-
 pkg/items/item_test.go                        |   5 +-
 pkg/items/mocks/Items.go                      |   5 +-
 pkg/items/service.go                          |   4 +-
 .../grpc/protobuf_type_converters.microgen.go |   2 +-
 10 files changed, 1576 insertions(+), 21 deletions(-)
 create mode 100644 pkg/data/data.go
 create mode 100644 pkg/data/data_test.go
 create mode 100644 pkg/filter/filter.go
 create mode 100644 pkg/filter/filter_test.go

diff --git a/pkg/data/data.go b/pkg/data/data.go
new file mode 100644
index 00000000..0540055a
--- /dev/null
+++ b/pkg/data/data.go
@@ -0,0 +1,294 @@
+package data
+
+import (
+	"strconv"
+	"strings"
+)
+
+const DefaultFieldDelimiter = "."
+
+type DeleteValueType struct{}
+
+var DeleteValue DeleteValueType
+
+// TODO: add support for arrays and arrays of objects everywhere
+
+// Based on the https://github.com/knadh/koanf library.
+
+// Flatten takes a map[string]interface{} and traverses it and flattens
+// nested children into keys delimited by delim.
+//
+// It's important to note that all nested maps should be
+// map[string]interface{} and not map[interface{}]interface{}.
+// Convert such maps to map[string]interface{} beforehand if necessary.
+//
+// eg: `{ "parent": { "child": 123 }}` becomes `{ "parent.child": 123 }`
+// In addition, it keeps track of and returns a map of the delimited keypaths with
+// a slice of key parts, for eg: { "parent.child": ["parent", "child"] }. This
+// parts list is used to remember the key path's original structure to
+// unflatten later.
+func Flatten(m map[string]interface{}, keys []string, delim string) (map[string]interface{}, map[string][]string) {
+	var (
+		out    = make(map[string]interface{})
+		keyMap = make(map[string][]string)
+	)
+
+	flatten(m, keys, delim, out, keyMap)
+	return out, keyMap
+}
+
+func flatten(m map[string]interface{}, keys []string, delim string, out map[string]interface{}, keyMap map[string][]string) {
+	for key, val := range m {
+		// Copy the incoming key paths into a fresh list
+		// and append the current key in the iteration.
+		kp := make([]string, 0, len(keys)+1)
+		kp = append(kp, keys...)
+		kp = append(kp, key)
+
+		switch cur := val.(type) {
+		case map[string]interface{}:
+			// Empty map.
+			if len(cur) == 0 {
+				newKey := strings.Join(kp, delim)
+				out[newKey] = val
+				keyMap[newKey] = kp
+				continue
+			}
+
+			// It's a nested map. Flatten it recursively.
+			flatten(cur, kp, delim, out, keyMap)
+		default:
+			newKey := strings.Join(kp, delim)
+			out[newKey] = val
+			keyMap[newKey] = kp
+		}
+	}
+}
+
+// Unflatten takes a flattened key:value map (non-nested with delimited keys)
+// and returns a nested map where the keys are split into hierarchies by the given
+// delimiter. For instance, `parent.child.key: 1` to `{parent: {child: {key: 1}}}`
+//
+// It's important to note that all nested maps should be
+// map[string]interface{} and not map[interface{}]interface{}.
+// Convert such maps to map[string]interface{} beforehand if necessary.
+func Unflatten(m map[string]interface{}, delim string) map[string]interface{} {
+	out := make(map[string]interface{})
+
+	// Iterate through the flat conf map.
+	for k, v := range m {
+		var (
+			keys = strings.Split(k, delim)
+			next = out
+		)
+
+		// Iterate through key parts, for eg:, parent.child.key
+		// will be ["parent", "child", "key"]
+		for _, k := range keys[:len(keys)-1] {
+			sub, ok := next[k]
+			if !ok {
+				// If the key does not exist in the map, create it.
+				sub = make(map[string]interface{})
+				next[k] = sub
+			}
+			if n, ok := sub.(map[string]interface{}); ok {
+				next = n
+			}
+		}
+
+		// Assign the value.
+		next[keys[len(keys)-1]] = v
+	}
+	return out
+}
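+
+// Illustrative sketch (values are examples only): Flatten and Unflatten are
+// inverses for string-keyed maps.
+//
+//	m := map[string]interface{}{"parent": map[string]interface{}{"child": 123}}
+//	flat, _ := Flatten(m, nil, ".") // map[string]interface{}{"parent.child": 123}
+//	nested := Unflatten(flat, ".")  // map[string]interface{}{"parent": map[string]interface{}{"child": 123}}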
+
+// Delete removes the entry at the given path from the interface
+// if it is an object or an array.
+// The path is a delimited string, e.g. parent.child.key.
+// Empty nested maps left on the path are not removed.
+//
+// It's important to note that all nested maps should be
+// map[string]interface{} and not map[interface{}]interface{}.
+// Convert such maps to map[string]interface{} beforehand if necessary.
+func Delete(field string, data any, delim ...string) error {
+	return set(getPath(field, delim...), data, DeleteValue)
+}
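+
+// Illustrative sketch: Delete also traverses slices, so the field is removed
+// from every object inside an array on the path.
+//
+//	data := map[string]interface{}{
+//		"a": []interface{}{map[string]interface{}{"a": "1", "b": "2"}},
+//		"z": "2",
+//	}
+//	_ = Delete("a.a", data)
+//	// data == map[string]interface{}{"a": []interface{}{map[string]interface{}{"b": "2"}}, "z": "2"}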
+
+// DeleteMany removes the entries at the given paths from the interface.
+func DeleteMany(paths []string, value any, delim ...string) {
+	if value == nil || len(paths) == 0 {
+		return
+	}
+	for _, path := range paths {
+		Delete(path, value, delim...)
+	}
+}
+
+// Search recursively searches the interface for a given path. The path is
+// the key map slice, for eg:, parent.child.key -> [parent child key].
+//
+// It's important to note that all nested maps should be
+// map[string]interface{} and not map[interface{}]interface{}.
+// Convert such maps to map[string]interface{} beforehand if necessary.
+func Search(in interface{}, path []string) interface{} {
+	switch val := in.(type) {
+
+	case map[string]interface{}:
+		next, ok := val[path[0]]
+		if ok {
+			if len(path) == 1 {
+				return next
+			}
+			switch v := next.(type) {
+			case map[string]interface{}, []interface{}:
+				return Search(v, path[1:])
+			}
+		}
+	case []interface{}:
+		out := make([]interface{}, len(val))
+		for i, e := range val {
+			out[i] = Search(e, path)
+		}
+		return out
+	}
+	return nil
+}
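+
+// Illustrative sketch: when the path crosses a slice, Search returns a slice
+// of the values found in each element.
+//
+//	in := map[string]interface{}{"a": []interface{}{
+//		map[string]interface{}{"a": "1"},
+//		map[string]interface{}{"a": "3"},
+//	}}
+//	out := Search(in, []string{"a", "a"}) // []interface{}{"1", "3"}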
+
+func getPath(field string, delim ...string) []string {
+	if field == "" {
+		return nil
+	}
+
+	d := DefaultFieldDelimiter
+	if len(delim) > 0 {
+		d = delim[0]
+	}
+	return strings.Split(field, d)
+}
+
+func Set(field string, data, value any, delim ...string) error {
+	return set(getPath(field, delim...), data, value)
+}
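+
+// Illustrative sketch: Set creates missing intermediate maps and accepts
+// numeric path elements as slice indexes.
+//
+//	data := map[string]interface{}{"a": "0"}
+//	_ = Set("b.a", data, "x")   // data == map[string]interface{}{"a": "0", "b": map[string]interface{}{"a": "x"}}
+//
+//	data = map[string]interface{}{"a": []any{map[string]any{"a": "0"}, map[string]any{"a": "0"}}}
+//	_ = Set("a.0.a", data, "x") // only the first slice element is changed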
+
+func set(path []string, data, value any) error {
+	if len(path) == 0 {
+		return nil
+	}
+
+	switch v := data.(type) {
+	case map[string]interface{}:
+		if len(path) == 1 {
+
+			if _, ok := value.(DeleteValueType); ok {
+				delete(v, path[0])
+				return nil
+			}
+
+			v[path[0]] = value
+			return nil
+		}
+
+		next, ok := v[path[0]]
+		if !ok {
+			next = make(map[string]interface{})
+			v[path[0]] = next
+		}
+		return set(path[1:], next, value)
+
+	case []interface{}:
+		idx, err := strconv.Atoi(path[0])
+		if err != nil {
+			// The path element is not an index: apply the same path to every element.
+			for _, vv := range v {
+				if err = set(path, vv, value); err != nil {
+					return err
+				}
+			}
+			return nil
+		}
+		if idx < 0 || idx >= len(v) {
+			return nil
+		}
+		return set(path[1:], v[idx], value)
+	}
+
+	return nil
+}
+
+func Get(field string, data any, delim ...string) (any, bool) {
+	return get(getPath(field, delim...), data)
+}
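+
+// Illustrative sketch: Get reports whether the path was found, so missing keys
+// and out-of-range indexes can be distinguished from stored nil values.
+//
+//	data := map[string]any{"a": []any{map[string]any{"b": "0"}, map[string]any{"b": "1"}}}
+//	v, ok := Get("a.1.b", data) // v == "1", ok == true
+//	_, ok = Get("a.2.b", data)  // ok == false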
+
+func get(path []string, data any) (any, bool) {
+	if len(path) == 0 {
+		return data, true
+	}
+
+	switch v := data.(type) {
+	case map[string]interface{}:
+		val, ok := v[path[0]]
+		if !ok {
+			return nil, false
+		}
+		return get(path[1:], val)
+	case []interface{}:
+		idx, err := strconv.Atoi(path[0])
+		if err != nil || idx < 0 || idx >= len(v) {
+			return nil, false
+		}
+		return get(path[1:], v[idx])
+	}
+
+	return nil, false
+}
+
+// Keep retains only the entries at the given paths in the interface and removes all other data
+// if it is an object or an array.
+// Each path is a delimited string, e.g. parent.child.key.
+func Keep(paths []string, data any, delim ...string) {
+	if len(paths) == 0 {
+		data = nil
+		return
+	}
+	switch val := data.(type) {
+	case map[string]interface{}:
+		for k, v := range val {
+			if Contains(k, paths) {
+				continue
+			}
+			p := getObjectPaths(k, paths, delim...)
+			if len(p) == 0 {
+				delete(val, k)
+			}
+			Keep(p, v, delim...)
+		}
+	case []interface{}:
+		for _, ar := range val {
+			Keep(paths, ar, delim...)
+		}
+	}
+}
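+
+// Illustrative sketch: Keep drops everything that is not covered by the given paths.
+//
+//	data := map[string]interface{}{
+//		"a": map[string]interface{}{"a": "1", "z": "2"},
+//		"q": "3",
+//	}
+//	Keep([]string{"a.a"}, data)
+//	// data == map[string]interface{}{"a": map[string]interface{}{"a": "1"}}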
+
+func getObjectPaths(prefix string, arr []string, delim ...string) []string {
+	var res []string
+	d := DefaultFieldDelimiter
+	if len(delim) > 0 {
+		d = delim[0]
+	}
+	for _, v := range arr {
+		if strings.HasPrefix(v, prefix+d) {
+			res = append(res, strings.TrimPrefix(v, prefix+d))
+		}
+	}
+	return res
+}
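+
+// Contains reports whether s is present in arr.
+// Note: Keep (above) and items.IsSystemField rely on this helper, but it is not
+// defined elsewhere in this patch; this is a minimal assumed implementation.
+func Contains(s string, arr []string) bool {
+	for _, v := range arr {
+		if v == s {
+			return true
+		}
+	}
+	return false
+}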
+
+func CloneMap(m map[string]interface{}) map[string]interface{} {
+	if m == nil {
+		return m
+	}
+
+	c := make(map[string]interface{}, len(m))
+	for k, v := range m {
+		c[k] = v
+	}
+	return c
+}
diff --git a/pkg/data/data_test.go b/pkg/data/data_test.go
new file mode 100644
index 00000000..785eefbb
--- /dev/null
+++ b/pkg/data/data_test.go
@@ -0,0 +1,374 @@
+package data
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestDelete(t *testing.T) {
+	tests := []struct {
+		name  string
+		in    interface{}
+		field string
+		out   interface{}
+	}{
+		{
+			"simple",
+			map[string]interface{}{"a": "1", "z": "2"},
+			"a",
+			map[string]interface{}{"z": "2"},
+		},
+		{
+			"object",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+			"a",
+			map[string]interface{}{},
+		},
+		{
+			"object field",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+			"a.a",
+			map[string]interface{}{"a": map[string]interface{}{"z": "2"}},
+		},
+		{
+			"object field from map with array",
+			map[string]interface{}{"a": []interface{}{
+				map[string]interface{}{"a": "1", "b": "2"},
+				map[string]interface{}{"a": "3", "b": "4"},
+			}, "z": "2"},
+			"a.a",
+			map[string]interface{}{"a": []interface{}{
+				map[string]interface{}{"b": "2"},
+				map[string]interface{}{"b": "4"},
+			}, "z": "2"},
+		},
+		{
+			"object field from map with array of arrays",
+			map[string]interface{}{"a": []interface{}{
+				[]interface{}{
+					map[string]interface{}{"a": "1", "b": "2"},
+				}, []interface{}{
+					map[string]interface{}{"a": "3", "b": "4"},
+				},
+			}, "z": "2"},
+			"a.a",
+			map[string]interface{}{"a": []interface{}{
+				[]interface{}{
+					map[string]interface{}{"b": "2"},
+				}, []interface{}{
+					map[string]interface{}{"b": "4"},
+				},
+			}, "z": "2"},
+		},
+		// We decided that empty objects/slices should not be deleted automatically.
+		//{
+		//	"empty object",
+		//	map[string]interface{}{"a": map[string]interface{}{"a": map[string]interface{}{}}},
+		//	[]string{"a", "a"},
+		//	map[string]interface{}{},
+		//}, {
+		//	"empty array",
+		//	map[string]interface{}{"a": map[string]interface{}{"a": []interface{}{}}},
+		//	[]string{"a", "a"},
+		//	map[string]interface{}{},
+		//},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			Delete(tt.field, tt.in)
+			assert.Equal(t, tt.out, tt.in)
+		})
+	}
+}
+
+func TestDeleteMany(t *testing.T) {
+	tests := []struct {
+		name  string
+		in    interface{}
+		paths []string
+		out   interface{}
+	}{
+		{
+			"simple",
+			map[string]interface{}{"a": "1", "z": "2", "d": "2"},
+			[]string{"a", "d"},
+			map[string]interface{}{"z": "2"},
+		},
+		{
+			"object",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+			[]string{"a"},
+			map[string]interface{}{},
+		},
+		{
+			"object field",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2", "b": "4"}},
+			[]string{"a.a", "a.b"},
+			map[string]interface{}{"a": map[string]interface{}{"z": "2"}},
+		},
+		{
+			"object field from map with array",
+			map[string]interface{}{"a": []interface{}{
+				map[string]interface{}{"a": "1", "b": "2", "c": 0},
+				map[string]interface{}{"a": "3", "b": "4", "c": 0},
+			}, "z": "2"},
+			[]string{"a.a", "a.c"},
+			map[string]interface{}{"a": []interface{}{
+				map[string]interface{}{"b": "2"},
+				map[string]interface{}{"b": "4"},
+			}, "z": "2"},
+		},
+		{
+			"object field from map with array of arrays",
+			map[string]interface{}{"a": []interface{}{
+				[]interface{}{
+					map[string]interface{}{"a": "1", "b": "2"},
+				}, []interface{}{
+					map[string]interface{}{"a": "3", "b": "4"},
+				},
+			}, "z": "2"},
+			[]string{"a.a"},
+			map[string]interface{}{"a": []interface{}{
+				[]interface{}{
+					map[string]interface{}{"b": "2"},
+				}, []interface{}{
+					map[string]interface{}{"b": "4"},
+				},
+			}, "z": "2"},
+		},
+		{
+			"empty object",
+			map[string]interface{}{"a": map[string]interface{}{"a": map[string]interface{}{}}},
+			[]string{"a.a", "a"},
+			map[string]interface{}{},
+		},
+		{
+			"field not exist in object",
+			map[string]interface{}{"a": map[string]interface{}{"a": map[string]interface{}{}}},
+			[]string{"a.b"},
+			map[string]interface{}{"a": map[string]interface{}{"a": map[string]interface{}{}}},
+		},
+		{
+			"empty array",
+			map[string]interface{}{"a": map[string]interface{}{"a": []interface{}{}}},
+			[]string{"a.a", "a"},
+			map[string]interface{}{},
+		},
+		{
+			"field not exist in array",
+			map[string]interface{}{"a": map[string]interface{}{"a": []interface{}{}}},
+			[]string{"a.b"},
+			map[string]interface{}{"a": map[string]interface{}{"a": []interface{}{}}},
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			DeleteMany(tt.paths, tt.in)
+			assert.Equal(t, tt.out, tt.in)
+		})
+	}
+}
+
+func TestSearch(t *testing.T) {
+	tests := []struct {
+		name string
+		in   interface{}
+		path []string
+		out  interface{}
+	}{
+		{
+			"simple",
+			map[string]interface{}{"a": "1", "z": "2"},
+			[]string{"a"},
+			"1",
+		},
+		{
+			"object",
+			map[string]interface{}{
+				"a": map[string]interface{}{"a": "1", "z": "2"},
+				"b": map[string]interface{}{"c": "1", "d": "2"},
+			},
+			[]string{"a"},
+			map[string]interface{}{"a": "1", "z": "2"},
+		},
+		{
+			"object field",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+			[]string{"a", "a"},
+			"1",
+		},
+		{
+			"object field from map with array",
+			map[string]interface{}{"a": []interface{}{
+				map[string]interface{}{"a": "1", "b": "2"},
+				map[string]interface{}{"a": "3", "b": "4"},
+			}, "z": "2"},
+			[]string{"a", "a"},
+			[]interface{}{"1", "3"},
+		},
+		{
+			"object field from array of arrays",
+			[]interface{}{
+				[]interface{}{
+					map[string]interface{}{"a": "1", "b": "2"},
+				}, []interface{}{
+					map[string]interface{}{"a": "3", "b": "4"},
+				},
+			},
+			[]string{"a"},
+			[]interface{}{[]interface{}{"1"}, []interface{}{"3"}},
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			out := Search(tt.in, tt.path)
+			assert.Equal(t, tt.out, out)
+		})
+	}
+}
+
+func TestSet(t *testing.T) {
+	type args struct {
+		field string
+		data  any
+		value any
+	}
+	tests := []struct {
+		name     string
+		args     args
+		wantData any
+		wantErr  assert.ErrorAssertionFunc
+	}{
+		{"Simple", args{"a", map[string]interface{}{"a": "0"}, "a"}, map[string]interface{}{"a": "a"}, assert.NoError},
+		{"New key", args{"b", map[string]interface{}{"a": "0"}, "a"}, map[string]interface{}{"a": "0", "b": "a"}, assert.NoError},
+		{"Path", args{"a.b.c", map[string]interface{}{"a": map[string]any{"b": map[string]any{"c": "0"}}}, "c"}, map[string]any{"a": map[string]any{"b": map[string]any{"c": "c"}}}, assert.NoError},
+		{"Delete", args{"a.b", map[string]interface{}{"a": map[string]any{"b": map[string]any{"c": "0"}}}, DeleteValue}, map[string]any{"a": map[string]any{}}, assert.NoError},
+		{"Create map", args{"b.a", map[string]interface{}{"a": "0"}, "a"}, map[string]interface{}{"a": "0", "b": map[string]interface{}{"a": "a"}}, assert.NoError},
+		{"Map value", args{"a", map[string]interface{}{"a": "0"}, map[string]interface{}{"a": "a"}}, map[string]interface{}{"a": map[string]interface{}{"a": "a"}}, assert.NoError},
+		{"Slice", args{"a.a", map[string]interface{}{"a": []any{map[string]any{"a": "0"}, map[string]any{"a": "0", "b": "b"}}}, "a"}, map[string]interface{}{"a": []any{map[string]any{"a": "a"}, map[string]any{"a": "a", "b": "b"}}}, assert.NoError},
+		{"Slice", args{"a.0.a", map[string]interface{}{"a": []any{map[string]any{"a": "0"}, map[string]any{"a": "0", "b": "b"}}}, "a"}, map[string]interface{}{"a": []any{map[string]any{"a": "a"}, map[string]any{"a": "0", "b": "b"}}}, assert.NoError},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			data := tt.args.data
+			tt.wantErr(t, Set(tt.args.field, data, tt.args.value), fmt.Sprintf("Set(%v, %v, %v)", tt.args.field, data, tt.args.value))
+			assert.Equal(t, tt.wantData, data)
+		})
+	}
+}
+
+func TestGet(t *testing.T) {
+	type args struct {
+		field string
+		data  any
+	}
+	tests := []struct {
+		name  string
+		args  args
+		want  any
+		found bool
+	}{
+		{"Direct value", args{"", 100}, 100, true},
+		{"Not found", args{"a", 100}, nil, false},
+		{"Simple", args{"a", map[string]any{"a": "0"}}, "0", true},
+		{"Path", args{"a.b.c", map[string]any{"a": map[string]any{"b": map[string]any{"c": "c"}}}}, "c", true},
+		{"Incorrect path", args{"a.b.wrong", map[string]any{"a": map[string]any{"b": map[string]any{"c": "c"}}}}, nil, false},
+		{"Map value", args{"a.b", map[string]any{"a": map[string]any{"b": map[string]any{"c": "c"}}}}, map[string]any{"c": "c"}, true},
+		{"Slice", args{"a.1.b", map[string]any{"a": []any{map[string]any{"b": "0"}, map[string]any{"b": "1"}}}}, "1", true},
+		{"Slice out of range", args{"a.2.b", map[string]any{"a": []any{map[string]any{"b": "0"}, map[string]any{"b": "1"}}}}, nil, false},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, got1 := Get(tt.args.field, tt.args.data)
+			assert.Equalf(t, tt.want, got, "Get(%v, %v)", tt.args.field, tt.args.data)
+			assert.Equalf(t, tt.found, got1, "Get(%v, %v)", tt.args.field, tt.args.data)
+		})
+	}
+}
+
+func TestKeep(t *testing.T) {
+	tests := []struct {
+		name string
+		in   interface{}
+		path []string
+		out  interface{}
+	}{
+		{
+			"simple",
+			map[string]interface{}{"a": "1", "z": "2"},
+			[]string{"a"},
+			map[string]interface{}{"a": "1"},
+		},
+		{
+			"object",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+			[]string{"a"},
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+		},
+		{
+			"no field",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+			[]string{"z"},
+			map[string]interface{}{},
+		},
+		{
+			"object field",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+			[]string{"a.a"},
+			map[string]interface{}{"a": map[string]interface{}{"a": "1"}},
+		},
+		{
+			"object field from map with array",
+			map[string]interface{}{"a": []interface{}{
+				map[string]interface{}{"a": "1", "b": "2"},
+				map[string]interface{}{"a": "3", "b": "4"},
+			}, "z": "2"},
+			[]string{"a.a", "z"},
+			map[string]interface{}{"a": []interface{}{
+				map[string]interface{}{"a": "1"},
+				map[string]interface{}{"a": "3"},
+			}, "z": "2"},
+		},
+		{
+			"object field from map with array of arrays",
+			map[string]interface{}{"a": []interface{}{
+				[]interface{}{
+					map[string]interface{}{"a": "1", "b": "2"},
+				}, []interface{}{
+					map[string]interface{}{"a": "3", "b": "4"},
+				},
+			}, "z": "2"},
+			[]string{"a.b", "z"},
+			map[string]interface{}{"a": []interface{}{
+				[]interface{}{
+					map[string]interface{}{"b": "2"},
+				}, []interface{}{
+					map[string]interface{}{"b": "4"},
+				},
+			}, "z": "2"},
+		},
+		{
+			"empty object",
+			map[string]interface{}{"a": map[string]interface{}{"a": map[string]interface{}{}}},
+			[]string{"a.b"},
+			map[string]interface{}{"a": map[string]interface{}{}},
+		}, {
+			"empty array",
+			map[string]interface{}{"a": map[string]interface{}{"a": []interface{}{}}},
+			[]string{"a.b"},
+			map[string]interface{}{"a": map[string]interface{}{}},
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			Keep(tt.path, tt.in)
+			assert.Equal(t, tt.out, tt.in)
+		})
+	}
+}
diff --git a/pkg/filter/filter.go b/pkg/filter/filter.go
new file mode 100644
index 00000000..ea2f1d43
--- /dev/null
+++ b/pkg/filter/filter.go
@@ -0,0 +1,410 @@
+package filter
+
+import (
+	"fmt"
+	"reflect"
+	"strings"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/validate"
+	"github.com/hashicorp/go-multierror"
+	"github.com/mitchellh/mapstructure"
+	"go.mongodb.org/mongo-driver/bson"
+	"go.mongodb.org/mongo-driver/x/bsonx"
+)
+
+type Op string
+
+const (
+	Equal          Op = "eq"
+	NotEqual       Op = "neq"
+	Less           Op = "lt"
+	LessOrEqual    Op = "lte"
+	Greater        Op = "gt"
+	GreaterOrEqual Op = "gte"
+	In             Op = "in"
+	NotIn          Op = "nin"
+	Contains       Op = "contains"
+	NotContains    Op = "ncontains"
+	Or             Op = "or"
+	And            Op = "and"
+	Near           Op = "near"
+)
+
+type Filter struct {
+	Op    Op
+	Field string
+	Value interface{}
+}
+
+func (f Filter) Format(s fmt.State, verb rune) {
+	fmt.Fprintf(s, "{Op:%s Field:%s Value:%+v}", f.Op, f.Field, f.Value)
+}
+
+func NewFilter(op Op, field string, val interface{}) *Filter {
+	return &Filter{
+		Op:    op,
+		Field: field,
+		Value: val,
+	}
+}
+
+type FilterHandler struct {
+	schemas  []*schema.Schema
+	qbuilder QueryBuilder
+	prefix   string
+}
+
+func NewFilterHandler(sch ...*schema.Schema) *FilterHandler {
+	return &FilterHandler{
+		schemas: sch,
+		//qbuilder: qb,
+	}
+}
+
+func (h *FilterHandler) SetTrimPrefix(prefix string) *FilterHandler {
+	h.prefix = prefix
+	return h
+}
+
+func (h *FilterHandler) removeFieldPrefix(f string) string {
+	if h.prefix != "" {
+		return strings.TrimPrefix(f, h.prefix+".")
+	}
+	return f
+}
+
+func (h *FilterHandler) AddSchema(sch ...*schema.Schema) *FilterHandler {
+	h.schemas = append(h.schemas, sch...)
+	return h
+}
+
+func (h *FilterHandler) SetQueryBuilder(qb QueryBuilder) {
+	h.qbuilder = qb
+}
+
+func (h *FilterHandler) Validate(filter ...*Filter) (err error) {
+	if len(h.schemas) == 0 {
+		return errors.New("no schema provided")
+	}
+
+	for _, sch := range h.schemas {
+		var merr *multierror.Error
+
+		for _, f := range filter {
+			if err := h.validate(sch, f); err != nil {
+				merr = multierror.Append(merr, err)
+			}
+		}
+		if merr != nil {
+			merr.ErrorFormat = func(i []error) string {
+				return fmt.Sprintf("%d validation error(s)", len(i))
+			}
+			return errors.WithField(merr, "filter")
+		}
+	}
+	return nil
+}
+
+// TODO: '$elemMatch' - queries against an array field matching a condition, e.g. '{ results: { $elemMatch: { $gte: 80, $lt: 85 } } }'?
+
+func (h *FilterHandler) validate(sch *schema.Schema, f *Filter) (err error) {
+	if f == nil {
+		return
+	}
+
+	fld := h.removeFieldPrefix(f.Field)
+
+	switch f.Op {
+	case Equal, NotEqual, Less, LessOrEqual, Greater, GreaterOrEqual:
+		fld := sch.GetField(fld)
+		if fld == nil {
+			return h.formatErr(f.Field, f.Op, errors.New("field not found in collection schema"))
+		}
+
+		if f.Value, err = schema.Decode(nil, fld, f.Value); err != nil {
+			return h.formatErr(f.Field, f.Op, err)
+		}
+		if err = validate.Validate(nil, fld, f.Value); err != nil {
+			return h.formatErr(f.Field, f.Op, err)
+		}
+	case In, NotIn:
+		fld := sch.GetField(fld)
+		if fld == nil {
+			return h.formatErr(f.Field, f.Op, errors.New("field not found in collection schema"))
+		}
+		val := reflect.ValueOf(f.Value)
+		if val.IsZero() || (val.Kind() != reflect.Array && val.Kind() != reflect.Slice) {
+			return h.formatErr(f.Field, f.Op, errors.New("\"IN/NOT IN\" operations require array type for value"))
+		}
+
+		switch fld.GetType().(type) {
+		case *field.ArrayType:
+			f.Value, err = schema.Decode(nil, fld, f.Value)
+			if err != nil {
+				return h.formatErr(f.Field, f.Op, err)
+			}
+		default:
+			decodedVal := make([]interface{}, 0, val.Len())
+			for i := 0; i < val.Len(); i++ {
+				v, err := schema.Decode(nil, fld, val.Index(i).Interface())
+				if err != nil {
+					return h.formatErr(f.Field, f.Op, err)
+				}
+				decodedVal = append(decodedVal, v)
+			}
+
+			f.Value = decodedVal
+		}
+
+	case Contains, NotContains:
+		fld := sch.GetField(fld)
+		if fld == nil {
+			return h.formatErr(f.Field, f.Op, errors.New("field not found in collection schema"))
+		}
+
+		typ := fld.GetType()
+
+		if typ.Name() != "string" && typ.Name() != "array" {
+			return h.formatErr(f.Field, f.Op, errors.New("\"CONTAINS/NOT CONTAINS\" operations require field to be 'string' or 'string array'"))
+		}
+		if typ.Name() == "array" {
+			params := fld.Params.(*field.ArrayParameters)
+			if params.Item == nil || params.Item.GetType().Name() != "string" {
+				return h.formatErr(f.Field, f.Op, errors.New("\"CONTAINS/NOT CONTAINS\" operations require field to be 'string' or 'string array'"))
+			}
+		}
+
+		if reflect.TypeOf(f.Value).Kind() != reflect.String {
+			return h.formatErr(f.Field, f.Op, errors.New("\"CONTAINS/NOT CONTAINS\" operations require value to be 'string'"))
+		}
+
+	case Or, And:
+		fltrs, ok := f.Value.([]*Filter)
+		if !ok {
+			return h.formatErr(f.Field, f.Op, errors.New("\"AND/OR\" operations require an array of filters as value"))
+		}
+		for _, f := range fltrs {
+			err = h.validate(sch, f)
+			if err != nil {
+				return err
+			}
+		}
+
+	case Near:
+		fld := sch.GetField(fld)
+		if fld == nil {
+			return h.formatErr(f.Field, f.Op, errors.New("field not found in collection schema"))
+		}
+
+		_, ok := fld.Params.(*field.LocationParameters)
+		if !ok {
+			return h.formatErr(f.Field, f.Op, errors.New("field must be a location"))
+		}
+
+		value, ok := f.Value.(map[string]interface{})
+		if !ok {
+			return h.formatErr(f.Field, f.Op, errors.New("filter value should be a map"))
+		}
+
+		point, ok := value["point"]
+		if !ok {
+			return h.formatErr(f.Field, f.Op, errors.New("filter value should contain a point"))
+		}
+
+		var p field.GeoJSON
+		if err := mapstructure.Decode(map[string]interface{}{"type": "Point", "coordinates": point}, &p); err != nil {
+			return h.formatErr(f.Field, f.Op, err)
+		}
+
+		maxD, ok := value["distance"]
+		if ok {
+			v := reflect.ValueOf(maxD)
+			if !v.Type().ConvertibleTo(reflect.TypeOf(float64(0))) {
+				return h.formatErr(f.Field, f.Op, errors.New("filter value distance must be a number"))
+			}
+			val := v.Convert(reflect.TypeOf(float64(0)))
+			if val.Float() < 0 {
+				return h.formatErr(f.Field, f.Op, errors.New("filter value distance should not be negative"))
+			}
+		}
+
+	default:
+		return h.formatErr(f.Field, f.Op, errors.New("unknown operation"))
+	}
+
+	return nil
+}
+
+func (*FilterHandler) formatErr(args ...interface{}) error {
+	var (
+		f   string
+		op  Op
+		err error
+	)
+	for _, arg := range args {
+		switch v := arg.(type) {
+		case string:
+			f = v
+		case Op:
+			op = v
+		case error:
+			err = v
+		}
+	}
+	return errors.WithField(fmt.Errorf("op: '%s' %s", op, err), f)
+}
+
+func (h *FilterHandler) Query(filter ...*Filter) interface{} {
+	return h.qbuilder.Query(filter...)
+}
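+
+// Illustrative sketch of the intended usage (field names are examples only):
+//
+//	sch := schema.New("name", field.String(), "qty", field.Number(field.NumberFormatInt))
+//	h := NewFilterHandler(sch)
+//	h.SetQueryBuilder(NewMongoQueryBuilder())
+//
+//	f := NewFilter(Equal, "name", "apple")
+//	if err := h.Validate(f); err != nil {
+//		// handle validation error
+//	}
+//	query := h.Query(f) // bson.M{"$and": bson.A{bson.M{"name": bson.M{"$eq": "apple"}}}}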
+
+type QueryBuilder interface {
+	Query(filter ...*Filter) interface{}
+	SetFieldPrefix(string)
+}
+
+type mongoQueryBuilder struct {
+	m      map[Op]string
+	prefix string
+}
+
+func NewMongoQueryBuilder() QueryBuilder {
+	b := new(mongoQueryBuilder)
+	b.m = map[Op]string{
+		Equal:          "$eq",
+		NotEqual:       "$ne",
+		Less:           "$lt",
+		LessOrEqual:    "$lte",
+		Greater:        "$gt",
+		GreaterOrEqual: "$gte",
+		In:             "$in",
+		NotIn:          "$nin",
+		Contains:       "$regex",
+		NotContains:    "$not",
+		Or:             "$or",
+		And:            "$and",
+		Near:           "$near",
+	}
+	return b
+}
+
+func (b *mongoQueryBuilder) getOp(op Op) string {
+	return b.m[op]
+}
+
+func (b *mongoQueryBuilder) SetFieldPrefix(prefix string) {
+	b.prefix = prefix
+}
+
+func (b *mongoQueryBuilder) Query(filters ...*Filter) interface{} {
+	if len(filters) == 0 {
+		return bson.M{}
+	}
+	filter := &Filter{Op: And, Value: filters}
+	return b.query(filter)
+}
+
+func (b *mongoQueryBuilder) query(f *Filter) bson.M {
+	if f == nil {
+		return nil
+	}
+
+	switch f.Op {
+	case Equal, NotEqual, Less, LessOrEqual, Greater, GreaterOrEqual, In, NotIn:
+		return bson.M{
+			b.field(f.Field): bson.M{
+				b.getOp(f.Op): f.Value,
+			},
+		}
+	case Contains, NotContains:
+
+		val, _ := f.Value.(string)
+		return bson.M{
+			b.field(f.Field): bson.M{
+				b.getOp(f.Op): bsonx.Regex(val, ""),
+			},
+		}
+
+	case Or, And:
+		fltrs, ok := f.Value.([]*Filter)
+		if !ok {
+			return nil
+		}
+
+		arr := bson.A{}
+		for _, fltr := range fltrs {
+			arr = append(arr, b.query(fltr))
+		}
+		return bson.M{
+			b.getOp(f.Op): arr,
+		}
+	case Near:
+		val, ok := f.Value.(map[string]interface{})
+		if ok {
+			var p field.GeoJSON
+			c, ok := val["point"]
+			if !ok {
+				return nil
+			}
+			if err := mapstructure.Decode(map[string]interface{}{"type": "Point", "coordinates": c}, &p); err != nil {
+				return nil
+			}
+			q := bson.D{{Key: "$geometry", Value: p}}
+
+			if maxD, ok := val["distance"]; ok {
+				q = append(q, bson.E{Key: "$maxDistance", Value: maxD})
+			}
+
+			return bson.M{
+				b.field(f.Field + ".geometry"): bson.M{b.getOp(f.Op): q},
+			}
+		}
+	}
+
+	return nil
+}
+
+func (b *mongoQueryBuilder) field(f string) string {
+	if b.prefix == "" || strings.HasPrefix(f, b.prefix) {
+		return f
+	}
+	return b.prefix + "." + f
+}
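+
+// Illustrative sketch of prefix handling (values are examples only):
+//
+//	b := &mongoQueryBuilder{}
+//	b.SetFieldPrefix("data")
+//	_ = b.field("str")      // "data.str"
+//	_ = b.field("data.str") // "data.str"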
+
+// $text search ??
+//func (b *mongoQueryBuilder) textSearchQuery(filters ...*Filter) string {
+//	cnt, notcnt := "", ""
+//	for _, f := range filters {
+//		val, ok := f.Value.(string)
+//		if !ok {
+//			continue
+//		}
+//		switch f.Op {
+//		case Contains:
+//			if len(cnt) > 0 {
+//				cnt += " "
+//			}
+//			cnt += val
+//		case NotContains:
+//			words := strings.Split(val, " ")
+//			for _, w := range words {
+//				if len(notcnt) > 0 {
+//					notcnt += " "
+//				}
+//				notcnt += "-" + w
+//			}
+//		}
+//	}
+//	if len(cnt) == 0 {
+//		return ""
+//	}
+//	if len(notcnt) > 0 {
+//		cnt += " " + notcnt
+//	}
+//	return cnt
+//}
diff --git a/pkg/filter/filter_test.go b/pkg/filter/filter_test.go
new file mode 100644
index 00000000..22770bc7
--- /dev/null
+++ b/pkg/filter/filter_test.go
@@ -0,0 +1,479 @@
+package filter
+
+import (
+	"context"
+	"fmt"
+	"os"
+	"testing"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/validate"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+	"go.mongodb.org/mongo-driver/bson/primitive"
+	"go.mongodb.org/mongo-driver/mongo"
+	"go.mongodb.org/mongo-driver/mongo/options"
+)
+
+func TestFilterHandler(t *testing.T) {
+
+	sch := schema.New(
+		"str", field.String(),
+		"num", field.Number(field.NumberFormatInt),
+		"obj", field.Object(
+			"bool", field.Bool(),
+			"arr", field.Array(field.Time()),
+			"list", field.Array(
+				field.Object(
+					"num1", field.Number(field.NumberFormatFloat),
+					"str1", field.String(),
+				),
+			),
+		),
+		"date", field.Time(),
+		"geo", field.Location(),
+	)
+	h := NewFilterHandler(sch)
+	ph := NewFilterHandler(sch).SetTrimPrefix("data")
+
+	h.SetQueryBuilder(NewMongoQueryBuilder())
+	ph.SetQueryBuilder(NewMongoQueryBuilder())
+
+	var err error
+
+	t.Run("Validate", func(t *testing.T) {
+		t.Run("Simple", func(t *testing.T) {
+			t.Run("String", func(t *testing.T) {
+				f := &Filter{Op: Equal, Field: "str", Value: "zzz"}
+				err = h.Validate(f)
+				require.NoError(t, err)
+
+				f = &Filter{Op: Equal, Field: "data.str", Value: "zzz"}
+				err = ph.Validate(f)
+				require.NoError(t, err)
+			})
+			t.Run("Int", func(t *testing.T) {
+				f := &Filter{Op: NotEqual, Field: "num", Value: 5.0}
+				err = h.Validate(f)
+				require.NoError(t, err)
+				assert.IsType(t, int64(0), f.Value)
+
+				f = &Filter{Op: NotEqual, Field: "data.num", Value: 5.0}
+				err = ph.Validate(f)
+				require.NoError(t, err)
+				assert.IsType(t, int64(0), f.Value)
+			})
+			t.Run("Time", func(t *testing.T) {
+				f := &Filter{Op: LessOrEqual, Field: "date", Value: "22 Dec 1997"}
+				err = h.Validate(f)
+				require.Error(t, err)
+
+				f = &Filter{Op: LessOrEqual, Field: "data.date", Value: "22 Dec 1997"}
+				err = ph.Validate(f)
+				require.Error(t, err)
+			})
+			t.Run("Location", func(t *testing.T) {
+				f := &Filter{Op: Near, Field: "geo", Value: ""}
+				err = h.Validate(f)
+				require.Error(t, err)
+
+				f = &Filter{Op: Near, Field: "data.geo", Value: ""}
+				err = ph.Validate(f)
+				require.Error(t, err)
+
+				fv := map[string]interface{}{
+					"point":    []float64{55, 55},
+					"distance": 1000,
+				}
+
+				f = &Filter{Op: Near, Field: "data.geo", Value: fv}
+				err = ph.Validate(f)
+				require.NoError(t, err)
+
+				fv["distance"] = -1
+				f = &Filter{Op: Near, Field: "data.geo", Value: fv}
+				err = ph.Validate(f)
+				require.Error(t, err)
+
+			})
+		})
+		t.Run("Embedded array field", func(t *testing.T) {
+			w, _ := time.Parse(time.RFC3339, "2012-11-01T22:08:41Z")
+			f := &Filter{Op: In, Field: "obj.arr", Value: []interface{}{"2012-11-01T22:08:41Z"}}
+			err = h.Validate(f)
+			require.NoError(t, err)
+			assert.Equal(t, w, f.Value.([]interface{})[0])
+
+			f = &Filter{Op: In, Field: "data.obj.arr", Value: []interface{}{"2012-11-01T22:08:41Z"}}
+			err = ph.Validate(f)
+			require.NoError(t, err)
+			assert.Equal(t, w, f.Value.([]interface{})[0])
+		})
+		t.Run("Embedded string contains", func(t *testing.T) {
+			f := &Filter{Op: Contains, Field: "obj.list.str1", Value: "zzz"}
+			err = h.Validate(f)
+			require.NoError(t, err)
+
+			f = &Filter{Op: Contains, Field: "data.obj.list.str1", Value: "zzz"}
+			err = ph.Validate(f)
+			require.NoError(t, err)
+		})
+		t.Run("Compound filter with 'OR' operation", func(t *testing.T) {
+			t.Run("No Err", func(t *testing.T) {
+				w1, _ := time.Parse(time.RFC3339, "2012-11-01T22:08:41Z")
+				w2, _ := time.Parse(time.RFC3339, "2015-12-01T22:08:41Z")
+
+				ff := []*Filter{
+					{Op: In, Field: "date", Value: []interface{}{"2012-11-01T22:08:41Z", "2015-12-01T22:08:41Z"}},
+					{Op: Or, Field: "", Value: []*Filter{
+						{Op: And, Field: "", Value: []*Filter{
+							{Op: GreaterOrEqual, Field: "date", Value: "2012-11-01T22:08:41Z"},
+							{Op: LessOrEqual, Field: "date", Value: "2015-12-01T22:08:41Z"},
+						}},
+						{Op: Equal, Field: "obj.bool", Value: true},
+					}},
+				}
+				err = h.Validate(ff...)
+				require.NoError(t, err)
+				assert.ElementsMatch(t, []interface{}{w1, w2}, ff[0].Value.([]interface{}))
+				assert.Equal(t, w1, ff[1].Value.([]*Filter)[0].Value.([]*Filter)[0].Value)
+				assert.Equal(t, w2, ff[1].Value.([]*Filter)[0].Value.([]*Filter)[1].Value)
+
+				ff = []*Filter{
+					{Op: In, Field: "data.date", Value: []interface{}{"2012-11-01T22:08:41Z", "2015-12-01T22:08:41Z"}},
+					{Op: Or, Field: "", Value: []*Filter{
+						{Op: And, Field: "", Value: []*Filter{
+							{Op: GreaterOrEqual, Field: "data.date", Value: "2012-11-01T22:08:41Z"},
+							{Op: LessOrEqual, Field: "data.date", Value: "2015-12-01T22:08:41Z"},
+						}},
+						{Op: Equal, Field: "data.obj.bool", Value: true},
+					}},
+				}
+				err = ph.Validate(ff...)
+				require.NoError(t, err)
+				assert.ElementsMatch(t, []interface{}{w1, w2}, ff[0].Value.([]interface{}))
+				assert.Equal(t, w1, ff[1].Value.([]*Filter)[0].Value.([]*Filter)[0].Value)
+				assert.Equal(t, w2, ff[1].Value.([]*Filter)[0].Value.([]*Filter)[1].Value)
+			})
+			t.Run("Multiple Errors", func(t *testing.T) {
+				ff := []*Filter{
+					{Op: In, Field: "date", Value: []interface{}{"5 Jan 2020", "10 June 2020"}},
+					{Op: Or, Field: "", Value: []*Filter{
+						{Op: And, Field: "", Value: []*Filter{
+							{Op: GreaterOrEqual, Field: "date", Value: "2012-11-01T22:08:41Z"},
+							{Op: LessOrEqual, Field: "date", Value: "2015-12-01T22:08:41Z"},
+						}},
+						{Op: Equal, Field: "obj.bool", Value: 15},
+					}},
+				}
+				err = h.Validate(ff...)
+				require.Error(t, err)
+				assert.Equal(t, err.Error(), "2 validation error(s)")
+
+				ff = []*Filter{
+					{Op: In, Field: "data.date", Value: []interface{}{"5 Jan 2020", "10 June 2020"}},
+					{Op: Or, Field: "", Value: []*Filter{
+						{Op: And, Field: "", Value: []*Filter{
+							{Op: GreaterOrEqual, Field: "data.date", Value: "2012-11-01T22:08:41Z"},
+							{Op: LessOrEqual, Field: "data.date", Value: "2015-12-01T22:08:41Z"},
+						}},
+						{Op: Equal, Field: "data.obj.bool", Value: 15},
+					}},
+				}
+				err = ph.Validate(ff...)
+				require.Error(t, err)
+				assert.Equal(t, err.Error(), "2 validation error(s)")
+			})
+		})
+	})
+
+	t.Run("Build Query", func(t *testing.T) {
+		t.Run("No Filters", func(t *testing.T) {
+			res := h.Query()
+			require.IsType(t, res, primitive.M{})
+
+			pres := ph.Query()
+			assert.Equal(t, res, pres, "empty queries with and without a prefix must be identical")
+		})
+		t.Run("Equal String", func(t *testing.T) {
+			f := &Filter{Op: Equal, Field: "data.str", Value: "zzz"}
+			res := h.Query(f)
+			b, ok := res.(primitive.M)
+			require.True(t, ok)
+			assert.Equal(t, primitive.M{"$and": primitive.A{primitive.M{"data.str": primitive.M{"$eq": "zzz"}}}}, b)
+
+			pf := &Filter{Op: Equal, Field: "data.str", Value: "zzz"}
+			pres := ph.Query(pf)
+			assert.Equal(t, res, pres, "DB queries for fields with and without a prefix must be identical")
+		})
+		t.Run("In Array", func(t *testing.T) {
+			w, _ := time.Parse(time.RFC3339, "2012-11-01T22:08:41Z")
+			f := &Filter{Op: In, Field: "obj.arr", Value: []interface{}{w}}
+			res := h.Query(f)
+			b, ok := res.(primitive.M)
+			require.True(t, ok)
+			assert.Equal(t, primitive.M{"$and": primitive.A{primitive.M{"obj.arr": primitive.M{"$in": []interface{}{w}}}}}, b)
+		})
+		t.Run("Several ops for one field", func(t *testing.T) {
+			w, _ := time.Parse(time.RFC3339, "2012-11-01T22:08:41Z")
+			f := &Filter{Op: In, Field: "obj.arr", Value: []interface{}{w}}
+			res := h.Query(f)
+			b, ok := res.(primitive.M)
+			require.True(t, ok)
+			assert.Equal(t, primitive.M{"$and": primitive.A{primitive.M{"obj.arr": primitive.M{"$in": []interface{}{w}}}}}, b)
+		})
+	})
+}
+
+func TestFilterHandler_Integration(t *testing.T) {
+	ctx := context.Background()
+
+	uri := os.Getenv("MONGO_URL")
+	if uri == "" {
+		uri = "mongodb://localhost:27017"
+	}
+	opts := options.Client().SetConnectTimeout(15 * time.Second).ApplyURI(uri)
+	client, err := mongo.Connect(context.Background(), opts)
+	require.NoError(t, err)
+	err = client.Ping(ctx, nil)
+	require.NoError(t, err)
+
+	sch := schema.New(
+		"name", field.String(validate.Required()),
+		"color", field.String(),
+		"qty", field.Number(field.NumberFormatInt),
+		"info", field.Object(
+			"is_fruit", field.Bool(),
+			"similar", field.Array(
+				field.Object(
+					"name", field.Number(field.NumberFormatFloat),
+					"color", field.String(),
+				),
+			),
+			"desc", field.String(),
+		),
+		"produced", field.Time(),
+		"shipment", field.Array(field.String()),
+	)
+
+	w1, _ := time.Parse(time.RFC3339, "2020-01-01T10:08:41Z")
+	w2, _ := time.Parse(time.RFC3339, "2020-05-01T10:08:41Z")
+	w3, _ := time.Parse(time.RFC3339, "2020-10-01T10:08:41Z")
+
+	items := []map[string]interface{}{
+		{
+			"name":  "apple",
+			"color": "red",
+			"qty":   25,
+			"info": map[string]interface{}{
+				"is_fruit": true,
+				"similar": []interface{}{
+					map[string]interface{}{"name": "pear", "color": "yellow"},
+					map[string]interface{}{"name": "lemon", "color": "yellow"},
+				},
+				"desc": "An apple is the edible fruit . Apple trees are cultivated worldwide and have religious and mythological " +
+					"significance in many cultures. Apples are eaten with honey at the Jewish New Year of Rosh Hashanah to symbolize a sweet new year.",
+			},
+			"produced":   w1,
+			"shipment":   []interface{}{"Russia", "Iran"},
+			"storepoint": map[string]interface{}{"type": "Point", "coordinates": []float64{55.751472, 37.618727}},
+		},
+		{
+			"name":  "orange",
+			"color": "orange",
+			"qty":   10,
+			"info": map[string]interface{}{
+				"is_fruit": true,
+				"similar": []interface{}{
+					map[string]interface{}{"name": "lemon", "color": "yellow"},
+					map[string]interface{}{"name": "grapefruit", "color": "red"},
+				},
+				"desc": "The orange is the edible fruit of various citrus species; a hybrid between pomelo and mandarin. Orange trees are widely grown" +
+					" in tropical and subtropical climates for their sweet fruit. The fruit of the orange tree can be eaten fresh, or processed for its juice or fragrant peel.",
+			},
+			"produced":   w2,
+			"shipment":   []interface{}{"Egypt", "Iran"},
+			"storepoint": map[string]interface{}{"type": "Point", "coordinates": []float64{55.716797, 37.552809}},
+		},
+		{
+			"name":  "tomato",
+			"color": "red",
+			"qty":   1,
+			"info": map[string]interface{}{
+				"is_fruit": false,
+				"similar": []interface{}{
+					map[string]interface{}{"name": "cucumber", "color": "green"},
+					map[string]interface{}{"name": "apple", "color": "yellow"},
+				},
+				"desc": "The tomato is the edible red berry. The tomato is consumed in diverse ways, raw or cooked, in many dishes, " +
+					"sauces, salads, and drinks. Numerous varieties of the tomato plant are widely grown in temperate climates across the world.",
+			},
+			"produced":   w3,
+			"shipment":   []interface{}{"Russia", "Italy"},
+			"storepoint": map[string]interface{}{"type": "Point", "coordinates": []float64{55.760688, 37.619125}},
+		},
+	}
+
+	db := client.Database("perxis_test_filter")
+	coll := db.Collection("items")
+	coll.Drop(ctx)
+
+	for _, item := range items {
+		_, err = coll.InsertOne(ctx, item)
+		require.NoError(t, err)
+	}
+
+	h := NewFilterHandler(sch)
+	h.SetQueryBuilder(NewMongoQueryBuilder())
+
+	t.Run("By Color [Equal/NotEqual]", func(t *testing.T) {
+		t.Run("Red", func(t *testing.T) {
+			query := h.Query(&Filter{Op: Equal, Field: "color", Value: "red"})
+			res, err := coll.Find(ctx, query)
+			require.NoError(t, err)
+
+			var data []map[string]interface{}
+			err = res.All(ctx, &data)
+			require.NoError(t, err)
+			require.Len(t, data, 2)
+			assert.ElementsMatch(t, []interface{}{"apple", "tomato"}, []interface{}{data[0]["name"], data[1]["name"]})
+		})
+		t.Run("Not Red", func(t *testing.T) {
+			query := h.Query(&Filter{Op: NotEqual, Field: "color", Value: "red"})
+			res, err := coll.Find(ctx, query)
+			require.NoError(t, err)
+
+			var data []map[string]interface{}
+			err = res.All(ctx, &data)
+			require.NoError(t, err)
+			require.Len(t, data, 1)
+			assert.Equal(t, "orange", data[0]["name"])
+		})
+	})
+	t.Run("By Quantity [Less/Greater]", func(t *testing.T) {
+		query := h.Query(&Filter{Op: LessOrEqual, Field: "qty", Value: 25}, &Filter{Op: Greater, Field: "qty", Value: 1})
+		res, err := coll.Find(ctx, query)
+		require.NoError(t, err)
+
+		var data []map[string]interface{}
+		err = res.All(ctx, &data)
+		require.NoError(t, err)
+		require.Len(t, data, 2)
+		assert.ElementsMatch(t, []interface{}{"apple", "orange"}, []interface{}{data[0]["name"], data[1]["name"]})
+	})
+	t.Run("Not Fruit [Equal embedded field]", func(t *testing.T) {
+		query := h.Query(&Filter{Op: Equal, Field: "info.is_fruit", Value: false})
+		res, err := coll.Find(ctx, query)
+		require.NoError(t, err)
+
+		var data []map[string]interface{}
+		err = res.All(ctx, &data)
+		require.NoError(t, err)
+		require.Len(t, data, 1)
+		assert.Equal(t, "tomato", data[0]["name"])
+	})
+	t.Run("By Similar [In/NotIn]", func(t *testing.T) {
+		t.Run("Similar to cucumber, pear", func(t *testing.T) {
+			query := h.Query(&Filter{Op: In, Field: "info.similar.name", Value: []string{"cucumber", "pear"}})
+			res, err := coll.Find(ctx, query)
+			require.NoError(t, err)
+
+			var data []map[string]interface{}
+			err = res.All(ctx, &data)
+			require.NoError(t, err)
+			require.Len(t, data, 2)
+			assert.ElementsMatch(t, []interface{}{"apple", "tomato"}, []interface{}{data[0]["name"], data[1]["name"]})
+		})
+		t.Run("Not Similar to cucumber, pear", func(t *testing.T) {
+			query := h.Query(&Filter{Op: NotIn, Field: "info.similar.name", Value: []string{"cucumber", "grapefruit"}})
+			res, err := coll.Find(ctx, query)
+			require.NoError(t, err)
+
+			var data []map[string]interface{}
+			err = res.All(ctx, &data)
+			require.NoError(t, err)
+			require.Len(t, data, 1)
+			assert.Equal(t, "apple", data[0]["name"])
+		})
+	})
+	t.Run("By Description [Contains/NotContains]", func(t *testing.T) {
+		t.Run("Contains", func(t *testing.T) {
+			query := h.Query(&Filter{Op: And, Value: []*Filter{
+				&Filter{Op: In, Field: "info.similar.color", Value: []string{"yellow"}},
+				&Filter{Op: Contains, Field: "info.desc", Value: "edible fruit"},
+			}})
+			res, err := coll.Find(ctx, query)
+			require.NoError(t, err)
+			var data []map[string]interface{}
+			err = res.All(ctx, &data)
+			require.NoError(t, err)
+			require.Len(t, data, 2)
+			assert.ElementsMatch(t, []interface{}{"apple", "orange"}, []interface{}{data[0]["name"], data[1]["name"]})
+		})
+		t.Run("Not Contains", func(t *testing.T) {
+			query := h.Query(&Filter{Op: NotContains, Field: "info.desc", Value: "fruit"})
+			res, err := coll.Find(ctx, query)
+			require.NoError(t, err)
+
+			var data []map[string]interface{}
+			err = res.All(ctx, &data)
+			require.NoError(t, err)
+			for _, d := range data {
+				fmt.Println(d["name"])
+			}
+			require.Len(t, data, 1)
+			assert.Equal(t, "tomato", data[0]["name"])
+		})
+	})
+	t.Run("By Shipment [Contains/NotContains]", func(t *testing.T) {
+		t.Run("Contains", func(t *testing.T) {
+			query := h.Query(
+				&Filter{Op: Contains, Field: "shipment", Value: "Russia"},
+			)
+			res, err := coll.Find(ctx, query)
+			require.NoError(t, err)
+			var data []map[string]interface{}
+			err = res.All(ctx, &data)
+			require.NoError(t, err)
+			require.Len(t, data, 2)
+			assert.ElementsMatch(t, []interface{}{"apple", "tomato"}, []interface{}{data[0]["name"], data[1]["name"]})
+		})
+		t.Run("Not Contains", func(t *testing.T) {
+			query := h.Query(&Filter{Op: NotContains, Field: "shipment", Value: "Iran"})
+			res, err := coll.Find(ctx, query)
+			require.NoError(t, err)
+
+			var data []map[string]interface{}
+			err = res.All(ctx, &data)
+			require.NoError(t, err)
+			for _, d := range data {
+				fmt.Println(d["name"])
+			}
+			require.Len(t, data, 1)
+			assert.Equal(t, "tomato", data[0]["name"])
+		})
+	})
+	t.Run("Compound Query", func(t *testing.T) {
+		query := h.Query(&Filter{Op: Or, Value: []*Filter{
+			&Filter{Op: And, Value: []*Filter{
+				&Filter{Op: In, Field: "color", Value: []interface{}{"red", "yellow", "green"}},
+				&Filter{Op: Less, Field: "qty", Value: 10},
+			}}, // 1 - tomato
+			&Filter{Op: Equal, Field: "name", Value: "pepper"}, // 0
+			&Filter{Op: And, Value: []*Filter{
+				&Filter{Op: GreaterOrEqual, Field: "produced", Value: w1},
+				&Filter{Op: Less, Field: "produced", Value: w2}, // 1 - apple
+			}},
+		}})
+		res, err := coll.Find(ctx, query)
+		require.NoError(t, err)
+
+		var data []map[string]interface{}
+		err = res.All(ctx, &data)
+		require.NoError(t, err)
+		require.Len(t, data, 2)
+		assert.ElementsMatch(t, []interface{}{"apple", "tomato"}, []interface{}{data[0]["name"], data[1]["name"]})
+	})
+}
diff --git a/pkg/items/expr_test.go b/pkg/items/expr_test.go
index 4d8a1518..b788f227 100644
--- a/pkg/items/expr_test.go
+++ b/pkg/items/expr_test.go
@@ -4,17 +4,17 @@ import (
 	"context"
 	"testing"
 
-	"git.perx.ru/perxis/perxis/auth"
-	"git.perx.ru/perxis/perxis/pkg/expr"
-	mockscollaborators "git.perx.ru/perxis/perxis/services/collaborators/mocks"
-	"git.perx.ru/perxis/perxis/services/members"
-	mocksmembers "git.perx.ru/perxis/perxis/services/members/mocks"
-	"git.perx.ru/perxis/perxis/services/roles"
-	mocksroles "git.perx.ru/perxis/perxis/services/roles/mocks"
-	"git.perx.ru/perxis/perxis/services/spaces"
-	mocksSpaces "git.perx.ru/perxis/perxis/services/spaces/mocks"
-	"git.perx.ru/perxis/perxis/services/users"
-	mocksusers "git.perx.ru/perxis/perxis/services/users/mocks"
+	"git.perx.ru/perxis/perxis-go/pkg/auth"
+	mockscollaborators "git.perx.ru/perxis/perxis-go/pkg/collaborators/mocks"
+	"git.perx.ru/perxis/perxis-go/pkg/expr"
+	"git.perx.ru/perxis/perxis-go/pkg/members"
+	mocksmembers "git.perx.ru/perxis/perxis-go/pkg/members/mocks"
+	"git.perx.ru/perxis/perxis-go/pkg/roles"
+	mocksroles "git.perx.ru/perxis/perxis-go/pkg/roles/mocks"
+	"git.perx.ru/perxis/perxis-go/pkg/spaces"
+	mocksSpaces "git.perx.ru/perxis/perxis-go/pkg/spaces/mocks"
+	"git.perx.ru/perxis/perxis-go/pkg/users"
+	mocksusers "git.perx.ru/perxis/perxis-go/pkg/users/mocks"
 	"github.com/pkg/errors"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/mock"
diff --git a/pkg/items/item.go b/pkg/items/item.go
index 223f8a22..340e66a2 100644
--- a/pkg/items/item.go
+++ b/pkg/items/item.go
@@ -286,7 +286,7 @@ func (i Item) ProcessData(ctx context.Context, sch *schema.Schema, fn ProcessDat
 
 // IsSystemField возвращает являться ли поле системным
 func IsSystemField(field string) bool {
-	if util.Contains(field, SystemFields) {
+	if data.Contains(field, SystemFields) {
 		return true
 	}
 	return false
diff --git a/pkg/items/item_test.go b/pkg/items/item_test.go
index 2c2c442d..fb54fc50 100644
--- a/pkg/items/item_test.go
+++ b/pkg/items/item_test.go
@@ -5,9 +5,8 @@ import (
 	"testing"
 	"time"
 
-	"git.perx.ru/perxis/perxis/schema"
-	"git.perx.ru/perxis/perxis/schema/field"
-
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
 	"github.com/stretchr/testify/assert"
 )
 
diff --git a/pkg/items/mocks/Items.go b/pkg/items/mocks/Items.go
index 50708b92..1d3ea35f 100644
--- a/pkg/items/mocks/Items.go
+++ b/pkg/items/mocks/Items.go
@@ -5,10 +5,9 @@ package mocks
 import (
 	context "context"
 
-	items "git.perx.ru/perxis/perxis/services/items"
+	items "git.perx.ru/perxis/perxis-go/pkg/items"
+	schema "git.perx.ru/perxis/perxis-go/pkg/schema"
 	mock "github.com/stretchr/testify/mock"
-
-	schema "git.perx.ru/perxis/perxis/schema"
 )
 
 // Items is an autogenerated mock type for the Items type
diff --git a/pkg/items/service.go b/pkg/items/service.go
index 533f5099..c10a69c5 100644
--- a/pkg/items/service.go
+++ b/pkg/items/service.go
@@ -6,8 +6,8 @@ import (
 
 	"git.perx.ru/perxis/perxis-go/pkg/errors"
 	"git.perx.ru/perxis/perxis-go/pkg/filter"
-	"git.perx.ru/perxis/perxis-go/schema"
-	"git.perx.ru/perxis/perxis-go/schema/field"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
 )
 
 // @microgen grpc
diff --git a/pkg/items/transport/grpc/protobuf_type_converters.microgen.go b/pkg/items/transport/grpc/protobuf_type_converters.microgen.go
index 3cdee975..7eae9965 100644
--- a/pkg/items/transport/grpc/protobuf_type_converters.microgen.go
+++ b/pkg/items/transport/grpc/protobuf_type_converters.microgen.go
@@ -11,9 +11,9 @@ import (
 	"git.perx.ru/perxis/perxis-go/pkg/filter"
 	service "git.perx.ru/perxis/perxis-go/pkg/items"
 	"git.perx.ru/perxis/perxis-go/pkg/options"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
 	pbcommon "git.perx.ru/perxis/perxis-go/proto/common"
 	pb "git.perx.ru/perxis/perxis-go/proto/items"
-	"git.perx.ru/perxis/perxis-go/schema"
 	jsoniter "github.com/json-iterator/go"
 	"google.golang.org/protobuf/types/known/structpb"
 )
-- 
GitLab