diff --git a/pkg/data/data.go b/pkg/data/data.go new file mode 100644 index 0000000000000000000000000000000000000000..0540055ad4e213f666b0cf72019b9b4b9c39fbe8 --- /dev/null +++ b/pkg/data/data.go @@ -0,0 +1,294 @@ +package data + +import ( + "strconv" + "strings" +) + +const DefaultFieldDelimiter = "." + +type DeleteValueType struct{} + +var DeleteValue DeleteValueType + +// TODO: везде добавить поддержку массивов Рё массивов объектов + +// Сделано РЅР° базе библиотеки https://github.com/knadh/koanf + +// Flatten takes a map[string]interface{} and traverses it and flattens +// nested children into keys delimited by delim. +// +// It's important to note that all nested maps should be +// map[string]interface{} and not map[interface{}]interface{}. +// Use IntfaceKeysToStrings() to convert if necessary. +// +// eg: `{ "parent": { "child": 123 }}` becomes `{ "parent.child": 123 }` +// In addition, it keeps track of and returns a map of the delimited keypaths with +// a slice of key parts, for eg: { "parent.child": ["parent", "child"] }. This +// parts list is used to remember the key path's original structure to +// unflatten later. +func Flatten(m map[string]interface{}, keys []string, delim string) (map[string]interface{}, map[string][]string) { + var ( + out = make(map[string]interface{}) + keyMap = make(map[string][]string) + ) + + flatten(m, keys, delim, out, keyMap) + return out, keyMap +} + +func flatten(m map[string]interface{}, keys []string, delim string, out map[string]interface{}, keyMap map[string][]string) { + for key, val := range m { + // Copy the incoming key paths into a fresh list + // and append the current key in the iteration. + kp := make([]string, 0, len(keys)+1) + kp = append(kp, keys...) + kp = append(kp, key) + + switch cur := val.(type) { + case map[string]interface{}: + // Empty map. + if len(cur) == 0 { + newKey := strings.Join(kp, delim) + out[newKey] = val + keyMap[newKey] = kp + continue + } + + // It's a nested map. 
Flatten it recursively. + flatten(cur, kp, delim, out, keyMap) + default: + newKey := strings.Join(kp, delim) + out[newKey] = val + keyMap[newKey] = kp + } + } +} + +// Unflatten takes a flattened key:value map (non-nested with delimited keys) +// and returns a nested map where the keys are split into hierarchies by the given +// delimiter. For instance, `parent.child.key: 1` to `{parent: {child: {key: 1}}}` +// +// It's important to note that all nested maps should be +// map[string]interface{} and not map[interface{}]interface{}. +// Use IntfaceKeysToStrings() to convert if necessary. +func Unflatten(m map[string]interface{}, delim string) map[string]interface{} { + out := make(map[string]interface{}) + + // Iterate through the flat conf map. + for k, v := range m { + var ( + keys = strings.Split(k, delim) + next = out + ) + + // Iterate through key parts, for eg:, parent.child.key + // will be ["parent", "child", "key"] + for _, k := range keys[:len(keys)-1] { + sub, ok := next[k] + if !ok { + // If the key does not exist in the map, create it. + sub = make(map[string]interface{}) + next[k] = sub + } + if n, ok := sub.(map[string]interface{}); ok { + next = n + } + } + + // Assign the value. + next[keys[len(keys)-1]] = v + } + return out +} + +// Delete removes the entry present at a given path, from the interface +// if it is an object or an array. +// The path is the key map slice, for eg:, parent.child.key -> [parent child key]. +// Any empty, nested map on the path, is recursively deleted. +// +// It's important to note that all nested maps should be +// map[string]interface{} and not map[interface{}]interface{}. +// Use IntfaceKeysToStrings() to convert if necessary. 
+func Delete(field string, data any, delim ...string) error { + return set(getPath(field, delim...), data, DeleteValue) +} + +// DeleteMany removes the entries present at a given paths, from the interface +func DeleteMany(paths []string, value any, delim ...string) { + if value == nil || len(paths) == 0 { + return + } + for _, path := range paths { + Delete(path, value, delim...) + } +} + +// Search recursively searches the interface for a given path. The path is +// the key map slice, for eg:, parent.child.key -> [parent child key]. +// +// It's important to note that all nested maps should be +// map[string]interface{} and not map[interface{}]interface{}. +// Use IntfaceKeysToStrings() to convert if necessary. +func Search(in interface{}, path []string) interface{} { + switch val := in.(type) { + + case map[string]interface{}: + next, ok := val[path[0]] + if ok { + if len(path) == 1 { + return next + } + switch v := next.(type) { + case map[string]interface{}, []interface{}: + return Search(v, path[1:]) + } + } + case []interface{}: + out := make([]interface{}, len(val)) + for i, e := range val { + out[i] = Search(e, path) + } + return out + } + return nil +} + +func getPath(field string, delim ...string) []string { + if field == "" { + return nil + } + + d := DefaultFieldDelimiter + if len(delim) > 0 { + d = delim[0] + } + return strings.Split(field, d) +} + +func Set(field string, data, value any, delim ...string) error { + return set(getPath(field, delim...), data, value) +} + +func set(path []string, data, value any) error { + if len(path) == 0 { + return nil + } + + switch v := data.(type) { + case map[string]interface{}: + if len(path) == 1 { + + if _, ok := value.(DeleteValueType); ok { + delete(v, path[0]) + return nil + } + + v[path[0]] = value + return nil + } + + next, ok := v[path[0]] + if !ok { + next = make(map[string]interface{}) + v[path[0]] = next + } + return set(path[1:], next, value) + + case []interface{}: + idx, err := strconv.Atoi(path[0]) 
+ if err != nil { + for _, vv := range v { + if err = set(path, vv, value); err != nil { + return err + } + } + } + if idx >= len(v) { + return nil + } + return set(path[1:], v[idx], value) + } + + return nil +} + +func Get(field string, data any, delim ...string) (any, bool) { + return get(getPath(field, delim...), data) +} + +func get(path []string, data any) (any, bool) { + if len(path) == 0 { + return data, true + } + + switch v := data.(type) { + case map[string]interface{}: + val, ok := v[path[0]] + if !ok { + return nil, false + } + return get(path[1:], val) + case []interface{}: + idx, err := strconv.Atoi(path[0]) + if err != nil || idx >= len(v) { + return nil, false + } + return get(path[1:], v[idx]) + } + + return nil, false +} + +// Keep keeps the entries present at a given paths, from the interface and remove other data +// if it is an object or an array. +// The path is the sting with delim, for eg:, parent.child.key +func Keep(paths []string, data any, delim ...string) { + if len(paths) == 0 { + data = nil + return + } + switch val := data.(type) { + case map[string]interface{}: + for k, v := range val { + if Contains(k, paths) { + continue + } + p := getObjectPaths(k, paths, delim...) + if len(p) == 0 { + delete(val, k) + } + Keep(p, v, delim...) + } + case []interface{}: + for _, ar := range val { + Keep(paths, ar, delim...) 
+ } + } +} + +func getObjectPaths(prefix string, arr []string, delim ...string) []string { + var res []string + d := DefaultFieldDelimiter + if len(delim) > 0 { + d = delim[0] + } + for _, v := range arr { + if strings.HasPrefix(v, prefix+d) { + res = append(res, strings.TrimPrefix(v, prefix+d)) + } + } + return res +} + +func CloneMap(m map[string]interface{}) map[string]interface{} { + if m == nil { + return m + } + + c := make(map[string]interface{}, len(m)) + for k, v := range m { + c[k] = v + } + return c +} diff --git a/pkg/data/data_test.go b/pkg/data/data_test.go new file mode 100644 index 0000000000000000000000000000000000000000..785eefbb868a68c9d8c6b2f75b8f861ab2041e11 --- /dev/null +++ b/pkg/data/data_test.go @@ -0,0 +1,374 @@ +package data + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestDelete(t *testing.T) { + tests := []struct { + name string + in interface{} + field string + out interface{} + }{ + { + "simple", + map[string]interface{}{"a": "1", "z": "2"}, + "a", + map[string]interface{}{"z": "2"}, + }, + { + "object", + map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}}, + "a", + map[string]interface{}{}, + }, + { + "object field", + map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}}, + "a.a", + map[string]interface{}{"a": map[string]interface{}{"z": "2"}}, + }, + { + "object field from map with array", + map[string]interface{}{"a": []interface{}{ + map[string]interface{}{"a": "1", "b": "2"}, + map[string]interface{}{"a": "3", "b": "4"}, + }, "z": "2"}, + "a.a", + map[string]interface{}{"a": []interface{}{ + map[string]interface{}{"b": "2"}, + map[string]interface{}{"b": "4"}, + }, "z": "2"}, + }, + { + "object field from map with array of arrays", + map[string]interface{}{"a": []interface{}{ + []interface{}{ + map[string]interface{}{"a": "1", "b": "2"}, + }, []interface{}{ + map[string]interface{}{"a": "3", "b": "4"}, + }, + }, "z": "2"}, + "a.a", + 
map[string]interface{}{"a": []interface{}{ + []interface{}{ + map[string]interface{}{"b": "2"}, + }, []interface{}{ + map[string]interface{}{"b": "4"}, + }, + }, "z": "2"}, + }, + // Решили что автоматически удалять пустые объекты/слайсы РЅРµ РЅСѓР¶РЅРѕ + //{ + // "empty object", + // map[string]interface{}{"a": map[string]interface{}{"a": map[string]interface{}{}}}, + // []string{"a", "a"}, + // map[string]interface{}{}, + //}, { + // "empty array", + // map[string]interface{}{"a": map[string]interface{}{"a": []interface{}{}}}, + // []string{"a", "a"}, + // map[string]interface{}{}, + //}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + Delete(tt.field, tt.in) + assert.Equal(t, tt.out, tt.in) + }) + } +} + +func TestDeleteMany(t *testing.T) { + tests := []struct { + name string + in interface{} + paths []string + out interface{} + }{ + { + "simple", + map[string]interface{}{"a": "1", "z": "2", "d": "2"}, + []string{"a", "d"}, + map[string]interface{}{"z": "2"}, + }, + { + "object", + map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}}, + []string{"a"}, + map[string]interface{}{}, + }, + { + "object field", + map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2", "b": "4"}}, + []string{"a.a", "a.b"}, + map[string]interface{}{"a": map[string]interface{}{"z": "2"}}, + }, + { + "object field from map with array", + map[string]interface{}{"a": []interface{}{ + map[string]interface{}{"a": "1", "b": "2", "c": 0}, + map[string]interface{}{"a": "3", "b": "4", "c": 0}, + }, "z": "2"}, + []string{"a.a", "a.c"}, + map[string]interface{}{"a": []interface{}{ + map[string]interface{}{"b": "2"}, + map[string]interface{}{"b": "4"}, + }, "z": "2"}, + }, + { + "object field from map with array of arrays", + map[string]interface{}{"a": []interface{}{ + []interface{}{ + map[string]interface{}{"a": "1", "b": "2"}, + }, []interface{}{ + map[string]interface{}{"a": "3", "b": "4"}, + }, + }, "z": "2"}, + []string{"a.a"}, + 
map[string]interface{}{"a": []interface{}{ + []interface{}{ + map[string]interface{}{"b": "2"}, + }, []interface{}{ + map[string]interface{}{"b": "4"}, + }, + }, "z": "2"}, + }, + { + "empty object", + map[string]interface{}{"a": map[string]interface{}{"a": map[string]interface{}{}}}, + []string{"a.a", "a"}, + map[string]interface{}{}, + }, + { + "field not exist in object", + map[string]interface{}{"a": map[string]interface{}{"a": map[string]interface{}{}}}, + []string{"a.b"}, + map[string]interface{}{"a": map[string]interface{}{"a": map[string]interface{}{}}}, + }, + { + "empty array", + map[string]interface{}{"a": map[string]interface{}{"a": []interface{}{}}}, + []string{"a.a", "a"}, + map[string]interface{}{}, + }, + { + "field not exist in array", + map[string]interface{}{"a": map[string]interface{}{"a": []interface{}{}}}, + []string{"a.b"}, + map[string]interface{}{"a": map[string]interface{}{"a": []interface{}{}}}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + DeleteMany(tt.paths, tt.in) + assert.Equal(t, tt.out, tt.in) + }) + } +} + +func TestSearch(t *testing.T) { + tests := []struct { + name string + in interface{} + path []string + out interface{} + }{ + { + "simple", + map[string]interface{}{"a": "1", "z": "2"}, + []string{"a"}, + "1", + }, + { + "object", + map[string]interface{}{ + "a": map[string]interface{}{"a": "1", "z": "2"}, + "b": map[string]interface{}{"c": "1", "d": "2"}, + }, + []string{"a"}, + map[string]interface{}{"a": "1", "z": "2"}, + }, + { + "object field", + map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}}, + []string{"a", "a"}, + "1", + }, + { + "object field from map with array", + map[string]interface{}{"a": []interface{}{ + map[string]interface{}{"a": "1", "b": "2"}, + map[string]interface{}{"a": "3", "b": "4"}, + }, "z": "2"}, + []string{"a", "a"}, + []interface{}{"1", "3"}, + }, + { + "object field from array of arrays", + []interface{}{ + []interface{}{ + 
map[string]interface{}{"a": "1", "b": "2"}, + }, []interface{}{ + map[string]interface{}{"a": "3", "b": "4"}, + }, + }, + []string{"a"}, + []interface{}{[]interface{}{"1"}, []interface{}{"3"}}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + out := Search(tt.in, tt.path) + assert.Equal(t, tt.out, out) + }) + } +} + +func TestSet(t *testing.T) { + type args struct { + field string + data any + value any + } + tests := []struct { + name string + args args + wantData any + wantErr assert.ErrorAssertionFunc + }{ + {"Simple", args{"a", map[string]interface{}{"a": "0"}, "a"}, map[string]interface{}{"a": "a"}, assert.NoError}, + {"New key", args{"b", map[string]interface{}{"a": "0"}, "a"}, map[string]interface{}{"a": "0", "b": "a"}, assert.NoError}, + {"Path", args{"a.b.c", map[string]interface{}{"a": map[string]any{"b": map[string]any{"c": "0"}}}, "c"}, map[string]any{"a": map[string]any{"b": map[string]any{"c": "c"}}}, assert.NoError}, + {"Delete", args{"a.b", map[string]interface{}{"a": map[string]any{"b": map[string]any{"c": "0"}}}, DeleteValue}, map[string]any{"a": map[string]any{}}, assert.NoError}, + {"Create map", args{"b.a", map[string]interface{}{"a": "0"}, "a"}, map[string]interface{}{"a": "0", "b": map[string]interface{}{"a": "a"}}, assert.NoError}, + {"Map value", args{"a", map[string]interface{}{"a": "0"}, map[string]interface{}{"a": "a"}}, map[string]interface{}{"a": map[string]interface{}{"a": "a"}}, assert.NoError}, + {"Slice", args{"a.a", map[string]interface{}{"a": []any{map[string]any{"a": "0"}, map[string]any{"a": "0", "b": "b"}}}, "a"}, map[string]interface{}{"a": []any{map[string]any{"a": "a"}, map[string]any{"a": "a", "b": "b"}}}, assert.NoError}, + {"Slice", args{"a.0.a", map[string]interface{}{"a": []any{map[string]any{"a": "0"}, map[string]any{"a": "0", "b": "b"}}}, "a"}, map[string]interface{}{"a": []any{map[string]any{"a": "a"}, map[string]any{"a": "0", "b": "b"}}}, assert.NoError}, + } + for _, tt := range 
tests { + t.Run(tt.name, func(t *testing.T) { + data := tt.args.data + tt.wantErr(t, Set(tt.args.field, data, tt.args.value), fmt.Sprintf("Set(%v, %v, %v)", tt.args.field, data, tt.args.value)) + assert.Equal(t, tt.wantData, data) + }) + } +} + +func TestGet(t *testing.T) { + type args struct { + field string + data any + } + tests := []struct { + name string + args args + want any + found bool + }{ + {"Direct value", args{"", 100}, 100, true}, + {"Not found", args{"a", 100}, nil, false}, + {"Simple", args{"a", map[string]any{"a": "0"}}, "0", true}, + {"Path", args{"a.b.c", map[string]any{"a": map[string]any{"b": map[string]any{"c": "c"}}}}, "c", true}, + {"Incorrect path", args{"a.b.wrong", map[string]any{"a": map[string]any{"b": map[string]any{"c": "c"}}}}, nil, false}, + {"Map value", args{"a.b", map[string]any{"a": map[string]any{"b": map[string]any{"c": "c"}}}}, map[string]any{"c": "c"}, true}, + {"Slice", args{"a.1.b", map[string]any{"a": []any{map[string]any{"b": "0"}, map[string]any{"b": "1"}}}}, "1", true}, + {"Slice out of range", args{"a.2.b", map[string]any{"a": []any{map[string]any{"b": "0"}, map[string]any{"b": "1"}}}}, nil, false}, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, got1 := Get(tt.args.field, tt.args.data) + assert.Equalf(t, tt.want, got, "Get(%v, %v)", tt.args.field, tt.args.data) + assert.Equalf(t, tt.found, got1, "Get(%v, %v)", tt.args.field, tt.args.data) + }) + } +} + +func TestKeep(t *testing.T) { + tests := []struct { + name string + in interface{} + path []string + out interface{} + }{ + { + "simple", + map[string]interface{}{"a": "1", "z": "2"}, + []string{"a"}, + map[string]interface{}{"a": "1"}, + }, + { + "object", + map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}}, + []string{"a"}, + map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}}, + }, + { + "no field", + map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}}, + []string{"z"}, + 
map[string]interface{}{}, + }, + { + "object field", + map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}}, + []string{"a.a"}, + map[string]interface{}{"a": map[string]interface{}{"a": "1"}}, + }, + { + "object field from map with array", + map[string]interface{}{"a": []interface{}{ + map[string]interface{}{"a": "1", "b": "2"}, + map[string]interface{}{"a": "3", "b": "4"}, + }, "z": "2"}, + []string{"a.a", "z"}, + map[string]interface{}{"a": []interface{}{ + map[string]interface{}{"a": "1"}, + map[string]interface{}{"a": "3"}, + }, "z": "2"}, + }, + { + "object field from map with array of arrays", + map[string]interface{}{"a": []interface{}{ + []interface{}{ + map[string]interface{}{"a": "1", "b": "2"}, + }, []interface{}{ + map[string]interface{}{"a": "3", "b": "4"}, + }, + }, "z": "2"}, + []string{"a.b", "z"}, + map[string]interface{}{"a": []interface{}{ + []interface{}{ + map[string]interface{}{"b": "2"}, + }, []interface{}{ + map[string]interface{}{"b": "4"}, + }, + }, "z": "2"}, + }, + { + "empty object", + map[string]interface{}{"a": map[string]interface{}{"a": map[string]interface{}{}}}, + []string{"a.b"}, + map[string]interface{}{"a": map[string]interface{}{}}, + }, { + "empty array", + map[string]interface{}{"a": map[string]interface{}{"a": []interface{}{}}}, + []string{"a.b"}, + map[string]interface{}{"a": map[string]interface{}{}}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + Keep(tt.path, tt.in) + assert.Equal(t, tt.out, tt.in) + }) + } +} diff --git a/pkg/filter/filter.go b/pkg/filter/filter.go new file mode 100644 index 0000000000000000000000000000000000000000..ea2f1d436aba0ecced0a6473440e9fb4a782664d --- /dev/null +++ b/pkg/filter/filter.go @@ -0,0 +1,410 @@ +package filter + +import ( + "fmt" + "reflect" + "strings" + + "git.perx.ru/perxis/perxis-go/pkg/errors" + "git.perx.ru/perxis/perxis-go/pkg/schema" + "git.perx.ru/perxis/perxis-go/pkg/schema/field" + 
"git.perx.ru/perxis/perxis-go/pkg/schema/validate" + "github.com/hashicorp/go-multierror" + "github.com/mitchellh/mapstructure" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/x/bsonx" +) + +type Op string + +const ( + Equal Op = "eq" + NotEqual Op = "neq" + Less Op = "lt" + LessOrEqual Op = "lte" + Greater Op = "gt" + GreaterOrEqual Op = "gte" + In Op = "in" + NotIn Op = "nin" + Contains Op = "contains" + NotContains Op = "ncontains" + Or Op = "or" + And Op = "and" + Near Op = "near" +) + +type Filter struct { + Op Op + Field string + Value interface{} +} + +func (f Filter) Format(s fmt.State, verb rune) { + fmt.Fprintf(s, "{Op:%s Field:%s Value:%+v}", f.Op, f.Field, f.Value) +} + +func NewFilter(op Op, field string, val interface{}) *Filter { + return &Filter{ + Op: op, + Field: field, + Value: val, + } +} + +type FilterHandler struct { + schemas []*schema.Schema + qbuilder QueryBuilder + prefix string +} + +func NewFilterHandler(sch ...*schema.Schema) *FilterHandler { + return &FilterHandler{ + schemas: sch, + //qbuilder: qb, + } +} + +func (h *FilterHandler) SetTrimPrefix(prefix string) *FilterHandler { + h.prefix = prefix + return h +} + +func (h *FilterHandler) removeFieldPrefix(f string) string { + if h.prefix != "" { + return strings.TrimPrefix(f, h.prefix+".") + } + return f +} + +func (h *FilterHandler) AddSchema(sch ...*schema.Schema) *FilterHandler { + for _, s := range sch { + h.schemas = append(h.schemas, s) + } + return h +} + +func (h *FilterHandler) SetQueryBuilder(qb QueryBuilder) { + h.qbuilder = qb +} + +func (h *FilterHandler) Validate(filter ...*Filter) (err error) { + if len(h.schemas) == 0 { + return errors.New("no schema provided") + } + + for _, sch := range h.schemas { + var merr *multierror.Error + + for _, f := range filter { + if err := h.validate(sch, f); err != nil { + merr = multierror.Append(merr, err) + } + } + if merr != nil { + merr.ErrorFormat = func(i []error) string { + return fmt.Sprintf("%d validation 
error(s)", len(i)) + } + return errors.WithField(merr, "filter") + } + } + return nil +} + +// todo: '$elemMatch' - запросы Рє полю-массиву РЅР° попадание РІ условие: '{ results: { $elemMatch: { $gte: 80, $lt: 85 } }' ? + +func (h *FilterHandler) validate(sch *schema.Schema, f *Filter) (err error) { + if f == nil { + return + } + + fld := h.removeFieldPrefix(f.Field) + + switch f.Op { + case Equal, NotEqual, Less, LessOrEqual, Greater, GreaterOrEqual: + fld := sch.GetField(fld) + if fld == nil { + return h.formatErr(f.Field, f.Op, errors.New("field not found in collection schema")) + } + + if f.Value, err = schema.Decode(nil, fld, f.Value); err != nil { + return h.formatErr(f.Field, f.Op, err) + } + if err = validate.Validate(nil, fld, f.Value); err != nil { + return h.formatErr(f.Field, f.Op, err) + } + case In, NotIn: + fld := sch.GetField(fld) + if fld == nil { + return h.formatErr(f.Field, f.Op, errors.New("field not found in collection schema")) + } + val := reflect.ValueOf(f.Value) + if val.IsZero() || (val.Kind() != reflect.Array && val.Kind() != reflect.Slice) { + return h.formatErr(f.Field, f.Op, errors.New("\"IN/NOT IN\" operations require array type for value")) + } + + switch fld.GetType().(type) { + case *field.ArrayType: + f.Value, err = schema.Decode(nil, fld, f.Value) + if err != nil { + return h.formatErr(f.Field, f.Op, err) + } + default: + decodedVal := make([]interface{}, 0, val.Len()) + for i := 0; i < val.Len(); i++ { + v, err := schema.Decode(nil, fld, val.Index(i).Interface()) + if err != nil { + return h.formatErr(f.Field, f.Op, err) + } + decodedVal = append(decodedVal, v) + } + + f.Value = decodedVal + } + + case Contains, NotContains: + fld := sch.GetField(fld) + if fld == nil { + return h.formatErr(f.Field, f.Op, errors.New("field not found in collection schema")) + } + + typ := fld.GetType() + + if typ.Name() != "string" && typ.Name() != "array" { + return h.formatErr(f.Field, f.Op, errors.New("\"CONTAINS/NOT CONTAINS\" operations 
require field to be 'string' or 'string array'"))
+		}
+		if typ.Name() == "array" {
+			params := fld.Params.(*field.ArrayParameters)
+			// The array item type must be declared and must be a string for
+			// CONTAINS/NOT CONTAINS to apply. The original condition used
+			// `params.Item != nil`, which rejected every well-formed string
+			// array and dereferenced a nil Item otherwise.
+			if params.Item == nil || params.Item.GetType().Name() != "string" {
+				return h.formatErr(f.Field, f.Op, errors.New("\"CONTAINS/NOT CONTAINS\" operations require field to be 'string' or 'string array'"))
+			}
+		}
+
+		if reflect.TypeOf(f.Value).Kind() != reflect.String {
+			return h.formatErr(f.Field, f.Op, errors.New("\"CONTAINS/NOT CONTAINS\" operations require value to be 'string'"))
+		}
+
+	case Or, And:
+		// Compound filters carry a nested filter list; validate each child.
+		fltrs, ok := f.Value.([]*Filter)
+		if !ok {
+			return h.formatErr(f.Field, f.Op, errors.New("array of filters should be provided for operations "))
+		}
+		for _, f := range fltrs {
+			err = h.validate(sch, f)
+			if err != nil {
+				return err
+			}
+		}
+
+	case Near:
+		fld := sch.GetField(fld)
+		if fld == nil {
+			return h.formatErr(f.Field, f.Op, errors.New("field not found in collection schema"))
+		}
+
+		_, ok := fld.Params.(*field.LocationParameters)
+		if !ok {
+			return h.formatErr(f.Field, f.Op, errors.New("field must be a location"))
+		}
+
+		value, ok := f.Value.(map[string]interface{})
+		if !ok {
+			return h.formatErr(f.Field, f.Op, errors.New("filter value should be map"))
+		}
+
+		point, ok := value["point"]
+		if !ok {
+			return h.formatErr(f.Field, f.Op, errors.New("filter value should have location"))
+		}
+
+		// Decode the coordinates into a GeoJSON point to validate shape.
+		var p field.GeoJSON
+		if err := mapstructure.Decode(map[string]interface{}{"type": "Point", "coordinates": point}, &p); err != nil {
+			return h.formatErr(f.Field, f.Op, err)
+		}
+
+		// Optional max distance: must be numeric and non-negative.
+		maxD, ok := value["distance"]
+		if ok {
+			v := reflect.ValueOf(maxD)
+			if !v.Type().ConvertibleTo(reflect.TypeOf(float64(0))) {
+				return h.formatErr(f.Field, f.Op, errors.New("filter value distance must be a number"))
+			}
+			val := v.Convert(reflect.TypeOf(float64(0)))
+			if val.Float() < 0 {
+				return h.formatErr(f.Field, f.Op, errors.New("filter value distance should not be negative"))
+			}
+		}
+
+	default:
+		return h.formatErr(f.Field, f.Op,
errors.New("unknown operation")) + } + + return nil +} + +func (*FilterHandler) formatErr(args ...interface{}) error { + var ( + f string + op Op + err error + ) + for _, arg := range args { + switch v := arg.(type) { + case string: + f = v + case Op: + op = v + case error: + err = v + } + } + return errors.WithField(fmt.Errorf("op: '%s' %s", op, err), f) +} + +func (h *FilterHandler) Query(filter ...*Filter) interface{} { + return h.qbuilder.Query(filter...) +} + +type QueryBuilder interface { + Query(filter ...*Filter) interface{} + SetFieldPrefix(string) +} + +type mongoQueryBuilder struct { + m map[Op]string + prefix string +} + +func NewMongoQueryBuilder() QueryBuilder { + b := new(mongoQueryBuilder) + b.m = map[Op]string{ + Equal: "$eq", + NotEqual: "$ne", + Less: "$lt", + LessOrEqual: "$lte", + Greater: "$gt", + GreaterOrEqual: "$gte", + In: "$in", + NotIn: "$nin", + Contains: "$regex", + NotContains: "$not", + Or: "$or", + And: "$and", + Near: "$near", + } + return b +} + +func (b *mongoQueryBuilder) getOp(op Op) string { + return b.m[op] +} + +func (b *mongoQueryBuilder) SetFieldPrefix(prefix string) { + b.prefix = prefix +} + +func (b *mongoQueryBuilder) Query(filters ...*Filter) interface{} { + if len(filters) == 0 { + return bson.M{} + } + filter := &Filter{Op: And, Value: filters} + return b.query(filter) +} + +func (b *mongoQueryBuilder) query(f *Filter) bson.M { + if f == nil { + return nil + } + + switch f.Op { + case Equal, NotEqual, Less, LessOrEqual, Greater, GreaterOrEqual, In, NotIn: + return bson.M{ + b.field(f.Field): bson.M{ + b.getOp(f.Op): f.Value, + }, + } + case Contains, NotContains: + + val, _ := f.Value.(string) + return bson.M{ + b.field(f.Field): bson.M{ + b.getOp(f.Op): bsonx.Regex(val, ""), + }, + } + + case Or, And: + fltrs, ok := f.Value.([]*Filter) + if !ok { + return nil + } + + arr := bson.A{} + for _, fltr := range fltrs { + arr = append(arr, b.query(fltr)) + } + return bson.M{ + b.getOp(f.Op): arr, + } + case Near: + val, 
ok := f.Value.(map[string]interface{}) + if ok { + var p field.GeoJSON + c, ok := val["point"] + if !ok { + return nil + } + if err := mapstructure.Decode(map[string]interface{}{"type": "Point", "coordinates": c}, &p); err != nil { + return nil + } + q := bson.D{{Key: "$geometry", Value: p}} + + if maxD, ok := val["distance"]; ok { + q = append(q, bson.E{Key: "$maxDistance", Value: maxD}) + } + + return bson.M{ + b.field(f.Field + ".geometry"): bson.M{b.getOp(f.Op): q}, + } + } + } + + return nil +} + +func (b *mongoQueryBuilder) field(f string) string { + if b.prefix == "" || strings.HasPrefix(f, b.prefix) { + return f + } + return b.prefix + "." + f +} + +// $text search ?? +//func (b *mongoQueryBuilder) textSearchQuery(filters ...*Filter) string { +// cnt, notcnt := "", "" +// for _, f := range filters { +// val, ok := f.Value.(string) +// if !ok { +// continue +// } +// switch f.Op { +// case Contains: +// if len(cnt) > 0 { +// cnt += " " +// } +// cnt += val +// case NotContains: +// words := strings.Split(val, " ") +// for _, w := range words { +// if len(notcnt) > 0 { +// notcnt += " " +// } +// notcnt += "-" + w +// } +// } +// } +// if len(cnt) == 0 { +// return "" +// } +// if len(notcnt) > 0 { +// cnt += " " + notcnt +// } +// return cnt +//} diff --git a/pkg/filter/filter_test.go b/pkg/filter/filter_test.go new file mode 100644 index 0000000000000000000000000000000000000000..7283e26a0dfebcc5159211e49c30dc29150f12db --- /dev/null +++ b/pkg/filter/filter_test.go @@ -0,0 +1,473 @@ +package filter + +import ( + "testing" + "time" + + "git.perx.ru/perxis/perxis-go/pkg/schema" + "git.perx.ru/perxis/perxis-go/pkg/schema/field" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +func TestFilterHandler(t *testing.T) { + + sch := schema.New( + "str", field.String(), + "num", field.Number(field.NumberFormatInt), + "obj", field.Object( + "bool", field.Bool(), + "arr", 
field.Array(field.Time()), + "list", field.Array( + field.Object( + "num1", field.Number(field.NumberFormatFloat), + "str1", field.String(), + ), + ), + ), + "date", field.Time(), + "geo", field.Location(), + ) + h := NewFilterHandler(sch) + ph := NewFilterHandler(sch).SetTrimPrefix("data") + + h.SetQueryBuilder(NewMongoQueryBuilder()) + ph.SetQueryBuilder(NewMongoQueryBuilder()) + + var err error + + t.Run("Validate", func(t *testing.T) { + t.Run("Simple", func(t *testing.T) { + t.Run("String", func(t *testing.T) { + f := &Filter{Op: Equal, Field: "str", Value: "zzz"} + err = h.Validate(f) + require.NoError(t, err) + + f = &Filter{Op: Equal, Field: "data.str", Value: "zzz"} + err = ph.Validate(f) + require.NoError(t, err) + }) + t.Run("Int", func(t *testing.T) { + f := &Filter{Op: NotEqual, Field: "num", Value: 5.0} + err = h.Validate(f) + require.NoError(t, err) + assert.IsType(t, int64(0), f.Value) + + f = &Filter{Op: NotEqual, Field: "data.num", Value: 5.0} + err = ph.Validate(f) + require.NoError(t, err) + assert.IsType(t, int64(0), f.Value) + }) + t.Run("Time", func(t *testing.T) { + f := &Filter{Op: LessOrEqual, Field: "date", Value: "22 Dec 1997"} + err = h.Validate(f) + require.Error(t, err) + + f = &Filter{Op: LessOrEqual, Field: "data.date", Value: "22 Dec 1997"} + err = ph.Validate(f) + require.Error(t, err) + }) + t.Run("Location", func(t *testing.T) { + f := &Filter{Op: Near, Field: "geo", Value: ""} + err = h.Validate(f) + require.Error(t, err) + + f = &Filter{Op: Near, Field: "data.geo", Value: ""} + err = ph.Validate(f) + require.Error(t, err) + + fv := map[string]interface{}{ + "point": []float64{55, 55}, + "distance": 1000, + } + + f = &Filter{Op: Near, Field: "data.geo", Value: fv} + err = ph.Validate(f) + require.NoError(t, err) + + fv["distance"] = -1 + f = &Filter{Op: Near, Field: "data.geo", Value: fv} + err = ph.Validate(f) + require.Error(t, err) + + }) + }) + t.Run("Embedded array field", func(t *testing.T) { + w, _ := 
time.Parse(time.RFC3339, "2012-11-01T22:08:41Z") + f := &Filter{Op: In, Field: "obj.arr", Value: []interface{}{"2012-11-01T22:08:41Z"}} + err = h.Validate(f) + require.NoError(t, err) + assert.Equal(t, w, f.Value.([]interface{})[0]) + + f = &Filter{Op: In, Field: "data.obj.arr", Value: []interface{}{"2012-11-01T22:08:41Z"}} + err = ph.Validate(f) + require.NoError(t, err) + assert.Equal(t, w, f.Value.([]interface{})[0]) + }) + t.Run("Embedded string contains", func(t *testing.T) { + f := &Filter{Op: Contains, Field: "obj.list.str1", Value: "zzz"} + err = h.Validate(f) + require.NoError(t, err) + + f = &Filter{Op: Contains, Field: "data.obj.list.str1", Value: "zzz"} + err = ph.Validate(f) + require.NoError(t, err) + }) + t.Run("Compound filter with 'OR' operation", func(t *testing.T) { + t.Run("No Err", func(t *testing.T) { + w1, _ := time.Parse(time.RFC3339, "2012-11-01T22:08:41Z") + w2, _ := time.Parse(time.RFC3339, "2015-12-01T22:08:41Z") + + ff := []*Filter{ + {Op: In, Field: "date", Value: []interface{}{"2012-11-01T22:08:41Z", "2015-12-01T22:08:41Z"}}, + {Op: Or, Field: "", Value: []*Filter{ + {Op: And, Field: "", Value: []*Filter{ + {Op: GreaterOrEqual, Field: "date", Value: "2012-11-01T22:08:41Z"}, + {Op: LessOrEqual, Field: "date", Value: "2015-12-01T22:08:41Z"}, + }}, + {Op: Equal, Field: "obj.bool", Value: true}, + }}, + } + err = h.Validate(ff...) 
+ require.NoError(t, err) + assert.ElementsMatch(t, []interface{}{w1, w2}, ff[0].Value.([]interface{})) + assert.Equal(t, w1, ff[1].Value.([]*Filter)[0].Value.([]*Filter)[0].Value) + assert.Equal(t, w2, ff[1].Value.([]*Filter)[0].Value.([]*Filter)[1].Value) + + ff = []*Filter{ + {Op: In, Field: "data.date", Value: []interface{}{"2012-11-01T22:08:41Z", "2015-12-01T22:08:41Z"}}, + {Op: Or, Field: "", Value: []*Filter{ + {Op: And, Field: "", Value: []*Filter{ + {Op: GreaterOrEqual, Field: "data.date", Value: "2012-11-01T22:08:41Z"}, + {Op: LessOrEqual, Field: "data.date", Value: "2015-12-01T22:08:41Z"}, + }}, + {Op: Equal, Field: "data.obj.bool", Value: true}, + }}, + } + err = ph.Validate(ff...) + require.NoError(t, err) + assert.ElementsMatch(t, []interface{}{w1, w2}, ff[0].Value.([]interface{})) + assert.Equal(t, w1, ff[1].Value.([]*Filter)[0].Value.([]*Filter)[0].Value) + assert.Equal(t, w2, ff[1].Value.([]*Filter)[0].Value.([]*Filter)[1].Value) + }) + t.Run("Multiple Errors", func(t *testing.T) { + ff := []*Filter{ + {Op: In, Field: "date", Value: []interface{}{"5 Jan 2020", "10 June 2020"}}, + {Op: Or, Field: "", Value: []*Filter{ + {Op: And, Field: "", Value: []*Filter{ + {Op: GreaterOrEqual, Field: "date", Value: "2012-11-01T22:08:41Z"}, + {Op: LessOrEqual, Field: "date", Value: "2015-12-01T22:08:41Z"}, + }}, + {Op: Equal, Field: "obj.bool", Value: 15}, + }}, + } + err = h.Validate(ff...) + require.Error(t, err) + assert.Equal(t, err.Error(), "2 validation error(s)") + + ff = []*Filter{ + {Op: In, Field: "data.date", Value: []interface{}{"5 Jan 2020", "10 June 2020"}}, + {Op: Or, Field: "", Value: []*Filter{ + {Op: And, Field: "", Value: []*Filter{ + {Op: GreaterOrEqual, Field: "data.date", Value: "2012-11-01T22:08:41Z"}, + {Op: LessOrEqual, Field: "data.date", Value: "2015-12-01T22:08:41Z"}, + }}, + {Op: Equal, Field: "data.obj.bool", Value: 15}, + }}, + } + err = h.Validate(ff...) 
+ require.Error(t, err) + assert.Equal(t, err.Error(), "2 validation error(s)") + }) + }) + }) + + t.Run("Build Query", func(t *testing.T) { + t.Run("No Filters", func(t *testing.T) { + res := h.Query() + require.IsType(t, res, primitive.M{}) + + pres := ph.Query() + assert.Equal(t, res, pres, "пустой запрос СЃ префиксом Рё без должны быть одинаковые") + }) + t.Run("Equal String", func(t *testing.T) { + f := &Filter{Op: Equal, Field: "data.str", Value: "zzz"} + res := h.Query(f) + b, ok := res.(primitive.M) + require.True(t, ok) + assert.Equal(t, primitive.M{"$and": primitive.A{primitive.M{"data.str": primitive.M{"$eq": "zzz"}}}}, b) + + pf := &Filter{Op: Equal, Field: "data.str", Value: "zzz"} + pres := ph.Query(pf) + assert.Equal(t, res, pres, "запрос РІ БД СЃ полями СЃ префиксом Рё без должны быть одинаковые") + }) + t.Run("In Array", func(t *testing.T) { + w, _ := time.Parse(time.RFC3339, "2012-11-01T22:08:41Z") + f := &Filter{Op: In, Field: "obj.arr", Value: []interface{}{w}} + res := h.Query(f) + b, ok := res.(primitive.M) + require.True(t, ok) + assert.Equal(t, primitive.M{"$and": primitive.A{primitive.M{"obj.arr": primitive.M{"$in": []interface{}{w}}}}}, b) + }) + t.Run("Several ops for one field", func(t *testing.T) { + w, _ := time.Parse(time.RFC3339, "2012-11-01T22:08:41Z") + f := &Filter{Op: In, Field: "obj.arr", Value: []interface{}{w}} + res := h.Query(f) + b, ok := res.(primitive.M) + require.True(t, ok) + assert.Equal(t, primitive.M{"$and": primitive.A{primitive.M{"obj.arr": primitive.M{"$in": []interface{}{w}}}}}, b) + }) + }) +} + +//func TestFilterHandler_Integration(t *testing.T) { +// ctx := context.Background() +// +// uri := os.Getenv("MONGO_URL") +// if uri == "" { +// uri = "mongodb://localhost:27017" +// } +// opts := options.Client().SetConnectTimeout(15 * time.Second).ApplyURI(uri) +// client, err := mongo.Connect(context.Background(), opts) +// require.NoError(t, err) +// err = client.Ping(ctx, nil) +// require.NoError(t, err) +// +// 
sch := schema.New( +// "name", field.String(validate.Required()), +// "color", field.String(), +// "qty", field.Number(field.NumberFormatInt), +// "info", field.Object( +// "is_fruit", field.Bool(), +// "similar", field.Array( +// field.Object( +// "name", field.Number(field.NumberFormatFloat), +// "color", field.String(), +// ), +// ), +// "desc", field.String(), +// ), +// "produced", field.Time(), +// "shipment", field.Array(field.String()), +// ) +// +// w1, _ := time.Parse(time.RFC3339, "2020-01-01T10:08:41Z") +// w2, _ := time.Parse(time.RFC3339, "2020-05-01T10:08:41Z") +// w3, _ := time.Parse(time.RFC3339, "2020-10-01T10:08:41Z") +// +// items := []map[string]interface{}{ +// { +// "name": "apple", +// "color": "red", +// "qty": 25, +// "info": map[string]interface{}{ +// "is_fruit": true, +// "similar": []interface{}{ +// map[string]interface{}{"name": "pear", "color": "yellow"}, +// map[string]interface{}{"name": "lemon", "color": "yellow"}, +// }, +// "desc": "An apple is the edible fruit . Apple trees are cultivated worldwide and have religious and mythological " + +// "significance in many cultures. Apples are eaten with honey at the Jewish New Year of Rosh Hashanah to symbolize a sweet new year.", +// }, +// "produced": w1, +// "shipment": []interface{}{"Russia", "Iran"}, +// "storepoint": map[string]interface{}{"type": "Point", "coordinates": []float64{55.751472, 37.618727}}, +// }, +// { +// "name": "orange", +// "color": "orange", +// "qty": 10, +// "info": map[string]interface{}{ +// "is_fruit": true, +// "similar": []interface{}{ +// map[string]interface{}{"name": "lemon", "color": "yellow"}, +// map[string]interface{}{"name": "grapefruit", "color": "red"}, +// }, +// "desc": "The orange is the edible fruit of various citrus species; a hybrid between pomelo and mandarin. Orange trees are widely grown" + +// " in tropical and subtropical climates for their sweet fruit. 
The fruit of the orange tree can be eaten fresh, or processed for its juice or fragrant peel.", +// }, +// "produced": w2, +// "shipment": []interface{}{"Egypt", "Iran"}, +// "storepoint": map[string]interface{}{"type": "Point", "coordinates": []float64{55.716797, 37.552809}}, +// }, +// { +// "name": "tomato", +// "color": "red", +// "qty": 1, +// "info": map[string]interface{}{ +// "is_fruit": false, +// "similar": []interface{}{ +// map[string]interface{}{"name": "cucumber", "color": "green"}, +// map[string]interface{}{"name": "apple", "color": "yellow"}, +// }, +// "desc": "The tomato is the edible red berry. The tomato is consumed in diverse ways, raw or cooked, in many dishes, " + +// "sauces, salads, and drinks. Numerous varieties of the tomato plant are widely grown in temperate climates across the world.", +// }, +// "produced": w3, +// "shipment": []interface{}{"Russia", "Italy"}, +// "storepoint": map[string]interface{}{"type": "Point", "coordinates": []float64{55.760688, 37.619125}}, +// }, +// } +// +// db := client.Database("perxis_test_filter") +// coll := db.Collection("items") +// coll.Drop(ctx) +// +// for _, item := range items { +// _, err = coll.InsertOne(ctx, item) +// require.NoError(t, err) +// } +// +// h := NewFilterHandler(sch) +// h.SetQueryBuilder(NewMongoQueryBuilder()) +// +// t.Run("By Color [Equal/NotEqual]", func(t *testing.T) { +// t.Run("Red", func(t *testing.T) { +// query := h.Query(&Filter{Op: Equal, Field: "color", Value: "red"}) +// res, err := coll.Find(ctx, query) +// require.NoError(t, err) +// +// var data []map[string]interface{} +// err = res.All(ctx, &data) +// require.NoError(t, err) +// require.Len(t, data, 2) +// assert.ElementsMatch(t, []interface{}{"apple", "tomato"}, []interface{}{data[0]["name"], data[1]["name"]}) +// }) +// t.Run("Not Red", func(t *testing.T) { +// query := h.Query(&Filter{Op: NotEqual, Field: "color", Value: "red"}) +// res, err := coll.Find(ctx, query) +// require.NoError(t, err) +// +// 
var data []map[string]interface{} +// err = res.All(ctx, &data) +// require.NoError(t, err) +// require.Len(t, data, 1) +// assert.Equal(t, "orange", data[0]["name"]) +// }) +// }) +// t.Run("By Quantity [Less/Greater]", func(t *testing.T) { +// query := h.Query(&Filter{Op: LessOrEqual, Field: "qty", Value: 25}, &Filter{Op: Greater, Field: "qty", Value: 1}) +// res, err := coll.Find(ctx, query) +// require.NoError(t, err) +// +// var data []map[string]interface{} +// err = res.All(ctx, &data) +// require.NoError(t, err) +// require.Len(t, data, 2) +// assert.ElementsMatch(t, []interface{}{"apple", "orange"}, []interface{}{data[0]["name"], data[1]["name"]}) +// }) +// t.Run("Not Fruit [Equal embedded field]", func(t *testing.T) { +// query := h.Query(&Filter{Op: Equal, Field: "info.is_fruit", Value: false}) +// res, err := coll.Find(ctx, query) +// require.NoError(t, err) +// +// var data []map[string]interface{} +// err = res.All(ctx, &data) +// require.NoError(t, err) +// require.Len(t, data, 1) +// assert.Equal(t, "tomato", data[0]["name"]) +// }) +// t.Run("By Similar [In/NotIn]", func(t *testing.T) { +// t.Run("Similar to cucumber, pear", func(t *testing.T) { +// query := h.Query(&Filter{Op: In, Field: "info.similar.name", Value: []string{"cucumber", "pear"}}) +// res, err := coll.Find(ctx, query) +// require.NoError(t, err) +// +// var data []map[string]interface{} +// err = res.All(ctx, &data) +// require.NoError(t, err) +// require.Len(t, data, 2) +// assert.ElementsMatch(t, []interface{}{"apple", "tomato"}, []interface{}{data[0]["name"], data[1]["name"]}) +// }) +// t.Run("Not Similar to cucumber, pear", func(t *testing.T) { +// query := h.Query(&Filter{Op: NotIn, Field: "info.similar.name", Value: []string{"cucumber", "grapefruit"}}) +// res, err := coll.Find(ctx, query) +// require.NoError(t, err) +// +// var data []map[string]interface{} +// err = res.All(ctx, &data) +// require.NoError(t, err) +// require.Len(t, data, 1) +// assert.Equal(t, "apple", 
data[0]["name"]) +// }) +// }) +// t.Run("By Description [Contains/NotContains]", func(t *testing.T) { +// t.Run("Contains", func(t *testing.T) { +// query := h.Query(&Filter{Op: And, Value: []*Filter{ +// &Filter{Op: In, Field: "info.similar.color", Value: []string{"yellow"}}, +// &Filter{Op: Contains, Field: "info.desc", Value: "edible fruit"}, +// }}) +// res, err := coll.Find(ctx, query) +// require.NoError(t, err) +// var data []map[string]interface{} +// err = res.All(ctx, &data) +// require.NoError(t, err) +// require.Len(t, data, 2) +// assert.ElementsMatch(t, []interface{}{"apple", "orange"}, []interface{}{data[0]["name"], data[1]["name"]}) +// }) +// t.Run("Not Contains", func(t *testing.T) { +// query := h.Query(&Filter{Op: NotContains, Field: "info.desc", Value: "fruit"}) +// res, err := coll.Find(ctx, query) +// require.NoError(t, err) +// +// var data []map[string]interface{} +// err = res.All(ctx, &data) +// require.NoError(t, err) +// for _, d := range data { +// fmt.Println(d["name"]) +// } +// require.Len(t, data, 1) +// assert.Equal(t, "tomato", data[0]["name"]) +// }) +// }) +// t.Run("By Shipment [Contains/NotContains]", func(t *testing.T) { +// t.Run("Contains", func(t *testing.T) { +// query := h.Query( +// &Filter{Op: Contains, Field: "shipment", Value: "Russia"}, +// ) +// res, err := coll.Find(ctx, query) +// require.NoError(t, err) +// var data []map[string]interface{} +// err = res.All(ctx, &data) +// require.NoError(t, err) +// require.Len(t, data, 2) +// assert.ElementsMatch(t, []interface{}{"apple", "tomato"}, []interface{}{data[0]["name"], data[1]["name"]}) +// }) +// t.Run("Not Contains", func(t *testing.T) { +// query := h.Query(&Filter{Op: NotContains, Field: "shipment", Value: "Iran"}) +// res, err := coll.Find(ctx, query) +// require.NoError(t, err) +// +// var data []map[string]interface{} +// err = res.All(ctx, &data) +// require.NoError(t, err) +// for _, d := range data { +// fmt.Println(d["name"]) +// } +// require.Len(t, 
data, 1) +// assert.Equal(t, "tomato", data[0]["name"]) +// }) +// }) +// t.Run("Compound Query", func(t *testing.T) { +// query := h.Query(&Filter{Op: Or, Value: []*Filter{ +// &Filter{Op: And, Value: []*Filter{ +// &Filter{Op: In, Field: "color", Value: []interface{}{"red", "yellow", "green"}}, +// &Filter{Op: Less, Field: "qty", Value: 10}, +// }}, // 1 - tomato +// &Filter{Op: Equal, Field: "name", Value: "pepper"}, // 0 +// &Filter{Op: And, Value: []*Filter{ +// &Filter{Op: GreaterOrEqual, Field: "produced", Value: w1}, +// &Filter{Op: Less, Field: "produced", Value: w2}, // 1 - apple +// }}, +// }}) +// res, err := coll.Find(ctx, query) +// require.NoError(t, err) +// +// var data []map[string]interface{} +// err = res.All(ctx, &data) +// require.NoError(t, err) +// require.Len(t, data, 2) +// assert.ElementsMatch(t, []interface{}{"apple", "tomato"}, []interface{}{data[0]["name"], data[1]["name"]}) +// }) +//} diff --git a/pkg/items/codec.go b/pkg/items/codec.go new file mode 100644 index 0000000000000000000000000000000000000000..6264c3b582a2af08c1746c763807b315c0ae2fa9 --- /dev/null +++ b/pkg/items/codec.go @@ -0,0 +1,9 @@ +package items + +type Encoder interface { + Encode(item *Item) (any, error) +} + +type Decoder interface { + Decode(value any, item *Item) error +} diff --git a/pkg/items/context.go b/pkg/items/context.go new file mode 100644 index 0000000000000000000000000000000000000000..87e600e5b40da50381245a626e8228ab20485de8 --- /dev/null +++ b/pkg/items/context.go @@ -0,0 +1,71 @@ +package items + +import ( + "context" + + "git.perx.ru/perxis/perxis-go/pkg/clients" + "git.perx.ru/perxis/perxis-go/pkg/environments" + "git.perx.ru/perxis/perxis-go/pkg/spaces" +) + +type Context struct { + Items + Clients clients.Clients + + SpaceID string + EnvID string + CollectionID string + ItemID string + Item *Item + Space *spaces.Space + Environment *environments.Environment + + ViewSpaceID string + ViewEnvironmentID string + ViewCollectionID string + ViewSpace 
*spaces.Space + ViewEnvironment *environments.Environment +} + +type itemsCtx struct{} + +func WithContext(ctx context.Context, itmCtx *Context) context.Context { + if ctx == nil { + ctx = context.Background() + } + + if itmCtx.ViewSpaceID == "" { + itmCtx.ViewSpaceID = itmCtx.SpaceID + } + if itmCtx.ViewEnvironmentID == "" { + itmCtx.ViewEnvironmentID = itmCtx.EnvID + } + if itmCtx.ViewCollectionID == "" { + itmCtx.ViewCollectionID = itmCtx.CollectionID + } + if itmCtx.ViewSpace == nil { + itmCtx.ViewSpace = itmCtx.Space + } + if itmCtx.ViewEnvironment == nil { + itmCtx.ViewEnvironment = itmCtx.Environment + } + + p, _ := ctx.Value(itemsCtx{}).(*Context) + if p != nil { + *p = *itmCtx + return ctx + } + + return context.WithValue(ctx, itemsCtx{}, itmCtx) +} + +func GetContext(ctx context.Context) *Context { + if ctx == nil { + return new(Context) + } + p, _ := ctx.Value(itemsCtx{}).(*Context) + if p == nil { + return new(Context) + } + return p +} diff --git a/pkg/items/events.go b/pkg/items/events.go new file mode 100644 index 0000000000000000000000000000000000000000..14ff72216edb875ed3ca15cc879913fdb21a7ed7 --- /dev/null +++ b/pkg/items/events.go @@ -0,0 +1,140 @@ +package items + +import ( + "git.perx.ru/perxis/perxis-go/pkg/errors" + pb "git.perx.ru/perxis/perxis-go/proto/items" + "github.com/golang/protobuf/proto" +) + +const ( + EventCreateItem = "create_item" + EventUpdateItem = "update_item" + EventPublishItem = "publish_item" + EventUnpublishItem = "unpublish_item" + EventDeleteItem = "delete_item" + + DefaultEventSubject = "content.{{.EventType}}.{{.SpaceID}}.{{.EnvID}}.{{.CollectionID}}.{{.ItemID}}" +) + +var ( + ErrInvalidEventType = func(expected string, got any) error { + return errors.Errorf("invalid message type: expected '%s', got '%t'", expected, got) + } +) + +type EventCreate struct { + SpaceID string + EnvID string + CollectionID string + ItemID string +} + +func (e EventCreate) ToProto() (proto.Message, error) { + return 
&pb.EventCreate{SpaceId: e.SpaceID, EnvId: e.EnvID, CollectionId: e.CollectionID, ItemId: e.ItemID}, nil +} + +func (e *EventCreate) FromProto(message proto.Message) error { + p, ok := message.(*pb.EventCreate) + if !ok { + return ErrInvalidEventType("*pb.EventCreate", message) + } + + e.SpaceID = p.SpaceId + e.EnvID = p.EnvId + e.CollectionID = p.CollectionId + e.ItemID = p.ItemId + return nil +} + +type EventUpdate struct { + SpaceID string + EnvID string + CollectionID string + ItemID string +} + +func (e EventUpdate) ToProto() (proto.Message, error) { + return &pb.EventUpdate{SpaceId: e.SpaceID, EnvId: e.EnvID, CollectionId: e.CollectionID, ItemId: e.ItemID}, nil +} + +func (e *EventUpdate) FromProto(message proto.Message) error { + p, ok := message.(*pb.EventUpdate) + if !ok { + return ErrInvalidEventType("*pb.EventUpdate", message) + } + + e.SpaceID = p.SpaceId + e.EnvID = p.EnvId + e.CollectionID = p.CollectionId + e.ItemID = p.ItemId + return nil +} + +type EventPublish struct { + SpaceID string + EnvID string + CollectionID string + ItemID string +} + +func (e EventPublish) ToProto() (proto.Message, error) { + return &pb.EventPublish{SpaceId: e.SpaceID, EnvId: e.EnvID, CollectionId: e.CollectionID, ItemId: e.ItemID}, nil +} + +func (e *EventPublish) FromProto(message proto.Message) error { + p, ok := message.(*pb.EventPublish) + if !ok { + return ErrInvalidEventType("*pb.EventPublish", message) + } + e.SpaceID = p.SpaceId + e.EnvID = p.EnvId + e.CollectionID = p.CollectionId + e.ItemID = p.ItemId + return nil +} + +type EventUnpublish struct { + SpaceID string + EnvID string + CollectionID string + ItemID string +} + +func (e EventUnpublish) ToProto() (proto.Message, error) { + return &pb.EventUnpublish{SpaceId: e.SpaceID, EnvId: e.EnvID, CollectionId: e.CollectionID, ItemId: e.ItemID}, nil +} + +func (e *EventUnpublish) FromProto(message proto.Message) error { + p, ok := message.(*pb.EventUnpublish) + if !ok { + return 
ErrInvalidEventType("*pb.EventUnpublish", message) + } + e.SpaceID = p.SpaceId + e.EnvID = p.EnvId + e.CollectionID = p.CollectionId + e.ItemID = p.ItemId + return nil +} + +type EventDelete struct { + SpaceID string + EnvID string + CollectionID string + ItemID string +} + +func (e EventDelete) ToProto() (proto.Message, error) { + return &pb.EventDelete{SpaceId: e.SpaceID, EnvId: e.EnvID, CollectionId: e.CollectionID, ItemId: e.ItemID}, nil +} + +func (e *EventDelete) FromProto(message proto.Message) error { + p, ok := message.(*pb.EventDelete) + if !ok { + return ErrInvalidEventType("*pb.EventDelete", message) + } + e.SpaceID = p.SpaceId + e.EnvID = p.EnvId + e.CollectionID = p.CollectionId + e.ItemID = p.ItemId + return nil +} diff --git a/pkg/items/item.go b/pkg/items/item.go new file mode 100644 index 0000000000000000000000000000000000000000..fc3a5154f621b601c02761c126be3ce72aa979ca --- /dev/null +++ b/pkg/items/item.go @@ -0,0 +1,566 @@ +package items + +import ( + "context" + "fmt" + "reflect" + "time" + + "git.perx.ru/perxis/perxis-go/pkg/data" + "git.perx.ru/perxis/perxis-go/pkg/errors" + "git.perx.ru/perxis/perxis-go/pkg/schema" + "git.perx.ru/perxis/perxis-go/pkg/schema/field" + pb "git.perx.ru/perxis/perxis-go/proto/items" + "google.golang.org/protobuf/types/known/structpb" + "google.golang.org/protobuf/types/known/timestamppb" +) + +var ( + ErrNotSystemField = errors.New("not a system field") + ErrIncorrectValue = errors.New("incorrect value") + ErrIncorrectField = errors.New("incorrect field") +) + +type State int + +func (s State) String() string { + switch s { + case StateDraft: + return "Draft" + case StateArchived: + return "Archived" + case StateChanged: + return "Changed" + case StatePublished: + return "Published" + } + return "Unknown" +} + +const ( + StateDraft State = iota + StatePublished + StateChanged + StateArchived + + StateMax = StateArchived + + SoftDeleteSeparator = "___" +) + +var PermissionsAllowAny = &Permissions{ + Edit: true, + 
Archive: true, + Publish: true, + SoftDelete: true, + HardDelete: true, +} + +// SystemFields - системные поля Item +var SystemFields = []string{ + "id", + "space_id", + "env_id", + "collection_id", + "state", + "created_rev_at", + "created_by", + "created_at", + "updated_at", + "updated_by", + "revision_id", + "published_at", + "published_by", + "archived_at", + "archived_by", + "data", + "translations", + "locale", + "deleted", + "hidden", + "template", +} + +type Permissions struct { + Edit bool + Archive bool + Publish bool + SoftDelete bool + HardDelete bool +} + +type Item struct { + ID string `json:"id" bson:"_id"` // ID - Рдентификатор записи. Автоматически генерируется системой РїСЂРё сохранении первой ревизии. + SpaceID string `json:"spaceId" bson:"-"` + EnvID string `json:"envId" bson:"-"` + CollectionID string `json:"collectionId" bson:"-"` + State State `json:"state" bson:"state"` + CreatedRevAt time.Time `json:"createdRevAt,omitempty" bson:"created_rev_at,omitempty"` + CreatedBy string `json:"createdBy,omitempty" bson:"created_by,omitempty"` + CreatedAt time.Time `json:"createdAt,omitempty" bson:"created_at,omitempty"` + UpdatedAt time.Time `json:"updatedAt,omitempty" bson:"updated_at,omitempty"` + UpdatedBy string `json:"updatedBy,omitempty" bson:"updated_by,omitempty"` + Data map[string]interface{} `json:"data" bson:"data"` + Locale string `json:"locale" bson:"-"` + Translations map[string]map[string]interface{} `json:"translations" bson:"translations,omitempty"` + RevisionID string `json:"revId,omitempty" bson:"revision_id"` + PublishedAt time.Time `json:"publishedAt,omitempty" bson:"published_at,omitempty"` + PublishedBy string `json:"publishedBy,omitempty" bson:"published_by,omitempty"` + ArchivedAt time.Time `json:"archivedAt,omitempty" bson:"archived_at,omitempty"` + ArchivedBy string `json:"archivedBy,omitempty" bson:"archived_by,omitempty"` + Permissions *Permissions `json:"permissions,omitempty" bson:"-"` + + // Флаги записи + Deleted bool 
`json:"deleted" bson:"deleted,omitempty"` + Hidden bool `json:"hidden" bson:"hidden,omitempty"` + Template bool `json:"template" bson:"template,omitempty"` +} + +func NewItem(spaceID, envID, collID, id string, data map[string]interface{}, translations map[string]map[string]interface{}) *Item { + return &Item{ + ID: id, + SpaceID: spaceID, + EnvID: envID, + CollectionID: collID, + Data: data, + Translations: translations, + } +} + +func (i *Item) Clone() *Item { + itm := *i + itm.Data = data.CloneMap(i.Data) + + if i.Translations != nil { + itm.Translations = make(map[string]map[string]interface{}, len(i.Translations)) + for t, m := range i.Translations { + itm.Translations[t] = data.CloneMap(m) + } + } + + return &itm +} + +func (i *Item) ToMap() map[string]interface{} { + return map[string]interface{}{ + "id": i.ID, + "space_id": i.SpaceID, + "env_id": i.EnvID, + "collection_id": i.CollectionID, + "state": i.State, + "created_rev_at": i.CreatedRevAt, + "created_by": i.CreatedBy, + "created_at": i.CreatedAt, + "updated_at": i.UpdatedAt, + "updated_by": i.UpdatedBy, + "revision_id": i.RevisionID, + "published_at": i.PublishedAt, + "published_by": i.PublishedBy, + "archived_at": i.ArchivedAt, + "archived_by": i.ArchivedBy, + "data": i.Data, + "translations": i.Translations, + "locale": i.Locale, + "deleted": i.Deleted, + "hidden": i.Hidden, + "template": i.Template, + } +} + +func (i *Item) SetData(locale string, data map[string]interface{}) { + if locale != "" { + if i.Translations == nil { + i.Translations = make(map[string]map[string]interface{}) + } + i.Translations[locale] = data + return + } + i.Data = data +} + +func (i *Item) GetData(locale string) map[string]interface{} { + if locale != "" && i.Translations != nil { + translation, _ := i.Translations[locale] + return MergeData(i.Data, translation) + } + return i.Data +} + +func (i Item) Encode(ctx context.Context, s *schema.Schema) (*Item, error) { + if i.Data != nil { + dt, err := schema.Encode(nil, s, 
i.Data) + if err != nil { + //return errors.WithField(err, "data") + return nil, err + } + i.Data = dt.(map[string]interface{}) + } + if len(i.Translations) > 0 { + for l, v := range i.Translations { + dt, err := schema.Encode(nil, s, v) + if err != nil { + //return errors.WithField(err, fmt.Sprintf("translations.%s", l)) + return nil, err + } + i.Translations[l] = dt.(map[string]interface{}) + } + } + return &i, nil +} + +func (i Item) Decode(ctx context.Context, s *schema.Schema) (res *Item, err error) { + + if i.Data != nil { + i.Data, err = s.Decode(ctx, i.Data) + if err != nil { + return nil, err + //return errors.WithField(err, "data") + } + } + + return &i, nil +} + +// MergeData дополняет отсутствующие данные РёР· оригинальных данных +func MergeData(data ...map[string]interface{}) map[string]interface{} { + merge := make(map[string]interface{}) + for _, d := range data { + for k, v := range d { + merge[k] = v + } + } + return merge +} + +// ClearData убирает данные которые РЅРµ изменились РїРѕ сравнению СЃ оригинальными данными +func ClearData(data ...map[string]interface{}) map[string]interface{} { + var clear map[string]interface{} + + for _, d := range data { + if clear == nil { + clear = d + continue + } + + for k, v := range d { + if reflect.DeepEqual(clear[k], v) { + delete(clear, k) + } + } + } + + return clear +} + +type ProcessDataFunc func(ctx context.Context, sch *schema.Schema, data map[string]interface{}) (map[string]interface{}, error) + +func (i Item) ProcessData(ctx context.Context, sch *schema.Schema, fn ProcessDataFunc, locales ...string) (*Item, error) { + if i.Data != nil { + dt, err := fn(ctx, sch, i.Data) + if err != nil { + return nil, errors.WithField(err, "data") + } + i.Data = dt + } + + tr := make(map[string]map[string]interface{}) + for _, l := range locales { + + data := i.GetData(l) + + dt, err := fn(ctx, sch, data) + if err != nil { + return nil, errors.WithField(err, fmt.Sprintf("translations.%s", l)) + } + tr[l] = dt + + } + 
+ i.Translations = nil + if len(tr) > 0 { + i.Translations = tr + } + + return &i, nil +} + +// IsSystemField возвращает являться ли поле системным +func IsSystemField(field string) bool { + if data.Contains(field, SystemFields) { + return true + } + return false +} + +// SetSystemField устанавливает значение системного поля +func (i *Item) SetSystemField(field string, value interface{}) error { + ok := true + switch field { + case "id": + i.ID, ok = value.(string) + case "space_id": + i.SpaceID, ok = value.(string) + case "env_id": + i.EnvID, ok = value.(string) + case "collection_id": + i.CollectionID, ok = value.(string) + case "created_rev_at": + i.CreatedRevAt, ok = value.(time.Time) + case "created_by": + i.CreatedBy, ok = value.(string) + case "created_at": + i.CreatedAt, ok = value.(time.Time) + case "updated_by": + i.UpdatedBy, ok = value.(string) + case "updated_at": + i.UpdatedAt, ok = value.(time.Time) + case "revision_id": + i.RevisionID, ok = value.(string) + case "published_by": + i.PublishedBy, ok = value.(string) + case "published_at": + i.PublishedAt, ok = value.(time.Time) + case "hidden": + i.Hidden, ok = value.(bool) + case "deleted": + i.Deleted, ok = value.(bool) + case "template": + i.Template, ok = value.(bool) + default: + return ErrNotSystemField + } + + if !ok { + return ErrIncorrectValue + } + + return nil +} + +// GetSystem устанавливает значение системного поля +func (i *Item) GetSystem(field string) (any, error) { + switch field { + case "id": + return i.ID, nil + case "space_id": + return i.SpaceID, nil + case "env_id": + return i.EnvID, nil + case "collection_id": + return i.CollectionID, nil + case "created_rev_at": + return i.CreatedRevAt, nil + case "created_by": + return i.CreatedBy, nil + case "created_at": + return i.CreatedAt, nil + case "updated_by": + return i.UpdatedBy, nil + case "updated_at": + return i.UpdatedAt, nil + case "revision_id": + return i.RevisionID, nil + case "published_by": + return i.PublishedBy, nil + 
case "published_at": + return i.PublishedAt, nil + case "hidden": + return i.Hidden, nil + case "deleted": + return i.Deleted, nil + case "template": + return i.Template, nil + } + + return nil, ErrNotSystemField +} + +func (i *Item) setItemData(field string, value interface{}) error { + if i.Data == nil { + i.Data = make(map[string]any) + } + + return data.Set(field, i.Data, value) +} + +func (i *Item) getItemData(field string) (any, error) { + if i.Data != nil { + if v, ok := data.Get(field, i.Data); ok { + return v, nil + } + } + + return nil, ErrIncorrectField +} + +// Set устанавливает значение поля +func (i *Item) Set(field string, value interface{}) error { + if err := i.SetSystemField(field, value); !errors.Is(err, ErrNotSystemField) { + return errors.Wrapf(err, "fail to set system field '%s' value", field) + } + + return i.setItemData(field, value) +} + +// Get возвращает значение поля +func (i *Item) Get(field string) (any, error) { + if v, err := i.GetSystem(field); err == nil { + return v, err + } + + return i.getItemData(field) +} + +// GetSystemField возвращает описание поля для системных аттрибутов Item +func GetSystemField(fld string) (*field.Field, error) { + switch fld { + case "id", "space_id", "env_id", "collection_id", "revision_id": + return field.String(), nil + case "created_rev_at", "created_at", "updated_at", "published_at": + return field.Time(), nil + case "created_by", "updated_by", "published_by": + return field.String(), nil + case "hidden", "deleted", "template": + return field.Bool(), nil + } + + return nil, ErrNotSystemField +} + +// GetField возвращает значение поля +func GetField(field string, sch *schema.Schema) (*field.Field, error) { + if f, err := GetSystemField(field); err == nil { + return f, err + } + + f := sch.GetField(field) + if f == nil { + return nil, ErrIncorrectField + } + + return f, nil +} + +// GetSystemNamedFields возвращает описание всех системных полей Item +func GetSystemNamedFields() []field.NamedField { + 
fields := make([]field.NamedField, 0, len(SystemFields)) + for _, n := range SystemFields { + f := field.NamedField{Name: n} + f.Field, _ = GetSystemField(n) + fields = append(fields, f) + } + + return fields +} + +func ItemToProto(item *Item) *pb.Item { + if item == nil { + return nil + } + + protoItem := &pb.Item{ + Id: item.ID, + SpaceId: item.SpaceID, + EnvId: item.EnvID, + CollectionId: item.CollectionID, + State: pb.Item_State(item.State), + CreatedBy: item.CreatedBy, + UpdatedBy: item.UpdatedBy, + RevisionId: item.RevisionID, + PublishedBy: item.PublishedBy, + ArchivedBy: item.ArchivedBy, + Locale: item.Locale, + Hidden: item.Hidden, + Template: item.Template, + Deleted: item.Deleted, + } + + if item.Data != nil { + protoItem.Data, _ = structpb.NewStruct(item.Data) + } + if item.Translations != nil { + protoItem.Translations = make(map[string]*structpb.Struct, len(item.Translations)) + for k, v := range item.Translations { + protoItem.Translations[k], _ = structpb.NewStruct(v) + } + } + + protoItem.CreatedRevAt = timestamppb.New(item.CreatedRevAt) + protoItem.PublishedAt = timestamppb.New(item.PublishedAt) + protoItem.ArchivedAt = timestamppb.New(item.ArchivedAt) + protoItem.CreatedAt = timestamppb.New(item.CreatedAt) + protoItem.UpdatedAt = timestamppb.New(item.UpdatedAt) + + if item.Permissions != nil { + protoItem.Permissions = &pb.Permissions{ + Edit: item.Permissions.Edit, + Archive: item.Permissions.Archive, + Publish: item.Permissions.Publish, + SoftDelete: item.Permissions.SoftDelete, + HardDelete: item.Permissions.HardDelete, + } + } + + return protoItem +} + +func ItemFromProto(protoItem *pb.Item) *Item { + + if protoItem == nil { + return nil + } + + item := &Item{ + ID: protoItem.Id, + SpaceID: protoItem.SpaceId, + EnvID: protoItem.EnvId, + CollectionID: protoItem.CollectionId, + State: State(protoItem.State), + CreatedBy: protoItem.CreatedBy, + UpdatedBy: protoItem.UpdatedBy, + RevisionID: protoItem.RevisionId, + PublishedBy: 
protoItem.PublishedBy, + ArchivedBy: protoItem.ArchivedBy, + Locale: protoItem.Locale, + Hidden: protoItem.Hidden, + Template: protoItem.Template, + Deleted: protoItem.Deleted, + } + + if protoItem.Data != nil { + item.Data = protoItem.Data.AsMap() + } + + if protoItem.Translations != nil { + item.Translations = make(map[string]map[string]interface{}, len(protoItem.Translations)) + for k, v := range protoItem.Translations { + item.Translations[k] = v.AsMap() + } + } + + if protoItem.Permissions != nil { + item.Permissions = &Permissions{ + Edit: protoItem.Permissions.Edit, + Archive: protoItem.Permissions.Archive, + Publish: protoItem.Permissions.Publish, + SoftDelete: protoItem.Permissions.SoftDelete, + HardDelete: protoItem.Permissions.HardDelete, + } + } + + item.CreatedRevAt = protoItem.CreatedRevAt.AsTime() + item.PublishedAt = protoItem.PublishedAt.AsTime() + item.ArchivedAt = protoItem.ArchivedAt.AsTime() + item.CreatedAt = protoItem.CreatedAt.AsTime() + item.UpdatedAt = protoItem.UpdatedAt.AsTime() + + return item +} + +func GetItemIDs(arr []*Item) []string { + res := make([]string, len(arr)) + for i, e := range arr { + res[i] = e.ID + } + return res +} diff --git a/pkg/items/item_test.go b/pkg/items/item_test.go new file mode 100644 index 0000000000000000000000000000000000000000..fb54fc501f45281bbafd37983de3b8638d5692d4 --- /dev/null +++ b/pkg/items/item_test.go @@ -0,0 +1,61 @@ +package items + +import ( + "fmt" + "testing" + "time" + + "git.perx.ru/perxis/perxis-go/pkg/schema" + "git.perx.ru/perxis/perxis-go/pkg/schema/field" + "github.com/stretchr/testify/assert" +) + +func TestItem_Set(t *testing.T) { + item := &Item{} + + item.Set("id", "id") + assert.Equal(t, "id", item.ID) + now := time.Now() + + item.Set("created_at", now) + assert.Equal(t, now, item.CreatedAt) + + item.Set("a.b.c", 101) + assert.Equal(t, map[string]any{"a": map[string]any{"b": map[string]any{"c": 101}}}, item.Data) + +} + +func TestGetField(t *testing.T) { + sch := schema.New( + 
"a", field.String(), + "obj", field.Object( + "a", field.Number(field.NumberFormatFloat), + "b", field.String(), + ), + "arr", field.Array(field.Object("a", field.Time())), + ) + + tests := []struct { + name string + field string + want *field.Field + wantErr assert.ErrorAssertionFunc + }{ + {"Simple", "a", field.String(), assert.NoError}, + {"Incorrect field", "b", nil, assert.Error}, + {"Object", "obj", field.Object("a", field.Number(field.NumberFormatFloat), "b", field.String()), assert.NoError}, + {"Object path", "obj.a", field.Number(field.NumberFormatFloat), assert.NoError}, + {"Array", "arr", field.Array(field.Object("a", field.Time())), assert.NoError}, + {"Array path", "arr.a", field.Time(), assert.NoError}, + {"Array item", "arr.", field.Object("a", field.Time()), assert.NoError}, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := GetField(tt.field, sch) + if !tt.wantErr(t, err, fmt.Sprintf("GetField(%v, sch)", tt.field)) { + return + } + assert.Equalf(t, tt.want, got, "GetField(%v, sch)", tt.field) + }) + } +} diff --git a/pkg/items/mocks/Items.go b/pkg/items/mocks/Items.go new file mode 100644 index 0000000000000000000000000000000000000000..1d3ea35f22d13e65afc70e6c7cc847c60aa0f8a7 --- /dev/null +++ b/pkg/items/mocks/Items.go @@ -0,0 +1,538 @@ +// Code generated by mockery v2.14.0. DO NOT EDIT. 
+ +package mocks + +import ( + context "context" + + items "git.perx.ru/perxis/perxis-go/pkg/items" + schema "git.perx.ru/perxis/perxis-go/pkg/schema" + mock "github.com/stretchr/testify/mock" +) + +// Items is an autogenerated mock type for the Items type +type Items struct { + mock.Mock +} + +// Aggregate provides a mock function with given fields: ctx, spaceId, envId, collectionId, filter, options +func (_m *Items) Aggregate(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.AggregateOptions) (map[string]interface{}, error) { + _va := make([]interface{}, len(options)) + for _i := range options { + _va[_i] = options[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, spaceId, envId, collectionId, filter) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 map[string]interface{} + if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *items.Filter, ...*items.AggregateOptions) map[string]interface{}); ok { + r0 = rf(ctx, spaceId, envId, collectionId, filter, options...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]interface{}) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string, string, string, *items.Filter, ...*items.AggregateOptions) error); ok { + r1 = rf(ctx, spaceId, envId, collectionId, filter, options...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// AggregatePublished provides a mock function with given fields: ctx, spaceId, envId, collectionId, filter, options +func (_m *Items) AggregatePublished(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.AggregatePublishedOptions) (map[string]interface{}, error) { + _va := make([]interface{}, len(options)) + for _i := range options { + _va[_i] = options[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, spaceId, envId, collectionId, filter) + _ca = append(_ca, _va...) 
+ ret := _m.Called(_ca...) + + var r0 map[string]interface{} + if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *items.Filter, ...*items.AggregatePublishedOptions) map[string]interface{}); ok { + r0 = rf(ctx, spaceId, envId, collectionId, filter, options...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]interface{}) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string, string, string, *items.Filter, ...*items.AggregatePublishedOptions) error); ok { + r1 = rf(ctx, spaceId, envId, collectionId, filter, options...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Archive provides a mock function with given fields: ctx, item, options +func (_m *Items) Archive(ctx context.Context, item *items.Item, options ...*items.ArchiveOptions) error { + _va := make([]interface{}, len(options)) + for _i := range options { + _va[_i] = options[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, item) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.ArchiveOptions) error); ok { + r0 = rf(ctx, item, options...) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Create provides a mock function with given fields: ctx, item, opts +func (_m *Items) Create(ctx context.Context, item *items.Item, opts ...*items.CreateOptions) (*items.Item, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, item) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *items.Item + if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.CreateOptions) *items.Item); ok { + r0 = rf(ctx, item, opts...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*items.Item) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *items.Item, ...*items.CreateOptions) error); ok { + r1 = rf(ctx, item, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Delete provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, options +func (_m *Items) Delete(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.DeleteOptions) error { + _va := make([]interface{}, len(options)) + for _i := range options { + _va[_i] = options[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, spaceId, envId, collectionId, itemId) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, ...*items.DeleteOptions) error); ok { + r0 = rf(ctx, spaceId, envId, collectionId, itemId, options...) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Find provides a mock function with given fields: ctx, spaceId, envId, collectionId, filter, options +func (_m *Items) Find(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindOptions) ([]*items.Item, int, error) { + _va := make([]interface{}, len(options)) + for _i := range options { + _va[_i] = options[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, spaceId, envId, collectionId, filter) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 []*items.Item + if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *items.Filter, ...*items.FindOptions) []*items.Item); ok { + r0 = rf(ctx, spaceId, envId, collectionId, filter, options...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*items.Item) + } + } + + var r1 int + if rf, ok := ret.Get(1).(func(context.Context, string, string, string, *items.Filter, ...*items.FindOptions) int); ok { + r1 = rf(ctx, spaceId, envId, collectionId, filter, options...) + } else { + r1 = ret.Get(1).(int) + } + + var r2 error + if rf, ok := ret.Get(2).(func(context.Context, string, string, string, *items.Filter, ...*items.FindOptions) error); ok { + r2 = rf(ctx, spaceId, envId, collectionId, filter, options...) + } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} + +// FindArchived provides a mock function with given fields: ctx, spaceId, envId, collectionId, filter, options +func (_m *Items) FindArchived(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindArchivedOptions) ([]*items.Item, int, error) { + _va := make([]interface{}, len(options)) + for _i := range options { + _va[_i] = options[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, spaceId, envId, collectionId, filter) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 []*items.Item + if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *items.Filter, ...*items.FindArchivedOptions) []*items.Item); ok { + r0 = rf(ctx, spaceId, envId, collectionId, filter, options...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*items.Item) + } + } + + var r1 int + if rf, ok := ret.Get(1).(func(context.Context, string, string, string, *items.Filter, ...*items.FindArchivedOptions) int); ok { + r1 = rf(ctx, spaceId, envId, collectionId, filter, options...) + } else { + r1 = ret.Get(1).(int) + } + + var r2 error + if rf, ok := ret.Get(2).(func(context.Context, string, string, string, *items.Filter, ...*items.FindArchivedOptions) error); ok { + r2 = rf(ctx, spaceId, envId, collectionId, filter, options...) 
+ } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} + +// FindPublished provides a mock function with given fields: ctx, spaceId, envId, collectionId, filter, options +func (_m *Items) FindPublished(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindPublishedOptions) ([]*items.Item, int, error) { + _va := make([]interface{}, len(options)) + for _i := range options { + _va[_i] = options[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, spaceId, envId, collectionId, filter) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 []*items.Item + if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *items.Filter, ...*items.FindPublishedOptions) []*items.Item); ok { + r0 = rf(ctx, spaceId, envId, collectionId, filter, options...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*items.Item) + } + } + + var r1 int + if rf, ok := ret.Get(1).(func(context.Context, string, string, string, *items.Filter, ...*items.FindPublishedOptions) int); ok { + r1 = rf(ctx, spaceId, envId, collectionId, filter, options...) + } else { + r1 = ret.Get(1).(int) + } + + var r2 error + if rf, ok := ret.Get(2).(func(context.Context, string, string, string, *items.Filter, ...*items.FindPublishedOptions) error); ok { + r2 = rf(ctx, spaceId, envId, collectionId, filter, options...) + } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} + +// Get provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, options +func (_m *Items) Get(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.GetOptions) (*items.Item, error) { + _va := make([]interface{}, len(options)) + for _i := range options { + _va[_i] = options[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, spaceId, envId, collectionId, itemId) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
+ + var r0 *items.Item + if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, ...*items.GetOptions) *items.Item); ok { + r0 = rf(ctx, spaceId, envId, collectionId, itemId, options...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*items.Item) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string, string, string, string, ...*items.GetOptions) error); ok { + r1 = rf(ctx, spaceId, envId, collectionId, itemId, options...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetPublished provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, options +func (_m *Items) GetPublished(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.GetPublishedOptions) (*items.Item, error) { + _va := make([]interface{}, len(options)) + for _i := range options { + _va[_i] = options[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, spaceId, envId, collectionId, itemId) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *items.Item + if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, ...*items.GetPublishedOptions) *items.Item); ok { + r0 = rf(ctx, spaceId, envId, collectionId, itemId, options...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*items.Item) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string, string, string, string, ...*items.GetPublishedOptions) error); ok { + r1 = rf(ctx, spaceId, envId, collectionId, itemId, options...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetRevision provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, revisionId, options +func (_m *Items) GetRevision(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, revisionId string, options ...*items.GetRevisionOptions) (*items.Item, error) { + _va := make([]interface{}, len(options)) + for _i := range options { + _va[_i] = options[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, spaceId, envId, collectionId, itemId, revisionId) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *items.Item + if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, string, ...*items.GetRevisionOptions) *items.Item); ok { + r0 = rf(ctx, spaceId, envId, collectionId, itemId, revisionId, options...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*items.Item) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string, string, string, string, string, ...*items.GetRevisionOptions) error); ok { + r1 = rf(ctx, spaceId, envId, collectionId, itemId, revisionId, options...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Introspect provides a mock function with given fields: ctx, item, opts +func (_m *Items) Introspect(ctx context.Context, item *items.Item, opts ...*items.IntrospectOptions) (*items.Item, *schema.Schema, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, item) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *items.Item + if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.IntrospectOptions) *items.Item); ok { + r0 = rf(ctx, item, opts...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*items.Item) + } + } + + var r1 *schema.Schema + if rf, ok := ret.Get(1).(func(context.Context, *items.Item, ...*items.IntrospectOptions) *schema.Schema); ok { + r1 = rf(ctx, item, opts...) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*schema.Schema) + } + } + + var r2 error + if rf, ok := ret.Get(2).(func(context.Context, *items.Item, ...*items.IntrospectOptions) error); ok { + r2 = rf(ctx, item, opts...) + } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} + +// ListRevisions provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, options +func (_m *Items) ListRevisions(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.ListRevisionsOptions) ([]*items.Item, error) { + _va := make([]interface{}, len(options)) + for _i := range options { + _va[_i] = options[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, spaceId, envId, collectionId, itemId) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 []*items.Item + if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, ...*items.ListRevisionsOptions) []*items.Item); ok { + r0 = rf(ctx, spaceId, envId, collectionId, itemId, options...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*items.Item) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string, string, string, string, ...*items.ListRevisionsOptions) error); ok { + r1 = rf(ctx, spaceId, envId, collectionId, itemId, options...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Publish provides a mock function with given fields: ctx, item, options +func (_m *Items) Publish(ctx context.Context, item *items.Item, options ...*items.PublishOptions) error { + _va := make([]interface{}, len(options)) + for _i := range options { + _va[_i] = options[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, item) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.PublishOptions) error); ok { + r0 = rf(ctx, item, options...) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Unarchive provides a mock function with given fields: ctx, item, options +func (_m *Items) Unarchive(ctx context.Context, item *items.Item, options ...*items.UnarchiveOptions) error { + _va := make([]interface{}, len(options)) + for _i := range options { + _va[_i] = options[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, item) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.UnarchiveOptions) error); ok { + r0 = rf(ctx, item, options...) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Undelete provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, options +func (_m *Items) Undelete(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.UndeleteOptions) error { + _va := make([]interface{}, len(options)) + for _i := range options { + _va[_i] = options[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, spaceId, envId, collectionId, itemId) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, ...*items.UndeleteOptions) error); ok { + r0 = rf(ctx, spaceId, envId, collectionId, itemId, options...) 
+ } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Unpublish provides a mock function with given fields: ctx, item, options +func (_m *Items) Unpublish(ctx context.Context, item *items.Item, options ...*items.UnpublishOptions) error { + _va := make([]interface{}, len(options)) + for _i := range options { + _va[_i] = options[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, item) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.UnpublishOptions) error); ok { + r0 = rf(ctx, item, options...) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Update provides a mock function with given fields: ctx, item, options +func (_m *Items) Update(ctx context.Context, item *items.Item, options ...*items.UpdateOptions) error { + _va := make([]interface{}, len(options)) + for _i := range options { + _va[_i] = options[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, item) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.UpdateOptions) error); ok { + r0 = rf(ctx, item, options...) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +type mockConstructorTestingTNewItems interface { + mock.TestingT + Cleanup(func()) +} + +// NewItems creates a new instance of Items. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +func NewItems(t mockConstructorTestingTNewItems) *Items { + mock := &Items{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/pkg/items/mocks/PreSaver.go b/pkg/items/mocks/PreSaver.go new file mode 100644 index 0000000000000000000000000000000000000000..6010e17bd14db83518507ac53ab35076a6cbc5bf --- /dev/null +++ b/pkg/items/mocks/PreSaver.go @@ -0,0 +1,62 @@ +// Code generated by mockery v2.14.0. DO NOT EDIT. 
+ +package mocks + +import ( + context "context" + + items "git.perx.ru/perxis/perxis-go/pkg/items" + field "git.perx.ru/perxis/perxis-go/pkg/schema/field" + + mock "github.com/stretchr/testify/mock" +) + +// PreSaver is an autogenerated mock type for the PreSaver type +type PreSaver struct { + mock.Mock +} + +// PreSave provides a mock function with given fields: ctx, f, v, itemCtx +func (_m *PreSaver) PreSave(ctx context.Context, f *field.Field, v interface{}, itemCtx *items.Context) (interface{}, bool, error) { + ret := _m.Called(ctx, f, v, itemCtx) + + var r0 interface{} + if rf, ok := ret.Get(0).(func(context.Context, *field.Field, interface{}, *items.Context) interface{}); ok { + r0 = rf(ctx, f, v, itemCtx) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(interface{}) + } + } + + var r1 bool + if rf, ok := ret.Get(1).(func(context.Context, *field.Field, interface{}, *items.Context) bool); ok { + r1 = rf(ctx, f, v, itemCtx) + } else { + r1 = ret.Get(1).(bool) + } + + var r2 error + if rf, ok := ret.Get(2).(func(context.Context, *field.Field, interface{}, *items.Context) error); ok { + r2 = rf(ctx, f, v, itemCtx) + } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} + +type mockConstructorTestingTNewPreSaver interface { + mock.TestingT + Cleanup(func()) +} + +// NewPreSaver creates a new instance of PreSaver. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. 
+func NewPreSaver(t mockConstructorTestingTNewPreSaver) *PreSaver { + mock := &PreSaver{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/pkg/items/options.go b/pkg/items/options.go new file mode 100644 index 0000000000000000000000000000000000000000..d48a1cdaacb157e46c1185a37602c1703bb3b59e --- /dev/null +++ b/pkg/items/options.go @@ -0,0 +1,422 @@ +package items + +import "git.perx.ru/perxis/perxis-go/pkg/options" + +type Options struct { + Env map[string]interface{} + Filter []string + PermissionsFilter []string +} + +func MergeOptions(opts ...Options) Options { + o := Options{ + Env: make(map[string]interface{}), + Filter: make([]string, 0), + } + + for _, opt := range opts { + + for k, v := range opt.Env { + o.Env[k] = v + } + + o.Filter = append(o.Filter, opt.Filter...) + o.PermissionsFilter = append(o.PermissionsFilter, opt.PermissionsFilter...) + } + + return o +} + +type CreateOptions struct { + Options + + UpdateAttrs bool +} + +func MergeCreateOptions(opts ...*CreateOptions) *CreateOptions { + o := &CreateOptions{} + for _, opt := range opts { + if opt == nil { + continue + } + if opt.UpdateAttrs { + o.UpdateAttrs = true + } + + o.Options = MergeOptions(o.Options, opt.Options) + } + return o +} + +type IntrospectOptions struct { + Options + Locale string +} + +func MergeIntrospectOptions(opts ...*IntrospectOptions) *IntrospectOptions { + o := &IntrospectOptions{} + for _, opt := range opts { + if opt == nil { + continue + } + o.Options = MergeOptions(o.Options, opt.Options) + } + return o +} + +type GetOptions struct { + Options +} + +func MergeGetOptions(opts ...*GetOptions) *GetOptions { + o := &GetOptions{} + for _, opt := range opts { + if opt == nil { + continue + } + o.Options = MergeOptions(o.Options, opt.Options) + } + return o +} + +type FindOptions struct { + Options + options.FindOptions + Deleted bool + Regular bool + Hidden bool + Templates bool +} + +func NewFindOptions(opts 
...interface{}) *FindOptions { + fo := &FindOptions{} + fo.FindOptions = *options.MergeFindOptions(opts...) + return fo +} + +func MergeFindOptions(opts ...*FindOptions) *FindOptions { + o := NewFindOptions() + for _, opt := range opts { + if opt == nil { + continue + } + o.Regular = o.Regular || opt.Regular + o.Templates = o.Templates || opt.Templates + o.Hidden = o.Hidden || opt.Hidden + o.Deleted = o.Deleted || opt.Deleted + o.Options = MergeOptions(o.Options, opt.Options) + o.FindOptions = *options.MergeFindOptions(&o.FindOptions, &opt.FindOptions) + } + return o +} + +type UpdateOptions struct { + Options + + UpdateAttrs bool +} + +func MergeUpdateOptions(opts ...*UpdateOptions) *UpdateOptions { + o := &UpdateOptions{} + for _, opt := range opts { + if opt == nil { + continue + } + if opt.UpdateAttrs { + o.UpdateAttrs = true + } + + o.Options = MergeOptions(o.Options, opt.Options) + } + return o +} + +type DeleteOptions struct { + Options + + Erase bool +} + +func MergeDeleteOptions(opts ...*DeleteOptions) *DeleteOptions { + o := &DeleteOptions{} + for _, opt := range opts { + if opt == nil { + continue + } + if opt.Erase { + o.Erase = true + } + + o.Options = MergeOptions(o.Options, opt.Options) + } + return o +} + +type SoftDeleteOptions struct { + Options +} + +func MergeSoftDeleteOptions(opts ...*SoftDeleteOptions) *SoftDeleteOptions { + o := &SoftDeleteOptions{} + for _, opt := range opts { + if opt == nil { + continue + } + o.Options = MergeOptions(o.Options, opt.Options) + } + return o +} + +type UndeleteOptions struct { + Options +} + +func MergeUndeleteOptions(opts ...*UndeleteOptions) *UndeleteOptions { + o := &UndeleteOptions{} + for _, opt := range opts { + if opt == nil { + continue + } + o.Options = MergeOptions(o.Options, opt.Options) + } + return o +} + +type PublishOptions struct { + Options + + UpdateAttrs bool +} + +func MergePublishOptions(opts ...*PublishOptions) *PublishOptions { + o := &PublishOptions{} + for _, opt := range opts { + if 
opt == nil { + continue + } + if opt.UpdateAttrs { + o.UpdateAttrs = true + } + + o.Options = MergeOptions(o.Options, opt.Options) + } + return o +} + +type UnpublishOptions struct { + Options +} + +func MergeUnpublishOptions(opts ...*UnpublishOptions) *UnpublishOptions { + o := &UnpublishOptions{} + for _, opt := range opts { + if opt == nil { + continue + } + o.Options = MergeOptions(o.Options, opt.Options) + } + return o +} + +type GetPublishedOptions struct { + Options + LocaleID string +} + +func NewGetPublishedOptions(oo ...interface{}) *GetPublishedOptions { + fo := &GetPublishedOptions{} + for _, o := range oo { + switch o := o.(type) { + case string: + fo.LocaleID = o + } + } + return fo +} + +func MergeGetPublishedOptions(opts ...*GetPublishedOptions) *GetPublishedOptions { + o := &GetPublishedOptions{} + for _, opt := range opts { + if opt == nil { + continue + } + o.Options = MergeOptions(o.Options, opt.Options) + if opt.LocaleID != "" { + o.LocaleID = opt.LocaleID + } + } + return o +} + +type FindPublishedOptions struct { + Options + options.FindOptions + LocaleID string + Regular bool + Hidden bool + Templates bool +} + +func NewFindPublishedOptions(opts ...interface{}) *FindPublishedOptions { + fo := &FindPublishedOptions{} + for _, o := range opts { + switch o := o.(type) { + case string: + fo.LocaleID = o + } + } + + fo.FindOptions = *options.MergeFindOptions(opts...) 
+ return fo +} + +func MergeFindPublishedOptions(opts ...*FindPublishedOptions) *FindPublishedOptions { + o := NewFindPublishedOptions() + for _, opt := range opts { + if opt == nil { + continue + } + o.Regular = o.Regular || opt.Regular + o.Templates = o.Templates || opt.Templates + o.Hidden = o.Hidden || opt.Hidden + o.Options = MergeOptions(o.Options, opt.Options) + o.FindOptions = *options.MergeFindOptions(&o.FindOptions, &opt.FindOptions) + + if opt.LocaleID != "" { + o.LocaleID = opt.LocaleID + } + } + return o +} + +type GetRevisionOptions struct { + Options +} + +func MergeGetRevisionOptions(opts ...*GetRevisionOptions) *GetRevisionOptions { + o := &GetRevisionOptions{} + for _, opt := range opts { + if opt == nil { + continue + } + o.Options = MergeOptions(o.Options, opt.Options) + } + return o +} + +type ListRevisionsOptions struct { + Options + options.FindOptions +} + +func MergeListRevisionsOptions(opts ...*ListRevisionsOptions) *ListRevisionsOptions { + o := &ListRevisionsOptions{} + for _, opt := range opts { + if opt == nil { + continue + } + o.Options = MergeOptions(o.Options, opt.Options) + o.FindOptions = *options.MergeFindOptions(&o.FindOptions, &opt.FindOptions) + } + return o +} + +type ArchiveOptions struct { + Options +} + +func MergeArchiveOptions(opts ...*ArchiveOptions) *ArchiveOptions { + o := &ArchiveOptions{} + for _, opt := range opts { + if opt == nil { + continue + } + o.Options = MergeOptions(o.Options, opt.Options) + } + return o +} + +type FindArchivedOptions struct { + Options + options.FindOptions +} + +func NewFindArchivedOptions(oo ...interface{}) *FindArchivedOptions { + fo := &FindArchivedOptions{} + fo.FindOptions = *options.MergeFindOptions(oo...) 
+	return fo
+}
+
+func MergeFindArchivedOptions(opts ...*FindArchivedOptions) *FindArchivedOptions {
+	o := NewFindArchivedOptions()
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+		o.FindOptions = *options.MergeFindOptions(&o.FindOptions, &opt.FindOptions)
+	}
+	return o
+}
+
+type UnarchiveOptions struct {
+	Options
+}
+
+func MergeUnarchiveOptions(opts ...*UnarchiveOptions) *UnarchiveOptions {
+	o := &UnarchiveOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type AggregateOptions struct {
+	Options
+	options.SortOptions
+
+	// Fields lists the fields to be returned or computed in the result.
+	// Key (string) is the name under which the computed value is returned.
+	// Value (string) is an expression whose evaluation produces the value.
+	// Expression functions (for a field F of type T):
+	// - distinct(F) - all values of the field, result type []T
+	// - min(F) - minimum value of the field, result type T
+	// - max(F) - maximum value of the field, result type T
+	// - avg(F) - average of the field values, result type T
+	// - sum(F) - sum of the field values, result type T
+	// - count() - number of records, result type int
+	Fields map[string]string
+}
+
+func MergeAggregateOptions(opts ...*AggregateOptions) *AggregateOptions {
+	o := &AggregateOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+
+		if o.Fields == nil {
+			o.Fields = opt.Fields
+			continue
+		}
+		for k, v := range opt.Fields {
+			o.Fields[k] = v
+		}
+	}
+	return o
+}
+
+type AggregatePublishedOptions AggregateOptions
+
+func MergeAggregatePublishedOptions(opts ...*AggregatePublishedOptions) *AggregatePublishedOptions {
+	ao := make([]*AggregateOptions, len(opts))
+	for i, opt := range opts {
+		ao[i] = (*AggregateOptions)(opt)
+	}
+	merged :=
MergeAggregateOptions(ao...)
+	return (*AggregatePublishedOptions)(merged)
+}
diff --git a/pkg/items/service.go b/pkg/items/service.go
new file mode 100644
index 0000000000000000000000000000000000000000..c10a69c55878a1fbfe571dc0fad594c338dccb99
--- /dev/null
+++ b/pkg/items/service.go
@@ -0,0 +1,151 @@
+package items
+
+import (
+	"context"
+	"regexp"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/filter"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+)
+
+// @microgen grpc
+// @protobuf git.perx.ru/perxis/perxis-go/proto/items
+// @grpc-addr content.items.Items
+type Items interface {
+	Create(ctx context.Context, item *Item, opts ...*CreateOptions) (created *Item, err error)
+	Introspect(ctx context.Context, item *Item, opts ...*IntrospectOptions) (itm *Item, sch *schema.Schema, err error)
+	Get(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*GetOptions) (item *Item, err error)
+	Find(ctx context.Context, spaceId, envId, collectionId string, filter *Filter, options ...*FindOptions) (items []*Item, total int, err error)
+	Update(ctx context.Context, item *Item, options ...*UpdateOptions) (err error)
+
+	// Delete removes an item.
+	// If the DeleteOptions.Erase flag is set, the data is permanently removed from the system.
+	// Otherwise a "soft delete" is performed: the item is marked as deleted and can be restored with Items.Undelete and retrieved via Items.Get/Find.
+	Delete(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*DeleteOptions) (err error)
+
+	// Undelete restores items after a "soft delete".
+	Undelete(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*UndeleteOptions) (err error)
+
+	Publish(ctx context.Context, item *Item, options ...*PublishOptions) (err error)
+	Unpublish(ctx context.Context, item *Item, options ...*UnpublishOptions) (err error)
+	GetPublished(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*GetPublishedOptions) (item *Item, err error)
+	FindPublished(ctx context.Context, spaceId, envId, collectionId string, filter *Filter, options ...*FindPublishedOptions) (items []*Item, total int, err error)
+
+	GetRevision(ctx context.Context, spaceId, envId, collectionId, itemId, revisionId string, options ...*GetRevisionOptions) (item *Item, err error)
+	ListRevisions(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*ListRevisionsOptions) (items []*Item, err error)
+
+	Archive(ctx context.Context, item *Item, options ...*ArchiveOptions) (err error)
+	FindArchived(ctx context.Context, spaceId, envId, collectionId string, filter *Filter, options ...*FindArchivedOptions) (items []*Item, total int, err error)
+	Unarchive(ctx context.Context, item *Item, options ...*UnarchiveOptions) (err error)
+
+	// Aggregate performs data aggregation.
+	Aggregate(ctx context.Context, spaceId, envId, collectionId string, filter *Filter, options ...*AggregateOptions) (result map[string]interface{}, err error)
+	// AggregatePublished performs aggregation over published data.
+	AggregatePublished(ctx context.Context, spaceId, envId, collectionId string, filter *Filter, options ...*AggregatePublishedOptions) (result
 map[string]interface{}, err error)
+}
+
+// PreSaver is an interface a field can implement to receive the PreSave event before an Item is saved to Storage
+type PreSaver interface {
+ PreSave(ctx context.Context, f *field.Field, v interface{}, itemCtx *Context) (interface{}, bool, error)
+}
+
+type Filter struct {
+ ID []string
+ Data []*filter.Filter
+ Search string // Search query; only one query is supported at a time
+ Q []string
+}
+
+func NewFilter(params ...interface{}) *Filter {
+ f := &Filter{}
+ for _, p := range params {
+ switch v := p.(type) {
+ case *filter.Filter:
+ f.Data = append(f.Data, v)
+ case string:
+ f.Q = append(f.Q, v)
+ }
+ }
+ return f
+}
+
+// AggregateExpRe is the format an aggregation expression must match, e.g. "func(field)"
+var AggregateExpRe = regexp.MustCompile(`([a-zA-Z]+)\((.*)\)`)
+
+func ParseAggregateExp(exp string) (string, string, bool) {
+ ss := AggregateExpRe.FindAllStringSubmatch(exp, -1)
+ if len(ss) == 0 || len(ss[0]) < 2 {
+ return "", "", false
+ }
+ return ss[0][1], ss[0][2], true
+}
+
+func DecodeAggregateResult(ctx context.Context, request map[string]string, r map[string]interface{}, s *schema.Schema) (map[string]interface{}, error) {
+ result := make(map[string]interface{}, len(r))
+ for outputField, exp := range request {
+
+ funcName, fldName, ok := ParseAggregateExp(exp)
+ if !ok || fldName == "" {
+ if v, ok := r[outputField]; ok {
+ result[outputField] = v
+ }
+ continue
+ }
+
+ schemaFld := s.GetField(fldName)
+ if schemaFld == nil {
+ if v, ok := r[outputField]; ok {
+ result[outputField] = v
+ }
+ continue
+ }
+
+ if funcName == "distinct" {
+ schemaFld = field.Array(schemaFld)
+ }
+
+ data, err := schema.Decode(ctx, schemaFld, r[outputField])
+ if err != nil {
+ return nil, errors.Wrapf(err, "decode data for field '%s'", outputField)
+ }
+ result[outputField] = data
+ }
+
+ return result, nil
+}
+
+func EncodeAggregateResult(ctx context.Context, request map[string]string, r
map[string]interface{}, s *schema.Schema) (map[string]interface{}, error) {
+ result := make(map[string]interface{}, len(r))
+ for outputField, exp := range request {
+
+ funcName, fldName, ok := ParseAggregateExp(exp)
+ if !ok || fldName == "" {
+ if v, ok := r[outputField]; ok {
+ result[outputField] = v
+ }
+ continue
+ }
+
+ schemaFld := s.GetField(fldName)
+ if schemaFld == nil {
+ if v, ok := r[outputField]; ok {
+ result[outputField] = v
+ }
+ continue
+ }
+
+ if funcName == "distinct" {
+ schemaFld = field.Array(schemaFld)
+ }
+
+ data, err := schema.Encode(ctx, schemaFld, r[outputField])
+ if err != nil {
+ return nil, errors.Wrapf(err, "encode data for field '%s'", outputField)
+ }
+ result[outputField] = data
+ }
+
+ return result, nil
+}
diff --git a/pkg/items/transport/client.go b/pkg/items/transport/client.go
new file mode 100644
index 0000000000000000000000000000000000000000..3f6bd04ceab90dad415d963c6db3d1a9f4fb4b47
--- /dev/null
+++ b/pkg/items/transport/client.go
@@ -0,0 +1,266 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+ +package transport + +import ( + "context" + + "git.perx.ru/perxis/perxis-go/pkg/errors" + "git.perx.ru/perxis/perxis-go/pkg/items" + "git.perx.ru/perxis/perxis-go/pkg/schema" + "github.com/hashicorp/go-multierror" + "google.golang.org/grpc/status" +) + +func (set EndpointsSet) Create(arg0 context.Context, arg1 *items.Item, arg2 ...*items.CreateOptions) (res0 *items.Item, res1 error) { + request := CreateRequest{ + Item: arg1, + Opts: arg2, + } + response, res1 := set.CreateEndpoint(arg0, &request) + if res1 != nil { + return + } + return response.(*CreateResponse).Created, res1 +} + +func (set EndpointsSet) Introspect(arg0 context.Context, arg1 *items.Item, arg2 ...*items.IntrospectOptions) (res0 *items.Item, res1 *schema.Schema, res2 error) { + request := IntrospectRequest{ + Item: arg1, + Opts: arg2, + } + response, res2 := set.IntrospectEndpoint(arg0, &request) + if res2 != nil { + return + } + resp := response.(*IntrospectResponse) + + if len(resp.ValidationErrors) > 0 { + var merr *multierror.Error + for _, err := range resp.ValidationErrors { + var fieldErr errors.FieldError + if errors.As(err, &fieldErr) { + merr = multierror.Append(merr, fieldErr) + } + } + + res2 = errors.Wrap(merr, "validation error") + + } + return resp.Item, resp.Schema, res2 +} + +func (set EndpointsSet) Get(arg0 context.Context, arg1, arg2, arg3, arg4 string, arg5 ...*items.GetOptions) (res0 *items.Item, res1 error) { + request := GetRequest{ + CollectionId: arg3, + EnvId: arg2, + ItemId: arg4, + SpaceId: arg1, + Options: arg5, + } + response, res1 := set.GetEndpoint(arg0, &request) + if res1 != nil { + return + } + return response.(*GetResponse).Item, res1 +} + +func (set EndpointsSet) Find(arg0 context.Context, arg1, arg2, arg3 string, arg4 *items.Filter, arg5 ...*items.FindOptions) (res0 []*items.Item, res1 int, res2 error) { + request := FindRequest{ + CollectionId: arg3, + EnvId: arg2, + Filter: arg4, + Options: arg5, + SpaceId: arg1, + } + response, res2 := 
set.FindEndpoint(arg0, &request) + if res2 != nil { + return + } + return response.(*FindResponse).Items, response.(*FindResponse).Total, res2 +} + +func (set EndpointsSet) Update(arg0 context.Context, arg1 *items.Item, arg2 ...*items.UpdateOptions) (res0 error) { + request := UpdateRequest{Item: arg1, Options: arg2} + _, res0 = set.UpdateEndpoint(arg0, &request) + if res0 != nil { + return + } + return res0 +} + +func (set EndpointsSet) Delete(arg0 context.Context, arg1, arg2, arg3, arg4 string, options ...*items.DeleteOptions) (res0 error) { + request := DeleteRequest{ + CollectionId: arg3, + EnvId: arg2, + ItemId: arg4, + SpaceId: arg1, + Options: options, + } + _, res0 = set.DeleteEndpoint(arg0, &request) + if res0 != nil { + return + } + return res0 +} + +func (set EndpointsSet) Undelete(arg0 context.Context, arg1, arg2, arg3, arg4 string, options ...*items.UndeleteOptions) (res0 error) { + request := UndeleteRequest{ + CollectionId: arg3, + EnvId: arg2, + ItemId: arg4, + SpaceId: arg1, + Options: options, + } + _, res0 = set.UndeleteEndpoint(arg0, &request) + if res0 != nil { + return + } + return res0 +} + +func (set EndpointsSet) Publish(arg0 context.Context, arg1 *items.Item, arg2 ...*items.PublishOptions) (res0 error) { + request := PublishRequest{Item: arg1, Options: arg2} + _, res0 = set.PublishEndpoint(arg0, &request) + if res0 != nil { + return + } + return res0 +} + +func (set EndpointsSet) Unpublish(arg0 context.Context, arg1 *items.Item, arg2 ...*items.UnpublishOptions) (res0 error) { + request := UnpublishRequest{Item: arg1, Options: arg2} + _, res0 = set.UnpublishEndpoint(arg0, &request) + if res0 != nil { + return + } + return res0 +} + +func (set EndpointsSet) GetPublished(arg0 context.Context, arg1, arg2, arg3, arg4 string, arg5 ...*items.GetPublishedOptions) (res0 *items.Item, res1 error) { + request := GetPublishedRequest{ + CollectionId: arg3, + EnvId: arg2, + ItemId: arg4, + SpaceId: arg1, + Options: arg5, + } + response, res1 := 
set.GetPublishedEndpoint(arg0, &request) + if res1 != nil { + return + } + return response.(*GetPublishedResponse).Item, res1 +} + +func (set EndpointsSet) FindPublished(arg0 context.Context, arg1, arg2, arg3 string, arg4 *items.Filter, arg5 ...*items.FindPublishedOptions) (res0 []*items.Item, res1 int, res2 error) { + request := FindPublishedRequest{ + CollectionId: arg3, + EnvId: arg2, + Filter: arg4, + Options: arg5, + SpaceId: arg1, + } + response, res2 := set.FindPublishedEndpoint(arg0, &request) + if res2 != nil { + return + } + return response.(*FindPublishedResponse).Items, response.(*FindPublishedResponse).Total, res2 +} + +func (set EndpointsSet) GetRevision(arg0 context.Context, arg1 string, arg2 string, arg3 string, arg4 string, arg5 string, arg6 ...*items.GetRevisionOptions) (res0 *items.Item, res1 error) { + request := GetRevisionRequest{ + CollectionId: arg3, + EnvId: arg2, + ItemId: arg4, + RevisionId: arg5, + SpaceId: arg1, + Options: arg6, + } + response, res1 := set.GetRevisionEndpoint(arg0, &request) + if res1 != nil { + return + } + return response.(*GetRevisionResponse).Item, res1 +} + +func (set EndpointsSet) ListRevisions(arg0 context.Context, arg1, arg2, arg3, arg4 string, arg5 ...*items.ListRevisionsOptions) (res0 []*items.Item, res1 error) { + request := ListRevisionsRequest{ + CollectionId: arg3, + EnvId: arg2, + ItemId: arg4, + SpaceId: arg1, + Options: arg5, + } + response, res1 := set.ListRevisionsEndpoint(arg0, &request) + if res1 != nil { + return + } + return response.(*ListRevisionsResponse).Items, res1 +} + +func (set EndpointsSet) Archive(arg0 context.Context, arg1 *items.Item, arg2 ...*items.ArchiveOptions) (res0 error) { + request := ArchiveRequest{Item: arg1, Options: arg2} + _, res0 = set.ArchiveEndpoint(arg0, &request) + if res0 != nil { + return + } + return res0 +} + +func (set EndpointsSet) FindArchived(arg0 context.Context, arg1, arg2, arg3 string, arg4 *items.Filter, arg5 ...*items.FindArchivedOptions) (res0 
[]*items.Item, res1 int, res2 error) { + request := FindArchivedRequest{ + CollectionId: arg3, + EnvId: arg2, + Options: arg5, + Filter: arg4, + SpaceId: arg1, + } + response, res2 := set.FindArchivedEndpoint(arg0, &request) + if res2 != nil { + return + } + return response.(*FindArchivedResponse).Items, response.(*FindArchivedResponse).Total, res2 +} + +func (set EndpointsSet) Unarchive(arg0 context.Context, arg1 *items.Item, arg2 ...*items.UnarchiveOptions) (res0 error) { + request := UnarchiveRequest{Item: arg1, Options: arg2} + _, res0 = set.UnarchiveEndpoint(arg0, &request) + if res0 != nil { + return + } + return res0 +} + +func (set EndpointsSet) Aggregate(arg0 context.Context, arg1, arg2, arg3 string, arg4 *items.Filter, arg5 ...*items.AggregateOptions) (res0 map[string]interface{}, res1 error) { + request := AggregateRequest{ + CollectionId: arg3, + EnvId: arg2, + Filter: arg4, + Options: arg5, + SpaceId: arg1, + } + response, res1 := set.AggregateEndpoint(arg0, &request) + if res1 != nil { + return + } + return response.(*AggregateResponse).Result, res1 +} + +func (set EndpointsSet) AggregatePublished(arg0 context.Context, arg1, arg2, arg3 string, arg4 *items.Filter, arg5 ...*items.AggregatePublishedOptions) (res0 map[string]interface{}, res1 error) { + request := AggregatePublishedRequest{ + CollectionId: arg3, + EnvId: arg2, + Filter: arg4, + Options: arg5, + SpaceId: arg1, + } + response, res1 := set.AggregatePublishedEndpoint(arg0, &request) + + if res1 != nil { + if e, ok := status.FromError(res1); ok { + res1 = errors.New(e.Message()) + } + return + } + return response.(*AggregatePublishedResponse).Result, res1 +} diff --git a/pkg/items/transport/endpoints.microgen.go b/pkg/items/transport/endpoints.microgen.go new file mode 100644 index 0000000000000000000000000000000000000000..5a6e8d5a678cd7180deca17a97f615fe7793ff6e --- /dev/null +++ b/pkg/items/transport/endpoints.microgen.go @@ -0,0 +1,27 @@ +// Code generated by microgen 0.9.1. DO NOT EDIT. 
+ +package transport + +import endpoint "github.com/go-kit/kit/endpoint" + +// EndpointsSet implements Items API and used for transport purposes. +type EndpointsSet struct { + CreateEndpoint endpoint.Endpoint + IntrospectEndpoint endpoint.Endpoint + GetEndpoint endpoint.Endpoint + FindEndpoint endpoint.Endpoint + UpdateEndpoint endpoint.Endpoint + DeleteEndpoint endpoint.Endpoint + UndeleteEndpoint endpoint.Endpoint + PublishEndpoint endpoint.Endpoint + UnpublishEndpoint endpoint.Endpoint + GetPublishedEndpoint endpoint.Endpoint + FindPublishedEndpoint endpoint.Endpoint + GetRevisionEndpoint endpoint.Endpoint + ListRevisionsEndpoint endpoint.Endpoint + ArchiveEndpoint endpoint.Endpoint + FindArchivedEndpoint endpoint.Endpoint + UnarchiveEndpoint endpoint.Endpoint + AggregateEndpoint endpoint.Endpoint + AggregatePublishedEndpoint endpoint.Endpoint +} diff --git a/pkg/items/transport/exchanges.microgen.go b/pkg/items/transport/exchanges.microgen.go new file mode 100644 index 0000000000000000000000000000000000000000..b601946f74837d41df38b07e3c5887ba8698b183 --- /dev/null +++ b/pkg/items/transport/exchanges.microgen.go @@ -0,0 +1,186 @@ +// Code generated by microgen 0.9.1. DO NOT EDIT. + +package transport + +import ( + items "git.perx.ru/perxis/perxis-go/pkg/items" + schema "git.perx.ru/perxis/perxis-go/pkg/schema" +) + +type ( + CreateRequest struct { + Item *items.Item `json:"item"` + Opts []*items.CreateOptions `json:"opts"` // This field was defined with ellipsis (...). + } + CreateResponse struct { + Created *items.Item `json:"created"` + } + + IntrospectRequest struct { + Item *items.Item `json:"item"` + Opts []*items.IntrospectOptions `json:"opts"` // This field was defined with ellipsis (...). 
+ } + IntrospectResponse struct { + Item *items.Item `json:"item"` + Schema *schema.Schema `json:"schema"` + ValidationErrors []error `json:"validation_errors"` + } + + GetRequest struct { + SpaceId string `json:"space_id"` + EnvId string `json:"env_id"` + CollectionId string `json:"collection_id"` + ItemId string `json:"item_id"` + Options []*items.GetOptions `json:"options"` // This field was defined with ellipsis (...). + } + GetResponse struct { + Item *items.Item `json:"item"` + } + + FindRequest struct { + SpaceId string `json:"space_id"` + EnvId string `json:"env_id"` + CollectionId string `json:"collection_id"` + Filter *items.Filter `json:"filter"` + Options []*items.FindOptions `json:"options"` // This field was defined with ellipsis (...). + } + FindResponse struct { + Items []*items.Item `json:"items"` + Total int `json:"total"` + } + + UpdateRequest struct { + Item *items.Item `json:"item"` + Options []*items.UpdateOptions `json:"options"` // This field was defined with ellipsis (...). + } + // Formal exchange type, please do not delete. + UpdateResponse struct{} + + DeleteRequest struct { + SpaceId string `json:"space_id"` + EnvId string `json:"env_id"` + CollectionId string `json:"collection_id"` + ItemId string `json:"item_id"` + Options []*items.DeleteOptions `json:"options"` // This field was defined with ellipsis (...). + } + // Formal exchange type, please do not delete. + DeleteResponse struct{} + + UndeleteRequest struct { + SpaceId string `json:"space_id"` + EnvId string `json:"env_id"` + CollectionId string `json:"collection_id"` + ItemId string `json:"item_id"` + Options []*items.UndeleteOptions `json:"options"` // This field was defined with ellipsis (...). + } + // Formal exchange type, please do not delete. + UndeleteResponse struct{} + + PublishRequest struct { + Item *items.Item `json:"item"` + Options []*items.PublishOptions `json:"options"` // This field was defined with ellipsis (...). 
+ } + // Formal exchange type, please do not delete. + PublishResponse struct{} + + UnpublishRequest struct { + Item *items.Item `json:"item"` + Options []*items.UnpublishOptions `json:"options"` // This field was defined with ellipsis (...). + } + // Formal exchange type, please do not delete. + UnpublishResponse struct{} + + GetPublishedRequest struct { + SpaceId string `json:"space_id"` + EnvId string `json:"env_id"` + CollectionId string `json:"collection_id"` + ItemId string `json:"item_id"` + Options []*items.GetPublishedOptions `json:"options"` // This field was defined with ellipsis (...). + } + GetPublishedResponse struct { + Item *items.Item `json:"item"` + } + + FindPublishedRequest struct { + SpaceId string `json:"space_id"` + EnvId string `json:"env_id"` + CollectionId string `json:"collection_id"` + Filter *items.Filter `json:"filter"` + Options []*items.FindPublishedOptions `json:"options"` // This field was defined with ellipsis (...). + } + FindPublishedResponse struct { + Items []*items.Item `json:"items"` + Total int `json:"total"` + } + + GetRevisionRequest struct { + SpaceId string `json:"space_id"` + EnvId string `json:"env_id"` + CollectionId string `json:"collection_id"` + ItemId string `json:"item_id"` + RevisionId string `json:"revision_id"` + Options []*items.GetRevisionOptions `json:"options"` // This field was defined with ellipsis (...). + } + GetRevisionResponse struct { + Item *items.Item `json:"item"` + } + + ListRevisionsRequest struct { + SpaceId string `json:"space_id"` + EnvId string `json:"env_id"` + CollectionId string `json:"collection_id"` + ItemId string `json:"item_id"` + Options []*items.ListRevisionsOptions `json:"options"` // This field was defined with ellipsis (...). + } + ListRevisionsResponse struct { + Items []*items.Item `json:"items"` + } + + ArchiveRequest struct { + Item *items.Item `json:"item"` + Options []*items.ArchiveOptions `json:"options"` // This field was defined with ellipsis (...). 
+ } + // Formal exchange type, please do not delete. + ArchiveResponse struct{} + + FindArchivedRequest struct { + SpaceId string `json:"space_id"` + EnvId string `json:"env_id"` + CollectionId string `json:"collection_id"` + Filter *items.Filter `json:"filter"` + Options []*items.FindArchivedOptions `json:"options"` // This field was defined with ellipsis (...). + } + FindArchivedResponse struct { + Items []*items.Item `json:"items"` + Total int `json:"total"` + } + + UnarchiveRequest struct { + Item *items.Item `json:"item"` + Options []*items.UnarchiveOptions `json:"options"` // This field was defined with ellipsis (...). + } + // Formal exchange type, please do not delete. + UnarchiveResponse struct{} + + AggregateRequest struct { + SpaceId string `json:"space_id"` + EnvId string `json:"env_id"` + CollectionId string `json:"collection_id"` + Filter *items.Filter `json:"filter"` + Options []*items.AggregateOptions `json:"options"` // This field was defined with ellipsis (...). + } + AggregateResponse struct { + Result map[string]interface{} `json:"result"` + } + + AggregatePublishedRequest struct { + SpaceId string `json:"space_id"` + EnvId string `json:"env_id"` + CollectionId string `json:"collection_id"` + Filter *items.Filter `json:"filter"` + Options []*items.AggregatePublishedOptions `json:"options"` // This field was defined with ellipsis (...). + } + AggregatePublishedResponse struct { + Result map[string]interface{} `json:"result"` + } +) diff --git a/pkg/items/transport/grpc/client.go b/pkg/items/transport/grpc/client.go new file mode 100644 index 0000000000000000000000000000000000000000..faea7cc6703746ba91b0af0e831431ffd76044fc --- /dev/null +++ b/pkg/items/transport/grpc/client.go @@ -0,0 +1,34 @@ +// Code generated by microgen 0.9.1. DO NOT EDIT. 
+ +package transportgrpc + +import ( + grpcerr "git.perx.ru/perxis/perxis-go/pkg/errors/grpc" + transport "git.perx.ru/perxis/perxis-go/pkg/items/transport" + grpckit "github.com/go-kit/kit/transport/grpc" + grpc "google.golang.org/grpc" +) + +func NewClient(conn *grpc.ClientConn, opts ...grpckit.ClientOption) transport.EndpointsSet { + c := NewGRPCClient(conn, "", opts...) + return transport.EndpointsSet{ + CreateEndpoint: grpcerr.ClientMiddleware(c.CreateEndpoint), + IntrospectEndpoint: grpcerr.ClientMiddleware(c.IntrospectEndpoint), + GetEndpoint: grpcerr.ClientMiddleware(c.GetEndpoint), + FindEndpoint: grpcerr.ClientMiddleware(c.FindEndpoint), + UpdateEndpoint: grpcerr.ClientMiddleware(c.UpdateEndpoint), + DeleteEndpoint: grpcerr.ClientMiddleware(c.DeleteEndpoint), + UndeleteEndpoint: grpcerr.ClientMiddleware(c.UndeleteEndpoint), + PublishEndpoint: grpcerr.ClientMiddleware(c.PublishEndpoint), + UnpublishEndpoint: grpcerr.ClientMiddleware(c.UnpublishEndpoint), + GetPublishedEndpoint: grpcerr.ClientMiddleware(c.GetPublishedEndpoint), + FindPublishedEndpoint: grpcerr.ClientMiddleware(c.FindPublishedEndpoint), + GetRevisionEndpoint: grpcerr.ClientMiddleware(c.GetRevisionEndpoint), + ListRevisionsEndpoint: grpcerr.ClientMiddleware(c.ListRevisionsEndpoint), + ArchiveEndpoint: grpcerr.ClientMiddleware(c.ArchiveEndpoint), + FindArchivedEndpoint: grpcerr.ClientMiddleware(c.FindArchivedEndpoint), + UnarchiveEndpoint: grpcerr.ClientMiddleware(c.UnarchiveEndpoint), + AggregateEndpoint: grpcerr.ClientMiddleware(c.AggregateEndpoint), + AggregatePublishedEndpoint: grpcerr.ClientMiddleware(c.AggregatePublishedEndpoint), + } +} diff --git a/pkg/items/transport/grpc/client.microgen.go b/pkg/items/transport/grpc/client.microgen.go new file mode 100644 index 0000000000000000000000000000000000000000..a91c8d16495333a356ebeb3e48100c40e0f7bc91 --- /dev/null +++ b/pkg/items/transport/grpc/client.microgen.go @@ -0,0 +1,145 @@ +// Code generated by microgen 0.9.1. DO NOT EDIT. 
+ +package transportgrpc + +import ( + transport "git.perx.ru/perxis/perxis-go/pkg/items/transport" + pb "git.perx.ru/perxis/perxis-go/proto/items" + grpckit "github.com/go-kit/kit/transport/grpc" + empty "github.com/golang/protobuf/ptypes/empty" + grpc "google.golang.org/grpc" +) + +func NewGRPCClient(conn *grpc.ClientConn, addr string, opts ...grpckit.ClientOption) transport.EndpointsSet { + if addr == "" { + addr = "content.items.Items" + } + return transport.EndpointsSet{ + ArchiveEndpoint: grpckit.NewClient( + conn, addr, "Archive", + _Encode_Archive_Request, + _Decode_Archive_Response, + empty.Empty{}, + opts..., + ).Endpoint(), + CreateEndpoint: grpckit.NewClient( + conn, addr, "Create", + _Encode_Create_Request, + _Decode_Create_Response, + pb.CreateResponse{}, + opts..., + ).Endpoint(), + DeleteEndpoint: grpckit.NewClient( + conn, addr, "Delete", + _Encode_Delete_Request, + _Decode_Delete_Response, + empty.Empty{}, + opts..., + ).Endpoint(), + UndeleteEndpoint: grpckit.NewClient( + conn, addr, "Undelete", + _Encode_Undelete_Request, + _Decode_Undelete_Response, + empty.Empty{}, + opts..., + ).Endpoint(), + FindArchivedEndpoint: grpckit.NewClient( + conn, addr, "FindArchived", + _Encode_FindArchived_Request, + _Decode_FindArchived_Response, + pb.FindArchivedResponse{}, + opts..., + ).Endpoint(), + FindEndpoint: grpckit.NewClient( + conn, addr, "Find", + _Encode_Find_Request, + _Decode_Find_Response, + pb.FindResponse{}, + opts..., + ).Endpoint(), + FindPublishedEndpoint: grpckit.NewClient( + conn, addr, "FindPublished", + _Encode_FindPublished_Request, + _Decode_FindPublished_Response, + pb.FindPublishedResponse{}, + opts..., + ).Endpoint(), + GetEndpoint: grpckit.NewClient( + conn, addr, "Get", + _Encode_Get_Request, + _Decode_Get_Response, + pb.GetResponse{}, + opts..., + ).Endpoint(), + GetPublishedEndpoint: grpckit.NewClient( + conn, addr, "GetPublished", + _Encode_GetPublished_Request, + _Decode_GetPublished_Response, + pb.GetPublishedResponse{}, + 
opts..., + ).Endpoint(), + GetRevisionEndpoint: grpckit.NewClient( + conn, addr, "GetRevision", + _Encode_GetRevision_Request, + _Decode_GetRevision_Response, + pb.GetRevisionResponse{}, + opts..., + ).Endpoint(), + IntrospectEndpoint: grpckit.NewClient( + conn, addr, "Introspect", + _Encode_Introspect_Request, + _Decode_Introspect_Response, + pb.IntrospectResponse{}, + opts..., + ).Endpoint(), + ListRevisionsEndpoint: grpckit.NewClient( + conn, addr, "ListRevisions", + _Encode_ListRevisions_Request, + _Decode_ListRevisions_Response, + pb.ListRevisionsResponse{}, + opts..., + ).Endpoint(), + PublishEndpoint: grpckit.NewClient( + conn, addr, "Publish", + _Encode_Publish_Request, + _Decode_Publish_Response, + empty.Empty{}, + opts..., + ).Endpoint(), + UnarchiveEndpoint: grpckit.NewClient( + conn, addr, "Unarchive", + _Encode_Unarchive_Request, + _Decode_Unarchive_Response, + empty.Empty{}, + opts..., + ).Endpoint(), + UnpublishEndpoint: grpckit.NewClient( + conn, addr, "Unpublish", + _Encode_Unpublish_Request, + _Decode_Unpublish_Response, + empty.Empty{}, + opts..., + ).Endpoint(), + UpdateEndpoint: grpckit.NewClient( + conn, addr, "Update", + _Encode_Update_Request, + _Decode_Update_Response, + empty.Empty{}, + opts..., + ).Endpoint(), + AggregateEndpoint: grpckit.NewClient( + conn, addr, "Aggregate", + _Encode_Aggregate_Request, + _Decode_Aggregate_Response, + pb.AggregateResponse{}, + opts..., + ).Endpoint(), + AggregatePublishedEndpoint: grpckit.NewClient( + conn, addr, "AggregatePublished", + _Encode_AggregatePublished_Request, + _Decode_AggregatePublished_Response, + pb.AggregatePublishedResponse{}, + opts..., + ).Endpoint(), + } +} diff --git a/pkg/items/transport/grpc/protobuf_endpoint_converters.microgen.go b/pkg/items/transport/grpc/protobuf_endpoint_converters.microgen.go new file mode 100644 index 0000000000000000000000000000000000000000..69a696df329a6e28e5912af8815df9852c0c504c --- /dev/null +++ 
b/pkg/items/transport/grpc/protobuf_endpoint_converters.microgen.go @@ -0,0 +1,1010 @@ +// Code generated by microgen 0.9.1. DO NOT EDIT. + +// Please, do not change functions names! +package transportgrpc + +import ( + "context" + "errors" + + transport "git.perx.ru/perxis/perxis-go/pkg/items/transport" + pb "git.perx.ru/perxis/perxis-go/proto/items" + empty "github.com/golang/protobuf/ptypes/empty" +) + +func _Encode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil CreateRequest") + } + req := request.(*transport.CreateRequest) + reqItem, err := PtrItemToProto(req.Item) + if err != nil { + return nil, err + } + + opts, err := CreateOptionsToProto(req.Opts) + if err != nil { + return nil, err + } + return &pb.CreateRequest{ + Item: reqItem, + Options: opts, + }, nil +} + +func _Encode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil GetRequest") + } + req := request.(*transport.GetRequest) + return &pb.GetRequest{ + CollectionId: req.CollectionId, + EnvId: req.EnvId, + ItemId: req.ItemId, + SpaceId: req.SpaceId, + }, nil +} + +func _Encode_Find_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil FindRequest") + } + req := request.(*transport.FindRequest) + reqFilter, err := PtrFilterToProto(req.Filter) + if err != nil { + return nil, err + } + reqOptions, err := ElPtrFindOptionsToProto(req.Options) + if err != nil { + return nil, err + } + return &pb.FindRequest{ + CollectionId: req.CollectionId, + EnvId: req.EnvId, + SpaceId: req.SpaceId, + Options: reqOptions, + Filter: reqFilter, + }, nil +} + +func _Encode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil UpdateRequest") + } + req := request.(*transport.UpdateRequest) + reqItem, err := 
PtrItemToProto(req.Item) + if err != nil { + return nil, err + } + + opts, err := UpdateOptionsToProto(req.Options) + if err != nil { + return nil, err + } + return &pb.UpdateRequest{ + Item: reqItem, + Options: opts, + }, nil +} + +func _Encode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil DeleteRequest") + } + req := request.(*transport.DeleteRequest) + + opts, err := DeleteOptionsToProto(req.Options) + if err != nil { + return nil, err + } + return &pb.DeleteRequest{ + CollectionId: req.CollectionId, + EnvId: req.EnvId, + ItemId: req.ItemId, + SpaceId: req.SpaceId, + Options: opts, + }, nil +} + +func _Encode_Undelete_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil UndeleteRequest") + } + req := request.(*transport.UndeleteRequest) + return &pb.UndeleteRequest{ + CollectionId: req.CollectionId, + EnvId: req.EnvId, + ItemId: req.ItemId, + SpaceId: req.SpaceId, + }, nil +} + +func _Encode_Publish_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil PublishRequest") + } + req := request.(*transport.PublishRequest) + reqItem, err := PtrItemToProto(req.Item) + if err != nil { + return nil, err + } + + opts, err := PublishOptionsToProto(req.Options) + if err != nil { + return nil, err + } + return &pb.PublishRequest{ + Item: reqItem, + Options: opts, + }, nil +} + +func _Encode_Unpublish_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil UnpublishRequest") + } + req := request.(*transport.UnpublishRequest) + reqItem, err := PtrItemToProto(req.Item) + if err != nil { + return nil, err + } + return &pb.UnpublishRequest{Item: reqItem}, nil +} + +func _Encode_GetPublished_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + 
return nil, errors.New("nil GetPublishedRequest") + } + req := request.(*transport.GetPublishedRequest) + reqOptions, err := ElPtrGetPublishedOptionsToProto(req.Options) + if err != nil { + return nil, err + } + return &pb.GetPublishedRequest{ + CollectionId: req.CollectionId, + EnvId: req.EnvId, + ItemId: req.ItemId, + SpaceId: req.SpaceId, + Options: reqOptions, + }, nil +} + +func _Encode_FindPublished_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil FindPublishedRequest") + } + req := request.(*transport.FindPublishedRequest) + reqFilter, err := PtrFilterToProto(req.Filter) + if err != nil { + return nil, err + } + reqOptions, err := ElPtrFindPublishedOptionsToProto(req.Options) + if err != nil { + return nil, err + } + return &pb.FindPublishedRequest{ + CollectionId: req.CollectionId, + EnvId: req.EnvId, + Options: reqOptions, + Filter: reqFilter, + SpaceId: req.SpaceId, + }, nil +} + +func _Encode_GetRevision_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil GetRevisionRequest") + } + req := request.(*transport.GetRevisionRequest) + return &pb.GetRevisionRequest{ + CollectionId: req.CollectionId, + EnvId: req.EnvId, + ItemId: req.ItemId, + RevisionId: req.RevisionId, + SpaceId: req.SpaceId, + }, nil +} + +func _Encode_ListRevisions_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil ListRevisionsRequest") + } + req := request.(*transport.ListRevisionsRequest) + reqOptions, err := ElPtrListRevisionsOptionsToProto(req.Options) + if err != nil { + return nil, err + } + return &pb.ListRevisionsRequest{ + CollectionId: req.CollectionId, + EnvId: req.EnvId, + ItemId: req.ItemId, + SpaceId: req.SpaceId, + Options: reqOptions, + }, nil +} + +func _Encode_Archive_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request 
== nil { + return nil, errors.New("nil ArchiveRequest") + } + req := request.(*transport.ArchiveRequest) + reqItem, err := PtrItemToProto(req.Item) + if err != nil { + return nil, err + } + return &pb.ArchiveRequest{Item: reqItem}, nil +} + +func _Encode_FindArchived_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil FindArchivedRequest") + } + req := request.(*transport.FindArchivedRequest) + reqFilter, err := PtrFilterToProto(req.Filter) + if err != nil { + return nil, err + } + reqOptions, err := ElPtrFindArchivedOptionsToProto(req.Options) + if err != nil { + return nil, err + } + return &pb.FindArchivedRequest{ + CollectionId: req.CollectionId, + EnvId: req.EnvId, + Options: reqOptions, + Filter: reqFilter, + SpaceId: req.SpaceId, + }, nil +} + +func _Encode_Unarchive_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil UnarchiveRequest") + } + req := request.(*transport.UnarchiveRequest) + reqItem, err := PtrItemToProto(req.Item) + if err != nil { + return nil, err + } + return &pb.UnarchiveRequest{Item: reqItem}, nil +} + +func _Encode_Aggregate_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil AggregateRequest") + } + req := request.(*transport.AggregateRequest) + reqFilter, err := PtrFilterToProto(req.Filter) + if err != nil { + return nil, err + } + reqOptions, err := ElPtrAggregateOptionsToProto(req.Options) + if err != nil { + return nil, err + } + return &pb.AggregateRequest{ + CollectionId: req.CollectionId, + EnvId: req.EnvId, + SpaceId: req.SpaceId, + Options: reqOptions, + Filter: reqFilter, + }, nil +} + +func _Encode_AggregatePublished_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil AggregatePublishedRequest") + } + req := 
request.(*transport.AggregatePublishedRequest) + reqFilter, err := PtrFilterToProto(req.Filter) + if err != nil { + return nil, err + } + reqOptions, err := ElPtrAggregatePublishedOptionsToProto(req.Options) + if err != nil { + return nil, err + } + return &pb.AggregatePublishedRequest{ + CollectionId: req.CollectionId, + EnvId: req.EnvId, + SpaceId: req.SpaceId, + Options: reqOptions, + Filter: reqFilter, + }, nil +} + +func _Encode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) { + if response == nil { + return nil, errors.New("nil CreateResponse") + } + resp := response.(*transport.CreateResponse) + respCreated, err := PtrItemToProto(resp.Created) + if err != nil { + return nil, err + } + return &pb.CreateResponse{Created: respCreated}, nil +} + +func _Encode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) { + if response == nil { + return nil, errors.New("nil GetResponse") + } + resp := response.(*transport.GetResponse) + respItem, err := PtrItemToProto(resp.Item) + if err != nil { + return nil, err + } + return &pb.GetResponse{Item: respItem}, nil +} + +func _Encode_Find_Response(ctx context.Context, response interface{}) (interface{}, error) { + if response == nil { + return nil, errors.New("nil FindResponse") + } + resp := response.(*transport.FindResponse) + respItems, err := ListPtrItemToProto(resp.Items) + if err != nil { + return nil, err + } + return &pb.FindResponse{ + Items: respItems, + Total: int32(resp.Total), + }, nil +} + +func _Encode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) { + return &empty.Empty{}, nil +} + +func _Encode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) { + return &empty.Empty{}, nil +} + +func _Encode_Undelete_Response(ctx context.Context, response interface{}) (interface{}, error) { + return &empty.Empty{}, nil +} + +func _Encode_Publish_Response(ctx context.Context, response interface{}) 
(interface{}, error) { + return &empty.Empty{}, nil +} + +func _Encode_Unpublish_Response(ctx context.Context, response interface{}) (interface{}, error) { + return &empty.Empty{}, nil +} + +func _Encode_GetPublished_Response(ctx context.Context, response interface{}) (interface{}, error) { + if response == nil { + return nil, errors.New("nil GetPublishedResponse") + } + resp := response.(*transport.GetPublishedResponse) + respItem, err := PtrItemToProto(resp.Item) + if err != nil { + return nil, err + } + return &pb.GetPublishedResponse{Item: respItem}, nil +} + +func _Encode_FindPublished_Response(ctx context.Context, response interface{}) (interface{}, error) { + if response == nil { + return nil, errors.New("nil FindPublishedResponse") + } + resp := response.(*transport.FindPublishedResponse) + respItems, err := ListPtrItemToProto(resp.Items) + if err != nil { + return nil, err + } + return &pb.FindPublishedResponse{ + Items: respItems, + Total: int32(resp.Total), + }, nil +} + +func _Encode_GetRevision_Response(ctx context.Context, response interface{}) (interface{}, error) { + if response == nil { + return nil, errors.New("nil GetRevisionResponse") + } + resp := response.(*transport.GetRevisionResponse) + respItem, err := PtrItemToProto(resp.Item) + if err != nil { + return nil, err + } + return &pb.GetRevisionResponse{Item: respItem}, nil +} + +func _Encode_ListRevisions_Response(ctx context.Context, response interface{}) (interface{}, error) { + if response == nil { + return nil, errors.New("nil ListRevisionsResponse") + } + resp := response.(*transport.ListRevisionsResponse) + respItems, err := ListPtrItemToProto(resp.Items) + if err != nil { + return nil, err + } + return &pb.ListRevisionsResponse{Items: respItems}, nil +} + +func _Encode_Archive_Response(ctx context.Context, response interface{}) (interface{}, error) { + return &empty.Empty{}, nil +} + +func _Encode_FindArchived_Response(ctx context.Context, response interface{}) (interface{}, error) { + 
if response == nil { + return nil, errors.New("nil FindArchivedResponse") + } + resp := response.(*transport.FindArchivedResponse) + respItems, err := ListPtrItemToProto(resp.Items) + if err != nil { + return nil, err + } + return &pb.FindArchivedResponse{ + Items: respItems, + Total: int32(resp.Total), + }, nil +} + +func _Encode_Unarchive_Response(ctx context.Context, response interface{}) (interface{}, error) { + return &empty.Empty{}, nil +} + +func _Encode_Aggregate_Response(ctx context.Context, response interface{}) (interface{}, error) { + if response == nil { + return nil, errors.New("nil AggregateResponse") + } + resp := response.(*transport.AggregateResponse) + result, err := MapStringInterfaceToProto(resp.Result) + if err != nil { + return nil, err + } + return &pb.AggregateResponse{ + Result: result, + }, nil +} + +func _Encode_AggregatePublished_Response(ctx context.Context, response interface{}) (interface{}, error) { + if response == nil { + return nil, errors.New("nil AggregateResponse") + } + resp := response.(*transport.AggregatePublishedResponse) + result, err := MapStringInterfaceToProto(resp.Result) + if err != nil { + return nil, err + } + return &pb.AggregatePublishedResponse{ + Result: result, + }, nil +} + +func _Decode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil CreateRequest") + } + req := request.(*pb.CreateRequest) + reqItem, err := ProtoToPtrItem(req.Item) + if err != nil { + return nil, err + } + + opts, err := ProtoToCreateOptions(req.Options) + if err != nil { + return nil, err + } + return &transport.CreateRequest{ + Item: reqItem, + Opts: opts, + }, nil +} + +func _Decode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil GetRequest") + } + req := request.(*pb.GetRequest) + return &transport.GetRequest{ + CollectionId: string(req.CollectionId), + EnvId: 
string(req.EnvId), + ItemId: string(req.ItemId), + SpaceId: string(req.SpaceId), + }, nil +} + +func _Decode_Aggregate_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil AggregateRequest") + } + req := request.(*pb.AggregateRequest) + reqFilter, err := ProtoToPtrFilter(req.Filter) + if err != nil { + return nil, err + } + reqOptions, err := ProtoToPtrServicesAggregateOptions(req.Options) + if err != nil { + return nil, err + } + return &transport.AggregateRequest{ + SpaceId: string(req.SpaceId), + EnvId: string(req.EnvId), + CollectionId: string(req.CollectionId), + Filter: reqFilter, + Options: reqOptions, + }, nil +} + +func _Decode_AggregatePublished_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil AggregatePublishedRequest") + } + req := request.(*pb.AggregatePublishedRequest) + reqFilter, err := ProtoToPtrFilter(req.Filter) + if err != nil { + return nil, err + } + reqOptions, err := ProtoToPtrServicesAggregatePublishedOptions(req.Options) + if err != nil { + return nil, err + } + return &transport.AggregatePublishedRequest{ + SpaceId: string(req.SpaceId), + EnvId: string(req.EnvId), + CollectionId: string(req.CollectionId), + Filter: reqFilter, + Options: reqOptions, + }, nil +} + +func _Decode_Find_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil FindRequest") + } + req := request.(*pb.FindRequest) + reqFilter, err := ProtoToPtrFilter(req.Filter) + if err != nil { + return nil, err + } + reqOptions, err := ProtoToElPtrFindOptions(req.Options) + if err != nil { + return nil, err + } + return &transport.FindRequest{ + CollectionId: string(req.CollectionId), + EnvId: string(req.EnvId), + Options: reqOptions, + Filter: reqFilter, + SpaceId: string(req.SpaceId), + }, nil +} + +func _Decode_Update_Request(ctx context.Context, request 
interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil UpdateRequest") + } + req := request.(*pb.UpdateRequest) + reqItem, err := ProtoToPtrItem(req.Item) + if err != nil { + return nil, err + } + + opts, err := ProtoToUpdateOptions(req.Options) + if err != nil { + return nil, err + } + return &transport.UpdateRequest{ + Item: reqItem, + Options: opts, + }, nil +} + +func _Decode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil DeleteRequest") + } + req := request.(*pb.DeleteRequest) + + opts, err := ProtoToDeleteOptions(req.Options) + if err != nil { + return nil, err + } + return &transport.DeleteRequest{ + CollectionId: string(req.CollectionId), + EnvId: string(req.EnvId), + ItemId: string(req.ItemId), + SpaceId: string(req.SpaceId), + Options: opts, + }, nil +} + +func _Decode_Undelete_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil UndeleteRequest") + } + req := request.(*pb.UndeleteRequest) + return &transport.UndeleteRequest{ + CollectionId: string(req.CollectionId), + EnvId: string(req.EnvId), + ItemId: string(req.ItemId), + SpaceId: string(req.SpaceId), + }, nil +} + +func _Decode_Publish_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil PublishRequest") + } + req := request.(*pb.PublishRequest) + reqItem, err := ProtoToPtrItem(req.Item) + if err != nil { + return nil, err + } + + opts, err := ProtoToPublishOptions(req.Options) + if err != nil { + return nil, err + } + return &transport.PublishRequest{ + Item: reqItem, + Options: opts, + }, nil +} + +func _Decode_Unpublish_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil UnpublishRequest") + } + req := request.(*pb.UnpublishRequest) + reqItem, err := 
ProtoToPtrItem(req.Item) + if err != nil { + return nil, err + } + return &transport.UnpublishRequest{Item: reqItem}, nil +} + +func _Decode_GetPublished_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil GetPublishedRequest") + } + req := request.(*pb.GetPublishedRequest) + reqOptions, err := ProtoToElPtrGetPublishedOptions(req.Options) + if err != nil { + return nil, err + } + return &transport.GetPublishedRequest{ + CollectionId: string(req.CollectionId), + EnvId: string(req.EnvId), + ItemId: string(req.ItemId), + SpaceId: string(req.SpaceId), + Options: reqOptions, + }, nil +} + +func _Decode_FindPublished_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil FindPublishedRequest") + } + req := request.(*pb.FindPublishedRequest) + reqFilter, err := ProtoToPtrFilter(req.Filter) + if err != nil { + return nil, err + } + reqOptions, err := ProtoToElPtrFindPublishedOptions(req.Options) + if err != nil { + return nil, err + } + return &transport.FindPublishedRequest{ + CollectionId: string(req.CollectionId), + EnvId: string(req.EnvId), + Options: reqOptions, + Filter: reqFilter, + SpaceId: string(req.SpaceId), + }, nil +} + +func _Decode_GetRevision_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil GetRevisionRequest") + } + req := request.(*pb.GetRevisionRequest) + return &transport.GetRevisionRequest{ + CollectionId: string(req.CollectionId), + EnvId: string(req.EnvId), + ItemId: string(req.ItemId), + RevisionId: string(req.RevisionId), + SpaceId: string(req.SpaceId), + }, nil +} + +func _Decode_ListRevisions_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil ListRevisionsRequest") + } + req := request.(*pb.ListRevisionsRequest) + reqOptions, err := 
ProtoToElPtrListRevisionsOptions(req.Options) + if err != nil { + return nil, err + } + return &transport.ListRevisionsRequest{ + CollectionId: string(req.CollectionId), + EnvId: string(req.EnvId), + ItemId: string(req.ItemId), + SpaceId: string(req.SpaceId), + Options: reqOptions, + }, nil +} + +func _Decode_Archive_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil ArchiveRequest") + } + req := request.(*pb.ArchiveRequest) + reqItem, err := ProtoToPtrItem(req.Item) + if err != nil { + return nil, err + } + return &transport.ArchiveRequest{Item: reqItem}, nil +} + +func _Decode_FindArchived_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil FindArchivedRequest") + } + req := request.(*pb.FindArchivedRequest) + reqFilter, err := ProtoToPtrFilter(req.Filter) + if err != nil { + return nil, err + } + reqOptions, err := ProtoToElPtrFindArchivedOptions(req.Options) + if err != nil { + return nil, err + } + return &transport.FindArchivedRequest{ + CollectionId: string(req.CollectionId), + EnvId: string(req.EnvId), + Options: reqOptions, + Filter: reqFilter, + SpaceId: string(req.SpaceId), + }, nil +} + +func _Decode_Unarchive_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil UnarchiveRequest") + } + req := request.(*pb.UnarchiveRequest) + reqItem, err := ProtoToPtrItem(req.Item) + if err != nil { + return nil, err + } + return &transport.UnarchiveRequest{Item: reqItem}, nil +} + +func _Decode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) { + if response == nil { + return nil, errors.New("nil CreateResponse") + } + resp := response.(*pb.CreateResponse) + respCreated, err := ProtoToPtrItem(resp.Created) + if err != nil { + return nil, err + } + return &transport.CreateResponse{Created: respCreated}, nil +} + +func 
_Decode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) { + if response == nil { + return nil, errors.New("nil GetResponse") + } + resp := response.(*pb.GetResponse) + respItem, err := ProtoToPtrItem(resp.Item) + if err != nil { + return nil, err + } + return &transport.GetResponse{Item: respItem}, nil +} + +func _Decode_Find_Response(ctx context.Context, response interface{}) (interface{}, error) { + if response == nil { + return nil, errors.New("nil FindResponse") + } + resp := response.(*pb.FindResponse) + respItems, err := ProtoToListPtrItem(resp.Items) + if err != nil { + return nil, err + } + return &transport.FindResponse{ + Items: respItems, + Total: int(resp.Total), + }, nil +} + +func _Decode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) { + return &empty.Empty{}, nil +} + +func _Decode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) { + return &empty.Empty{}, nil +} + +func _Decode_Undelete_Response(ctx context.Context, response interface{}) (interface{}, error) { + return &empty.Empty{}, nil +} + +func _Decode_Publish_Response(ctx context.Context, response interface{}) (interface{}, error) { + return &empty.Empty{}, nil +} + +func _Decode_Unpublish_Response(ctx context.Context, response interface{}) (interface{}, error) { + return &empty.Empty{}, nil +} + +func _Decode_GetPublished_Response(ctx context.Context, response interface{}) (interface{}, error) { + if response == nil { + return nil, errors.New("nil GetPublishedResponse") + } + resp := response.(*pb.GetPublishedResponse) + respItem, err := ProtoToPtrItem(resp.Item) + if err != nil { + return nil, err + } + return &transport.GetPublishedResponse{Item: respItem}, nil +} + +func _Decode_FindPublished_Response(ctx context.Context, response interface{}) (interface{}, error) { + if response == nil { + return nil, errors.New("nil FindPublishedResponse") + } + resp := response.(*pb.FindPublishedResponse) + 
respItems, err := ProtoToListPtrItem(resp.Items) + if err != nil { + return nil, err + } + return &transport.FindPublishedResponse{ + Items: respItems, + Total: int(resp.Total), + }, nil +} + +func _Decode_GetRevision_Response(ctx context.Context, response interface{}) (interface{}, error) { + if response == nil { + return nil, errors.New("nil GetRevisionResponse") + } + resp := response.(*pb.GetRevisionResponse) + respItem, err := ProtoToPtrItem(resp.Item) + if err != nil { + return nil, err + } + return &transport.GetRevisionResponse{Item: respItem}, nil +} + +func _Decode_ListRevisions_Response(ctx context.Context, response interface{}) (interface{}, error) { + if response == nil { + return nil, errors.New("nil ListRevisionsResponse") + } + resp := response.(*pb.ListRevisionsResponse) + respItems, err := ProtoToListPtrItem(resp.Items) + if err != nil { + return nil, err + } + return &transport.ListRevisionsResponse{Items: respItems}, nil +} + +func _Decode_Archive_Response(ctx context.Context, response interface{}) (interface{}, error) { + return &empty.Empty{}, nil +} + +func _Decode_FindArchived_Response(ctx context.Context, response interface{}) (interface{}, error) { + if response == nil { + return nil, errors.New("nil FindArchivedResponse") + } + resp := response.(*pb.FindArchivedResponse) + respItems, err := ProtoToListPtrItem(resp.Items) + if err != nil { + return nil, err + } + return &transport.FindArchivedResponse{ + Items: respItems, + Total: int(resp.Total), + }, nil +} + +func _Decode_Unarchive_Response(ctx context.Context, response interface{}) (interface{}, error) { + return &empty.Empty{}, nil +} + +func _Encode_Introspect_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil IntrospectRequest") + } + req := request.(*transport.IntrospectRequest) + reqItem, err := PtrItemToProto(req.Item) + if err != nil { + return nil, err + } + return &pb.IntrospectRequest{ + Item: reqItem, 
+ }, nil +} + +func _Encode_Introspect_Response(ctx context.Context, response interface{}) (interface{}, error) { + if response == nil { + return nil, errors.New("nil IntrospectResponse") + } + resp := response.(*transport.IntrospectResponse) + respItm, err := PtrItemToProto(resp.Item) + if err != nil { + return nil, err + } + respSch, err := PtrSchemaSchemaToProto(resp.Schema) + if err != nil { + return nil, err + } + respErrors, err := ValidationErrorsToProto(resp.ValidationErrors) + return &pb.IntrospectResponse{ + Item: respItm, + Schema: respSch, + ValidationErrors: respErrors, + }, nil +} + +func _Decode_Introspect_Request(ctx context.Context, request interface{}) (interface{}, error) { + if request == nil { + return nil, errors.New("nil IntrospectRequest") + } + req := request.(*pb.IntrospectRequest) + reqItem, err := ProtoToPtrItem(req.Item) + if err != nil { + return nil, err + } + return &transport.IntrospectRequest{ + Item: reqItem, + }, nil +} + +func _Decode_Introspect_Response(ctx context.Context, response interface{}) (interface{}, error) { + if response == nil { + return nil, errors.New("nil IntrospectResponse") + } + resp := response.(*pb.IntrospectResponse) + respItm, err := ProtoToPtrItem(resp.Item) + if err != nil { + return nil, err + } + respSch, err := ProtoToPtrSchemaSchema(resp.Schema) + if err != nil { + return nil, err + } + respErrs, err := ProtoToValidationErrors(resp.ValidationErrors) + if err != nil { + return nil, err + } + return &transport.IntrospectResponse{ + Item: respItm, + Schema: respSch, + ValidationErrors: respErrs, + }, nil +} + +func _Decode_Aggregate_Response(ctx context.Context, response interface{}) (interface{}, error) { + if response == nil { + return nil, errors.New("nil AggregateResponse") + } + resp := response.(*pb.AggregateResponse) + result, err := ProtoToMapStringInterface(resp.Result) + if err != nil { + return nil, err + } + return &transport.AggregateResponse{ + Result: result, + }, nil +} + +func 
_Decode_AggregatePublished_Response(ctx context.Context, response interface{}) (interface{}, error) { + if response == nil { + return nil, errors.New("nil AggregatePublishedResponse") + } + resp := response.(*pb.AggregatePublishedResponse) + result, err := ProtoToMapStringInterface(resp.Result) + if err != nil { + return nil, err + } + return &transport.AggregatePublishedResponse{ + Result: result, + }, nil +} diff --git a/pkg/items/transport/grpc/protobuf_type_converters.microgen.go b/pkg/items/transport/grpc/protobuf_type_converters.microgen.go new file mode 100644 index 0000000000000000000000000000000000000000..7eae996594e266ddfc712183bd3e1d7c4a39c78a --- /dev/null +++ b/pkg/items/transport/grpc/protobuf_type_converters.microgen.go @@ -0,0 +1,627 @@ +// Code generated by microgen 0.9.1. DO NOT EDIT. + +// It is better for you if you do not change functions names! +// This file will never be overwritten. +package transportgrpc + +import ( + "fmt" + + "git.perx.ru/perxis/perxis-go/pkg/errors" + "git.perx.ru/perxis/perxis-go/pkg/filter" + service "git.perx.ru/perxis/perxis-go/pkg/items" + "git.perx.ru/perxis/perxis-go/pkg/options" + "git.perx.ru/perxis/perxis-go/pkg/schema" + pbcommon "git.perx.ru/perxis/perxis-go/proto/common" + pb "git.perx.ru/perxis/perxis-go/proto/items" + jsoniter "github.com/json-iterator/go" + "google.golang.org/protobuf/types/known/structpb" +) + +func MapStringInterfaceToProto(data map[string]interface{}) (*structpb.Struct, error) { + if data == nil { + return nil, nil + } + return structpb.NewStruct(data) +} + +func ProtoToMapStringInterface(protoData *structpb.Struct) (map[string]interface{}, error) { + if protoData == nil { + return nil, nil + } + return protoData.AsMap(), nil +} + +func MapStringMapStringInterfaceToProto(translations map[string]map[string]interface{}) (map[string]*structpb.Struct, error) { + if translations == nil { + return nil, nil + } + res := make(map[string]*structpb.Struct, len(translations)) + for k, v := range 
translations { + res[k], _ = MapStringInterfaceToProto(v) + } + return res, nil +} + +func PtrPermissionsToProto(permissions *service.Permissions) (*pb.Permissions, error) { + if permissions == nil { + return nil, nil + } + + return &pb.Permissions{ + Edit: permissions.Edit, + Archive: permissions.Archive, + Publish: permissions.Publish, + SoftDelete: permissions.SoftDelete, + HardDelete: permissions.HardDelete, + }, + nil +} + +func ProtoToPtrPermissions(protoPermissions *pb.Permissions) (*service.Permissions, error) { + if protoPermissions == nil { + return nil, nil + } + + return &service.Permissions{ + Edit: protoPermissions.Edit, + Archive: protoPermissions.Archive, + Publish: protoPermissions.Publish, + SoftDelete: protoPermissions.SoftDelete, + HardDelete: protoPermissions.HardDelete, + }, + nil +} + +func ProtoToMapStringMapStringInterface(protoTranslations map[string]*structpb.Struct) (map[string]map[string]interface{}, error) { + if protoTranslations == nil { + return nil, nil + } + res := make(map[string]map[string]interface{}, len(protoTranslations)) + for k, v := range protoTranslations { + res[k], _ = ProtoToMapStringInterface(v) + } + return res, nil +} + +func PtrItemToProto(item *service.Item) (*pb.Item, error) { + return service.ItemToProto(item), nil +} + +func ProtoToPtrItem(protoItem *pb.Item) (*service.Item, error) { + return service.ItemFromProto(protoItem), nil +} + +func PtrFilterToProto(filter *service.Filter) (*pb.Filter, error) { + if filter == nil { + return nil, nil + } + + dt := make([]*pbcommon.Filter, 0, len(filter.Data)) + for _, f := range filter.Data { + pf := &pbcommon.Filter{ + Op: string(f.Op), + Field: f.Field, + } + + val, err := structpb.NewValue(f.Value) + if err != nil { + return nil, err + } + pf.Value = val + dt = append(dt, pf) + } + + return &pb.Filter{ + Id: filter.ID, + Data: dt, + Q: filter.Q, + }, nil +} + +func ProtoToPtrFilter(protoFilter *pb.Filter) (*service.Filter, error) { + if protoFilter == nil { + return 
nil, nil + } + + dt := make([]*filter.Filter, 0, len(protoFilter.Data)) + for _, pf := range protoFilter.Data { + + f := &filter.Filter{ + Op: filter.Op(pf.Op), + Field: pf.Field, + Value: pf.Value.AsInterface(), + } + + dt = append(dt, f) + } + + return &service.Filter{ + ID: protoFilter.Id, + Data: dt, + Q: protoFilter.Q, + }, nil +} + +func PtrServicesFindOptionsToProto(opts *options.FindOptions) (*pbcommon.FindOptions, error) { + if opts == nil { + return nil, nil + } + return &pbcommon.FindOptions{ + Sort: opts.Sort, + PageNum: int32(opts.PageNum), + PageSize: int32(opts.PageSize), + Fields: opts.Fields, + ExcludeFields: opts.ExcludeFields, + }, nil +} + +func ProtoToPtrServicesFindOptions(protoOpts *pbcommon.FindOptions) (*options.FindOptions, error) { + if protoOpts == nil { + return nil, nil + } + return &options.FindOptions{ + SortOptions: options.SortOptions{ + Sort: protoOpts.Sort, + }, + PaginationOptions: options.PaginationOptions{ + PageNum: int(protoOpts.PageNum), + PageSize: int(protoOpts.PageSize), + }, + FieldOptions: options.FieldOptions{ + Fields: protoOpts.Fields, + ExcludeFields: protoOpts.ExcludeFields, + }, + }, nil +} + +func ListPtrItemToProto(items []*service.Item) ([]*pb.Item, error) { + protoItems := make([]*pb.Item, 0, len(items)) + for _, itm := range items { + pi, err := PtrItemToProto(itm) + if err != nil { + return nil, err + } + protoItems = append(protoItems, pi) + } + return protoItems, nil +} + +func ProtoToListPtrItem(protoItems []*pb.Item) ([]*service.Item, error) { + items := make([]*service.Item, 0, len(protoItems)) + for _, itm := range protoItems { + pi, err := ProtoToPtrItem(itm) + if err != nil { + return nil, err + } + items = append(items, pi) + } + return items, nil +} + +func ProtoToCreateOptions(protoOptions *pb.CreateOptions) ([]*service.CreateOptions, error) { + if protoOptions == nil { + return nil, nil + } + return []*service.CreateOptions{ + {UpdateAttrs: protoOptions.UpdateAttrs}, + }, nil +} + +func 
CreateOptionsToProto(options []*service.CreateOptions) (*pb.CreateOptions, error) { + if options == nil { + return nil, nil + } + + opts := service.MergeCreateOptions(options...) + + return &pb.CreateOptions{ + UpdateAttrs: opts.UpdateAttrs, + }, nil +} + +func ElPtrGetOptionsToProto() { + panic("function not provided") // TODO: provide converter +} + +func ProtoToElPtrGetOptions() { + panic("function not provided") // TODO: provide converter +} + +func ElPtrFindOptionsToProto(options []*service.FindOptions) (*pb.FindOptions, error) { + if options == nil { + return nil, nil + } + + opts := service.MergeFindOptions(options...) + + var err error + + fo := &pb.FindOptions{ + Deleted: opts.Deleted, + Regular: opts.Regular, + Hidden: opts.Hidden, + Templates: opts.Templates, + } + + fo.Options, err = PtrServicesFindOptionsToProto(&opts.FindOptions) + if err != nil { + return nil, err + } + + return fo, nil +} + +func ProtoToElPtrFindOptions(protoOptions *pb.FindOptions) ([]*service.FindOptions, error) { + if protoOptions == nil { + return nil, nil + } + + var err error + fo := &service.FindOptions{ + Deleted: protoOptions.Deleted, + Regular: protoOptions.Regular, + Hidden: protoOptions.Hidden, + Templates: protoOptions.Templates, + } + + o, err := ProtoToPtrServicesFindOptions(protoOptions.Options) + if err != nil { + return nil, err + } + if o != nil { + fo.FindOptions = *o + } + + return []*service.FindOptions{fo}, nil +} + +func ProtoToUpdateOptions(protoOptions *pb.UpdateOptions) ([]*service.UpdateOptions, error) { + if protoOptions == nil { + return nil, nil + } + return []*service.UpdateOptions{ + {UpdateAttrs: protoOptions.UpdateAttrs}, + }, nil +} + +func UpdateOptionsToProto(options []*service.UpdateOptions) (*pb.UpdateOptions, error) { + if options == nil { + return nil, nil + } + + opts := service.MergeUpdateOptions(options...) 
+ + return &pb.UpdateOptions{ + UpdateAttrs: opts.UpdateAttrs, + }, nil +} + +func ProtoToDeleteOptions(protoOptions *pb.DeleteOptions) ([]*service.DeleteOptions, error) { + if protoOptions == nil { + return nil, nil + } + return []*service.DeleteOptions{ + {Erase: protoOptions.Erase}, + }, nil +} + +func DeleteOptionsToProto(options []*service.DeleteOptions) (*pb.DeleteOptions, error) { + if options == nil { + return nil, nil + } + + opts := service.MergeDeleteOptions(options...) + + return &pb.DeleteOptions{ + Erase: opts.Erase, + }, nil +} + +func ProtoToPublishOptions(protoOptions *pb.PublishOptions) ([]*service.PublishOptions, error) { + if protoOptions == nil { + return nil, nil + } + return []*service.PublishOptions{ + {UpdateAttrs: protoOptions.UpdateAttrs}, + }, nil +} + +func PublishOptionsToProto(options []*service.PublishOptions) (*pb.PublishOptions, error) { + if options == nil { + return nil, nil + } + + opts := service.MergePublishOptions(options...) + + return &pb.PublishOptions{ + UpdateAttrs: opts.UpdateAttrs, + }, nil +} + +func ElPtrUnpublishOptionsToProto() { + panic("function not provided") // TODO: provide converter +} + +func ProtoToElPtrUnpublishOptions() { + panic("function not provided") // TODO: provide converter +} + +func ElPtrGetPublishedOptionsToProto(options []*service.GetPublishedOptions) (*pb.GetPublishedOptions, error) { + if options == nil { + return nil, nil + } + + opts := service.MergeGetPublishedOptions(options...) 
+ + return &pb.GetPublishedOptions{LocaleId: opts.LocaleID}, nil +} + +func ProtoToElPtrGetPublishedOptions(protoOptions *pb.GetPublishedOptions) ([]*service.GetPublishedOptions, error) { + if protoOptions == nil { + return nil, nil + } + + return []*service.GetPublishedOptions{{LocaleID: protoOptions.LocaleId}}, nil +} + +func ElPtrFindPublishedOptionsToProto(options []*service.FindPublishedOptions) (*pb.FindPublishedOptions, error) { + if options == nil { + return nil, nil + } + + opts := service.MergeFindPublishedOptions(options...) + + var err error + + fo := &pb.FindPublishedOptions{ + Regular: opts.Regular, + Hidden: opts.Hidden, + Templates: opts.Templates, + } + fo.Options, err = PtrServicesFindOptionsToProto(&opts.FindOptions) + if err != nil { + return nil, err + } + + fo.LocaleId = opts.LocaleID + + return fo, nil +} + +func ProtoToElPtrFindPublishedOptions(protoOptions *pb.FindPublishedOptions) ([]*service.FindPublishedOptions, error) { + if protoOptions == nil { + return nil, nil + } + + var err error + fo := &service.FindPublishedOptions{ + Regular: protoOptions.Regular, + Hidden: protoOptions.Hidden, + Templates: protoOptions.Templates, + } + + o, err := ProtoToPtrServicesFindOptions(protoOptions.Options) + if err != nil { + return nil, err + } + if o != nil { + fo.FindOptions = *o + } + + fo.LocaleID = protoOptions.LocaleId + + return []*service.FindPublishedOptions{fo}, nil +} + +func ElPtrGetRevisionOptionsToProto() { + panic("function not provided") // TODO: provide converter +} + +func ProtoToElPtrGetRevisionOptions() { + panic("function not provided") // TODO: provide converter +} + +func ElPtrListRevisionsOptionsToProto(options []*service.ListRevisionsOptions) (*pb.ListRevisionsOptions, error) { + if options == nil { + return nil, nil + } + + opts := service.MergeListRevisionsOptions(options...) 
+ + var err error + + fo := &pb.ListRevisionsOptions{} + + fo.Options, err = PtrServicesFindOptionsToProto(&opts.FindOptions) + if err != nil { + return nil, err + } + + return fo, nil +} + +func ProtoToElPtrListRevisionsOptions(protoOptions *pb.ListRevisionsOptions) ([]*service.ListRevisionsOptions, error) { + if protoOptions == nil { + return nil, nil + } + + var err error + fo := &service.ListRevisionsOptions{} + + o, err := ProtoToPtrServicesFindOptions(protoOptions.Options) + if err != nil { + return nil, err + } + if o != nil { + fo.FindOptions = *o + } + + return []*service.ListRevisionsOptions{fo}, nil +} + +func ElPtrArchiveOptionsToProto() { + panic("function not provided") // TODO: provide converter +} + +func ProtoToElPtrArchiveOptions() { + panic("function not provided") // TODO: provide converter +} + +func ElPtrFindArchivedOptionsToProto(options []*service.FindArchivedOptions) (*pb.FindArchivedOptions, error) { + if options == nil { + return nil, nil + } + + opts := service.MergeFindArchivedOptions(options...) 
+ + var err error + + fo := &pb.FindArchivedOptions{} + + fo.Options, err = PtrServicesFindOptionsToProto(&opts.FindOptions) + if err != nil { + return nil, err + } + + return fo, nil +} + +func ProtoToElPtrFindArchivedOptions(protoOptions *pb.FindArchivedOptions) ([]*service.FindArchivedOptions, error) { + if protoOptions == nil { + return nil, nil + } + + var err error + fo := &service.FindArchivedOptions{} + + o, err := ProtoToPtrServicesFindOptions(protoOptions.Options) + if err != nil { + return nil, err + } + if o != nil { + fo.FindOptions = *o + } + + return []*service.FindArchivedOptions{fo}, nil +} + +func ElPtrUnarchiveOptionsToProto() { + panic("function not provided") // TODO: provide converter +} + +func ProtoToElPtrUnarchiveOptions() { + panic("function not provided") // TODO: provide converter +} + +func ElPtrIntrospectOptionsToProto() { + panic("function not provided") // TODO: provide converter +} + +func ProtoToElPtrIntrospectOptions() { + panic("function not provided") // TODO: provide converter +} + +func ProtoToPtrServicesAggregateOptions(protoOpts *pb.AggregateOptions) ([]*service.AggregateOptions, error) { + if protoOpts == nil { + return nil, nil + } + return []*service.AggregateOptions{&service.AggregateOptions{Fields: protoOpts.Fields}}, nil +} + +func PtrServicesAggregateOptionsToProto(opts *service.AggregateOptions) (*pb.AggregateOptions, error) { + if opts == nil { + return nil, nil + } + return &pb.AggregateOptions{ + Fields: opts.Fields, + }, nil +} + +func ElPtrAggregateOptionsToProto(options []*service.AggregateOptions) (*pb.AggregateOptions, error) { + if options == nil { + return nil, nil + } + opts := service.MergeAggregateOptions(options...) 
+ return PtrServicesAggregateOptionsToProto(opts) +} + +func ProtoToPtrServicesAggregatePublishedOptions(protoOpts *pb.AggregatePublishedOptions) ([]*service.AggregatePublishedOptions, error) { + if protoOpts == nil { + return nil, nil + } + return []*service.AggregatePublishedOptions{&service.AggregatePublishedOptions{Fields: protoOpts.Fields}}, nil +} + +func PtrServicesAggregatePublishedOptionsToProto(opts *service.AggregatePublishedOptions) (*pb.AggregatePublishedOptions, error) { + if opts == nil { + return nil, nil + } + return &pb.AggregatePublishedOptions{ + Fields: opts.Fields, + }, nil +} + +func ElPtrAggregatePublishedOptionsToProto(options []*service.AggregatePublishedOptions) (*pb.AggregatePublishedOptions, error) { + if options == nil { + return nil, nil + } + opts := service.MergeAggregatePublishedOptions(options...) + return PtrServicesAggregatePublishedOptionsToProto(opts) +} + +func PtrSchemaSchemaToProto(sch *schema.Schema) (string, error) { + if sch == nil { + return "", nil + } + res, err := jsoniter.MarshalToString(sch) + if err != nil { + return "", err + } + return res, nil +} + +func ProtoToPtrSchemaSchema(protoSch string) (*schema.Schema, error) { + if protoSch == "" { + return nil, nil + } + sch := schema.New() + err := sch.UnmarshalJSON([]byte(protoSch)) + if err != nil { + return nil, fmt.Errorf("failed to decode schema. 
err: %s", err.Error()) + } + return sch, nil +} + +func ValidationErrorsToProto(errs []error) ([]*pbcommon.Error_BadRequest_FieldViolation, error) { + if errs == nil { + return nil, nil + } + + var validationErrors []*pbcommon.Error_BadRequest_FieldViolation + for _, err := range errs { + + var fieldError errors.FieldError + if errors.As(err, &fieldError) { + validationErrors = append(validationErrors, &pbcommon.Error_BadRequest_FieldViolation{ + Description: errors.Unwrap(fieldError).Error(), + Field: fieldError.Field(), + }) + } + } + + return validationErrors, nil +} + +func ProtoToValidationErrors(protoErrs []*pbcommon.Error_BadRequest_FieldViolation) ([]error, error) { + if protoErrs == nil { + return nil, nil + } + + var validationErrors []error + for _, err := range protoErrs { + validationErrors = append(validationErrors, errors.WithField(errors.New(err.Description), err.Field)) + } + + return validationErrors, nil +} diff --git a/pkg/items/transport/grpc/server.go b/pkg/items/transport/grpc/server.go new file mode 100644 index 0000000000000000000000000000000000000000..4ac8a3b02dd4991518d8b132707bb2dd0ce3c362 --- /dev/null +++ b/pkg/items/transport/grpc/server.go @@ -0,0 +1,34 @@ +package transportgrpc + +import ( + grpcerr "git.perx.ru/perxis/perxis-go/pkg/errors/grpc" + "git.perx.ru/perxis/perxis-go/pkg/items" + "git.perx.ru/perxis/perxis-go/pkg/items/transport" + pb "git.perx.ru/perxis/perxis-go/proto/items" + grpckit "github.com/go-kit/kit/transport/grpc" +) + +func NewServer(svc items.Items, opts ...grpckit.ServerOption) pb.ItemsServer { + eps := transport.Endpoints(svc) + eps = transport.EndpointsSet{ + CreateEndpoint: grpcerr.ServerMiddleware(eps.CreateEndpoint), + IntrospectEndpoint: grpcerr.ServerMiddleware(eps.IntrospectEndpoint), + GetEndpoint: grpcerr.ServerMiddleware(eps.GetEndpoint), + FindEndpoint: grpcerr.ServerMiddleware(eps.FindEndpoint), + UpdateEndpoint: grpcerr.ServerMiddleware(eps.UpdateEndpoint), + DeleteEndpoint: 
grpcerr.ServerMiddleware(eps.DeleteEndpoint), + UndeleteEndpoint: grpcerr.ServerMiddleware(eps.UndeleteEndpoint), + PublishEndpoint: grpcerr.ServerMiddleware(eps.PublishEndpoint), + UnpublishEndpoint: grpcerr.ServerMiddleware(eps.UnpublishEndpoint), + GetPublishedEndpoint: grpcerr.ServerMiddleware(eps.GetPublishedEndpoint), + FindPublishedEndpoint: grpcerr.ServerMiddleware(eps.FindPublishedEndpoint), + GetRevisionEndpoint: grpcerr.ServerMiddleware(eps.GetRevisionEndpoint), + ListRevisionsEndpoint: grpcerr.ServerMiddleware(eps.ListRevisionsEndpoint), + ArchiveEndpoint: grpcerr.ServerMiddleware(eps.ArchiveEndpoint), + FindArchivedEndpoint: grpcerr.ServerMiddleware(eps.FindArchivedEndpoint), + UnarchiveEndpoint: grpcerr.ServerMiddleware(eps.UnarchiveEndpoint), + AggregateEndpoint: grpcerr.ServerMiddleware(eps.AggregateEndpoint), + AggregatePublishedEndpoint: grpcerr.ServerMiddleware(eps.AggregatePublishedEndpoint), + } + return NewGRPCServer(&eps, opts...) +} diff --git a/pkg/items/transport/grpc/server.microgen.go b/pkg/items/transport/grpc/server.microgen.go new file mode 100644 index 0000000000000000000000000000000000000000..a904b1e5a610dc12e38768d88b92eee392a6d7af --- /dev/null +++ b/pkg/items/transport/grpc/server.microgen.go @@ -0,0 +1,292 @@ +// Code generated by microgen 0.9.1. DO NOT EDIT. + +// DO NOT EDIT. 
package transportgrpc

import (
	transport "git.perx.ru/perxis/perxis-go/pkg/items/transport"
	pb "git.perx.ru/perxis/perxis-go/proto/items"
	grpc "github.com/go-kit/kit/transport/grpc"
	empty "github.com/golang/protobuf/ptypes/empty"
	context "golang.org/x/net/context"
)

// itemsServer adapts the go-kit endpoint set to the pb.ItemsServer gRPC
// interface: one grpc.Handler per RPC, each pairing an endpoint with its
// request decoder and response encoder.
type itemsServer struct {
	create             grpc.Handler
	introspect         grpc.Handler
	get                grpc.Handler
	find               grpc.Handler
	update             grpc.Handler
	delete             grpc.Handler
	undelete           grpc.Handler
	publish            grpc.Handler
	unpublish          grpc.Handler
	getPublished       grpc.Handler
	findPublished      grpc.Handler
	getRevision        grpc.Handler
	listRevisions      grpc.Handler
	archive            grpc.Handler
	findArchived       grpc.Handler
	unarchive          grpc.Handler
	aggregate          grpc.Handler
	aggregatePublished grpc.Handler

	// Embedded so the server keeps satisfying pb.ItemsServer if new RPCs
	// are added to the proto definition.
	pb.UnimplementedItemsServer
}

// NewGRPCServer wires every endpoint in the set to its generated
// request/response codecs and returns the assembled pb.ItemsServer.
func NewGRPCServer(endpoints *transport.EndpointsSet, opts ...grpc.ServerOption) pb.ItemsServer {
	return &itemsServer{
		archive: grpc.NewServer(
			endpoints.ArchiveEndpoint,
			_Decode_Archive_Request,
			_Encode_Archive_Response,
			opts...,
		),
		create: grpc.NewServer(
			endpoints.CreateEndpoint,
			_Decode_Create_Request,
			_Encode_Create_Response,
			opts...,
		),
		delete: grpc.NewServer(
			endpoints.DeleteEndpoint,
			_Decode_Delete_Request,
			_Encode_Delete_Response,
			opts...,
		),
		undelete: grpc.NewServer(
			endpoints.UndeleteEndpoint,
			_Decode_Undelete_Request,
			_Encode_Undelete_Response,
			opts...,
		),
		find: grpc.NewServer(
			endpoints.FindEndpoint,
			_Decode_Find_Request,
			_Encode_Find_Response,
			opts...,
		),
		findArchived: grpc.NewServer(
			endpoints.FindArchivedEndpoint,
			_Decode_FindArchived_Request,
			_Encode_FindArchived_Response,
			opts...,
		),
		findPublished: grpc.NewServer(
			endpoints.FindPublishedEndpoint,
			_Decode_FindPublished_Request,
			_Encode_FindPublished_Response,
			opts...,
		),
		get: grpc.NewServer(
			endpoints.GetEndpoint,
			_Decode_Get_Request,
			_Encode_Get_Response,
			opts...,
		),
		getPublished: grpc.NewServer(
			endpoints.GetPublishedEndpoint,
			_Decode_GetPublished_Request,
			_Encode_GetPublished_Response,
			opts...,
		),
		getRevision: grpc.NewServer(
			endpoints.GetRevisionEndpoint,
			_Decode_GetRevision_Request,
			_Encode_GetRevision_Response,
			opts...,
		),
		introspect: grpc.NewServer(
			endpoints.IntrospectEndpoint,
			_Decode_Introspect_Request,
			_Encode_Introspect_Response,
			opts...,
		),
		listRevisions: grpc.NewServer(
			endpoints.ListRevisionsEndpoint,
			_Decode_ListRevisions_Request,
			_Encode_ListRevisions_Response,
			opts...,
		),
		publish: grpc.NewServer(
			endpoints.PublishEndpoint,
			_Decode_Publish_Request,
			_Encode_Publish_Response,
			opts...,
		),
		unarchive: grpc.NewServer(
			endpoints.UnarchiveEndpoint,
			_Decode_Unarchive_Request,
			_Encode_Unarchive_Response,
			opts...,
		),
		unpublish: grpc.NewServer(
			endpoints.UnpublishEndpoint,
			_Decode_Unpublish_Request,
			_Encode_Unpublish_Response,
			opts...,
		),
		update: grpc.NewServer(
			endpoints.UpdateEndpoint,
			_Decode_Update_Request,
			_Encode_Update_Response,
			opts...,
		),
		aggregate: grpc.NewServer(
			endpoints.AggregateEndpoint,
			_Decode_Aggregate_Request,
			_Encode_Aggregate_Response,
			opts...,
		),
		aggregatePublished: grpc.NewServer(
			endpoints.AggregatePublishedEndpoint,
			_Decode_AggregatePublished_Request,
			_Encode_AggregatePublished_Response,
			opts...,
		),
	}
}

// Each method below delegates to the matching handler's ServeGRPC and
// type-asserts the encoded response to the concrete protobuf message.

func (S *itemsServer) Create(ctx context.Context, req *pb.CreateRequest) (*pb.CreateResponse, error) {
	_, resp, err := S.create.ServeGRPC(ctx, req)
	if err != nil {
		return nil, err
	}
	return resp.(*pb.CreateResponse), nil
}

func (S *itemsServer) Introspect(ctx context.Context, req *pb.IntrospectRequest) (*pb.IntrospectResponse, error) {
	_, resp, err := S.introspect.ServeGRPC(ctx, req)
	if err != nil {
		return nil, err
	}
	return resp.(*pb.IntrospectResponse), nil
}

func (S *itemsServer) Get(ctx context.Context, req *pb.GetRequest) (*pb.GetResponse, error) {
	_, resp, err := S.get.ServeGRPC(ctx, req)
	if err != nil {
		return nil, err
	}
	return resp.(*pb.GetResponse), nil
}

func (S *itemsServer) Find(ctx context.Context, req *pb.FindRequest) (*pb.FindResponse, error) {
	_, resp, err := S.find.ServeGRPC(ctx, req)
	if err != nil {
		return nil, err
	}
	return resp.(*pb.FindResponse), nil
}

func (S *itemsServer) Update(ctx context.Context, req *pb.UpdateRequest) (*empty.Empty, error) {
	_, resp, err := S.update.ServeGRPC(ctx, req)
	if err != nil {
		return nil, err
	}
	return resp.(*empty.Empty), nil
}

func (S *itemsServer) Delete(ctx context.Context, req *pb.DeleteRequest) (*empty.Empty, error) {
	_, resp, err := S.delete.ServeGRPC(ctx, req)
	if err != nil {
		return nil, err
	}
	return resp.(*empty.Empty), nil
}

func (S *itemsServer) Undelete(ctx context.Context, req *pb.UndeleteRequest) (*empty.Empty, error) {
	_, resp, err := S.undelete.ServeGRPC(ctx, req)
	if err != nil {
		return nil, err
	}
	return resp.(*empty.Empty), nil
}

func (S *itemsServer) Publish(ctx context.Context, req *pb.PublishRequest) (*empty.Empty, error) {
	_, resp, err := S.publish.ServeGRPC(ctx, req)
	if err != nil {
		return nil, err
	}
	return resp.(*empty.Empty), nil
}

func (S *itemsServer) Unpublish(ctx context.Context, req *pb.UnpublishRequest) (*empty.Empty, error) {
	_, resp, err := S.unpublish.ServeGRPC(ctx, req)
	if err != nil {
		return nil, err
	}
	return resp.(*empty.Empty), nil
}

func (S *itemsServer) GetPublished(ctx context.Context, req *pb.GetPublishedRequest) (*pb.GetPublishedResponse, error) {
	_, resp, err := S.getPublished.ServeGRPC(ctx, req)
	if err != nil {
		return nil, err
	}
	return resp.(*pb.GetPublishedResponse), nil
}

func (S *itemsServer) FindPublished(ctx context.Context, req *pb.FindPublishedRequest) (*pb.FindPublishedResponse, error) {
	_, resp, err := S.findPublished.ServeGRPC(ctx, req)
	if err != nil {
		return nil, err
	}
	return resp.(*pb.FindPublishedResponse), nil
}

func (S *itemsServer) GetRevision(ctx context.Context, req *pb.GetRevisionRequest) (*pb.GetRevisionResponse, error) {
	_, resp, err := S.getRevision.ServeGRPC(ctx, req)
	if err != nil {
		return nil, err
	}
	return resp.(*pb.GetRevisionResponse), nil
}

func (S *itemsServer) ListRevisions(ctx context.Context, req *pb.ListRevisionsRequest) (*pb.ListRevisionsResponse, error) {
	_, resp, err := S.listRevisions.ServeGRPC(ctx, req)
	if err != nil {
		return nil, err
	}
	return resp.(*pb.ListRevisionsResponse), nil
}

func (S *itemsServer) Archive(ctx context.Context, req *pb.ArchiveRequest) (*empty.Empty, error) {
	_, resp, err := S.archive.ServeGRPC(ctx, req)
	if err != nil {
		return nil, err
	}
	return resp.(*empty.Empty), nil
}

func (S *itemsServer) FindArchived(ctx context.Context, req *pb.FindArchivedRequest) (*pb.FindArchivedResponse, error) {
	_, resp, err := S.findArchived.ServeGRPC(ctx, req)
	if err != nil {
		return nil, err
	}
	return resp.(*pb.FindArchivedResponse), nil
}

func (S *itemsServer) Unarchive(ctx context.Context, req *pb.UnarchiveRequest) (*empty.Empty, error) {
	_, resp, err := S.unarchive.ServeGRPC(ctx, req)
	if err != nil {
		return nil, err
	}
	return resp.(*empty.Empty), nil
}

func (S *itemsServer) Aggregate(ctx context.Context, req *pb.AggregateRequest) (*pb.AggregateResponse, error) {
	_, resp, err := S.aggregate.ServeGRPC(ctx, req)
	if err != nil {
		return nil, err
	}
	return resp.(*pb.AggregateResponse), nil
}

func (S *itemsServer) AggregatePublished(ctx context.Context, req *pb.AggregatePublishedRequest) (*pb.AggregatePublishedResponse, error) {
	_, resp, err := S.aggregatePublished.ServeGRPC(ctx, req)
	if err != nil {
		return nil, err
	}
	return resp.(*pb.AggregatePublishedResponse), nil
}
diff --git a/pkg/items/transport/server.microgen.go b/pkg/items/transport/server.microgen.go
new file mode 100644
index
0000000000000000000000000000000000000000..4ba5f4a265125ea7b7168fab8d9c7c0f747f23bf
--- /dev/null
+++ b/pkg/items/transport/server.microgen.go
@@ -0,0 +1,220 @@
// Code generated by microgen 0.9.1. DO NOT EDIT.

package transport

import (
	"context"
	"strings"

	"git.perx.ru/perxis/perxis-go/pkg/errors"
	"git.perx.ru/perxis/perxis-go/pkg/items"
	endpoint "github.com/go-kit/kit/endpoint"
	"github.com/hashicorp/go-multierror"
)

// Endpoints builds the full endpoint set for svc, one go-kit endpoint per
// items.Items method.
func Endpoints(svc items.Items) EndpointsSet {
	return EndpointsSet{
		ArchiveEndpoint:            ArchiveEndpoint(svc),
		CreateEndpoint:             CreateEndpoint(svc),
		DeleteEndpoint:             DeleteEndpoint(svc),
		UndeleteEndpoint:           UndeleteEndpoint(svc),
		FindArchivedEndpoint:       FindArchivedEndpoint(svc),
		FindEndpoint:               FindEndpoint(svc),
		FindPublishedEndpoint:      FindPublishedEndpoint(svc),
		GetEndpoint:                GetEndpoint(svc),
		GetPublishedEndpoint:       GetPublishedEndpoint(svc),
		GetRevisionEndpoint:        GetRevisionEndpoint(svc),
		IntrospectEndpoint:         IntrospectEndpoint(svc),
		ListRevisionsEndpoint:      ListRevisionsEndpoint(svc),
		PublishEndpoint:            PublishEndpoint(svc),
		UnarchiveEndpoint:          UnarchiveEndpoint(svc),
		UnpublishEndpoint:          UnpublishEndpoint(svc),
		UpdateEndpoint:             UpdateEndpoint(svc),
		AggregateEndpoint:          AggregateEndpoint(svc),
		AggregatePublishedEndpoint: AggregatePublishedEndpoint(svc),
	}
}

// CreateEndpoint wraps svc.Create as a go-kit endpoint.
func CreateEndpoint(svc items.Items) endpoint.Endpoint {
	return func(arg0 context.Context, request interface{}) (interface{}, error) {
		req := request.(*CreateRequest)
		res0, res1 := svc.Create(arg0, req.Item, req.Opts...)
		return &CreateResponse{Created: res0}, res1
	}
}

// IntrospectEndpoint wraps svc.Introspect. Unlike the other endpoints it
// post-processes the error: field-level validation errors are moved into
// the response so the transport can report them as structured violations
// instead of a single opaque error.
func IntrospectEndpoint(svc items.Items) endpoint.Endpoint {
	return func(arg0 context.Context, request interface{}) (interface{}, error) {
		req := request.(*IntrospectRequest)
		res0, res1, res2 := svc.Introspect(arg0, req.Item, req.Opts...)
		resp := &IntrospectResponse{
			Item:   res0,
			Schema: res1,
		}
		if res2 != nil {

			err := res2

			// NOTE(review): detection relies on substring matching of error
			// text ("validation error" etc.) — fragile if the service layer
			// rewords its messages; a sentinel/typed error would be safer.
			var merr *multierror.Error
			if (strings.Contains(err.Error(), "validation error") ||
				strings.Contains(err.Error(), "modification error") ||
				strings.Contains(err.Error(), "decode error") ||
				strings.Contains(err.Error(), "encode error")) && errors.As(err, &merr) {

				// Keep only the wrapped errors that carry field information.
				errs := make([]error, 0)
				for _, e := range merr.WrappedErrors() {
					var errField errors.FieldError
					if errors.As(e, &errField) {
						errs = append(errs, e)
					}
				}

				// If any field errors were found, report them in the response
				// and suppress the endpoint-level error.
				if len(errs) > 0 {
					resp.ValidationErrors = errs
					res2 = nil
				}
			}
		}
		return resp, res2
	}
}

// GetEndpoint wraps svc.Get as a go-kit endpoint.
func GetEndpoint(svc items.Items) endpoint.Endpoint {
	return func(arg0 context.Context, request interface{}) (interface{}, error) {
		req := request.(*GetRequest)
		res0, res1 := svc.Get(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.Options...)
		return &GetResponse{Item: res0}, res1
	}
}

// FindEndpoint wraps svc.Find as a go-kit endpoint.
func FindEndpoint(svc items.Items) endpoint.Endpoint {
	return func(arg0 context.Context, request interface{}) (interface{}, error) {
		req := request.(*FindRequest)
		res0, res1, res2 := svc.Find(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Filter, req.Options...)
		return &FindResponse{
			Items: res0,
			Total: res1,
		}, res2
	}
}

// UpdateEndpoint wraps svc.Update as a go-kit endpoint.
func UpdateEndpoint(svc items.Items) endpoint.Endpoint {
	return func(arg0 context.Context, request interface{}) (interface{}, error) {
		req := request.(*UpdateRequest)
		res0 := svc.Update(arg0, req.Item, req.Options...)
		return &UpdateResponse{}, res0
	}
}

// DeleteEndpoint wraps svc.Delete as a go-kit endpoint.
func DeleteEndpoint(svc items.Items) endpoint.Endpoint {
	return func(arg0 context.Context, request interface{}) (interface{}, error) {
		req := request.(*DeleteRequest)
		res0 := svc.Delete(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.Options...)
		return &DeleteResponse{}, res0
	}
}

// UndeleteEndpoint wraps svc.Undelete as a go-kit endpoint.
func UndeleteEndpoint(svc items.Items) endpoint.Endpoint {
	return func(arg0 context.Context, request interface{}) (interface{}, error) {
		req := request.(*UndeleteRequest)
		res0 := svc.Undelete(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.Options...)
		return &UndeleteResponse{}, res0
	}
}

// PublishEndpoint wraps svc.Publish as a go-kit endpoint.
func PublishEndpoint(svc items.Items) endpoint.Endpoint {
	return func(arg0 context.Context, request interface{}) (interface{}, error) {
		req := request.(*PublishRequest)
		res0 := svc.Publish(arg0, req.Item, req.Options...)
		return &PublishResponse{}, res0
	}
}

// UnpublishEndpoint wraps svc.Unpublish as a go-kit endpoint.
func UnpublishEndpoint(svc items.Items) endpoint.Endpoint {
	return func(arg0 context.Context, request interface{}) (interface{}, error) {
		req := request.(*UnpublishRequest)
		res0 := svc.Unpublish(arg0, req.Item, req.Options...)
		return &UnpublishResponse{}, res0
	}
}

// GetPublishedEndpoint wraps svc.GetPublished as a go-kit endpoint.
func GetPublishedEndpoint(svc items.Items) endpoint.Endpoint {
	return func(arg0 context.Context, request interface{}) (interface{}, error) {
		req := request.(*GetPublishedRequest)
		res0, res1 := svc.GetPublished(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.Options...)
		return &GetPublishedResponse{Item: res0}, res1
	}
}

// FindPublishedEndpoint wraps svc.FindPublished as a go-kit endpoint.
func FindPublishedEndpoint(svc items.Items) endpoint.Endpoint {
	return func(arg0 context.Context, request interface{}) (interface{}, error) {
		req := request.(*FindPublishedRequest)
		res0, res1, res2 := svc.FindPublished(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Filter, req.Options...)
		return &FindPublishedResponse{
			Items: res0,
			Total: res1,
		}, res2
	}
}

// GetRevisionEndpoint wraps svc.GetRevision as a go-kit endpoint.
func GetRevisionEndpoint(svc items.Items) endpoint.Endpoint {
	return func(arg0 context.Context, request interface{}) (interface{}, error) {
		req := request.(*GetRevisionRequest)
		res0, res1 := svc.GetRevision(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.RevisionId, req.Options...)
		return &GetRevisionResponse{Item: res0}, res1
	}
}

// ListRevisionsEndpoint wraps svc.ListRevisions as a go-kit endpoint.
func ListRevisionsEndpoint(svc items.Items) endpoint.Endpoint {
	return func(arg0 context.Context, request interface{}) (interface{}, error) {
		req := request.(*ListRevisionsRequest)
		res0, res1 := svc.ListRevisions(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.Options...)
		return &ListRevisionsResponse{Items: res0}, res1
	}
}

// ArchiveEndpoint wraps svc.Archive as a go-kit endpoint.
func ArchiveEndpoint(svc items.Items) endpoint.Endpoint {
	return func(arg0 context.Context, request interface{}) (interface{}, error) {
		req := request.(*ArchiveRequest)
		res0 := svc.Archive(arg0, req.Item, req.Options...)
		return &ArchiveResponse{}, res0
	}
}

// FindArchivedEndpoint wraps svc.FindArchived as a go-kit endpoint.
func FindArchivedEndpoint(svc items.Items) endpoint.Endpoint {
	return func(arg0 context.Context, request interface{}) (interface{}, error) {
		req := request.(*FindArchivedRequest)
		res0, res1, res2 := svc.FindArchived(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Filter, req.Options...)
		return &FindArchivedResponse{
			Items: res0,
			Total: res1,
		}, res2
	}
}

// UnarchiveEndpoint wraps svc.Unarchive as a go-kit endpoint.
func UnarchiveEndpoint(svc items.Items) endpoint.Endpoint {
	return func(arg0 context.Context, request interface{}) (interface{}, error) {
		req := request.(*UnarchiveRequest)
		res0 := svc.Unarchive(arg0, req.Item, req.Options...)
		return &UnarchiveResponse{}, res0
	}
}

// AggregateEndpoint wraps svc.Aggregate as a go-kit endpoint.
func AggregateEndpoint(svc items.Items) endpoint.Endpoint {
	return func(arg0 context.Context, request interface{}) (interface{}, error) {
		req := request.(*AggregateRequest)
		res0, res1 := svc.Aggregate(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Filter, req.Options...)
		return &AggregateResponse{
			Result: res0,
		}, res1
	}
}

// AggregatePublishedEndpoint wraps svc.AggregatePublished as a go-kit endpoint.
func AggregatePublishedEndpoint(svc items.Items) endpoint.Endpoint {
	return func(arg0 context.Context, request interface{}) (interface{}, error) {
		req := request.(*AggregatePublishedRequest)
		res0, res1 := svc.AggregatePublished(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Filter, req.Options...)
		return &AggregatePublishedResponse{
			Result: res0,
		}, res1
	}
}