diff --git a/go.mod b/go.mod
index 14a85cd72aebd444fa384bc1788849052a937c4c..05e37d08d81a77ffc8319f1094590b4ec4f236ab 100644
--- a/go.mod
+++ b/go.mod
@@ -11,13 +11,14 @@ require (
 	github.com/hashicorp/golang-lru v0.5.4
 	github.com/json-iterator/go v1.1.12
 	github.com/mitchellh/mapstructure v1.4.2
+	github.com/nats-io/nats.go v1.23.0
 	github.com/pkg/errors v0.9.1
 	github.com/rs/xid v1.4.0
 	github.com/stretchr/testify v1.8.0
 	go.mongodb.org/mongo-driver v1.11.4
 	go.uber.org/zap v1.19.1
-	golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d
-	golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2
+	golang.org/x/crypto v0.5.0
+	golang.org/x/net v0.5.0
 	google.golang.org/grpc v1.45.0
 	google.golang.org/protobuf v1.28.0
 	gopkg.in/yaml.v3 v3.0.1
@@ -35,6 +36,8 @@ require (
 	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
 	github.com/modern-go/reflect2 v1.0.2 // indirect
 	github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe // indirect
+	github.com/nats-io/nkeys v0.3.0 // indirect
+	github.com/nats-io/nuid v1.0.1 // indirect
 	github.com/pmezard/go-difflib v1.0.0 // indirect
 	github.com/stretchr/objx v0.4.0 // indirect
 	github.com/xdg-go/pbkdf2 v1.0.0 // indirect
@@ -44,7 +47,7 @@ require (
 	go.uber.org/atomic v1.9.0 // indirect
 	go.uber.org/multierr v1.7.0 // indirect
 	golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect
-	golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac // indirect
-	golang.org/x/text v0.3.7 // indirect
+	golang.org/x/sys v0.4.0 // indirect
+	golang.org/x/text v0.6.0 // indirect
 	google.golang.org/genproto v0.0.0-20210917145530-b395a37504d4 // indirect
 )
diff --git a/go.sum b/go.sum
index 511239a6e719ed8e4be2d67f6fc16c582aad70ef..1b2e49b505ab646f23e970362f978a04fae0a2b4 100644
--- a/go.sum
+++ b/go.sum
@@ -99,6 +99,7 @@ github.com/lucasb-eyer/go-colorful v1.0.2/go.mod h1:0MS4r+7BZKSJ5mw4/S5MPN+qHFF1
 github.com/lucasb-eyer/go-colorful v1.0.3/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
 github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
 github.com/mattn/go-runewidth v0.0.8/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
+github.com/minio/highwayhash v1.0.2 h1:Aak5U0nElisjDCfPSG79Tgzkn2gl66NxOMspRrKnA/g=
 github.com/mitchellh/mapstructure v1.4.2 h1:6h7AQ0yhTcIsmFmnAwQls75jp2Gzs4iB8W7pjMO+rqo=
 github.com/mitchellh/mapstructure v1.4.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
 github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
@@ -108,6 +109,14 @@ github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9G
 github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
 github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe h1:iruDEfMl2E6fbMZ9s0scYfZQ84/6SPL6zC8ACM2oIL0=
 github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
+github.com/nats-io/jwt/v2 v2.0.3 h1:i/O6cmIsjpcQyWDYNcq2JyZ3/VTF8SJ4JWluI5OhpvI=
+github.com/nats-io/nats-server/v2 v2.5.0 h1:wsnVaaXH9VRSg+A2MVg5Q727/CqxnmPLGFQ3YZYKTQg=
+github.com/nats-io/nats.go v1.23.0 h1:lR28r7IX44WjYgdiKz9GmUeW0uh/m33uD3yEjLZ2cOE=
+github.com/nats-io/nats.go v1.23.0/go.mod h1:ki/Scsa23edbh8IRZbCuNXR9TDcbvfaSijKtaqQgw+Q=
+github.com/nats-io/nkeys v0.3.0 h1:cgM5tL53EvYRU+2YLXIK0G2mJtK12Ft9oeooSZMA2G8=
+github.com/nats-io/nkeys v0.3.0/go.mod h1:gvUNGjVcM2IPr5rCsRsC6Wb3Hr2CQAm08dsxtV6A5y4=
+github.com/nats-io/nuid v1.0.1 h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw=
+github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c=
 github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
 github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
@@ -160,8 +169,10 @@ go.uber.org/zap v1.19.1/go.mod h1:j3DNczoxDZroyBnOT1L/Q79cfUMGZxlv/9dzN7SM1rI=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
-golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d h1:sK3txAijHtOK88l68nt020reeT1ZdKLIYetKl95FzVY=
+golang.org/x/crypto v0.0.0-20210314154223-e6e6c4f2bb5b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
 golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
+golang.org/x/crypto v0.5.0 h1:U/0M97KRkSFvyD/3FSmdP5W5swImpNgle/EHFhOsQPE=
+golang.org/x/crypto v0.5.0/go.mod h1:NK/OQwhpMQP3MwtdjgLlYHnH9ebylxKWv3e0fK+mkQU=
 golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
 golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
@@ -178,9 +189,11 @@ golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn
 golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
 golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
 golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
 golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
-golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2 h1:CIJ76btIcR3eFI5EgSo6k1qKw9KJexJuRLI9G7Hp5wE=
 golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.5.0 h1:GyT4nK/YDHSqa1c4753ouYCDajOYKTja9Xb/OHtgvSw=
+golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws=
 golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
 golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
 golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -202,16 +215,18 @@ golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7w
 golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac h1:oN6lz7iLW/YC7un8pq+9bOLyXrprv2+DKfkJY+2LJJw=
-golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.4.0 h1:Zr2JFtRQNX3BCZ8YtxRE9hNJYC8J6I1MVbMg6owUp18=
+golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
 golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
 golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/text v0.6.0 h1:3XmdazWV+ubf7QgHSTWeykHOci5oeekaGJBLkrkaw4k=
+golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac h1:7zkz7BUtwNFFqcowJ+RIgu2MaV/MapERkDIy+mwPyjs=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
diff --git a/pkg/collections/collection.go b/pkg/collections/collection.go
new file mode 100644
index 0000000000000000000000000000000000000000..5b3c5dd310ac8af32ee11e9e18c4aa1a3d4c72f3
--- /dev/null
+++ b/pkg/collections/collection.go
@@ -0,0 +1,180 @@
+package collections
+
+import (
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/permission"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+)
+
+// Config holds the source space, environment and collection identifiers and the source schema for the collection
+type Config struct {
+	SourceSpaceID      string
+	SourceEnvID        string
+	SourceCollectionID string
+	SourceSchema       *schema.Schema
+}
+
+// Access describes the access restrictions that apply to the collection's
+// items for the current user
+type Access struct {
+	Actions         []permission.Action // Allowed actions on collection items
+	HiddenFields    []string            // Fields that are not shown in the UI and not returned by the API
+	ReadonlyFields  []string            // Fields that cannot be edited and are not updated via the API
+	WriteonlyFields []string            // Fields shown in the UI but not returned by the API
+}
+
+func (a Access) Clone() *Access {
+
+	clone := &Access{
+		Actions:         make([]permission.Action, len(a.Actions)),
+		HiddenFields:    make([]string, len(a.HiddenFields)),
+		ReadonlyFields:  make([]string, len(a.ReadonlyFields)),
+		WriteonlyFields: make([]string, len(a.WriteonlyFields)),
+	}
+
+	copy(clone.Actions, a.Actions)
+	copy(clone.HiddenFields, a.HiddenFields)
+	copy(clone.ReadonlyFields, a.ReadonlyFields)
+	copy(clone.WriteonlyFields, a.WriteonlyFields)
+
+	return clone
+}
+
+func (a Access) Can(action permission.Action) bool {
+	for _, act := range a.Actions {
+		if act == action {
+			return true
+		}
+	}
+	return false
+}
+
+type Collection struct {
+	ID      string         `json:"id" bson:"id"`
+	SpaceID string         `json:"spaceId" bson:"-"`
+	EnvID   string         `json:"envId" bson:"-"`
+	Name    string         `json:"name" bson:"name"`
+	Single  *bool          `json:"single" bson:"single,omitempty"` // The collection may contain only a single document
+	System  *bool          `json:"system" bson:"system,omitempty"` // System collection
+	NoData  *bool          `json:"no_data" bson:"no_data"`         // The collection holds no items; its schema exists only to be included into other schemas
+	Hidden  bool           `json:"hidden" bson:"hidden"`           // The collection is hidden in the admin UI
+	Schema  *schema.Schema `json:"schema" bson:"schema"`
+	Access  *Access        `json:"access" bson:"-"` // Access restrictions for collection items; a nil value means unrestricted access
+
+	// StateInfo reflects the collection state:
+	// - State: collection state identifier (new/preparing/ready/error/changed)
+	// - Info: additional information about the state (for example, the error
+	//   that occurred while applying the schema to the collection)
+	// - StartedAt: the time at which the collection entered the `Preparing` state
+	StateInfo *StateInfo `json:"state_info" bson:"state_info,omitempty"` // todo: show in the UI as readonly
+
+	// View - if set, the collection is a View (a projection of part of another
+	// collection's data according to View.Filter)
+	View *View `json:"view,omitempty" bson:"view,omitempty"`
+
+	// Tags - the collection's tags. They are added to events when events are sent
+	Tags []string `json:"tags,omitempty" bson:"tags,omitempty"`
+
+	Config *Config `json:"-" bson:"-"`
+}
+
+type View struct {
+	SpaceID      string `json:"space_id" bson:"space_id"`             // SpaceID of the original collection
+	EnvID        string `json:"environment_id" bson:"environment_id"` // EnvID of the original collection
+	CollectionID string `json:"collection_id" bson:"collection_id"`   // CollectionID of the original collection
+	Filter       string `json:"filter" bson:"filter,omitempty"`       // Filter rules applied to the records of the original collection
+}
+
+type StateInfo struct {
+	State     State     `json:"state" bson:"state"`
+	Info      string    `json:"info" bson:"info"`
+	StartedAt time.Time `json:"started_at,omitempty" bson:"started_at,omitempty"`
+}
+
+type State int
+
+func (s State) String() string {
+	var state string
+
+	switch s {
+	case StateNew:
+		state = "New"
+	case StatePreparing:
+		state = "Preparing"
+	case StateReady:
+		state = "Ready"
+	case StateError:
+		state = "Error"
+	case StateChanged:
+		state = "Changed"
+	default:
+		state = "Unknown"
+	}
+
+	return state
+}
+
+const (
+	StateNew State = iota
+	StatePreparing
+	StateReady
+	StateError
+	StateChanged
+)
+
+func (c Collection) Clone() *Collection {
+
+	clone := &Collection{
+		ID:      c.ID,
+		SpaceID: c.SpaceID,
+		EnvID:   c.EnvID,
+		Name:    c.Name,
+		NoData:  c.NoData,
+		Hidden:  c.Hidden,
+	}
+
+	if c.Single != nil {
+		single := *c.Single
+		clone.Single = &single
+	}
+	if c.System != nil {
+		system := *c.System
+		clone.System = &system
+	}
+	if c.Schema != nil {
+		clone.Schema = c.Schema.Clone(false)
+	}
+	if c.Access != nil {
+		clone.Access = c.Access.Clone()
+	}
+	if c.StateInfo != nil {
+		info := *c.StateInfo
+		clone.StateInfo = &info
+	}
+	if c.View != nil {
+		view := *c.View
+		clone.View = &view
+	}
+	if c.Config != nil {
+		cfg := *c.Config
+		clone.Config = &cfg
+	}
+	if c.Tags != nil {
+		clone.Tags = append([]string{}, c.Tags...)
+	}
+
+	return clone
+}
+
+func (c Collection) IsSingle() bool {
+	return c.Single != nil && *c.Single
+}
+
+func (c Collection) IsNoData() bool {
+	return c.NoData != nil && *c.NoData
+}
+
+func (c Collection) IsView() bool {
+	return c.View != nil
+}
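
Example (review note, not applied by this diff): a minimal sketch of how Clone yields an
independent deep copy and how Access.Can gates actions. The zero permission.Action value
below is only a stand-in for a real constant from the permission package.

package collections_test

import (
	"fmt"

	"git.perx.ru/perxis/perxis-go/pkg/collections"
	"git.perx.ru/perxis/perxis-go/pkg/permission"
)

func ExampleCollection_Clone() {
	single := true
	var read permission.Action // stand-in for a real action constant

	orig := &collections.Collection{
		ID:     "posts",
		Name:   "Posts",
		Single: &single,
		Access: &collections.Access{Actions: []permission.Action{read}},
	}

	clone := orig.Clone()
	*clone.Single = false // Single was deep-copied, so the original is untouched

	fmt.Println(orig.IsSingle(), clone.IsSingle(), clone.Access.Can(read))
	// Output: true false true
}
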
diff --git a/pkg/collections/mocks/Collections.go b/pkg/collections/mocks/Collections.go
new file mode 100644
index 0000000000000000000000000000000000000000..e52dffabfd0c8764ab1edd08979b873666d1935e
--- /dev/null
+++ b/pkg/collections/mocks/Collections.go
@@ -0,0 +1,163 @@
+// Code generated by mockery v2.14.0. DO NOT EDIT.
+
+package mocks
+
+import (
+	context "context"
+
+	collections "git.perx.ru/perxis/perxis-go/pkg/collections"
+	schema "git.perx.ru/perxis/perxis-go/pkg/schema"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// Collections is an autogenerated mock type for the Collections type
+type Collections struct {
+	mock.Mock
+}
+
+// Create provides a mock function with given fields: ctx, collection
+func (_m *Collections) Create(ctx context.Context, collection *collections.Collection) (*collections.Collection, error) {
+	ret := _m.Called(ctx, collection)
+
+	var r0 *collections.Collection
+	if rf, ok := ret.Get(0).(func(context.Context, *collections.Collection) *collections.Collection); ok {
+		r0 = rf(ctx, collection)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*collections.Collection)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, *collections.Collection) error); ok {
+		r1 = rf(ctx, collection)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Delete provides a mock function with given fields: ctx, spaceId, envId, collectionId
+func (_m *Collections) Delete(ctx context.Context, spaceId string, envId string, collectionId string) error {
+	ret := _m.Called(ctx, spaceId, envId, collectionId)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string) error); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Get provides a mock function with given fields: ctx, spaceId, envId, collectionId, options
+func (_m *Collections) Get(ctx context.Context, spaceId string, envId string, collectionId string, options ...*collections.GetOptions) (*collections.Collection, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 *collections.Collection
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, ...*collections.GetOptions) *collections.Collection); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*collections.Collection)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, ...*collections.GetOptions) error); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, options...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// List provides a mock function with given fields: ctx, spaceId, envId, filter
+func (_m *Collections) List(ctx context.Context, spaceId string, envId string, filter *collections.Filter) ([]*collections.Collection, error) {
+	ret := _m.Called(ctx, spaceId, envId, filter)
+
+	var r0 []*collections.Collection
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, *collections.Filter) []*collections.Collection); ok {
+		r0 = rf(ctx, spaceId, envId, filter)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*collections.Collection)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, *collections.Filter) error); ok {
+		r1 = rf(ctx, spaceId, envId, filter)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// SetSchema provides a mock function with given fields: ctx, spaceId, envId, collectionId, _a4
+func (_m *Collections) SetSchema(ctx context.Context, spaceId string, envId string, collectionId string, _a4 *schema.Schema) error {
+	ret := _m.Called(ctx, spaceId, envId, collectionId, _a4)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *schema.Schema) error); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, _a4)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// SetState provides a mock function with given fields: ctx, spaceId, envId, collectionId, state
+func (_m *Collections) SetState(ctx context.Context, spaceId string, envId string, collectionId string, state *collections.StateInfo) error {
+	ret := _m.Called(ctx, spaceId, envId, collectionId, state)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *collections.StateInfo) error); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, state)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Update provides a mock function with given fields: ctx, coll
+func (_m *Collections) Update(ctx context.Context, coll *collections.Collection) error {
+	ret := _m.Called(ctx, coll)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *collections.Collection) error); ok {
+		r0 = rf(ctx, coll)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+type mockConstructorTestingTNewCollections interface {
+	mock.TestingT
+	Cleanup(func())
+}
+
+// NewCollections creates a new instance of Collections. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+func NewCollections(t mockConstructorTestingTNewCollections) *Collections {
+	mock := &Collections{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
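
Example (review note, not applied by this diff): a sketch of driving the generated mock
from a test with testify's On/Return; the test scenario itself is hypothetical.

package mocks_test

import (
	"context"
	"testing"

	"git.perx.ru/perxis/perxis-go/pkg/collections"
	"git.perx.ru/perxis/perxis-go/pkg/collections/mocks"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"
)

func TestGetWithMock(t *testing.T) {
	svc := mocks.NewCollections(t) // expectations are asserted automatically on cleanup

	svc.On("Get", mock.Anything, "space", "master", "posts").
		Return(&collections.Collection{ID: "posts"}, nil)

	coll, err := svc.Get(context.Background(), "space", "master", "posts")
	require.NoError(t, err)
	require.Equal(t, "posts", coll.ID)
}
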
diff --git a/pkg/collections/options.go b/pkg/collections/options.go
new file mode 100644
index 0000000000000000000000000000000000000000..7bdf2b70bef9349d90cf012c5cf2e996698f13f0
--- /dev/null
+++ b/pkg/collections/options.go
@@ -0,0 +1,15 @@
+package collections
+
+type GetOptions struct {
+	DisableSchemaIncludes bool
+}
+
+func MergeGetOptions(opts ...*GetOptions) *GetOptions {
+	o := &GetOptions{}
+	for _, opt := range opts {
+		if opt != nil && opt.DisableSchemaIncludes {
+			o.DisableSchemaIncludes = true
+		}
+	}
+	return o
+}
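
Example (review note, not applied by this diff): MergeGetOptions folds the variadic
options into a single value, so any option that sets DisableSchemaIncludes wins.

package collections_test

import (
	"fmt"

	"git.perx.ru/perxis/perxis-go/pkg/collections"
)

func ExampleMergeGetOptions() {
	merged := collections.MergeGetOptions(
		&collections.GetOptions{},
		&collections.GetOptions{DisableSchemaIncludes: true},
	)
	fmt.Println(merged.DisableSchemaIncludes)
	// Output: true
}
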
diff --git a/pkg/collections/service.go b/pkg/collections/service.go
new file mode 100644
index 0000000000000000000000000000000000000000..e73133e7c5ecbf6f58d29b2f07343f05bb13d5b0
--- /dev/null
+++ b/pkg/collections/service.go
@@ -0,0 +1,30 @@
+package collections
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+)
+
+// @microgen grpc
+// @protobuf git.perx.ru/perxis/perxis-go/proto/collections
+// @grpc-addr content.collections.Collections
+type Collections interface {
+	Create(ctx context.Context, collection *Collection) (created *Collection, err error)
+	Get(ctx context.Context, spaceId, envId, collectionId string, options ...*GetOptions) (collection *Collection, err error)
+	List(ctx context.Context, spaceId, envId string, filter *Filter) (collections []*Collection, err error)
+	Update(ctx context.Context, coll *Collection) (err error)
+	SetSchema(ctx context.Context, spaceId, envId, collectionId string, schema *schema.Schema) (err error)
+
+	// @microgen -
+	SetState(ctx context.Context, spaceId, envId, collectionId string, state *StateInfo) (err error)
+	Delete(ctx context.Context, spaceId, envId, collectionId string) (err error)
+}
+
+type Filter struct {
+	IncludeNoData bool     `json:"include_no_data,omitempty"`
+	IncludeHidden bool     `json:"include_hidden,omitempty"`
+	ExcludeSystem bool     `json:"exclude_system,omitempty"`
+	Name          []string `json:"name,omitempty"`
+	ID            []string `json:"id,omitempty"`
+}
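
Example (review note, not applied by this diff): a sketch of consuming the Collections
interface; svc may be any implementation, for instance the gRPC client added further down.

package collections_test

import (
	"context"
	"fmt"

	"git.perx.ru/perxis/perxis-go/pkg/collections"
)

// listVisible prints the collections a regular user is expected to see:
// system collections are excluded and hidden ones are not requested.
func listVisible(ctx context.Context, svc collections.Collections, spaceID, envID string) error {
	colls, err := svc.List(ctx, spaceID, envID, &collections.Filter{ExcludeSystem: true})
	if err != nil {
		return err
	}
	for _, c := range colls {
		fmt.Println(c.ID, c.Name)
	}
	return nil
}
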
diff --git a/pkg/collections/transport/client.microgen.go b/pkg/collections/transport/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..7fd5b53f360a927e48c00edcd3f4dc7606dd47fe
--- /dev/null
+++ b/pkg/collections/transport/client.microgen.go
@@ -0,0 +1,107 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+	"errors"
+
+	collections "git.perx.ru/perxis/perxis-go/pkg/collections"
+	schema "git.perx.ru/perxis/perxis-go/pkg/schema"
+	codes "google.golang.org/grpc/codes"
+	status "google.golang.org/grpc/status"
+)
+
+func (set EndpointsSet) Create(arg0 context.Context, arg1 *collections.Collection) (res0 *collections.Collection, res1 error) {
+	request := CreateRequest{Collection: arg1}
+	response, res1 := set.CreateEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*CreateResponse).Created, res1
+}
+
+func (set EndpointsSet) Get(arg0 context.Context, arg1 string, arg2 string, arg3 string, arg4 ...*collections.GetOptions) (res0 *collections.Collection, res1 error) {
+	request := GetRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		Options:      arg4,
+		SpaceId:      arg1,
+	}
+	response, res1 := set.GetEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*GetResponse).Collection, res1
+}
+
+func (set EndpointsSet) List(arg0 context.Context, arg1 string, arg2 string, arg3 *collections.Filter) (res0 []*collections.Collection, res1 error) {
+	request := ListRequest{
+		EnvId:   arg2,
+		Filter:  arg3,
+		SpaceId: arg1,
+	}
+	response, res1 := set.ListEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*ListResponse).Collections, res1
+}
+
+func (set EndpointsSet) Update(arg0 context.Context, arg1 *collections.Collection) (res0 error) {
+	request := UpdateRequest{Coll: arg1}
+	_, res0 = set.UpdateEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) SetSchema(arg0 context.Context, arg1 string, arg2 string, arg3 string, arg4 *schema.Schema) (res0 error) {
+	request := SetSchemaRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		Schema:       arg4,
+		SpaceId:      arg1,
+	}
+	_, res0 = set.SetSchemaEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) SetState(arg0 context.Context, arg1 string, arg2 string, arg3 string, arg4 *collections.StateInfo) (res0 error) {
+	return
+}
+
+func (set EndpointsSet) Delete(arg0 context.Context, arg1 string, arg2 string, arg3 string) (res0 error) {
+	request := DeleteRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		SpaceId:      arg1,
+	}
+	_, res0 = set.DeleteEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
diff --git a/pkg/collections/transport/endpoints.microgen.go b/pkg/collections/transport/endpoints.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..0b4643bbf83e24b8f751d5dbaac676ea2a4f2f5b
--- /dev/null
+++ b/pkg/collections/transport/endpoints.microgen.go
@@ -0,0 +1,15 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import endpoint "github.com/go-kit/kit/endpoint"
+
+// EndpointsSet implements Collections API and used for transport purposes.
+type EndpointsSet struct {
+	CreateEndpoint    endpoint.Endpoint
+	GetEndpoint       endpoint.Endpoint
+	ListEndpoint      endpoint.Endpoint
+	UpdateEndpoint    endpoint.Endpoint
+	SetSchemaEndpoint endpoint.Endpoint
+	DeleteEndpoint    endpoint.Endpoint
+}
diff --git a/pkg/collections/transport/exchanges.microgen.go b/pkg/collections/transport/exchanges.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..3aabcbc890d2e5691fe6167024254bba71f5fe2d
--- /dev/null
+++ b/pkg/collections/transport/exchanges.microgen.go
@@ -0,0 +1,59 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	collections "git.perx.ru/perxis/perxis-go/pkg/collections"
+	schema "git.perx.ru/perxis/perxis-go/pkg/schema"
+)
+
+type (
+	CreateRequest struct {
+		Collection *collections.Collection `json:"collection"`
+	}
+	CreateResponse struct {
+		Created *collections.Collection `json:"created"`
+	}
+
+	GetRequest struct {
+		SpaceId      string                    `json:"space_id"`
+		EnvId        string                    `json:"env_id"`
+		CollectionId string                    `json:"collection_id"`
+		Options      []*collections.GetOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	GetResponse struct {
+		Collection *collections.Collection `json:"collection"`
+	}
+
+	ListRequest struct {
+		SpaceId string              `json:"space_id"`
+		EnvId   string              `json:"env_id"`
+		Filter  *collections.Filter `json:"filter"`
+	}
+	ListResponse struct {
+		Collections []*collections.Collection `json:"collections"`
+	}
+
+	UpdateRequest struct {
+		Coll *collections.Collection `json:"coll"`
+	}
+	// Formal exchange type, please do not delete.
+	UpdateResponse struct{}
+
+	SetSchemaRequest struct {
+		SpaceId      string         `json:"space_id"`
+		EnvId        string         `json:"env_id"`
+		CollectionId string         `json:"collection_id"`
+		Schema       *schema.Schema `json:"schema"`
+	}
+	// Formal exchange type, please do not delete.
+	SetSchemaResponse struct{}
+
+	DeleteRequest struct {
+		SpaceId      string `json:"space_id"`
+		EnvId        string `json:"env_id"`
+		CollectionId string `json:"collection_id"`
+	}
+	// Formal exchange type, please do not delete.
+	DeleteResponse struct{}
+)
diff --git a/pkg/collections/transport/grpc/client.microgen.go b/pkg/collections/transport/grpc/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..0f41276cea0c2112aa1eb316b83d745c88208b07
--- /dev/null
+++ b/pkg/collections/transport/grpc/client.microgen.go
@@ -0,0 +1,61 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/collections/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/collections"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	grpc "google.golang.org/grpc"
+)
+
+func NewGRPCClient(conn *grpc.ClientConn, addr string, opts ...grpckit.ClientOption) transport.EndpointsSet {
+	if addr == "" {
+		addr = "content.collections.Collections"
+	}
+	return transport.EndpointsSet{
+		CreateEndpoint: grpckit.NewClient(
+			conn, addr, "Create",
+			_Encode_Create_Request,
+			_Decode_Create_Response,
+			pb.CreateResponse{},
+			opts...,
+		).Endpoint(),
+		DeleteEndpoint: grpckit.NewClient(
+			conn, addr, "Delete",
+			_Encode_Delete_Request,
+			_Decode_Delete_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		GetEndpoint: grpckit.NewClient(
+			conn, addr, "Get",
+			_Encode_Get_Request,
+			_Decode_Get_Response,
+			pb.GetResponse{},
+			opts...,
+		).Endpoint(),
+		ListEndpoint: grpckit.NewClient(
+			conn, addr, "List",
+			_Encode_List_Request,
+			_Decode_List_Response,
+			pb.ListResponse{},
+			opts...,
+		).Endpoint(),
+		SetSchemaEndpoint: grpckit.NewClient(
+			conn, addr, "SetSchema",
+			_Encode_SetSchema_Request,
+			_Decode_SetSchema_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		UpdateEndpoint: grpckit.NewClient(
+			conn, addr, "Update",
+			_Encode_Update_Request,
+			_Decode_Update_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+	}
+}
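
Example (review note, not applied by this diff): a sketch of wiring the generated go-kit
gRPC client. The returned EndpointsSet satisfies the collections.Collections interface;
grpc.WithInsecure is used only to keep the sketch short.

package transportgrpc_test

import (
	"git.perx.ru/perxis/perxis-go/pkg/collections/transport"
	transportgrpc "git.perx.ru/perxis/perxis-go/pkg/collections/transport/grpc"
	"google.golang.org/grpc"
)

// dialCollections connects to the given target and returns the endpoint set.
// Passing an empty addr falls back to "content.collections.Collections".
func dialCollections(target string) (transport.EndpointsSet, error) {
	conn, err := grpc.Dial(target, grpc.WithInsecure())
	if err != nil {
		return transport.EndpointsSet{}, err
	}
	return transportgrpc.NewGRPCClient(conn, ""), nil
}
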
diff --git a/pkg/collections/transport/grpc/protobuf_endpoint_converters.microgen.go b/pkg/collections/transport/grpc/protobuf_endpoint_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..34c541f8f4e05556c58c2a0a8547812a3afe4e1e
--- /dev/null
+++ b/pkg/collections/transport/grpc/protobuf_endpoint_converters.microgen.go
@@ -0,0 +1,273 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// Please, do not change functions names!
+package transportgrpc
+
+import (
+	"context"
+	"errors"
+
+	transport "git.perx.ru/perxis/perxis-go/pkg/collections/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/collections"
+	empty "github.com/golang/protobuf/ptypes/empty"
+)
+
+func _Encode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*transport.CreateRequest)
+	reqCollection, err := PtrCollectionToProto(req.Collection)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateRequest{Collection: reqCollection}, nil
+}
+
+func _Encode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*transport.GetRequest)
+	return &pb.GetRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		SpaceId:      req.SpaceId,
+		Options:      GetOptionsToProto(req.Options),
+	}, nil
+}
+
+func _Encode_List_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListRequest")
+	}
+	req := request.(*transport.ListRequest)
+	reqFilter, err := PtrFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.ListRequest{
+		EnvId:   req.EnvId,
+		Filter:  reqFilter,
+		SpaceId: req.SpaceId,
+	}, nil
+}
+
+func _Encode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UpdateRequest")
+	}
+	req := request.(*transport.UpdateRequest)
+	reqColl, err := PtrCollectionToProto(req.Coll)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.UpdateRequest{Collection: reqColl}, nil
+}
+
+func _Encode_SetSchema_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil SetSchemaRequest")
+	}
+	req := request.(*transport.SetSchemaRequest)
+	reqSchema, err := PtrSchemaSchemaToProto(req.Schema)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.SetSchemaRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		Schema:       reqSchema,
+		SpaceId:      req.SpaceId,
+	}, nil
+}
+
+func _Encode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*transport.DeleteRequest)
+	return &pb.DeleteRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		SpaceId:      req.SpaceId,
+	}, nil
+}
+
+func _Encode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*transport.CreateResponse)
+	respCreated, err := PtrCollectionToProto(resp.Created)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateResponse{Created: respCreated}, nil
+}
+
+func _Encode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*transport.GetResponse)
+	respCollection, err := PtrCollectionToProto(resp.Collection)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetResponse{Collection: respCollection}, nil
+}
+
+func _Encode_List_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListResponse")
+	}
+	resp := response.(*transport.ListResponse)
+	respCollections, err := ListPtrCollectionToProto(resp.Collections)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.ListResponse{Collections: respCollections}, nil
+}
+
+func _Encode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_SetSchema_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*pb.CreateRequest)
+	reqCollection, err := ProtoToPtrCollection(req.Collection)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateRequest{Collection: reqCollection}, nil
+}
+
+func _Decode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*pb.GetRequest)
+	return &transport.GetRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		SpaceId:      string(req.SpaceId),
+		Options:      ProtoToGetOptions(req.Options),
+	}, nil
+}
+
+func _Decode_List_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListRequest")
+	}
+	req := request.(*pb.ListRequest)
+	reqFilter, err := ProtoToPtrFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.ListRequest{
+		EnvId:   string(req.EnvId),
+		Filter:  reqFilter,
+		SpaceId: string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UpdateRequest")
+	}
+	req := request.(*pb.UpdateRequest)
+	reqColl, err := ProtoToPtrCollection(req.Collection)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.UpdateRequest{Coll: reqColl}, nil
+}
+
+func _Decode_SetSchema_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil SetSchemaRequest")
+	}
+	req := request.(*pb.SetSchemaRequest)
+	reqSchema, err := ProtoToPtrSchemaSchema(req.Schema)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.SetSchemaRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		Schema:       reqSchema,
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*pb.DeleteRequest)
+	return &transport.DeleteRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*pb.CreateResponse)
+	respCreated, err := ProtoToPtrCollection(resp.Created)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateResponse{Created: respCreated}, nil
+}
+
+func _Decode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*pb.GetResponse)
+	respCollection, err := ProtoToPtrCollection(resp.Collection)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetResponse{Collection: respCollection}, nil
+}
+
+func _Decode_List_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListResponse")
+	}
+	resp := response.(*pb.ListResponse)
+	respCollections, err := ProtoToListPtrCollection(resp.Collections)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.ListResponse{Collections: respCollections}, nil
+}
+
+func _Decode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_SetSchema_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
diff --git a/pkg/collections/transport/grpc/protobuf_type_converters.microgen.go b/pkg/collections/transport/grpc/protobuf_type_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..d19bee8b078380d6af962eac8e7738ba0170cd98
--- /dev/null
+++ b/pkg/collections/transport/grpc/protobuf_type_converters.microgen.go
@@ -0,0 +1,229 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// It is better for you if you do not change functions names!
+// This file will never be overwritten.
+package transportgrpc
+
+import (
+	"fmt"
+
+	service "git.perx.ru/perxis/perxis-go/pkg/collections"
+	"git.perx.ru/perxis/perxis-go/pkg/permission"
+	schema "git.perx.ru/perxis/perxis-go/pkg/schema"
+	pb "git.perx.ru/perxis/perxis-go/proto/collections"
+	commonpb "git.perx.ru/perxis/perxis-go/proto/common"
+	jsoniter "github.com/json-iterator/go"
+	"google.golang.org/protobuf/types/known/timestamppb"
+)
+
+func PtrSchemaSchemaToProto(schema *schema.Schema) (string, error) {
+	if schema == nil {
+		return "", nil
+	}
+	res, err := jsoniter.MarshalToString(schema)
+	if err != nil {
+		return "", err
+	}
+	return res, nil
+}
+
+func ProtoToPtrSchemaSchema(protoSchema string) (*schema.Schema, error) {
+	if protoSchema == "" {
+		return nil, nil
+	}
+	sch := schema.New()
+	err := sch.UnmarshalJSON([]byte(protoSchema))
+	if err != nil {
+		return nil, fmt.Errorf("failed to decode schema. err: %s", err.Error())
+	}
+	return sch, nil
+}
+
+func PtrCollectionToProto(coll *service.Collection) (*pb.Collection, error) {
+	if coll == nil {
+		return nil, nil
+	}
+
+	var access *pb.Access
+
+	if coll.Access != nil {
+		actions := make([]commonpb.Action, len(coll.Access.Actions))
+		for i, a := range coll.Access.Actions {
+			actions[i] = commonpb.Action(a)
+		}
+		access = &pb.Access{
+			Actions:         actions,
+			HiddenFields:    coll.Access.HiddenFields,
+			ReadonlyFields:  coll.Access.ReadonlyFields,
+			WriteonlyFields: coll.Access.WriteonlyFields,
+		}
+	}
+	protoCollection := &pb.Collection{
+		Id:      coll.ID,
+		SpaceId: coll.SpaceID,
+		EnvId:   coll.EnvID,
+		Name:    coll.Name,
+		Single:  coll.Single,
+		System:  coll.System,
+		NoData:  coll.NoData,
+		Access:  access,
+		Hidden:  coll.Hidden,
+		Tags:    coll.Tags,
+	}
+
+	if coll.StateInfo != nil {
+		protoCollection.StateInfo = &pb.Collection_StateInfo{
+			State:     pb.Collection_State(coll.StateInfo.State),
+			Info:      coll.StateInfo.Info,
+			StartedAt: timestamppb.New(coll.StateInfo.StartedAt),
+		}
+	}
+
+	sch, err := PtrSchemaSchemaToProto(coll.Schema)
+	if err != nil {
+		return nil, err
+	}
+	protoCollection.Schema = sch
+
+	if coll.View != nil {
+		protoCollection.View = &pb.Collection_View{
+			SpaceId:      coll.View.SpaceID,
+			EnvId:        coll.View.EnvID,
+			CollectionId: coll.View.CollectionID,
+			Filter:       coll.View.Filter,
+		}
+	}
+
+	return protoCollection, nil
+}
+
+func ProtoToPtrCollection(protoCollection *pb.Collection) (*service.Collection, error) {
+	if protoCollection == nil {
+		return nil, nil
+	}
+
+	var access *service.Access
+
+	if protoCollection.Access != nil {
+		actions := make([]permission.Action, len(protoCollection.Access.Actions))
+		for i, a := range protoCollection.Access.Actions {
+			actions[i] = permission.Action(a)
+		}
+		access = &service.Access{
+			Actions:         actions,
+			HiddenFields:    protoCollection.Access.HiddenFields,
+			ReadonlyFields:  protoCollection.Access.ReadonlyFields,
+			WriteonlyFields: protoCollection.Access.WriteonlyFields,
+		}
+	}
+	collection := &service.Collection{
+		ID:      protoCollection.Id,
+		SpaceID: protoCollection.SpaceId,
+		EnvID:   protoCollection.EnvId,
+		Name:    protoCollection.Name,
+		Single:  protoCollection.Single,
+		System:  protoCollection.System,
+		NoData:  protoCollection.NoData,
+		Access:  access,
+		Hidden:  protoCollection.Hidden,
+		Tags:    protoCollection.Tags,
+	}
+
+	if protoCollection.StateInfo != nil {
+		collection.StateInfo = &service.StateInfo{
+			State:     service.State(protoCollection.StateInfo.State),
+			Info:      protoCollection.StateInfo.Info,
+			StartedAt: protoCollection.StateInfo.StartedAt.AsTime(),
+		}
+	}
+
+	schm, err := ProtoToPtrSchemaSchema(protoCollection.Schema)
+	if err != nil {
+		return nil, err
+	}
+	collection.Schema = schm
+
+	if protoCollection.View != nil {
+		collection.View = &service.View{
+			SpaceID:      protoCollection.View.SpaceId,
+			EnvID:        protoCollection.View.EnvId,
+			CollectionID: protoCollection.View.CollectionId,
+			Filter:       protoCollection.View.Filter,
+		}
+	}
+
+	return collection, nil
+}
+
+func ListPtrCollectionToProto(collections []*service.Collection) ([]*pb.Collection, error) {
+	protoCollections := make([]*pb.Collection, 0, len(collections))
+	for _, collection := range collections {
+		protoCollection, err := PtrCollectionToProto(collection)
+		if err != nil {
+			return nil, err
+		}
+		protoCollections = append(protoCollections, protoCollection)
+	}
+	return protoCollections, nil
+}
+
+func ProtoToListPtrCollection(protoCollections []*pb.Collection) ([]*service.Collection, error) {
+	collections := make([]*service.Collection, 0, len(protoCollections))
+	for _, protoCollection := range protoCollections {
+		collection, err := ProtoToPtrCollection(protoCollection)
+		if err != nil {
+			return nil, err
+		}
+		collections = append(collections, collection)
+	}
+	return collections, nil
+}
+
+func ProtoToPtrBool(protoSingle *bool) (*bool, error) {
+	panic("function not provided") // TODO: provide converter
+}
+
+func PtrBoolToProto(single *bool) (*bool, error) {
+	panic("function not provided") // TODO: provide converter
+}
+
+func PtrFilterToProto(filter *service.Filter) (*pb.ListRequest_Filter, error) {
+	if filter == nil {
+		return nil, nil
+	}
+	return &pb.ListRequest_Filter{
+		ExcludeSystem: filter.ExcludeSystem,
+		IncludeNoData: filter.IncludeNoData,
+		IncludeHidden: filter.IncludeHidden,
+		Name:          filter.Name,
+		Id:            filter.ID,
+	}, nil
+}
+
+func ProtoToPtrFilter(protoFilter *pb.ListRequest_Filter) (*service.Filter, error) {
+	if protoFilter == nil {
+		return nil, nil
+	}
+	return &service.Filter{
+		IncludeNoData: protoFilter.IncludeNoData,
+		IncludeHidden: protoFilter.IncludeHidden,
+		ExcludeSystem: protoFilter.ExcludeSystem,
+		Name:          protoFilter.Name,
+		ID:            protoFilter.Id,
+	}, nil
+}
+
+func GetOptionsToProto(options []*service.GetOptions) *pb.GetOptions {
+	opts := service.MergeGetOptions(options...)
+	return &pb.GetOptions{DisableSchemaIncludes: opts.DisableSchemaIncludes}
+}
+
+func ProtoToGetOptions(protoOptions *pb.GetOptions) []*service.GetOptions {
+	if protoOptions == nil {
+		return nil
+	}
+	opts := &service.GetOptions{
+		DisableSchemaIncludes: protoOptions.DisableSchemaIncludes,
+	}
+	return []*service.GetOptions{opts}
+}
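
Example (review note, not applied by this diff): the converters carry a *schema.Schema
across the wire as its JSON string form; a nil schema becomes the empty string and decodes
back to nil.

package transportgrpc_test

import (
	transportgrpc "git.perx.ru/perxis/perxis-go/pkg/collections/transport/grpc"
	"git.perx.ru/perxis/perxis-go/pkg/schema"
)

// roundTripSchema encodes a schema to its transport representation and decodes it back.
func roundTripSchema(s *schema.Schema) (*schema.Schema, error) {
	encoded, err := transportgrpc.PtrSchemaSchemaToProto(s)
	if err != nil {
		return nil, err
	}
	return transportgrpc.ProtoToPtrSchemaSchema(encoded)
}
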
diff --git a/pkg/collections/transport/grpc/server.microgen.go b/pkg/collections/transport/grpc/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..169e726d32ec7cfbbc465a7100d10aad2b80d481
--- /dev/null
+++ b/pkg/collections/transport/grpc/server.microgen.go
@@ -0,0 +1,112 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// DO NOT EDIT.
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/collections/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/collections"
+	grpc "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	context "golang.org/x/net/context"
+)
+
+type collectionsServer struct {
+	create    grpc.Handler
+	get       grpc.Handler
+	list      grpc.Handler
+	update    grpc.Handler
+	setSchema grpc.Handler
+	delete    grpc.Handler
+
+	pb.UnimplementedCollectionsServer
+}
+
+func NewGRPCServer(endpoints *transport.EndpointsSet, opts ...grpc.ServerOption) pb.CollectionsServer {
+	return &collectionsServer{
+		create: grpc.NewServer(
+			endpoints.CreateEndpoint,
+			_Decode_Create_Request,
+			_Encode_Create_Response,
+			opts...,
+		),
+		delete: grpc.NewServer(
+			endpoints.DeleteEndpoint,
+			_Decode_Delete_Request,
+			_Encode_Delete_Response,
+			opts...,
+		),
+		get: grpc.NewServer(
+			endpoints.GetEndpoint,
+			_Decode_Get_Request,
+			_Encode_Get_Response,
+			opts...,
+		),
+		list: grpc.NewServer(
+			endpoints.ListEndpoint,
+			_Decode_List_Request,
+			_Encode_List_Response,
+			opts...,
+		),
+		setSchema: grpc.NewServer(
+			endpoints.SetSchemaEndpoint,
+			_Decode_SetSchema_Request,
+			_Encode_SetSchema_Response,
+			opts...,
+		),
+		update: grpc.NewServer(
+			endpoints.UpdateEndpoint,
+			_Decode_Update_Request,
+			_Encode_Update_Response,
+			opts...,
+		),
+	}
+}
+
+func (S *collectionsServer) Create(ctx context.Context, req *pb.CreateRequest) (*pb.CreateResponse, error) {
+	_, resp, err := S.create.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.CreateResponse), nil
+}
+
+func (S *collectionsServer) Get(ctx context.Context, req *pb.GetRequest) (*pb.GetResponse, error) {
+	_, resp, err := S.get.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetResponse), nil
+}
+
+func (S *collectionsServer) List(ctx context.Context, req *pb.ListRequest) (*pb.ListResponse, error) {
+	_, resp, err := S.list.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.ListResponse), nil
+}
+
+func (S *collectionsServer) Update(ctx context.Context, req *pb.UpdateRequest) (*empty.Empty, error) {
+	_, resp, err := S.update.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *collectionsServer) SetSchema(ctx context.Context, req *pb.SetSchemaRequest) (*empty.Empty, error) {
+	_, resp, err := S.setSchema.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *collectionsServer) Delete(ctx context.Context, req *pb.DeleteRequest) (*empty.Empty, error) {
+	_, resp, err := S.delete.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
diff --git a/pkg/collections/transport/server.microgen.go b/pkg/collections/transport/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..958e28ab78913b23389469ce53316be45dd82c48
--- /dev/null
+++ b/pkg/collections/transport/server.microgen.go
@@ -0,0 +1,69 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+
+	collections "git.perx.ru/perxis/perxis-go/pkg/collections"
+	endpoint "github.com/go-kit/kit/endpoint"
+)
+
+func Endpoints(svc collections.Collections) EndpointsSet {
+	return EndpointsSet{
+		CreateEndpoint:    CreateEndpoint(svc),
+		DeleteEndpoint:    DeleteEndpoint(svc),
+		GetEndpoint:       GetEndpoint(svc),
+		ListEndpoint:      ListEndpoint(svc),
+		SetSchemaEndpoint: SetSchemaEndpoint(svc),
+		UpdateEndpoint:    UpdateEndpoint(svc),
+	}
+}
+
+func CreateEndpoint(svc collections.Collections) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*CreateRequest)
+		res0, res1 := svc.Create(arg0, req.Collection)
+		return &CreateResponse{Created: res0}, res1
+	}
+}
+
+func GetEndpoint(svc collections.Collections) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetRequest)
+		res0, res1 := svc.Get(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Options...)
+		return &GetResponse{Collection: res0}, res1
+	}
+}
+
+func ListEndpoint(svc collections.Collections) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*ListRequest)
+		res0, res1 := svc.List(arg0, req.SpaceId, req.EnvId, req.Filter)
+		return &ListResponse{Collections: res0}, res1
+	}
+}
+
+func UpdateEndpoint(svc collections.Collections) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*UpdateRequest)
+		res0 := svc.Update(arg0, req.Coll)
+		return &UpdateResponse{}, res0
+	}
+}
+
+func SetSchemaEndpoint(svc collections.Collections) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*SetSchemaRequest)
+		res0 := svc.SetSchema(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Schema)
+		return &SetSchemaResponse{}, res0
+	}
+}
+
+func DeleteEndpoint(svc collections.Collections) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*DeleteRequest)
+		res0 := svc.Delete(arg0, req.SpaceId, req.EnvId, req.CollectionId)
+		return &DeleteResponse{}, res0
+	}
+}
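
Example (review note, not applied by this diff): a sketch of exposing a Collections
implementation over gRPC via the generated endpoints and server. RegisterCollectionsServer
is the conventional protoc-generated registration function and is assumed to exist in the
proto/collections package.

package transportgrpc_test

import (
	"net"

	"git.perx.ru/perxis/perxis-go/pkg/collections"
	"git.perx.ru/perxis/perxis-go/pkg/collections/transport"
	transportgrpc "git.perx.ru/perxis/perxis-go/pkg/collections/transport/grpc"
	pb "git.perx.ru/perxis/perxis-go/proto/collections"
	"google.golang.org/grpc"
)

// serve registers the collections service on a plain gRPC server and blocks.
func serve(svc collections.Collections, addr string) error {
	lis, err := net.Listen("tcp", addr)
	if err != nil {
		return err
	}
	endpoints := transport.Endpoints(svc)
	srv := grpc.NewServer()
	pb.RegisterCollectionsServer(srv, transportgrpc.NewGRPCServer(&endpoints))
	return srv.Serve(lis)
}
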
diff --git a/pkg/data/data.go b/pkg/data/data.go
new file mode 100644
index 0000000000000000000000000000000000000000..0540055ad4e213f666b0cf72019b9b4b9c39fbe8
--- /dev/null
+++ b/pkg/data/data.go
@@ -0,0 +1,294 @@
+package data
+
+import (
+	"strconv"
+	"strings"
+)
+
+const DefaultFieldDelimiter = "."
+
+type DeleteValueType struct{}
+
+var DeleteValue DeleteValueType
+
+// TODO: add support for arrays and arrays of objects everywhere
+
+// Based on the https://github.com/knadh/koanf library
+
+// Flatten takes a map[string]interface{} and traverses it and flattens
+// nested children into keys delimited by delim.
+//
+// It's important to note that all nested maps should be
+// map[string]interface{} and not map[interface{}]interface{}.
+// Use IntfaceKeysToStrings() to convert if necessary.
+//
+// eg: `{ "parent": { "child": 123 }}` becomes `{ "parent.child": 123 }`
+// In addition, it keeps track of and returns a map of the delimited keypaths with
+// a slice of key parts, for eg: { "parent.child": ["parent", "child"] }. This
+// parts list is used to remember the key path's original structure to
+// unflatten later.
+func Flatten(m map[string]interface{}, keys []string, delim string) (map[string]interface{}, map[string][]string) {
+	var (
+		out    = make(map[string]interface{})
+		keyMap = make(map[string][]string)
+	)
+
+	flatten(m, keys, delim, out, keyMap)
+	return out, keyMap
+}
+
+func flatten(m map[string]interface{}, keys []string, delim string, out map[string]interface{}, keyMap map[string][]string) {
+	for key, val := range m {
+		// Copy the incoming key paths into a fresh list
+		// and append the current key in the iteration.
+		kp := make([]string, 0, len(keys)+1)
+		kp = append(kp, keys...)
+		kp = append(kp, key)
+
+		switch cur := val.(type) {
+		case map[string]interface{}:
+			// Empty map.
+			if len(cur) == 0 {
+				newKey := strings.Join(kp, delim)
+				out[newKey] = val
+				keyMap[newKey] = kp
+				continue
+			}
+
+			// It's a nested map. Flatten it recursively.
+			flatten(cur, kp, delim, out, keyMap)
+		default:
+			newKey := strings.Join(kp, delim)
+			out[newKey] = val
+			keyMap[newKey] = kp
+		}
+	}
+}
+
+// Unflatten takes a flattened key:value map (non-nested with delimited keys)
+// and returns a nested map where the keys are split into hierarchies by the given
+// delimiter. For instance, `parent.child.key: 1` to `{parent: {child: {key: 1}}}`
+//
+// It's important to note that all nested maps should be
+// map[string]interface{} and not map[interface{}]interface{}.
+// Use IntfaceKeysToStrings() to convert if necessary.
+func Unflatten(m map[string]interface{}, delim string) map[string]interface{} {
+	out := make(map[string]interface{})
+
+	// Iterate through the flat conf map.
+	for k, v := range m {
+		var (
+			keys = strings.Split(k, delim)
+			next = out
+		)
+
+		// Iterate through key parts, for eg:, parent.child.key
+		// will be ["parent", "child", "key"]
+		for _, k := range keys[:len(keys)-1] {
+			sub, ok := next[k]
+			if !ok {
+				// If the key does not exist in the map, create it.
+				sub = make(map[string]interface{})
+				next[k] = sub
+			}
+			if n, ok := sub.(map[string]interface{}); ok {
+				next = n
+			}
+		}
+
+		// Assign the value.
+		next[keys[len(keys)-1]] = v
+	}
+	return out
+}
+
+// Delete removes the entry present at a given path from the interface
+// if it is an object or an array.
+// The path is a delimited field string, for eg:, parent.child.key.
+// For arrays of objects the entry is removed from every element.
+//
+// It's important to note that all nested maps should be
+// map[string]interface{} and not map[interface{}]interface{}.
+// Use IntfaceKeysToStrings() to convert if necessary.
+func Delete(field string, data any, delim ...string) error {
+	return set(getPath(field, delim...), data, DeleteValue)
+}
+
+// DeleteMany removes the entries present at the given paths from the interface.
+func DeleteMany(paths []string, value any, delim ...string) {
+	if value == nil || len(paths) == 0 {
+		return
+	}
+	for _, path := range paths {
+		Delete(path, value, delim...)
+	}
+}
+
+// Search recursively searches the interface for a given path. The path is
+// the key map slice, for eg:, parent.child.key -> [parent child key].
+//
+// It's important to note that all nested maps should be
+// map[string]interface{} and not map[interface{}]interface{}.
+// Use IntfaceKeysToStrings() to convert if necessary.
+func Search(in interface{}, path []string) interface{} {
+	switch val := in.(type) {
+
+	case map[string]interface{}:
+		next, ok := val[path[0]]
+		if ok {
+			if len(path) == 1 {
+				return next
+			}
+			switch v := next.(type) {
+			case map[string]interface{}, []interface{}:
+				return Search(v, path[1:])
+			}
+		}
+	case []interface{}:
+		out := make([]interface{}, len(val))
+		for i, e := range val {
+			out[i] = Search(e, path)
+		}
+		return out
+	}
+	return nil
+}
+
+func getPath(field string, delim ...string) []string {
+	if field == "" {
+		return nil
+	}
+
+	d := DefaultFieldDelimiter
+	if len(delim) > 0 {
+		d = delim[0]
+	}
+	return strings.Split(field, d)
+}
+
+func Set(field string, data, value any, delim ...string) error {
+	return set(getPath(field, delim...), data, value)
+}
+
+func set(path []string, data, value any) error {
+	if len(path) == 0 {
+		return nil
+	}
+
+	switch v := data.(type) {
+	case map[string]interface{}:
+		if len(path) == 1 {
+
+			if _, ok := value.(DeleteValueType); ok {
+				delete(v, path[0])
+				return nil
+			}
+
+			v[path[0]] = value
+			return nil
+		}
+
+		next, ok := v[path[0]]
+		if !ok {
+			next = make(map[string]interface{})
+			v[path[0]] = next
+		}
+		return set(path[1:], next, value)
+
+	case []interface{}:
+		idx, err := strconv.Atoi(path[0])
+		if err != nil {
+			// The segment is not an index: apply the same path to every element.
+			for _, vv := range v {
+				if err = set(path, vv, value); err != nil {
+					return err
+				}
+			}
+			return nil
+		}
+		if idx < 0 || idx >= len(v) {
+			return nil
+		}
+		return set(path[1:], v[idx], value)
+	}
+
+	return nil
+}
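+
+// A minimal usage sketch for Set (illustrative only): a numeric path segment addresses
+// a single slice element, while a non-numeric segment is applied to every element:
+//
+//	doc := map[string]interface{}{"a": []any{map[string]any{"b": "0"}, map[string]any{"b": "1"}}}
+//	_ = Set("a.0.b", doc, "x") // only the first element is updated
+//	_ = Set("a.b", doc, "y")   // every element is updated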
+
+func Get(field string, data any, delim ...string) (any, bool) {
+	return get(getPath(field, delim...), data)
+}
+
+func get(path []string, data any) (any, bool) {
+	if len(path) == 0 {
+		return data, true
+	}
+
+	switch v := data.(type) {
+	case map[string]interface{}:
+		val, ok := v[path[0]]
+		if !ok {
+			return nil, false
+		}
+		return get(path[1:], val)
+	case []interface{}:
+		idx, err := strconv.Atoi(path[0])
+		if err != nil || idx < 0 || idx >= len(v) {
+			return nil, false
+		}
+		return get(path[1:], v[idx])
+	}
+
+	return nil, false
+}
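+
+// A minimal usage sketch for Get (illustrative only):
+//
+//	doc := map[string]any{"a": []any{map[string]any{"b": "0"}, map[string]any{"b": "1"}}}
+//	v, ok := Get("a.1.b", doc) // v == "1", ok == true
+//	_, ok = Get("a.2.b", doc)  // ok == false: index out of range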
+
+// Keep retains only the entries at the given paths in the interface and removes
+// all other data, if it is an object or an array.
+// Each path is a delimited string, e.g. parent.child.key.
+func Keep(paths []string, data any, delim ...string) {
+	if len(paths) == 0 {
+		// Nothing to keep. The caller is expected to discard the data itself:
+		// reassigning the parameter here would have no effect outside this function.
+		return
+	}
+	switch val := data.(type) {
+	case map[string]interface{}:
+		for k, v := range val {
+			if Contains(k, paths) {
+				continue
+			}
+			p := getObjectPaths(k, paths, delim...)
+			if len(p) == 0 {
+				delete(val, k)
+				continue
+			}
+			Keep(p, v, delim...)
+		}
+	case []interface{}:
+		for _, ar := range val {
+			Keep(paths, ar, delim...)
+		}
+	}
+}
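+
+// A minimal usage sketch for Keep (illustrative only):
+//
+//	doc := map[string]interface{}{"a": map[string]interface{}{"b": "1", "c": "2"}, "z": "3"}
+//	Keep([]string{"a.b"}, doc) // doc == {"a": {"b": "1"}}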
+
+func getObjectPaths(prefix string, arr []string, delim ...string) []string {
+	var res []string
+	d := DefaultFieldDelimiter
+	if len(delim) > 0 {
+		d = delim[0]
+	}
+	for _, v := range arr {
+		if strings.HasPrefix(v, prefix+d) {
+			res = append(res, strings.TrimPrefix(v, prefix+d))
+		}
+	}
+	return res
+}
+
+func CloneMap(m map[string]interface{}) map[string]interface{} {
+	if m == nil {
+		return m
+	}
+
+	c := make(map[string]interface{}, len(m))
+	for k, v := range m {
+		c[k] = v
+	}
+	return c
+}
diff --git a/pkg/data/data_test.go b/pkg/data/data_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..785eefbb868a68c9d8c6b2f75b8f861ab2041e11
--- /dev/null
+++ b/pkg/data/data_test.go
@@ -0,0 +1,374 @@
+package data
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestDelete(t *testing.T) {
+	tests := []struct {
+		name  string
+		in    interface{}
+		field string
+		out   interface{}
+	}{
+		{
+			"simple",
+			map[string]interface{}{"a": "1", "z": "2"},
+			"a",
+			map[string]interface{}{"z": "2"},
+		},
+		{
+			"object",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+			"a",
+			map[string]interface{}{},
+		},
+		{
+			"object field",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+			"a.a",
+			map[string]interface{}{"a": map[string]interface{}{"z": "2"}},
+		},
+		{
+			"object field from map with array",
+			map[string]interface{}{"a": []interface{}{
+				map[string]interface{}{"a": "1", "b": "2"},
+				map[string]interface{}{"a": "3", "b": "4"},
+			}, "z": "2"},
+			"a.a",
+			map[string]interface{}{"a": []interface{}{
+				map[string]interface{}{"b": "2"},
+				map[string]interface{}{"b": "4"},
+			}, "z": "2"},
+		},
+		{
+			"object field from map with array of arrays",
+			map[string]interface{}{"a": []interface{}{
+				[]interface{}{
+					map[string]interface{}{"a": "1", "b": "2"},
+				}, []interface{}{
+					map[string]interface{}{"a": "3", "b": "4"},
+				},
+			}, "z": "2"},
+			"a.a",
+			map[string]interface{}{"a": []interface{}{
+				[]interface{}{
+					map[string]interface{}{"b": "2"},
+				}, []interface{}{
+					map[string]interface{}{"b": "4"},
+				},
+			}, "z": "2"},
+		},
+		// We decided not to delete empty objects/slices automatically.
+		//{
+		//	"empty object",
+		//	map[string]interface{}{"a": map[string]interface{}{"a": map[string]interface{}{}}},
+		//	[]string{"a", "a"},
+		//	map[string]interface{}{},
+		//}, {
+		//	"empty array",
+		//	map[string]interface{}{"a": map[string]interface{}{"a": []interface{}{}}},
+		//	[]string{"a", "a"},
+		//	map[string]interface{}{},
+		//},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			Delete(tt.field, tt.in)
+			assert.Equal(t, tt.out, tt.in)
+		})
+	}
+}
+
+func TestDeleteMany(t *testing.T) {
+	tests := []struct {
+		name  string
+		in    interface{}
+		paths []string
+		out   interface{}
+	}{
+		{
+			"simple",
+			map[string]interface{}{"a": "1", "z": "2", "d": "2"},
+			[]string{"a", "d"},
+			map[string]interface{}{"z": "2"},
+		},
+		{
+			"object",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+			[]string{"a"},
+			map[string]interface{}{},
+		},
+		{
+			"object field",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2", "b": "4"}},
+			[]string{"a.a", "a.b"},
+			map[string]interface{}{"a": map[string]interface{}{"z": "2"}},
+		},
+		{
+			"object field from map with array",
+			map[string]interface{}{"a": []interface{}{
+				map[string]interface{}{"a": "1", "b": "2", "c": 0},
+				map[string]interface{}{"a": "3", "b": "4", "c": 0},
+			}, "z": "2"},
+			[]string{"a.a", "a.c"},
+			map[string]interface{}{"a": []interface{}{
+				map[string]interface{}{"b": "2"},
+				map[string]interface{}{"b": "4"},
+			}, "z": "2"},
+		},
+		{
+			"object field from map with array of arrays",
+			map[string]interface{}{"a": []interface{}{
+				[]interface{}{
+					map[string]interface{}{"a": "1", "b": "2"},
+				}, []interface{}{
+					map[string]interface{}{"a": "3", "b": "4"},
+				},
+			}, "z": "2"},
+			[]string{"a.a"},
+			map[string]interface{}{"a": []interface{}{
+				[]interface{}{
+					map[string]interface{}{"b": "2"},
+				}, []interface{}{
+					map[string]interface{}{"b": "4"},
+				},
+			}, "z": "2"},
+		},
+		{
+			"empty object",
+			map[string]interface{}{"a": map[string]interface{}{"a": map[string]interface{}{}}},
+			[]string{"a.a", "a"},
+			map[string]interface{}{},
+		},
+		{
+			"field not exist in object",
+			map[string]interface{}{"a": map[string]interface{}{"a": map[string]interface{}{}}},
+			[]string{"a.b"},
+			map[string]interface{}{"a": map[string]interface{}{"a": map[string]interface{}{}}},
+		},
+		{
+			"empty array",
+			map[string]interface{}{"a": map[string]interface{}{"a": []interface{}{}}},
+			[]string{"a.a", "a"},
+			map[string]interface{}{},
+		},
+		{
+			"field not exist in array",
+			map[string]interface{}{"a": map[string]interface{}{"a": []interface{}{}}},
+			[]string{"a.b"},
+			map[string]interface{}{"a": map[string]interface{}{"a": []interface{}{}}},
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			DeleteMany(tt.paths, tt.in)
+			assert.Equal(t, tt.out, tt.in)
+		})
+	}
+}
+
+func TestSearch(t *testing.T) {
+	tests := []struct {
+		name string
+		in   interface{}
+		path []string
+		out  interface{}
+	}{
+		{
+			"simple",
+			map[string]interface{}{"a": "1", "z": "2"},
+			[]string{"a"},
+			"1",
+		},
+		{
+			"object",
+			map[string]interface{}{
+				"a": map[string]interface{}{"a": "1", "z": "2"},
+				"b": map[string]interface{}{"c": "1", "d": "2"},
+			},
+			[]string{"a"},
+			map[string]interface{}{"a": "1", "z": "2"},
+		},
+		{
+			"object field",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+			[]string{"a", "a"},
+			"1",
+		},
+		{
+			"object field from map with array",
+			map[string]interface{}{"a": []interface{}{
+				map[string]interface{}{"a": "1", "b": "2"},
+				map[string]interface{}{"a": "3", "b": "4"},
+			}, "z": "2"},
+			[]string{"a", "a"},
+			[]interface{}{"1", "3"},
+		},
+		{
+			"object field from array of arrays",
+			[]interface{}{
+				[]interface{}{
+					map[string]interface{}{"a": "1", "b": "2"},
+				}, []interface{}{
+					map[string]interface{}{"a": "3", "b": "4"},
+				},
+			},
+			[]string{"a"},
+			[]interface{}{[]interface{}{"1"}, []interface{}{"3"}},
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			out := Search(tt.in, tt.path)
+			assert.Equal(t, tt.out, out)
+		})
+	}
+}
+
+func TestSet(t *testing.T) {
+	type args struct {
+		field string
+		data  any
+		value any
+	}
+	tests := []struct {
+		name     string
+		args     args
+		wantData any
+		wantErr  assert.ErrorAssertionFunc
+	}{
+		{"Simple", args{"a", map[string]interface{}{"a": "0"}, "a"}, map[string]interface{}{"a": "a"}, assert.NoError},
+		{"New key", args{"b", map[string]interface{}{"a": "0"}, "a"}, map[string]interface{}{"a": "0", "b": "a"}, assert.NoError},
+		{"Path", args{"a.b.c", map[string]interface{}{"a": map[string]any{"b": map[string]any{"c": "0"}}}, "c"}, map[string]any{"a": map[string]any{"b": map[string]any{"c": "c"}}}, assert.NoError},
+		{"Delete", args{"a.b", map[string]interface{}{"a": map[string]any{"b": map[string]any{"c": "0"}}}, DeleteValue}, map[string]any{"a": map[string]any{}}, assert.NoError},
+		{"Create map", args{"b.a", map[string]interface{}{"a": "0"}, "a"}, map[string]interface{}{"a": "0", "b": map[string]interface{}{"a": "a"}}, assert.NoError},
+		{"Map value", args{"a", map[string]interface{}{"a": "0"}, map[string]interface{}{"a": "a"}}, map[string]interface{}{"a": map[string]interface{}{"a": "a"}}, assert.NoError},
+		{"Slice", args{"a.a", map[string]interface{}{"a": []any{map[string]any{"a": "0"}, map[string]any{"a": "0", "b": "b"}}}, "a"}, map[string]interface{}{"a": []any{map[string]any{"a": "a"}, map[string]any{"a": "a", "b": "b"}}}, assert.NoError},
+		{"Slice", args{"a.0.a", map[string]interface{}{"a": []any{map[string]any{"a": "0"}, map[string]any{"a": "0", "b": "b"}}}, "a"}, map[string]interface{}{"a": []any{map[string]any{"a": "a"}, map[string]any{"a": "0", "b": "b"}}}, assert.NoError},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			data := tt.args.data
+			tt.wantErr(t, Set(tt.args.field, data, tt.args.value), fmt.Sprintf("Set(%v, %v, %v)", tt.args.field, data, tt.args.value))
+			assert.Equal(t, tt.wantData, data)
+		})
+	}
+}
+
+func TestGet(t *testing.T) {
+	type args struct {
+		field string
+		data  any
+	}
+	tests := []struct {
+		name  string
+		args  args
+		want  any
+		found bool
+	}{
+		{"Direct value", args{"", 100}, 100, true},
+		{"Not found", args{"a", 100}, nil, false},
+		{"Simple", args{"a", map[string]any{"a": "0"}}, "0", true},
+		{"Path", args{"a.b.c", map[string]any{"a": map[string]any{"b": map[string]any{"c": "c"}}}}, "c", true},
+		{"Incorrect path", args{"a.b.wrong", map[string]any{"a": map[string]any{"b": map[string]any{"c": "c"}}}}, nil, false},
+		{"Map value", args{"a.b", map[string]any{"a": map[string]any{"b": map[string]any{"c": "c"}}}}, map[string]any{"c": "c"}, true},
+		{"Slice", args{"a.1.b", map[string]any{"a": []any{map[string]any{"b": "0"}, map[string]any{"b": "1"}}}}, "1", true},
+		{"Slice out of range", args{"a.2.b", map[string]any{"a": []any{map[string]any{"b": "0"}, map[string]any{"b": "1"}}}}, nil, false},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, got1 := Get(tt.args.field, tt.args.data)
+			assert.Equalf(t, tt.want, got, "Get(%v, %v)", tt.args.field, tt.args.data)
+			assert.Equalf(t, tt.found, got1, "Get(%v, %v)", tt.args.field, tt.args.data)
+		})
+	}
+}
+
+func TestKeep(t *testing.T) {
+	tests := []struct {
+		name string
+		in   interface{}
+		path []string
+		out  interface{}
+	}{
+		{
+			"simple",
+			map[string]interface{}{"a": "1", "z": "2"},
+			[]string{"a"},
+			map[string]interface{}{"a": "1"},
+		},
+		{
+			"object",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+			[]string{"a"},
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+		},
+		{
+			"no field",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+			[]string{"z"},
+			map[string]interface{}{},
+		},
+		{
+			"object field",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+			[]string{"a.a"},
+			map[string]interface{}{"a": map[string]interface{}{"a": "1"}},
+		},
+		{
+			"object field from map with array",
+			map[string]interface{}{"a": []interface{}{
+				map[string]interface{}{"a": "1", "b": "2"},
+				map[string]interface{}{"a": "3", "b": "4"},
+			}, "z": "2"},
+			[]string{"a.a", "z"},
+			map[string]interface{}{"a": []interface{}{
+				map[string]interface{}{"a": "1"},
+				map[string]interface{}{"a": "3"},
+			}, "z": "2"},
+		},
+		{
+			"object field from map with array of arrays",
+			map[string]interface{}{"a": []interface{}{
+				[]interface{}{
+					map[string]interface{}{"a": "1", "b": "2"},
+				}, []interface{}{
+					map[string]interface{}{"a": "3", "b": "4"},
+				},
+			}, "z": "2"},
+			[]string{"a.b", "z"},
+			map[string]interface{}{"a": []interface{}{
+				[]interface{}{
+					map[string]interface{}{"b": "2"},
+				}, []interface{}{
+					map[string]interface{}{"b": "4"},
+				},
+			}, "z": "2"},
+		},
+		{
+			"empty object",
+			map[string]interface{}{"a": map[string]interface{}{"a": map[string]interface{}{}}},
+			[]string{"a.b"},
+			map[string]interface{}{"a": map[string]interface{}{}},
+		}, {
+			"empty array",
+			map[string]interface{}{"a": map[string]interface{}{"a": []interface{}{}}},
+			[]string{"a.b"},
+			map[string]interface{}{"a": map[string]interface{}{}},
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			Keep(tt.path, tt.in)
+			assert.Equal(t, tt.out, tt.in)
+		})
+	}
+}
diff --git a/pkg/events/events.go b/pkg/events/events.go
new file mode 100644
index 0000000000000000000000000000000000000000..454f690a196b08782d8110b0c2c13b186853a776
--- /dev/null
+++ b/pkg/events/events.go
@@ -0,0 +1,55 @@
+package events
+
+type Subscription interface {
+	Unsubscribe() error
+}
+
+type Connection interface {
+	Publish(subject string, msg any, opts ...PublishOption) error
+	Subscribe(subject string, handler any, opts ...SubscribeOption) (Subscription, error)
+	Close() error
+}
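+
+// A minimal usage sketch (illustrative only; conn is any Connection implementation,
+// Event is an application-defined type understood by the configured encoder):
+//
+//	sub, _ := conn.Subscribe("items.>", func(e *Event) { /* handle */ }, FilterTag("audit"))
+//	defer sub.Unsubscribe()
+//	_ = conn.Publish("items.created", &Event{}, Tag("audit"))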
+
+type PublishOptions struct {
+	Tags []string
+}
+
+func NewPublishOptions(opts ...PublishOption) *PublishOptions {
+	o := &PublishOptions{}
+	for _, opt := range opts {
+		if opt != nil {
+			opt(o)
+		}
+	}
+	return o
+}
+
+type PublishOption func(options *PublishOptions)
+
+func Tag(tag ...string) PublishOption {
+	return func(o *PublishOptions) {
+		o.Tags = tag
+	}
+}
+
+type SubscribeOptions struct {
+	FilterTags []string
+}
+
+func NewSubscribeOptions(opts ...SubscribeOption) *SubscribeOptions {
+	o := &SubscribeOptions{}
+	for _, opt := range opts {
+		if opt != nil {
+			opt(o)
+		}
+	}
+	return o
+}
+
+type SubscribeOption func(options *SubscribeOptions)
+
+func FilterTag(tag ...string) SubscribeOption {
+	return func(o *SubscribeOptions) {
+		o.FilterTags = tag
+	}
+}
diff --git a/pkg/events/mocks/Connection.go b/pkg/events/mocks/Connection.go
new file mode 100644
index 0000000000000000000000000000000000000000..a295924662ba10243a788c219359d60720789759
--- /dev/null
+++ b/pkg/events/mocks/Connection.go
@@ -0,0 +1,96 @@
+// Code generated by mockery v2.20.0. DO NOT EDIT.
+
+package mocks
+
+import (
+	events "git.perx.ru/perxis/perxis-go/pkg/events"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// Connection is an autogenerated mock type for the Connection type
+type Connection struct {
+	mock.Mock
+}
+
+// Close provides a mock function with given fields:
+func (_m *Connection) Close() error {
+	ret := _m.Called()
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func() error); ok {
+		r0 = rf()
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Publish provides a mock function with given fields: subject, msg, opts
+func (_m *Connection) Publish(subject string, msg interface{}, opts ...events.PublishOption) error {
+	_va := make([]interface{}, len(opts))
+	for _i := range opts {
+		_va[_i] = opts[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, subject, msg)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(string, interface{}, ...events.PublishOption) error); ok {
+		r0 = rf(subject, msg, opts...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Subscribe provides a mock function with given fields: subject, handler, opts
+func (_m *Connection) Subscribe(subject string, handler interface{}, opts ...events.SubscribeOption) (events.Subscription, error) {
+	_va := make([]interface{}, len(opts))
+	for _i := range opts {
+		_va[_i] = opts[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, subject, handler)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 events.Subscription
+	var r1 error
+	if rf, ok := ret.Get(0).(func(string, interface{}, ...events.SubscribeOption) (events.Subscription, error)); ok {
+		return rf(subject, handler, opts...)
+	}
+	if rf, ok := ret.Get(0).(func(string, interface{}, ...events.SubscribeOption) events.Subscription); ok {
+		r0 = rf(subject, handler, opts...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(events.Subscription)
+		}
+	}
+
+	if rf, ok := ret.Get(1).(func(string, interface{}, ...events.SubscribeOption) error); ok {
+		r1 = rf(subject, handler, opts...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+type mockConstructorTestingTNewConnection interface {
+	mock.TestingT
+	Cleanup(func())
+}
+
+// NewConnection creates a new instance of Connection. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+func NewConnection(t mockConstructorTestingTNewConnection) *Connection {
+	mock := &Connection{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
diff --git a/pkg/events/mocks/MsgFilter.go b/pkg/events/mocks/MsgFilter.go
new file mode 100644
index 0000000000000000000000000000000000000000..8e1340743309bfc3097e478e7aac7f1880bfb157
--- /dev/null
+++ b/pkg/events/mocks/MsgFilter.go
@@ -0,0 +1,44 @@
+// Code generated by mockery v2.20.0. DO NOT EDIT.
+
+package mocks
+
+import (
+	nats "github.com/nats-io/nats.go"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// MsgFilter is an autogenerated mock type for the MsgFilter type
+type MsgFilter struct {
+	mock.Mock
+}
+
+// Execute provides a mock function with given fields: _a0
+func (_m *MsgFilter) Execute(_a0 *nats.Msg) *nats.Msg {
+	ret := _m.Called(_a0)
+
+	var r0 *nats.Msg
+	if rf, ok := ret.Get(0).(func(*nats.Msg) *nats.Msg); ok {
+		r0 = rf(_a0)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*nats.Msg)
+		}
+	}
+
+	return r0
+}
+
+type mockConstructorTestingTNewMsgFilter interface {
+	mock.TestingT
+	Cleanup(func())
+}
+
+// NewMsgFilter creates a new instance of MsgFilter. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+func NewMsgFilter(t mockConstructorTestingTNewMsgFilter) *MsgFilter {
+	mock := &MsgFilter{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
diff --git a/pkg/events/mocks/ProtoEncoder.go b/pkg/events/mocks/ProtoEncoder.go
new file mode 100644
index 0000000000000000000000000000000000000000..f0916a3c720b41de88ba029c2d44e23bffbb42c7
--- /dev/null
+++ b/pkg/events/mocks/ProtoEncoder.go
@@ -0,0 +1,68 @@
+// Code generated by mockery v2.20.0. DO NOT EDIT.
+
+package mocks
+
+import (
+	mock "github.com/stretchr/testify/mock"
+	protoiface "google.golang.org/protobuf/runtime/protoiface"
+)
+
+// ProtoEncoder is an autogenerated mock type for the ProtoEncoder type
+type ProtoEncoder struct {
+	mock.Mock
+}
+
+// FromProto provides a mock function with given fields: message
+func (_m *ProtoEncoder) FromProto(message protoiface.MessageV1) error {
+	ret := _m.Called(message)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(protoiface.MessageV1) error); ok {
+		r0 = rf(message)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// ToProto provides a mock function with given fields:
+func (_m *ProtoEncoder) ToProto() (protoiface.MessageV1, error) {
+	ret := _m.Called()
+
+	var r0 protoiface.MessageV1
+	var r1 error
+	if rf, ok := ret.Get(0).(func() (protoiface.MessageV1, error)); ok {
+		return rf()
+	}
+	if rf, ok := ret.Get(0).(func() protoiface.MessageV1); ok {
+		r0 = rf()
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(protoiface.MessageV1)
+		}
+	}
+
+	if rf, ok := ret.Get(1).(func() error); ok {
+		r1 = rf()
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+type mockConstructorTestingTNewProtoEncoder interface {
+	mock.TestingT
+	Cleanup(func())
+}
+
+// NewProtoEncoder creates a new instance of ProtoEncoder. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+func NewProtoEncoder(t mockConstructorTestingTNewProtoEncoder) *ProtoEncoder {
+	mock := &ProtoEncoder{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
diff --git a/pkg/events/mocks/PublishOption.go b/pkg/events/mocks/PublishOption.go
new file mode 100644
index 0000000000000000000000000000000000000000..f3517b7602f5b9714cc9de98ac6413bf324f91e8
--- /dev/null
+++ b/pkg/events/mocks/PublishOption.go
@@ -0,0 +1,33 @@
+// Code generated by mockery v2.20.0. DO NOT EDIT.
+
+package mocks
+
+import (
+	events "git.perx.ru/perxis/perxis-go/pkg/events"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// PublishOption is an autogenerated mock type for the PublishOption type
+type PublishOption struct {
+	mock.Mock
+}
+
+// Execute provides a mock function with given fields: options
+func (_m *PublishOption) Execute(options *events.PublishOptions) {
+	_m.Called(options)
+}
+
+type mockConstructorTestingTNewPublishOption interface {
+	mock.TestingT
+	Cleanup(func())
+}
+
+// NewPublishOption creates a new instance of PublishOption. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+func NewPublishOption(t mockConstructorTestingTNewPublishOption) *PublishOption {
+	mock := &PublishOption{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
diff --git a/pkg/events/mocks/SubscribeOption.go b/pkg/events/mocks/SubscribeOption.go
new file mode 100644
index 0000000000000000000000000000000000000000..5b2a9449f517c4d0881a53a64194139d50203961
--- /dev/null
+++ b/pkg/events/mocks/SubscribeOption.go
@@ -0,0 +1,33 @@
+// Code generated by mockery v2.20.0. DO NOT EDIT.
+
+package mocks
+
+import (
+	events "git.perx.ru/perxis/perxis-go/pkg/events"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// SubscribeOption is an autogenerated mock type for the SubscribeOption type
+type SubscribeOption struct {
+	mock.Mock
+}
+
+// Execute provides a mock function with given fields: options
+func (_m *SubscribeOption) Execute(options *events.SubscribeOptions) {
+	_m.Called(options)
+}
+
+type mockConstructorTestingTNewSubscribeOption interface {
+	mock.TestingT
+	Cleanup(func())
+}
+
+// NewSubscribeOption creates a new instance of SubscribeOption. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+func NewSubscribeOption(t mockConstructorTestingTNewSubscribeOption) *SubscribeOption {
+	mock := &SubscribeOption{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
diff --git a/pkg/events/mocks/Subscription.go b/pkg/events/mocks/Subscription.go
new file mode 100644
index 0000000000000000000000000000000000000000..b43ed0fa5e55b3d026a6d682b44facb0f60c1ed3
--- /dev/null
+++ b/pkg/events/mocks/Subscription.go
@@ -0,0 +1,39 @@
+// Code generated by mockery v2.20.0. DO NOT EDIT.
+
+package mocks
+
+import mock "github.com/stretchr/testify/mock"
+
+// Subscription is an autogenerated mock type for the Subscription type
+type Subscription struct {
+	mock.Mock
+}
+
+// Unsubscribe provides a mock function with given fields:
+func (_m *Subscription) Unsubscribe() error {
+	ret := _m.Called()
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func() error); ok {
+		r0 = rf()
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+type mockConstructorTestingTNewSubscription interface {
+	mock.TestingT
+	Cleanup(func())
+}
+
+// NewSubscription creates a new instance of Subscription. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+func NewSubscription(t mockConstructorTestingTNewSubscription) *Subscription {
+	mock := &Subscription{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
diff --git a/pkg/events/nats.go b/pkg/events/nats.go
new file mode 100644
index 0000000000000000000000000000000000000000..4540a48216432f2332c4a9913051b3d8934127ed
--- /dev/null
+++ b/pkg/events/nats.go
@@ -0,0 +1,197 @@
+package events
+
+import (
+	"reflect"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"github.com/nats-io/nats.go"
+)
+
+type natsConnection struct {
+	Conn *nats.Conn
+	enc  nats.Encoder
+	// prefix is prepended to every subject
+	prefix string
+}
+
+func Open(url string, prefix string) (Connection, error) {
+	var err error
+	b := new(natsConnection)
+	b.Conn, err = nats.Connect(url)
+	if err != nil {
+		return nil, err
+	}
+	b.enc = &ProtobufEncoder{}
+	b.prefix = prefix
+	return b, nil
+}
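+
+// A minimal usage sketch (illustrative only; the URL matches the integration tests,
+// the "perxis" prefix is a hypothetical example):
+//
+//	conn, err := Open("nats://localhost:4222", "perxis")
+//	if err != nil {
+//		// handle connection error
+//	}
+//	defer conn.Close()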
+
+func (c *natsConnection) getSubject(subject string) string {
+	if c.prefix != "" {
+		subject = c.prefix + "." + subject
+	}
+	return subject
+}
+
+func (c *natsConnection) Publish(subject string, msg any, opts ...PublishOption) error {
+	m := &nats.Msg{Subject: c.getSubject(subject)}
+	switch v := msg.(type) {
+	case *nats.Msg:
+		m = v
+	case []byte:
+		m.Data = v
+	default:
+		data, err := c.enc.Encode(subject, v)
+		if err != nil {
+			return err
+		}
+		m.Data = data
+	}
+
+	filters := PublishFilters(NewPublishOptions(opts...))
+	if len(filters) > 0 {
+		for _, f := range filters {
+			if m = f(m); m == nil {
+				return nil
+			}
+		}
+	}
+
+	return c.Conn.PublishMsg(m)
+}
+
+func (c *natsConnection) Subscribe(subject string, handler any, opts ...SubscribeOption) (Subscription, error) {
+	subject = c.getSubject(subject)
+	return c.subscribe(subject, handler, SubscribeFilters(NewSubscribeOptions(opts...)))
+}
+
+func (c *natsConnection) Close() (err error) {
+	if err = c.Conn.Drain(); err != nil {
+		return err
+	}
+	c.Conn.Close()
+	return
+}
+
+// Dissect the cb Handler's signature
+func argInfo(cb nats.Handler) (reflect.Type, int) {
+	cbType := reflect.TypeOf(cb)
+	if cbType.Kind() != reflect.Func {
+		panic("handler needs to be a func")
+	}
+	numArgs := cbType.NumIn()
+	if numArgs == 0 {
+		return nil, numArgs
+	}
+	return cbType.In(numArgs - 1), numArgs
+}
+
+var emptyMsgType = reflect.TypeOf(&nats.Msg{})
+
+type MsgFilter func(*nats.Msg) *nats.Msg
+
+// Internal implementation that all public functions will use.
+func (c *natsConnection) subscribe(subject string, cb nats.Handler, filters []MsgFilter) (*nats.Subscription, error) {
+	if cb == nil {
+		return nil, errors.New("handler required for subscription")
+	}
+	argType, numArgs := argInfo(cb)
+	if argType == nil {
+		return nil, errors.New("handler requires at least one argument")
+	}
+
+	cbValue := reflect.ValueOf(cb)
+	wantsRaw := (argType == emptyMsgType)
+
+	natsCB := func(m *nats.Msg) {
+		if len(filters) > 0 {
+			for _, f := range filters {
+				if m = f(m); m == nil {
+					return
+				}
+			}
+		}
+
+		var oV []reflect.Value
+		if wantsRaw {
+			oV = []reflect.Value{reflect.ValueOf(m)}
+		} else {
+			var oPtr reflect.Value
+			if argType.Kind() != reflect.Ptr {
+				oPtr = reflect.New(argType)
+			} else {
+				oPtr = reflect.New(argType.Elem())
+			}
+			if err := c.enc.Decode(m.Subject, m.Data, oPtr.Interface()); err != nil {
+				if errorHandler := c.Conn.ErrorHandler(); errorHandler != nil {
+					errorHandler(c.Conn, m.Sub, errors.Wrap(err, "Got an unmarshal error"))
+				}
+				return
+			}
+			if argType.Kind() != reflect.Ptr {
+				oPtr = reflect.Indirect(oPtr)
+			}
+
+			switch numArgs {
+			case 1:
+				oV = []reflect.Value{oPtr}
+			case 2:
+				subV := reflect.ValueOf(m.Subject)
+				oV = []reflect.Value{subV, oPtr}
+			case 3:
+				subV := reflect.ValueOf(m.Subject)
+				replyV := reflect.ValueOf(m.Reply)
+				oV = []reflect.Value{subV, replyV, oPtr}
+			}
+
+		}
+		cbValue.Call(oV)
+	}
+
+	return c.Conn.Subscribe(subject, natsCB)
+}
+
+func PublishFilters(opts *PublishOptions) []MsgFilter {
+	if opts == nil {
+		return nil
+	}
+	var filters []MsgFilter
+
+	if len(opts.Tags) > 0 {
+		filters = append(filters, func(msg *nats.Msg) *nats.Msg {
+			if msg.Header == nil {
+				msg.Header = make(nats.Header)
+			}
+			for _, v := range opts.Tags {
+				msg.Header.Add("Tag", v)
+			}
+			return msg
+		})
+	}
+
+	return filters
+}
+
+func SubscribeFilters(opts *SubscribeOptions) []MsgFilter {
+	if opts == nil {
+		return nil
+	}
+	var filters []MsgFilter
+
+	if len(opts.FilterTags) > 0 {
+		filters = append(filters, func(msg *nats.Msg) *nats.Msg {
+			tags := msg.Header.Values("Tag")
+			for _, tag := range tags {
+				for _, v := range opts.FilterTags {
+					if v == tag {
+						return msg
+					}
+				}
+			}
+			return nil
+		})
+	}
+
+	return filters
+}
diff --git a/pkg/events/nats_integration_test.go b/pkg/events/nats_integration_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..8fe38dbe1080d6a1ec7acdc9c565afd4c3852cdc
--- /dev/null
+++ b/pkg/events/nats_integration_test.go
@@ -0,0 +1,82 @@
+//go:build integration
+
+package events
+
+import (
+	"testing"
+	"time"
+
+	pb "git.perx.ru/perxis/perxis-go/pkg/events/test_proto"
+	"github.com/golang/protobuf/proto"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+type Test struct {
+	Text string
+}
+
+func (t *Test) ToProto() (proto.Message, error) {
+	return &pb.Test{Text: t.Text}, nil
+}
+
+func (t *Test) FromProto(message proto.Message) error {
+	t.Text = message.(*pb.Test).Text
+	return nil
+}
+
+func TestNatsBroker(t *testing.T) {
+
+	b, err := Open("nats://localhost:4222", "")
+	require.NoError(t, err)
+
+	resCh := make(chan string, 3)
+	_, err = b.Subscribe("a.*.c.>", func(t *Test) { resCh <- t.Text })
+	require.NoError(t, err)
+
+	require.NoError(t, b.Publish("a.b.c", &Test{Text: "1"}))
+	require.NoError(t, b.Publish("a.b.c.d", &Test{Text: "2"}))
+	require.NoError(t, b.Publish("a.b.c.d.e", &Test{Text: "3"}))
+	require.NoError(t, b.Publish("a.x.c", &Test{Text: "4"}))
+	require.NoError(t, b.Publish("a.x.c.d", &Test{Text: "5"}))
+
+	time.Sleep(200 * time.Millisecond)
+	require.NoError(t, b.Close())
+	close(resCh)
+	assert.ElementsMatch(t, []string{"2", "3", "5"}, func() []string {
+		var res []string
+		for v := range resCh {
+			res = append(res, v)
+		}
+		return res
+	}())
+}
+
+func TestTags(t *testing.T) {
+
+	b, err := Open("nats://localhost:4222", "")
+	require.NoError(t, err)
+
+	resCh := make(chan string, 3)
+	_, err = b.Subscribe("a.*.c.>", func(t *Test) { resCh <- t.Text }, FilterTag("one", "two", "three"))
+	require.NoError(t, err)
+
+	require.NoError(t, b.Publish("a.b.c", &Test{Text: "1"}))
+	require.NoError(t, b.Publish("a.b.c.d", &Test{Text: "2"}))
+	require.NoError(t, b.Publish("a.b.c.d.e", &Test{Text: "3"}, Tag("one")))
+	require.NoError(t, b.Publish("a.x.c", &Test{Text: "4"}))
+	require.NoError(t, b.Publish("a.x.c.d", &Test{Text: "5"}, Tag("two")))
+	require.NoError(t, b.Publish("a.x.c.d", &Test{Text: "6"}, Tag("two", "one")))
+	require.NoError(t, b.Publish("a.x.c.d", &Test{Text: "7"}, Tag("four")))
+
+	time.Sleep(200 * time.Millisecond)
+	require.NoError(t, b.Close())
+	close(resCh)
+	assert.ElementsMatch(t, []string{"3", "5", "6"}, func() []string {
+		var res []string
+		for v := range resCh {
+			res = append(res, v)
+		}
+		return res
+	}())
+}
diff --git a/pkg/events/proto_encoder.go b/pkg/events/proto_encoder.go
new file mode 100644
index 0000000000000000000000000000000000000000..f18c2f576daddd4af61c9ca8138bbbc1903ad280
--- /dev/null
+++ b/pkg/events/proto_encoder.go
@@ -0,0 +1,63 @@
+package events
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"github.com/golang/protobuf/proto"
+	"github.com/nats-io/nats.go"
+	"github.com/nats-io/nats.go/encoders/protobuf"
+)
+
+type ProtoEncoder interface {
+	ToProto() (proto.Message, error)
+	FromProto(message proto.Message) error
+}
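+
+// A minimal implementation sketch (illustrative only), mirroring the Test type used in
+// the integration tests; pb here stands for the generated test_proto package:
+//
+//	type Item struct{ Text string }
+//
+//	func (i *Item) ToProto() (proto.Message, error) { return &pb.Test{Text: i.Text}, nil }
+//
+//	func (i *Item) FromProto(m proto.Message) error {
+//		i.Text = m.(*pb.Test).Text
+//		return nil
+//	}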
+
+const (
+	ProtobufEncoderName = "protobuf"
+)
+
+func init() {
+	nats.RegisterEncoder(ProtobufEncoderName, &ProtobufEncoder{})
+}
+
+type ProtobufEncoder struct {
+	protobuf.ProtobufEncoder
+}
+
+var (
+	ErrInvalidProtoMsgEncode = errors.New("events: object passed to encode must implement ProtoEncoder")
+	ErrInvalidProtoMsgDecode = errors.New("events: object passed to decode must implement ProtoEncoder")
+)
+
+func (pb *ProtobufEncoder) Encode(subject string, v interface{}) ([]byte, error) {
+	if v == nil {
+		return nil, nil
+	}
+	e, ok := v.(ProtoEncoder)
+	if !ok {
+		return nil, ErrInvalidProtoMsgEncode
+	}
+
+	m, err := e.ToProto()
+	if err != nil {
+		return nil, errors.Wrap(err, "nats: encode to proto")
+	}
+
+	return pb.ProtobufEncoder.Encode(subject, m)
+}
+
+func (pb *ProtobufEncoder) Decode(subject string, data []byte, vPtr interface{}) error {
+	enc, ok := vPtr.(ProtoEncoder)
+	if !ok {
+		return ErrInvalidProtoMsgDecode
+	}
+
+	msg, err := enc.ToProto()
+	if err != nil {
+		return errors.Wrap(err, "nats: decode to proto")
+	}
+
+	if err := pb.ProtobufEncoder.Decode(subject, data, msg); err != nil {
+		return err
+	}
+
+	return enc.FromProto(msg)
+}
diff --git a/pkg/events/test_proto/test.pb.go b/pkg/events/test_proto/test.pb.go
new file mode 100644
index 0000000000000000000000000000000000000000..de333160b391355e2d56b0976e547d58f63a62e1
--- /dev/null
+++ b/pkg/events/test_proto/test.pb.go
@@ -0,0 +1,143 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// versions:
+// 	protoc-gen-go v1.27.1
+// 	protoc        v3.21.5
+// source: test.proto
+
+package test_proto
+
+import (
+	protoreflect "google.golang.org/protobuf/reflect/protoreflect"
+	protoimpl "google.golang.org/protobuf/runtime/protoimpl"
+	reflect "reflect"
+	sync "sync"
+)
+
+const (
+	// Verify that this generated code is sufficiently up-to-date.
+	_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
+	// Verify that runtime/protoimpl is sufficiently up-to-date.
+	_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
+)
+
+type Test struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"`
+}
+
+func (x *Test) Reset() {
+	*x = Test{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_test_proto_msgTypes[0]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *Test) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*Test) ProtoMessage() {}
+
+func (x *Test) ProtoReflect() protoreflect.Message {
+	mi := &file_test_proto_msgTypes[0]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use Test.ProtoReflect.Descriptor instead.
+func (*Test) Descriptor() ([]byte, []int) {
+	return file_test_proto_rawDescGZIP(), []int{0}
+}
+
+func (x *Test) GetText() string {
+	if x != nil {
+		return x.Text
+	}
+	return ""
+}
+
+var File_test_proto protoreflect.FileDescriptor
+
+var file_test_proto_rawDesc = []byte{
+	0x0a, 0x0a, 0x74, 0x65, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x04, 0x74, 0x65,
+	0x73, 0x74, 0x22, 0x1a, 0x0a, 0x04, 0x54, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x65,
+	0x78, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, 0x42, 0x38,
+	0x5a, 0x36, 0x67, 0x69, 0x74, 0x2e, 0x70, 0x65, 0x72, 0x78, 0x2e, 0x72, 0x75, 0x2f, 0x70, 0x65,
+	0x72, 0x78, 0x69, 0x73, 0x2f, 0x70, 0x65, 0x72, 0x78, 0x69, 0x73, 0x2f, 0x62, 0x72, 0x6f, 0x6b,
+	0x65, 0x72, 0x2f, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x3b, 0x74, 0x65,
+	0x73, 0x74, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
+}
+
+var (
+	file_test_proto_rawDescOnce sync.Once
+	file_test_proto_rawDescData = file_test_proto_rawDesc
+)
+
+func file_test_proto_rawDescGZIP() []byte {
+	file_test_proto_rawDescOnce.Do(func() {
+		file_test_proto_rawDescData = protoimpl.X.CompressGZIP(file_test_proto_rawDescData)
+	})
+	return file_test_proto_rawDescData
+}
+
+var file_test_proto_msgTypes = make([]protoimpl.MessageInfo, 1)
+var file_test_proto_goTypes = []interface{}{
+	(*Test)(nil), // 0: test.Test
+}
+var file_test_proto_depIdxs = []int32{
+	0, // [0:0] is the sub-list for method output_type
+	0, // [0:0] is the sub-list for method input_type
+	0, // [0:0] is the sub-list for extension type_name
+	0, // [0:0] is the sub-list for extension extendee
+	0, // [0:0] is the sub-list for field type_name
+}
+
+func init() { file_test_proto_init() }
+func file_test_proto_init() {
+	if File_test_proto != nil {
+		return
+	}
+	if !protoimpl.UnsafeEnabled {
+		file_test_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
+			switch v := v.(*Test); i {
+			case 0:
+				return &v.state
+			case 1:
+				return &v.sizeCache
+			case 2:
+				return &v.unknownFields
+			default:
+				return nil
+			}
+		}
+	}
+	type x struct{}
+	out := protoimpl.TypeBuilder{
+		File: protoimpl.DescBuilder{
+			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
+			RawDescriptor: file_test_proto_rawDesc,
+			NumEnums:      0,
+			NumMessages:   1,
+			NumExtensions: 0,
+			NumServices:   0,
+		},
+		GoTypes:           file_test_proto_goTypes,
+		DependencyIndexes: file_test_proto_depIdxs,
+		MessageInfos:      file_test_proto_msgTypes,
+	}.Build()
+	File_test_proto = out.File
+	file_test_proto_rawDesc = nil
+	file_test_proto_goTypes = nil
+	file_test_proto_depIdxs = nil
+}
diff --git a/pkg/events/test_proto/test.proto b/pkg/events/test_proto/test.proto
new file mode 100644
index 0000000000000000000000000000000000000000..fecbc9d39bf39c65d97dc8d21cba8933a4243450
--- /dev/null
+++ b/pkg/events/test_proto/test.proto
@@ -0,0 +1,9 @@
+syntax = "proto3";
+
+option go_package = "git.perx.ru/perxis/perxis-go/broker/test_proto;test_proto";
+
+package test;
+
+message Test {
+  string text = 1;
+}
diff --git a/pkg/filter/filter.go b/pkg/filter/filter.go
new file mode 100644
index 0000000000000000000000000000000000000000..ea2f1d436aba0ecced0a6473440e9fb4a782664d
--- /dev/null
+++ b/pkg/filter/filter.go
@@ -0,0 +1,410 @@
+package filter
+
+import (
+	"fmt"
+	"reflect"
+	"strings"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/validate"
+	"github.com/hashicorp/go-multierror"
+	"github.com/mitchellh/mapstructure"
+	"go.mongodb.org/mongo-driver/bson"
+	"go.mongodb.org/mongo-driver/x/bsonx"
+)
+
+type Op string
+
+const (
+	Equal          Op = "eq"
+	NotEqual       Op = "neq"
+	Less           Op = "lt"
+	LessOrEqual    Op = "lte"
+	Greater        Op = "gt"
+	GreaterOrEqual Op = "gte"
+	In             Op = "in"
+	NotIn          Op = "nin"
+	Contains       Op = "contains"
+	NotContains    Op = "ncontains"
+	Or             Op = "or"
+	And            Op = "and"
+	Near           Op = "near"
+)
+
+type Filter struct {
+	Op    Op
+	Field string
+	Value interface{}
+}
+
+func (f Filter) Format(s fmt.State, verb rune) {
+	fmt.Fprintf(s, "{Op:%s Field:%s Value:%+v}", f.Op, f.Field, f.Value)
+}
+
+func NewFilter(op Op, field string, val interface{}) *Filter {
+	return &Filter{
+		Op:    op,
+		Field: field,
+		Value: val,
+	}
+}
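+
+// A minimal usage sketch (illustrative only; "color" and "qty" are hypothetical schema fields):
+//
+//	f := NewFilter(Or, "", []*Filter{
+//		NewFilter(Equal, "color", "red"),
+//		NewFilter(GreaterOrEqual, "qty", 10),
+//	})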
+
+type FilterHandler struct {
+	schemas  []*schema.Schema
+	qbuilder QueryBuilder
+	prefix   string
+}
+
+func NewFilterHandler(sch ...*schema.Schema) *FilterHandler {
+	return &FilterHandler{
+		schemas: sch,
+		//qbuilder: qb,
+	}
+}
+
+func (h *FilterHandler) SetTrimPrefix(prefix string) *FilterHandler {
+	h.prefix = prefix
+	return h
+}
+
+func (h *FilterHandler) removeFieldPrefix(f string) string {
+	if h.prefix != "" {
+		return strings.TrimPrefix(f, h.prefix+".")
+	}
+	return f
+}
+
+func (h *FilterHandler) AddSchema(sch ...*schema.Schema) *FilterHandler {
+	h.schemas = append(h.schemas, sch...)
+	return h
+}
+
+func (h *FilterHandler) SetQueryBuilder(qb QueryBuilder) {
+	h.qbuilder = qb
+}
+
+func (h *FilterHandler) Validate(filter ...*Filter) (err error) {
+	if len(h.schemas) == 0 {
+		return errors.New("no schema provided")
+	}
+
+	for _, sch := range h.schemas {
+		var merr *multierror.Error
+
+		for _, f := range filter {
+			if err := h.validate(sch, f); err != nil {
+				merr = multierror.Append(merr, err)
+			}
+		}
+		if merr != nil {
+			merr.ErrorFormat = func(i []error) string {
+				return fmt.Sprintf("%d validation error(s)", len(i))
+			}
+			return errors.WithField(merr, "filter")
+		}
+	}
+	return nil
+}
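+
+// A minimal wiring sketch (illustrative only; sch is a *schema.Schema describing the
+// collection and "color" is a hypothetical field):
+//
+//	h := NewFilterHandler(sch)
+//	h.SetQueryBuilder(NewMongoQueryBuilder())
+//	f := NewFilter(Equal, "color", "red")
+//	if err := h.Validate(f); err != nil {
+//		// handle validation error
+//	}
+//	query := h.Query(f) // bson.M that can be passed to a MongoDB Find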
+
+// TODO: support '$elemMatch' for matching an array field against a condition, e.g. '{ results: { $elemMatch: { $gte: 80, $lt: 85 } } }'?
+
+func (h *FilterHandler) validate(sch *schema.Schema, f *Filter) (err error) {
+	if f == nil {
+		return
+	}
+
+	fld := h.removeFieldPrefix(f.Field)
+
+	switch f.Op {
+	case Equal, NotEqual, Less, LessOrEqual, Greater, GreaterOrEqual:
+		fld := sch.GetField(fld)
+		if fld == nil {
+			return h.formatErr(f.Field, f.Op, errors.New("field not found in collection schema"))
+		}
+
+		if f.Value, err = schema.Decode(nil, fld, f.Value); err != nil {
+			return h.formatErr(f.Field, f.Op, err)
+		}
+		if err = validate.Validate(nil, fld, f.Value); err != nil {
+			return h.formatErr(f.Field, f.Op, err)
+		}
+	case In, NotIn:
+		fld := sch.GetField(fld)
+		if fld == nil {
+			return h.formatErr(f.Field, f.Op, errors.New("field not found in collection schema"))
+		}
+		val := reflect.ValueOf(f.Value)
+		if val.IsZero() || (val.Kind() != reflect.Array && val.Kind() != reflect.Slice) {
+			return h.formatErr(f.Field, f.Op, errors.New("\"IN/NOT IN\" operations require array type for value"))
+		}
+
+		switch fld.GetType().(type) {
+		case *field.ArrayType:
+			f.Value, err = schema.Decode(nil, fld, f.Value)
+			if err != nil {
+				return h.formatErr(f.Field, f.Op, err)
+			}
+		default:
+			decodedVal := make([]interface{}, 0, val.Len())
+			for i := 0; i < val.Len(); i++ {
+				v, err := schema.Decode(nil, fld, val.Index(i).Interface())
+				if err != nil {
+					return h.formatErr(f.Field, f.Op, err)
+				}
+				decodedVal = append(decodedVal, v)
+			}
+
+			f.Value = decodedVal
+		}
+
+	case Contains, NotContains:
+		fld := sch.GetField(fld)
+		if fld == nil {
+			return h.formatErr(f.Field, f.Op, errors.New("field not found in collection schema"))
+		}
+
+		typ := fld.GetType()
+
+		if typ.Name() != "string" && typ.Name() != "array" {
+			return h.formatErr(f.Field, f.Op, errors.New("\"CONTAINS/NOT CONTAINS\" operations require field to be 'string' or 'string array'"))
+		}
+		if typ.Name() == "array" {
+			params := fld.Params.(*field.ArrayParameters)
+			if params.Item == nil || params.Item.GetType().Name() != "string" {
+				return h.formatErr(f.Field, f.Op, errors.New("\"CONTAINS/NOT CONTAINS\" operations require field to be 'string' or 'string array'"))
+			}
+		}
+
+		if reflect.TypeOf(f.Value).Kind() != reflect.String {
+			return h.formatErr(f.Field, f.Op, errors.New("\"CONTAINS/NOT CONTAINS\" operations require value to be 'string'"))
+		}
+
+	case Or, And:
+		fltrs, ok := f.Value.([]*Filter)
+		if !ok {
+			return h.formatErr(f.Field, f.Op, errors.New("array of filters should be provided for operations "))
+		}
+		for _, f := range fltrs {
+			err = h.validate(sch, f)
+			if err != nil {
+				return err
+			}
+		}
+
+	case Near:
+		fld := sch.GetField(fld)
+		if fld == nil {
+			return h.formatErr(f.Field, f.Op, errors.New("field not found in collection schema"))
+		}
+
+		_, ok := fld.Params.(*field.LocationParameters)
+		if !ok {
+			return h.formatErr(f.Field, f.Op, errors.New("field must be a location"))
+		}
+
+		value, ok := f.Value.(map[string]interface{})
+		if !ok {
+			return h.formatErr(f.Field, f.Op, errors.New("filter value should be map"))
+		}
+
+		point, ok := value["point"]
+		if !ok {
+			return h.formatErr(f.Field, f.Op, errors.New("filter value should have location"))
+		}
+
+		var p field.GeoJSON
+		if err := mapstructure.Decode(map[string]interface{}{"type": "Point", "coordinates": point}, &p); err != nil {
+			return h.formatErr(f.Field, f.Op, err)
+		}
+
+		maxD, ok := value["distance"]
+		if ok {
+			v := reflect.ValueOf(maxD)
+			if !v.Type().ConvertibleTo(reflect.TypeOf(float64(0))) {
+				return h.formatErr(f.Field, f.Op, errors.New("filter value distance must be a number"))
+			}
+			val := v.Convert(reflect.TypeOf(float64(0)))
+			if val.Float() < 0 {
+				return h.formatErr(f.Field, f.Op, errors.New("filter value distance should not be negative"))
+			}
+		}
+
+	default:
+		return h.formatErr(f.Field, f.Op, errors.New("unknown operation"))
+	}
+
+	return nil
+}
+
+func (*FilterHandler) formatErr(args ...interface{}) error {
+	var (
+		f   string
+		op  Op
+		err error
+	)
+	for _, arg := range args {
+		switch v := arg.(type) {
+		case string:
+			f = v
+		case Op:
+			op = v
+		case error:
+			err = v
+		}
+	}
+	return errors.WithField(fmt.Errorf("op: '%s' %s", op, err), f)
+}
+
+func (h *FilterHandler) Query(filter ...*Filter) interface{} {
+	return h.qbuilder.Query(filter...)
+}
+
+type QueryBuilder interface {
+	Query(filter ...*Filter) interface{}
+	SetFieldPrefix(string)
+}
+
+type mongoQueryBuilder struct {
+	m      map[Op]string
+	prefix string
+}
+
+func NewMongoQueryBuilder() QueryBuilder {
+	b := new(mongoQueryBuilder)
+	b.m = map[Op]string{
+		Equal:          "$eq",
+		NotEqual:       "$ne",
+		Less:           "$lt",
+		LessOrEqual:    "$lte",
+		Greater:        "$gt",
+		GreaterOrEqual: "$gte",
+		In:             "$in",
+		NotIn:          "$nin",
+		Contains:       "$regex",
+		NotContains:    "$not",
+		Or:             "$or",
+		And:            "$and",
+		Near:           "$near",
+	}
+	return b
+}
+
+func (b *mongoQueryBuilder) getOp(op Op) string {
+	return b.m[op]
+}
+
+func (b *mongoQueryBuilder) SetFieldPrefix(prefix string) {
+	b.prefix = prefix
+}
+
+func (b *mongoQueryBuilder) Query(filters ...*Filter) interface{} {
+	if len(filters) == 0 {
+		return bson.M{}
+	}
+	filter := &Filter{Op: And, Value: filters}
+	return b.query(filter)
+}
+
+func (b *mongoQueryBuilder) query(f *Filter) bson.M {
+	if f == nil {
+		return nil
+	}
+
+	switch f.Op {
+	case Equal, NotEqual, Less, LessOrEqual, Greater, GreaterOrEqual, In, NotIn:
+		return bson.M{
+			b.field(f.Field): bson.M{
+				b.getOp(f.Op): f.Value,
+			},
+		}
+	case Contains, NotContains:
+
+		val, _ := f.Value.(string)
+		return bson.M{
+			b.field(f.Field): bson.M{
+				b.getOp(f.Op): bsonx.Regex(val, ""),
+			},
+		}
+
+	case Or, And:
+		fltrs, ok := f.Value.([]*Filter)
+		if !ok {
+			return nil
+		}
+
+		arr := bson.A{}
+		for _, fltr := range fltrs {
+			arr = append(arr, b.query(fltr))
+		}
+		return bson.M{
+			b.getOp(f.Op): arr,
+		}
+	case Near:
+		val, ok := f.Value.(map[string]interface{})
+		if ok {
+			var p field.GeoJSON
+			c, ok := val["point"]
+			if !ok {
+				return nil
+			}
+			if err := mapstructure.Decode(map[string]interface{}{"type": "Point", "coordinates": c}, &p); err != nil {
+				return nil
+			}
+			q := bson.D{{Key: "$geometry", Value: p}}
+
+			if maxD, ok := val["distance"]; ok {
+				q = append(q, bson.E{Key: "$maxDistance", Value: maxD})
+			}
+
+			return bson.M{
+				b.field(f.Field + ".geometry"): bson.M{b.getOp(f.Op): q},
+			}
+		}
+	}
+
+	return nil
+}
+
+func (b *mongoQueryBuilder) field(f string) string {
+	if b.prefix == "" || strings.HasPrefix(f, b.prefix) {
+		return f
+	}
+	return b.prefix + "." + f
+}
+
+// $text search ??
+//func (b *mongoQueryBuilder) textSearchQuery(filters ...*Filter) string {
+//	cnt, notcnt := "", ""
+//	for _, f := range filters {
+//		val, ok := f.Value.(string)
+//		if !ok {
+//			continue
+//		}
+//		switch f.Op {
+//		case Contains:
+//			if len(cnt) > 0 {
+//				cnt += " "
+//			}
+//			cnt += val
+//		case NotContains:
+//			words := strings.Split(val, " ")
+//			for _, w := range words {
+//				if len(notcnt) > 0 {
+//					notcnt += " "
+//				}
+//				notcnt += "-" + w
+//			}
+//		}
+//	}
+//	if len(cnt) == 0 {
+//		return ""
+//	}
+//	if len(notcnt) > 0 {
+//		cnt += " " + notcnt
+//	}
+//	return cnt
+//}
diff --git a/pkg/filter/filter_test.go b/pkg/filter/filter_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..7283e26a0dfebcc5159211e49c30dc29150f12db
--- /dev/null
+++ b/pkg/filter/filter_test.go
@@ -0,0 +1,473 @@
+package filter
+
+import (
+	"testing"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+	"go.mongodb.org/mongo-driver/bson/primitive"
+)
+
+func TestFilterHandler(t *testing.T) {
+
+	sch := schema.New(
+		"str", field.String(),
+		"num", field.Number(field.NumberFormatInt),
+		"obj", field.Object(
+			"bool", field.Bool(),
+			"arr", field.Array(field.Time()),
+			"list", field.Array(
+				field.Object(
+					"num1", field.Number(field.NumberFormatFloat),
+					"str1", field.String(),
+				),
+			),
+		),
+		"date", field.Time(),
+		"geo", field.Location(),
+	)
+	h := NewFilterHandler(sch)
+	ph := NewFilterHandler(sch).SetTrimPrefix("data")
+
+	h.SetQueryBuilder(NewMongoQueryBuilder())
+	ph.SetQueryBuilder(NewMongoQueryBuilder())
+
+	var err error
+
+	t.Run("Validate", func(t *testing.T) {
+		t.Run("Simple", func(t *testing.T) {
+			t.Run("String", func(t *testing.T) {
+				f := &Filter{Op: Equal, Field: "str", Value: "zzz"}
+				err = h.Validate(f)
+				require.NoError(t, err)
+
+				f = &Filter{Op: Equal, Field: "data.str", Value: "zzz"}
+				err = ph.Validate(f)
+				require.NoError(t, err)
+			})
+			t.Run("Int", func(t *testing.T) {
+				f := &Filter{Op: NotEqual, Field: "num", Value: 5.0}
+				err = h.Validate(f)
+				require.NoError(t, err)
+				assert.IsType(t, int64(0), f.Value)
+
+				f = &Filter{Op: NotEqual, Field: "data.num", Value: 5.0}
+				err = ph.Validate(f)
+				require.NoError(t, err)
+				assert.IsType(t, int64(0), f.Value)
+			})
+			t.Run("Time", func(t *testing.T) {
+				f := &Filter{Op: LessOrEqual, Field: "date", Value: "22 Dec 1997"}
+				err = h.Validate(f)
+				require.Error(t, err)
+
+				f = &Filter{Op: LessOrEqual, Field: "data.date", Value: "22 Dec 1997"}
+				err = ph.Validate(f)
+				require.Error(t, err)
+			})
+			t.Run("Location", func(t *testing.T) {
+				f := &Filter{Op: Near, Field: "geo", Value: ""}
+				err = h.Validate(f)
+				require.Error(t, err)
+
+				f = &Filter{Op: Near, Field: "data.geo", Value: ""}
+				err = ph.Validate(f)
+				require.Error(t, err)
+
+				fv := map[string]interface{}{
+					"point":    []float64{55, 55},
+					"distance": 1000,
+				}
+
+				f = &Filter{Op: Near, Field: "data.geo", Value: fv}
+				err = ph.Validate(f)
+				require.NoError(t, err)
+
+				fv["distance"] = -1
+				f = &Filter{Op: Near, Field: "data.geo", Value: fv}
+				err = ph.Validate(f)
+				require.Error(t, err)
+
+			})
+		})
+		t.Run("Embedded array field", func(t *testing.T) {
+			w, _ := time.Parse(time.RFC3339, "2012-11-01T22:08:41Z")
+			f := &Filter{Op: In, Field: "obj.arr", Value: []interface{}{"2012-11-01T22:08:41Z"}}
+			err = h.Validate(f)
+			require.NoError(t, err)
+			assert.Equal(t, w, f.Value.([]interface{})[0])
+
+			f = &Filter{Op: In, Field: "data.obj.arr", Value: []interface{}{"2012-11-01T22:08:41Z"}}
+			err = ph.Validate(f)
+			require.NoError(t, err)
+			assert.Equal(t, w, f.Value.([]interface{})[0])
+		})
+		t.Run("Embedded string contains", func(t *testing.T) {
+			f := &Filter{Op: Contains, Field: "obj.list.str1", Value: "zzz"}
+			err = h.Validate(f)
+			require.NoError(t, err)
+
+			f = &Filter{Op: Contains, Field: "data.obj.list.str1", Value: "zzz"}
+			err = ph.Validate(f)
+			require.NoError(t, err)
+		})
+		t.Run("Compound filter with 'OR' operation", func(t *testing.T) {
+			t.Run("No Err", func(t *testing.T) {
+				w1, _ := time.Parse(time.RFC3339, "2012-11-01T22:08:41Z")
+				w2, _ := time.Parse(time.RFC3339, "2015-12-01T22:08:41Z")
+
+				ff := []*Filter{
+					{Op: In, Field: "date", Value: []interface{}{"2012-11-01T22:08:41Z", "2015-12-01T22:08:41Z"}},
+					{Op: Or, Field: "", Value: []*Filter{
+						{Op: And, Field: "", Value: []*Filter{
+							{Op: GreaterOrEqual, Field: "date", Value: "2012-11-01T22:08:41Z"},
+							{Op: LessOrEqual, Field: "date", Value: "2015-12-01T22:08:41Z"},
+						}},
+						{Op: Equal, Field: "obj.bool", Value: true},
+					}},
+				}
+				err = h.Validate(ff...)
+				require.NoError(t, err)
+				assert.ElementsMatch(t, []interface{}{w1, w2}, ff[0].Value.([]interface{}))
+				assert.Equal(t, w1, ff[1].Value.([]*Filter)[0].Value.([]*Filter)[0].Value)
+				assert.Equal(t, w2, ff[1].Value.([]*Filter)[0].Value.([]*Filter)[1].Value)
+
+				ff = []*Filter{
+					{Op: In, Field: "data.date", Value: []interface{}{"2012-11-01T22:08:41Z", "2015-12-01T22:08:41Z"}},
+					{Op: Or, Field: "", Value: []*Filter{
+						{Op: And, Field: "", Value: []*Filter{
+							{Op: GreaterOrEqual, Field: "data.date", Value: "2012-11-01T22:08:41Z"},
+							{Op: LessOrEqual, Field: "data.date", Value: "2015-12-01T22:08:41Z"},
+						}},
+						{Op: Equal, Field: "data.obj.bool", Value: true},
+					}},
+				}
+				err = ph.Validate(ff...)
+				require.NoError(t, err)
+				assert.ElementsMatch(t, []interface{}{w1, w2}, ff[0].Value.([]interface{}))
+				assert.Equal(t, w1, ff[1].Value.([]*Filter)[0].Value.([]*Filter)[0].Value)
+				assert.Equal(t, w2, ff[1].Value.([]*Filter)[0].Value.([]*Filter)[1].Value)
+			})
+			t.Run("Multiple Errors", func(t *testing.T) {
+				ff := []*Filter{
+					{Op: In, Field: "date", Value: []interface{}{"5 Jan 2020", "10 June 2020"}},
+					{Op: Or, Field: "", Value: []*Filter{
+						{Op: And, Field: "", Value: []*Filter{
+							{Op: GreaterOrEqual, Field: "date", Value: "2012-11-01T22:08:41Z"},
+							{Op: LessOrEqual, Field: "date", Value: "2015-12-01T22:08:41Z"},
+						}},
+						{Op: Equal, Field: "obj.bool", Value: 15},
+					}},
+				}
+				err = h.Validate(ff...)
+				require.Error(t, err)
+				assert.Equal(t, err.Error(), "2 validation error(s)")
+
+				ff = []*Filter{
+					{Op: In, Field: "data.date", Value: []interface{}{"5 Jan 2020", "10 June 2020"}},
+					{Op: Or, Field: "", Value: []*Filter{
+						{Op: And, Field: "", Value: []*Filter{
+							{Op: GreaterOrEqual, Field: "data.date", Value: "2012-11-01T22:08:41Z"},
+							{Op: LessOrEqual, Field: "data.date", Value: "2015-12-01T22:08:41Z"},
+						}},
+						{Op: Equal, Field: "data.obj.bool", Value: 15},
+					}},
+				}
+				err = ph.Validate(ff...)
+				require.Error(t, err)
+				assert.Equal(t, err.Error(), "2 validation error(s)")
+			})
+		})
+	})
+
+	t.Run("Build Query", func(t *testing.T) {
+		t.Run("No Filters", func(t *testing.T) {
+			res := h.Query()
+			require.IsType(t, res, primitive.M{})
+
+			pres := ph.Query()
+			assert.Equal(t, res, pres, "пустой запрос с префиксом и без должны быть одинаковые")
+		})
+		t.Run("Equal String", func(t *testing.T) {
+			f := &Filter{Op: Equal, Field: "data.str", Value: "zzz"}
+			res := h.Query(f)
+			b, ok := res.(primitive.M)
+			require.True(t, ok)
+			assert.Equal(t, primitive.M{"$and": primitive.A{primitive.M{"data.str": primitive.M{"$eq": "zzz"}}}}, b)
+
+			pf := &Filter{Op: Equal, Field: "data.str", Value: "zzz"}
+			pres := ph.Query(pf)
+			assert.Equal(t, res, pres, "запрос в БД с полями с префиксом и без должны быть одинаковые")
+		})
+		t.Run("In Array", func(t *testing.T) {
+			w, _ := time.Parse(time.RFC3339, "2012-11-01T22:08:41Z")
+			f := &Filter{Op: In, Field: "obj.arr", Value: []interface{}{w}}
+			res := h.Query(f)
+			b, ok := res.(primitive.M)
+			require.True(t, ok)
+			assert.Equal(t, primitive.M{"$and": primitive.A{primitive.M{"obj.arr": primitive.M{"$in": []interface{}{w}}}}}, b)
+		})
+		t.Run("Several ops for one field", func(t *testing.T) {
+			w, _ := time.Parse(time.RFC3339, "2012-11-01T22:08:41Z")
+			f := &Filter{Op: In, Field: "obj.arr", Value: []interface{}{w}}
+			res := h.Query(f)
+			b, ok := res.(primitive.M)
+			require.True(t, ok)
+			assert.Equal(t, primitive.M{"$and": primitive.A{primitive.M{"obj.arr": primitive.M{"$in": []interface{}{w}}}}}, b)
+		})
+	})
+}
+
+//func TestFilterHandler_Integration(t *testing.T) {
+//	ctx := context.Background()
+//
+//	uri := os.Getenv("MONGO_URL")
+//	if uri == "" {
+//		uri = "mongodb://localhost:27017"
+//	}
+//	opts := options.Client().SetConnectTimeout(15 * time.Second).ApplyURI(uri)
+//	client, err := mongo.Connect(context.Background(), opts)
+//	require.NoError(t, err)
+//	err = client.Ping(ctx, nil)
+//	require.NoError(t, err)
+//
+//	sch := schema.New(
+//		"name", field.String(validate.Required()),
+//		"color", field.String(),
+//		"qty", field.Number(field.NumberFormatInt),
+//		"info", field.Object(
+//			"is_fruit", field.Bool(),
+//			"similar", field.Array(
+//				field.Object(
+//					"name", field.Number(field.NumberFormatFloat),
+//					"color", field.String(),
+//				),
+//			),
+//			"desc", field.String(),
+//		),
+//		"produced", field.Time(),
+//		"shipment", field.Array(field.String()),
+//	)
+//
+//	w1, _ := time.Parse(time.RFC3339, "2020-01-01T10:08:41Z")
+//	w2, _ := time.Parse(time.RFC3339, "2020-05-01T10:08:41Z")
+//	w3, _ := time.Parse(time.RFC3339, "2020-10-01T10:08:41Z")
+//
+//	items := []map[string]interface{}{
+//		{
+//			"name":  "apple",
+//			"color": "red",
+//			"qty":   25,
+//			"info": map[string]interface{}{
+//				"is_fruit": true,
+//				"similar": []interface{}{
+//					map[string]interface{}{"name": "pear", "color": "yellow"},
+//					map[string]interface{}{"name": "lemon", "color": "yellow"},
+//				},
+//				"desc": "An apple is the edible fruit . Apple trees are cultivated worldwide and have religious and mythological " +
+//					"significance in many cultures. Apples are eaten with honey at the Jewish New Year of Rosh Hashanah to symbolize a sweet new year.",
+//			},
+//			"produced":   w1,
+//			"shipment":   []interface{}{"Russia", "Iran"},
+//			"storepoint": map[string]interface{}{"type": "Point", "coordinates": []float64{55.751472, 37.618727}},
+//		},
+//		{
+//			"name":  "orange",
+//			"color": "orange",
+//			"qty":   10,
+//			"info": map[string]interface{}{
+//				"is_fruit": true,
+//				"similar": []interface{}{
+//					map[string]interface{}{"name": "lemon", "color": "yellow"},
+//					map[string]interface{}{"name": "grapefruit", "color": "red"},
+//				},
+//				"desc": "The orange is the edible fruit of various citrus species; a hybrid between pomelo and mandarin. Orange trees are widely grown" +
+//					" in tropical and subtropical climates for their sweet fruit. The fruit of the orange tree can be eaten fresh, or processed for its juice or fragrant peel.",
+//			},
+//			"produced":   w2,
+//			"shipment":   []interface{}{"Egypt", "Iran"},
+//			"storepoint": map[string]interface{}{"type": "Point", "coordinates": []float64{55.716797, 37.552809}},
+//		},
+//		{
+//			"name":  "tomato",
+//			"color": "red",
+//			"qty":   1,
+//			"info": map[string]interface{}{
+//				"is_fruit": false,
+//				"similar": []interface{}{
+//					map[string]interface{}{"name": "cucumber", "color": "green"},
+//					map[string]interface{}{"name": "apple", "color": "yellow"},
+//				},
+//				"desc": "The tomato is the edible red berry. The tomato is consumed in diverse ways, raw or cooked, in many dishes, " +
+//					"sauces, salads, and drinks. Numerous varieties of the tomato plant are widely grown in temperate climates across the world.",
+//			},
+//			"produced":   w3,
+//			"shipment":   []interface{}{"Russia", "Italy"},
+//			"storepoint": map[string]interface{}{"type": "Point", "coordinates": []float64{55.760688, 37.619125}},
+//		},
+//	}
+//
+//	db := client.Database("perxis_test_filter")
+//	coll := db.Collection("items")
+//	coll.Drop(ctx)
+//
+//	for _, item := range items {
+//		_, err = coll.InsertOne(ctx, item)
+//		require.NoError(t, err)
+//	}
+//
+//	h := NewFilterHandler(sch)
+//	h.SetQueryBuilder(NewMongoQueryBuilder())
+//
+//	t.Run("By Color [Equal/NotEqual]", func(t *testing.T) {
+//		t.Run("Red", func(t *testing.T) {
+//			query := h.Query(&Filter{Op: Equal, Field: "color", Value: "red"})
+//			res, err := coll.Find(ctx, query)
+//			require.NoError(t, err)
+//
+//			var data []map[string]interface{}
+//			err = res.All(ctx, &data)
+//			require.NoError(t, err)
+//			require.Len(t, data, 2)
+//			assert.ElementsMatch(t, []interface{}{"apple", "tomato"}, []interface{}{data[0]["name"], data[1]["name"]})
+//		})
+//		t.Run("Not Red", func(t *testing.T) {
+//			query := h.Query(&Filter{Op: NotEqual, Field: "color", Value: "red"})
+//			res, err := coll.Find(ctx, query)
+//			require.NoError(t, err)
+//
+//			var data []map[string]interface{}
+//			err = res.All(ctx, &data)
+//			require.NoError(t, err)
+//			require.Len(t, data, 1)
+//			assert.Equal(t, "orange", data[0]["name"])
+//		})
+//	})
+//	t.Run("By Quantity [Less/Greater]", func(t *testing.T) {
+//		query := h.Query(&Filter{Op: LessOrEqual, Field: "qty", Value: 25}, &Filter{Op: Greater, Field: "qty", Value: 1})
+//		res, err := coll.Find(ctx, query)
+//		require.NoError(t, err)
+//
+//		var data []map[string]interface{}
+//		err = res.All(ctx, &data)
+//		require.NoError(t, err)
+//		require.Len(t, data, 2)
+//		assert.ElementsMatch(t, []interface{}{"apple", "orange"}, []interface{}{data[0]["name"], data[1]["name"]})
+//	})
+//	t.Run("Not Fruit [Equal embedded field]", func(t *testing.T) {
+//		query := h.Query(&Filter{Op: Equal, Field: "info.is_fruit", Value: false})
+//		res, err := coll.Find(ctx, query)
+//		require.NoError(t, err)
+//
+//		var data []map[string]interface{}
+//		err = res.All(ctx, &data)
+//		require.NoError(t, err)
+//		require.Len(t, data, 1)
+//		assert.Equal(t, "tomato", data[0]["name"])
+//	})
+//	t.Run("By Similar [In/NotIn]", func(t *testing.T) {
+//		t.Run("Similar to cucumber, pear", func(t *testing.T) {
+//			query := h.Query(&Filter{Op: In, Field: "info.similar.name", Value: []string{"cucumber", "pear"}})
+//			res, err := coll.Find(ctx, query)
+//			require.NoError(t, err)
+//
+//			var data []map[string]interface{}
+//			err = res.All(ctx, &data)
+//			require.NoError(t, err)
+//			require.Len(t, data, 2)
+//			assert.ElementsMatch(t, []interface{}{"apple", "tomato"}, []interface{}{data[0]["name"], data[1]["name"]})
+//		})
+//		t.Run("Not Similar to cucumber, pear", func(t *testing.T) {
+//			query := h.Query(&Filter{Op: NotIn, Field: "info.similar.name", Value: []string{"cucumber", "grapefruit"}})
+//			res, err := coll.Find(ctx, query)
+//			require.NoError(t, err)
+//
+//			var data []map[string]interface{}
+//			err = res.All(ctx, &data)
+//			require.NoError(t, err)
+//			require.Len(t, data, 1)
+//			assert.Equal(t, "apple", data[0]["name"])
+//		})
+//	})
+//	t.Run("By Description [Contains/NotContains]", func(t *testing.T) {
+//		t.Run("Contains", func(t *testing.T) {
+//			query := h.Query(&Filter{Op: And, Value: []*Filter{
+//				&Filter{Op: In, Field: "info.similar.color", Value: []string{"yellow"}},
+//				&Filter{Op: Contains, Field: "info.desc", Value: "edible fruit"},
+//			}})
+//			res, err := coll.Find(ctx, query)
+//			require.NoError(t, err)
+//			var data []map[string]interface{}
+//			err = res.All(ctx, &data)
+//			require.NoError(t, err)
+//			require.Len(t, data, 2)
+//			assert.ElementsMatch(t, []interface{}{"apple", "orange"}, []interface{}{data[0]["name"], data[1]["name"]})
+//		})
+//		t.Run("Not Contains", func(t *testing.T) {
+//			query := h.Query(&Filter{Op: NotContains, Field: "info.desc", Value: "fruit"})
+//			res, err := coll.Find(ctx, query)
+//			require.NoError(t, err)
+//
+//			var data []map[string]interface{}
+//			err = res.All(ctx, &data)
+//			require.NoError(t, err)
+//			for _, d := range data {
+//				fmt.Println(d["name"])
+//			}
+//			require.Len(t, data, 1)
+//			assert.Equal(t, "tomato", data[0]["name"])
+//		})
+//	})
+//	t.Run("By Shipment [Contains/NotContains]", func(t *testing.T) {
+//		t.Run("Contains", func(t *testing.T) {
+//			query := h.Query(
+//				&Filter{Op: Contains, Field: "shipment", Value: "Russia"},
+//			)
+//			res, err := coll.Find(ctx, query)
+//			require.NoError(t, err)
+//			var data []map[string]interface{}
+//			err = res.All(ctx, &data)
+//			require.NoError(t, err)
+//			require.Len(t, data, 2)
+//			assert.ElementsMatch(t, []interface{}{"apple", "tomato"}, []interface{}{data[0]["name"], data[1]["name"]})
+//		})
+//		t.Run("Not Contains", func(t *testing.T) {
+//			query := h.Query(&Filter{Op: NotContains, Field: "shipment", Value: "Iran"})
+//			res, err := coll.Find(ctx, query)
+//			require.NoError(t, err)
+//
+//			var data []map[string]interface{}
+//			err = res.All(ctx, &data)
+//			require.NoError(t, err)
+//			for _, d := range data {
+//				fmt.Println(d["name"])
+//			}
+//			require.Len(t, data, 1)
+//			assert.Equal(t, "tomato", data[0]["name"])
+//		})
+//	})
+//	t.Run("Compound Query", func(t *testing.T) {
+//		query := h.Query(&Filter{Op: Or, Value: []*Filter{
+//			&Filter{Op: And, Value: []*Filter{
+//				&Filter{Op: In, Field: "color", Value: []interface{}{"red", "yellow", "green"}},
+//				&Filter{Op: Less, Field: "qty", Value: 10},
+//			}}, // 1 - tomato
+//			&Filter{Op: Equal, Field: "name", Value: "pepper"}, // 0
+//			&Filter{Op: And, Value: []*Filter{
+//				&Filter{Op: GreaterOrEqual, Field: "produced", Value: w1},
+//				&Filter{Op: Less, Field: "produced", Value: w2}, // 1 - apple
+//			}},
+//		}})
+//		res, err := coll.Find(ctx, query)
+//		require.NoError(t, err)
+//
+//		var data []map[string]interface{}
+//		err = res.All(ctx, &data)
+//		require.NoError(t, err)
+//		require.Len(t, data, 2)
+//		assert.ElementsMatch(t, []interface{}{"apple", "tomato"}, []interface{}{data[0]["name"], data[1]["name"]})
+//	})
+//}
diff --git a/pkg/items/codec.go b/pkg/items/codec.go
new file mode 100644
index 0000000000000000000000000000000000000000..6264c3b582a2af08c1746c763807b315c0ae2fa9
--- /dev/null
+++ b/pkg/items/codec.go
@@ -0,0 +1,9 @@
+package items
+
+type Encoder interface {
+	Encode(item *Item) (any, error)
+}
+
+type Decoder interface {
+	Decode(value any, item *Item) error
+}
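+
+// Illustrative sketch only (not part of this change): a minimal Encoder/Decoder
+// pair backed by encoding/json could look like the following. The jsonCodec name
+// and the []byte payload convention are assumptions made for the example.
+//
+//	type jsonCodec struct{}
+//
+//	func (jsonCodec) Encode(item *Item) (any, error) {
+//		return json.Marshal(item)
+//	}
+//
+//	func (jsonCodec) Decode(value any, item *Item) error {
+//		b, ok := value.([]byte)
+//		if !ok {
+//			return fmt.Errorf("expected []byte, got %T", value)
+//		}
+//		return json.Unmarshal(b, item)
+//	}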
diff --git a/pkg/items/context.go b/pkg/items/context.go
new file mode 100644
index 0000000000000000000000000000000000000000..87e600e5b40da50381245a626e8228ab20485de8
--- /dev/null
+++ b/pkg/items/context.go
@@ -0,0 +1,71 @@
+package items
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/clients"
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	"git.perx.ru/perxis/perxis-go/pkg/spaces"
+)
+
+type Context struct {
+	Items
+	Clients clients.Clients
+
+	SpaceID      string
+	EnvID        string
+	CollectionID string
+	ItemID       string
+	Item         *Item
+	Space        *spaces.Space
+	Environment  *environments.Environment
+
+	ViewSpaceID       string
+	ViewEnvironmentID string
+	ViewCollectionID  string
+	ViewSpace         *spaces.Space
+	ViewEnvironment   *environments.Environment
+}
+
+type itemsCtx struct{}
+
+// WithContext returns a context carrying the given items Context. View* fields
+// default to their non-view counterparts; if a Context is already stored in ctx,
+// its value is replaced in place.
+func WithContext(ctx context.Context, itmCtx *Context) context.Context {
+	if ctx == nil {
+		ctx = context.Background()
+	}
+
+	if itmCtx.ViewSpaceID == "" {
+		itmCtx.ViewSpaceID = itmCtx.SpaceID
+	}
+	if itmCtx.ViewEnvironmentID == "" {
+		itmCtx.ViewEnvironmentID = itmCtx.EnvID
+	}
+	if itmCtx.ViewCollectionID == "" {
+		itmCtx.ViewCollectionID = itmCtx.CollectionID
+	}
+	if itmCtx.ViewSpace == nil {
+		itmCtx.ViewSpace = itmCtx.Space
+	}
+	if itmCtx.ViewEnvironment == nil {
+		itmCtx.ViewEnvironment = itmCtx.Environment
+	}
+
+	p, _ := ctx.Value(itemsCtx{}).(*Context)
+	if p != nil {
+		*p = *itmCtx
+		return ctx
+	}
+
+	return context.WithValue(ctx, itemsCtx{}, itmCtx)
+}
+
+// GetContext returns the items Context stored in ctx, or a new empty Context
+// if none is present.
+func GetContext(ctx context.Context) *Context {
+	if ctx == nil {
+		return new(Context)
+	}
+	p, _ := ctx.Value(itemsCtx{}).(*Context)
+	if p == nil {
+		return new(Context)
+	}
+	return p
+}
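+
+// Usage sketch (illustrative; the surrounding handler code is assumed):
+//
+//	ctx = items.WithContext(ctx, &items.Context{
+//		SpaceID:      "space",
+//		EnvID:        "master",
+//		CollectionID: "posts",
+//	})
+//	// ...later, anywhere down the call chain:
+//	itmCtx := items.GetContext(ctx) // never nil; returns an empty Context if none was stored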
diff --git a/pkg/items/events.go b/pkg/items/events.go
new file mode 100644
index 0000000000000000000000000000000000000000..14ff72216edb875ed3ca15cc879913fdb21a7ed7
--- /dev/null
+++ b/pkg/items/events.go
@@ -0,0 +1,140 @@
+package items
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	pb "git.perx.ru/perxis/perxis-go/proto/items"
+	"github.com/golang/protobuf/proto"
+)
+
+const (
+	EventCreateItem    = "create_item"
+	EventUpdateItem    = "update_item"
+	EventPublishItem   = "publish_item"
+	EventUnpublishItem = "unpublish_item"
+	EventDeleteItem    = "delete_item"
+
+	DefaultEventSubject = "content.{{.EventType}}.{{.SpaceID}}.{{.EnvID}}.{{.CollectionID}}.{{.ItemID}}"
+)
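+
+// DefaultEventSubject uses text/template-style placeholders; how it is rendered
+// is up to the event publisher. A minimal rendering sketch (illustrative values only):
+//
+//	var buf bytes.Buffer
+//	tmpl := template.Must(template.New("subject").Parse(DefaultEventSubject))
+//	_ = tmpl.Execute(&buf, map[string]string{
+//		"EventType": EventCreateItem, "SpaceID": "sp", "EnvID": "env",
+//		"CollectionID": "coll", "ItemID": "item",
+//	})
+//	// buf.String() == "content.create_item.sp.env.coll.item"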
+
+var (
+	ErrInvalidEventType = func(expected string, got any) error {
+		return errors.Errorf("invalid message type: expected '%s', got '%t'", expected, got)
+	}
+)
+
+type EventCreate struct {
+	SpaceID      string
+	EnvID        string
+	CollectionID string
+	ItemID       string
+}
+
+func (e EventCreate) ToProto() (proto.Message, error) {
+	return &pb.EventCreate{SpaceId: e.SpaceID, EnvId: e.EnvID, CollectionId: e.CollectionID, ItemId: e.ItemID}, nil
+}
+
+func (e *EventCreate) FromProto(message proto.Message) error {
+	p, ok := message.(*pb.EventCreate)
+	if !ok {
+		return ErrInvalidEventType("*pb.EventCreate", message)
+	}
+
+	e.SpaceID = p.SpaceId
+	e.EnvID = p.EnvId
+	e.CollectionID = p.CollectionId
+	e.ItemID = p.ItemId
+	return nil
+}
+
+type EventUpdate struct {
+	SpaceID      string
+	EnvID        string
+	CollectionID string
+	ItemID       string
+}
+
+func (e EventUpdate) ToProto() (proto.Message, error) {
+	return &pb.EventUpdate{SpaceId: e.SpaceID, EnvId: e.EnvID, CollectionId: e.CollectionID, ItemId: e.ItemID}, nil
+}
+
+func (e *EventUpdate) FromProto(message proto.Message) error {
+	p, ok := message.(*pb.EventUpdate)
+	if !ok {
+		return ErrInvalidEventType("*pb.EventUpdate", message)
+	}
+
+	e.SpaceID = p.SpaceId
+	e.EnvID = p.EnvId
+	e.CollectionID = p.CollectionId
+	e.ItemID = p.ItemId
+	return nil
+}
+
+type EventPublish struct {
+	SpaceID      string
+	EnvID        string
+	CollectionID string
+	ItemID       string
+}
+
+func (e EventPublish) ToProto() (proto.Message, error) {
+	return &pb.EventPublish{SpaceId: e.SpaceID, EnvId: e.EnvID, CollectionId: e.CollectionID, ItemId: e.ItemID}, nil
+}
+
+func (e *EventPublish) FromProto(message proto.Message) error {
+	p, ok := message.(*pb.EventPublish)
+	if !ok {
+		return ErrInvalidEventType("*pb.EventPublish", message)
+	}
+	e.SpaceID = p.SpaceId
+	e.EnvID = p.EnvId
+	e.CollectionID = p.CollectionId
+	e.ItemID = p.ItemId
+	return nil
+}
+
+type EventUnpublish struct {
+	SpaceID      string
+	EnvID        string
+	CollectionID string
+	ItemID       string
+}
+
+func (e EventUnpublish) ToProto() (proto.Message, error) {
+	return &pb.EventUnpublish{SpaceId: e.SpaceID, EnvId: e.EnvID, CollectionId: e.CollectionID, ItemId: e.ItemID}, nil
+}
+
+func (e *EventUnpublish) FromProto(message proto.Message) error {
+	p, ok := message.(*pb.EventUnpublish)
+	if !ok {
+		return ErrInvalidEventType("*pb.EventUnpublish", message)
+	}
+	e.SpaceID = p.SpaceId
+	e.EnvID = p.EnvId
+	e.CollectionID = p.CollectionId
+	e.ItemID = p.ItemId
+	return nil
+}
+
+type EventDelete struct {
+	SpaceID      string
+	EnvID        string
+	CollectionID string
+	ItemID       string
+}
+
+func (e EventDelete) ToProto() (proto.Message, error) {
+	return &pb.EventDelete{SpaceId: e.SpaceID, EnvId: e.EnvID, CollectionId: e.CollectionID, ItemId: e.ItemID}, nil
+}
+
+func (e *EventDelete) FromProto(message proto.Message) error {
+	p, ok := message.(*pb.EventDelete)
+	if !ok {
+		return ErrInvalidEventType("*pb.EventDelete", message)
+	}
+	e.SpaceID = p.SpaceId
+	e.EnvID = p.EnvId
+	e.CollectionID = p.CollectionId
+	e.ItemID = p.ItemId
+	return nil
+}
diff --git a/pkg/items/item.go b/pkg/items/item.go
new file mode 100644
index 0000000000000000000000000000000000000000..fc3a5154f621b601c02761c126be3ce72aa979ca
--- /dev/null
+++ b/pkg/items/item.go
@@ -0,0 +1,566 @@
+package items
+
+import (
+	"context"
+	"fmt"
+	"reflect"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/data"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	pb "git.perx.ru/perxis/perxis-go/proto/items"
+	"google.golang.org/protobuf/types/known/structpb"
+	"google.golang.org/protobuf/types/known/timestamppb"
+)
+
+var (
+	ErrNotSystemField = errors.New("not a system field")
+	ErrIncorrectValue = errors.New("incorrect value")
+	ErrIncorrectField = errors.New("incorrect field")
+)
+
+type State int
+
+func (s State) String() string {
+	switch s {
+	case StateDraft:
+		return "Draft"
+	case StateArchived:
+		return "Archived"
+	case StateChanged:
+		return "Changed"
+	case StatePublished:
+		return "Published"
+	}
+	return "Unknown"
+}
+
+const (
+	StateDraft State = iota
+	StatePublished
+	StateChanged
+	StateArchived
+
+	StateMax = StateArchived
+
+	SoftDeleteSeparator = "___"
+)
+
+var PermissionsAllowAny = &Permissions{
+	Edit:       true,
+	Archive:    true,
+	Publish:    true,
+	SoftDelete: true,
+	HardDelete: true,
+}
+
+// SystemFields lists the system fields of an Item
+var SystemFields = []string{
+	"id",
+	"space_id",
+	"env_id",
+	"collection_id",
+	"state",
+	"created_rev_at",
+	"created_by",
+	"created_at",
+	"updated_at",
+	"updated_by",
+	"revision_id",
+	"published_at",
+	"published_by",
+	"archived_at",
+	"archived_by",
+	"data",
+	"translations",
+	"locale",
+	"deleted",
+	"hidden",
+	"template",
+}
+
+type Permissions struct {
+	Edit       bool
+	Archive    bool
+	Publish    bool
+	SoftDelete bool
+	HardDelete bool
+}
+
+type Item struct {
+	ID           string                            `json:"id" bson:"_id"` // ID is the record identifier; generated by the system when the first revision is saved.
+	SpaceID      string                            `json:"spaceId" bson:"-"`
+	EnvID        string                            `json:"envId" bson:"-"`
+	CollectionID string                            `json:"collectionId" bson:"-"`
+	State        State                             `json:"state" bson:"state"`
+	CreatedRevAt time.Time                         `json:"createdRevAt,omitempty" bson:"created_rev_at,omitempty"`
+	CreatedBy    string                            `json:"createdBy,omitempty" bson:"created_by,omitempty"`
+	CreatedAt    time.Time                         `json:"createdAt,omitempty" bson:"created_at,omitempty"`
+	UpdatedAt    time.Time                         `json:"updatedAt,omitempty" bson:"updated_at,omitempty"`
+	UpdatedBy    string                            `json:"updatedBy,omitempty" bson:"updated_by,omitempty"`
+	Data         map[string]interface{}            `json:"data" bson:"data"`
+	Locale       string                            `json:"locale" bson:"-"`
+	Translations map[string]map[string]interface{} `json:"translations" bson:"translations,omitempty"`
+	RevisionID   string                            `json:"revId,omitempty" bson:"revision_id"`
+	PublishedAt  time.Time                         `json:"publishedAt,omitempty" bson:"published_at,omitempty"`
+	PublishedBy  string                            `json:"publishedBy,omitempty" bson:"published_by,omitempty"`
+	ArchivedAt   time.Time                         `json:"archivedAt,omitempty" bson:"archived_at,omitempty"`
+	ArchivedBy   string                            `json:"archivedBy,omitempty" bson:"archived_by,omitempty"`
+	Permissions  *Permissions                      `json:"permissions,omitempty" bson:"-"`
+
+	// Record flags
+	Deleted  bool `json:"deleted" bson:"deleted,omitempty"`
+	Hidden   bool `json:"hidden" bson:"hidden,omitempty"`
+	Template bool `json:"template" bson:"template,omitempty"`
+}
+
+func NewItem(spaceID, envID, collID, id string, data map[string]interface{}, translations map[string]map[string]interface{}) *Item {
+	return &Item{
+		ID:           id,
+		SpaceID:      spaceID,
+		EnvID:        envID,
+		CollectionID: collID,
+		Data:         data,
+		Translations: translations,
+	}
+}
+
+func (i *Item) Clone() *Item {
+	itm := *i
+	itm.Data = data.CloneMap(i.Data)
+
+	if i.Translations != nil {
+		itm.Translations = make(map[string]map[string]interface{}, len(i.Translations))
+		for t, m := range i.Translations {
+			itm.Translations[t] = data.CloneMap(m)
+		}
+	}
+
+	return &itm
+}
+
+func (i *Item) ToMap() map[string]interface{} {
+	return map[string]interface{}{
+		"id":             i.ID,
+		"space_id":       i.SpaceID,
+		"env_id":         i.EnvID,
+		"collection_id":  i.CollectionID,
+		"state":          i.State,
+		"created_rev_at": i.CreatedRevAt,
+		"created_by":     i.CreatedBy,
+		"created_at":     i.CreatedAt,
+		"updated_at":     i.UpdatedAt,
+		"updated_by":     i.UpdatedBy,
+		"revision_id":    i.RevisionID,
+		"published_at":   i.PublishedAt,
+		"published_by":   i.PublishedBy,
+		"archived_at":    i.ArchivedAt,
+		"archived_by":    i.ArchivedBy,
+		"data":           i.Data,
+		"translations":   i.Translations,
+		"locale":         i.Locale,
+		"deleted":        i.Deleted,
+		"hidden":         i.Hidden,
+		"template":       i.Template,
+	}
+}
+
+func (i *Item) SetData(locale string, data map[string]interface{}) {
+	if locale != "" {
+		if i.Translations == nil {
+			i.Translations = make(map[string]map[string]interface{})
+		}
+		i.Translations[locale] = data
+		return
+	}
+	i.Data = data
+}
+
+func (i *Item) GetData(locale string) map[string]interface{} {
+	if locale != "" && i.Translations != nil {
+		translation := i.Translations[locale]
+		return MergeData(i.Data, translation)
+	}
+	return i.Data
+}
+
+func (i Item) Encode(ctx context.Context, s *schema.Schema) (*Item, error) {
+	if i.Data != nil {
+		dt, err := schema.Encode(nil, s, i.Data)
+		if err != nil {
+			//return errors.WithField(err, "data")
+			return nil, err
+		}
+		i.Data = dt.(map[string]interface{})
+	}
+	if len(i.Translations) > 0 {
+		for l, v := range i.Translations {
+			dt, err := schema.Encode(nil, s, v)
+			if err != nil {
+				//return errors.WithField(err, fmt.Sprintf("translations.%s", l))
+				return nil, err
+			}
+			i.Translations[l] = dt.(map[string]interface{})
+		}
+	}
+	return &i, nil
+}
+
+func (i Item) Decode(ctx context.Context, s *schema.Schema) (res *Item, err error) {
+
+	if i.Data != nil {
+		i.Data, err = s.Decode(ctx, i.Data)
+		if err != nil {
+			return nil, err
+			//return errors.WithField(err, "data")
+		}
+	}
+
+	return &i, nil
+}
+
+// MergeData merges the given maps into a new map; values from later maps override earlier ones
+func MergeData(data ...map[string]interface{}) map[string]interface{} {
+	merge := make(map[string]interface{})
+	for _, d := range data {
+		for k, v := range d {
+			merge[k] = v
+		}
+	}
+	return merge
+}
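+
+// For example (illustrative):
+//
+//	MergeData(
+//		map[string]interface{}{"title": "base", "qty": 1},
+//		map[string]interface{}{"title": "ru"},
+//	)
+//	// => map[string]interface{}{"title": "ru", "qty": 1}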
+
+// ClearData removes values that are unchanged compared to the original data; the first map is modified in place
+func ClearData(data ...map[string]interface{}) map[string]interface{} {
+	var clear map[string]interface{}
+
+	for _, d := range data {
+		if clear == nil {
+			clear = d
+			continue
+		}
+
+		for k, v := range d {
+			if reflect.DeepEqual(clear[k], v) {
+				delete(clear, k)
+			}
+		}
+	}
+
+	return clear
+}
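+
+// For example (illustrative; note that the first map is modified in place):
+//
+//	ClearData(
+//		map[string]interface{}{"title": "ru", "qty": 1}, // translation
+//		map[string]interface{}{"title": "ru", "qty": 2}, // original data
+//	)
+//	// => map[string]interface{}{"qty": 1}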
+
+type ProcessDataFunc func(ctx context.Context, sch *schema.Schema, data map[string]interface{}) (map[string]interface{}, error)
+
+func (i Item) ProcessData(ctx context.Context, sch *schema.Schema, fn ProcessDataFunc, locales ...string) (*Item, error) {
+	if i.Data != nil {
+		dt, err := fn(ctx, sch, i.Data)
+		if err != nil {
+			return nil, errors.WithField(err, "data")
+		}
+		i.Data = dt
+	}
+
+	tr := make(map[string]map[string]interface{})
+	for _, l := range locales {
+
+		data := i.GetData(l)
+
+		dt, err := fn(ctx, sch, data)
+		if err != nil {
+			return nil, errors.WithField(err, fmt.Sprintf("translations.%s", l))
+		}
+		tr[l] = dt
+
+	}
+
+	i.Translations = nil
+	if len(tr) > 0 {
+		i.Translations = tr
+	}
+
+	return &i, nil
+}
+
+// IsSystemField reports whether the field is a system field
+func IsSystemField(field string) bool {
+	return data.Contains(field, SystemFields)
+}
+
+// SetSystemField sets the value of a system field
+func (i *Item) SetSystemField(field string, value interface{}) error {
+	ok := true
+	switch field {
+	case "id":
+		i.ID, ok = value.(string)
+	case "space_id":
+		i.SpaceID, ok = value.(string)
+	case "env_id":
+		i.EnvID, ok = value.(string)
+	case "collection_id":
+		i.CollectionID, ok = value.(string)
+	case "created_rev_at":
+		i.CreatedRevAt, ok = value.(time.Time)
+	case "created_by":
+		i.CreatedBy, ok = value.(string)
+	case "created_at":
+		i.CreatedAt, ok = value.(time.Time)
+	case "updated_by":
+		i.UpdatedBy, ok = value.(string)
+	case "updated_at":
+		i.UpdatedAt, ok = value.(time.Time)
+	case "revision_id":
+		i.RevisionID, ok = value.(string)
+	case "published_by":
+		i.PublishedBy, ok = value.(string)
+	case "published_at":
+		i.PublishedAt, ok = value.(time.Time)
+	case "hidden":
+		i.Hidden, ok = value.(bool)
+	case "deleted":
+		i.Deleted, ok = value.(bool)
+	case "template":
+		i.Template, ok = value.(bool)
+	default:
+		return ErrNotSystemField
+	}
+
+	if !ok {
+		return ErrIncorrectValue
+	}
+
+	return nil
+}
+
+// GetSystem returns the value of a system field
+func (i *Item) GetSystem(field string) (any, error) {
+	switch field {
+	case "id":
+		return i.ID, nil
+	case "space_id":
+		return i.SpaceID, nil
+	case "env_id":
+		return i.EnvID, nil
+	case "collection_id":
+		return i.CollectionID, nil
+	case "created_rev_at":
+		return i.CreatedRevAt, nil
+	case "created_by":
+		return i.CreatedBy, nil
+	case "created_at":
+		return i.CreatedAt, nil
+	case "updated_by":
+		return i.UpdatedBy, nil
+	case "updated_at":
+		return i.UpdatedAt, nil
+	case "revision_id":
+		return i.RevisionID, nil
+	case "published_by":
+		return i.PublishedBy, nil
+	case "published_at":
+		return i.PublishedAt, nil
+	case "hidden":
+		return i.Hidden, nil
+	case "deleted":
+		return i.Deleted, nil
+	case "template":
+		return i.Template, nil
+	}
+
+	return nil, ErrNotSystemField
+}
+
+func (i *Item) setItemData(field string, value interface{}) error {
+	if i.Data == nil {
+		i.Data = make(map[string]any)
+	}
+
+	return data.Set(field, i.Data, value)
+}
+
+func (i *Item) getItemData(field string) (any, error) {
+	if i.Data != nil {
+		if v, ok := data.Get(field, i.Data); ok {
+			return v, nil
+		}
+	}
+
+	return nil, ErrIncorrectField
+}
+
+// Set sets the value of a field: system fields are delegated to SetSystemField,
+// any other field is stored in Data.
+func (i *Item) Set(field string, value interface{}) error {
+	if err := i.SetSystemField(field, value); !errors.Is(err, ErrNotSystemField) {
+		return errors.Wrapf(err, "failed to set system field '%s' value", field)
+	}
+
+	return i.setItemData(field, value)
+}
+
+// Get returns the value of a field (system or data)
+func (i *Item) Get(field string) (any, error) {
+	if v, err := i.GetSystem(field); err == nil {
+		return v, err
+	}
+
+	return i.getItemData(field)
+}
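+
+// For example (illustrative):
+//
+//	itm := &Item{}
+//	_ = itm.Set("id", "rec1")          // system field, stored in itm.ID
+//	_ = itm.Set("obj.title", "Hello")  // data field, stored as nested maps in itm.Data
+//	v, _ := itm.Get("obj.title")       // "Hello"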
+
+// GetSystemField returns the field definition for a system attribute of Item
+func GetSystemField(fld string) (*field.Field, error) {
+	switch fld {
+	case "id", "space_id", "env_id", "collection_id", "revision_id":
+		return field.String(), nil
+	case "created_rev_at", "created_at", "updated_at", "published_at":
+		return field.Time(), nil
+	case "created_by", "updated_by", "published_by":
+		return field.String(), nil
+	case "hidden", "deleted", "template":
+		return field.Bool(), nil
+	}
+
+	return nil, ErrNotSystemField
+}
+
+// GetField returns the field definition, checking system fields first and then the schema
+func GetField(field string, sch *schema.Schema) (*field.Field, error) {
+	if f, err := GetSystemField(field); err == nil {
+		return f, err
+	}
+
+	f := sch.GetField(field)
+	if f == nil {
+		return nil, ErrIncorrectField
+	}
+
+	return f, nil
+}
+
+// GetSystemNamedFields returns the definitions of all Item system fields
+func GetSystemNamedFields() []field.NamedField {
+	fields := make([]field.NamedField, 0, len(SystemFields))
+	for _, n := range SystemFields {
+		f := field.NamedField{Name: n}
+		f.Field, _ = GetSystemField(n)
+		fields = append(fields, f)
+	}
+
+	return fields
+}
+
+func ItemToProto(item *Item) *pb.Item {
+	if item == nil {
+		return nil
+	}
+
+	protoItem := &pb.Item{
+		Id:           item.ID,
+		SpaceId:      item.SpaceID,
+		EnvId:        item.EnvID,
+		CollectionId: item.CollectionID,
+		State:        pb.Item_State(item.State),
+		CreatedBy:    item.CreatedBy,
+		UpdatedBy:    item.UpdatedBy,
+		RevisionId:   item.RevisionID,
+		PublishedBy:  item.PublishedBy,
+		ArchivedBy:   item.ArchivedBy,
+		Locale:       item.Locale,
+		Hidden:       item.Hidden,
+		Template:     item.Template,
+		Deleted:      item.Deleted,
+	}
+
+	if item.Data != nil {
+		protoItem.Data, _ = structpb.NewStruct(item.Data)
+	}
+	if item.Translations != nil {
+		protoItem.Translations = make(map[string]*structpb.Struct, len(item.Translations))
+		for k, v := range item.Translations {
+			protoItem.Translations[k], _ = structpb.NewStruct(v)
+		}
+	}
+
+	protoItem.CreatedRevAt = timestamppb.New(item.CreatedRevAt)
+	protoItem.PublishedAt = timestamppb.New(item.PublishedAt)
+	protoItem.ArchivedAt = timestamppb.New(item.ArchivedAt)
+	protoItem.CreatedAt = timestamppb.New(item.CreatedAt)
+	protoItem.UpdatedAt = timestamppb.New(item.UpdatedAt)
+
+	if item.Permissions != nil {
+		protoItem.Permissions = &pb.Permissions{
+			Edit:       item.Permissions.Edit,
+			Archive:    item.Permissions.Archive,
+			Publish:    item.Permissions.Publish,
+			SoftDelete: item.Permissions.SoftDelete,
+			HardDelete: item.Permissions.HardDelete,
+		}
+	}
+
+	return protoItem
+}
+
+func ItemFromProto(protoItem *pb.Item) *Item {
+
+	if protoItem == nil {
+		return nil
+	}
+
+	item := &Item{
+		ID:           protoItem.Id,
+		SpaceID:      protoItem.SpaceId,
+		EnvID:        protoItem.EnvId,
+		CollectionID: protoItem.CollectionId,
+		State:        State(protoItem.State),
+		CreatedBy:    protoItem.CreatedBy,
+		UpdatedBy:    protoItem.UpdatedBy,
+		RevisionID:   protoItem.RevisionId,
+		PublishedBy:  protoItem.PublishedBy,
+		ArchivedBy:   protoItem.ArchivedBy,
+		Locale:       protoItem.Locale,
+		Hidden:       protoItem.Hidden,
+		Template:     protoItem.Template,
+		Deleted:      protoItem.Deleted,
+	}
+
+	if protoItem.Data != nil {
+		item.Data = protoItem.Data.AsMap()
+	}
+
+	if protoItem.Translations != nil {
+		item.Translations = make(map[string]map[string]interface{}, len(protoItem.Translations))
+		for k, v := range protoItem.Translations {
+			item.Translations[k] = v.AsMap()
+		}
+	}
+
+	if protoItem.Permissions != nil {
+		item.Permissions = &Permissions{
+			Edit:       protoItem.Permissions.Edit,
+			Archive:    protoItem.Permissions.Archive,
+			Publish:    protoItem.Permissions.Publish,
+			SoftDelete: protoItem.Permissions.SoftDelete,
+			HardDelete: protoItem.Permissions.HardDelete,
+		}
+	}
+
+	item.CreatedRevAt = protoItem.CreatedRevAt.AsTime()
+	item.PublishedAt = protoItem.PublishedAt.AsTime()
+	item.ArchivedAt = protoItem.ArchivedAt.AsTime()
+	item.CreatedAt = protoItem.CreatedAt.AsTime()
+	item.UpdatedAt = protoItem.UpdatedAt.AsTime()
+
+	return item
+}
+
+func GetItemIDs(arr []*Item) []string {
+	res := make([]string, len(arr))
+	for i, e := range arr {
+		res[i] = e.ID
+	}
+	return res
+}
diff --git a/pkg/items/item_test.go b/pkg/items/item_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..fb54fc501f45281bbafd37983de3b8638d5692d4
--- /dev/null
+++ b/pkg/items/item_test.go
@@ -0,0 +1,61 @@
+package items
+
+import (
+	"fmt"
+	"testing"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"github.com/stretchr/testify/assert"
+)
+
+func TestItem_Set(t *testing.T) {
+	item := &Item{}
+
+	item.Set("id", "id")
+	assert.Equal(t, "id", item.ID)
+	now := time.Now()
+
+	item.Set("created_at", now)
+	assert.Equal(t, now, item.CreatedAt)
+
+	item.Set("a.b.c", 101)
+	assert.Equal(t, map[string]any{"a": map[string]any{"b": map[string]any{"c": 101}}}, item.Data)
+
+}
+
+func TestGetField(t *testing.T) {
+	sch := schema.New(
+		"a", field.String(),
+		"obj", field.Object(
+			"a", field.Number(field.NumberFormatFloat),
+			"b", field.String(),
+		),
+		"arr", field.Array(field.Object("a", field.Time())),
+	)
+
+	tests := []struct {
+		name    string
+		field   string
+		want    *field.Field
+		wantErr assert.ErrorAssertionFunc
+	}{
+		{"Simple", "a", field.String(), assert.NoError},
+		{"Incorrect field", "b", nil, assert.Error},
+		{"Object", "obj", field.Object("a", field.Number(field.NumberFormatFloat), "b", field.String()), assert.NoError},
+		{"Object path", "obj.a", field.Number(field.NumberFormatFloat), assert.NoError},
+		{"Array", "arr", field.Array(field.Object("a", field.Time())), assert.NoError},
+		{"Array path", "arr.a", field.Time(), assert.NoError},
+		{"Array item", "arr.", field.Object("a", field.Time()), assert.NoError},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := GetField(tt.field, sch)
+			if !tt.wantErr(t, err, fmt.Sprintf("GetField(%v, sch)", tt.field)) {
+				return
+			}
+			assert.Equalf(t, tt.want, got, "GetField(%v, sch)", tt.field)
+		})
+	}
+}
diff --git a/pkg/items/mocks/Items.go b/pkg/items/mocks/Items.go
new file mode 100644
index 0000000000000000000000000000000000000000..1d3ea35f22d13e65afc70e6c7cc847c60aa0f8a7
--- /dev/null
+++ b/pkg/items/mocks/Items.go
@@ -0,0 +1,538 @@
+// Code generated by mockery v2.14.0. DO NOT EDIT.
+
+package mocks
+
+import (
+	context "context"
+
+	items "git.perx.ru/perxis/perxis-go/pkg/items"
+	schema "git.perx.ru/perxis/perxis-go/pkg/schema"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// Items is an autogenerated mock type for the Items type
+type Items struct {
+	mock.Mock
+}
+
+// Aggregate provides a mock function with given fields: ctx, spaceId, envId, collectionId, filter, options
+func (_m *Items) Aggregate(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.AggregateOptions) (map[string]interface{}, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, filter)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 map[string]interface{}
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *items.Filter, ...*items.AggregateOptions) map[string]interface{}); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(map[string]interface{})
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, *items.Filter, ...*items.AggregateOptions) error); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// AggregatePublished provides a mock function with given fields: ctx, spaceId, envId, collectionId, filter, options
+func (_m *Items) AggregatePublished(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.AggregatePublishedOptions) (map[string]interface{}, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, filter)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 map[string]interface{}
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *items.Filter, ...*items.AggregatePublishedOptions) map[string]interface{}); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(map[string]interface{})
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, *items.Filter, ...*items.AggregatePublishedOptions) error); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Archive provides a mock function with given fields: ctx, item, options
+func (_m *Items) Archive(ctx context.Context, item *items.Item, options ...*items.ArchiveOptions) error {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, item)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.ArchiveOptions) error); ok {
+		r0 = rf(ctx, item, options...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Create provides a mock function with given fields: ctx, item, opts
+func (_m *Items) Create(ctx context.Context, item *items.Item, opts ...*items.CreateOptions) (*items.Item, error) {
+	_va := make([]interface{}, len(opts))
+	for _i := range opts {
+		_va[_i] = opts[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, item)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 *items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.CreateOptions) *items.Item); ok {
+		r0 = rf(ctx, item, opts...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*items.Item)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, *items.Item, ...*items.CreateOptions) error); ok {
+		r1 = rf(ctx, item, opts...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Delete provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, options
+func (_m *Items) Delete(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.DeleteOptions) error {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, itemId)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, ...*items.DeleteOptions) error); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Find provides a mock function with given fields: ctx, spaceId, envId, collectionId, filter, options
+func (_m *Items) Find(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindOptions) ([]*items.Item, int, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, filter)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 []*items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *items.Filter, ...*items.FindOptions) []*items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*items.Item)
+		}
+	}
+
+	var r1 int
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, *items.Filter, ...*items.FindOptions) int); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r1 = ret.Get(1).(int)
+	}
+
+	var r2 error
+	if rf, ok := ret.Get(2).(func(context.Context, string, string, string, *items.Filter, ...*items.FindOptions) error); ok {
+		r2 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r2 = ret.Error(2)
+	}
+
+	return r0, r1, r2
+}
+
+// FindArchived provides a mock function with given fields: ctx, spaceId, envId, collectionId, filter, options
+func (_m *Items) FindArchived(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindArchivedOptions) ([]*items.Item, int, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, filter)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 []*items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *items.Filter, ...*items.FindArchivedOptions) []*items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*items.Item)
+		}
+	}
+
+	var r1 int
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, *items.Filter, ...*items.FindArchivedOptions) int); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r1 = ret.Get(1).(int)
+	}
+
+	var r2 error
+	if rf, ok := ret.Get(2).(func(context.Context, string, string, string, *items.Filter, ...*items.FindArchivedOptions) error); ok {
+		r2 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r2 = ret.Error(2)
+	}
+
+	return r0, r1, r2
+}
+
+// FindPublished provides a mock function with given fields: ctx, spaceId, envId, collectionId, filter, options
+func (_m *Items) FindPublished(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindPublishedOptions) ([]*items.Item, int, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, filter)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 []*items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *items.Filter, ...*items.FindPublishedOptions) []*items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*items.Item)
+		}
+	}
+
+	var r1 int
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, *items.Filter, ...*items.FindPublishedOptions) int); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r1 = ret.Get(1).(int)
+	}
+
+	var r2 error
+	if rf, ok := ret.Get(2).(func(context.Context, string, string, string, *items.Filter, ...*items.FindPublishedOptions) error); ok {
+		r2 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r2 = ret.Error(2)
+	}
+
+	return r0, r1, r2
+}
+
+// Get provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, options
+func (_m *Items) Get(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.GetOptions) (*items.Item, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, itemId)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 *items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, ...*items.GetOptions) *items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*items.Item)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, string, ...*items.GetOptions) error); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// GetPublished provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, options
+func (_m *Items) GetPublished(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.GetPublishedOptions) (*items.Item, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, itemId)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 *items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, ...*items.GetPublishedOptions) *items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*items.Item)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, string, ...*items.GetPublishedOptions) error); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// GetRevision provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, revisionId, options
+func (_m *Items) GetRevision(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, revisionId string, options ...*items.GetRevisionOptions) (*items.Item, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, itemId, revisionId)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 *items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, string, ...*items.GetRevisionOptions) *items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, itemId, revisionId, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*items.Item)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, string, string, ...*items.GetRevisionOptions) error); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, itemId, revisionId, options...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Introspect provides a mock function with given fields: ctx, item, opts
+func (_m *Items) Introspect(ctx context.Context, item *items.Item, opts ...*items.IntrospectOptions) (*items.Item, *schema.Schema, error) {
+	_va := make([]interface{}, len(opts))
+	for _i := range opts {
+		_va[_i] = opts[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, item)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 *items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.IntrospectOptions) *items.Item); ok {
+		r0 = rf(ctx, item, opts...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*items.Item)
+		}
+	}
+
+	var r1 *schema.Schema
+	if rf, ok := ret.Get(1).(func(context.Context, *items.Item, ...*items.IntrospectOptions) *schema.Schema); ok {
+		r1 = rf(ctx, item, opts...)
+	} else {
+		if ret.Get(1) != nil {
+			r1 = ret.Get(1).(*schema.Schema)
+		}
+	}
+
+	var r2 error
+	if rf, ok := ret.Get(2).(func(context.Context, *items.Item, ...*items.IntrospectOptions) error); ok {
+		r2 = rf(ctx, item, opts...)
+	} else {
+		r2 = ret.Error(2)
+	}
+
+	return r0, r1, r2
+}
+
+// ListRevisions provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, options
+func (_m *Items) ListRevisions(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.ListRevisionsOptions) ([]*items.Item, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, itemId)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 []*items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, ...*items.ListRevisionsOptions) []*items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*items.Item)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, string, ...*items.ListRevisionsOptions) error); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Publish provides a mock function with given fields: ctx, item, options
+func (_m *Items) Publish(ctx context.Context, item *items.Item, options ...*items.PublishOptions) error {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, item)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.PublishOptions) error); ok {
+		r0 = rf(ctx, item, options...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Unarchive provides a mock function with given fields: ctx, item, options
+func (_m *Items) Unarchive(ctx context.Context, item *items.Item, options ...*items.UnarchiveOptions) error {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, item)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.UnarchiveOptions) error); ok {
+		r0 = rf(ctx, item, options...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Undelete provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, options
+func (_m *Items) Undelete(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.UndeleteOptions) error {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, itemId)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, ...*items.UndeleteOptions) error); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Unpublish provides a mock function with given fields: ctx, item, options
+func (_m *Items) Unpublish(ctx context.Context, item *items.Item, options ...*items.UnpublishOptions) error {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, item)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.UnpublishOptions) error); ok {
+		r0 = rf(ctx, item, options...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Update provides a mock function with given fields: ctx, item, options
+func (_m *Items) Update(ctx context.Context, item *items.Item, options ...*items.UpdateOptions) error {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, item)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.UpdateOptions) error); ok {
+		r0 = rf(ctx, item, options...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+type mockConstructorTestingTNewItems interface {
+	mock.TestingT
+	Cleanup(func())
+}
+
+// NewItems creates a new instance of Items. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+func NewItems(t mockConstructorTestingTNewItems) *Items {
+	mock := &Items{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
diff --git a/pkg/items/mocks/PreSaver.go b/pkg/items/mocks/PreSaver.go
new file mode 100644
index 0000000000000000000000000000000000000000..6010e17bd14db83518507ac53ab35076a6cbc5bf
--- /dev/null
+++ b/pkg/items/mocks/PreSaver.go
@@ -0,0 +1,62 @@
+// Code generated by mockery v2.14.0. DO NOT EDIT.
+
+package mocks
+
+import (
+	context "context"
+
+	items "git.perx.ru/perxis/perxis-go/pkg/items"
+	field "git.perx.ru/perxis/perxis-go/pkg/schema/field"
+
+	mock "github.com/stretchr/testify/mock"
+)
+
+// PreSaver is an autogenerated mock type for the PreSaver type
+type PreSaver struct {
+	mock.Mock
+}
+
+// PreSave provides a mock function with given fields: ctx, f, v, itemCtx
+func (_m *PreSaver) PreSave(ctx context.Context, f *field.Field, v interface{}, itemCtx *items.Context) (interface{}, bool, error) {
+	ret := _m.Called(ctx, f, v, itemCtx)
+
+	var r0 interface{}
+	if rf, ok := ret.Get(0).(func(context.Context, *field.Field, interface{}, *items.Context) interface{}); ok {
+		r0 = rf(ctx, f, v, itemCtx)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(interface{})
+		}
+	}
+
+	var r1 bool
+	if rf, ok := ret.Get(1).(func(context.Context, *field.Field, interface{}, *items.Context) bool); ok {
+		r1 = rf(ctx, f, v, itemCtx)
+	} else {
+		r1 = ret.Get(1).(bool)
+	}
+
+	var r2 error
+	if rf, ok := ret.Get(2).(func(context.Context, *field.Field, interface{}, *items.Context) error); ok {
+		r2 = rf(ctx, f, v, itemCtx)
+	} else {
+		r2 = ret.Error(2)
+	}
+
+	return r0, r1, r2
+}
+
+type mockConstructorTestingTNewPreSaver interface {
+	mock.TestingT
+	Cleanup(func())
+}
+
+// NewPreSaver creates a new instance of PreSaver. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+func NewPreSaver(t mockConstructorTestingTNewPreSaver) *PreSaver {
+	mock := &PreSaver{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
diff --git a/pkg/items/options.go b/pkg/items/options.go
new file mode 100644
index 0000000000000000000000000000000000000000..d48a1cdaacb157e46c1185a37602c1703bb3b59e
--- /dev/null
+++ b/pkg/items/options.go
@@ -0,0 +1,422 @@
+package items
+
+import "git.perx.ru/perxis/perxis-go/pkg/options"
+
+type Options struct {
+	Env               map[string]interface{}
+	Filter            []string
+	PermissionsFilter []string
+}
+
+func MergeOptions(opts ...Options) Options {
+	o := Options{
+		Env:    make(map[string]interface{}),
+		Filter: make([]string, 0),
+	}
+
+	for _, opt := range opts {
+
+		for k, v := range opt.Env {
+			o.Env[k] = v
+		}
+
+		o.Filter = append(o.Filter, opt.Filter...)
+		o.PermissionsFilter = append(o.PermissionsFilter, opt.PermissionsFilter...)
+	}
+
+	return o
+}
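+
+// For example (illustrative):
+//
+//	MergeOptions(
+//		Options{Env: map[string]interface{}{"a": 1}, Filter: []string{"f1"}},
+//		Options{Env: map[string]interface{}{"a": 2}, Filter: []string{"f2"}},
+//	)
+//	// => Env: {"a": 2}, Filter: ["f1", "f2"]; later Env values win, filters are appended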
+
+type CreateOptions struct {
+	Options
+
+	UpdateAttrs bool
+}
+
+func MergeCreateOptions(opts ...*CreateOptions) *CreateOptions {
+	o := &CreateOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		if opt.UpdateAttrs {
+			o.UpdateAttrs = true
+		}
+
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type IntrospectOptions struct {
+	Options
+	Locale string
+}
+
+func MergeIntrospectOptions(opts ...*IntrospectOptions) *IntrospectOptions {
+	o := &IntrospectOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+		if opt.Locale != "" {
+			o.Locale = opt.Locale
+		}
+	}
+	return o
+}
+
+type GetOptions struct {
+	Options
+}
+
+func MergeGetOptions(opts ...*GetOptions) *GetOptions {
+	o := &GetOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type FindOptions struct {
+	Options
+	options.FindOptions
+	Deleted   bool
+	Regular   bool
+	Hidden    bool
+	Templates bool
+}
+
+func NewFindOptions(opts ...interface{}) *FindOptions {
+	fo := &FindOptions{}
+	fo.FindOptions = *options.MergeFindOptions(opts...)
+	return fo
+}
+
+func MergeFindOptions(opts ...*FindOptions) *FindOptions {
+	o := NewFindOptions()
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Regular = o.Regular || opt.Regular
+		o.Templates = o.Templates || opt.Templates
+		o.Hidden = o.Hidden || opt.Hidden
+		o.Deleted = o.Deleted || opt.Deleted
+		o.Options = MergeOptions(o.Options, opt.Options)
+		o.FindOptions = *options.MergeFindOptions(&o.FindOptions, &opt.FindOptions)
+	}
+	return o
+}
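+
+// For example (illustrative), MergeFindOptions(&FindOptions{Hidden: true}, &FindOptions{Deleted: true})
+// yields a FindOptions with both Hidden and Deleted set: boolean flags are OR-ed,
+// while the embedded Options and options.FindOptions are merged.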
+
+type UpdateOptions struct {
+	Options
+
+	UpdateAttrs bool
+}
+
+func MergeUpdateOptions(opts ...*UpdateOptions) *UpdateOptions {
+	o := &UpdateOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		if opt.UpdateAttrs {
+			o.UpdateAttrs = true
+		}
+
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type DeleteOptions struct {
+	Options
+
+	Erase bool
+}
+
+func MergeDeleteOptions(opts ...*DeleteOptions) *DeleteOptions {
+	o := &DeleteOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		if opt.Erase {
+			o.Erase = true
+		}
+
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type SoftDeleteOptions struct {
+	Options
+}
+
+func MergeSoftDeleteOptions(opts ...*SoftDeleteOptions) *SoftDeleteOptions {
+	o := &SoftDeleteOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type UndeleteOptions struct {
+	Options
+}
+
+func MergeUndeleteOptions(opts ...*UndeleteOptions) *UndeleteOptions {
+	o := &UndeleteOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type PublishOptions struct {
+	Options
+
+	UpdateAttrs bool
+}
+
+func MergePublishOptions(opts ...*PublishOptions) *PublishOptions {
+	o := &PublishOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		if opt.UpdateAttrs {
+			o.UpdateAttrs = true
+		}
+
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type UnpublishOptions struct {
+	Options
+}
+
+func MergeUnpublishOptions(opts ...*UnpublishOptions) *UnpublishOptions {
+	o := &UnpublishOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type GetPublishedOptions struct {
+	Options
+	LocaleID string
+}
+
+func NewGetPublishedOptions(oo ...interface{}) *GetPublishedOptions {
+	fo := &GetPublishedOptions{}
+	for _, o := range oo {
+		switch o := o.(type) {
+		case string:
+			fo.LocaleID = o
+		}
+	}
+	return fo
+}
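+
+// Illustrative sketch: NewGetPublishedOptions takes the locale from its string arguments
+// and ignores everything else ("en" is a hypothetical locale ID).
+//
+//	opts := NewGetPublishedOptions("en")
+//	// opts.LocaleID == "en"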
+
+func MergeGetPublishedOptions(opts ...*GetPublishedOptions) *GetPublishedOptions {
+	o := &GetPublishedOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+		if opt.LocaleID != "" {
+			o.LocaleID = opt.LocaleID
+		}
+	}
+	return o
+}
+
+type FindPublishedOptions struct {
+	Options
+	options.FindOptions
+	LocaleID  string
+	Regular   bool
+	Hidden    bool
+	Templates bool
+}
+
+func NewFindPublishedOptions(opts ...interface{}) *FindPublishedOptions {
+	fo := &FindPublishedOptions{}
+	for _, o := range opts {
+		switch o := o.(type) {
+		case string:
+			fo.LocaleID = o
+		}
+	}
+
+	fo.FindOptions = *options.MergeFindOptions(opts...)
+	return fo
+}
+
+func MergeFindPublishedOptions(opts ...*FindPublishedOptions) *FindPublishedOptions {
+	o := NewFindPublishedOptions()
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Regular = o.Regular || opt.Regular
+		o.Templates = o.Templates || opt.Templates
+		o.Hidden = o.Hidden || opt.Hidden
+		o.Options = MergeOptions(o.Options, opt.Options)
+		o.FindOptions = *options.MergeFindOptions(&o.FindOptions, &opt.FindOptions)
+
+		if opt.LocaleID != "" {
+			o.LocaleID = opt.LocaleID
+		}
+	}
+	return o
+}
+
+type GetRevisionOptions struct {
+	Options
+}
+
+func MergeGetRevisionOptions(opts ...*GetRevisionOptions) *GetRevisionOptions {
+	o := &GetRevisionOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type ListRevisionsOptions struct {
+	Options
+	options.FindOptions
+}
+
+func MergeListRevisionsOptions(opts ...*ListRevisionsOptions) *ListRevisionsOptions {
+	o := &ListRevisionsOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+		o.FindOptions = *options.MergeFindOptions(&o.FindOptions, &opt.FindOptions)
+	}
+	return o
+}
+
+type ArchiveOptions struct {
+	Options
+}
+
+func MergeArchiveOptions(opts ...*ArchiveOptions) *ArchiveOptions {
+	o := &ArchiveOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type FindArchivedOptions struct {
+	Options
+	options.FindOptions
+}
+
+func NewFindArchivedOptions(oo ...interface{}) *FindArchivedOptions {
+	fo := &FindArchivedOptions{}
+	fo.FindOptions = *options.MergeFindOptions(oo...)
+	return fo
+}
+
+func MergeFindArchivedOptions(opts ...*FindArchivedOptions) *FindArchivedOptions {
+	o := NewFindArchivedOptions()
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+		o.FindOptions = *options.MergeFindOptions(&o.FindOptions, &opt.FindOptions)
+	}
+	return o
+}
+
+type UnarchiveOptions struct {
+	Options
+}
+
+func MergeUnarchiveOptions(opts ...*UnarchiveOptions) *UnarchiveOptions {
+	o := &UnarchiveOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type AggregateOptions struct {
+	Options
+	options.SortOptions
+
+	// Fields lists the fields to be returned or computed in the result.
+	// The key (string) is the name under which the result will be added.
+	// The value (string) is an expression whose evaluation produces the result.
+	// Expression functions (for a field F of type T):
+	// - distinct(F) - all values of the field, result type []T
+	// - min(F) - minimum value of the field, result type T
+	// - max(F) - maximum value of the field, result type T
+	// - avg(F) - average value of the field, result type T
+	// - sum(F) - sum of the field values, result type T
+	// - count() - number of records, result type int
+	Fields map[string]string
+}
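+
+// Illustrative sketch of a Fields request ("price" and "state" are hypothetical field names):
+//
+//	opts := &AggregateOptions{
+//		Fields: map[string]string{
+//			"total":     "count()",
+//			"minPrice":  "min(price)",
+//			"allStates": "distinct(state)",
+//		},
+//	}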
+
+func MergeAggregateOptions(opts ...*AggregateOptions) *AggregateOptions {
+	o := &AggregateOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+
+		if len(opt.Fields) > 0 && o.Fields == nil {
+			// Copy into a fresh map so the merged result does not alias (and later mutate) the caller's map.
+			o.Fields = make(map[string]string, len(opt.Fields))
+		}
+		for k, v := range opt.Fields {
+			o.Fields[k] = v
+		}
+	}
+	return o
+}
+
+type AggregatePublishedOptions AggregateOptions
+
+func MergeAggregatePublishedOptions(opts ...*AggregatePublishedOptions) *AggregatePublishedOptions {
+	ao := make([]*AggregateOptions, len(opts))
+	for i, opt := range opts {
+		ao[i] = (*AggregateOptions)(opt)
+	}
+	merged := MergeAggregateOptions(ao...)
+	return (*AggregatePublishedOptions)(merged)
+}
diff --git a/pkg/items/service.go b/pkg/items/service.go
new file mode 100644
index 0000000000000000000000000000000000000000..c10a69c55878a1fbfe571dc0fad594c338dccb99
--- /dev/null
+++ b/pkg/items/service.go
@@ -0,0 +1,151 @@
+package items
+
+import (
+	"context"
+	"regexp"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/filter"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+)
+
+// @microgen grpc
+// @protobuf git.perx.ru/perxis/perxis-go/proto/items
+// @grpc-addr content.items.Items
+type Items interface {
+	Create(ctx context.Context, item *Item, opts ...*CreateOptions) (created *Item, err error)
+	Introspect(ctx context.Context, item *Item, opts ...*IntrospectOptions) (itm *Item, sch *schema.Schema, err error)
+	Get(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*GetOptions) (item *Item, err error)
+	Find(ctx context.Context, spaceId, envId, collectionId string, filter *Filter, options ...*FindOptions) (items []*Item, total int, err error)
+	Update(ctx context.Context, item *Item, options ...*UpdateOptions) (err error)
+
+	// Delete removes an item.
+	// If the DeleteOptions.Erase flag is set, the data is removed from the system permanently.
+	// Otherwise a "soft delete" is performed: the item is marked as deleted, can be restored with Items.Undelete and is still available via Items.Get/Find.
+	Delete(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*DeleteOptions) (err error)
+
+	// Undelete restores items after a "soft delete".
+	Undelete(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*UndeleteOptions) (err error)
+
+	Publish(ctx context.Context, item *Item, options ...*PublishOptions) (err error)
+	Unpublish(ctx context.Context, item *Item, options ...*UnpublishOptions) (err error)
+	GetPublished(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*GetPublishedOptions) (item *Item, err error)
+	FindPublished(ctx context.Context, spaceId, envId, collectionId string, filter *Filter, options ...*FindPublishedOptions) (items []*Item, total int, err error)
+
+	GetRevision(ctx context.Context, spaceId, envId, collectionId, itemId, revisionId string, options ...*GetRevisionOptions) (item *Item, err error)
+	ListRevisions(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*ListRevisionsOptions) (items []*Item, err error)
+
+	Archive(ctx context.Context, item *Item, options ...*ArchiveOptions) (err error)
+	FindArchived(ctx context.Context, spaceId, envId, collectionId string, filter *Filter, options ...*FindArchivedOptions) (items []*Item, total int, err error)
+	Unarchive(ctx context.Context, item *Item, options ...*UnarchiveOptions) (err error)
+
+	// Aggregate performs aggregation over the collection data.
+	Aggregate(ctx context.Context, spaceId, envId, collectionId string, filter *Filter, options ...*AggregateOptions) (result map[string]interface{}, err error)
+	// AggregatePublished performs aggregation over the published data.
+	AggregatePublished(ctx context.Context, spaceId, envId, collectionId string, filter *Filter, options ...*AggregatePublishedOptions) (result map[string]interface{}, err error)
+}
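+
+// Illustrative sketch (svc is any Items implementation; ctx and the IDs are placeholders):
+// a plain Delete performs a soft delete that Undelete can revert, while
+// DeleteOptions.Erase removes the data permanently.
+//
+//	_ = svc.Delete(ctx, "space", "env", "collection", "item")
+//	_ = svc.Delete(ctx, "space", "env", "collection", "item", &DeleteOptions{Erase: true})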
+
+// PreSaver is an interface a field can implement to receive the PreSave event before an Item is saved to Storage.
+type PreSaver interface {
+	PreSave(ctx context.Context, f *field.Field, v interface{}, itemCtx *Context) (interface{}, bool, error)
+}
+
+type Filter struct {
+	ID     []string
+	Data   []*filter.Filter
+	Search string // Search query; only one query is supported at a time
+	Q      []string
+}
+
+func NewFilter(params ...interface{}) *Filter {
+	f := &Filter{}
+	for _, p := range params {
+		switch v := p.(type) {
+		case *filter.Filter:
+			f.Data = append(f.Data, v)
+		case string:
+			f.Q = append(f.Q, v)
+		}
+	}
+	return f
+}
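+
+// Illustrative sketch: string arguments become query expressions in Filter.Q, while
+// *filter.Filter arguments are appended to Filter.Data (the query strings below are hypothetical).
+//
+//	f := NewFilter("price > 100", "state == 'published'")
+//	// len(f.Q) == 2; f.Data and f.ID stay empty.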
+
+// AggregateExpRe is the format an aggregation expression must match.
+var AggregateExpRe = regexp.MustCompile(`([a-zA-Z]+)\((.*)\)`)
+
+func ParseAggregateExp(exp string) (string, string, bool) {
+	ss := AggregateExpRe.FindAllStringSubmatch(exp, -1)
+	if len(ss) == 0 || len(ss[0]) < 3 {
+		return "", "", false
+	}
+	return ss[0][1], ss[0][2], true
+}
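+
+// Illustrative sketch of ParseAggregateExp results ("price" is a hypothetical field name):
+//
+//	fn, fld, ok := ParseAggregateExp("min(price)") // fn == "min", fld == "price", ok == true
+//	fn, fld, ok = ParseAggregateExp("count()")     // fn == "count", fld == "", ok == true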
+
+func DecodeAggregateResult(ctx context.Context, request map[string]string, r map[string]interface{}, s *schema.Schema) (map[string]interface{}, error) {
+	result := make(map[string]interface{}, len(r))
+	for outputField, exp := range request {
+
+		funcName, fldName, ok := ParseAggregateExp(exp)
+		if !ok || fldName == "" {
+			if v, ok := r[outputField]; ok {
+				result[outputField] = v
+			}
+			continue
+		}
+
+		schemaFld := s.GetField(fldName)
+		if schemaFld == nil {
+			if v, ok := r[outputField]; ok {
+				result[outputField] = v
+			}
+			continue
+		}
+
+		if funcName == "distinct" {
+			schemaFld = field.Array(schemaFld)
+		}
+
+		data, err := schema.Decode(ctx, schemaFld, r[outputField])
+		if err != nil {
+			return nil, errors.Wrapf(err, "decode data for field '%s'", outputField)
+		}
+		result[outputField] = data
+	}
+
+	return result, nil
+}
+
+func EncodeAggregateResult(ctx context.Context, request map[string]string, r map[string]interface{}, s *schema.Schema) (map[string]interface{}, error) {
+	result := make(map[string]interface{}, len(r))
+	for outputField, exp := range request {
+
+		funcName, fldName, ok := ParseAggregateExp(exp)
+		if !ok || fldName == "" {
+			if v, ok := r[outputField]; ok {
+				result[outputField] = v
+			}
+			continue
+		}
+
+		schemaFld := s.GetField(fldName)
+		if schemaFld == nil {
+			if v, ok := r[outputField]; ok {
+				result[outputField] = v
+			}
+			continue
+		}
+
+		if funcName == "distinct" {
+			schemaFld = field.Array(schemaFld)
+		}
+
+		data, err := schema.Encode(ctx, schemaFld, r[outputField])
+		if err != nil {
+			return nil, errors.Wrapf(err, "encode data for field '%s'", outputField)
+		}
+		result[outputField] = data
+	}
+
+	return result, nil
+}
diff --git a/pkg/items/transport/client.go b/pkg/items/transport/client.go
new file mode 100644
index 0000000000000000000000000000000000000000..3f6bd04ceab90dad415d963c6db3d1a9f4fb4b47
--- /dev/null
+++ b/pkg/items/transport/client.go
@@ -0,0 +1,266 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"github.com/hashicorp/go-multierror"
+	"google.golang.org/grpc/status"
+)
+
+func (set EndpointsSet) Create(arg0 context.Context, arg1 *items.Item, arg2 ...*items.CreateOptions) (res0 *items.Item, res1 error) {
+	request := CreateRequest{
+		Item: arg1,
+		Opts: arg2,
+	}
+	response, res1 := set.CreateEndpoint(arg0, &request)
+	if res1 != nil {
+		return
+	}
+	return response.(*CreateResponse).Created, res1
+}
+
+func (set EndpointsSet) Introspect(arg0 context.Context, arg1 *items.Item, arg2 ...*items.IntrospectOptions) (res0 *items.Item, res1 *schema.Schema, res2 error) {
+	request := IntrospectRequest{
+		Item: arg1,
+		Opts: arg2,
+	}
+	response, res2 := set.IntrospectEndpoint(arg0, &request)
+	if res2 != nil {
+		return
+	}
+	resp := response.(*IntrospectResponse)
+
+	if len(resp.ValidationErrors) > 0 {
+		var merr *multierror.Error
+		for _, err := range resp.ValidationErrors {
+			var fieldErr errors.FieldError
+			if errors.As(err, &fieldErr) {
+				merr = multierror.Append(merr, fieldErr)
+			}
+		}
+
+		res2 = errors.Wrap(merr, "validation error")
+
+	}
+	return resp.Item, resp.Schema, res2
+}
+
+func (set EndpointsSet) Get(arg0 context.Context, arg1, arg2, arg3, arg4 string, arg5 ...*items.GetOptions) (res0 *items.Item, res1 error) {
+	request := GetRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		ItemId:       arg4,
+		SpaceId:      arg1,
+		Options:      arg5,
+	}
+	response, res1 := set.GetEndpoint(arg0, &request)
+	if res1 != nil {
+		return
+	}
+	return response.(*GetResponse).Item, res1
+}
+
+func (set EndpointsSet) Find(arg0 context.Context, arg1, arg2, arg3 string, arg4 *items.Filter, arg5 ...*items.FindOptions) (res0 []*items.Item, res1 int, res2 error) {
+	request := FindRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		Filter:       arg4,
+		Options:      arg5,
+		SpaceId:      arg1,
+	}
+	response, res2 := set.FindEndpoint(arg0, &request)
+	if res2 != nil {
+		return
+	}
+	return response.(*FindResponse).Items, response.(*FindResponse).Total, res2
+}
+
+func (set EndpointsSet) Update(arg0 context.Context, arg1 *items.Item, arg2 ...*items.UpdateOptions) (res0 error) {
+	request := UpdateRequest{Item: arg1, Options: arg2}
+	_, res0 = set.UpdateEndpoint(arg0, &request)
+	if res0 != nil {
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Delete(arg0 context.Context, arg1, arg2, arg3, arg4 string, options ...*items.DeleteOptions) (res0 error) {
+	request := DeleteRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		ItemId:       arg4,
+		SpaceId:      arg1,
+		Options:      options,
+	}
+	_, res0 = set.DeleteEndpoint(arg0, &request)
+	if res0 != nil {
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Undelete(arg0 context.Context, arg1, arg2, arg3, arg4 string, options ...*items.UndeleteOptions) (res0 error) {
+	request := UndeleteRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		ItemId:       arg4,
+		SpaceId:      arg1,
+		Options:      options,
+	}
+	_, res0 = set.UndeleteEndpoint(arg0, &request)
+	if res0 != nil {
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Publish(arg0 context.Context, arg1 *items.Item, arg2 ...*items.PublishOptions) (res0 error) {
+	request := PublishRequest{Item: arg1, Options: arg2}
+	_, res0 = set.PublishEndpoint(arg0, &request)
+	if res0 != nil {
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Unpublish(arg0 context.Context, arg1 *items.Item, arg2 ...*items.UnpublishOptions) (res0 error) {
+	request := UnpublishRequest{Item: arg1, Options: arg2}
+	_, res0 = set.UnpublishEndpoint(arg0, &request)
+	if res0 != nil {
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) GetPublished(arg0 context.Context, arg1, arg2, arg3, arg4 string, arg5 ...*items.GetPublishedOptions) (res0 *items.Item, res1 error) {
+	request := GetPublishedRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		ItemId:       arg4,
+		SpaceId:      arg1,
+		Options:      arg5,
+	}
+	response, res1 := set.GetPublishedEndpoint(arg0, &request)
+	if res1 != nil {
+		return
+	}
+	return response.(*GetPublishedResponse).Item, res1
+}
+
+func (set EndpointsSet) FindPublished(arg0 context.Context, arg1, arg2, arg3 string, arg4 *items.Filter, arg5 ...*items.FindPublishedOptions) (res0 []*items.Item, res1 int, res2 error) {
+	request := FindPublishedRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		Filter:       arg4,
+		Options:      arg5,
+		SpaceId:      arg1,
+	}
+	response, res2 := set.FindPublishedEndpoint(arg0, &request)
+	if res2 != nil {
+		return
+	}
+	return response.(*FindPublishedResponse).Items, response.(*FindPublishedResponse).Total, res2
+}
+
+func (set EndpointsSet) GetRevision(arg0 context.Context, arg1 string, arg2 string, arg3 string, arg4 string, arg5 string, arg6 ...*items.GetRevisionOptions) (res0 *items.Item, res1 error) {
+	request := GetRevisionRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		ItemId:       arg4,
+		RevisionId:   arg5,
+		SpaceId:      arg1,
+		Options:      arg6,
+	}
+	response, res1 := set.GetRevisionEndpoint(arg0, &request)
+	if res1 != nil {
+		return
+	}
+	return response.(*GetRevisionResponse).Item, res1
+}
+
+func (set EndpointsSet) ListRevisions(arg0 context.Context, arg1, arg2, arg3, arg4 string, arg5 ...*items.ListRevisionsOptions) (res0 []*items.Item, res1 error) {
+	request := ListRevisionsRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		ItemId:       arg4,
+		SpaceId:      arg1,
+		Options:      arg5,
+	}
+	response, res1 := set.ListRevisionsEndpoint(arg0, &request)
+	if res1 != nil {
+		return
+	}
+	return response.(*ListRevisionsResponse).Items, res1
+}
+
+func (set EndpointsSet) Archive(arg0 context.Context, arg1 *items.Item, arg2 ...*items.ArchiveOptions) (res0 error) {
+	request := ArchiveRequest{Item: arg1, Options: arg2}
+	_, res0 = set.ArchiveEndpoint(arg0, &request)
+	if res0 != nil {
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) FindArchived(arg0 context.Context, arg1, arg2, arg3 string, arg4 *items.Filter, arg5 ...*items.FindArchivedOptions) (res0 []*items.Item, res1 int, res2 error) {
+	request := FindArchivedRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		Options:      arg5,
+		Filter:       arg4,
+		SpaceId:      arg1,
+	}
+	response, res2 := set.FindArchivedEndpoint(arg0, &request)
+	if res2 != nil {
+		return
+	}
+	return response.(*FindArchivedResponse).Items, response.(*FindArchivedResponse).Total, res2
+}
+
+func (set EndpointsSet) Unarchive(arg0 context.Context, arg1 *items.Item, arg2 ...*items.UnarchiveOptions) (res0 error) {
+	request := UnarchiveRequest{Item: arg1, Options: arg2}
+	_, res0 = set.UnarchiveEndpoint(arg0, &request)
+	if res0 != nil {
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Aggregate(arg0 context.Context, arg1, arg2, arg3 string, arg4 *items.Filter, arg5 ...*items.AggregateOptions) (res0 map[string]interface{}, res1 error) {
+	request := AggregateRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		Filter:       arg4,
+		Options:      arg5,
+		SpaceId:      arg1,
+	}
+	response, res1 := set.AggregateEndpoint(arg0, &request)
+	if res1 != nil {
+		return
+	}
+	return response.(*AggregateResponse).Result, res1
+}
+
+func (set EndpointsSet) AggregatePublished(arg0 context.Context, arg1, arg2, arg3 string, arg4 *items.Filter, arg5 ...*items.AggregatePublishedOptions) (res0 map[string]interface{}, res1 error) {
+	request := AggregatePublishedRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		Filter:       arg4,
+		Options:      arg5,
+		SpaceId:      arg1,
+	}
+	response, res1 := set.AggregatePublishedEndpoint(arg0, &request)
+
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*AggregatePublishedResponse).Result, res1
+}
diff --git a/pkg/items/transport/endpoints.microgen.go b/pkg/items/transport/endpoints.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..5a6e8d5a678cd7180deca17a97f615fe7793ff6e
--- /dev/null
+++ b/pkg/items/transport/endpoints.microgen.go
@@ -0,0 +1,27 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import endpoint "github.com/go-kit/kit/endpoint"
+
+// EndpointsSet implements Items API and used for transport purposes.
+type EndpointsSet struct {
+	CreateEndpoint             endpoint.Endpoint
+	IntrospectEndpoint         endpoint.Endpoint
+	GetEndpoint                endpoint.Endpoint
+	FindEndpoint               endpoint.Endpoint
+	UpdateEndpoint             endpoint.Endpoint
+	DeleteEndpoint             endpoint.Endpoint
+	UndeleteEndpoint           endpoint.Endpoint
+	PublishEndpoint            endpoint.Endpoint
+	UnpublishEndpoint          endpoint.Endpoint
+	GetPublishedEndpoint       endpoint.Endpoint
+	FindPublishedEndpoint      endpoint.Endpoint
+	GetRevisionEndpoint        endpoint.Endpoint
+	ListRevisionsEndpoint      endpoint.Endpoint
+	ArchiveEndpoint            endpoint.Endpoint
+	FindArchivedEndpoint       endpoint.Endpoint
+	UnarchiveEndpoint          endpoint.Endpoint
+	AggregateEndpoint          endpoint.Endpoint
+	AggregatePublishedEndpoint endpoint.Endpoint
+}
diff --git a/pkg/items/transport/exchanges.microgen.go b/pkg/items/transport/exchanges.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..b601946f74837d41df38b07e3c5887ba8698b183
--- /dev/null
+++ b/pkg/items/transport/exchanges.microgen.go
@@ -0,0 +1,186 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	items "git.perx.ru/perxis/perxis-go/pkg/items"
+	schema "git.perx.ru/perxis/perxis-go/pkg/schema"
+)
+
+type (
+	CreateRequest struct {
+		Item *items.Item            `json:"item"`
+		Opts []*items.CreateOptions `json:"opts"` // This field was defined with ellipsis (...).
+	}
+	CreateResponse struct {
+		Created *items.Item `json:"created"`
+	}
+
+	IntrospectRequest struct {
+		Item *items.Item                `json:"item"`
+		Opts []*items.IntrospectOptions `json:"opts"` // This field was defined with ellipsis (...).
+	}
+	IntrospectResponse struct {
+		Item             *items.Item    `json:"item"`
+		Schema           *schema.Schema `json:"schema"`
+		ValidationErrors []error        `json:"validation_errors"`
+	}
+
+	GetRequest struct {
+		SpaceId      string              `json:"space_id"`
+		EnvId        string              `json:"env_id"`
+		CollectionId string              `json:"collection_id"`
+		ItemId       string              `json:"item_id"`
+		Options      []*items.GetOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	GetResponse struct {
+		Item *items.Item `json:"item"`
+	}
+
+	FindRequest struct {
+		SpaceId      string               `json:"space_id"`
+		EnvId        string               `json:"env_id"`
+		CollectionId string               `json:"collection_id"`
+		Filter       *items.Filter        `json:"filter"`
+		Options      []*items.FindOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	FindResponse struct {
+		Items []*items.Item `json:"items"`
+		Total int           `json:"total"`
+	}
+
+	UpdateRequest struct {
+		Item    *items.Item            `json:"item"`
+		Options []*items.UpdateOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	// Formal exchange type, please do not delete.
+	UpdateResponse struct{}
+
+	DeleteRequest struct {
+		SpaceId      string                 `json:"space_id"`
+		EnvId        string                 `json:"env_id"`
+		CollectionId string                 `json:"collection_id"`
+		ItemId       string                 `json:"item_id"`
+		Options      []*items.DeleteOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	// Formal exchange type, please do not delete.
+	DeleteResponse struct{}
+
+	UndeleteRequest struct {
+		SpaceId      string                   `json:"space_id"`
+		EnvId        string                   `json:"env_id"`
+		CollectionId string                   `json:"collection_id"`
+		ItemId       string                   `json:"item_id"`
+		Options      []*items.UndeleteOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	// Formal exchange type, please do not delete.
+	UndeleteResponse struct{}
+
+	PublishRequest struct {
+		Item    *items.Item             `json:"item"`
+		Options []*items.PublishOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	// Formal exchange type, please do not delete.
+	PublishResponse struct{}
+
+	UnpublishRequest struct {
+		Item    *items.Item               `json:"item"`
+		Options []*items.UnpublishOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	// Formal exchange type, please do not delete.
+	UnpublishResponse struct{}
+
+	GetPublishedRequest struct {
+		SpaceId      string                       `json:"space_id"`
+		EnvId        string                       `json:"env_id"`
+		CollectionId string                       `json:"collection_id"`
+		ItemId       string                       `json:"item_id"`
+		Options      []*items.GetPublishedOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	GetPublishedResponse struct {
+		Item *items.Item `json:"item"`
+	}
+
+	FindPublishedRequest struct {
+		SpaceId      string                        `json:"space_id"`
+		EnvId        string                        `json:"env_id"`
+		CollectionId string                        `json:"collection_id"`
+		Filter       *items.Filter                 `json:"filter"`
+		Options      []*items.FindPublishedOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	FindPublishedResponse struct {
+		Items []*items.Item `json:"items"`
+		Total int           `json:"total"`
+	}
+
+	GetRevisionRequest struct {
+		SpaceId      string                      `json:"space_id"`
+		EnvId        string                      `json:"env_id"`
+		CollectionId string                      `json:"collection_id"`
+		ItemId       string                      `json:"item_id"`
+		RevisionId   string                      `json:"revision_id"`
+		Options      []*items.GetRevisionOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	GetRevisionResponse struct {
+		Item *items.Item `json:"item"`
+	}
+
+	ListRevisionsRequest struct {
+		SpaceId      string                        `json:"space_id"`
+		EnvId        string                        `json:"env_id"`
+		CollectionId string                        `json:"collection_id"`
+		ItemId       string                        `json:"item_id"`
+		Options      []*items.ListRevisionsOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	ListRevisionsResponse struct {
+		Items []*items.Item `json:"items"`
+	}
+
+	ArchiveRequest struct {
+		Item    *items.Item             `json:"item"`
+		Options []*items.ArchiveOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	// Formal exchange type, please do not delete.
+	ArchiveResponse struct{}
+
+	FindArchivedRequest struct {
+		SpaceId      string                       `json:"space_id"`
+		EnvId        string                       `json:"env_id"`
+		CollectionId string                       `json:"collection_id"`
+		Filter       *items.Filter                `json:"filter"`
+		Options      []*items.FindArchivedOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	FindArchivedResponse struct {
+		Items []*items.Item `json:"items"`
+		Total int           `json:"total"`
+	}
+
+	UnarchiveRequest struct {
+		Item    *items.Item               `json:"item"`
+		Options []*items.UnarchiveOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	// Formal exchange type, please do not delete.
+	UnarchiveResponse struct{}
+
+	AggregateRequest struct {
+		SpaceId      string                    `json:"space_id"`
+		EnvId        string                    `json:"env_id"`
+		CollectionId string                    `json:"collection_id"`
+		Filter       *items.Filter             `json:"filter"`
+		Options      []*items.AggregateOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	AggregateResponse struct {
+		Result map[string]interface{} `json:"result"`
+	}
+
+	AggregatePublishedRequest struct {
+		SpaceId      string                             `json:"space_id"`
+		EnvId        string                             `json:"env_id"`
+		CollectionId string                             `json:"collection_id"`
+		Filter       *items.Filter                      `json:"filter"`
+		Options      []*items.AggregatePublishedOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	AggregatePublishedResponse struct {
+		Result map[string]interface{} `json:"result"`
+	}
+)
diff --git a/pkg/items/transport/grpc/client.go b/pkg/items/transport/grpc/client.go
new file mode 100644
index 0000000000000000000000000000000000000000..faea7cc6703746ba91b0af0e831431ffd76044fc
--- /dev/null
+++ b/pkg/items/transport/grpc/client.go
@@ -0,0 +1,34 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	grpcerr "git.perx.ru/perxis/perxis-go/pkg/errors/grpc"
+	transport "git.perx.ru/perxis/perxis-go/pkg/items/transport"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+	grpc "google.golang.org/grpc"
+)
+
+func NewClient(conn *grpc.ClientConn, opts ...grpckit.ClientOption) transport.EndpointsSet {
+	c := NewGRPCClient(conn, "", opts...)
+	return transport.EndpointsSet{
+		CreateEndpoint:             grpcerr.ClientMiddleware(c.CreateEndpoint),
+		IntrospectEndpoint:         grpcerr.ClientMiddleware(c.IntrospectEndpoint),
+		GetEndpoint:                grpcerr.ClientMiddleware(c.GetEndpoint),
+		FindEndpoint:               grpcerr.ClientMiddleware(c.FindEndpoint),
+		UpdateEndpoint:             grpcerr.ClientMiddleware(c.UpdateEndpoint),
+		DeleteEndpoint:             grpcerr.ClientMiddleware(c.DeleteEndpoint),
+		UndeleteEndpoint:           grpcerr.ClientMiddleware(c.UndeleteEndpoint),
+		PublishEndpoint:            grpcerr.ClientMiddleware(c.PublishEndpoint),
+		UnpublishEndpoint:          grpcerr.ClientMiddleware(c.UnpublishEndpoint),
+		GetPublishedEndpoint:       grpcerr.ClientMiddleware(c.GetPublishedEndpoint),
+		FindPublishedEndpoint:      grpcerr.ClientMiddleware(c.FindPublishedEndpoint),
+		GetRevisionEndpoint:        grpcerr.ClientMiddleware(c.GetRevisionEndpoint),
+		ListRevisionsEndpoint:      grpcerr.ClientMiddleware(c.ListRevisionsEndpoint),
+		ArchiveEndpoint:            grpcerr.ClientMiddleware(c.ArchiveEndpoint),
+		FindArchivedEndpoint:       grpcerr.ClientMiddleware(c.FindArchivedEndpoint),
+		UnarchiveEndpoint:          grpcerr.ClientMiddleware(c.UnarchiveEndpoint),
+		AggregateEndpoint:          grpcerr.ClientMiddleware(c.AggregateEndpoint),
+		AggregatePublishedEndpoint: grpcerr.ClientMiddleware(c.AggregatePublishedEndpoint),
+	}
+}
diff --git a/pkg/items/transport/grpc/client.microgen.go b/pkg/items/transport/grpc/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..a91c8d16495333a356ebeb3e48100c40e0f7bc91
--- /dev/null
+++ b/pkg/items/transport/grpc/client.microgen.go
@@ -0,0 +1,145 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/items/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/items"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	grpc "google.golang.org/grpc"
+)
+
+func NewGRPCClient(conn *grpc.ClientConn, addr string, opts ...grpckit.ClientOption) transport.EndpointsSet {
+	if addr == "" {
+		addr = "content.items.Items"
+	}
+	return transport.EndpointsSet{
+		ArchiveEndpoint: grpckit.NewClient(
+			conn, addr, "Archive",
+			_Encode_Archive_Request,
+			_Decode_Archive_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		CreateEndpoint: grpckit.NewClient(
+			conn, addr, "Create",
+			_Encode_Create_Request,
+			_Decode_Create_Response,
+			pb.CreateResponse{},
+			opts...,
+		).Endpoint(),
+		DeleteEndpoint: grpckit.NewClient(
+			conn, addr, "Delete",
+			_Encode_Delete_Request,
+			_Decode_Delete_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		UndeleteEndpoint: grpckit.NewClient(
+			conn, addr, "Undelete",
+			_Encode_Undelete_Request,
+			_Decode_Undelete_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		FindArchivedEndpoint: grpckit.NewClient(
+			conn, addr, "FindArchived",
+			_Encode_FindArchived_Request,
+			_Decode_FindArchived_Response,
+			pb.FindArchivedResponse{},
+			opts...,
+		).Endpoint(),
+		FindEndpoint: grpckit.NewClient(
+			conn, addr, "Find",
+			_Encode_Find_Request,
+			_Decode_Find_Response,
+			pb.FindResponse{},
+			opts...,
+		).Endpoint(),
+		FindPublishedEndpoint: grpckit.NewClient(
+			conn, addr, "FindPublished",
+			_Encode_FindPublished_Request,
+			_Decode_FindPublished_Response,
+			pb.FindPublishedResponse{},
+			opts...,
+		).Endpoint(),
+		GetEndpoint: grpckit.NewClient(
+			conn, addr, "Get",
+			_Encode_Get_Request,
+			_Decode_Get_Response,
+			pb.GetResponse{},
+			opts...,
+		).Endpoint(),
+		GetPublishedEndpoint: grpckit.NewClient(
+			conn, addr, "GetPublished",
+			_Encode_GetPublished_Request,
+			_Decode_GetPublished_Response,
+			pb.GetPublishedResponse{},
+			opts...,
+		).Endpoint(),
+		GetRevisionEndpoint: grpckit.NewClient(
+			conn, addr, "GetRevision",
+			_Encode_GetRevision_Request,
+			_Decode_GetRevision_Response,
+			pb.GetRevisionResponse{},
+			opts...,
+		).Endpoint(),
+		IntrospectEndpoint: grpckit.NewClient(
+			conn, addr, "Introspect",
+			_Encode_Introspect_Request,
+			_Decode_Introspect_Response,
+			pb.IntrospectResponse{},
+			opts...,
+		).Endpoint(),
+		ListRevisionsEndpoint: grpckit.NewClient(
+			conn, addr, "ListRevisions",
+			_Encode_ListRevisions_Request,
+			_Decode_ListRevisions_Response,
+			pb.ListRevisionsResponse{},
+			opts...,
+		).Endpoint(),
+		PublishEndpoint: grpckit.NewClient(
+			conn, addr, "Publish",
+			_Encode_Publish_Request,
+			_Decode_Publish_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		UnarchiveEndpoint: grpckit.NewClient(
+			conn, addr, "Unarchive",
+			_Encode_Unarchive_Request,
+			_Decode_Unarchive_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		UnpublishEndpoint: grpckit.NewClient(
+			conn, addr, "Unpublish",
+			_Encode_Unpublish_Request,
+			_Decode_Unpublish_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		UpdateEndpoint: grpckit.NewClient(
+			conn, addr, "Update",
+			_Encode_Update_Request,
+			_Decode_Update_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		AggregateEndpoint: grpckit.NewClient(
+			conn, addr, "Aggregate",
+			_Encode_Aggregate_Request,
+			_Decode_Aggregate_Response,
+			pb.AggregateResponse{},
+			opts...,
+		).Endpoint(),
+		AggregatePublishedEndpoint: grpckit.NewClient(
+			conn, addr, "AggregatePublished",
+			_Encode_AggregatePublished_Request,
+			_Decode_AggregatePublished_Response,
+			pb.AggregatePublishedResponse{},
+			opts...,
+		).Endpoint(),
+	}
+}
diff --git a/pkg/items/transport/grpc/protobuf_endpoint_converters.microgen.go b/pkg/items/transport/grpc/protobuf_endpoint_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..69a696df329a6e28e5912af8815df9852c0c504c
--- /dev/null
+++ b/pkg/items/transport/grpc/protobuf_endpoint_converters.microgen.go
@@ -0,0 +1,1010 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// Please, do not change functions names!
+package transportgrpc
+
+import (
+	"context"
+	"errors"
+
+	transport "git.perx.ru/perxis/perxis-go/pkg/items/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/items"
+	empty "github.com/golang/protobuf/ptypes/empty"
+)
+
+func _Encode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*transport.CreateRequest)
+	reqItem, err := PtrItemToProto(req.Item)
+	if err != nil {
+		return nil, err
+	}
+
+	opts, err := CreateOptionsToProto(req.Opts)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateRequest{
+		Item:    reqItem,
+		Options: opts,
+	}, nil
+}
+
+func _Encode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*transport.GetRequest)
+	return &pb.GetRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		ItemId:       req.ItemId,
+		SpaceId:      req.SpaceId,
+	}, nil
+}
+
+func _Encode_Find_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindRequest")
+	}
+	req := request.(*transport.FindRequest)
+	reqFilter, err := PtrFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ElPtrFindOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		SpaceId:      req.SpaceId,
+		Options:      reqOptions,
+		Filter:       reqFilter,
+	}, nil
+}
+
+func _Encode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UpdateRequest")
+	}
+	req := request.(*transport.UpdateRequest)
+	reqItem, err := PtrItemToProto(req.Item)
+	if err != nil {
+		return nil, err
+	}
+
+	opts, err := UpdateOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.UpdateRequest{
+		Item:    reqItem,
+		Options: opts,
+	}, nil
+}
+
+func _Encode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*transport.DeleteRequest)
+
+	opts, err := DeleteOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.DeleteRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		ItemId:       req.ItemId,
+		SpaceId:      req.SpaceId,
+		Options:      opts,
+	}, nil
+}
+
+func _Encode_Undelete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UndeleteRequest")
+	}
+	req := request.(*transport.UndeleteRequest)
+	return &pb.UndeleteRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		ItemId:       req.ItemId,
+		SpaceId:      req.SpaceId,
+	}, nil
+}
+
+func _Encode_Publish_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil PublishRequest")
+	}
+	req := request.(*transport.PublishRequest)
+	reqItem, err := PtrItemToProto(req.Item)
+	if err != nil {
+		return nil, err
+	}
+
+	opts, err := PublishOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.PublishRequest{
+		Item:    reqItem,
+		Options: opts,
+	}, nil
+}
+
+func _Encode_Unpublish_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UnpublishRequest")
+	}
+	req := request.(*transport.UnpublishRequest)
+	reqItem, err := PtrItemToProto(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.UnpublishRequest{Item: reqItem}, nil
+}
+
+func _Encode_GetPublished_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetPublishedRequest")
+	}
+	req := request.(*transport.GetPublishedRequest)
+	reqOptions, err := ElPtrGetPublishedOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetPublishedRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		ItemId:       req.ItemId,
+		SpaceId:      req.SpaceId,
+		Options:      reqOptions,
+	}, nil
+}
+
+func _Encode_FindPublished_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindPublishedRequest")
+	}
+	req := request.(*transport.FindPublishedRequest)
+	reqFilter, err := PtrFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ElPtrFindPublishedOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindPublishedRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		Options:      reqOptions,
+		Filter:       reqFilter,
+		SpaceId:      req.SpaceId,
+	}, nil
+}
+
+func _Encode_GetRevision_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRevisionRequest")
+	}
+	req := request.(*transport.GetRevisionRequest)
+	return &pb.GetRevisionRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		ItemId:       req.ItemId,
+		RevisionId:   req.RevisionId,
+		SpaceId:      req.SpaceId,
+	}, nil
+}
+
+func _Encode_ListRevisions_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListRevisionsRequest")
+	}
+	req := request.(*transport.ListRevisionsRequest)
+	reqOptions, err := ElPtrListRevisionsOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.ListRevisionsRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		ItemId:       req.ItemId,
+		SpaceId:      req.SpaceId,
+		Options:      reqOptions,
+	}, nil
+}
+
+func _Encode_Archive_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ArchiveRequest")
+	}
+	req := request.(*transport.ArchiveRequest)
+	reqItem, err := PtrItemToProto(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.ArchiveRequest{Item: reqItem}, nil
+}
+
+func _Encode_FindArchived_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindArchivedRequest")
+	}
+	req := request.(*transport.FindArchivedRequest)
+	reqFilter, err := PtrFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ElPtrFindArchivedOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindArchivedRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		Options:      reqOptions,
+		Filter:       reqFilter,
+		SpaceId:      req.SpaceId,
+	}, nil
+}
+
+func _Encode_Unarchive_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UnarchiveRequest")
+	}
+	req := request.(*transport.UnarchiveRequest)
+	reqItem, err := PtrItemToProto(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.UnarchiveRequest{Item: reqItem}, nil
+}
+
+func _Encode_Aggregate_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil AggregateRequest")
+	}
+	req := request.(*transport.AggregateRequest)
+	reqFilter, err := PtrFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ElPtrAggregateOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.AggregateRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		SpaceId:      req.SpaceId,
+		Options:      reqOptions,
+		Filter:       reqFilter,
+	}, nil
+}
+
+func _Encode_AggregatePublished_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil AggregatePublishedRequest")
+	}
+	req := request.(*transport.AggregatePublishedRequest)
+	reqFilter, err := PtrFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ElPtrAggregatePublishedOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.AggregatePublishedRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		SpaceId:      req.SpaceId,
+		Options:      reqOptions,
+		Filter:       reqFilter,
+	}, nil
+}
+
+func _Encode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*transport.CreateResponse)
+	respCreated, err := PtrItemToProto(resp.Created)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateResponse{Created: respCreated}, nil
+}
+
+func _Encode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*transport.GetResponse)
+	respItem, err := PtrItemToProto(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetResponse{Item: respItem}, nil
+}
+
+func _Encode_Find_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindResponse")
+	}
+	resp := response.(*transport.FindResponse)
+	respItems, err := ListPtrItemToProto(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindResponse{
+		Items: respItems,
+		Total: int32(resp.Total),
+	}, nil
+}
+
+func _Encode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Undelete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Publish_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Unpublish_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_GetPublished_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetPublishedResponse")
+	}
+	resp := response.(*transport.GetPublishedResponse)
+	respItem, err := PtrItemToProto(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetPublishedResponse{Item: respItem}, nil
+}
+
+func _Encode_FindPublished_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindPublishedResponse")
+	}
+	resp := response.(*transport.FindPublishedResponse)
+	respItems, err := ListPtrItemToProto(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindPublishedResponse{
+		Items: respItems,
+		Total: int32(resp.Total),
+	}, nil
+}
+
+func _Encode_GetRevision_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetRevisionResponse")
+	}
+	resp := response.(*transport.GetRevisionResponse)
+	respItem, err := PtrItemToProto(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetRevisionResponse{Item: respItem}, nil
+}
+
+func _Encode_ListRevisions_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListRevisionsResponse")
+	}
+	resp := response.(*transport.ListRevisionsResponse)
+	respItems, err := ListPtrItemToProto(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.ListRevisionsResponse{Items: respItems}, nil
+}
+
+func _Encode_Archive_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_FindArchived_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindArchivedResponse")
+	}
+	resp := response.(*transport.FindArchivedResponse)
+	respItems, err := ListPtrItemToProto(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindArchivedResponse{
+		Items: respItems,
+		Total: int32(resp.Total),
+	}, nil
+}
+
+func _Encode_Unarchive_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Aggregate_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil AggregateResponse")
+	}
+	resp := response.(*transport.AggregateResponse)
+	result, err := MapStringInterfaceToProto(resp.Result)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.AggregateResponse{
+		Result: result,
+	}, nil
+}
+
+func _Encode_AggregatePublished_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil AggregateResponse")
+	}
+	resp := response.(*transport.AggregatePublishedResponse)
+	result, err := MapStringInterfaceToProto(resp.Result)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.AggregatePublishedResponse{
+		Result: result,
+	}, nil
+}
+
+func _Decode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*pb.CreateRequest)
+	reqItem, err := ProtoToPtrItem(req.Item)
+	if err != nil {
+		return nil, err
+	}
+
+	opts, err := ProtoToCreateOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateRequest{
+		Item: reqItem,
+		Opts: opts,
+	}, nil
+}
+
+func _Decode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*pb.GetRequest)
+	return &transport.GetRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		ItemId:       string(req.ItemId),
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Aggregate_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil AggregateRequest")
+	}
+	req := request.(*pb.AggregateRequest)
+	reqFilter, err := ProtoToPtrFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ProtoToPtrServicesAggregateOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.AggregateRequest{
+		SpaceId:      string(req.SpaceId),
+		EnvId:        string(req.EnvId),
+		CollectionId: string(req.CollectionId),
+		Filter:       reqFilter,
+		Options:      reqOptions,
+	}, nil
+}
+
+func _Decode_AggregatePublished_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil AggregatePublishedRequest")
+	}
+	req := request.(*pb.AggregatePublishedRequest)
+	reqFilter, err := ProtoToPtrFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ProtoToPtrServicesAggregatePublishedOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.AggregatePublishedRequest{
+		SpaceId:      string(req.SpaceId),
+		EnvId:        string(req.EnvId),
+		CollectionId: string(req.CollectionId),
+		Filter:       reqFilter,
+		Options:      reqOptions,
+	}, nil
+}
+
+func _Decode_Find_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindRequest")
+	}
+	req := request.(*pb.FindRequest)
+	reqFilter, err := ProtoToPtrFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ProtoToElPtrFindOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		Options:      reqOptions,
+		Filter:       reqFilter,
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UpdateRequest")
+	}
+	req := request.(*pb.UpdateRequest)
+	reqItem, err := ProtoToPtrItem(req.Item)
+	if err != nil {
+		return nil, err
+	}
+
+	opts, err := ProtoToUpdateOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.UpdateRequest{
+		Item:    reqItem,
+		Options: opts,
+	}, nil
+}
+
+func _Decode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*pb.DeleteRequest)
+
+	opts, err := ProtoToDeleteOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.DeleteRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		ItemId:       string(req.ItemId),
+		SpaceId:      string(req.SpaceId),
+		Options:      opts,
+	}, nil
+}
+
+func _Decode_Undelete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UndeleteRequest")
+	}
+	req := request.(*pb.UndeleteRequest)
+	return &transport.UndeleteRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		ItemId:       string(req.ItemId),
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Publish_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil PublishRequest")
+	}
+	req := request.(*pb.PublishRequest)
+	reqItem, err := ProtoToPtrItem(req.Item)
+	if err != nil {
+		return nil, err
+	}
+
+	opts, err := ProtoToPublishOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.PublishRequest{
+		Item:    reqItem,
+		Options: opts,
+	}, nil
+}
+
+func _Decode_Unpublish_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UnpublishRequest")
+	}
+	req := request.(*pb.UnpublishRequest)
+	reqItem, err := ProtoToPtrItem(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.UnpublishRequest{Item: reqItem}, nil
+}
+
+func _Decode_GetPublished_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetPublishedRequest")
+	}
+	req := request.(*pb.GetPublishedRequest)
+	reqOptions, err := ProtoToElPtrGetPublishedOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetPublishedRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		ItemId:       string(req.ItemId),
+		SpaceId:      string(req.SpaceId),
+		Options:      reqOptions,
+	}, nil
+}
+
+func _Decode_FindPublished_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindPublishedRequest")
+	}
+	req := request.(*pb.FindPublishedRequest)
+	reqFilter, err := ProtoToPtrFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ProtoToElPtrFindPublishedOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindPublishedRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		Options:      reqOptions,
+		Filter:       reqFilter,
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_GetRevision_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRevisionRequest")
+	}
+	req := request.(*pb.GetRevisionRequest)
+	return &transport.GetRevisionRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		ItemId:       string(req.ItemId),
+		RevisionId:   string(req.RevisionId),
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_ListRevisions_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListRevisionsRequest")
+	}
+	req := request.(*pb.ListRevisionsRequest)
+	reqOptions, err := ProtoToElPtrListRevisionsOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.ListRevisionsRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		ItemId:       string(req.ItemId),
+		SpaceId:      string(req.SpaceId),
+		Options:      reqOptions,
+	}, nil
+}
+
+func _Decode_Archive_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ArchiveRequest")
+	}
+	req := request.(*pb.ArchiveRequest)
+	reqItem, err := ProtoToPtrItem(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.ArchiveRequest{Item: reqItem}, nil
+}
+
+func _Decode_FindArchived_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindArchivedRequest")
+	}
+	req := request.(*pb.FindArchivedRequest)
+	reqFilter, err := ProtoToPtrFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ProtoToElPtrFindArchivedOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindArchivedRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		Options:      reqOptions,
+		Filter:       reqFilter,
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Unarchive_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UnarchiveRequest")
+	}
+	req := request.(*pb.UnarchiveRequest)
+	reqItem, err := ProtoToPtrItem(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.UnarchiveRequest{Item: reqItem}, nil
+}
+
+func _Decode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*pb.CreateResponse)
+	respCreated, err := ProtoToPtrItem(resp.Created)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateResponse{Created: respCreated}, nil
+}
+
+func _Decode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*pb.GetResponse)
+	respItem, err := ProtoToPtrItem(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetResponse{Item: respItem}, nil
+}
+
+func _Decode_Find_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindResponse")
+	}
+	resp := response.(*pb.FindResponse)
+	respItems, err := ProtoToListPtrItem(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindResponse{
+		Items: respItems,
+		Total: int(resp.Total),
+	}, nil
+}
+
+func _Decode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Undelete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Publish_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Unpublish_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_GetPublished_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetPublishedResponse")
+	}
+	resp := response.(*pb.GetPublishedResponse)
+	respItem, err := ProtoToPtrItem(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetPublishedResponse{Item: respItem}, nil
+}
+
+func _Decode_FindPublished_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindPublishedResponse")
+	}
+	resp := response.(*pb.FindPublishedResponse)
+	respItems, err := ProtoToListPtrItem(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindPublishedResponse{
+		Items: respItems,
+		Total: int(resp.Total),
+	}, nil
+}
+
+func _Decode_GetRevision_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetRevisionResponse")
+	}
+	resp := response.(*pb.GetRevisionResponse)
+	respItem, err := ProtoToPtrItem(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetRevisionResponse{Item: respItem}, nil
+}
+
+func _Decode_ListRevisions_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListRevisionsResponse")
+	}
+	resp := response.(*pb.ListRevisionsResponse)
+	respItems, err := ProtoToListPtrItem(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.ListRevisionsResponse{Items: respItems}, nil
+}
+
+func _Decode_Archive_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_FindArchived_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindArchivedResponse")
+	}
+	resp := response.(*pb.FindArchivedResponse)
+	respItems, err := ProtoToListPtrItem(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindArchivedResponse{
+		Items: respItems,
+		Total: int(resp.Total),
+	}, nil
+}
+
+func _Decode_Unarchive_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Introspect_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil IntrospectRequest")
+	}
+	req := request.(*transport.IntrospectRequest)
+	reqItem, err := PtrItemToProto(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.IntrospectRequest{
+		Item: reqItem,
+	}, nil
+}
+
+func _Encode_Introspect_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil IntrospectResponse")
+	}
+	resp := response.(*transport.IntrospectResponse)
+	respItm, err := PtrItemToProto(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	respSch, err := PtrSchemaSchemaToProto(resp.Schema)
+	if err != nil {
+		return nil, err
+	}
+	respErrors, err := ValidationErrorsToProto(resp.ValidationErrors)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.IntrospectResponse{
+		Item:             respItm,
+		Schema:           respSch,
+		ValidationErrors: respErrors,
+	}, nil
+}
+
+func _Decode_Introspect_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil IntrospectRequest")
+	}
+	req := request.(*pb.IntrospectRequest)
+	reqItem, err := ProtoToPtrItem(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.IntrospectRequest{
+		Item: reqItem,
+	}, nil
+}
+
+func _Decode_Introspect_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil IntrospectResponse")
+	}
+	resp := response.(*pb.IntrospectResponse)
+	respItm, err := ProtoToPtrItem(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	respSch, err := ProtoToPtrSchemaSchema(resp.Schema)
+	if err != nil {
+		return nil, err
+	}
+	respErrs, err := ProtoToValidationErrors(resp.ValidationErrors)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.IntrospectResponse{
+		Item:             respItm,
+		Schema:           respSch,
+		ValidationErrors: respErrs,
+	}, nil
+}
+
+func _Decode_Aggregate_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil AggregateResponse")
+	}
+	resp := response.(*pb.AggregateResponse)
+	result, err := ProtoToMapStringInterface(resp.Result)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.AggregateResponse{
+		Result: result,
+	}, nil
+}
+
+func _Decode_AggregatePublished_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil AggregatePublishedResponse")
+	}
+	resp := response.(*pb.AggregatePublishedResponse)
+	result, err := ProtoToMapStringInterface(resp.Result)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.AggregatePublishedResponse{
+		Result: result,
+	}, nil
+}
diff --git a/pkg/items/transport/grpc/protobuf_type_converters.microgen.go b/pkg/items/transport/grpc/protobuf_type_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..7eae996594e266ddfc712183bd3e1d7c4a39c78a
--- /dev/null
+++ b/pkg/items/transport/grpc/protobuf_type_converters.microgen.go
@@ -0,0 +1,627 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// It is better for you if you do not change function names!
+// This file will never be overwritten.
+package transportgrpc
+
+import (
+	"fmt"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/filter"
+	service "git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	pbcommon "git.perx.ru/perxis/perxis-go/proto/common"
+	pb "git.perx.ru/perxis/perxis-go/proto/items"
+	jsoniter "github.com/json-iterator/go"
+	"google.golang.org/protobuf/types/known/structpb"
+)
+
+func MapStringInterfaceToProto(data map[string]interface{}) (*structpb.Struct, error) {
+	if data == nil {
+		return nil, nil
+	}
+	return structpb.NewStruct(data)
+}
+
+func ProtoToMapStringInterface(protoData *structpb.Struct) (map[string]interface{}, error) {
+	if protoData == nil {
+		return nil, nil
+	}
+	return protoData.AsMap(), nil
+}
+
+func MapStringMapStringInterfaceToProto(translations map[string]map[string]interface{}) (map[string]*structpb.Struct, error) {
+	if translations == nil {
+		return nil, nil
+	}
+	res := make(map[string]*structpb.Struct, len(translations))
+	for k, v := range translations {
+		res[k], _ = MapStringInterfaceToProto(v)
+	}
+	return res, nil
+}
+
+func PtrPermissionsToProto(permissions *service.Permissions) (*pb.Permissions, error) {
+	if permissions == nil {
+		return nil, nil
+	}
+
+	return &pb.Permissions{
+			Edit:       permissions.Edit,
+			Archive:    permissions.Archive,
+			Publish:    permissions.Publish,
+			SoftDelete: permissions.SoftDelete,
+			HardDelete: permissions.HardDelete,
+		},
+		nil
+}
+
+func ProtoToPtrPermissions(protoPermissions *pb.Permissions) (*service.Permissions, error) {
+	if protoPermissions == nil {
+		return nil, nil
+	}
+
+	return &service.Permissions{
+			Edit:       protoPermissions.Edit,
+			Archive:    protoPermissions.Archive,
+			Publish:    protoPermissions.Publish,
+			SoftDelete: protoPermissions.SoftDelete,
+			HardDelete: protoPermissions.HardDelete,
+		},
+		nil
+}
+
+func ProtoToMapStringMapStringInterface(protoTranslations map[string]*structpb.Struct) (map[string]map[string]interface{}, error) {
+	if protoTranslations == nil {
+		return nil, nil
+	}
+	res := make(map[string]map[string]interface{}, len(protoTranslations))
+	for k, v := range protoTranslations {
+		res[k], _ = ProtoToMapStringInterface(v)
+	}
+	return res, nil
+}
+
+func PtrItemToProto(item *service.Item) (*pb.Item, error) {
+	return service.ItemToProto(item), nil
+}
+
+func ProtoToPtrItem(protoItem *pb.Item) (*service.Item, error) {
+	return service.ItemFromProto(protoItem), nil
+}
+
+func PtrFilterToProto(filter *service.Filter) (*pb.Filter, error) {
+	if filter == nil {
+		return nil, nil
+	}
+
+	dt := make([]*pbcommon.Filter, 0, len(filter.Data))
+	for _, f := range filter.Data {
+		pf := &pbcommon.Filter{
+			Op:    string(f.Op),
+			Field: f.Field,
+		}
+
+		val, err := structpb.NewValue(f.Value)
+		if err != nil {
+			return nil, err
+		}
+		pf.Value = val
+		dt = append(dt, pf)
+	}
+
+	return &pb.Filter{
+		Id:   filter.ID,
+		Data: dt,
+		Q:    filter.Q,
+	}, nil
+}
+
+func ProtoToPtrFilter(protoFilter *pb.Filter) (*service.Filter, error) {
+	if protoFilter == nil {
+		return nil, nil
+	}
+
+	dt := make([]*filter.Filter, 0, len(protoFilter.Data))
+	for _, pf := range protoFilter.Data {
+
+		f := &filter.Filter{
+			Op:    filter.Op(pf.Op),
+			Field: pf.Field,
+			Value: pf.Value.AsInterface(),
+		}
+
+		dt = append(dt, f)
+	}
+
+	return &service.Filter{
+		ID:   protoFilter.Id,
+		Data: dt,
+		Q:    protoFilter.Q,
+	}, nil
+}
+
+func PtrServicesFindOptionsToProto(opts *options.FindOptions) (*pbcommon.FindOptions, error) {
+	if opts == nil {
+		return nil, nil
+	}
+	return &pbcommon.FindOptions{
+		Sort:          opts.Sort,
+		PageNum:       int32(opts.PageNum),
+		PageSize:      int32(opts.PageSize),
+		Fields:        opts.Fields,
+		ExcludeFields: opts.ExcludeFields,
+	}, nil
+}
+
+func ProtoToPtrServicesFindOptions(protoOpts *pbcommon.FindOptions) (*options.FindOptions, error) {
+	if protoOpts == nil {
+		return nil, nil
+	}
+	return &options.FindOptions{
+		SortOptions: options.SortOptions{
+			Sort: protoOpts.Sort,
+		},
+		PaginationOptions: options.PaginationOptions{
+			PageNum:  int(protoOpts.PageNum),
+			PageSize: int(protoOpts.PageSize),
+		},
+		FieldOptions: options.FieldOptions{
+			Fields:        protoOpts.Fields,
+			ExcludeFields: protoOpts.ExcludeFields,
+		},
+	}, nil
+}
+
+func ListPtrItemToProto(items []*service.Item) ([]*pb.Item, error) {
+	protoItems := make([]*pb.Item, 0, len(items))
+	for _, itm := range items {
+		pi, err := PtrItemToProto(itm)
+		if err != nil {
+			return nil, err
+		}
+		protoItems = append(protoItems, pi)
+	}
+	return protoItems, nil
+}
+
+func ProtoToListPtrItem(protoItems []*pb.Item) ([]*service.Item, error) {
+	items := make([]*service.Item, 0, len(protoItems))
+	for _, itm := range protoItems {
+		pi, err := ProtoToPtrItem(itm)
+		if err != nil {
+			return nil, err
+		}
+		items = append(items, pi)
+	}
+	return items, nil
+}
+
+func ProtoToCreateOptions(protoOptions *pb.CreateOptions) ([]*service.CreateOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+	return []*service.CreateOptions{
+		{UpdateAttrs: protoOptions.UpdateAttrs},
+	}, nil
+}
+
+func CreateOptionsToProto(options []*service.CreateOptions) (*pb.CreateOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeCreateOptions(options...)
+
+	return &pb.CreateOptions{
+		UpdateAttrs: opts.UpdateAttrs,
+	}, nil
+}
+
+func ElPtrGetOptionsToProto() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ProtoToElPtrGetOptions() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ElPtrFindOptionsToProto(options []*service.FindOptions) (*pb.FindOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeFindOptions(options...)
+
+	var err error
+
+	fo := &pb.FindOptions{
+		Deleted:   opts.Deleted,
+		Regular:   opts.Regular,
+		Hidden:    opts.Hidden,
+		Templates: opts.Templates,
+	}
+
+	fo.Options, err = PtrServicesFindOptionsToProto(&opts.FindOptions)
+	if err != nil {
+		return nil, err
+	}
+
+	return fo, nil
+}
+
+func ProtoToElPtrFindOptions(protoOptions *pb.FindOptions) ([]*service.FindOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+
+	var err error
+	fo := &service.FindOptions{
+		Deleted:   protoOptions.Deleted,
+		Regular:   protoOptions.Regular,
+		Hidden:    protoOptions.Hidden,
+		Templates: protoOptions.Templates,
+	}
+
+	o, err := ProtoToPtrServicesFindOptions(protoOptions.Options)
+	if err != nil {
+		return nil, err
+	}
+	if o != nil {
+		fo.FindOptions = *o
+	}
+
+	return []*service.FindOptions{fo}, nil
+}
+
+func ProtoToUpdateOptions(protoOptions *pb.UpdateOptions) ([]*service.UpdateOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+	return []*service.UpdateOptions{
+		{UpdateAttrs: protoOptions.UpdateAttrs},
+	}, nil
+}
+
+func UpdateOptionsToProto(options []*service.UpdateOptions) (*pb.UpdateOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeUpdateOptions(options...)
+
+	return &pb.UpdateOptions{
+		UpdateAttrs: opts.UpdateAttrs,
+	}, nil
+}
+
+func ProtoToDeleteOptions(protoOptions *pb.DeleteOptions) ([]*service.DeleteOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+	return []*service.DeleteOptions{
+		{Erase: protoOptions.Erase},
+	}, nil
+}
+
+func DeleteOptionsToProto(options []*service.DeleteOptions) (*pb.DeleteOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeDeleteOptions(options...)
+
+	return &pb.DeleteOptions{
+		Erase: opts.Erase,
+	}, nil
+}
+
+func ProtoToPublishOptions(protoOptions *pb.PublishOptions) ([]*service.PublishOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+	return []*service.PublishOptions{
+		{UpdateAttrs: protoOptions.UpdateAttrs},
+	}, nil
+}
+
+func PublishOptionsToProto(options []*service.PublishOptions) (*pb.PublishOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergePublishOptions(options...)
+
+	return &pb.PublishOptions{
+		UpdateAttrs: opts.UpdateAttrs,
+	}, nil
+}
+
+func ElPtrUnpublishOptionsToProto() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ProtoToElPtrUnpublishOptions() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ElPtrGetPublishedOptionsToProto(options []*service.GetPublishedOptions) (*pb.GetPublishedOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeGetPublishedOptions(options...)
+
+	return &pb.GetPublishedOptions{LocaleId: opts.LocaleID}, nil
+}
+
+func ProtoToElPtrGetPublishedOptions(protoOptions *pb.GetPublishedOptions) ([]*service.GetPublishedOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+
+	return []*service.GetPublishedOptions{{LocaleID: protoOptions.LocaleId}}, nil
+}
+
+func ElPtrFindPublishedOptionsToProto(options []*service.FindPublishedOptions) (*pb.FindPublishedOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeFindPublishedOptions(options...)
+
+	var err error
+
+	fo := &pb.FindPublishedOptions{
+		Regular:   opts.Regular,
+		Hidden:    opts.Hidden,
+		Templates: opts.Templates,
+	}
+	fo.Options, err = PtrServicesFindOptionsToProto(&opts.FindOptions)
+	if err != nil {
+		return nil, err
+	}
+
+	fo.LocaleId = opts.LocaleID
+
+	return fo, nil
+}
+
+func ProtoToElPtrFindPublishedOptions(protoOptions *pb.FindPublishedOptions) ([]*service.FindPublishedOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+
+	var err error
+	fo := &service.FindPublishedOptions{
+		Regular:   protoOptions.Regular,
+		Hidden:    protoOptions.Hidden,
+		Templates: protoOptions.Templates,
+	}
+
+	o, err := ProtoToPtrServicesFindOptions(protoOptions.Options)
+	if err != nil {
+		return nil, err
+	}
+	if o != nil {
+		fo.FindOptions = *o
+	}
+
+	fo.LocaleID = protoOptions.LocaleId
+
+	return []*service.FindPublishedOptions{fo}, nil
+}
+
+func ElPtrGetRevisionOptionsToProto() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ProtoToElPtrGetRevisionOptions() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ElPtrListRevisionsOptionsToProto(options []*service.ListRevisionsOptions) (*pb.ListRevisionsOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeListRevisionsOptions(options...)
+
+	var err error
+
+	fo := &pb.ListRevisionsOptions{}
+
+	fo.Options, err = PtrServicesFindOptionsToProto(&opts.FindOptions)
+	if err != nil {
+		return nil, err
+	}
+
+	return fo, nil
+}
+
+func ProtoToElPtrListRevisionsOptions(protoOptions *pb.ListRevisionsOptions) ([]*service.ListRevisionsOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+
+	var err error
+	fo := &service.ListRevisionsOptions{}
+
+	o, err := ProtoToPtrServicesFindOptions(protoOptions.Options)
+	if err != nil {
+		return nil, err
+	}
+	if o != nil {
+		fo.FindOptions = *o
+	}
+
+	return []*service.ListRevisionsOptions{fo}, nil
+}
+
+func ElPtrArchiveOptionsToProto() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ProtoToElPtrArchiveOptions() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ElPtrFindArchivedOptionsToProto(options []*service.FindArchivedOptions) (*pb.FindArchivedOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeFindArchivedOptions(options...)
+
+	var err error
+
+	fo := &pb.FindArchivedOptions{}
+
+	fo.Options, err = PtrServicesFindOptionsToProto(&opts.FindOptions)
+	if err != nil {
+		return nil, err
+	}
+
+	return fo, nil
+}
+
+func ProtoToElPtrFindArchivedOptions(protoOptions *pb.FindArchivedOptions) ([]*service.FindArchivedOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+
+	var err error
+	fo := &service.FindArchivedOptions{}
+
+	o, err := ProtoToPtrServicesFindOptions(protoOptions.Options)
+	if err != nil {
+		return nil, err
+	}
+	if o != nil {
+		fo.FindOptions = *o
+	}
+
+	return []*service.FindArchivedOptions{fo}, nil
+}
+
+func ElPtrUnarchiveOptionsToProto() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ProtoToElPtrUnarchiveOptions() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ElPtrIntrospectOptionsToProto() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ProtoToElPtrIntrospectOptions() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ProtoToPtrServicesAggregateOptions(protoOpts *pb.AggregateOptions) ([]*service.AggregateOptions, error) {
+	if protoOpts == nil {
+		return nil, nil
+	}
+	return []*service.AggregateOptions{{Fields: protoOpts.Fields}}, nil
+}
+
+func PtrServicesAggregateOptionsToProto(opts *service.AggregateOptions) (*pb.AggregateOptions, error) {
+	if opts == nil {
+		return nil, nil
+	}
+	return &pb.AggregateOptions{
+		Fields: opts.Fields,
+	}, nil
+}
+
+func ElPtrAggregateOptionsToProto(options []*service.AggregateOptions) (*pb.AggregateOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+	opts := service.MergeAggregateOptions(options...)
+	return PtrServicesAggregateOptionsToProto(opts)
+}
+
+func ProtoToPtrServicesAggregatePublishedOptions(protoOpts *pb.AggregatePublishedOptions) ([]*service.AggregatePublishedOptions, error) {
+	if protoOpts == nil {
+		return nil, nil
+	}
+	return []*service.AggregatePublishedOptions{{Fields: protoOpts.Fields}}, nil
+}
+
+func PtrServicesAggregatePublishedOptionsToProto(opts *service.AggregatePublishedOptions) (*pb.AggregatePublishedOptions, error) {
+	if opts == nil {
+		return nil, nil
+	}
+	return &pb.AggregatePublishedOptions{
+		Fields: opts.Fields,
+	}, nil
+}
+
+func ElPtrAggregatePublishedOptionsToProto(options []*service.AggregatePublishedOptions) (*pb.AggregatePublishedOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+	opts := service.MergeAggregatePublishedOptions(options...)
+	return PtrServicesAggregatePublishedOptionsToProto(opts)
+}
+
+func PtrSchemaSchemaToProto(sch *schema.Schema) (string, error) {
+	if sch == nil {
+		return "", nil
+	}
+	res, err := jsoniter.MarshalToString(sch)
+	if err != nil {
+		return "", err
+	}
+	return res, nil
+}
+
+func ProtoToPtrSchemaSchema(protoSch string) (*schema.Schema, error) {
+	if protoSch == "" {
+		return nil, nil
+	}
+	sch := schema.New()
+	err := sch.UnmarshalJSON([]byte(protoSch))
+	if err != nil {
+		return nil, fmt.Errorf("failed to decode schema. err: %s", err.Error())
+	}
+	return sch, nil
+}
+
+func ValidationErrorsToProto(errs []error) ([]*pbcommon.Error_BadRequest_FieldViolation, error) {
+	if errs == nil {
+		return nil, nil
+	}
+
+	var validationErrors []*pbcommon.Error_BadRequest_FieldViolation
+	for _, err := range errs {
+
+		var fieldError errors.FieldError
+		if errors.As(err, &fieldError) {
+			validationErrors = append(validationErrors, &pbcommon.Error_BadRequest_FieldViolation{
+				Description: errors.Unwrap(fieldError).Error(),
+				Field:       fieldError.Field(),
+			})
+		}
+	}
+
+	return validationErrors, nil
+}
+
+func ProtoToValidationErrors(protoErrs []*pbcommon.Error_BadRequest_FieldViolation) ([]error, error) {
+	if protoErrs == nil {
+		return nil, nil
+	}
+
+	var validationErrors []error
+	for _, err := range protoErrs {
+		validationErrors = append(validationErrors, errors.WithField(errors.New(err.Description), err.Field))
+	}
+
+	return validationErrors, nil
+}
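For illustration, a minimal round-trip through two of the converters above, as it might look inside this package; the option field names are taken from the converter bodies, and error handling is elided:

	in := []*service.FindOptions{{Deleted: true, Regular: true}}
	protoOpts, _ := ElPtrFindOptionsToProto(in)   // merge, then convert to *pb.FindOptions
	out, _ := ProtoToElPtrFindOptions(protoOpts)  // back to a one-element []*service.FindOptions
	_ = out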
diff --git a/pkg/items/transport/grpc/server.go b/pkg/items/transport/grpc/server.go
new file mode 100644
index 0000000000000000000000000000000000000000..4ac8a3b02dd4991518d8b132707bb2dd0ce3c362
--- /dev/null
+++ b/pkg/items/transport/grpc/server.go
@@ -0,0 +1,34 @@
+package transportgrpc
+
+import (
+	grpcerr "git.perx.ru/perxis/perxis-go/pkg/errors/grpc"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/items/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/items"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+)
+
+func NewServer(svc items.Items, opts ...grpckit.ServerOption) pb.ItemsServer {
+	eps := transport.Endpoints(svc)
+	eps = transport.EndpointsSet{
+		CreateEndpoint:             grpcerr.ServerMiddleware(eps.CreateEndpoint),
+		IntrospectEndpoint:         grpcerr.ServerMiddleware(eps.IntrospectEndpoint),
+		GetEndpoint:                grpcerr.ServerMiddleware(eps.GetEndpoint),
+		FindEndpoint:               grpcerr.ServerMiddleware(eps.FindEndpoint),
+		UpdateEndpoint:             grpcerr.ServerMiddleware(eps.UpdateEndpoint),
+		DeleteEndpoint:             grpcerr.ServerMiddleware(eps.DeleteEndpoint),
+		UndeleteEndpoint:           grpcerr.ServerMiddleware(eps.UndeleteEndpoint),
+		PublishEndpoint:            grpcerr.ServerMiddleware(eps.PublishEndpoint),
+		UnpublishEndpoint:          grpcerr.ServerMiddleware(eps.UnpublishEndpoint),
+		GetPublishedEndpoint:       grpcerr.ServerMiddleware(eps.GetPublishedEndpoint),
+		FindPublishedEndpoint:      grpcerr.ServerMiddleware(eps.FindPublishedEndpoint),
+		GetRevisionEndpoint:        grpcerr.ServerMiddleware(eps.GetRevisionEndpoint),
+		ListRevisionsEndpoint:      grpcerr.ServerMiddleware(eps.ListRevisionsEndpoint),
+		ArchiveEndpoint:            grpcerr.ServerMiddleware(eps.ArchiveEndpoint),
+		FindArchivedEndpoint:       grpcerr.ServerMiddleware(eps.FindArchivedEndpoint),
+		UnarchiveEndpoint:          grpcerr.ServerMiddleware(eps.UnarchiveEndpoint),
+		AggregateEndpoint:          grpcerr.ServerMiddleware(eps.AggregateEndpoint),
+		AggregatePublishedEndpoint: grpcerr.ServerMiddleware(eps.AggregatePublishedEndpoint),
+	}
+	return NewGRPCServer(&eps, opts...)
+}
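A hedged wiring sketch for this constructor; svc stands for some items.Items implementation, and pb.RegisterItemsServer is assumed to exist in the generated proto package (it is not part of this diff):

	// svc implements items.Items; googlegrpc is google.golang.org/grpc.
	lis, err := net.Listen("tcp", ":50051")
	if err != nil {
		log.Fatal(err)
	}
	s := googlegrpc.NewServer()
	pb.RegisterItemsServer(s, transportgrpc.NewServer(svc))
	log.Fatal(s.Serve(lis))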
diff --git a/pkg/items/transport/grpc/server.microgen.go b/pkg/items/transport/grpc/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..a904b1e5a610dc12e38768d88b92eee392a6d7af
--- /dev/null
+++ b/pkg/items/transport/grpc/server.microgen.go
@@ -0,0 +1,292 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// DO NOT EDIT.
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/items/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/items"
+	grpc "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	context "golang.org/x/net/context"
+)
+
+type itemsServer struct {
+	create             grpc.Handler
+	introspect         grpc.Handler
+	get                grpc.Handler
+	find               grpc.Handler
+	update             grpc.Handler
+	delete             grpc.Handler
+	undelete           grpc.Handler
+	publish            grpc.Handler
+	unpublish          grpc.Handler
+	getPublished       grpc.Handler
+	findPublished      grpc.Handler
+	getRevision        grpc.Handler
+	listRevisions      grpc.Handler
+	archive            grpc.Handler
+	findArchived       grpc.Handler
+	unarchive          grpc.Handler
+	aggregate          grpc.Handler
+	aggregatePublished grpc.Handler
+
+	pb.UnimplementedItemsServer
+}
+
+func NewGRPCServer(endpoints *transport.EndpointsSet, opts ...grpc.ServerOption) pb.ItemsServer {
+	return &itemsServer{
+		archive: grpc.NewServer(
+			endpoints.ArchiveEndpoint,
+			_Decode_Archive_Request,
+			_Encode_Archive_Response,
+			opts...,
+		),
+		create: grpc.NewServer(
+			endpoints.CreateEndpoint,
+			_Decode_Create_Request,
+			_Encode_Create_Response,
+			opts...,
+		),
+		delete: grpc.NewServer(
+			endpoints.DeleteEndpoint,
+			_Decode_Delete_Request,
+			_Encode_Delete_Response,
+			opts...,
+		),
+		undelete: grpc.NewServer(
+			endpoints.UndeleteEndpoint,
+			_Decode_Undelete_Request,
+			_Encode_Undelete_Response,
+			opts...,
+		),
+		find: grpc.NewServer(
+			endpoints.FindEndpoint,
+			_Decode_Find_Request,
+			_Encode_Find_Response,
+			opts...,
+		),
+		findArchived: grpc.NewServer(
+			endpoints.FindArchivedEndpoint,
+			_Decode_FindArchived_Request,
+			_Encode_FindArchived_Response,
+			opts...,
+		),
+		findPublished: grpc.NewServer(
+			endpoints.FindPublishedEndpoint,
+			_Decode_FindPublished_Request,
+			_Encode_FindPublished_Response,
+			opts...,
+		),
+		get: grpc.NewServer(
+			endpoints.GetEndpoint,
+			_Decode_Get_Request,
+			_Encode_Get_Response,
+			opts...,
+		),
+		getPublished: grpc.NewServer(
+			endpoints.GetPublishedEndpoint,
+			_Decode_GetPublished_Request,
+			_Encode_GetPublished_Response,
+			opts...,
+		),
+		getRevision: grpc.NewServer(
+			endpoints.GetRevisionEndpoint,
+			_Decode_GetRevision_Request,
+			_Encode_GetRevision_Response,
+			opts...,
+		),
+		introspect: grpc.NewServer(
+			endpoints.IntrospectEndpoint,
+			_Decode_Introspect_Request,
+			_Encode_Introspect_Response,
+			opts...,
+		),
+		listRevisions: grpc.NewServer(
+			endpoints.ListRevisionsEndpoint,
+			_Decode_ListRevisions_Request,
+			_Encode_ListRevisions_Response,
+			opts...,
+		),
+		publish: grpc.NewServer(
+			endpoints.PublishEndpoint,
+			_Decode_Publish_Request,
+			_Encode_Publish_Response,
+			opts...,
+		),
+		unarchive: grpc.NewServer(
+			endpoints.UnarchiveEndpoint,
+			_Decode_Unarchive_Request,
+			_Encode_Unarchive_Response,
+			opts...,
+		),
+		unpublish: grpc.NewServer(
+			endpoints.UnpublishEndpoint,
+			_Decode_Unpublish_Request,
+			_Encode_Unpublish_Response,
+			opts...,
+		),
+		update: grpc.NewServer(
+			endpoints.UpdateEndpoint,
+			_Decode_Update_Request,
+			_Encode_Update_Response,
+			opts...,
+		),
+		aggregate: grpc.NewServer(
+			endpoints.AggregateEndpoint,
+			_Decode_Aggregate_Request,
+			_Encode_Aggregate_Response,
+			opts...,
+		),
+		aggregatePublished: grpc.NewServer(
+			endpoints.AggregatePublishedEndpoint,
+			_Decode_AggregatePublished_Request,
+			_Encode_AggregatePublished_Response,
+			opts...,
+		),
+	}
+}
+
+func (S *itemsServer) Create(ctx context.Context, req *pb.CreateRequest) (*pb.CreateResponse, error) {
+	_, resp, err := S.create.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.CreateResponse), nil
+}
+
+func (S *itemsServer) Introspect(ctx context.Context, req *pb.IntrospectRequest) (*pb.IntrospectResponse, error) {
+	_, resp, err := S.introspect.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.IntrospectResponse), nil
+}
+
+func (S *itemsServer) Get(ctx context.Context, req *pb.GetRequest) (*pb.GetResponse, error) {
+	_, resp, err := S.get.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetResponse), nil
+}
+
+func (S *itemsServer) Find(ctx context.Context, req *pb.FindRequest) (*pb.FindResponse, error) {
+	_, resp, err := S.find.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.FindResponse), nil
+}
+
+func (S *itemsServer) Update(ctx context.Context, req *pb.UpdateRequest) (*empty.Empty, error) {
+	_, resp, err := S.update.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *itemsServer) Delete(ctx context.Context, req *pb.DeleteRequest) (*empty.Empty, error) {
+	_, resp, err := S.delete.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *itemsServer) Undelete(ctx context.Context, req *pb.UndeleteRequest) (*empty.Empty, error) {
+	_, resp, err := S.undelete.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *itemsServer) Publish(ctx context.Context, req *pb.PublishRequest) (*empty.Empty, error) {
+	_, resp, err := S.publish.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *itemsServer) Unpublish(ctx context.Context, req *pb.UnpublishRequest) (*empty.Empty, error) {
+	_, resp, err := S.unpublish.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *itemsServer) GetPublished(ctx context.Context, req *pb.GetPublishedRequest) (*pb.GetPublishedResponse, error) {
+	_, resp, err := S.getPublished.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetPublishedResponse), nil
+}
+
+func (S *itemsServer) FindPublished(ctx context.Context, req *pb.FindPublishedRequest) (*pb.FindPublishedResponse, error) {
+	_, resp, err := S.findPublished.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.FindPublishedResponse), nil
+}
+
+func (S *itemsServer) GetRevision(ctx context.Context, req *pb.GetRevisionRequest) (*pb.GetRevisionResponse, error) {
+	_, resp, err := S.getRevision.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetRevisionResponse), nil
+}
+
+func (S *itemsServer) ListRevisions(ctx context.Context, req *pb.ListRevisionsRequest) (*pb.ListRevisionsResponse, error) {
+	_, resp, err := S.listRevisions.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.ListRevisionsResponse), nil
+}
+
+func (S *itemsServer) Archive(ctx context.Context, req *pb.ArchiveRequest) (*empty.Empty, error) {
+	_, resp, err := S.archive.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *itemsServer) FindArchived(ctx context.Context, req *pb.FindArchivedRequest) (*pb.FindArchivedResponse, error) {
+	_, resp, err := S.findArchived.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.FindArchivedResponse), nil
+}
+
+func (S *itemsServer) Unarchive(ctx context.Context, req *pb.UnarchiveRequest) (*empty.Empty, error) {
+	_, resp, err := S.unarchive.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *itemsServer) Aggregate(ctx context.Context, req *pb.AggregateRequest) (*pb.AggregateResponse, error) {
+	_, resp, err := S.aggregate.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.AggregateResponse), nil
+}
+
+func (S *itemsServer) AggregatePublished(ctx context.Context, req *pb.AggregatePublishedRequest) (*pb.AggregatePublishedResponse, error) {
+	_, resp, err := S.aggregatePublished.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.AggregatePublishedResponse), nil
+}
diff --git a/pkg/items/transport/server.microgen.go b/pkg/items/transport/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..4ba5f4a265125ea7b7168fab8d9c7c0f747f23bf
--- /dev/null
+++ b/pkg/items/transport/server.microgen.go
@@ -0,0 +1,220 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+	"strings"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	endpoint "github.com/go-kit/kit/endpoint"
+	"github.com/hashicorp/go-multierror"
+)
+
+func Endpoints(svc items.Items) EndpointsSet {
+	return EndpointsSet{
+		ArchiveEndpoint:            ArchiveEndpoint(svc),
+		CreateEndpoint:             CreateEndpoint(svc),
+		DeleteEndpoint:             DeleteEndpoint(svc),
+		UndeleteEndpoint:           UndeleteEndpoint(svc),
+		FindArchivedEndpoint:       FindArchivedEndpoint(svc),
+		FindEndpoint:               FindEndpoint(svc),
+		FindPublishedEndpoint:      FindPublishedEndpoint(svc),
+		GetEndpoint:                GetEndpoint(svc),
+		GetPublishedEndpoint:       GetPublishedEndpoint(svc),
+		GetRevisionEndpoint:        GetRevisionEndpoint(svc),
+		IntrospectEndpoint:         IntrospectEndpoint(svc),
+		ListRevisionsEndpoint:      ListRevisionsEndpoint(svc),
+		PublishEndpoint:            PublishEndpoint(svc),
+		UnarchiveEndpoint:          UnarchiveEndpoint(svc),
+		UnpublishEndpoint:          UnpublishEndpoint(svc),
+		UpdateEndpoint:             UpdateEndpoint(svc),
+		AggregateEndpoint:          AggregateEndpoint(svc),
+		AggregatePublishedEndpoint: AggregatePublishedEndpoint(svc),
+	}
+}
+
+func CreateEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*CreateRequest)
+		res0, res1 := svc.Create(arg0, req.Item, req.Opts...)
+		return &CreateResponse{Created: res0}, res1
+	}
+}
+
+func IntrospectEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*IntrospectRequest)
+		res0, res1, res2 := svc.Introspect(arg0, req.Item, req.Opts...)
+		resp := &IntrospectResponse{
+			Item:   res0,
+			Schema: res1,
+		}
+		if res2 != nil {
+
+			err := res2
+
+			var merr *multierror.Error
+			if (strings.Contains(err.Error(), "validation error") ||
+				strings.Contains(err.Error(), "modification error") ||
+				strings.Contains(err.Error(), "decode error") ||
+				strings.Contains(err.Error(), "encode error")) && errors.As(err, &merr) {
+
+				errs := make([]error, 0)
+				for _, e := range merr.WrappedErrors() {
+					var errField errors.FieldError
+					if errors.As(e, &errField) {
+						errs = append(errs, e)
+					}
+				}
+
+				if len(errs) > 0 {
+					resp.ValidationErrors = errs
+					res2 = nil
+				}
+			}
+		}
+		return resp, res2
+	}
+}
+
+func GetEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetRequest)
+		res0, res1 := svc.Get(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.Options...)
+		return &GetResponse{Item: res0}, res1
+	}
+}
+
+func FindEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*FindRequest)
+		res0, res1, res2 := svc.Find(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Filter, req.Options...)
+		return &FindResponse{
+			Items: res0,
+			Total: res1,
+		}, res2
+	}
+}
+
+func UpdateEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*UpdateRequest)
+		res0 := svc.Update(arg0, req.Item, req.Options...)
+		return &UpdateResponse{}, res0
+	}
+}
+
+func DeleteEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*DeleteRequest)
+		res0 := svc.Delete(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.Options...)
+		return &DeleteResponse{}, res0
+	}
+}
+
+func UndeleteEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*UndeleteRequest)
+		res0 := svc.Undelete(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.Options...)
+		return &UndeleteResponse{}, res0
+	}
+}
+
+func PublishEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*PublishRequest)
+		res0 := svc.Publish(arg0, req.Item, req.Options...)
+		return &PublishResponse{}, res0
+	}
+}
+
+func UnpublishEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*UnpublishRequest)
+		res0 := svc.Unpublish(arg0, req.Item, req.Options...)
+		return &UnpublishResponse{}, res0
+	}
+}
+
+func GetPublishedEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetPublishedRequest)
+		res0, res1 := svc.GetPublished(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.Options...)
+		return &GetPublishedResponse{Item: res0}, res1
+	}
+}
+
+func FindPublishedEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*FindPublishedRequest)
+		res0, res1, res2 := svc.FindPublished(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Filter, req.Options...)
+		return &FindPublishedResponse{
+			Items: res0,
+			Total: res1,
+		}, res2
+	}
+}
+
+func GetRevisionEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetRevisionRequest)
+		res0, res1 := svc.GetRevision(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.RevisionId, req.Options...)
+		return &GetRevisionResponse{Item: res0}, res1
+	}
+}
+
+func ListRevisionsEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*ListRevisionsRequest)
+		res0, res1 := svc.ListRevisions(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.Options...)
+		return &ListRevisionsResponse{Items: res0}, res1
+	}
+}
+
+func ArchiveEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*ArchiveRequest)
+		res0 := svc.Archive(arg0, req.Item, req.Options...)
+		return &ArchiveResponse{}, res0
+	}
+}
+
+func FindArchivedEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*FindArchivedRequest)
+		res0, res1, res2 := svc.FindArchived(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Filter, req.Options...)
+		return &FindArchivedResponse{
+			Items: res0,
+			Total: res1,
+		}, res2
+	}
+}
+
+func UnarchiveEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*UnarchiveRequest)
+		res0 := svc.Unarchive(arg0, req.Item, req.Options...)
+		return &UnarchiveResponse{}, res0
+	}
+}
+
+func AggregateEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*AggregateRequest)
+		res0, res1 := svc.Aggregate(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Filter, req.Options...)
+		return &AggregateResponse{
+			Result: res0,
+		}, res1
+	}
+}
+
+func AggregatePublishedEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*AggregatePublishedRequest)
+		res0, res1 := svc.AggregatePublished(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Filter, req.Options...)
+		return &AggregatePublishedResponse{
+			Result: res0,
+		}, res1
+	}
+}
diff --git a/pkg/optional/optional.go b/pkg/optional/optional.go
new file mode 100644
index 0000000000000000000000000000000000000000..94e89bf6a04708abf853f2e8aaf8d7dbd9e99371
--- /dev/null
+++ b/pkg/optional/optional.go
@@ -0,0 +1,10 @@
+package optional
+
+var (
+	True  *bool = Bool(true)
+	False *bool = Bool(false)
+)
+
+func Bool(v bool) *bool {
+	return &v
+}
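A tiny usage sketch for this helper (hypothetical caller code); note that True and False are shared package-level pointers, so callers should treat the pointed-to values as read-only:

	var deleted *bool                       // nil means "not set"
	deleted = optional.True                 // shared pointer to true
	deleted = optional.Bool(someCondition)  // pointer to a freshly computed value (someCondition is a placeholder)
	_ = deleted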
diff --git a/pkg/organizations/mocks/Organizations.go b/pkg/organizations/mocks/Organizations.go
new file mode 100644
index 0000000000000000000000000000000000000000..7b7255897b5f33f44202ef8e79255a93ea7ee429
--- /dev/null
+++ b/pkg/organizations/mocks/Organizations.go
@@ -0,0 +1,120 @@
+// Code generated by mockery v2.7.4. DO NOT EDIT.
+
+package mocks
+
+import (
+	context "context"
+
+	organizations "git.perx.ru/perxis/perxis-go/pkg/organizations"
+	services "git.perx.ru/perxis/perxis-go/pkg/options"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// Organizations is an autogenerated mock type for the Organizations type
+type Organizations struct {
+	mock.Mock
+}
+
+// Create provides a mock function with given fields: ctx, org
+func (_m *Organizations) Create(ctx context.Context, org *organizations.Organization) (*organizations.Organization, error) {
+	ret := _m.Called(ctx, org)
+
+	var r0 *organizations.Organization
+	if rf, ok := ret.Get(0).(func(context.Context, *organizations.Organization) *organizations.Organization); ok {
+		r0 = rf(ctx, org)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*organizations.Organization)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, *organizations.Organization) error); ok {
+		r1 = rf(ctx, org)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Delete provides a mock function with given fields: ctx, orgId
+func (_m *Organizations) Delete(ctx context.Context, orgId string) error {
+	ret := _m.Called(ctx, orgId)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string) error); ok {
+		r0 = rf(ctx, orgId)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Find provides a mock function with given fields: ctx, filter, opts
+func (_m *Organizations) Find(ctx context.Context, filter *organizations.Filter, opts *services.FindOptions) ([]*organizations.Organization, int, error) {
+	ret := _m.Called(ctx, filter, opts)
+
+	var r0 []*organizations.Organization
+	if rf, ok := ret.Get(0).(func(context.Context, *organizations.Filter, *services.FindOptions) []*organizations.Organization); ok {
+		r0 = rf(ctx, filter, opts)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*organizations.Organization)
+		}
+	}
+
+	var r1 int
+	if rf, ok := ret.Get(1).(func(context.Context, *organizations.Filter, *services.FindOptions) int); ok {
+		r1 = rf(ctx, filter, opts)
+	} else {
+		r1 = ret.Get(1).(int)
+	}
+
+	var r2 error
+	if rf, ok := ret.Get(2).(func(context.Context, *organizations.Filter, *services.FindOptions) error); ok {
+		r2 = rf(ctx, filter, opts)
+	} else {
+		r2 = ret.Error(2)
+	}
+
+	return r0, r1, r2
+}
+
+// Get provides a mock function with given fields: ctx, orgId
+func (_m *Organizations) Get(ctx context.Context, orgId string) (*organizations.Organization, error) {
+	ret := _m.Called(ctx, orgId)
+
+	var r0 *organizations.Organization
+	if rf, ok := ret.Get(0).(func(context.Context, string) *organizations.Organization); ok {
+		r0 = rf(ctx, orgId)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*organizations.Organization)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string) error); ok {
+		r1 = rf(ctx, orgId)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Update provides a mock function with given fields: ctx, org
+func (_m *Organizations) Update(ctx context.Context, org *organizations.Organization) error {
+	ret := _m.Called(ctx, org)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *organizations.Organization) error); ok {
+		r0 = rf(ctx, org)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
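A short test sketch showing the typical use of this generated mock with testify (IDs and values are placeholders):

	m := &mocks.Organizations{}
	m.On("Get", mock.Anything, "org-1").
		Return(&organizations.Organization{ID: "org-1", Name: "Acme"}, nil)

	org, err := m.Get(context.Background(), "org-1") // org.ID == "org-1", err == nil
	m.AssertExpectations(t)
	_, _ = org, err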
diff --git a/pkg/organizations/organization.go b/pkg/organizations/organization.go
new file mode 100644
index 0000000000000000000000000000000000000000..b95daa6988710d26fcda200eebef8afeff9158df
--- /dev/null
+++ b/pkg/organizations/organization.go
@@ -0,0 +1,14 @@
+package organizations
+
+type Organization struct {
+	ID          string  `bson:"_id"`
+	Name        string  `bson:"name"`
+	Description string  `bson:"description"`
+	LogoURL     string  `bson:"logoUrl"`
+	OwnerID     *string `bson:"-"`
+}
+
+func (o *Organization) SetOwnerID(s string) *Organization {
+	o.OwnerID = &s
+	return o
+}
diff --git a/pkg/organizations/service.go b/pkg/organizations/service.go
new file mode 100644
index 0000000000000000000000000000000000000000..9ba3b9a91e6acd2353b1cebff342fa6a329547c9
--- /dev/null
+++ b/pkg/organizations/service.go
@@ -0,0 +1,25 @@
+package organizations
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+)
+
+// @microgen grpc
+// @protobuf git.perx.ru/perxis/perxis-go/proto/organizations
+// @grpc-addr account.organizations.Organizations
+type Organizations interface {
+	Create(ctx context.Context, org *Organization) (created *Organization, err error)
+
+	Get(ctx context.Context, orgId string) (org *Organization, err error)
+	Update(ctx context.Context, org *Organization) (err error)
+	Delete(ctx context.Context, orgId string) (err error)
+	Find(ctx context.Context, filter *Filter, opts *options.FindOptions) (orgs []*Organization, total int, err error)
+}
+
+// Filter restricts which Organizations are matched by Find.
+type Filter struct {
+	ID   []string
+	Name []string
+}
diff --git a/pkg/organizations/transport/client.microgen.go b/pkg/organizations/transport/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..00b14af754877e02fe3936269d72cb13c7b138da
--- /dev/null
+++ b/pkg/organizations/transport/client.microgen.go
@@ -0,0 +1,76 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+	"errors"
+
+	options "git.perx.ru/perxis/perxis-go/pkg/options"
+	organizations "git.perx.ru/perxis/perxis-go/pkg/organizations"
+	codes "google.golang.org/grpc/codes"
+	status "google.golang.org/grpc/status"
+)
+
+func (set EndpointsSet) Create(arg0 context.Context, arg1 *organizations.Organization) (res0 *organizations.Organization, res1 error) {
+	request := CreateRequest{Org: arg1}
+	response, res1 := set.CreateEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*CreateResponse).Created, res1
+}
+
+func (set EndpointsSet) Get(arg0 context.Context, arg1 string) (res0 *organizations.Organization, res1 error) {
+	request := GetRequest{OrgId: arg1}
+	response, res1 := set.GetEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*GetResponse).Org, res1
+}
+
+func (set EndpointsSet) Update(arg0 context.Context, arg1 *organizations.Organization) (res0 error) {
+	request := UpdateRequest{Org: arg1}
+	_, res0 = set.UpdateEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Delete(arg0 context.Context, arg1 string) (res0 error) {
+	request := DeleteRequest{OrgId: arg1}
+	_, res0 = set.DeleteEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Find(arg0 context.Context, arg1 *organizations.Filter, arg2 *options.FindOptions) (res0 []*organizations.Organization, res1 int, res2 error) {
+	request := FindRequest{
+		Filter: arg1,
+		Opts:   arg2,
+	}
+	response, res2 := set.FindEndpoint(arg0, &request)
+	if res2 != nil {
+		if e, ok := status.FromError(res2); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res2 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*FindResponse).Orgs, response.(*FindResponse).Total, res2
+}
diff --git a/pkg/organizations/transport/endpoints.microgen.go b/pkg/organizations/transport/endpoints.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..7e15457fef975b2f07611ef46786cf7b73f5bbee
--- /dev/null
+++ b/pkg/organizations/transport/endpoints.microgen.go
@@ -0,0 +1,14 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import endpoint "github.com/go-kit/kit/endpoint"
+
+// EndpointsSet implements the Organizations API and is used for transport purposes.
+type EndpointsSet struct {
+	CreateEndpoint endpoint.Endpoint
+	GetEndpoint    endpoint.Endpoint
+	UpdateEndpoint endpoint.Endpoint
+	DeleteEndpoint endpoint.Endpoint
+	FindEndpoint   endpoint.Endpoint
+}
diff --git a/pkg/organizations/transport/exchanges.microgen.go b/pkg/organizations/transport/exchanges.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..7ae098e5b5b555cfe834c55e2fe73c79805fdbfd
--- /dev/null
+++ b/pkg/organizations/transport/exchanges.microgen.go
@@ -0,0 +1,45 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	options "git.perx.ru/perxis/perxis-go/pkg/options"
+	organizations "git.perx.ru/perxis/perxis-go/pkg/organizations"
+)
+
+type (
+	CreateRequest struct {
+		Org *organizations.Organization `json:"org"`
+	}
+	CreateResponse struct {
+		Created *organizations.Organization `json:"created"`
+	}
+
+	GetRequest struct {
+		OrgId string `json:"org_id"`
+	}
+	GetResponse struct {
+		Org *organizations.Organization `json:"org"`
+	}
+
+	UpdateRequest struct {
+		Org *organizations.Organization `json:"org"`
+	}
+	// Formal exchange type, please do not delete.
+	UpdateResponse struct{}
+
+	DeleteRequest struct {
+		OrgId string `json:"org_id"`
+	}
+	// Formal exchange type, please do not delete.
+	DeleteResponse struct{}
+
+	FindRequest struct {
+		Filter *organizations.Filter `json:"filter"`
+		Opts   *options.FindOptions  `json:"opts"`
+	}
+	FindResponse struct {
+		Orgs  []*organizations.Organization `json:"orgs"`
+		Total int                           `json:"total"`
+	}
+)
diff --git a/pkg/organizations/transport/grpc/client.microgen.go b/pkg/organizations/transport/grpc/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..92918a88884de0a621858b7e09c49c66d83e5cab
--- /dev/null
+++ b/pkg/organizations/transport/grpc/client.microgen.go
@@ -0,0 +1,54 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/organizations/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/organizations"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	grpc "google.golang.org/grpc"
+)
+
+func NewGRPCClient(conn *grpc.ClientConn, addr string, opts ...grpckit.ClientOption) transport.EndpointsSet {
+	if addr == "" {
+		addr = "account.organizations.Organizations"
+	}
+	return transport.EndpointsSet{
+		CreateEndpoint: grpckit.NewClient(
+			conn, addr, "Create",
+			_Encode_Create_Request,
+			_Decode_Create_Response,
+			pb.CreateResponse{},
+			opts...,
+		).Endpoint(),
+		DeleteEndpoint: grpckit.NewClient(
+			conn, addr, "Delete",
+			_Encode_Delete_Request,
+			_Decode_Delete_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		FindEndpoint: grpckit.NewClient(
+			conn, addr, "Find",
+			_Encode_Find_Request,
+			_Decode_Find_Response,
+			pb.FindResponse{},
+			opts...,
+		).Endpoint(),
+		GetEndpoint: grpckit.NewClient(
+			conn, addr, "Get",
+			_Encode_Get_Request,
+			_Decode_Get_Response,
+			pb.GetResponse{},
+			opts...,
+		).Endpoint(),
+		UpdateEndpoint: grpckit.NewClient(
+			conn, addr, "Update",
+			_Encode_Update_Request,
+			_Decode_Update_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+	}
+}
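A hedged sketch of constructing and calling this client; the target address and credentials are placeholders:

	conn, err := grpc.Dial("accounts:50051", grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	orgs := transportgrpc.NewGRPCClient(conn, "") // "" falls back to account.organizations.Organizations
	org, err := orgs.Get(context.Background(), "some-org-id")
	_, _ = org, err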
diff --git a/pkg/organizations/transport/grpc/protobuf_endpoint_converters.microgen.go b/pkg/organizations/transport/grpc/protobuf_endpoint_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..966189093b756b558406ebacf758da6c2a3f4839
--- /dev/null
+++ b/pkg/organizations/transport/grpc/protobuf_endpoint_converters.microgen.go
@@ -0,0 +1,225 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// Please do not change function names!
+package transportgrpc
+
+import (
+	"context"
+	"errors"
+
+	transport "git.perx.ru/perxis/perxis-go/pkg/organizations/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/organizations"
+	empty "github.com/golang/protobuf/ptypes/empty"
+)
+
+func _Encode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*transport.CreateRequest)
+	reqOrg, err := PtrOrganizationToProto(req.Org)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateRequest{Org: reqOrg}, nil
+}
+
+func _Encode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*transport.GetRequest)
+	return &pb.GetRequest{OrgId: req.OrgId}, nil
+}
+
+func _Encode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*transport.DeleteRequest)
+	return &pb.DeleteRequest{OrgId: req.OrgId}, nil
+}
+
+func _Encode_Find_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindRequest")
+	}
+	req := request.(*transport.FindRequest)
+	reqFilter, err := PtrFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOpts, err := PtrServicesFindOptionsToProto(req.Opts)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindRequest{
+		Filter: reqFilter,
+		Opts:   reqOpts,
+	}, nil
+}
+
+func _Encode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*transport.CreateResponse)
+	respCreated, err := PtrOrganizationToProto(resp.Created)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateResponse{Created: respCreated}, nil
+}
+
+func _Encode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*transport.GetResponse)
+	respOrg, err := PtrOrganizationToProto(resp.Org)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetResponse{Org: respOrg}, nil
+}
+
+func _Encode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Find_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindResponse")
+	}
+	resp := response.(*transport.FindResponse)
+	respOrgs, err := ListPtrOrganizationToProto(resp.Orgs)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindResponse{
+		Orgs:  respOrgs,
+		Total: int64(resp.Total),
+	}, nil
+}
+
+func _Decode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*pb.CreateRequest)
+	reqOrg, err := ProtoToPtrOrganization(req.Org)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateRequest{Org: reqOrg}, nil
+}
+
+func _Decode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*pb.GetRequest)
+	return &transport.GetRequest{OrgId: string(req.OrgId)}, nil
+}
+
+func _Decode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*pb.DeleteRequest)
+	return &transport.DeleteRequest{OrgId: string(req.OrgId)}, nil
+}
+
+func _Decode_Find_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindRequest")
+	}
+	req := request.(*pb.FindRequest)
+	reqFilter, err := ProtoToPtrFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOpts, err := ProtoToPtrServicesFindOptions(req.Opts)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindRequest{
+		Filter: reqFilter,
+		Opts:   reqOpts,
+	}, nil
+}
+
+func _Decode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*pb.CreateResponse)
+	respCreated, err := ProtoToPtrOrganization(resp.Created)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateResponse{Created: respCreated}, nil
+}
+
+func _Decode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*pb.GetResponse)
+	respOrg, err := ProtoToPtrOrganization(resp.Org)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetResponse{Org: respOrg}, nil
+}
+
+func _Decode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Find_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindResponse")
+	}
+	resp := response.(*pb.FindResponse)
+	respOrgs, err := ProtoToListPtrOrganization(resp.Orgs)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindResponse{
+		Orgs:  respOrgs,
+		Total: int(resp.Total),
+	}, nil
+}
+
+func _Encode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UpdateRequest")
+	}
+	req := request.(*transport.UpdateRequest)
+	reqOrg, err := PtrOrganizationToProto(req.Org)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.UpdateRequest{Org: reqOrg}, nil
+}
+
+func _Decode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UpdateRequest")
+	}
+	req := request.(*pb.UpdateRequest)
+	reqOrg, err := ProtoToPtrOrganization(req.Org)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.UpdateRequest{Org: reqOrg}, nil
+}
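
Because every encoder has a matching decoder, a package-local round-trip test is a cheap guard against drift between the transport structs and the protobuf messages. A hypothetical sketch (the test name and sample values are mine, not part of the generated code):

```go
package transportgrpc

import (
	"context"
	"testing"

	organizations "git.perx.ru/perxis/perxis-go/pkg/organizations"
	transport "git.perx.ru/perxis/perxis-go/pkg/organizations/transport"
	"github.com/stretchr/testify/require"
)

func TestCreateRequestRoundTrip(t *testing.T) {
	ctx := context.Background()
	in := &transport.CreateRequest{Org: &organizations.Organization{ID: "org1", Name: "Acme"}}

	// transport -> protobuf
	encoded, err := _Encode_Create_Request(ctx, in)
	require.NoError(t, err)

	// protobuf -> transport
	decoded, err := _Decode_Create_Request(ctx, encoded)
	require.NoError(t, err)

	out := decoded.(*transport.CreateRequest)
	require.Equal(t, in.Org.ID, out.Org.ID)
	require.Equal(t, in.Org.Name, out.Org.Name)
}
```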
diff --git a/pkg/organizations/transport/grpc/protobuf_type_converters.microgen.go b/pkg/organizations/transport/grpc/protobuf_type_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..15fce8f5a7966f8fa2958fef30f62805923f512b
--- /dev/null
+++ b/pkg/organizations/transport/grpc/protobuf_type_converters.microgen.go
@@ -0,0 +1,111 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// It is better for you if you do not change function names!
+// This file will never be overwritten.
+package transportgrpc
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+	"git.perx.ru/perxis/perxis-go/pkg/organizations"
+	"git.perx.ru/perxis/perxis-go/proto/common"
+	pb "git.perx.ru/perxis/perxis-go/proto/organizations"
+)
+
+func PtrOrganizationToProto(org *organizations.Organization) (*pb.Organization, error) {
+	if org == nil {
+		return nil, nil
+	}
+	po := &pb.Organization{
+		Id:          org.ID,
+		Name:        org.Name,
+		Description: org.Description,
+		LogoUrl:     org.LogoURL,
+		OwnerId:     org.OwnerID,
+	}
+
+	return po, nil
+}
+
+func ProtoToPtrOrganization(protoOrg *pb.Organization) (*organizations.Organization, error) {
+	if protoOrg == nil {
+		return nil, nil
+	}
+	o := &organizations.Organization{
+		ID:          protoOrg.Id,
+		Name:        protoOrg.Name,
+		Description: protoOrg.Description,
+		LogoURL:     protoOrg.LogoUrl,
+		OwnerID:     protoOrg.OwnerId,
+	}
+	return o, nil
+}
+
+func PtrFilterToProto(filter *organizations.Filter) (*pb.Filter, error) {
+	if filter == nil {
+		return nil, nil
+	}
+	return &pb.Filter{
+		Ids:   filter.ID,
+		Names: filter.Name,
+	}, nil
+}
+
+func ProtoToPtrFilter(protoFilter *pb.Filter) (*organizations.Filter, error) {
+	if protoFilter == nil {
+		return nil, nil
+	}
+	return &organizations.Filter{
+		ID:   protoFilter.Ids,
+		Name: protoFilter.Names,
+	}, nil
+}
+
+func PtrServicesFindOptionsToProto(opts *options.FindOptions) (*common.FindOptions, error) {
+	if opts == nil {
+		return nil, nil
+	}
+	return &common.FindOptions{
+		Sort:     opts.Sort,
+		PageNum:  int32(opts.PageNum),
+		PageSize: int32(opts.PageSize),
+	}, nil
+}
+
+func ProtoToPtrServicesFindOptions(protoOpts *common.FindOptions) (*options.FindOptions, error) {
+	if protoOpts == nil {
+		return nil, nil
+	}
+	return &options.FindOptions{
+		SortOptions: options.SortOptions{
+			Sort: protoOpts.Sort,
+		},
+		PaginationOptions: options.PaginationOptions{
+			PageNum:  int(protoOpts.PageNum),
+			PageSize: int(protoOpts.PageSize),
+		},
+	}, nil
+}
+
+func ListPtrOrganizationToProto(orgs []*organizations.Organization) ([]*pb.Organization, error) {
+	protoOrgs := make([]*pb.Organization, 0, len(orgs))
+	for _, o := range orgs {
+		op, err := PtrOrganizationToProto(o)
+		if err != nil {
+			return nil, err
+		}
+		protoOrgs = append(protoOrgs, op)
+	}
+	return protoOrgs, nil
+}
+
+func ProtoToListPtrOrganization(protoOrgs []*pb.Organization) ([]*organizations.Organization, error) {
+	orgs := make([]*organizations.Organization, 0, len(protoOrgs))
+	for _, op := range protoOrgs {
+		o, err := ProtoToPtrOrganization(op)
+		if err != nil {
+			return nil, err
+		}
+		orgs = append(orgs, o)
+	}
+	return orgs, nil
+}
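
Note that all converters map nil to nil rather than returning an error, so optional request fields such as Filter and Opts can stay unset end to end. A small illustrative snippet (the pagination values are mine; PageNum and PageSize are assumed to be ints promoted from the embedded PaginationOptions, as the decoder above suggests):

```go
package main

import (
	"fmt"

	"git.perx.ru/perxis/perxis-go/pkg/options"
	transportgrpc "git.perx.ru/perxis/perxis-go/pkg/organizations/transport/grpc"
)

func main() {
	// nil in, nil out: unset options never become an empty proto message.
	if p, err := transportgrpc.PtrServicesFindOptionsToProto(nil); p == nil && err == nil {
		fmt.Println("nil options pass through unchanged")
	}

	// Pagination survives the int <-> int32 narrowing for realistic page sizes.
	opts := &options.FindOptions{}
	opts.PageNum, opts.PageSize = 2, 50 // promoted fields from the embedded PaginationOptions

	protoOpts, _ := transportgrpc.PtrServicesFindOptionsToProto(opts)
	back, _ := transportgrpc.ProtoToPtrServicesFindOptions(protoOpts)
	fmt.Println(back.PageNum, back.PageSize) // 2 50
}
```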
diff --git a/pkg/organizations/transport/grpc/server.microgen.go b/pkg/organizations/transport/grpc/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..1cf24d998b6422ad418cfbb0a47a51eba6fe801d
--- /dev/null
+++ b/pkg/organizations/transport/grpc/server.microgen.go
@@ -0,0 +1,97 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/organizations/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/organizations"
+	grpc "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	context "golang.org/x/net/context"
+)
+
+type organizationsServer struct {
+	create grpc.Handler
+	get    grpc.Handler
+	update grpc.Handler
+	delete grpc.Handler
+	find   grpc.Handler
+
+	pb.UnimplementedOrganizationsServer
+}
+
+func NewGRPCServer(endpoints *transport.EndpointsSet, opts ...grpc.ServerOption) pb.OrganizationsServer {
+	return &organizationsServer{
+		create: grpc.NewServer(
+			endpoints.CreateEndpoint,
+			_Decode_Create_Request,
+			_Encode_Create_Response,
+			opts...,
+		),
+		delete: grpc.NewServer(
+			endpoints.DeleteEndpoint,
+			_Decode_Delete_Request,
+			_Encode_Delete_Response,
+			opts...,
+		),
+		find: grpc.NewServer(
+			endpoints.FindEndpoint,
+			_Decode_Find_Request,
+			_Encode_Find_Response,
+			opts...,
+		),
+		get: grpc.NewServer(
+			endpoints.GetEndpoint,
+			_Decode_Get_Request,
+			_Encode_Get_Response,
+			opts...,
+		),
+		update: grpc.NewServer(
+			endpoints.UpdateEndpoint,
+			_Decode_Update_Request,
+			_Encode_Update_Response,
+			opts...,
+		),
+	}
+}
+
+func (S *organizationsServer) Create(ctx context.Context, req *pb.CreateRequest) (*pb.CreateResponse, error) {
+	_, resp, err := S.create.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.CreateResponse), nil
+}
+
+func (S *organizationsServer) Get(ctx context.Context, req *pb.GetRequest) (*pb.GetResponse, error) {
+	_, resp, err := S.get.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetResponse), nil
+}
+
+func (S *organizationsServer) Update(ctx context.Context, req *pb.UpdateRequest) (*empty.Empty, error) {
+	_, resp, err := S.update.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *organizationsServer) Delete(ctx context.Context, req *pb.DeleteRequest) (*empty.Empty, error) {
+	_, resp, err := S.delete.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *organizationsServer) Find(ctx context.Context, req *pb.FindRequest) (*pb.FindResponse, error) {
+	_, resp, err := S.find.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.FindResponse), nil
+}
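
The variadic opts parameter of NewGRPCServer accepts go-kit ServerOptions, which is the usual place to hook request metadata into the endpoint context before decoding. A sketch under my own assumptions (the package name, helper name, and "authorization" key are illustrative):

```go
package example

import (
	"context"

	transport "git.perx.ru/perxis/perxis-go/pkg/organizations/transport"
	transportgrpc "git.perx.ru/perxis/perxis-go/pkg/organizations/transport/grpc"
	pb "git.perx.ru/perxis/perxis-go/proto/organizations"
	grpckit "github.com/go-kit/kit/transport/grpc"
	"google.golang.org/grpc/metadata"
)

type ctxKey string // avoids key collisions for the illustrative context value below

// newServer attaches a ServerBefore hook that copies an incoming header into the context.
func newServer(endpoints *transport.EndpointsSet) pb.OrganizationsServer {
	return transportgrpc.NewGRPCServer(endpoints, grpckit.ServerBefore(
		func(ctx context.Context, md metadata.MD) context.Context {
			if v := md.Get("authorization"); len(v) > 0 {
				ctx = context.WithValue(ctx, ctxKey("authorization"), v[0])
			}
			return ctx
		},
	))
}
```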
diff --git a/pkg/organizations/transport/server.microgen.go b/pkg/organizations/transport/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..2e08f8f534f862552ecc5008b2f76277e702a9ab
--- /dev/null
+++ b/pkg/organizations/transport/server.microgen.go
@@ -0,0 +1,63 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+
+	organizations "git.perx.ru/perxis/perxis-go/pkg/organizations"
+	endpoint "github.com/go-kit/kit/endpoint"
+)
+
+func Endpoints(svc organizations.Organizations) EndpointsSet {
+	return EndpointsSet{
+		CreateEndpoint: CreateEndpoint(svc),
+		DeleteEndpoint: DeleteEndpoint(svc),
+		FindEndpoint:   FindEndpoint(svc),
+		GetEndpoint:    GetEndpoint(svc),
+		UpdateEndpoint: UpdateEndpoint(svc),
+	}
+}
+
+func CreateEndpoint(svc organizations.Organizations) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*CreateRequest)
+		res0, res1 := svc.Create(arg0, req.Org)
+		return &CreateResponse{Created: res0}, res1
+	}
+}
+
+func GetEndpoint(svc organizations.Organizations) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetRequest)
+		res0, res1 := svc.Get(arg0, req.OrgId)
+		return &GetResponse{Org: res0}, res1
+	}
+}
+
+func UpdateEndpoint(svc organizations.Organizations) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*UpdateRequest)
+		res0 := svc.Update(arg0, req.Org)
+		return &UpdateResponse{}, res0
+	}
+}
+
+func DeleteEndpoint(svc organizations.Organizations) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*DeleteRequest)
+		res0 := svc.Delete(arg0, req.OrgId)
+		return &DeleteResponse{}, res0
+	}
+}
+
+func FindEndpoint(svc organizations.Organizations) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*FindRequest)
+		res0, res1, res2 := svc.Find(arg0, req.Filter, req.Opts)
+		return &FindResponse{
+			Orgs:  res0,
+			Total: res1,
+		}, res2
+	}
+}
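
Each endpoint factory type-asserts its transport request and delegates straight to the service, so Endpoints(svc) is the single seam between the business interface and the gRPC layer above. A minimal end-to-end wiring sketch, assuming svc is a concrete implementation of organizations.Organizations and that the protoc-generated RegisterOrganizationsServer exists in the proto package (the listen address is illustrative):

```go
package main

import (
	"log"
	"net"

	organizations "git.perx.ru/perxis/perxis-go/pkg/organizations"
	transport "git.perx.ru/perxis/perxis-go/pkg/organizations/transport"
	transportgrpc "git.perx.ru/perxis/perxis-go/pkg/organizations/transport/grpc"
	pb "git.perx.ru/perxis/perxis-go/proto/organizations"
	"google.golang.org/grpc"
)

func main() {
	var svc organizations.Organizations // assumed: replace with the concrete service implementation

	// Wrap the service in go-kit endpoints, then expose them over gRPC.
	endpoints := transport.Endpoints(svc)
	srv := grpc.NewServer()
	pb.RegisterOrganizationsServer(srv, transportgrpc.NewGRPCServer(&endpoints))

	lis, err := net.Listen("tcp", ":9000") // illustrative address
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("organizations gRPC transport listening on %s", lis.Addr())
	log.Fatal(srv.Serve(lis))
}
```

The same EndpointsSet can also be exercised in-process (for tests or local composition) by calling its endpoints directly, exactly as the gRPC server does via ServeGRPC.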