diff --git a/pkg/items/codec.go b/pkg/items/codec.go
new file mode 100644
index 0000000000000000000000000000000000000000..6264c3b582a2af08c1746c763807b315c0ae2fa9
--- /dev/null
+++ b/pkg/items/codec.go
@@ -0,0 +1,9 @@
+package items
+
+type Encoder interface {
+	Encode(item *Item) (any, error)
+}
+
+type Decoder interface {
+	Decode(value any, item *Item) error
+}
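+
+// A minimal sketch of an Encoder implementation (illustrative only, not part
+// of this package): it returns the item's raw data map unchanged.
+//
+//	type rawEncoder struct{}
+//
+//	func (rawEncoder) Encode(item *Item) (any, error) {
+//		if item == nil {
+//			return nil, nil
+//		}
+//		return item.Data, nil
+//	}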
diff --git a/pkg/items/context.go b/pkg/items/context.go
new file mode 100644
index 0000000000000000000000000000000000000000..87e600e5b40da50381245a626e8228ab20485de8
--- /dev/null
+++ b/pkg/items/context.go
@@ -0,0 +1,71 @@
+package items
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/clients"
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	"git.perx.ru/perxis/perxis-go/pkg/spaces"
+)
+
+type Context struct {
+	Items
+	Clients clients.Clients
+
+	SpaceID      string
+	EnvID        string
+	CollectionID string
+	ItemID       string
+	Item         *Item
+	Space        *spaces.Space
+	Environment  *environments.Environment
+
+	ViewSpaceID       string
+	ViewEnvironmentID string
+	ViewCollectionID  string
+	ViewSpace         *spaces.Space
+	ViewEnvironment   *environments.Environment
+}
+
+type itemsCtx struct{}
+
+func WithContext(ctx context.Context, itmCtx *Context) context.Context {
+	if ctx == nil {
+		ctx = context.Background()
+	}
+
+	if itmCtx.ViewSpaceID == "" {
+		itmCtx.ViewSpaceID = itmCtx.SpaceID
+	}
+	if itmCtx.ViewEnvironmentID == "" {
+		itmCtx.ViewEnvironmentID = itmCtx.EnvID
+	}
+	if itmCtx.ViewCollectionID == "" {
+		itmCtx.ViewCollectionID = itmCtx.CollectionID
+	}
+	if itmCtx.ViewSpace == nil {
+		itmCtx.ViewSpace = itmCtx.Space
+	}
+	if itmCtx.ViewEnvironment == nil {
+		itmCtx.ViewEnvironment = itmCtx.Environment
+	}
+
+	p, _ := ctx.Value(itemsCtx{}).(*Context)
+	if p != nil {
+		*p = *itmCtx
+		return ctx
+	}
+
+	return context.WithValue(ctx, itemsCtx{}, itmCtx)
+}
+
+func GetContext(ctx context.Context) *Context {
+	if ctx == nil {
+		return new(Context)
+	}
+	p, _ := ctx.Value(itemsCtx{}).(*Context)
+	if p == nil {
+		return new(Context)
+	}
+	return p
+}
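+
+// Usage sketch (illustrative identifiers, not part of the package API):
+//
+//	ctx = WithContext(ctx, &Context{
+//		SpaceID:      "sp",
+//		EnvID:        "master",
+//		CollectionID: "articles",
+//		ItemID:       "itm",
+//	})
+//	itmCtx := GetContext(ctx)
+//	// View* fields default to the primary values when left empty,
+//	// e.g. itmCtx.ViewSpaceID == "sp".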
diff --git a/pkg/items/events.go b/pkg/items/events.go
new file mode 100644
index 0000000000000000000000000000000000000000..14ff72216edb875ed3ca15cc879913fdb21a7ed7
--- /dev/null
+++ b/pkg/items/events.go
@@ -0,0 +1,140 @@
+package items
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	pb "git.perx.ru/perxis/perxis-go/proto/items"
+	"github.com/golang/protobuf/proto"
+)
+
+const (
+	EventCreateItem    = "create_item"
+	EventUpdateItem    = "update_item"
+	EventPublishItem   = "publish_item"
+	EventUnpublishItem = "unpublish_item"
+	EventDeleteItem    = "delete_item"
+
+	DefaultEventSubject = "content.{{.EventType}}.{{.SpaceID}}.{{.EnvID}}.{{.CollectionID}}.{{.ItemID}}"
+)
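+
+// For example, assuming the event bus renders DefaultEventSubject with the
+// EventType "create_item" (EventCreateItem) and illustrative identifiers
+// sp/env/coll/itm, the resulting subject would be
+// "content.create_item.sp.env.coll.itm".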
+
+var (
+	ErrInvalidEventType = func(expected string, got any) error {
+		return errors.Errorf("invalid message type: expected '%s', got '%T'", expected, got)
+	}
+)
+
+type EventCreate struct {
+	SpaceID      string
+	EnvID        string
+	CollectionID string
+	ItemID       string
+}
+
+func (e EventCreate) ToProto() (proto.Message, error) {
+	return &pb.EventCreate{SpaceId: e.SpaceID, EnvId: e.EnvID, CollectionId: e.CollectionID, ItemId: e.ItemID}, nil
+}
+
+func (e *EventCreate) FromProto(message proto.Message) error {
+	p, ok := message.(*pb.EventCreate)
+	if !ok {
+		return ErrInvalidEventType("*pb.EventCreate", message)
+	}
+
+	e.SpaceID = p.SpaceId
+	e.EnvID = p.EnvId
+	e.CollectionID = p.CollectionId
+	e.ItemID = p.ItemId
+	return nil
+}
+
+type EventUpdate struct {
+	SpaceID      string
+	EnvID        string
+	CollectionID string
+	ItemID       string
+}
+
+func (e EventUpdate) ToProto() (proto.Message, error) {
+	return &pb.EventUpdate{SpaceId: e.SpaceID, EnvId: e.EnvID, CollectionId: e.CollectionID, ItemId: e.ItemID}, nil
+}
+
+func (e *EventUpdate) FromProto(message proto.Message) error {
+	p, ok := message.(*pb.EventUpdate)
+	if !ok {
+		return ErrInvalidEventType("*pb.EventUpdate", message)
+	}
+
+	e.SpaceID = p.SpaceId
+	e.EnvID = p.EnvId
+	e.CollectionID = p.CollectionId
+	e.ItemID = p.ItemId
+	return nil
+}
+
+type EventPublish struct {
+	SpaceID      string
+	EnvID        string
+	CollectionID string
+	ItemID       string
+}
+
+func (e EventPublish) ToProto() (proto.Message, error) {
+	return &pb.EventPublish{SpaceId: e.SpaceID, EnvId: e.EnvID, CollectionId: e.CollectionID, ItemId: e.ItemID}, nil
+}
+
+func (e *EventPublish) FromProto(message proto.Message) error {
+	p, ok := message.(*pb.EventPublish)
+	if !ok {
+		return ErrInvalidEventType("*pb.EventPublish", message)
+	}
+	e.SpaceID = p.SpaceId
+	e.EnvID = p.EnvId
+	e.CollectionID = p.CollectionId
+	e.ItemID = p.ItemId
+	return nil
+}
+
+type EventUnpublish struct {
+	SpaceID      string
+	EnvID        string
+	CollectionID string
+	ItemID       string
+}
+
+func (e EventUnpublish) ToProto() (proto.Message, error) {
+	return &pb.EventUnpublish{SpaceId: e.SpaceID, EnvId: e.EnvID, CollectionId: e.CollectionID, ItemId: e.ItemID}, nil
+}
+
+func (e *EventUnpublish) FromProto(message proto.Message) error {
+	p, ok := message.(*pb.EventUnpublish)
+	if !ok {
+		return ErrInvalidEventType("*pb.EventUnpublish", message)
+	}
+	e.SpaceID = p.SpaceId
+	e.EnvID = p.EnvId
+	e.CollectionID = p.CollectionId
+	e.ItemID = p.ItemId
+	return nil
+}
+
+type EventDelete struct {
+	SpaceID      string
+	EnvID        string
+	CollectionID string
+	ItemID       string
+}
+
+func (e EventDelete) ToProto() (proto.Message, error) {
+	return &pb.EventDelete{SpaceId: e.SpaceID, EnvId: e.EnvID, CollectionId: e.CollectionID, ItemId: e.ItemID}, nil
+}
+
+func (e *EventDelete) FromProto(message proto.Message) error {
+	p, ok := message.(*pb.EventDelete)
+	if !ok {
+		return ErrInvalidEventType("*pb.EventDelete", message)
+	}
+	e.SpaceID = p.SpaceId
+	e.EnvID = p.EnvId
+	e.CollectionID = p.CollectionId
+	e.ItemID = p.ItemId
+	return nil
+}
diff --git a/pkg/items/expr.go b/pkg/items/expr.go
new file mode 100644
index 0000000000000000000000000000000000000000..c275c635cd52484120d3d4d38e70fbfe63054f47
--- /dev/null
+++ b/pkg/items/expr.go
@@ -0,0 +1,76 @@
+package items
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/auth"
+	"git.perx.ru/perxis/perxis-go/pkg/expr"
+	. "github.com/antonmedv/expr"
+	"github.com/antonmedv/expr/conf"
+)
+
+const EnvPerxisKey = "$perxis"
+
+type PerxisExpr struct {
+	ctx context.Context
+}
+
+func (p PerxisExpr) Fetch(i interface{}) interface{} {
+	param, _ := i.(string)
+	c := GetContext(p.ctx)
+	switch param {
+	case "SpaceID":
+		return c.SpaceID
+	case "EnvironmentID":
+		return c.EnvID
+	case "CollectionID":
+		return c.CollectionID
+	case "ItemID":
+		return c.ItemID
+	case "Item":
+		return c.Item
+
+	case "Space":
+		return c.Space
+	case "Environment":
+		return c.Environment
+
+	case "ViewSpaceID":
+		return c.ViewSpaceID
+	case "ViewEnvironmentID":
+		return c.ViewEnvironmentID
+	case "ViewCollectionID":
+		return c.ViewCollectionID
+
+	case "ViewSpace":
+		return c.ViewSpace
+	case "ViewEnvironment":
+		return c.ViewEnvironment
+
+	case "Principal":
+		return auth.GetPrincipal(p.ctx)
+	case "PrincipalID":
+		return auth.GetPrincipal(p.ctx).GetID(p.ctx)
+
+	case "Clients":
+		return c.Clients
+
+	default:
+		panic("unknown parameter: " + param)
+	}
+}
+
+func perxisOption() Option {
+	return func(c *conf.Config) {
+		env, _ := c.Env.(map[string]interface{})
+		if env != nil {
+			if ctx, ok := env[expr.EnvContextKey].(context.Context); ok {
+				env[EnvPerxisKey] = &PerxisExpr{ctx: ctx}
+			}
+		}
+	}
+}
+
+func init() {
+	expr.RegisterOption(perxisOption())
+}
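+
+// Usage sketch: once the option is registered in init, expressions evaluated
+// through the expr package can read request data via $perxis. This assumes
+// pkg/expr exposes an Eval(ctx, formula, env) helper (as used in the tests)
+// that places the evaluation context into the environment under
+// expr.EnvContextKey:
+//
+//	ctx := WithContext(context.Background(), &Context{SpaceID: "sp"})
+//	ok, err := expr.Eval(ctx, `$perxis.SpaceID == "sp"`, map[string]interface{}{})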
diff --git a/pkg/items/expr_test.go b/pkg/items/expr_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..4d8a1518ba103590f4db227947e3f0031bf3f536
--- /dev/null
+++ b/pkg/items/expr_test.go
@@ -0,0 +1,159 @@
+package items
+
+import (
+	"context"
+	"testing"
+
+	"git.perx.ru/perxis/perxis/auth"
+	"git.perx.ru/perxis/perxis/pkg/expr"
+	mockscollaborators "git.perx.ru/perxis/perxis/services/collaborators/mocks"
+	"git.perx.ru/perxis/perxis/services/members"
+	mocksmembers "git.perx.ru/perxis/perxis/services/members/mocks"
+	"git.perx.ru/perxis/perxis/services/roles"
+	mocksroles "git.perx.ru/perxis/perxis/services/roles/mocks"
+	"git.perx.ru/perxis/perxis/services/spaces"
+	mocksSpaces "git.perx.ru/perxis/perxis/services/spaces/mocks"
+	"git.perx.ru/perxis/perxis/services/users"
+	mocksusers "git.perx.ru/perxis/perxis/services/users/mocks"
+	"github.com/pkg/errors"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/mock"
+	"github.com/stretchr/testify/require"
+)
+
+func TestPerxisExpr_Principal(t *testing.T) {
+
+	var tt = []struct {
+		name              string
+		userscall         func(usersservice *mocksusers.Users) *mock.Call
+		memberscall       func(membersservice *mocksmembers.Members) *mock.Call
+		spacescall        func(spacesservice *mocksSpaces.Spaces) *mock.Call
+		rolescall         func(rolesservice *mocksroles.Roles) *mock.Call
+		collaboratorscall func(collaboratorsservice *mockscollaborators.Collaborators) *mock.Call
+		formula           string
+		wantErr           bool
+		expected          interface{}
+	}{
+		{
+			name: "Get current ID",
+			userscall: func(usersservice *mocksusers.Users) *mock.Call {
+				return usersservice.On("GetByIdentity", mock.Anything, "user_identity").
+					Return(&users.User{ID: "user_id", Name: "test user"}, nil).Once()
+			},
+			formula:  "$perxis.Principal.GetID(ctx)",
+			wantErr:  false,
+			expected: "user_id",
+		},
+		{
+			name: "Get current ID / User not exist",
+			userscall: func(usersservice *mocksusers.Users) *mock.Call {
+				return usersservice.On("GetByIdentity", mock.Anything, "user_identity").
+					Return(nil, errors.New("not found")).Once()
+			},
+			formula:  "$perxis.Principal.GetID(ctx)",
+			wantErr:  false,
+			expected: "",
+		},
+		{
+			name: "Get role in org",
+			userscall: func(usersservice *mocksusers.Users) *mock.Call {
+				return usersservice.On("GetByIdentity", mock.Anything, "user_identity").
+					Return(&users.User{ID: "user_id", Name: "test user"}, nil).Once()
+			},
+			spacescall: func(spacesservice *mocksSpaces.Spaces) *mock.Call {
+				return spacesservice.On("Get", mock.Anything, "sp").Return(&spaces.Space{ID: "sp", OrgID: "org"}, nil).Once()
+			},
+			memberscall: func(membersservice *mocksmembers.Members) *mock.Call {
+				return membersservice.On("Get", mock.Anything, "org", "user_id").Return(members.RoleAdmin, nil).Once()
+			},
+			formula:  "$perxis.Principal.Space('sp').Member(ctx)",
+			wantErr:  false,
+			expected: members.RoleAdmin,
+		},
+		{
+			name: "Check has space access",
+			userscall: func(usersservice *mocksusers.Users) *mock.Call {
+				return usersservice.On("GetByIdentity", mock.Anything, "user_identity").
+					Return(&users.User{ID: "user_id", Name: "test user"}, nil).Once()
+			},
+			collaboratorscall: func(collaboratorsservice *mockscollaborators.Collaborators) *mock.Call {
+				return collaboratorsservice.On("Get", mock.Anything, "sp", "user_id").Return("editor", nil).Once()
+			},
+			formula:  "$perxis.Principal.Space('sp').HasSpaceAccess(ctx, 'sp')",
+			wantErr:  false,
+			expected: true,
+		},
+		{
+			name: "Check has space access for any authorized",
+			userscall: func(usersservice *mocksusers.Users) *mock.Call {
+				return usersservice.On("GetByIdentity", mock.Anything, "user_identity").
+					Return(&users.User{ID: "user_id", Name: "test user"}, nil).Once()
+			},
+			//spacescall: func(spacesservice *mocksSpaces.Spaces) *mock.Call {
+			//	return spacesservice.On("Get", mock.Anything, "sp").Return(&spaces.Space{ID: "sp", OrgID: "org"}, nil).Once()
+			//},
+			collaboratorscall: func(collaboratorsservice *mockscollaborators.Collaborators) *mock.Call {
+				return collaboratorsservice.On("Get", mock.Anything, "sp", "user_id").Return("", errors.New("not found")).Once()
+			},
+			rolescall: func(rolesservice *mocksroles.Roles) *mock.Call {
+				return rolesservice.On("Get", mock.Anything, "sp", roles.AuthorizedRole).Return(&roles.Role{ID: roles.AuthorizedRole}, nil).Once()
+
+			},
+			formula:  "$perxis.Principal.Space('sp').HasSpaceAccess(ctx, 'sp')",
+			wantErr:  false,
+			expected: true,
+		},
+	}
+
+	for _, v := range tt {
+
+		t.Run(v.name, func(t *testing.T) {
+
+			usersservice := &mocksusers.Users{}
+			spacesservice := &mocksSpaces.Spaces{}
+			membersservice := &mocksmembers.Members{}
+			collaboratorsservice := &mockscollaborators.Collaborators{}
+			rolesservice := &mocksroles.Roles{}
+
+			if v.userscall != nil {
+				v.userscall(usersservice)
+			}
+			if v.spacescall != nil {
+				v.spacescall(spacesservice)
+			}
+			if v.memberscall != nil {
+				v.memberscall(membersservice)
+			}
+			if v.collaboratorscall != nil {
+				v.collaboratorscall(collaboratorsservice)
+			}
+			if v.rolescall != nil {
+				v.rolescall(rolesservice)
+			}
+
+			factory := &auth.PrincipalFactory{
+				Users:         usersservice,
+				Spaces:        spacesservice,
+				Members:       membersservice,
+				Collaborators: collaboratorsservice,
+				Roles:         rolesservice,
+			}
+			ctx := auth.WithPrincipal(context.Background(), factory.Principal("user_identity"))
+
+			res, err := expr.Eval(ctx, v.formula, map[string]interface{}{"ctx": context.Background()})
+
+			if !v.wantErr {
+				require.NoError(t, err)
+				assert.Equal(t, v.expected, res)
+			} else {
+				require.Error(t, err)
+			}
+
+			usersservice.AssertExpectations(t)
+			spacesservice.AssertExpectations(t)
+			membersservice.AssertExpectations(t)
+			collaboratorsservice.AssertExpectations(t)
+			rolesservice.AssertExpectations(t)
+		})
+	}
+}
diff --git a/pkg/items/item.go b/pkg/items/item.go
new file mode 100644
index 0000000000000000000000000000000000000000..223f8a22497dc061e920c4f1bd9e50cb14aa2a23
--- /dev/null
+++ b/pkg/items/item.go
@@ -0,0 +1,558 @@
+package items
+
+import (
+	"context"
+	"fmt"
+	"reflect"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/data"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	pb "git.perx.ru/perxis/perxis-go/proto/items"
+	"google.golang.org/protobuf/types/known/structpb"
+	"google.golang.org/protobuf/types/known/timestamppb"
+)
+
+var (
+	ErrNotSystemField = errors.New("not a system field")
+	ErrIncorrectValue = errors.New("incorrect value")
+	ErrIncorrectField = errors.New("incorrect field")
+)
+
+type State int
+
+func (s State) String() string {
+	switch s {
+	case StateDraft:
+		return "Draft"
+	case StateArchived:
+		return "Archived"
+	case StateChanged:
+		return "Changed"
+	case StatePublished:
+		return "Published"
+	}
+	return "Unknown"
+}
+
+const (
+	StateDraft State = iota
+	StatePublished
+	StateChanged
+	StateArchived
+
+	StateMax = StateArchived
+
+	SoftDeleteSeparator = "___"
+)
+
+var PermissionsAllowAny = &Permissions{
+	Edit:       true,
+	Archive:    true,
+	Publish:    true,
+	SoftDelete: true,
+	HardDelete: true,
+}
+
+// SystemFields lists the system fields of an Item
+var SystemFields = []string{
+	"id",
+	"space_id",
+	"env_id",
+	"collection_id",
+	"state",
+	"created_rev_at",
+	"created_by",
+	"created_at",
+	"updated_at",
+	"updated_by",
+	"revision_id",
+	"published_at",
+	"published_by",
+	"archived_at",
+	"archived_by",
+	"data",
+	"translations",
+	"locale",
+	"deleted",
+	"hidden",
+	"template",
+}
+
+type Permissions struct {
+	Edit       bool
+	Archive    bool
+	Publish    bool
+	SoftDelete bool
+	HardDelete bool
+}
+
+type Item struct {
+	ID           string                            `json:"id" bson:"_id"` // ID is the record identifier, generated by the system when the first revision is saved.
+	SpaceID      string                            `json:"spaceId" bson:"-"`
+	EnvID        string                            `json:"envId" bson:"-"`
+	CollectionID string                            `json:"collectionId" bson:"-"`
+	State        State                             `json:"state" bson:"state"`
+	CreatedRevAt time.Time                         `json:"createdRevAt,omitempty" bson:"created_rev_at,omitempty"`
+	CreatedBy    string                            `json:"createdBy,omitempty" bson:"created_by,omitempty"`
+	CreatedAt    time.Time                         `json:"createdAt,omitempty" bson:"created_at,omitempty"`
+	UpdatedAt    time.Time                         `json:"updatedAt,omitempty" bson:"updated_at,omitempty"`
+	UpdatedBy    string                            `json:"updatedBy,omitempty" bson:"updated_by,omitempty"`
+	Data         map[string]interface{}            `json:"data" bson:"data"`
+	Locale       string                            `json:"locale" bson:"-"`
+	Translations map[string]map[string]interface{} `json:"translations" bson:"translations,omitempty"`
+	RevisionID   string                            `json:"revId,omitempty" bson:"revision_id"`
+	PublishedAt  time.Time                         `json:"publishedAt,omitempty" bson:"published_at,omitempty"`
+	PublishedBy  string                            `json:"publishedBy,omitempty" bson:"published_by,omitempty"`
+	ArchivedAt   time.Time                         `json:"archivedAt,omitempty" bson:"archived_at,omitempty"`
+	ArchivedBy   string                            `json:"archivedBy,omitempty" bson:"archived_by,omitempty"`
+	Permissions  *Permissions                      `json:"permissions,omitempty" bson:"-"`
+
+	// Record flags
+	Deleted  bool `json:"deleted" bson:"deleted,omitempty"`
+	Hidden   bool `json:"hidden" bson:"hidden,omitempty"`
+	Template bool `json:"template" bson:"template,omitempty"`
+}
+
+func NewItem(spaceID, envID, collID, id string, data map[string]interface{}, translations map[string]map[string]interface{}) *Item {
+	return &Item{
+		ID:           id,
+		SpaceID:      spaceID,
+		EnvID:        envID,
+		CollectionID: collID,
+		Data:         data,
+		Translations: translations,
+	}
+}
+
+func (i *Item) Clone() *Item {
+	itm := *i
+	itm.Data = data.CloneMap(i.Data)
+
+	if i.Translations != nil {
+		itm.Translations = make(map[string]map[string]interface{}, len(i.Translations))
+		for t, m := range i.Translations {
+			itm.Translations[t] = data.CloneMap(m)
+		}
+	}
+
+	return &itm
+}
+
+func (i *Item) ToMap() map[string]interface{} {
+	return map[string]interface{}{
+		"id":             i.ID,
+		"space_id":       i.SpaceID,
+		"env_id":         i.EnvID,
+		"collection_id":  i.CollectionID,
+		"state":          i.State,
+		"created_rev_at": i.CreatedRevAt,
+		"created_by":     i.CreatedBy,
+		"created_at":     i.CreatedAt,
+		"updated_at":     i.UpdatedAt,
+		"updated_by":     i.UpdatedBy,
+		"revision_id":    i.RevisionID,
+		"published_at":   i.PublishedAt,
+		"published_by":   i.PublishedBy,
+		"archived_at":    i.ArchivedAt,
+		"archived_by":    i.ArchivedBy,
+		"data":           i.Data,
+		"translations":   i.Translations,
+		"locale":         i.Locale,
+		"deleted":        i.Deleted,
+		"hidden":         i.Hidden,
+		"template":       i.Template,
+	}
+}
+
+func (i *Item) SetData(locale string, data map[string]interface{}) {
+	if locale != "" {
+		if i.Translations == nil {
+			i.Translations = make(map[string]map[string]interface{})
+		}
+		i.Translations[locale] = data
+		return
+	}
+	i.Data = data
+}
+
+func (i *Item) GetData(locale string) map[string]interface{} {
+	if locale != "" && i.Translations != nil {
+		translation := i.Translations[locale]
+		return MergeData(i.Data, translation)
+	}
+	return i.Data
+}
+
+func (i Item) Encode(ctx context.Context, s *schema.Schema) (*Item, error) {
+	if i.Data != nil {
+		dt, err := schema.Encode(nil, s, i.Data)
+		if err != nil {
+			//return errors.WithField(err, "data")
+			return nil, err
+		}
+		i.Data = dt.(map[string]interface{})
+	}
+	if len(i.Translations) > 0 {
+		for l, v := range i.Translations {
+			dt, err := schema.Encode(nil, s, v)
+			if err != nil {
+				//return errors.WithField(err, fmt.Sprintf("translations.%s", l))
+				return nil, err
+			}
+			i.Translations[l] = dt.(map[string]interface{})
+		}
+	}
+	return &i, nil
+}
+
+func (i Item) Decode(ctx context.Context, s *schema.Schema) (res *Item, err error) {
+
+	if i.Data != nil {
+		i.Data, err = s.Decode(ctx, i.Data)
+		if err != nil {
+			return nil, err
+			//return errors.WithField(err, "data")
+		}
+	}
+
+	return &i, nil
+}
+
+// MergeData merges the given data maps, filling in missing values; entries from later maps override earlier ones
+func MergeData(data ...map[string]interface{}) map[string]interface{} {
+	merge := make(map[string]interface{})
+	for _, d := range data {
+		for k, v := range d {
+			merge[k] = v
+		}
+	}
+	return merge
+}
+
+// ClearData removes values that have not changed compared to the original data
+func ClearData(data ...map[string]interface{}) map[string]interface{} {
+	var clear map[string]interface{}
+
+	for _, d := range data {
+		if clear == nil {
+			clear = d
+			continue
+		}
+
+		for k, v := range d {
+			if reflect.DeepEqual(clear[k], v) {
+				delete(clear, k)
+			}
+		}
+	}
+
+	return clear
+}
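+
+// For example (illustrative values):
+//
+//	MergeData(map[string]any{"a": 1, "b": 2}, map[string]any{"b": 3})
+//	// => map[string]any{"a": 1, "b": 3}
+//
+//	ClearData(map[string]any{"a": 1, "b": 2}, map[string]any{"b": 2})
+//	// => map[string]any{"a": 1}
+//
+// Note that ClearData modifies the first map in place.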
+
+type ProcessDataFunc func(ctx context.Context, sch *schema.Schema, data map[string]interface{}) (map[string]interface{}, error)
+
+func (i Item) ProcessData(ctx context.Context, sch *schema.Schema, fn ProcessDataFunc, locales ...string) (*Item, error) {
+	if i.Data != nil {
+		dt, err := fn(ctx, sch, i.Data)
+		if err != nil {
+			return nil, errors.WithField(err, "data")
+		}
+		i.Data = dt
+	}
+
+	tr := make(map[string]map[string]interface{})
+	for _, l := range locales {
+
+		data := i.GetData(l)
+
+		dt, err := fn(ctx, sch, data)
+		if err != nil {
+			return nil, errors.WithField(err, fmt.Sprintf("translations.%s", l))
+		}
+		tr[l] = dt
+
+	}
+
+	i.Translations = nil
+	if len(tr) > 0 {
+		i.Translations = tr
+	}
+
+	return &i, nil
+}
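+
+// For example, ProcessData can apply schema encoding to the primary data and
+// to the listed locales (a sketch using schema.Encode, as in Item.Encode
+// above; the locale codes are illustrative):
+//
+//	encoded, err := itm.ProcessData(ctx, sch,
+//		func(ctx context.Context, sch *schema.Schema, d map[string]interface{}) (map[string]interface{}, error) {
+//			v, err := schema.Encode(nil, sch, d)
+//			if err != nil {
+//				return nil, err
+//			}
+//			return v.(map[string]interface{}), nil
+//		}, "en", "ru")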
+
+// IsSystemField reports whether the given field is an Item system field
+func IsSystemField(field string) bool {
+	for _, f := range SystemFields {
+		if f == field {
+			return true
+		}
+	}
+	return false
+}
+
+// SetSystemField sets the value of a system field
+func (i *Item) SetSystemField(field string, value interface{}) error {
+	ok := true
+	switch field {
+	case "id":
+		i.ID, ok = value.(string)
+	case "space_id":
+		i.SpaceID, ok = value.(string)
+	case "env_id":
+		i.EnvID, ok = value.(string)
+	case "collection_id":
+		i.CollectionID, ok = value.(string)
+	case "created_rev_at":
+		i.CreatedRevAt, ok = value.(time.Time)
+	case "created_by":
+		i.CreatedBy, ok = value.(string)
+	case "created_at":
+		i.CreatedAt, ok = value.(time.Time)
+	case "updated_by":
+		i.UpdatedBy, ok = value.(string)
+	case "updated_at":
+		i.UpdatedAt, ok = value.(time.Time)
+	case "revision_id":
+		i.RevisionID, ok = value.(string)
+	case "published_by":
+		i.PublishedBy, ok = value.(string)
+	case "published_at":
+		i.PublishedAt, ok = value.(time.Time)
+	case "hidden":
+		i.Hidden, ok = value.(bool)
+	case "deleted":
+		i.Deleted, ok = value.(bool)
+	case "template":
+		i.Template, ok = value.(bool)
+	default:
+		return ErrNotSystemField
+	}
+
+	if !ok {
+		return ErrIncorrectValue
+	}
+
+	return nil
+}
+
+// GetSystem returns the value of a system field
+func (i *Item) GetSystem(field string) (any, error) {
+	switch field {
+	case "id":
+		return i.ID, nil
+	case "space_id":
+		return i.SpaceID, nil
+	case "env_id":
+		return i.EnvID, nil
+	case "collection_id":
+		return i.CollectionID, nil
+	case "created_rev_at":
+		return i.CreatedRevAt, nil
+	case "created_by":
+		return i.CreatedBy, nil
+	case "created_at":
+		return i.CreatedAt, nil
+	case "updated_by":
+		return i.UpdatedBy, nil
+	case "updated_at":
+		return i.UpdatedAt, nil
+	case "revision_id":
+		return i.RevisionID, nil
+	case "published_by":
+		return i.PublishedBy, nil
+	case "published_at":
+		return i.PublishedAt, nil
+	case "hidden":
+		return i.Hidden, nil
+	case "deleted":
+		return i.Deleted, nil
+	case "template":
+		return i.Template, nil
+	}
+
+	return nil, ErrNotSystemField
+}
+
+func (i *Item) setItemData(field string, value interface{}) error {
+	if i.Data == nil {
+		i.Data = make(map[string]any)
+	}
+
+	return data.Set(field, i.Data, value)
+}
+
+func (i *Item) getItemData(field string) (any, error) {
+	if i.Data != nil {
+		if v, ok := data.Get(field, i.Data); ok {
+			return v, nil
+		}
+	}
+
+	return nil, ErrIncorrectField
+}
+
+// Set sets the value of a field, either a system field or a data field
+func (i *Item) Set(field string, value interface{}) error {
+	err := i.SetSystemField(field, value)
+	if err == nil {
+		return nil
+	}
+	if !errors.Is(err, ErrNotSystemField) {
+		return errors.Wrapf(err, "failed to set system field '%s' value", field)
+	}
+
+	return i.setItemData(field, value)
+}
+
+// Get returns the value of a field
+func (i *Item) Get(field string) (any, error) {
+	if v, err := i.GetSystem(field); err == nil {
+		return v, err
+	}
+
+	return i.getItemData(field)
+}
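+
+// For example (illustrative values):
+//
+//	itm := &Item{}
+//	_ = itm.Set("id", "rec1")     // system field, stored in itm.ID
+//	_ = itm.Set("title", "Hello") // data field, stored in itm.Data["title"]
+//	v, _ := itm.Get("title")      // "Hello"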
+
+// GetSystemField returns the field definition for an Item system attribute
+func GetSystemField(fld string) (*field.Field, error) {
+	switch fld {
+	case "id", "space_id", "env_id", "collection_id", "revision_id":
+		return field.String(), nil
+	case "created_rev_at", "created_at", "updated_at", "published_at":
+		return field.Time(), nil
+	case "created_by", "updated_by", "published_by":
+		return field.String(), nil
+	case "hidden", "deleted", "template":
+		return field.Bool(), nil
+	}
+
+	return nil, ErrNotSystemField
+}
+
+// GetField returns the field definition by name, checking system fields first and then the schema
+func GetField(field string, sch *schema.Schema) (*field.Field, error) {
+	if f, err := GetSystemField(field); err == nil {
+		return f, err
+	}
+
+	f := sch.GetField(field)
+	if f == nil {
+		return nil, ErrIncorrectField
+	}
+
+	return f, nil
+}
+
+// GetSystemNamedFields returns the definitions of all Item system fields
+func GetSystemNamedFields() []field.NamedField {
+	fields := make([]field.NamedField, 0, len(SystemFields))
+	for _, n := range SystemFields {
+		f := field.NamedField{Name: n}
+		f.Field, _ = GetSystemField(n)
+		fields = append(fields, f)
+	}
+
+	return fields
+}
+
+func ItemToProto(item *Item) *pb.Item {
+	if item == nil {
+		return nil
+	}
+
+	protoItem := &pb.Item{
+		Id:           item.ID,
+		SpaceId:      item.SpaceID,
+		EnvId:        item.EnvID,
+		CollectionId: item.CollectionID,
+		State:        pb.Item_State(item.State),
+		CreatedBy:    item.CreatedBy,
+		UpdatedBy:    item.UpdatedBy,
+		RevisionId:   item.RevisionID,
+		PublishedBy:  item.PublishedBy,
+		ArchivedBy:   item.ArchivedBy,
+		Locale:       item.Locale,
+		Hidden:       item.Hidden,
+		Template:     item.Template,
+		Deleted:      item.Deleted,
+	}
+
+	if item.Data != nil {
+		protoItem.Data, _ = structpb.NewStruct(item.Data)
+	}
+	if item.Translations != nil {
+		protoItem.Translations = make(map[string]*structpb.Struct, len(item.Translations))
+		for k, v := range item.Translations {
+			protoItem.Translations[k], _ = structpb.NewStruct(v)
+		}
+	}
+
+	protoItem.CreatedRevAt = timestamppb.New(item.CreatedRevAt)
+	protoItem.PublishedAt = timestamppb.New(item.PublishedAt)
+	protoItem.ArchivedAt = timestamppb.New(item.ArchivedAt)
+	protoItem.CreatedAt = timestamppb.New(item.CreatedAt)
+	protoItem.UpdatedAt = timestamppb.New(item.UpdatedAt)
+
+	if item.Permissions != nil {
+		protoItem.Permissions = &pb.Permissions{
+			Edit:       item.Permissions.Edit,
+			Archive:    item.Permissions.Archive,
+			Publish:    item.Permissions.Publish,
+			SoftDelete: item.Permissions.SoftDelete,
+			HardDelete: item.Permissions.HardDelete,
+		}
+	}
+
+	return protoItem
+}
+
+func ItemFromProto(protoItem *pb.Item) *Item {
+
+	if protoItem == nil {
+		return nil
+	}
+
+	item := &Item{
+		ID:           protoItem.Id,
+		SpaceID:      protoItem.SpaceId,
+		EnvID:        protoItem.EnvId,
+		CollectionID: protoItem.CollectionId,
+		State:        State(protoItem.State),
+		CreatedBy:    protoItem.CreatedBy,
+		UpdatedBy:    protoItem.UpdatedBy,
+		RevisionID:   protoItem.RevisionId,
+		PublishedBy:  protoItem.PublishedBy,
+		ArchivedBy:   protoItem.ArchivedBy,
+		Locale:       protoItem.Locale,
+		Hidden:       protoItem.Hidden,
+		Template:     protoItem.Template,
+		Deleted:      protoItem.Deleted,
+	}
+
+	if protoItem.Data != nil {
+		item.Data = protoItem.Data.AsMap()
+	}
+
+	if protoItem.Translations != nil {
+		item.Translations = make(map[string]map[string]interface{}, len(protoItem.Translations))
+		for k, v := range protoItem.Translations {
+			item.Translations[k] = v.AsMap()
+		}
+	}
+
+	if protoItem.Permissions != nil {
+		item.Permissions = &Permissions{
+			Edit:       protoItem.Permissions.Edit,
+			Archive:    protoItem.Permissions.Archive,
+			Publish:    protoItem.Permissions.Publish,
+			SoftDelete: protoItem.Permissions.SoftDelete,
+			HardDelete: protoItem.Permissions.HardDelete,
+		}
+	}
+
+	item.CreatedRevAt = protoItem.CreatedRevAt.AsTime()
+	item.PublishedAt = protoItem.PublishedAt.AsTime()
+	item.ArchivedAt = protoItem.ArchivedAt.AsTime()
+	item.CreatedAt = protoItem.CreatedAt.AsTime()
+	item.UpdatedAt = protoItem.UpdatedAt.AsTime()
+
+	return item
+}
diff --git a/pkg/items/item_test.go b/pkg/items/item_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..2c2c442d07a730b5b384573538424103e38f9b4f
--- /dev/null
+++ b/pkg/items/item_test.go
@@ -0,0 +1,62 @@
+package items
+
+import (
+	"fmt"
+	"testing"
+	"time"
+
+	"git.perx.ru/perxis/perxis/schema"
+	"git.perx.ru/perxis/perxis/schema/field"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestItem_Set(t *testing.T) {
+	item := &Item{}
+
+	item.Set("id", "id")
+	assert.Equal(t, "id", item.ID)
+	now := time.Now()
+
+	item.Set("created_at", now)
+	assert.Equal(t, now, item.CreatedAt)
+
+	item.Set("a.b.c", 101)
+	assert.Equal(t, map[string]any{"a": map[string]any{"b": map[string]any{"c": 101}}}, item.Data)
+
+}
+
+func TestGetField(t *testing.T) {
+	sch := schema.New(
+		"a", field.String(),
+		"obj", field.Object(
+			"a", field.Number(field.NumberFormatFloat),
+			"b", field.String(),
+		),
+		"arr", field.Array(field.Object("a", field.Time())),
+	)
+
+	tests := []struct {
+		name    string
+		field   string
+		want    *field.Field
+		wantErr assert.ErrorAssertionFunc
+	}{
+		{"Simple", "a", field.String(), assert.NoError},
+		{"Incorrect field", "b", nil, assert.Error},
+		{"Object", "obj", field.Object("a", field.Number(field.NumberFormatFloat), "b", field.String()), assert.NoError},
+		{"Object path", "obj.a", field.Number(field.NumberFormatFloat), assert.NoError},
+		{"Array", "arr", field.Array(field.Object("a", field.Time())), assert.NoError},
+		{"Array path", "arr.a", field.Time(), assert.NoError},
+		{"Array item", "arr.", field.Object("a", field.Time()), assert.NoError},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := GetField(tt.field, sch)
+			if !tt.wantErr(t, err, fmt.Sprintf("GetField(%v, sch)", tt.field)) {
+				return
+			}
+			assert.Equalf(t, tt.want, got, "GetField(%v, sch)", tt.field)
+		})
+	}
+}
diff --git a/pkg/items/mocks/Items.go b/pkg/items/mocks/Items.go
new file mode 100644
index 0000000000000000000000000000000000000000..50708b92d064588131506465e9dab0819aa5abfa
--- /dev/null
+++ b/pkg/items/mocks/Items.go
@@ -0,0 +1,539 @@
+// Code generated by mockery v2.14.0. DO NOT EDIT.
+
+package mocks
+
+import (
+	context "context"
+
+	items "git.perx.ru/perxis/perxis/services/items"
+	mock "github.com/stretchr/testify/mock"
+
+	schema "git.perx.ru/perxis/perxis/schema"
+)
+
+// Items is an autogenerated mock type for the Items type
+type Items struct {
+	mock.Mock
+}
+
+// Aggregate provides a mock function with given fields: ctx, spaceId, envId, collectionId, filter, options
+func (_m *Items) Aggregate(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.AggregateOptions) (map[string]interface{}, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, filter)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 map[string]interface{}
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *items.Filter, ...*items.AggregateOptions) map[string]interface{}); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(map[string]interface{})
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, *items.Filter, ...*items.AggregateOptions) error); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// AggregatePublished provides a mock function with given fields: ctx, spaceId, envId, collectionId, filter, options
+func (_m *Items) AggregatePublished(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.AggregatePublishedOptions) (map[string]interface{}, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, filter)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 map[string]interface{}
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *items.Filter, ...*items.AggregatePublishedOptions) map[string]interface{}); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(map[string]interface{})
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, *items.Filter, ...*items.AggregatePublishedOptions) error); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Archive provides a mock function with given fields: ctx, item, options
+func (_m *Items) Archive(ctx context.Context, item *items.Item, options ...*items.ArchiveOptions) error {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, item)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.ArchiveOptions) error); ok {
+		r0 = rf(ctx, item, options...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Create provides a mock function with given fields: ctx, item, opts
+func (_m *Items) Create(ctx context.Context, item *items.Item, opts ...*items.CreateOptions) (*items.Item, error) {
+	_va := make([]interface{}, len(opts))
+	for _i := range opts {
+		_va[_i] = opts[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, item)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 *items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.CreateOptions) *items.Item); ok {
+		r0 = rf(ctx, item, opts...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*items.Item)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, *items.Item, ...*items.CreateOptions) error); ok {
+		r1 = rf(ctx, item, opts...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Delete provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, options
+func (_m *Items) Delete(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.DeleteOptions) error {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, itemId)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, ...*items.DeleteOptions) error); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Find provides a mock function with given fields: ctx, spaceId, envId, collectionId, filter, options
+func (_m *Items) Find(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindOptions) ([]*items.Item, int, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, filter)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 []*items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *items.Filter, ...*items.FindOptions) []*items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*items.Item)
+		}
+	}
+
+	var r1 int
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, *items.Filter, ...*items.FindOptions) int); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r1 = ret.Get(1).(int)
+	}
+
+	var r2 error
+	if rf, ok := ret.Get(2).(func(context.Context, string, string, string, *items.Filter, ...*items.FindOptions) error); ok {
+		r2 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r2 = ret.Error(2)
+	}
+
+	return r0, r1, r2
+}
+
+// FindArchived provides a mock function with given fields: ctx, spaceId, envId, collectionId, filter, options
+func (_m *Items) FindArchived(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindArchivedOptions) ([]*items.Item, int, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, filter)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 []*items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *items.Filter, ...*items.FindArchivedOptions) []*items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*items.Item)
+		}
+	}
+
+	var r1 int
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, *items.Filter, ...*items.FindArchivedOptions) int); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r1 = ret.Get(1).(int)
+	}
+
+	var r2 error
+	if rf, ok := ret.Get(2).(func(context.Context, string, string, string, *items.Filter, ...*items.FindArchivedOptions) error); ok {
+		r2 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r2 = ret.Error(2)
+	}
+
+	return r0, r1, r2
+}
+
+// FindPublished provides a mock function with given fields: ctx, spaceId, envId, collectionId, filter, options
+func (_m *Items) FindPublished(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindPublishedOptions) ([]*items.Item, int, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, filter)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 []*items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *items.Filter, ...*items.FindPublishedOptions) []*items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*items.Item)
+		}
+	}
+
+	var r1 int
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, *items.Filter, ...*items.FindPublishedOptions) int); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r1 = ret.Get(1).(int)
+	}
+
+	var r2 error
+	if rf, ok := ret.Get(2).(func(context.Context, string, string, string, *items.Filter, ...*items.FindPublishedOptions) error); ok {
+		r2 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r2 = ret.Error(2)
+	}
+
+	return r0, r1, r2
+}
+
+// Get provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, options
+func (_m *Items) Get(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.GetOptions) (*items.Item, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, itemId)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 *items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, ...*items.GetOptions) *items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*items.Item)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, string, ...*items.GetOptions) error); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// GetPublished provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, options
+func (_m *Items) GetPublished(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.GetPublishedOptions) (*items.Item, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, itemId)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 *items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, ...*items.GetPublishedOptions) *items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*items.Item)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, string, ...*items.GetPublishedOptions) error); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// GetRevision provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, revisionId, options
+func (_m *Items) GetRevision(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, revisionId string, options ...*items.GetRevisionOptions) (*items.Item, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, itemId, revisionId)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 *items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, string, ...*items.GetRevisionOptions) *items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, itemId, revisionId, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*items.Item)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, string, string, ...*items.GetRevisionOptions) error); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, itemId, revisionId, options...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Introspect provides a mock function with given fields: ctx, item, opts
+func (_m *Items) Introspect(ctx context.Context, item *items.Item, opts ...*items.IntrospectOptions) (*items.Item, *schema.Schema, error) {
+	_va := make([]interface{}, len(opts))
+	for _i := range opts {
+		_va[_i] = opts[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, item)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 *items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.IntrospectOptions) *items.Item); ok {
+		r0 = rf(ctx, item, opts...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*items.Item)
+		}
+	}
+
+	var r1 *schema.Schema
+	if rf, ok := ret.Get(1).(func(context.Context, *items.Item, ...*items.IntrospectOptions) *schema.Schema); ok {
+		r1 = rf(ctx, item, opts...)
+	} else {
+		if ret.Get(1) != nil {
+			r1 = ret.Get(1).(*schema.Schema)
+		}
+	}
+
+	var r2 error
+	if rf, ok := ret.Get(2).(func(context.Context, *items.Item, ...*items.IntrospectOptions) error); ok {
+		r2 = rf(ctx, item, opts...)
+	} else {
+		r2 = ret.Error(2)
+	}
+
+	return r0, r1, r2
+}
+
+// ListRevisions provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, options
+func (_m *Items) ListRevisions(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.ListRevisionsOptions) ([]*items.Item, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, itemId)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 []*items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, ...*items.ListRevisionsOptions) []*items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*items.Item)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, string, ...*items.ListRevisionsOptions) error); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Publish provides a mock function with given fields: ctx, item, options
+func (_m *Items) Publish(ctx context.Context, item *items.Item, options ...*items.PublishOptions) error {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, item)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.PublishOptions) error); ok {
+		r0 = rf(ctx, item, options...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Unarchive provides a mock function with given fields: ctx, item, options
+func (_m *Items) Unarchive(ctx context.Context, item *items.Item, options ...*items.UnarchiveOptions) error {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, item)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.UnarchiveOptions) error); ok {
+		r0 = rf(ctx, item, options...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Undelete provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, options
+func (_m *Items) Undelete(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.UndeleteOptions) error {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, itemId)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, ...*items.UndeleteOptions) error); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Unpublish provides a mock function with given fields: ctx, item, options
+func (_m *Items) Unpublish(ctx context.Context, item *items.Item, options ...*items.UnpublishOptions) error {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, item)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.UnpublishOptions) error); ok {
+		r0 = rf(ctx, item, options...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Update provides a mock function with given fields: ctx, item, options
+func (_m *Items) Update(ctx context.Context, item *items.Item, options ...*items.UpdateOptions) error {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, item)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.UpdateOptions) error); ok {
+		r0 = rf(ctx, item, options...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+type mockConstructorTestingTNewItems interface {
+	mock.TestingT
+	Cleanup(func())
+}
+
+// NewItems creates a new instance of Items. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+func NewItems(t mockConstructorTestingTNewItems) *Items {
+	mock := &Items{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
diff --git a/pkg/items/mocks/PreSaver.go b/pkg/items/mocks/PreSaver.go
new file mode 100644
index 0000000000000000000000000000000000000000..6010e17bd14db83518507ac53ab35076a6cbc5bf
--- /dev/null
+++ b/pkg/items/mocks/PreSaver.go
@@ -0,0 +1,62 @@
+// Code generated by mockery v2.14.0. DO NOT EDIT.
+
+package mocks
+
+import (
+	context "context"
+
+	items "git.perx.ru/perxis/perxis-go/pkg/items"
+	field "git.perx.ru/perxis/perxis-go/pkg/schema/field"
+
+	mock "github.com/stretchr/testify/mock"
+)
+
+// PreSaver is an autogenerated mock type for the PreSaver type
+type PreSaver struct {
+	mock.Mock
+}
+
+// PreSave provides a mock function with given fields: ctx, f, v, itemCtx
+func (_m *PreSaver) PreSave(ctx context.Context, f *field.Field, v interface{}, itemCtx *items.Context) (interface{}, bool, error) {
+	ret := _m.Called(ctx, f, v, itemCtx)
+
+	var r0 interface{}
+	if rf, ok := ret.Get(0).(func(context.Context, *field.Field, interface{}, *items.Context) interface{}); ok {
+		r0 = rf(ctx, f, v, itemCtx)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(interface{})
+		}
+	}
+
+	var r1 bool
+	if rf, ok := ret.Get(1).(func(context.Context, *field.Field, interface{}, *items.Context) bool); ok {
+		r1 = rf(ctx, f, v, itemCtx)
+	} else {
+		r1 = ret.Get(1).(bool)
+	}
+
+	var r2 error
+	if rf, ok := ret.Get(2).(func(context.Context, *field.Field, interface{}, *items.Context) error); ok {
+		r2 = rf(ctx, f, v, itemCtx)
+	} else {
+		r2 = ret.Error(2)
+	}
+
+	return r0, r1, r2
+}
+
+type mockConstructorTestingTNewPreSaver interface {
+	mock.TestingT
+	Cleanup(func())
+}
+
+// NewPreSaver creates a new instance of PreSaver. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+func NewPreSaver(t mockConstructorTestingTNewPreSaver) *PreSaver {
+	mock := &PreSaver{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
diff --git a/pkg/items/options.go b/pkg/items/options.go
new file mode 100644
index 0000000000000000000000000000000000000000..d48a1cdaacb157e46c1185a37602c1703bb3b59e
--- /dev/null
+++ b/pkg/items/options.go
@@ -0,0 +1,422 @@
+package items
+
+import "git.perx.ru/perxis/perxis-go/pkg/options"
+
+type Options struct {
+	Env               map[string]interface{}
+	Filter            []string
+	PermissionsFilter []string
+}
+
+func MergeOptions(opts ...Options) Options {
+	o := Options{
+		Env:    make(map[string]interface{}),
+		Filter: make([]string, 0),
+	}
+
+	for _, opt := range opts {
+
+		for k, v := range opt.Env {
+			o.Env[k] = v
+		}
+
+		o.Filter = append(o.Filter, opt.Filter...)
+		o.PermissionsFilter = append(o.PermissionsFilter, opt.PermissionsFilter...)
+	}
+
+	return o
+}
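+
+// For example (illustrative values), Env keys from later options override
+// earlier ones, while Filter and PermissionsFilter entries are appended:
+//
+//	a := Options{Env: map[string]interface{}{"k": 1}, Filter: []string{"x"}}
+//	b := Options{Env: map[string]interface{}{"k": 2}, Filter: []string{"y"}}
+//	m := MergeOptions(a, b) // m.Env["k"] == 2, m.Filter == []string{"x", "y"}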
+
+type CreateOptions struct {
+	Options
+
+	UpdateAttrs bool
+}
+
+func MergeCreateOptions(opts ...*CreateOptions) *CreateOptions {
+	o := &CreateOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		if opt.UpdateAttrs {
+			o.UpdateAttrs = true
+		}
+
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type IntrospectOptions struct {
+	Options
+	Locale string
+}
+
+func MergeIntrospectOptions(opts ...*IntrospectOptions) *IntrospectOptions {
+	o := &IntrospectOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type GetOptions struct {
+	Options
+}
+
+func MergeGetOptions(opts ...*GetOptions) *GetOptions {
+	o := &GetOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type FindOptions struct {
+	Options
+	options.FindOptions
+	Deleted   bool
+	Regular   bool
+	Hidden    bool
+	Templates bool
+}
+
+func NewFindOptions(opts ...interface{}) *FindOptions {
+	fo := &FindOptions{}
+	fo.FindOptions = *options.MergeFindOptions(opts...)
+	return fo
+}
+
+func MergeFindOptions(opts ...*FindOptions) *FindOptions {
+	o := NewFindOptions()
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Regular = o.Regular || opt.Regular
+		o.Templates = o.Templates || opt.Templates
+		o.Hidden = o.Hidden || opt.Hidden
+		o.Deleted = o.Deleted || opt.Deleted
+		o.Options = MergeOptions(o.Options, opt.Options)
+		o.FindOptions = *options.MergeFindOptions(&o.FindOptions, &opt.FindOptions)
+	}
+	return o
+}
+
+type UpdateOptions struct {
+	Options
+
+	UpdateAttrs bool
+}
+
+func MergeUpdateOptions(opts ...*UpdateOptions) *UpdateOptions {
+	o := &UpdateOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		if opt.UpdateAttrs {
+			o.UpdateAttrs = true
+		}
+
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type DeleteOptions struct {
+	Options
+
+	Erase bool
+}
+
+func MergeDeleteOptions(opts ...*DeleteOptions) *DeleteOptions {
+	o := &DeleteOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		if opt.Erase {
+			o.Erase = true
+		}
+
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type SoftDeleteOptions struct {
+	Options
+}
+
+func MergeSoftDeleteOptions(opts ...*SoftDeleteOptions) *SoftDeleteOptions {
+	o := &SoftDeleteOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type UndeleteOptions struct {
+	Options
+}
+
+func MergeUndeleteOptions(opts ...*UndeleteOptions) *UndeleteOptions {
+	o := &UndeleteOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type PublishOptions struct {
+	Options
+
+	UpdateAttrs bool
+}
+
+func MergePublishOptions(opts ...*PublishOptions) *PublishOptions {
+	o := &PublishOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		if opt.UpdateAttrs {
+			o.UpdateAttrs = true
+		}
+
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type UnpublishOptions struct {
+	Options
+}
+
+func MergeUnpublishOptions(opts ...*UnpublishOptions) *UnpublishOptions {
+	o := &UnpublishOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type GetPublishedOptions struct {
+	Options
+	LocaleID string
+}
+
+func NewGetPublishedOptions(oo ...interface{}) *GetPublishedOptions {
+	fo := &GetPublishedOptions{}
+	for _, o := range oo {
+		switch o := o.(type) {
+		case string:
+			fo.LocaleID = o
+		}
+	}
+	return fo
+}
+
+func MergeGetPublishedOptions(opts ...*GetPublishedOptions) *GetPublishedOptions {
+	o := &GetPublishedOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+		if opt.LocaleID != "" {
+			o.LocaleID = opt.LocaleID
+		}
+	}
+	return o
+}
+
+type FindPublishedOptions struct {
+	Options
+	options.FindOptions
+	LocaleID  string
+	Regular   bool
+	Hidden    bool
+	Templates bool
+}
+
+func NewFindPublishedOptions(opts ...interface{}) *FindPublishedOptions {
+	fo := &FindPublishedOptions{}
+	for _, o := range opts {
+		switch o := o.(type) {
+		case string:
+			fo.LocaleID = o
+		}
+	}
+
+	fo.FindOptions = *options.MergeFindOptions(opts...)
+	return fo
+}
+
+func MergeFindPublishedOptions(opts ...*FindPublishedOptions) *FindPublishedOptions {
+	o := NewFindPublishedOptions()
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Regular = o.Regular || opt.Regular
+		o.Templates = o.Templates || opt.Templates
+		o.Hidden = o.Hidden || opt.Hidden
+		o.Options = MergeOptions(o.Options, opt.Options)
+		o.FindOptions = *options.MergeFindOptions(&o.FindOptions, &opt.FindOptions)
+
+		if opt.LocaleID != "" {
+			o.LocaleID = opt.LocaleID
+		}
+	}
+	return o
+}
+
+type GetRevisionOptions struct {
+	Options
+}
+
+func MergeGetRevisionOptions(opts ...*GetRevisionOptions) *GetRevisionOptions {
+	o := &GetRevisionOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type ListRevisionsOptions struct {
+	Options
+	options.FindOptions
+}
+
+func MergeListRevisionsOptions(opts ...*ListRevisionsOptions) *ListRevisionsOptions {
+	o := &ListRevisionsOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+		o.FindOptions = *options.MergeFindOptions(&o.FindOptions, &opt.FindOptions)
+	}
+	return o
+}
+
+type ArchiveOptions struct {
+	Options
+}
+
+func MergeArchiveOptions(opts ...*ArchiveOptions) *ArchiveOptions {
+	o := &ArchiveOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type FindArchivedOptions struct {
+	Options
+	options.FindOptions
+}
+
+func NewFindArchivedOptions(oo ...interface{}) *FindArchivedOptions {
+	fo := &FindArchivedOptions{}
+	fo.FindOptions = *options.MergeFindOptions(oo...)
+	return fo
+}
+
+func MergeFindArchivedOptions(opts ...*FindArchivedOptions) *FindArchivedOptions {
+	o := NewFindArchivedOptions()
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+		o.FindOptions = *options.MergeFindOptions(&o.FindOptions, &opt.FindOptions)
+	}
+	return o
+}
+
+type UnarchiveOptions struct {
+	Options
+}
+
+func MergeUnarchiveOptions(opts ...*UnarchiveOptions) *UnarchiveOptions {
+	o := &UnarchiveOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type AggregateOptions struct {
+	Options
+	options.SortOptions
+
+	// Fields lists the fields to be returned or computed in the result
+	// (see the illustrative sketch after this struct).
+	// Key (string) - the name under which the result is added.
+	// Value (string) - an expression whose evaluation produces the result.
+	// Expression functions (for a field F of type T):
+	// - distinct(F) - all values of the field, result type []T
+	// - min(F) - the minimum value of the field, result type T
+	// - max(F) - the maximum value of the field, result type T
+	// - avg(F) - the average value of the field, result type T
+	// - sum(F) - the sum of the field values, result type T
+	// - count() - the number of records, result type int
+	Fields map[string]string
+}
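+
+// Illustrative sketch (not part of this change): one way the Fields map could
+// be filled for an Aggregate call. The collection field "price" and the result
+// keys are made up for the example.
+//
+//	opts := &AggregateOptions{
+//		Fields: map[string]string{
+//			"prices": "distinct(price)",
+//			"lowest": "min(price)",
+//			"total":  "count()",
+//		},
+//	}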
+
+func MergeAggregateOptions(opts ...*AggregateOptions) *AggregateOptions {
+	o := &AggregateOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+
+		// Copy fields into a fresh map so the callers' option values are never mutated.
+		if opt.Fields != nil && o.Fields == nil {
+			o.Fields = make(map[string]string, len(opt.Fields))
+		}
+		for k, v := range opt.Fields {
+			o.Fields[k] = v
+		}
+	}
+	return o
+}
+
+type AggregatePublishedOptions AggregateOptions
+
+func MergeAggregatePublishedOptions(opts ...*AggregatePublishedOptions) *AggregatePublishedOptions {
+	ao := make([]*AggregateOptions, len(opts))
+	for i, opt := range opts {
+		ao[i] = (*AggregateOptions)(opt)
+	}
+	merged := MergeAggregateOptions(ao...)
+	return (*AggregatePublishedOptions)(merged)
+}
diff --git a/pkg/items/service.go b/pkg/items/service.go
new file mode 100644
index 0000000000000000000000000000000000000000..533f509997df94f406e2f85a589003d585a3a2da
--- /dev/null
+++ b/pkg/items/service.go
@@ -0,0 +1,151 @@
+package items
+
+import (
+	"context"
+	"regexp"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/filter"
+	"git.perx.ru/perxis/perxis-go/schema"
+	"git.perx.ru/perxis/perxis-go/schema/field"
+)
+
+// @microgen grpc
+// @protobuf git.perx.ru/perxis/perxis-go/proto/items
+// @grpc-addr content.items.Items
+type Items interface {
+	Create(ctx context.Context, item *Item, opts ...*CreateOptions) (created *Item, err error)
+	Introspect(ctx context.Context, item *Item, opts ...*IntrospectOptions) (itm *Item, sch *schema.Schema, err error)
+	Get(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*GetOptions) (item *Item, err error)
+	Find(ctx context.Context, spaceId, envId, collectionId string, filter *Filter, options ...*FindOptions) (items []*Item, total int, err error)
+	Update(ctx context.Context, item *Item, options ...*UpdateOptions) (err error)
+
+	// Delete removes an item.
+	// If the DeleteOptions.Erase flag is set, the data is permanently removed from the system.
+	// Otherwise a "soft delete" is performed: the item is marked as deleted, can be restored with Items.Undelete and is still returned by Items.Get/Find (see the usage sketch after this interface)
+	Delete(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*DeleteOptions) (err error)
+
+	// Undelete restores items after a "soft delete"
+	Undelete(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*UndeleteOptions) (err error)
+
+	Publish(ctx context.Context, item *Item, options ...*PublishOptions) (err error)
+	Unpublish(ctx context.Context, item *Item, options ...*UnpublishOptions) (err error)
+	GetPublished(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*GetPublishedOptions) (item *Item, err error)
+	FindPublished(ctx context.Context, spaceId, envId, collectionId string, filter *Filter, options ...*FindPublishedOptions) (items []*Item, total int, err error)
+
+	GetRevision(ctx context.Context, spaceId, envId, collectionId, itemId, revisionId string, options ...*GetRevisionOptions) (item *Item, err error)
+	ListRevisions(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*ListRevisionsOptions) (items []*Item, err error)
+
+	Archive(ctx context.Context, item *Item, options ...*ArchiveOptions) (err error)
+	FindArchived(ctx context.Context, spaceId, envId, collectionId string, filter *Filter, options ...*FindArchivedOptions) (items []*Item, total int, err error)
+	Unarchive(ctx context.Context, item *Item, options ...*UnarchiveOptions) (err error)
+
+	// Aggregate performs data aggregation
+	Aggregate(ctx context.Context, spaceId, envId, collectionId string, filter *Filter, options ...*AggregateOptions) (result map[string]interface{}, err error)
+	// AggregatePublished performs aggregation over published data
+	AggregatePublished(ctx context.Context, spaceId, envId, collectionId string, filter *Filter, options ...*AggregatePublishedOptions) (result map[string]interface{}, err error)
+}
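+
+// Usage sketch (illustrative; svc is assumed to be any Items implementation and
+// the identifier values are placeholders): soft delete, restore, then erase.
+//
+//	_ = svc.Delete(ctx, spaceID, envID, collID, itemID)                              // soft delete, the item can still be restored
+//	_ = svc.Undelete(ctx, spaceID, envID, collID, itemID)                            // restore the soft-deleted item
+//	_ = svc.Delete(ctx, spaceID, envID, collID, itemID, &DeleteOptions{Erase: true}) // erase permanently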
+
+// PreSaver is an interface a field can implement to receive the PreSave event before an Item is saved to Storage (a minimal sketch follows the interface)
+type PreSaver interface {
+	PreSave(ctx context.Context, f *field.Field, v interface{}, itemCtx *Context) (interface{}, bool, error)
+}
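+
+// Minimal sketch of a PreSaver (illustrative; the meaning of the returned bool
+// is assumed here to report whether the value was replaced):
+//
+//	type trimString struct{}
+//
+//	func (trimString) PreSave(ctx context.Context, f *field.Field, v interface{}, itemCtx *Context) (interface{}, bool, error) {
+//		s, ok := v.(string)
+//		if !ok {
+//			return v, false, nil
+//		}
+//		return strings.TrimSpace(s), true, nil
+//	}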
+
+type Filter struct {
+	ID     []string
+	Data   []*filter.Filter
+	Search string // Search query; only one query is supported at a time
+	Q      []string
+}
+
+func NewFilter(params ...interface{}) *Filter {
+	f := &Filter{}
+	for _, p := range params {
+		switch v := p.(type) {
+		case *filter.Filter:
+			f.Data = append(f.Data, v)
+		case string:
+			f.Q = append(f.Q, v)
+		}
+	}
+	return f
+}
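+
+// Illustrative call: each string argument is appended to Filter.Q and each
+// *filter.Filter is appended to Filter.Data (the query text below is made up):
+//
+//	f := NewFilter("category == 'news'")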
+
+// AggregateExpRe is the format an aggregation expression must match
+var AggregateExpRe = regexp.MustCompile(`([a-zA-Z]+)\((.*)\)`)
+
+func ParseAggregateExp(exp string) (string, string, bool) {
+	ss := AggregateExpRe.FindAllStringSubmatch(exp, -1)
+	if len(ss) == 0 || len(ss[0]) < 3 {
+		return "", "", false
+	}
+	return ss[0][1], ss[0][2], true
+}
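+
+// For example (illustrative values):
+//
+//	fn, fld, ok := ParseAggregateExp("min(price)") // fn == "min", fld == "price", ok == true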
+
+func DecodeAggregateResult(ctx context.Context, request map[string]string, r map[string]interface{}, s *schema.Schema) (map[string]interface{}, error) {
+	result := make(map[string]interface{}, len(r))
+	for outputField, exp := range request {
+
+		funcName, fldName, ok := ParseAggregateExp(exp)
+		if !ok || fldName == "" {
+			if v, ok := r[outputField]; ok {
+				result[outputField] = v
+			}
+			continue
+		}
+
+		schemaFld := s.GetField(fldName)
+		if schemaFld == nil {
+			if v, ok := r[outputField]; ok {
+				result[outputField] = v
+			}
+			continue
+		}
+
+		if funcName == "distinct" {
+			schemaFld = field.Array(schemaFld)
+		}
+
+		data, err := schema.Decode(ctx, schemaFld, r[outputField])
+		if err != nil {
+			return nil, errors.Wrapf(err, "decode data for field '%s'", outputField)
+		}
+		result[outputField] = data
+	}
+
+	return result, nil
+}
+
+func EncodeAggregateResult(ctx context.Context, request map[string]string, r map[string]interface{}, s *schema.Schema) (map[string]interface{}, error) {
+	result := make(map[string]interface{}, len(r))
+	for outputField, exp := range request {
+
+		funcName, fldName, ok := ParseAggregateExp(exp)
+		if !ok || fldName == "" {
+			if v, ok := r[outputField]; ok {
+				result[outputField] = v
+			}
+			continue
+		}
+
+		schemaFld := s.GetField(fldName)
+		if schemaFld == nil {
+			if v, ok := r[outputField]; ok {
+				result[outputField] = v
+			}
+			continue
+		}
+
+		if funcName == "distinct" {
+			schemaFld = field.Array(schemaFld)
+		}
+
+		data, err := schema.Encode(ctx, schemaFld, r[outputField])
+		if err != nil {
+			return nil, errors.Wrapf(err, "encode data for field '%s'", outputField)
+		}
+		result[outputField] = data
+	}
+
+	return result, nil
+}
diff --git a/pkg/items/transport/client.go b/pkg/items/transport/client.go
new file mode 100644
index 0000000000000000000000000000000000000000..3f6bd04ceab90dad415d963c6db3d1a9f4fb4b47
--- /dev/null
+++ b/pkg/items/transport/client.go
@@ -0,0 +1,266 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"github.com/hashicorp/go-multierror"
+	"google.golang.org/grpc/status"
+)
+
+func (set EndpointsSet) Create(arg0 context.Context, arg1 *items.Item, arg2 ...*items.CreateOptions) (res0 *items.Item, res1 error) {
+	request := CreateRequest{
+		Item: arg1,
+		Opts: arg2,
+	}
+	response, res1 := set.CreateEndpoint(arg0, &request)
+	if res1 != nil {
+		return
+	}
+	return response.(*CreateResponse).Created, res1
+}
+
+func (set EndpointsSet) Introspect(arg0 context.Context, arg1 *items.Item, arg2 ...*items.IntrospectOptions) (res0 *items.Item, res1 *schema.Schema, res2 error) {
+	request := IntrospectRequest{
+		Item: arg1,
+		Opts: arg2,
+	}
+	response, res2 := set.IntrospectEndpoint(arg0, &request)
+	if res2 != nil {
+		return
+	}
+	resp := response.(*IntrospectResponse)
+
+	if len(resp.ValidationErrors) > 0 {
+		var merr *multierror.Error
+		for _, err := range resp.ValidationErrors {
+			var fieldErr errors.FieldError
+			if errors.As(err, &fieldErr) {
+				merr = multierror.Append(merr, fieldErr)
+			}
+		}
+
+		res2 = errors.Wrap(merr, "validation error")
+
+	}
+	return resp.Item, resp.Schema, res2
+}
+
+func (set EndpointsSet) Get(arg0 context.Context, arg1, arg2, arg3, arg4 string, arg5 ...*items.GetOptions) (res0 *items.Item, res1 error) {
+	request := GetRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		ItemId:       arg4,
+		SpaceId:      arg1,
+		Options:      arg5,
+	}
+	response, res1 := set.GetEndpoint(arg0, &request)
+	if res1 != nil {
+		return
+	}
+	return response.(*GetResponse).Item, res1
+}
+
+func (set EndpointsSet) Find(arg0 context.Context, arg1, arg2, arg3 string, arg4 *items.Filter, arg5 ...*items.FindOptions) (res0 []*items.Item, res1 int, res2 error) {
+	request := FindRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		Filter:       arg4,
+		Options:      arg5,
+		SpaceId:      arg1,
+	}
+	response, res2 := set.FindEndpoint(arg0, &request)
+	if res2 != nil {
+		return
+	}
+	return response.(*FindResponse).Items, response.(*FindResponse).Total, res2
+}
+
+func (set EndpointsSet) Update(arg0 context.Context, arg1 *items.Item, arg2 ...*items.UpdateOptions) (res0 error) {
+	request := UpdateRequest{Item: arg1, Options: arg2}
+	_, res0 = set.UpdateEndpoint(arg0, &request)
+	if res0 != nil {
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Delete(arg0 context.Context, arg1, arg2, arg3, arg4 string, options ...*items.DeleteOptions) (res0 error) {
+	request := DeleteRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		ItemId:       arg4,
+		SpaceId:      arg1,
+		Options:      options,
+	}
+	_, res0 = set.DeleteEndpoint(arg0, &request)
+	if res0 != nil {
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Undelete(arg0 context.Context, arg1, arg2, arg3, arg4 string, options ...*items.UndeleteOptions) (res0 error) {
+	request := UndeleteRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		ItemId:       arg4,
+		SpaceId:      arg1,
+		Options:      options,
+	}
+	_, res0 = set.UndeleteEndpoint(arg0, &request)
+	if res0 != nil {
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Publish(arg0 context.Context, arg1 *items.Item, arg2 ...*items.PublishOptions) (res0 error) {
+	request := PublishRequest{Item: arg1, Options: arg2}
+	_, res0 = set.PublishEndpoint(arg0, &request)
+	if res0 != nil {
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Unpublish(arg0 context.Context, arg1 *items.Item, arg2 ...*items.UnpublishOptions) (res0 error) {
+	request := UnpublishRequest{Item: arg1, Options: arg2}
+	_, res0 = set.UnpublishEndpoint(arg0, &request)
+	if res0 != nil {
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) GetPublished(arg0 context.Context, arg1, arg2, arg3, arg4 string, arg5 ...*items.GetPublishedOptions) (res0 *items.Item, res1 error) {
+	request := GetPublishedRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		ItemId:       arg4,
+		SpaceId:      arg1,
+		Options:      arg5,
+	}
+	response, res1 := set.GetPublishedEndpoint(arg0, &request)
+	if res1 != nil {
+		return
+	}
+	return response.(*GetPublishedResponse).Item, res1
+}
+
+func (set EndpointsSet) FindPublished(arg0 context.Context, arg1, arg2, arg3 string, arg4 *items.Filter, arg5 ...*items.FindPublishedOptions) (res0 []*items.Item, res1 int, res2 error) {
+	request := FindPublishedRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		Filter:       arg4,
+		Options:      arg5,
+		SpaceId:      arg1,
+	}
+	response, res2 := set.FindPublishedEndpoint(arg0, &request)
+	if res2 != nil {
+		return
+	}
+	return response.(*FindPublishedResponse).Items, response.(*FindPublishedResponse).Total, res2
+}
+
+func (set EndpointsSet) GetRevision(arg0 context.Context, arg1 string, arg2 string, arg3 string, arg4 string, arg5 string, arg6 ...*items.GetRevisionOptions) (res0 *items.Item, res1 error) {
+	request := GetRevisionRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		ItemId:       arg4,
+		RevisionId:   arg5,
+		SpaceId:      arg1,
+		Options:      arg6,
+	}
+	response, res1 := set.GetRevisionEndpoint(arg0, &request)
+	if res1 != nil {
+		return
+	}
+	return response.(*GetRevisionResponse).Item, res1
+}
+
+func (set EndpointsSet) ListRevisions(arg0 context.Context, arg1, arg2, arg3, arg4 string, arg5 ...*items.ListRevisionsOptions) (res0 []*items.Item, res1 error) {
+	request := ListRevisionsRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		ItemId:       arg4,
+		SpaceId:      arg1,
+		Options:      arg5,
+	}
+	response, res1 := set.ListRevisionsEndpoint(arg0, &request)
+	if res1 != nil {
+		return
+	}
+	return response.(*ListRevisionsResponse).Items, res1
+}
+
+func (set EndpointsSet) Archive(arg0 context.Context, arg1 *items.Item, arg2 ...*items.ArchiveOptions) (res0 error) {
+	request := ArchiveRequest{Item: arg1, Options: arg2}
+	_, res0 = set.ArchiveEndpoint(arg0, &request)
+	if res0 != nil {
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) FindArchived(arg0 context.Context, arg1, arg2, arg3 string, arg4 *items.Filter, arg5 ...*items.FindArchivedOptions) (res0 []*items.Item, res1 int, res2 error) {
+	request := FindArchivedRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		Options:      arg5,
+		Filter:       arg4,
+		SpaceId:      arg1,
+	}
+	response, res2 := set.FindArchivedEndpoint(arg0, &request)
+	if res2 != nil {
+		return
+	}
+	return response.(*FindArchivedResponse).Items, response.(*FindArchivedResponse).Total, res2
+}
+
+func (set EndpointsSet) Unarchive(arg0 context.Context, arg1 *items.Item, arg2 ...*items.UnarchiveOptions) (res0 error) {
+	request := UnarchiveRequest{Item: arg1, Options: arg2}
+	_, res0 = set.UnarchiveEndpoint(arg0, &request)
+	if res0 != nil {
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Aggregate(arg0 context.Context, arg1, arg2, arg3 string, arg4 *items.Filter, arg5 ...*items.AggregateOptions) (res0 map[string]interface{}, res1 error) {
+	request := AggregateRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		Filter:       arg4,
+		Options:      arg5,
+		SpaceId:      arg1,
+	}
+	response, res1 := set.AggregateEndpoint(arg0, &request)
+	if res1 != nil {
+		return
+	}
+	return response.(*AggregateResponse).Result, res1
+}
+
+func (set EndpointsSet) AggregatePublished(arg0 context.Context, arg1, arg2, arg3 string, arg4 *items.Filter, arg5 ...*items.AggregatePublishedOptions) (res0 map[string]interface{}, res1 error) {
+	request := AggregatePublishedRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		Filter:       arg4,
+		Options:      arg5,
+		SpaceId:      arg1,
+	}
+	response, res1 := set.AggregatePublishedEndpoint(arg0, &request)
+
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*AggregatePublishedResponse).Result, res1
+}
diff --git a/pkg/items/transport/endpoints.microgen.go b/pkg/items/transport/endpoints.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..5a6e8d5a678cd7180deca17a97f615fe7793ff6e
--- /dev/null
+++ b/pkg/items/transport/endpoints.microgen.go
@@ -0,0 +1,27 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import endpoint "github.com/go-kit/kit/endpoint"
+
+// EndpointsSet implements the Items API and is used for transport purposes.
+type EndpointsSet struct {
+	CreateEndpoint             endpoint.Endpoint
+	IntrospectEndpoint         endpoint.Endpoint
+	GetEndpoint                endpoint.Endpoint
+	FindEndpoint               endpoint.Endpoint
+	UpdateEndpoint             endpoint.Endpoint
+	DeleteEndpoint             endpoint.Endpoint
+	UndeleteEndpoint           endpoint.Endpoint
+	PublishEndpoint            endpoint.Endpoint
+	UnpublishEndpoint          endpoint.Endpoint
+	GetPublishedEndpoint       endpoint.Endpoint
+	FindPublishedEndpoint      endpoint.Endpoint
+	GetRevisionEndpoint        endpoint.Endpoint
+	ListRevisionsEndpoint      endpoint.Endpoint
+	ArchiveEndpoint            endpoint.Endpoint
+	FindArchivedEndpoint       endpoint.Endpoint
+	UnarchiveEndpoint          endpoint.Endpoint
+	AggregateEndpoint          endpoint.Endpoint
+	AggregatePublishedEndpoint endpoint.Endpoint
+}
diff --git a/pkg/items/transport/exchanges.microgen.go b/pkg/items/transport/exchanges.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..b601946f74837d41df38b07e3c5887ba8698b183
--- /dev/null
+++ b/pkg/items/transport/exchanges.microgen.go
@@ -0,0 +1,186 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	items "git.perx.ru/perxis/perxis-go/pkg/items"
+	schema "git.perx.ru/perxis/perxis-go/pkg/schema"
+)
+
+type (
+	CreateRequest struct {
+		Item *items.Item            `json:"item"`
+		Opts []*items.CreateOptions `json:"opts"` // This field was defined with ellipsis (...).
+	}
+	CreateResponse struct {
+		Created *items.Item `json:"created"`
+	}
+
+	IntrospectRequest struct {
+		Item *items.Item                `json:"item"`
+		Opts []*items.IntrospectOptions `json:"opts"` // This field was defined with ellipsis (...).
+	}
+	IntrospectResponse struct {
+		Item             *items.Item    `json:"item"`
+		Schema           *schema.Schema `json:"schema"`
+		ValidationErrors []error        `json:"validation_errors"`
+	}
+
+	GetRequest struct {
+		SpaceId      string              `json:"space_id"`
+		EnvId        string              `json:"env_id"`
+		CollectionId string              `json:"collection_id"`
+		ItemId       string              `json:"item_id"`
+		Options      []*items.GetOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	GetResponse struct {
+		Item *items.Item `json:"item"`
+	}
+
+	FindRequest struct {
+		SpaceId      string               `json:"space_id"`
+		EnvId        string               `json:"env_id"`
+		CollectionId string               `json:"collection_id"`
+		Filter       *items.Filter        `json:"filter"`
+		Options      []*items.FindOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	FindResponse struct {
+		Items []*items.Item `json:"items"`
+		Total int           `json:"total"`
+	}
+
+	UpdateRequest struct {
+		Item    *items.Item            `json:"item"`
+		Options []*items.UpdateOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	// Formal exchange type, please do not delete.
+	UpdateResponse struct{}
+
+	DeleteRequest struct {
+		SpaceId      string                 `json:"space_id"`
+		EnvId        string                 `json:"env_id"`
+		CollectionId string                 `json:"collection_id"`
+		ItemId       string                 `json:"item_id"`
+		Options      []*items.DeleteOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	// Formal exchange type, please do not delete.
+	DeleteResponse struct{}
+
+	UndeleteRequest struct {
+		SpaceId      string                   `json:"space_id"`
+		EnvId        string                   `json:"env_id"`
+		CollectionId string                   `json:"collection_id"`
+		ItemId       string                   `json:"item_id"`
+		Options      []*items.UndeleteOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	// Formal exchange type, please do not delete.
+	UndeleteResponse struct{}
+
+	PublishRequest struct {
+		Item    *items.Item             `json:"item"`
+		Options []*items.PublishOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	// Formal exchange type, please do not delete.
+	PublishResponse struct{}
+
+	UnpublishRequest struct {
+		Item    *items.Item               `json:"item"`
+		Options []*items.UnpublishOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	// Formal exchange type, please do not delete.
+	UnpublishResponse struct{}
+
+	GetPublishedRequest struct {
+		SpaceId      string                       `json:"space_id"`
+		EnvId        string                       `json:"env_id"`
+		CollectionId string                       `json:"collection_id"`
+		ItemId       string                       `json:"item_id"`
+		Options      []*items.GetPublishedOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	GetPublishedResponse struct {
+		Item *items.Item `json:"item"`
+	}
+
+	FindPublishedRequest struct {
+		SpaceId      string                        `json:"space_id"`
+		EnvId        string                        `json:"env_id"`
+		CollectionId string                        `json:"collection_id"`
+		Filter       *items.Filter                 `json:"filter"`
+		Options      []*items.FindPublishedOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	FindPublishedResponse struct {
+		Items []*items.Item `json:"items"`
+		Total int           `json:"total"`
+	}
+
+	GetRevisionRequest struct {
+		SpaceId      string                      `json:"space_id"`
+		EnvId        string                      `json:"env_id"`
+		CollectionId string                      `json:"collection_id"`
+		ItemId       string                      `json:"item_id"`
+		RevisionId   string                      `json:"revision_id"`
+		Options      []*items.GetRevisionOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	GetRevisionResponse struct {
+		Item *items.Item `json:"item"`
+	}
+
+	ListRevisionsRequest struct {
+		SpaceId      string                        `json:"space_id"`
+		EnvId        string                        `json:"env_id"`
+		CollectionId string                        `json:"collection_id"`
+		ItemId       string                        `json:"item_id"`
+		Options      []*items.ListRevisionsOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	ListRevisionsResponse struct {
+		Items []*items.Item `json:"items"`
+	}
+
+	ArchiveRequest struct {
+		Item    *items.Item             `json:"item"`
+		Options []*items.ArchiveOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	// Formal exchange type, please do not delete.
+	ArchiveResponse struct{}
+
+	FindArchivedRequest struct {
+		SpaceId      string                       `json:"space_id"`
+		EnvId        string                       `json:"env_id"`
+		CollectionId string                       `json:"collection_id"`
+		Filter       *items.Filter                `json:"filter"`
+		Options      []*items.FindArchivedOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	FindArchivedResponse struct {
+		Items []*items.Item `json:"items"`
+		Total int           `json:"total"`
+	}
+
+	UnarchiveRequest struct {
+		Item    *items.Item               `json:"item"`
+		Options []*items.UnarchiveOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	// Formal exchange type, please do not delete.
+	UnarchiveResponse struct{}
+
+	AggregateRequest struct {
+		SpaceId      string                    `json:"space_id"`
+		EnvId        string                    `json:"env_id"`
+		CollectionId string                    `json:"collection_id"`
+		Filter       *items.Filter             `json:"filter"`
+		Options      []*items.AggregateOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	AggregateResponse struct {
+		Result map[string]interface{} `json:"result"`
+	}
+
+	AggregatePublishedRequest struct {
+		SpaceId      string                             `json:"space_id"`
+		EnvId        string                             `json:"env_id"`
+		CollectionId string                             `json:"collection_id"`
+		Filter       *items.Filter                      `json:"filter"`
+		Options      []*items.AggregatePublishedOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	AggregatePublishedResponse struct {
+		Result map[string]interface{} `json:"result"`
+	}
+)
diff --git a/pkg/items/transport/grpc/client.go b/pkg/items/transport/grpc/client.go
new file mode 100644
index 0000000000000000000000000000000000000000..faea7cc6703746ba91b0af0e831431ffd76044fc
--- /dev/null
+++ b/pkg/items/transport/grpc/client.go
@@ -0,0 +1,34 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	grpcerr "git.perx.ru/perxis/perxis-go/pkg/errors/grpc"
+	transport "git.perx.ru/perxis/perxis-go/pkg/items/transport"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+	grpc "google.golang.org/grpc"
+)
+
+func NewClient(conn *grpc.ClientConn, opts ...grpckit.ClientOption) transport.EndpointsSet {
+	c := NewGRPCClient(conn, "", opts...)
+	return transport.EndpointsSet{
+		CreateEndpoint:             grpcerr.ClientMiddleware(c.CreateEndpoint),
+		IntrospectEndpoint:         grpcerr.ClientMiddleware(c.IntrospectEndpoint),
+		GetEndpoint:                grpcerr.ClientMiddleware(c.GetEndpoint),
+		FindEndpoint:               grpcerr.ClientMiddleware(c.FindEndpoint),
+		UpdateEndpoint:             grpcerr.ClientMiddleware(c.UpdateEndpoint),
+		DeleteEndpoint:             grpcerr.ClientMiddleware(c.DeleteEndpoint),
+		UndeleteEndpoint:           grpcerr.ClientMiddleware(c.UndeleteEndpoint),
+		PublishEndpoint:            grpcerr.ClientMiddleware(c.PublishEndpoint),
+		UnpublishEndpoint:          grpcerr.ClientMiddleware(c.UnpublishEndpoint),
+		GetPublishedEndpoint:       grpcerr.ClientMiddleware(c.GetPublishedEndpoint),
+		FindPublishedEndpoint:      grpcerr.ClientMiddleware(c.FindPublishedEndpoint),
+		GetRevisionEndpoint:        grpcerr.ClientMiddleware(c.GetRevisionEndpoint),
+		ListRevisionsEndpoint:      grpcerr.ClientMiddleware(c.ListRevisionsEndpoint),
+		ArchiveEndpoint:            grpcerr.ClientMiddleware(c.ArchiveEndpoint),
+		FindArchivedEndpoint:       grpcerr.ClientMiddleware(c.FindArchivedEndpoint),
+		UnarchiveEndpoint:          grpcerr.ClientMiddleware(c.UnarchiveEndpoint),
+		AggregateEndpoint:          grpcerr.ClientMiddleware(c.AggregateEndpoint),
+		AggregatePublishedEndpoint: grpcerr.ClientMiddleware(c.AggregatePublishedEndpoint),
+	}
+}
diff --git a/pkg/items/transport/grpc/client.microgen.go b/pkg/items/transport/grpc/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..a91c8d16495333a356ebeb3e48100c40e0f7bc91
--- /dev/null
+++ b/pkg/items/transport/grpc/client.microgen.go
@@ -0,0 +1,145 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/items/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/items"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	grpc "google.golang.org/grpc"
+)
+
+func NewGRPCClient(conn *grpc.ClientConn, addr string, opts ...grpckit.ClientOption) transport.EndpointsSet {
+	if addr == "" {
+		addr = "content.items.Items"
+	}
+	return transport.EndpointsSet{
+		ArchiveEndpoint: grpckit.NewClient(
+			conn, addr, "Archive",
+			_Encode_Archive_Request,
+			_Decode_Archive_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		CreateEndpoint: grpckit.NewClient(
+			conn, addr, "Create",
+			_Encode_Create_Request,
+			_Decode_Create_Response,
+			pb.CreateResponse{},
+			opts...,
+		).Endpoint(),
+		DeleteEndpoint: grpckit.NewClient(
+			conn, addr, "Delete",
+			_Encode_Delete_Request,
+			_Decode_Delete_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		UndeleteEndpoint: grpckit.NewClient(
+			conn, addr, "Undelete",
+			_Encode_Undelete_Request,
+			_Decode_Undelete_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		FindArchivedEndpoint: grpckit.NewClient(
+			conn, addr, "FindArchived",
+			_Encode_FindArchived_Request,
+			_Decode_FindArchived_Response,
+			pb.FindArchivedResponse{},
+			opts...,
+		).Endpoint(),
+		FindEndpoint: grpckit.NewClient(
+			conn, addr, "Find",
+			_Encode_Find_Request,
+			_Decode_Find_Response,
+			pb.FindResponse{},
+			opts...,
+		).Endpoint(),
+		FindPublishedEndpoint: grpckit.NewClient(
+			conn, addr, "FindPublished",
+			_Encode_FindPublished_Request,
+			_Decode_FindPublished_Response,
+			pb.FindPublishedResponse{},
+			opts...,
+		).Endpoint(),
+		GetEndpoint: grpckit.NewClient(
+			conn, addr, "Get",
+			_Encode_Get_Request,
+			_Decode_Get_Response,
+			pb.GetResponse{},
+			opts...,
+		).Endpoint(),
+		GetPublishedEndpoint: grpckit.NewClient(
+			conn, addr, "GetPublished",
+			_Encode_GetPublished_Request,
+			_Decode_GetPublished_Response,
+			pb.GetPublishedResponse{},
+			opts...,
+		).Endpoint(),
+		GetRevisionEndpoint: grpckit.NewClient(
+			conn, addr, "GetRevision",
+			_Encode_GetRevision_Request,
+			_Decode_GetRevision_Response,
+			pb.GetRevisionResponse{},
+			opts...,
+		).Endpoint(),
+		IntrospectEndpoint: grpckit.NewClient(
+			conn, addr, "Introspect",
+			_Encode_Introspect_Request,
+			_Decode_Introspect_Response,
+			pb.IntrospectResponse{},
+			opts...,
+		).Endpoint(),
+		ListRevisionsEndpoint: grpckit.NewClient(
+			conn, addr, "ListRevisions",
+			_Encode_ListRevisions_Request,
+			_Decode_ListRevisions_Response,
+			pb.ListRevisionsResponse{},
+			opts...,
+		).Endpoint(),
+		PublishEndpoint: grpckit.NewClient(
+			conn, addr, "Publish",
+			_Encode_Publish_Request,
+			_Decode_Publish_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		UnarchiveEndpoint: grpckit.NewClient(
+			conn, addr, "Unarchive",
+			_Encode_Unarchive_Request,
+			_Decode_Unarchive_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		UnpublishEndpoint: grpckit.NewClient(
+			conn, addr, "Unpublish",
+			_Encode_Unpublish_Request,
+			_Decode_Unpublish_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		UpdateEndpoint: grpckit.NewClient(
+			conn, addr, "Update",
+			_Encode_Update_Request,
+			_Decode_Update_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		AggregateEndpoint: grpckit.NewClient(
+			conn, addr, "Aggregate",
+			_Encode_Aggregate_Request,
+			_Decode_Aggregate_Response,
+			pb.AggregateResponse{},
+			opts...,
+		).Endpoint(),
+		AggregatePublishedEndpoint: grpckit.NewClient(
+			conn, addr, "AggregatePublished",
+			_Encode_AggregatePublished_Request,
+			_Decode_AggregatePublished_Response,
+			pb.AggregatePublishedResponse{},
+			opts...,
+		).Endpoint(),
+	}
+}
diff --git a/pkg/items/transport/grpc/protobuf_endpoint_converters.microgen.go b/pkg/items/transport/grpc/protobuf_endpoint_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..69a696df329a6e28e5912af8815df9852c0c504c
--- /dev/null
+++ b/pkg/items/transport/grpc/protobuf_endpoint_converters.microgen.go
@@ -0,0 +1,1010 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// Please, do not change functions names!
+package transportgrpc
+
+import (
+	"context"
+	"errors"
+
+	transport "git.perx.ru/perxis/perxis-go/pkg/items/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/items"
+	empty "github.com/golang/protobuf/ptypes/empty"
+)
+
+func _Encode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*transport.CreateRequest)
+	reqItem, err := PtrItemToProto(req.Item)
+	if err != nil {
+		return nil, err
+	}
+
+	opts, err := CreateOptionsToProto(req.Opts)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateRequest{
+		Item:    reqItem,
+		Options: opts,
+	}, nil
+}
+
+func _Encode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*transport.GetRequest)
+	return &pb.GetRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		ItemId:       req.ItemId,
+		SpaceId:      req.SpaceId,
+	}, nil
+}
+
+func _Encode_Find_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindRequest")
+	}
+	req := request.(*transport.FindRequest)
+	reqFilter, err := PtrFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ElPtrFindOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		SpaceId:      req.SpaceId,
+		Options:      reqOptions,
+		Filter:       reqFilter,
+	}, nil
+}
+
+func _Encode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UpdateRequest")
+	}
+	req := request.(*transport.UpdateRequest)
+	reqItem, err := PtrItemToProto(req.Item)
+	if err != nil {
+		return nil, err
+	}
+
+	opts, err := UpdateOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.UpdateRequest{
+		Item:    reqItem,
+		Options: opts,
+	}, nil
+}
+
+func _Encode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*transport.DeleteRequest)
+
+	opts, err := DeleteOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.DeleteRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		ItemId:       req.ItemId,
+		SpaceId:      req.SpaceId,
+		Options:      opts,
+	}, nil
+}
+
+func _Encode_Undelete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UndeleteRequest")
+	}
+	req := request.(*transport.UndeleteRequest)
+	return &pb.UndeleteRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		ItemId:       req.ItemId,
+		SpaceId:      req.SpaceId,
+	}, nil
+}
+
+func _Encode_Publish_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil PublishRequest")
+	}
+	req := request.(*transport.PublishRequest)
+	reqItem, err := PtrItemToProto(req.Item)
+	if err != nil {
+		return nil, err
+	}
+
+	opts, err := PublishOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.PublishRequest{
+		Item:    reqItem,
+		Options: opts,
+	}, nil
+}
+
+func _Encode_Unpublish_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UnpublishRequest")
+	}
+	req := request.(*transport.UnpublishRequest)
+	reqItem, err := PtrItemToProto(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.UnpublishRequest{Item: reqItem}, nil
+}
+
+func _Encode_GetPublished_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetPublishedRequest")
+	}
+	req := request.(*transport.GetPublishedRequest)
+	reqOptions, err := ElPtrGetPublishedOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetPublishedRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		ItemId:       req.ItemId,
+		SpaceId:      req.SpaceId,
+		Options:      reqOptions,
+	}, nil
+}
+
+func _Encode_FindPublished_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindPublishedRequest")
+	}
+	req := request.(*transport.FindPublishedRequest)
+	reqFilter, err := PtrFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ElPtrFindPublishedOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindPublishedRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		Options:      reqOptions,
+		Filter:       reqFilter,
+		SpaceId:      req.SpaceId,
+	}, nil
+}
+
+func _Encode_GetRevision_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRevisionRequest")
+	}
+	req := request.(*transport.GetRevisionRequest)
+	return &pb.GetRevisionRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		ItemId:       req.ItemId,
+		RevisionId:   req.RevisionId,
+		SpaceId:      req.SpaceId,
+	}, nil
+}
+
+func _Encode_ListRevisions_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListRevisionsRequest")
+	}
+	req := request.(*transport.ListRevisionsRequest)
+	reqOptions, err := ElPtrListRevisionsOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.ListRevisionsRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		ItemId:       req.ItemId,
+		SpaceId:      req.SpaceId,
+		Options:      reqOptions,
+	}, nil
+}
+
+func _Encode_Archive_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ArchiveRequest")
+	}
+	req := request.(*transport.ArchiveRequest)
+	reqItem, err := PtrItemToProto(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.ArchiveRequest{Item: reqItem}, nil
+}
+
+func _Encode_FindArchived_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindArchivedRequest")
+	}
+	req := request.(*transport.FindArchivedRequest)
+	reqFilter, err := PtrFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ElPtrFindArchivedOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindArchivedRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		Options:      reqOptions,
+		Filter:       reqFilter,
+		SpaceId:      req.SpaceId,
+	}, nil
+}
+
+func _Encode_Unarchive_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UnarchiveRequest")
+	}
+	req := request.(*transport.UnarchiveRequest)
+	reqItem, err := PtrItemToProto(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.UnarchiveRequest{Item: reqItem}, nil
+}
+
+func _Encode_Aggregate_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil AggregateRequest")
+	}
+	req := request.(*transport.AggregateRequest)
+	reqFilter, err := PtrFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ElPtrAggregateOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.AggregateRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		SpaceId:      req.SpaceId,
+		Options:      reqOptions,
+		Filter:       reqFilter,
+	}, nil
+}
+
+func _Encode_AggregatePublished_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil AggregatePublishedRequest")
+	}
+	req := request.(*transport.AggregatePublishedRequest)
+	reqFilter, err := PtrFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ElPtrAggregatePublishedOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.AggregatePublishedRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		SpaceId:      req.SpaceId,
+		Options:      reqOptions,
+		Filter:       reqFilter,
+	}, nil
+}
+
+func _Encode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*transport.CreateResponse)
+	respCreated, err := PtrItemToProto(resp.Created)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateResponse{Created: respCreated}, nil
+}
+
+func _Encode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*transport.GetResponse)
+	respItem, err := PtrItemToProto(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetResponse{Item: respItem}, nil
+}
+
+func _Encode_Find_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindResponse")
+	}
+	resp := response.(*transport.FindResponse)
+	respItems, err := ListPtrItemToProto(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindResponse{
+		Items: respItems,
+		Total: int32(resp.Total),
+	}, nil
+}
+
+func _Encode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Undelete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Publish_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Unpublish_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_GetPublished_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetPublishedResponse")
+	}
+	resp := response.(*transport.GetPublishedResponse)
+	respItem, err := PtrItemToProto(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetPublishedResponse{Item: respItem}, nil
+}
+
+func _Encode_FindPublished_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindPublishedResponse")
+	}
+	resp := response.(*transport.FindPublishedResponse)
+	respItems, err := ListPtrItemToProto(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindPublishedResponse{
+		Items: respItems,
+		Total: int32(resp.Total),
+	}, nil
+}
+
+func _Encode_GetRevision_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetRevisionResponse")
+	}
+	resp := response.(*transport.GetRevisionResponse)
+	respItem, err := PtrItemToProto(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetRevisionResponse{Item: respItem}, nil
+}
+
+func _Encode_ListRevisions_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListRevisionsResponse")
+	}
+	resp := response.(*transport.ListRevisionsResponse)
+	respItems, err := ListPtrItemToProto(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.ListRevisionsResponse{Items: respItems}, nil
+}
+
+func _Encode_Archive_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_FindArchived_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindArchivedResponse")
+	}
+	resp := response.(*transport.FindArchivedResponse)
+	respItems, err := ListPtrItemToProto(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindArchivedResponse{
+		Items: respItems,
+		Total: int32(resp.Total),
+	}, nil
+}
+
+func _Encode_Unarchive_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Aggregate_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil AggregateResponse")
+	}
+	resp := response.(*transport.AggregateResponse)
+	result, err := MapStringInterfaceToProto(resp.Result)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.AggregateResponse{
+		Result: result,
+	}, nil
+}
+
+func _Encode_AggregatePublished_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil AggregateResponse")
+	}
+	resp := response.(*transport.AggregatePublishedResponse)
+	result, err := MapStringInterfaceToProto(resp.Result)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.AggregatePublishedResponse{
+		Result: result,
+	}, nil
+}
+
+func _Decode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*pb.CreateRequest)
+	reqItem, err := ProtoToPtrItem(req.Item)
+	if err != nil {
+		return nil, err
+	}
+
+	opts, err := ProtoToCreateOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateRequest{
+		Item: reqItem,
+		Opts: opts,
+	}, nil
+}
+
+func _Decode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*pb.GetRequest)
+	return &transport.GetRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		ItemId:       string(req.ItemId),
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Aggregate_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil AggregateRequest")
+	}
+	req := request.(*pb.AggregateRequest)
+	reqFilter, err := ProtoToPtrFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ProtoToPtrServicesAggregateOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.AggregateRequest{
+		SpaceId:      string(req.SpaceId),
+		EnvId:        string(req.EnvId),
+		CollectionId: string(req.CollectionId),
+		Filter:       reqFilter,
+		Options:      reqOptions,
+	}, nil
+}
+
+func _Decode_AggregatePublished_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil AggregatePublishedRequest")
+	}
+	req := request.(*pb.AggregatePublishedRequest)
+	reqFilter, err := ProtoToPtrFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ProtoToPtrServicesAggregatePublishedOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.AggregatePublishedRequest{
+		SpaceId:      string(req.SpaceId),
+		EnvId:        string(req.EnvId),
+		CollectionId: string(req.CollectionId),
+		Filter:       reqFilter,
+		Options:      reqOptions,
+	}, nil
+}
+
+func _Decode_Find_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindRequest")
+	}
+	req := request.(*pb.FindRequest)
+	reqFilter, err := ProtoToPtrFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ProtoToElPtrFindOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		Options:      reqOptions,
+		Filter:       reqFilter,
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UpdateRequest")
+	}
+	req := request.(*pb.UpdateRequest)
+	reqItem, err := ProtoToPtrItem(req.Item)
+	if err != nil {
+		return nil, err
+	}
+
+	opts, err := ProtoToUpdateOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.UpdateRequest{
+		Item:    reqItem,
+		Options: opts,
+	}, nil
+}
+
+func _Decode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*pb.DeleteRequest)
+
+	opts, err := ProtoToDeleteOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.DeleteRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		ItemId:       string(req.ItemId),
+		SpaceId:      string(req.SpaceId),
+		Options:      opts,
+	}, nil
+}
+
+func _Decode_Undelete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UndeleteRequest")
+	}
+	req := request.(*pb.UndeleteRequest)
+	return &transport.UndeleteRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		ItemId:       string(req.ItemId),
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Publish_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil PublishRequest")
+	}
+	req := request.(*pb.PublishRequest)
+	reqItem, err := ProtoToPtrItem(req.Item)
+	if err != nil {
+		return nil, err
+	}
+
+	opts, err := ProtoToPublishOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.PublishRequest{
+		Item:    reqItem,
+		Options: opts,
+	}, nil
+}
+
+func _Decode_Unpublish_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UnpublishRequest")
+	}
+	req := request.(*pb.UnpublishRequest)
+	reqItem, err := ProtoToPtrItem(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.UnpublishRequest{Item: reqItem}, nil
+}
+
+func _Decode_GetPublished_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetPublishedRequest")
+	}
+	req := request.(*pb.GetPublishedRequest)
+	reqOptions, err := ProtoToElPtrGetPublishedOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetPublishedRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		ItemId:       string(req.ItemId),
+		SpaceId:      string(req.SpaceId),
+		Options:      reqOptions,
+	}, nil
+}
+
+func _Decode_FindPublished_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindPublishedRequest")
+	}
+	req := request.(*pb.FindPublishedRequest)
+	reqFilter, err := ProtoToPtrFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ProtoToElPtrFindPublishedOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindPublishedRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		Options:      reqOptions,
+		Filter:       reqFilter,
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_GetRevision_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRevisionRequest")
+	}
+	req := request.(*pb.GetRevisionRequest)
+	return &transport.GetRevisionRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		ItemId:       string(req.ItemId),
+		RevisionId:   string(req.RevisionId),
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_ListRevisions_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListRevisionsRequest")
+	}
+	req := request.(*pb.ListRevisionsRequest)
+	reqOptions, err := ProtoToElPtrListRevisionsOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.ListRevisionsRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		ItemId:       string(req.ItemId),
+		SpaceId:      string(req.SpaceId),
+		Options:      reqOptions,
+	}, nil
+}
+
+func _Decode_Archive_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ArchiveRequest")
+	}
+	req := request.(*pb.ArchiveRequest)
+	reqItem, err := ProtoToPtrItem(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.ArchiveRequest{Item: reqItem}, nil
+}
+
+func _Decode_FindArchived_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindArchivedRequest")
+	}
+	req := request.(*pb.FindArchivedRequest)
+	reqFilter, err := ProtoToPtrFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ProtoToElPtrFindArchivedOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindArchivedRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		Options:      reqOptions,
+		Filter:       reqFilter,
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Unarchive_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UnarchiveRequest")
+	}
+	req := request.(*pb.UnarchiveRequest)
+	reqItem, err := ProtoToPtrItem(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.UnarchiveRequest{Item: reqItem}, nil
+}
+
+func _Decode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*pb.CreateResponse)
+	respCreated, err := ProtoToPtrItem(resp.Created)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateResponse{Created: respCreated}, nil
+}
+
+func _Decode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*pb.GetResponse)
+	respItem, err := ProtoToPtrItem(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetResponse{Item: respItem}, nil
+}
+
+func _Decode_Find_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindResponse")
+	}
+	resp := response.(*pb.FindResponse)
+	respItems, err := ProtoToListPtrItem(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindResponse{
+		Items: respItems,
+		Total: int(resp.Total),
+	}, nil
+}
+
+func _Decode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Undelete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Publish_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Unpublish_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_GetPublished_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetPublishedResponse")
+	}
+	resp := response.(*pb.GetPublishedResponse)
+	respItem, err := ProtoToPtrItem(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetPublishedResponse{Item: respItem}, nil
+}
+
+func _Decode_FindPublished_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindPublishedResponse")
+	}
+	resp := response.(*pb.FindPublishedResponse)
+	respItems, err := ProtoToListPtrItem(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindPublishedResponse{
+		Items: respItems,
+		Total: int(resp.Total),
+	}, nil
+}
+
+func _Decode_GetRevision_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetRevisionResponse")
+	}
+	resp := response.(*pb.GetRevisionResponse)
+	respItem, err := ProtoToPtrItem(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetRevisionResponse{Item: respItem}, nil
+}
+
+func _Decode_ListRevisions_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListRevisionsResponse")
+	}
+	resp := response.(*pb.ListRevisionsResponse)
+	respItems, err := ProtoToListPtrItem(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.ListRevisionsResponse{Items: respItems}, nil
+}
+
+func _Decode_Archive_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_FindArchived_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindArchivedResponse")
+	}
+	resp := response.(*pb.FindArchivedResponse)
+	respItems, err := ProtoToListPtrItem(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindArchivedResponse{
+		Items: respItems,
+		Total: int(resp.Total),
+	}, nil
+}
+
+func _Decode_Unarchive_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Introspect_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil IntrospectRequest")
+	}
+	req := request.(*transport.IntrospectRequest)
+	reqItem, err := PtrItemToProto(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.IntrospectRequest{
+		Item: reqItem,
+	}, nil
+}
+
+func _Encode_Introspect_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil IntrospectResponse")
+	}
+	resp := response.(*transport.IntrospectResponse)
+	respItm, err := PtrItemToProto(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	respSch, err := PtrSchemaSchemaToProto(resp.Schema)
+	if err != nil {
+		return nil, err
+	}
+	respErrors, err := ValidationErrorsToProto(resp.ValidationErrors)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.IntrospectResponse{
+		Item:             respItm,
+		Schema:           respSch,
+		ValidationErrors: respErrors,
+	}, nil
+}
+
+func _Decode_Introspect_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil IntrospectRequest")
+	}
+	req := request.(*pb.IntrospectRequest)
+	reqItem, err := ProtoToPtrItem(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.IntrospectRequest{
+		Item: reqItem,
+	}, nil
+}
+
+func _Decode_Introspect_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil IntrospectResponse")
+	}
+	resp := response.(*pb.IntrospectResponse)
+	respItm, err := ProtoToPtrItem(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	respSch, err := ProtoToPtrSchemaSchema(resp.Schema)
+	if err != nil {
+		return nil, err
+	}
+	respErrs, err := ProtoToValidationErrors(resp.ValidationErrors)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.IntrospectResponse{
+		Item:             respItm,
+		Schema:           respSch,
+		ValidationErrors: respErrs,
+	}, nil
+}
+
+func _Decode_Aggregate_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil AggregateResponse")
+	}
+	resp := response.(*pb.AggregateResponse)
+	result, err := ProtoToMapStringInterface(resp.Result)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.AggregateResponse{
+		Result: result,
+	}, nil
+}
+
+func _Decode_AggregatePublished_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil AggregatePublishedResponse")
+	}
+	resp := response.(*pb.AggregatePublishedResponse)
+	result, err := ProtoToMapStringInterface(resp.Result)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.AggregatePublishedResponse{
+		Result: result,
+	}, nil
+}
diff --git a/pkg/items/transport/grpc/protobuf_type_converters.microgen.go b/pkg/items/transport/grpc/protobuf_type_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..3cdee975fa94ca8d5c1349b7aa1fdcae835dab8d
--- /dev/null
+++ b/pkg/items/transport/grpc/protobuf_type_converters.microgen.go
@@ -0,0 +1,627 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// It is better for you if you do not change function names!
+// This file will never be overwritten.
+package transportgrpc
+
+import (
+	"fmt"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/filter"
+	service "git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+	pbcommon "git.perx.ru/perxis/perxis-go/proto/common"
+	pb "git.perx.ru/perxis/perxis-go/proto/items"
+	"git.perx.ru/perxis/perxis-go/schema"
+	jsoniter "github.com/json-iterator/go"
+	"google.golang.org/protobuf/types/known/structpb"
+)
+
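+// MapStringInterfaceToProto converts item data into a structpb.Struct;
+// nil maps pass through unchanged in both conversion directions.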
+func MapStringInterfaceToProto(data map[string]interface{}) (*structpb.Struct, error) {
+	if data == nil {
+		return nil, nil
+	}
+	return structpb.NewStruct(data)
+}
+
+func ProtoToMapStringInterface(protoData *structpb.Struct) (map[string]interface{}, error) {
+	if protoData == nil {
+		return nil, nil
+	}
+	return protoData.AsMap(), nil
+}
+
+func MapStringMapStringInterfaceToProto(translations map[string]map[string]interface{}) (map[string]*structpb.Struct, error) {
+	if translations == nil {
+		return nil, nil
+	}
+	res := make(map[string]*structpb.Struct, len(translations))
+	for k, v := range translations {
+		res[k], _ = MapStringInterfaceToProto(v)
+	}
+	return res, nil
+}
+
+func PtrPermissionsToProto(permissions *service.Permissions) (*pb.Permissions, error) {
+	if permissions == nil {
+		return nil, nil
+	}
+
+	return &pb.Permissions{
+			Edit:       permissions.Edit,
+			Archive:    permissions.Archive,
+			Publish:    permissions.Publish,
+			SoftDelete: permissions.SoftDelete,
+			HardDelete: permissions.HardDelete,
+		},
+		nil
+}
+
+func ProtoToPtrPermissions(protoPermissions *pb.Permissions) (*service.Permissions, error) {
+	if protoPermissions == nil {
+		return nil, nil
+	}
+
+	return &service.Permissions{
+			Edit:       protoPermissions.Edit,
+			Archive:    protoPermissions.Archive,
+			Publish:    protoPermissions.Publish,
+			SoftDelete: protoPermissions.SoftDelete,
+			HardDelete: protoPermissions.HardDelete,
+		},
+		nil
+}
+
+func ProtoToMapStringMapStringInterface(protoTranslations map[string]*structpb.Struct) (map[string]map[string]interface{}, error) {
+	if protoTranslations == nil {
+		return nil, nil
+	}
+	res := make(map[string]map[string]interface{}, len(protoTranslations))
+	for k, v := range protoTranslations {
+		res[k], _ = ProtoToMapStringInterface(v)
+	}
+	return res, nil
+}
+
+func PtrItemToProto(item *service.Item) (*pb.Item, error) {
+	return service.ItemToProto(item), nil
+}
+
+func ProtoToPtrItem(protoItem *pb.Item) (*service.Item, error) {
+	return service.ItemFromProto(protoItem), nil
+}
+
+func PtrFilterToProto(filter *service.Filter) (*pb.Filter, error) {
+	if filter == nil {
+		return nil, nil
+	}
+
+	dt := make([]*pbcommon.Filter, 0, len(filter.Data))
+	for _, f := range filter.Data {
+		pf := &pbcommon.Filter{
+			Op:    string(f.Op),
+			Field: f.Field,
+		}
+
+		val, err := structpb.NewValue(f.Value)
+		if err != nil {
+			return nil, err
+		}
+		pf.Value = val
+		dt = append(dt, pf)
+	}
+
+	return &pb.Filter{
+		Id:   filter.ID,
+		Data: dt,
+		Q:    filter.Q,
+	}, nil
+}
+
+func ProtoToPtrFilter(protoFilter *pb.Filter) (*service.Filter, error) {
+	if protoFilter == nil {
+		return nil, nil
+	}
+
+	dt := make([]*filter.Filter, 0, len(protoFilter.Data))
+	for _, pf := range protoFilter.Data {
+
+		f := &filter.Filter{
+			Op:    filter.Op(pf.Op),
+			Field: pf.Field,
+			Value: pf.Value.AsInterface(),
+		}
+
+		dt = append(dt, f)
+	}
+
+	return &service.Filter{
+		ID:   protoFilter.Id,
+		Data: dt,
+		Q:    protoFilter.Q,
+	}, nil
+}
+
+func PtrServicesFindOptionsToProto(opts *options.FindOptions) (*pbcommon.FindOptions, error) {
+	if opts == nil {
+		return nil, nil
+	}
+	return &pbcommon.FindOptions{
+		Sort:          opts.Sort,
+		PageNum:       int32(opts.PageNum),
+		PageSize:      int32(opts.PageSize),
+		Fields:        opts.Fields,
+		ExcludeFields: opts.ExcludeFields,
+	}, nil
+}
+
+func ProtoToPtrServicesFindOptions(protoOpts *pbcommon.FindOptions) (*options.FindOptions, error) {
+	if protoOpts == nil {
+		return nil, nil
+	}
+	return &options.FindOptions{
+		SortOptions: options.SortOptions{
+			Sort: protoOpts.Sort,
+		},
+		PaginationOptions: options.PaginationOptions{
+			PageNum:  int(protoOpts.PageNum),
+			PageSize: int(protoOpts.PageSize),
+		},
+		FieldOptions: options.FieldOptions{
+			Fields:        protoOpts.Fields,
+			ExcludeFields: protoOpts.ExcludeFields,
+		},
+	}, nil
+}
+
+func ListPtrItemToProto(items []*service.Item) ([]*pb.Item, error) {
+	protoItems := make([]*pb.Item, 0, len(items))
+	for _, itm := range items {
+		pi, err := PtrItemToProto(itm)
+		if err != nil {
+			return nil, err
+		}
+		protoItems = append(protoItems, pi)
+	}
+	return protoItems, nil
+}
+
+func ProtoToListPtrItem(protoItems []*pb.Item) ([]*service.Item, error) {
+	items := make([]*service.Item, 0, len(protoItems))
+	for _, itm := range protoItems {
+		pi, err := ProtoToPtrItem(itm)
+		if err != nil {
+			return nil, err
+		}
+		items = append(items, pi)
+	}
+	return items, nil
+}
+
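+// The option converters below follow a common pattern: a proto options message
+// decodes into a one-element slice of service options, while a slice coming
+// from the service is merged (Merge*Options) into a single message on encode.
+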
+func ProtoToCreateOptions(protoOptions *pb.CreateOptions) ([]*service.CreateOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+	return []*service.CreateOptions{
+		{UpdateAttrs: protoOptions.UpdateAttrs},
+	}, nil
+}
+
+func CreateOptionsToProto(options []*service.CreateOptions) (*pb.CreateOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeCreateOptions(options...)
+
+	return &pb.CreateOptions{
+		UpdateAttrs: opts.UpdateAttrs,
+	}, nil
+}
+
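+// Converters left without a body are microgen placeholders; they panic if
+// invoked (see the TODO markers).
+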
+func ElPtrGetOptionsToProto() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ProtoToElPtrGetOptions() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ElPtrFindOptionsToProto(options []*service.FindOptions) (*pb.FindOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeFindOptions(options...)
+
+	var err error
+
+	fo := &pb.FindOptions{
+		Deleted:   opts.Deleted,
+		Regular:   opts.Regular,
+		Hidden:    opts.Hidden,
+		Templates: opts.Templates,
+	}
+
+	fo.Options, err = PtrServicesFindOptionsToProto(&opts.FindOptions)
+	if err != nil {
+		return nil, err
+	}
+
+	return fo, nil
+}
+
+func ProtoToElPtrFindOptions(protoOptions *pb.FindOptions) ([]*service.FindOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+
+	var err error
+	fo := &service.FindOptions{
+		Deleted:   protoOptions.Deleted,
+		Regular:   protoOptions.Regular,
+		Hidden:    protoOptions.Hidden,
+		Templates: protoOptions.Templates,
+	}
+
+	o, err := ProtoToPtrServicesFindOptions(protoOptions.Options)
+	if err != nil {
+		return nil, err
+	}
+	if o != nil {
+		fo.FindOptions = *o
+	}
+
+	return []*service.FindOptions{fo}, nil
+}
+
+func ProtoToUpdateOptions(protoOptions *pb.UpdateOptions) ([]*service.UpdateOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+	return []*service.UpdateOptions{
+		{UpdateAttrs: protoOptions.UpdateAttrs},
+	}, nil
+}
+
+func UpdateOptionsToProto(options []*service.UpdateOptions) (*pb.UpdateOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeUpdateOptions(options...)
+
+	return &pb.UpdateOptions{
+		UpdateAttrs: opts.UpdateAttrs,
+	}, nil
+}
+
+func ProtoToDeleteOptions(protoOptions *pb.DeleteOptions) ([]*service.DeleteOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+	return []*service.DeleteOptions{
+		{Erase: protoOptions.Erase},
+	}, nil
+}
+
+func DeleteOptionsToProto(options []*service.DeleteOptions) (*pb.DeleteOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeDeleteOptions(options...)
+
+	return &pb.DeleteOptions{
+		Erase: opts.Erase,
+	}, nil
+}
+
+func ProtoToPublishOptions(protoOptions *pb.PublishOptions) ([]*service.PublishOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+	return []*service.PublishOptions{
+		{UpdateAttrs: protoOptions.UpdateAttrs},
+	}, nil
+}
+
+func PublishOptionsToProto(options []*service.PublishOptions) (*pb.PublishOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergePublishOptions(options...)
+
+	return &pb.PublishOptions{
+		UpdateAttrs: opts.UpdateAttrs,
+	}, nil
+}
+
+func ElPtrUnpublishOptionsToProto() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ProtoToElPtrUnpublishOptions() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ElPtrGetPublishedOptionsToProto(options []*service.GetPublishedOptions) (*pb.GetPublishedOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeGetPublishedOptions(options...)
+
+	return &pb.GetPublishedOptions{LocaleId: opts.LocaleID}, nil
+}
+
+func ProtoToElPtrGetPublishedOptions(protoOptions *pb.GetPublishedOptions) ([]*service.GetPublishedOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+
+	return []*service.GetPublishedOptions{{LocaleID: protoOptions.LocaleId}}, nil
+}
+
+func ElPtrFindPublishedOptionsToProto(options []*service.FindPublishedOptions) (*pb.FindPublishedOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeFindPublishedOptions(options...)
+
+	var err error
+
+	fo := &pb.FindPublishedOptions{
+		Regular:   opts.Regular,
+		Hidden:    opts.Hidden,
+		Templates: opts.Templates,
+	}
+	fo.Options, err = PtrServicesFindOptionsToProto(&opts.FindOptions)
+	if err != nil {
+		return nil, err
+	}
+
+	fo.LocaleId = opts.LocaleID
+
+	return fo, nil
+}
+
+func ProtoToElPtrFindPublishedOptions(protoOptions *pb.FindPublishedOptions) ([]*service.FindPublishedOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+
+	var err error
+	fo := &service.FindPublishedOptions{
+		Regular:   protoOptions.Regular,
+		Hidden:    protoOptions.Hidden,
+		Templates: protoOptions.Templates,
+	}
+
+	o, err := ProtoToPtrServicesFindOptions(protoOptions.Options)
+	if err != nil {
+		return nil, err
+	}
+	if o != nil {
+		fo.FindOptions = *o
+	}
+
+	fo.LocaleID = protoOptions.LocaleId
+
+	return []*service.FindPublishedOptions{fo}, nil
+}
+
+func ElPtrGetRevisionOptionsToProto() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ProtoToElPtrGetRevisionOptions() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ElPtrListRevisionsOptionsToProto(options []*service.ListRevisionsOptions) (*pb.ListRevisionsOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeListRevisionsOptions(options...)
+
+	var err error
+
+	fo := &pb.ListRevisionsOptions{}
+
+	fo.Options, err = PtrServicesFindOptionsToProto(&opts.FindOptions)
+	if err != nil {
+		return nil, err
+	}
+
+	return fo, nil
+}
+
+func ProtoToElPtrListRevisionsOptions(protoOptions *pb.ListRevisionsOptions) ([]*service.ListRevisionsOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+
+	var err error
+	fo := &service.ListRevisionsOptions{}
+
+	o, err := ProtoToPtrServicesFindOptions(protoOptions.Options)
+	if err != nil {
+		return nil, err
+	}
+	if o != nil {
+		fo.FindOptions = *o
+	}
+
+	return []*service.ListRevisionsOptions{fo}, nil
+}
+
+func ElPtrArchiveOptionsToProto() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ProtoToElPtrArchiveOptions() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ElPtrFindArchivedOptionsToProto(options []*service.FindArchivedOptions) (*pb.FindArchivedOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeFindArchivedOptions(options...)
+
+	var err error
+
+	fo := &pb.FindArchivedOptions{}
+
+	fo.Options, err = PtrServicesFindOptionsToProto(&opts.FindOptions)
+	if err != nil {
+		return nil, err
+	}
+
+	return fo, nil
+}
+
+func ProtoToElPtrFindArchivedOptions(protoOptions *pb.FindArchivedOptions) ([]*service.FindArchivedOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+
+	var err error
+	fo := &service.FindArchivedOptions{}
+
+	o, err := ProtoToPtrServicesFindOptions(protoOptions.Options)
+	if err != nil {
+		return nil, err
+	}
+	if o != nil {
+		fo.FindOptions = *o
+	}
+
+	return []*service.FindArchivedOptions{fo}, nil
+}
+
+func ElPtrUnarchiveOptionsToProto() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ProtoToElPtrUnarchiveOptions() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ElPtrIntrospectOptionsToProto() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ProtoToElPtrIntrospectOptions() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ProtoToPtrServicesAggregateOptions(protoOpts *pb.AggregateOptions) ([]*service.AggregateOptions, error) {
+	if protoOpts == nil {
+		return nil, nil
+	}
+	return []*service.AggregateOptions{{Fields: protoOpts.Fields}}, nil
+}
+
+func PtrServicesAggregateOptionsToProto(opts *service.AggregateOptions) (*pb.AggregateOptions, error) {
+	if opts == nil {
+		return nil, nil
+	}
+	return &pb.AggregateOptions{
+		Fields: opts.Fields,
+	}, nil
+}
+
+func ElPtrAggregateOptionsToProto(options []*service.AggregateOptions) (*pb.AggregateOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+	opts := service.MergeAggregateOptions(options...)
+	return PtrServicesAggregateOptionsToProto(opts)
+}
+
+func ProtoToPtrServicesAggregatePublishedOptions(protoOpts *pb.AggregatePublishedOptions) ([]*service.AggregatePublishedOptions, error) {
+	if protoOpts == nil {
+		return nil, nil
+	}
+	return []*service.AggregatePublishedOptions{{Fields: protoOpts.Fields}}, nil
+}
+
+func PtrServicesAggregatePublishedOptionsToProto(opts *service.AggregatePublishedOptions) (*pb.AggregatePublishedOptions, error) {
+	if opts == nil {
+		return nil, nil
+	}
+	return &pb.AggregatePublishedOptions{
+		Fields: opts.Fields,
+	}, nil
+}
+
+func ElPtrAggregatePublishedOptionsToProto(options []*service.AggregatePublishedOptions) (*pb.AggregatePublishedOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+	opts := service.MergeAggregatePublishedOptions(options...)
+	return PtrServicesAggregatePublishedOptionsToProto(opts)
+}
+
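+// PtrSchemaSchemaToProto serializes the schema to its JSON string form, which
+// is how the schema is carried in the Introspect response over gRPC.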
+func PtrSchemaSchemaToProto(sch *schema.Schema) (string, error) {
+	if sch == nil {
+		return "", nil
+	}
+	res, err := jsoniter.MarshalToString(sch)
+	if err != nil {
+		return "", err
+	}
+	return res, nil
+}
+
+func ProtoToPtrSchemaSchema(protoSch string) (*schema.Schema, error) {
+	if protoSch == "" {
+		return nil, nil
+	}
+	sch := schema.New()
+	err := sch.UnmarshalJSON([]byte(protoSch))
+	if err != nil {
+		return nil, fmt.Errorf("failed to decode schema: %w", err)
+	}
+	return sch, nil
+}
+
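+// ValidationErrorsToProto converts field-level validation errors into proto
+// FieldViolation messages; errors that are not FieldError are skipped.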
+func ValidationErrorsToProto(errs []error) ([]*pbcommon.Error_BadRequest_FieldViolation, error) {
+	if errs == nil {
+		return nil, nil
+	}
+
+	var validationErrors []*pbcommon.Error_BadRequest_FieldViolation
+	for _, err := range errs {
+
+		var fieldError errors.FieldError
+		if errors.As(err, &fieldError) {
+			validationErrors = append(validationErrors, &pbcommon.Error_BadRequest_FieldViolation{
+				Description: errors.Unwrap(fieldError).Error(),
+				Field:       fieldError.Field(),
+			})
+		}
+	}
+
+	return validationErrors, nil
+}
+
+func ProtoToValidationErrors(protoErrs []*pbcommon.Error_BadRequest_FieldViolation) ([]error, error) {
+	if protoErrs == nil {
+		return nil, nil
+	}
+
+	var validationErrors []error
+	for _, err := range protoErrs {
+		validationErrors = append(validationErrors, errors.WithField(errors.New(err.Description), err.Field))
+	}
+
+	return validationErrors, nil
+}
diff --git a/pkg/items/transport/grpc/server.go b/pkg/items/transport/grpc/server.go
new file mode 100644
index 0000000000000000000000000000000000000000..4ac8a3b02dd4991518d8b132707bb2dd0ce3c362
--- /dev/null
+++ b/pkg/items/transport/grpc/server.go
@@ -0,0 +1,34 @@
+package transportgrpc
+
+import (
+	grpcerr "git.perx.ru/perxis/perxis-go/pkg/errors/grpc"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/items/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/items"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+)
+
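+// NewServer wraps each transport endpoint of the Items service with the gRPC
+// error middleware and returns a ready-to-register pb.ItemsServer.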
+func NewServer(svc items.Items, opts ...grpckit.ServerOption) pb.ItemsServer {
+	eps := transport.Endpoints(svc)
+	eps = transport.EndpointsSet{
+		CreateEndpoint:             grpcerr.ServerMiddleware(eps.CreateEndpoint),
+		IntrospectEndpoint:         grpcerr.ServerMiddleware(eps.IntrospectEndpoint),
+		GetEndpoint:                grpcerr.ServerMiddleware(eps.GetEndpoint),
+		FindEndpoint:               grpcerr.ServerMiddleware(eps.FindEndpoint),
+		UpdateEndpoint:             grpcerr.ServerMiddleware(eps.UpdateEndpoint),
+		DeleteEndpoint:             grpcerr.ServerMiddleware(eps.DeleteEndpoint),
+		UndeleteEndpoint:           grpcerr.ServerMiddleware(eps.UndeleteEndpoint),
+		PublishEndpoint:            grpcerr.ServerMiddleware(eps.PublishEndpoint),
+		UnpublishEndpoint:          grpcerr.ServerMiddleware(eps.UnpublishEndpoint),
+		GetPublishedEndpoint:       grpcerr.ServerMiddleware(eps.GetPublishedEndpoint),
+		FindPublishedEndpoint:      grpcerr.ServerMiddleware(eps.FindPublishedEndpoint),
+		GetRevisionEndpoint:        grpcerr.ServerMiddleware(eps.GetRevisionEndpoint),
+		ListRevisionsEndpoint:      grpcerr.ServerMiddleware(eps.ListRevisionsEndpoint),
+		ArchiveEndpoint:            grpcerr.ServerMiddleware(eps.ArchiveEndpoint),
+		FindArchivedEndpoint:       grpcerr.ServerMiddleware(eps.FindArchivedEndpoint),
+		UnarchiveEndpoint:          grpcerr.ServerMiddleware(eps.UnarchiveEndpoint),
+		AggregateEndpoint:          grpcerr.ServerMiddleware(eps.AggregateEndpoint),
+		AggregatePublishedEndpoint: grpcerr.ServerMiddleware(eps.AggregatePublishedEndpoint),
+	}
+	return NewGRPCServer(&eps, opts...)
+}
diff --git a/pkg/items/transport/grpc/server.microgen.go b/pkg/items/transport/grpc/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..a904b1e5a610dc12e38768d88b92eee392a6d7af
--- /dev/null
+++ b/pkg/items/transport/grpc/server.microgen.go
@@ -0,0 +1,292 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/items/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/items"
+	grpc "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	context "golang.org/x/net/context"
+)
+
+type itemsServer struct {
+	create             grpc.Handler
+	introspect         grpc.Handler
+	get                grpc.Handler
+	find               grpc.Handler
+	update             grpc.Handler
+	delete             grpc.Handler
+	undelete           grpc.Handler
+	publish            grpc.Handler
+	unpublish          grpc.Handler
+	getPublished       grpc.Handler
+	findPublished      grpc.Handler
+	getRevision        grpc.Handler
+	listRevisions      grpc.Handler
+	archive            grpc.Handler
+	findArchived       grpc.Handler
+	unarchive          grpc.Handler
+	aggregate          grpc.Handler
+	aggregatePublished grpc.Handler
+
+	pb.UnimplementedItemsServer
+}
+
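+// NewGRPCServer builds one go-kit grpc.Handler per Items method, pairing each
+// endpoint with its request decoder and response encoder.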
+func NewGRPCServer(endpoints *transport.EndpointsSet, opts ...grpc.ServerOption) pb.ItemsServer {
+	return &itemsServer{
+		archive: grpc.NewServer(
+			endpoints.ArchiveEndpoint,
+			_Decode_Archive_Request,
+			_Encode_Archive_Response,
+			opts...,
+		),
+		create: grpc.NewServer(
+			endpoints.CreateEndpoint,
+			_Decode_Create_Request,
+			_Encode_Create_Response,
+			opts...,
+		),
+		delete: grpc.NewServer(
+			endpoints.DeleteEndpoint,
+			_Decode_Delete_Request,
+			_Encode_Delete_Response,
+			opts...,
+		),
+		undelete: grpc.NewServer(
+			endpoints.UndeleteEndpoint,
+			_Decode_Undelete_Request,
+			_Encode_Undelete_Response,
+			opts...,
+		),
+		find: grpc.NewServer(
+			endpoints.FindEndpoint,
+			_Decode_Find_Request,
+			_Encode_Find_Response,
+			opts...,
+		),
+		findArchived: grpc.NewServer(
+			endpoints.FindArchivedEndpoint,
+			_Decode_FindArchived_Request,
+			_Encode_FindArchived_Response,
+			opts...,
+		),
+		findPublished: grpc.NewServer(
+			endpoints.FindPublishedEndpoint,
+			_Decode_FindPublished_Request,
+			_Encode_FindPublished_Response,
+			opts...,
+		),
+		get: grpc.NewServer(
+			endpoints.GetEndpoint,
+			_Decode_Get_Request,
+			_Encode_Get_Response,
+			opts...,
+		),
+		getPublished: grpc.NewServer(
+			endpoints.GetPublishedEndpoint,
+			_Decode_GetPublished_Request,
+			_Encode_GetPublished_Response,
+			opts...,
+		),
+		getRevision: grpc.NewServer(
+			endpoints.GetRevisionEndpoint,
+			_Decode_GetRevision_Request,
+			_Encode_GetRevision_Response,
+			opts...,
+		),
+		introspect: grpc.NewServer(
+			endpoints.IntrospectEndpoint,
+			_Decode_Introspect_Request,
+			_Encode_Introspect_Response,
+			opts...,
+		),
+		listRevisions: grpc.NewServer(
+			endpoints.ListRevisionsEndpoint,
+			_Decode_ListRevisions_Request,
+			_Encode_ListRevisions_Response,
+			opts...,
+		),
+		publish: grpc.NewServer(
+			endpoints.PublishEndpoint,
+			_Decode_Publish_Request,
+			_Encode_Publish_Response,
+			opts...,
+		),
+		unarchive: grpc.NewServer(
+			endpoints.UnarchiveEndpoint,
+			_Decode_Unarchive_Request,
+			_Encode_Unarchive_Response,
+			opts...,
+		),
+		unpublish: grpc.NewServer(
+			endpoints.UnpublishEndpoint,
+			_Decode_Unpublish_Request,
+			_Encode_Unpublish_Response,
+			opts...,
+		),
+		update: grpc.NewServer(
+			endpoints.UpdateEndpoint,
+			_Decode_Update_Request,
+			_Encode_Update_Response,
+			opts...,
+		),
+		aggregate: grpc.NewServer(
+			endpoints.AggregateEndpoint,
+			_Decode_Aggregate_Request,
+			_Encode_Aggregate_Response,
+			opts...,
+		),
+		aggregatePublished: grpc.NewServer(
+			endpoints.AggregatePublishedEndpoint,
+			_Decode_AggregatePublished_Request,
+			_Encode_AggregatePublished_Response,
+			opts...,
+		),
+	}
+}
+
+func (S *itemsServer) Create(ctx context.Context, req *pb.CreateRequest) (*pb.CreateResponse, error) {
+	_, resp, err := S.create.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.CreateResponse), nil
+}
+
+func (S *itemsServer) Introspect(ctx context.Context, req *pb.IntrospectRequest) (*pb.IntrospectResponse, error) {
+	_, resp, err := S.introspect.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.IntrospectResponse), nil
+}
+
+func (S *itemsServer) Get(ctx context.Context, req *pb.GetRequest) (*pb.GetResponse, error) {
+	_, resp, err := S.get.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetResponse), nil
+}
+
+func (S *itemsServer) Find(ctx context.Context, req *pb.FindRequest) (*pb.FindResponse, error) {
+	_, resp, err := S.find.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.FindResponse), nil
+}
+
+func (S *itemsServer) Update(ctx context.Context, req *pb.UpdateRequest) (*empty.Empty, error) {
+	_, resp, err := S.update.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *itemsServer) Delete(ctx context.Context, req *pb.DeleteRequest) (*empty.Empty, error) {
+	_, resp, err := S.delete.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *itemsServer) Undelete(ctx context.Context, req *pb.UndeleteRequest) (*empty.Empty, error) {
+	_, resp, err := S.undelete.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *itemsServer) Publish(ctx context.Context, req *pb.PublishRequest) (*empty.Empty, error) {
+	_, resp, err := S.publish.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *itemsServer) Unpublish(ctx context.Context, req *pb.UnpublishRequest) (*empty.Empty, error) {
+	_, resp, err := S.unpublish.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *itemsServer) GetPublished(ctx context.Context, req *pb.GetPublishedRequest) (*pb.GetPublishedResponse, error) {
+	_, resp, err := S.getPublished.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetPublishedResponse), nil
+}
+
+func (S *itemsServer) FindPublished(ctx context.Context, req *pb.FindPublishedRequest) (*pb.FindPublishedResponse, error) {
+	_, resp, err := S.findPublished.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.FindPublishedResponse), nil
+}
+
+func (S *itemsServer) GetRevision(ctx context.Context, req *pb.GetRevisionRequest) (*pb.GetRevisionResponse, error) {
+	_, resp, err := S.getRevision.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetRevisionResponse), nil
+}
+
+func (S *itemsServer) ListRevisions(ctx context.Context, req *pb.ListRevisionsRequest) (*pb.ListRevisionsResponse, error) {
+	_, resp, err := S.listRevisions.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.ListRevisionsResponse), nil
+}
+
+func (S *itemsServer) Archive(ctx context.Context, req *pb.ArchiveRequest) (*empty.Empty, error) {
+	_, resp, err := S.archive.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *itemsServer) FindArchived(ctx context.Context, req *pb.FindArchivedRequest) (*pb.FindArchivedResponse, error) {
+	_, resp, err := S.findArchived.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.FindArchivedResponse), nil
+}
+
+func (S *itemsServer) Unarchive(ctx context.Context, req *pb.UnarchiveRequest) (*empty.Empty, error) {
+	_, resp, err := S.unarchive.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *itemsServer) Aggregate(ctx context.Context, req *pb.AggregateRequest) (*pb.AggregateResponse, error) {
+	_, resp, err := S.aggregate.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.AggregateResponse), nil
+}
+
+func (S *itemsServer) AggregatePublished(ctx context.Context, req *pb.AggregatePublishedRequest) (*pb.AggregatePublishedResponse, error) {
+	_, resp, err := S.aggregatePublished.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.AggregatePublishedResponse), nil
+}
diff --git a/pkg/items/transport/server.microgen.go b/pkg/items/transport/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..4ba5f4a265125ea7b7168fab8d9c7c0f747f23bf
--- /dev/null
+++ b/pkg/items/transport/server.microgen.go
@@ -0,0 +1,220 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+	"strings"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	endpoint "github.com/go-kit/kit/endpoint"
+	"github.com/hashicorp/go-multierror"
+)
+
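+// Endpoints builds the complete go-kit endpoint set backed by the Items service.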
+func Endpoints(svc items.Items) EndpointsSet {
+	return EndpointsSet{
+		ArchiveEndpoint:            ArchiveEndpoint(svc),
+		CreateEndpoint:             CreateEndpoint(svc),
+		DeleteEndpoint:             DeleteEndpoint(svc),
+		UndeleteEndpoint:           UndeleteEndpoint(svc),
+		FindArchivedEndpoint:       FindArchivedEndpoint(svc),
+		FindEndpoint:               FindEndpoint(svc),
+		FindPublishedEndpoint:      FindPublishedEndpoint(svc),
+		GetEndpoint:                GetEndpoint(svc),
+		GetPublishedEndpoint:       GetPublishedEndpoint(svc),
+		GetRevisionEndpoint:        GetRevisionEndpoint(svc),
+		IntrospectEndpoint:         IntrospectEndpoint(svc),
+		ListRevisionsEndpoint:      ListRevisionsEndpoint(svc),
+		PublishEndpoint:            PublishEndpoint(svc),
+		UnarchiveEndpoint:          UnarchiveEndpoint(svc),
+		UnpublishEndpoint:          UnpublishEndpoint(svc),
+		UpdateEndpoint:             UpdateEndpoint(svc),
+		AggregateEndpoint:          AggregateEndpoint(svc),
+		AggregatePublishedEndpoint: AggregatePublishedEndpoint(svc),
+	}
+}
+
+func CreateEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*CreateRequest)
+		res0, res1 := svc.Create(arg0, req.Item, req.Opts...)
+		return &CreateResponse{Created: res0}, res1
+	}
+}
+
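+// IntrospectEndpoint calls Items.Introspect and, when the returned error is a
+// multierror of field-level validation/modification/decode/encode errors,
+// moves those field errors into ValidationErrors and suppresses the call error.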
+func IntrospectEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*IntrospectRequest)
+		res0, res1, res2 := svc.Introspect(arg0, req.Item, req.Opts...)
+		resp := &IntrospectResponse{
+			Item:   res0,
+			Schema: res1,
+		}
+		if res2 != nil {
+
+			err := res2
+
+			var merr *multierror.Error
+			if (strings.Contains(err.Error(), "validation error") ||
+				strings.Contains(err.Error(), "modification error") ||
+				strings.Contains(err.Error(), "decode error") ||
+				strings.Contains(err.Error(), "encode error")) && errors.As(err, &merr) {
+
+				errs := make([]error, 0)
+				for _, e := range merr.WrappedErrors() {
+					var errField errors.FieldError
+					if errors.As(e, &errField) {
+						errs = append(errs, e)
+					}
+				}
+
+				if len(errs) > 0 {
+					resp.ValidationErrors = errs
+					res2 = nil
+				}
+			}
+		}
+		return resp, res2
+	}
+}
+
+func GetEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetRequest)
+		res0, res1 := svc.Get(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.Options...)
+		return &GetResponse{Item: res0}, res1
+	}
+}
+
+func FindEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*FindRequest)
+		res0, res1, res2 := svc.Find(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Filter, req.Options...)
+		return &FindResponse{
+			Items: res0,
+			Total: res1,
+		}, res2
+	}
+}
+
+func UpdateEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*UpdateRequest)
+		res0 := svc.Update(arg0, req.Item, req.Options...)
+		return &UpdateResponse{}, res0
+	}
+}
+
+func DeleteEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*DeleteRequest)
+		res0 := svc.Delete(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.Options...)
+		return &DeleteResponse{}, res0
+	}
+}
+
+func UndeleteEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*UndeleteRequest)
+		res0 := svc.Undelete(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.Options...)
+		return &UndeleteResponse{}, res0
+	}
+}
+
+func PublishEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*PublishRequest)
+		res0 := svc.Publish(arg0, req.Item, req.Options...)
+		return &PublishResponse{}, res0
+	}
+}
+
+func UnpublishEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*UnpublishRequest)
+		res0 := svc.Unpublish(arg0, req.Item, req.Options...)
+		return &UnpublishResponse{}, res0
+	}
+}
+
+func GetPublishedEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetPublishedRequest)
+		res0, res1 := svc.GetPublished(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.Options...)
+		return &GetPublishedResponse{Item: res0}, res1
+	}
+}
+
+func FindPublishedEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*FindPublishedRequest)
+		res0, res1, res2 := svc.FindPublished(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Filter, req.Options...)
+		return &FindPublishedResponse{
+			Items: res0,
+			Total: res1,
+		}, res2
+	}
+}
+
+func GetRevisionEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetRevisionRequest)
+		res0, res1 := svc.GetRevision(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.RevisionId, req.Options...)
+		return &GetRevisionResponse{Item: res0}, res1
+	}
+}
+
+func ListRevisionsEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*ListRevisionsRequest)
+		res0, res1 := svc.ListRevisions(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.Options...)
+		return &ListRevisionsResponse{Items: res0}, res1
+	}
+}
+
+func ArchiveEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*ArchiveRequest)
+		res0 := svc.Archive(arg0, req.Item, req.Options...)
+		return &ArchiveResponse{}, res0
+	}
+}
+
+func FindArchivedEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*FindArchivedRequest)
+		res0, res1, res2 := svc.FindArchived(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Filter, req.Options...)
+		return &FindArchivedResponse{
+			Items: res0,
+			Total: res1,
+		}, res2
+	}
+}
+
+func UnarchiveEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*UnarchiveRequest)
+		res0 := svc.Unarchive(arg0, req.Item, req.Options...)
+		return &UnarchiveResponse{}, res0
+	}
+}
+
+func AggregateEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*AggregateRequest)
+		res0, res1 := svc.Aggregate(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Filter, req.Options...)
+		return &AggregateResponse{
+			Result: res0,
+		}, res1
+	}
+}
+
+func AggregatePublishedEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*AggregatePublishedRequest)
+		res0, res1 := svc.AggregatePublished(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Filter, req.Options...)
+		return &AggregatePublishedResponse{
+			Result: res0,
+		}, res1
+	}
+}
diff --git a/pkg/items/util.go b/pkg/items/util.go
new file mode 100644
index 0000000000000000000000000000000000000000..e1fb4bde0ea53e3074b994f279d52c3201364a53
--- /dev/null
+++ b/pkg/items/util.go
@@ -0,0 +1,9 @@
+package items
+
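+// GetItemIDs returns the IDs of the given items, preserving their order.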
+func GetItemIDs(arr []*Item) []string {
+	res := make([]string, len(arr))
+	for i, e := range arr {
+		res[i] = e.ID
+	}
+	return res
+}