diff --git a/pkg/invitations/middleware/caching_middleware_test.go b/pkg/invitations/middleware/caching_middleware_test.go
index 852c7ddb8f1c7f03ef494fe5d31c8091aaff385a..b635ee0cd09b28245a8f29c67d7902a08392eba7 100644
--- a/pkg/invitations/middleware/caching_middleware_test.go
+++ b/pkg/invitations/middleware/caching_middleware_test.go
@@ -6,6 +6,7 @@ import (
 	"time"
 
 	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
 	"git.perx.ru/perxis/perxis-go/pkg/invitations"
 	invmocks "git.perx.ru/perxis/perxis-go/pkg/invitations/mocks"
 	"github.com/stretchr/testify/assert"
@@ -24,6 +25,8 @@ func TestLocalesCache(t *testing.T) {
 		ttl   = 20 * time.Millisecond
 	)
 
+	errNotFound := errors.NotFound(errors.New("not found"))
+
 	ctx := context.Background()
 
 	t.Run("Get from Cache", func(t *testing.T) {
@@ -59,7 +62,7 @@ func TestLocalesCache(t *testing.T) {
 			assert.Same(t, v1, v2, "Ожидается что при повторном запросе объект будет получен из кэша.")
 
 			inv.On("Accept", mock.Anything, invID, usrID).Return(nil).Once()
-			inv.On("Get", mock.Anything, invID).Return(nil, services.ErrNotFound).Once()
+			inv.On("Get", mock.Anything, invID).Return(nil, errNotFound).Once()
 
 			err = svc.Accept(ctx, invID, usrID)
 			require.NoError(t, err)
@@ -86,7 +89,7 @@ func TestLocalesCache(t *testing.T) {
 			assert.Same(t, v1, v2, "Ожидается что при повторном запросе объект будет получен из кэша.")
 
 			inv.On("Delete", mock.Anything, invID).Return(nil).Once()
-			inv.On("Get", mock.Anything, invID).Return(nil, services.ErrNotFound).Once()
+			inv.On("Get", mock.Anything, invID).Return(nil, errNotFound).Once()
 
 			err = svc.Delete(ctx, invID)
 			require.NoError(t, err)
diff --git a/pkg/items/middleware/caching_middleware.go b/pkg/items/middleware/caching_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..0455cb276c5ccae86aeb27619fe55369a735550a
--- /dev/null
+++ b/pkg/items/middleware/caching_middleware.go
@@ -0,0 +1,176 @@
+package service
+
+import (
+	"context"
+	"strings"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	envService "git.perx.ru/perxis/perxis-go/pkg/environments"
+	service "git.perx.ru/perxis/perxis-go/pkg/items"
+)
+
+func makeKey(ss ...string) string {
+	return strings.Join(ss, "-")
+}
+
+func CachingMiddleware(cache, cachePublished *cache.Cache, envs envService.Environments) Middleware {
+	return func(next service.Items) service.Items {
+		return &cachingMiddleware{
+			cache:          cache,
+			cachePublished: cachePublished,
+			Items:          next,
+			envs:           envs,
+		}
+	}
+}
+
+type cachingMiddleware struct {
+	cache          *cache.Cache
+	cachePublished *cache.Cache
+	envs           envService.Environments
+	service.Items
+}
+
+func (m cachingMiddleware) Get(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*service.GetOptions) (itm *service.Item, err error) {
+
+	value, e := m.cache.Get(makeKey(spaceId, envId, collectionId, itemId))
+	if e == nil {
+		return value.(*service.Item), nil
+	}
+	itm, err = m.Items.Get(ctx, spaceId, envId, collectionId, itemId, options...)
+	if err == nil {
+		env, err := m.envs.Get(ctx, itm.SpaceID, itm.EnvID)
+		if err != nil {
+			return nil, err
+		}
+		m.cache.Set(makeKey(itm.SpaceID, env.ID, itm.CollectionID, itm.ID), itm)
+		for _, al := range env.Aliases {
+			m.cache.Set(makeKey(itm.SpaceID, al, itm.CollectionID, itm.ID), itm)
+		}
+	}
+	return itm, err
+}
+
+func (m cachingMiddleware) Update(ctx context.Context, item *service.Item, options ...*service.UpdateOptions) (err error) {
+
+	err = m.Items.Update(ctx, item, options...)
+	if err == nil {
+		env, err := m.envs.Get(ctx, item.SpaceID, item.EnvID)
+		if err != nil {
+			return err
+		}
+		m.cache.Remove(makeKey(item.SpaceID, env.ID, item.CollectionID, item.ID))
+		m.cachePublished.Remove(makeKey(item.SpaceID, env.ID, item.CollectionID, item.ID))
+		for _, al := range env.Aliases {
+			m.cache.Remove(makeKey(item.SpaceID, al, item.CollectionID, item.ID))
+			m.cachePublished.Remove(makeKey(item.SpaceID, al, item.CollectionID, item.ID))
+		}
+	}
+	return err
+}
+
+func (m cachingMiddleware) Delete(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*service.DeleteOptions) (err error) {
+
+	err = m.Items.Delete(ctx, spaceId, envId, collectionId, itemId, options...)
+	if err == nil {
+		env, err := m.envs.Get(ctx, spaceId, envId)
+		if err != nil {
+			return err
+		}
+		m.cache.Remove(makeKey(spaceId, env.ID, collectionId, itemId))
+		m.cachePublished.Remove(makeKey(spaceId, env.ID, collectionId, itemId))
+		for _, al := range env.Aliases {
+			m.cache.Remove(makeKey(spaceId, al, collectionId, itemId))
+			m.cachePublished.Remove(makeKey(spaceId, al, collectionId, itemId))
+		}
+
+	}
+	return err
+}
+
+func (m cachingMiddleware) Publish(ctx context.Context, item *service.Item, options ...*service.PublishOptions) (err error) {
+
+	err = m.Items.Publish(ctx, item, options...)
+	if err == nil {
+		env, err := m.envs.Get(ctx, item.SpaceID, item.EnvID)
+		if err != nil {
+			return err
+		}
+		m.cache.Remove(makeKey(item.SpaceID, env.ID, item.CollectionID, item.ID))
+		m.cachePublished.Remove(makeKey(item.SpaceID, env.ID, item.CollectionID, item.ID))
+		for _, al := range env.Aliases {
+			m.cache.Remove(makeKey(item.SpaceID, al, item.CollectionID, item.ID))
+			m.cachePublished.Remove(makeKey(item.SpaceID, al, item.CollectionID, item.ID))
+		}
+	}
+	return err
+}
+
+func (m cachingMiddleware) Unpublish(ctx context.Context, item *service.Item, options ...*service.UnpublishOptions) (err error) {
+
+	err = m.Items.Unpublish(ctx, item, options...)
+	if err == nil {
+		env, err := m.envs.Get(ctx, item.SpaceID, item.EnvID)
+		if err != nil {
+			return err
+		}
+		m.cache.Remove(makeKey(item.SpaceID, env.ID, item.CollectionID, item.ID))
+		m.cachePublished.Remove(makeKey(item.SpaceID, env.ID, item.CollectionID, item.ID))
+		for _, al := range env.Aliases {
+			m.cache.Remove(makeKey(item.SpaceID, al, item.CollectionID, item.ID))
+			m.cachePublished.Remove(makeKey(item.SpaceID, al, item.CollectionID, item.ID))
+		}
+	}
+	return err
+}
+
+func (m cachingMiddleware) GetPublished(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*service.GetPublishedOptions) (itm *service.Item, err error) {
+
+	opts := service.MergeGetPublishedOptions(options...)
+
+	val, e := m.cachePublished.Get(makeKey(spaceId, envId, collectionId, itemId))
+	if e == nil {
+		value := val.(map[string]*service.Item)
+		if i, ok := value[opts.LocaleID]; ok {
+			return i, nil
+		}
+	}
+
+	itm, err = m.Items.GetPublished(ctx, spaceId, envId, collectionId, itemId, opts)
+
+	if err == nil {
+		env, err := m.envs.Get(ctx, itm.SpaceID, itm.EnvID)
+		if err != nil {
+			return nil, err
+		}
+		var value = make(map[string]*service.Item)
+		if val != nil {
+			value = val.(map[string]*service.Item)
+		}
+		value[opts.LocaleID] = itm
+		m.cachePublished.Set(makeKey(itm.SpaceID, env.ID, itm.CollectionID, itm.ID), value)
+		for _, al := range env.Aliases {
+			m.cachePublished.Set(makeKey(itm.SpaceID, al, itm.CollectionID, itm.ID), value)
+		}
+	}
+
+	return itm, err
+}
+
+func (m cachingMiddleware) Archive(ctx context.Context, item *service.Item, options ...*service.ArchiveOptions) (err error) {
+
+	err = m.Items.Archive(ctx, item, options...)
+	if err == nil {
+		env, err := m.envs.Get(ctx, item.SpaceID, item.EnvID)
+		if err != nil {
+			return err
+		}
+		m.cache.Remove(makeKey(item.SpaceID, env.ID, item.CollectionID, item.ID))
+		m.cachePublished.Remove(makeKey(item.SpaceID, env.ID, item.CollectionID, item.ID))
+		for _, al := range env.Aliases {
+			m.cache.Remove(makeKey(item.SpaceID, al, item.CollectionID, item.ID))
+			m.cachePublished.Remove(makeKey(item.SpaceID, al, item.CollectionID, item.ID))
+		}
+	}
+	return err
+}
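
Editor's note: the sketch below shows how the caching middleware added above is expected to be wired around an items.Items client, mirroring the constructor signature in this file and the test setup later in this change. The helper name newCachedItems, the size/ttl constants and the surrounding imports are illustrative assumptions, not part of this patch.

    // Sketch only. Import aliases follow caching_middleware.go:
    // service = pkg/items, envService = pkg/environments, cache = pkg/cache.
    func newCachedItems(next service.Items, envs envService.Environments) service.Items {
        const (
            size = 5                     // max entries per cache (illustrative)
            ttl  = 20 * time.Millisecond // entry lifetime (illustrative, as in the tests)
        )
        // Two separate caches: one for Get results, one for GetPublished results,
        // which are stored per locale as map[string]*service.Item.
        return CachingMiddleware(
            cache.NewCache(size, ttl),
            cache.NewCache(size, ttl),
            envs, // used to resolve environment aliases so invalidation also covers alias keys
        )(next)
    }
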
diff --git a/pkg/items/middleware/caching_middleware_test.go b/pkg/items/middleware/caching_middleware_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..9e02b49beb80507ef7fbf25d3c67d5c8a022ce1b
--- /dev/null
+++ b/pkg/items/middleware/caching_middleware_test.go
@@ -0,0 +1,685 @@
+package service
+
+import (
+	"context"
+	"testing"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	envmocks "git.perx.ru/perxis/perxis-go/pkg/environments/mocks"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	itmsmocks "git.perx.ru/perxis/perxis-go/pkg/items/mocks"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/mock"
+	"github.com/stretchr/testify/require"
+)
+
+func TestItemsCache(t *testing.T) {
+
+	const (
+		colID    = "colID"
+		spaceID  = "spaceID"
+		envID    = "envID"
+		envAlias = "envAlias"
+		itemID   = "itemID"
+		locID    = "locID"
+		size     = 5
+		ttl      = 20 * time.Millisecond
+	)
+
+	errNotFound := errors.NotFound(errors.New("not found"))
+
+	ctx := context.Background()
+
+	t.Run("Get from cache", func(t *testing.T) {
+		itms := &itmsmocks.Items{}
+		env := &envmocks.Environments{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+		env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+		itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+
+		v1, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+		require.NoError(t, err)
+
+		v2, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+		require.NoError(t, err)
+		assert.Same(t, v1, v2, "Ожидается получение объекта из кеша, при повторном запросе.")
+
+		v3, err := svc.Get(ctx, spaceID, envAlias, colID, itemID)
+		assert.Same(t, v3, v2, "Ожидается получение объекта из кеша, при запросе того же объекта по alias окружения.")
+		require.NoError(t, err)
+
+		env.AssertExpectations(t)
+		itms.AssertExpectations(t)
+	})
+
+	t.Run("Get from cache(by Alias)", func(t *testing.T) {
+		itms := &itmsmocks.Items{}
+		env := &envmocks.Environments{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+		env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+		itms.On("Get", mock.Anything, spaceID, envAlias, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+
+		v1, err := svc.Get(ctx, spaceID, envAlias, colID, itemID)
+		require.NoError(t, err)
+
+		v2, err := svc.Get(ctx, spaceID, envAlias, colID, itemID)
+		require.NoError(t, err)
+		assert.Same(t, v1, v2, "Ожидается получение объекта из кеша, при повторном запросе.")
+
+		v3, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+		assert.Same(t, v3, v2, "Ожидается получение объекта из кеша, при запросе того же объекта по ID окружения.")
+		require.NoError(t, err)
+
+		env.AssertExpectations(t)
+		itms.AssertExpectations(t)
+	})
+
+	t.Run("GetPublished from cache", func(t *testing.T) {
+		itms := &itmsmocks.Items{}
+		env := &envmocks.Environments{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+		env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+		itms.On("GetPublished", mock.Anything, spaceID, envID, colID, itemID, mock.Anything).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+
+		v1, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+		require.NoError(t, err)
+
+		v2, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+		require.NoError(t, err)
+		assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+		v3, err := svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+		require.NoError(t, err)
+		assert.Same(t, v2, v3, "Ожидается получение объекта из кеша, при запросе того же объекта по alias окружения.")
+
+		env.AssertExpectations(t)
+		itms.AssertExpectations(t)
+	})
+
+	t.Run("GetPublished from cache(by Alias)", func(t *testing.T) {
+		itms := &itmsmocks.Items{}
+		env := &envmocks.Environments{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+		env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+		itms.On("GetPublished", mock.Anything, spaceID, envAlias, colID, itemID, mock.Anything).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+
+		v1, err := svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+		require.NoError(t, err)
+
+		v2, err := svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+		require.NoError(t, err)
+		assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+		v3, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+		require.NoError(t, err)
+		assert.Same(t, v2, v3, "Ожидается получение объекта из кеша, при запросе того же объекта по ID окружения.")
+
+		env.AssertExpectations(t)
+		itms.AssertExpectations(t)
+	})
+
+	t.Run("GetPublished from cache (with different locales)", func(t *testing.T) {
+		const (
+			loc1 = "loc1"
+			loc2 = "loc2"
+		)
+
+		itms := &itmsmocks.Items{}
+		env := &envmocks.Environments{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+		env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Twice()
+		itms.On("GetPublished", mock.Anything, spaceID, envAlias, colID, itemID, mock.Anything).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+		itms.On("GetPublished", mock.Anything, spaceID, envAlias, colID, itemID, mock.Anything).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+
+		v1loc1, err := svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: loc1})
+		require.NoError(t, err, "Ожидается получение объекта из сервиса и добавление его в кеш с loc1.")
+
+		v1loc2, err := svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: loc2})
+		require.NoError(t, err, "Ожидается получение объекта из сервиса и добавление его в кеш с loc2 вместе с loc1.")
+
+		v2loc1, err := svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: loc1})
+		require.NoError(t, err)
+		assert.Same(t, v1loc1, v2loc1, "Ожидается получение объекта c локализацией loc1 из кеша.")
+
+		v2loc2, err := svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: loc2})
+		require.NoError(t, err)
+		assert.Same(t, v1loc2, v2loc2, "Ожидается получение объекта c локализацией loc2 из кеша.")
+
+		v3loc1, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: loc1})
+		require.NoError(t, err)
+		assert.Same(t, v2loc1, v3loc1, "Ожидается получение объекта c локализацией loc1 из кеша, при запросе того же объекта по ID окружения.")
+
+		v3loc2, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: loc2})
+		require.NoError(t, err)
+		assert.Same(t, v2loc2, v3loc2, "Ожидается получение объекта c локализацией loc2 из кеша, при запросе того же объекта по ID окружения.")
+
+		env.AssertExpectations(t)
+		itms.AssertExpectations(t)
+	})
+
+	t.Run("Invalidate cache", func(t *testing.T) {
+		t.Run("After Update(Get)", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, Data: map[string]interface{}{"f1": "d1"}}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+			itms.On("Update", mock.Anything, mock.Anything).Return(nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			err = svc.Update(ctx, &items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, Data: map[string]interface{}{"f1": "d2"}})
+			require.NoError(t, err)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, Data: map[string]interface{}{"f1": "d2"}}, nil).Once()
+
+			v3, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.NotSame(t, v3, v2, "Ожидается удаление объекта из кэша после обновления и получение его заново из сервиса.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Archive(Get)", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+			itms.On("Archive", mock.Anything, mock.Anything).Return(nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			err = svc.Archive(ctx, &items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft})
+			require.NoError(t, err)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateArchived}, nil).Once()
+
+			v3, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.NotSame(t, v3, v2, "Ожидается удаление объекта из кэша после архивации и получение из сервиса.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Publish(Get)", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+			itms.On("Publish", mock.Anything, mock.Anything).Return(nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			err = svc.Publish(ctx, &items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft})
+			require.NoError(t, err)
+
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished}, nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			v3, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.NotSame(t, v3, v2, "Ожидается удаление объекта из кэша после публикации и получение заново из сервиса.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Delete", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша при повторном запросе.")
+
+			v3, err := svc.Get(ctx, spaceID, envAlias, colID, itemID)
+			require.NoError(t, err)
+			assert.Same(t, v2, v3, "Ожидается получение объекта из кеша по alias окружения.")
+
+			itms.On("Delete", mock.Anything, spaceID, envID, colID, itemID).Return(nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			err = svc.Delete(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(nil, errNotFound).Once()
+			itms.On("Get", mock.Anything, spaceID, envAlias, colID, itemID).Return(nil, errNotFound).Once()
+			_, err = svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление из кэша после удаления объекта и получение ошибки от сервиса.")
+
+			_, err = svc.Get(ctx, spaceID, envAlias, colID, itemID)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление из кэша после удаления объекта и получение ошибки от сервиса.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Unpublish(Get)", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Unpublish", mock.Anything, mock.Anything).Return(nil).Once()
+
+			err = svc.Unpublish(ctx, &items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished})
+			require.NoError(t, err)
+
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			v3, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.NotSame(t, v3, v2, "Ожидается удаление объекта из кэша после снятия с публикации и получение заново из сервиса.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Publish(Get by Alias)", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envAlias, colID, itemID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша по alias окружения.")
+
+			itms.On("Publish", mock.Anything, mock.Anything).Return(nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			err = svc.Publish(ctx, &items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft})
+			require.NoError(t, err)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envAlias, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished}, nil).Once()
+
+			v3, err := svc.Get(ctx, spaceID, envAlias, colID, itemID)
+			require.NoError(t, err)
+			assert.NotSame(t, v3, v2, "Ожидается удаление объекта из кэша после публикации и получение из сервиса по alias окружения.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Update(Get by Alias)", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, Data: map[string]interface{}{"f1": "d1"}}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envAlias, colID, itemID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша по alias окружения.")
+
+			itms.On("Update", mock.Anything, mock.Anything).Return(nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			err = svc.Update(ctx, &items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, Data: map[string]interface{}{"f1": "d2"}})
+			require.NoError(t, err)
+
+			itms.On("Get", mock.Anything, spaceID, envAlias, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, Data: map[string]interface{}{"f1": "d2"}}, nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			v3, err := svc.Get(ctx, spaceID, envAlias, colID, itemID)
+			require.NoError(t, err)
+			assert.NotSame(t, v3, v2, "Ожидается удаление объекта из кэша при обновлении и получение из сервиса по alias окружения.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Unpublish(Get by Alias)", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envAlias, colID, itemID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша по alias окружения.")
+
+			itms.On("Unpublish", mock.Anything, mock.Anything).Return(nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			err = svc.Unpublish(ctx, &items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished})
+			require.NoError(t, err)
+
+			itms.On("Get", mock.Anything, spaceID, envAlias, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			v3, err := svc.Get(ctx, spaceID, envAlias, colID, itemID)
+			require.NoError(t, err)
+			assert.NotSame(t, v3, v2, "Ожидается удаление объекта из кэша после снятия с публикации и получение из сервиса по alias окружения.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Update(GetPublished)", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("GetPublished", mock.Anything, spaceID, envID, colID, itemID, mock.Anything).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished, Data: map[string]interface{}{"f1": "d1"}}, nil).Once()
+
+			v1, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+
+			v2, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+			v3, err := svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+			assert.Same(t, v2, v3, "Ожидается получение объекта из кеша по alias окружения.")
+
+			itms.On("Update", mock.Anything, mock.Anything).Return(nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			err = svc.Update(ctx, &items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished, Data: map[string]interface{}{"f1": "d2"}})
+			require.NoError(t, err)
+
+			itms.On("GetPublished", mock.Anything, spaceID, envID, colID, itemID, mock.Anything).Return(nil, errNotFound).Once()
+			itms.On("GetPublished", mock.Anything, spaceID, envAlias, colID, itemID, mock.Anything).Return(nil, errNotFound).Once()
+
+			_, err = svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление объекта из кэша по ID окружения после его обновления и получение ошибки от сервиса.")
+
+			_, err = svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление объекта из кэша по alias окружения после его обновления и получение ошибки от сервиса.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Archive(GetPublished)", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("GetPublished", mock.Anything, spaceID, envID, colID, itemID, mock.Anything).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+
+			v1, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+
+			v2, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+			v3, err := svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+			assert.Same(t, v2, v3, "Ожидается получение объекта из кеша по alias окружения.")
+
+			itms.On("Archive", mock.Anything, mock.Anything).Return(nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			err = svc.Archive(ctx, &items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft})
+			require.NoError(t, err)
+
+			itms.On("GetPublished", mock.Anything, spaceID, envID, colID, itemID, mock.Anything).Return(nil, errNotFound).Once()
+			itms.On("GetPublished", mock.Anything, spaceID, envAlias, colID, itemID, mock.Anything).Return(nil, errNotFound).Once()
+
+			_, err = svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление объекта из кэша по ID окружения после его архивации и получение ошибки от сервиса.")
+
+			_, err = svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление объекта из кэша по alias окружения после его архивации и получение ошибки от сервиса.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Delete(GetPublished)", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("GetPublished", mock.Anything, spaceID, envID, colID, itemID, mock.Anything).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished}, nil).Once()
+
+			v1, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+
+			v2, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+			v3, err := svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+			assert.Same(t, v2, v3, "Ожидается получение объекта из кеша по alias окружения.")
+
+			itms.On("Delete", mock.Anything, spaceID, envID, colID, itemID).Return(nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			err = svc.Delete(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			itms.On("GetPublished", mock.Anything, spaceID, envID, colID, itemID, mock.Anything).Return(nil, errNotFound).Once()
+			itms.On("GetPublished", mock.Anything, spaceID, envAlias, colID, itemID, mock.Anything).Return(nil, errNotFound).Once()
+
+			_, err = svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление объекта из кэша после удаления из хранилища и получение ошибки от сервиса.")
+
+			_, err = svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается очистка кеша по alias окружения после удаления объекта и получение ошибки от сервиса.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Unpublish(GetPublished)", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("GetPublished", mock.Anything, spaceID, envID, colID, itemID, mock.Anything).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished}, nil).Once()
+
+			v1, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+
+			v2, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+			v3, err := svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+			assert.Same(t, v2, v3, "Ожидается получение объекта из кеша по alias окружения.")
+
+			itms.On("Unpublish", mock.Anything, mock.Anything).Return(nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			err = svc.Unpublish(ctx, &items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished})
+			require.NoError(t, err)
+
+			itms.On("GetPublished", mock.Anything, spaceID, envID, colID, itemID, mock.Anything).Return(nil, errNotFound).Once()
+			itms.On("GetPublished", mock.Anything, spaceID, envAlias, colID, itemID, mock.Anything).Return(nil, errNotFound).Once()
+
+			_, err = svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление объекта из кэша по ID окружения после снятия с публикации и получение ошибки от сервиса.")
+
+			_, err = svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление объекта из кэша по alias окружения после снятия с публикации и получение ошибки от сервиса.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Unpublish by Alias", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			env.On("Get", mock.Anything, spaceID, envAlias).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished}, nil).Once()
+			itms.On("GetPublished", mock.Anything, spaceID, envID, colID, itemID, mock.Anything).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+			v3, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+
+			v4, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+			assert.Same(t, v3, v4, "Ожидается получение опубликованного объекта из кеша.")
+
+			itms.On("Unpublish", mock.Anything, mock.Anything).Return(nil).Once()
+			err = svc.Unpublish(ctx, &items.Item{ID: itemID, SpaceID: spaceID, EnvID: envAlias, CollectionID: colID, State: items.StatePublished})
+			require.NoError(t, err)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+			itms.On("GetPublished", mock.Anything, spaceID, envAlias, colID, itemID, mock.Anything).Return(nil, errNotFound).Once()
+
+			v5, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.NotSame(t, v5, v2, "Ожидается удаление объекта из кэша и получение заново из сервиса.")
+
+			_, err = svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление объекта из кэша и получение ошибки от сервиса.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After TTL expired", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+			time.Sleep(2 * ttl)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+
+			v3, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.NotSame(t, v2, v3, "Ожидается удаление объекта из кэша и получение из сервиса.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+	})
+}
diff --git a/pkg/items/middleware/client_encode_middleware.go b/pkg/items/middleware/client_encode_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..3aaa6b94badd9726e74d1cb3b0abbd02893f5838
--- /dev/null
+++ b/pkg/items/middleware/client_encode_middleware.go
@@ -0,0 +1,317 @@
+package service
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/collections"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+)
+
+// ClientEncodeMiddleware выполняет операции encode/decode для передаваемых данных
+func ClientEncodeMiddleware(colls collections.Collections) Middleware {
+	return func(items items.Items) items.Items {
+		return &encodeDecodeMiddleware{
+			next:  items,
+			colls: colls,
+		}
+
+	}
+}
+
+type encodeDecodeMiddleware struct {
+	next  items.Items
+	colls collections.Collections
+}
+
+func (m *encodeDecodeMiddleware) Introspect(ctx context.Context, item *items.Item, opts ...*items.IntrospectOptions) (itm *items.Item, sch *schema.Schema, err error) {
+	coll, err := m.colls.Get(ctx, item.SpaceID, item.EnvID, item.CollectionID)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	if item, err = item.Encode(ctx, coll.Schema); err != nil {
+		return
+	}
+
+	itm, sch, err = m.next.Introspect(ctx, item, opts...)
+	if itm != nil && sch != nil {
+		var err error
+		if itm, err = itm.Decode(ctx, sch); err != nil {
+			return nil, nil, err
+		}
+	}
+	return itm, sch, err
+
+}
+
+func (m *encodeDecodeMiddleware) Create(ctx context.Context, item *items.Item, opts ...*items.CreateOptions) (created *items.Item, err error) {
+
+	var col *collections.Collection
+
+	if item != nil && (item.Data != nil || item.Translations != nil) {
+
+		col, err = m.colls.Get(ctx, item.SpaceID, item.EnvID, item.CollectionID)
+		if err != nil {
+			return nil, err
+		}
+
+		if item, err = item.Encode(ctx, col.Schema); err != nil {
+			return nil, err
+		}
+	}
+
+	res, err := m.next.Create(ctx, item, opts...)
+	if err == nil && (res.Data != nil || res.Translations != nil) {
+
+		if col == nil {
+			col, err = m.colls.Get(ctx, item.SpaceID, item.EnvID, item.CollectionID)
+			if err != nil {
+				return nil, err
+			}
+		}
+
+		res, err = res.Decode(ctx, col.Schema)
+	}
+
+	return res, err
+}
+
+func (m *encodeDecodeMiddleware) Update(ctx context.Context, upd *items.Item, options ...*items.UpdateOptions) (err error) {
+	var col *collections.Collection
+	if upd != nil && (upd.Data != nil || upd.Translations != nil) {
+		col, err = m.colls.Get(ctx, upd.SpaceID, upd.EnvID, upd.CollectionID)
+		if err != nil {
+			return err
+		}
+		if upd, err = upd.Encode(ctx, col.Schema); err != nil {
+			return err
+		}
+	}
+	return m.next.Update(ctx, upd, options...)
+}
+
+func (m *encodeDecodeMiddleware) Find(ctx context.Context, spaceId, envId, collectionId string, filter *items.Filter, options ...*items.FindOptions) (items []*items.Item, total int, err error) {
+	items, total, err = m.next.Find(ctx, spaceId, envId, collectionId, filter, options...)
+	if err == nil && total > 0 {
+		col, err := m.colls.Get(ctx, spaceId, envId, collectionId)
+		if err != nil {
+			return nil, 0, err
+		}
+		for i, itm := range items {
+			itm, err = itm.Decode(ctx, col.Schema)
+			if err != nil {
+				return nil, 0, err
+			}
+
+			items[i] = itm
+		}
+	}
+	return
+}
+
+func (m *encodeDecodeMiddleware) Get(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*items.GetOptions) (item *items.Item, err error) {
+	item, err = m.next.Get(ctx, spaceId, envId, collectionId, itemId, options...)
+	if err == nil && item != nil {
+		col, err := m.colls.Get(ctx, spaceId, envId, collectionId)
+		if err != nil {
+			return nil, err
+		}
+		item, err = item.Decode(ctx, col.Schema)
+		if err != nil {
+			return nil, err
+
+		}
+	}
+	return
+}
+
+func (m *encodeDecodeMiddleware) Publish(ctx context.Context, item *items.Item, opts ...*items.PublishOptions) (err error) {
+	if item != nil && (item.Data != nil || item.Translations != nil) {
+		col, err := m.colls.Get(ctx, item.SpaceID, item.EnvID, item.CollectionID)
+		if err != nil {
+			return err
+		}
+
+		if item, err = item.Encode(ctx, col.Schema); err != nil {
+			return err
+		}
+	}
+
+	return m.next.Publish(ctx, item, opts...)
+}
+
+func (m *encodeDecodeMiddleware) Unpublish(ctx context.Context, item *items.Item, opts ...*items.UnpublishOptions) (err error) {
+	if item != nil && (item.Data != nil || item.Translations != nil) {
+		col, err := m.colls.Get(ctx, item.SpaceID, item.EnvID, item.CollectionID)
+		if err != nil {
+			return err
+		}
+
+		if item, err = item.Encode(ctx, col.Schema); err != nil {
+			return err
+		}
+	}
+
+	return m.next.Unpublish(ctx, item, opts...)
+}
+
+func (m *encodeDecodeMiddleware) GetPublished(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*items.GetPublishedOptions) (item *items.Item, err error) {
+	item, err = m.next.GetPublished(ctx, spaceId, envId, collectionId, itemId, options...)
+	if err == nil && item != nil {
+		col, err := m.colls.Get(ctx, spaceId, envId, collectionId)
+		if err != nil {
+			return nil, err
+		}
+		item, err = item.Decode(ctx, col.Schema)
+		if err != nil {
+			return nil, err
+
+		}
+	}
+	return
+}
+
+func (m *encodeDecodeMiddleware) FindPublished(ctx context.Context, spaceId, envId, collectionId string, filter *items.Filter, options ...*items.FindPublishedOptions) (items []*items.Item, total int, err error) {
+	items, total, err = m.next.FindPublished(ctx, spaceId, envId, collectionId, filter, options...)
+	if err == nil && total > 0 {
+		col, err := m.colls.Get(ctx, spaceId, envId, collectionId)
+		if err != nil {
+			return nil, 0, err
+		}
+		for i, itm := range items {
+			itm, err = itm.Decode(ctx, col.Schema)
+			if err != nil {
+				return nil, 0, err
+			}
+
+			items[i] = itm
+		}
+	}
+	return
+}
+
+func (m *encodeDecodeMiddleware) GetRevision(ctx context.Context, spaceId, envId, collectionId, itemId, revisionId string, options ...*items.GetRevisionOptions) (item *items.Item, err error) {
+	item, err = m.next.GetRevision(ctx, spaceId, envId, collectionId, itemId, revisionId, options...)
+	if err == nil && item != nil {
+		col, err := m.colls.Get(ctx, spaceId, envId, collectionId)
+		if err != nil {
+			return nil, err
+		}
+		item, err = item.Decode(ctx, col.Schema)
+		if err != nil {
+			return nil, err
+
+		}
+	}
+	return
+}
+
+func (m *encodeDecodeMiddleware) ListRevisions(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*items.ListRevisionsOptions) (items []*items.Item, err error) {
+	items, err = m.next.ListRevisions(ctx, spaceId, envId, collectionId, itemId, options...)
+	if err == nil && len(items) > 0 {
+		col, err := m.colls.Get(ctx, spaceId, envId, collectionId)
+		if err != nil {
+			return nil, err
+		}
+		for i, itm := range items {
+			itm, err = itm.Decode(ctx, col.Schema)
+			if err != nil {
+				return nil, err
+			}
+
+			items[i] = itm
+		}
+	}
+	return
+}
+
+func (m *encodeDecodeMiddleware) FindArchived(ctx context.Context, spaceId, envId, collectionId string, filter *items.Filter, options ...*items.FindArchivedOptions) (items []*items.Item, total int, err error) {
+	items, total, err = m.next.FindArchived(ctx, spaceId, envId, collectionId, filter, options...)
+	if err == nil && total > 0 {
+		col, err := m.colls.Get(ctx, spaceId, envId, collectionId)
+		if err != nil {
+			return nil, 0, err
+		}
+		for i, itm := range items {
+			itm, err = itm.Decode(ctx, col.Schema)
+			if err != nil {
+				return nil, 0, err
+			}
+
+			items[i] = itm
+		}
+	}
+	return
+}
+
+func (m *encodeDecodeMiddleware) Archive(ctx context.Context, item *items.Item, opts ...*items.ArchiveOptions) (err error) {
+	if item != nil && (item.Data != nil || item.Translations != nil) {
+		col, err := m.colls.Get(ctx, item.SpaceID, item.EnvID, item.CollectionID)
+		if err != nil {
+			return err
+		}
+
+		if item, err = item.Encode(ctx, col.Schema); err != nil {
+			return err
+		}
+	}
+
+	return m.next.Archive(ctx, item, opts...)
+}
+
+func (m *encodeDecodeMiddleware) Unarchive(ctx context.Context, item *items.Item, opts ...*items.UnarchiveOptions) (err error) {
+	if item != nil && (item.Data != nil || item.Translations != nil) {
+		col, err := m.colls.Get(ctx, item.SpaceID, item.EnvID, item.CollectionID)
+		if err != nil {
+			return err
+		}
+
+		if item, err = item.Encode(ctx, col.Schema); err != nil {
+			return err
+		}
+	}
+
+	return m.next.Unarchive(ctx, item, opts...)
+}
+
+func (m *encodeDecodeMiddleware) Delete(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*items.DeleteOptions) (err error) {
+	return m.next.Delete(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *encodeDecodeMiddleware) Undelete(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*items.UndeleteOptions) (err error) {
+	return m.next.Undelete(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *encodeDecodeMiddleware) Aggregate(ctx context.Context, spaceId, envId, collectionId string, filter *items.Filter, options ...*items.AggregateOptions) (result map[string]interface{}, err error) {
+	res, err := m.next.Aggregate(ctx, spaceId, envId, collectionId, filter, options...)
+	if len(res) > 0 && len(options) > 0 {
+		col, err := m.colls.Get(ctx, spaceId, envId, collectionId)
+		if err != nil {
+			return nil, errors.Wrap(err, "get collection")
+		}
+		o := items.MergeAggregateOptions(options...)
+		res, err = items.DecodeAggregateResult(ctx, o.Fields, res, col.Schema)
+		if err != nil {
+			return nil, errors.Wrap(err, "decode aggregate result")
+		}
+	}
+	return res, err
+}
+
+func (m *encodeDecodeMiddleware) AggregatePublished(ctx context.Context, spaceId, envId, collectionId string, filter *items.Filter, options ...*items.AggregatePublishedOptions) (result map[string]interface{}, err error) {
+	res, err := m.next.AggregatePublished(ctx, spaceId, envId, collectionId, filter, options...)
+	if len(res) > 0 && len(options) > 0 {
+		col, err := m.colls.Get(ctx, spaceId, envId, collectionId)
+		if err != nil {
+			return nil, errors.Wrap(err, "get collection")
+		}
+		o := items.MergeAggregatePublishedOptions(options...)
+		res, err = items.DecodeAggregateResult(ctx, o.Fields, res, col.Schema)
+		if err != nil {
+			return nil, errors.Wrap(err, "decode aggregate result")
+		}
+	}
+	return res, err
+}
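
Editor's note: a sketch of composing the middlewares introduced in this package (encode/decode, caching, error logging) around an items.Items client. It assumes Middleware is func(items.Items) items.Items, as the constructors imply; the helper name wrapItems, the ordering and the cache parameters are illustrative, not taken from this patch.

    // Sketch only: error logging outermost, then caching, then encode/decode,
    // so cached entries hold already-decoded items and errors from every layer are logged.
    func wrapItems(next items.Items, colls collections.Collections,
        envs environments.Environments, logger *zap.Logger) items.Items {
        for _, mw := range []Middleware{
            ClientEncodeMiddleware(colls),
            CachingMiddleware(cache.NewCache(128, time.Minute), cache.NewCache(128, time.Minute), envs),
            ErrorLoggingMiddleware(logger),
        } {
            next = mw(next)
        }
        return next
    }
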
diff --git a/pkg/items/middleware/error_logging_middleware.go b/pkg/items/middleware/error_logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..553461112586322dfddf38ccdddf31d373a4b29a
--- /dev/null
+++ b/pkg/items/middleware/error_logging_middleware.go
@@ -0,0 +1,211 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/error_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/items -i Items -t ../../../assets/templates/middleware/error_log -o error_logging_middleware.go -l ""
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"go.uber.org/zap"
+)
+
+// errorLoggingMiddleware implements items.Items that is instrumented with logging
+type errorLoggingMiddleware struct {
+	logger *zap.Logger
+	next   items.Items
+}
+
+// ErrorLoggingMiddleware instruments an implementation of the items.Items with simple logging
+func ErrorLoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next items.Items) items.Items {
+		return &errorLoggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *errorLoggingMiddleware) Aggregate(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.AggregateOptions) (result map[string]interface{}, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Aggregate(ctx, spaceId, envId, collectionId, filter, options...)
+}
+
+func (m *errorLoggingMiddleware) AggregatePublished(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.AggregatePublishedOptions) (result map[string]interface{}, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.AggregatePublished(ctx, spaceId, envId, collectionId, filter, options...)
+}
+
+func (m *errorLoggingMiddleware) Archive(ctx context.Context, item *items.Item, options ...*items.ArchiveOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Archive(ctx, item, options...)
+}
+
+func (m *errorLoggingMiddleware) Create(ctx context.Context, item *items.Item, opts ...*items.CreateOptions) (created *items.Item, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Create(ctx, item, opts...)
+}
+
+func (m *errorLoggingMiddleware) Delete(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.DeleteOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Delete(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *errorLoggingMiddleware) Find(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindOptions) (items []*items.Item, total int, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Find(ctx, spaceId, envId, collectionId, filter, options...)
+}
+
+func (m *errorLoggingMiddleware) FindArchived(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindArchivedOptions) (items []*items.Item, total int, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.FindArchived(ctx, spaceId, envId, collectionId, filter, options...)
+}
+
+func (m *errorLoggingMiddleware) FindPublished(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindPublishedOptions) (items []*items.Item, total int, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.FindPublished(ctx, spaceId, envId, collectionId, filter, options...)
+}
+
+func (m *errorLoggingMiddleware) Get(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.GetOptions) (item *items.Item, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Get(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *errorLoggingMiddleware) GetPublished(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.GetPublishedOptions) (item *items.Item, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.GetPublished(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *errorLoggingMiddleware) GetRevision(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, revisionId string, options ...*items.GetRevisionOptions) (item *items.Item, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.GetRevision(ctx, spaceId, envId, collectionId, itemId, revisionId, options...)
+}
+
+func (m *errorLoggingMiddleware) Introspect(ctx context.Context, item *items.Item, opts ...*items.IntrospectOptions) (itm *items.Item, sch *schema.Schema, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Introspect(ctx, item, opts...)
+}
+
+func (m *errorLoggingMiddleware) ListRevisions(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.ListRevisionsOptions) (items []*items.Item, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.ListRevisions(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *errorLoggingMiddleware) Publish(ctx context.Context, item *items.Item, options ...*items.PublishOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Publish(ctx, item, options...)
+}
+
+func (m *errorLoggingMiddleware) Unarchive(ctx context.Context, item *items.Item, options ...*items.UnarchiveOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Unarchive(ctx, item, options...)
+}
+
+func (m *errorLoggingMiddleware) Undelete(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.UndeleteOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Undelete(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *errorLoggingMiddleware) Unpublish(ctx context.Context, item *items.Item, options ...*items.UnpublishOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Unpublish(ctx, item, options...)
+}
+
+func (m *errorLoggingMiddleware) Update(ctx context.Context, item *items.Item, options ...*items.UpdateOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Update(ctx, item, options...)
+}
diff --git a/pkg/items/middleware/logging_middleware.go b/pkg/items/middleware/logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..ed47170e07c15f0b3d2189aa64b2a404259a4156
--- /dev/null
+++ b/pkg/items/middleware/logging_middleware.go
@@ -0,0 +1,732 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/access_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/items -i Items -t ../../../assets/templates/middleware/access_log -o logging_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/auth"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"go.uber.org/zap"
+	"go.uber.org/zap/zapcore"
+)
+
+// loggingMiddleware implements items.Items that is instrumented with logging
+type loggingMiddleware struct {
+	logger *zap.Logger
+	next   items.Items
+}
+
+// LoggingMiddleware instruments an implementation of the items.Items with simple logging
+func LoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next items.Items) items.Items {
+		return &loggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *loggingMiddleware) Aggregate(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.AggregateOptions) (result map[string]interface{}, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"filter":       filter,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Aggregate.Request", fields...)
+
+	result, err = m.next.Aggregate(ctx, spaceId, envId, collectionId, filter, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"result": result,
+		"err":    err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Aggregate.Response", fields...)
+
+	return result, err
+}
+
+func (m *loggingMiddleware) AggregatePublished(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.AggregatePublishedOptions) (result map[string]interface{}, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"filter":       filter,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("AggregatePublished.Request", fields...)
+
+	result, err = m.next.AggregatePublished(ctx, spaceId, envId, collectionId, filter, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"result": result,
+		"err":    err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("AggregatePublished.Response", fields...)
+
+	return result, err
+}
+
+func (m *loggingMiddleware) Archive(ctx context.Context, item *items.Item, options ...*items.ArchiveOptions) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"item":    item,
+		"options": options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Archive.Request", fields...)
+
+	err = m.next.Archive(ctx, item, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Archive.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Create(ctx context.Context, item *items.Item, opts ...*items.CreateOptions) (created *items.Item, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":  ctx,
+		"item": item,
+		"opts": opts} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Create.Request", fields...)
+
+	created, err = m.next.Create(ctx, item, opts...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"created": created,
+		"err":     err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Create.Response", fields...)
+
+	return created, err
+}
+
+func (m *loggingMiddleware) Delete(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.DeleteOptions) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"itemId":       itemId,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Delete.Request", fields...)
+
+	err = m.next.Delete(ctx, spaceId, envId, collectionId, itemId, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Delete.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Find(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindOptions) (items []*items.Item, total int, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"filter":       filter,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Find.Request", fields...)
+
+	items, total, err = m.next.Find(ctx, spaceId, envId, collectionId, filter, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"items": items,
+		"total": total,
+		"err":   err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Find.Response", fields...)
+
+	return items, total, err
+}
+
+func (m *loggingMiddleware) FindArchived(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindArchivedOptions) (items []*items.Item, total int, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"filter":       filter,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("FindArchived.Request", fields...)
+
+	items, total, err = m.next.FindArchived(ctx, spaceId, envId, collectionId, filter, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"items": items,
+		"total": total,
+		"err":   err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("FindArchived.Response", fields...)
+
+	return items, total, err
+}
+
+func (m *loggingMiddleware) FindPublished(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindPublishedOptions) (items []*items.Item, total int, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"filter":       filter,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("FindPublished.Request", fields...)
+
+	items, total, err = m.next.FindPublished(ctx, spaceId, envId, collectionId, filter, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"items": items,
+		"total": total,
+		"err":   err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("FindPublished.Response", fields...)
+
+	return items, total, err
+}
+
+func (m *loggingMiddleware) Get(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.GetOptions) (item *items.Item, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"itemId":       itemId,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Request", fields...)
+
+	item, err = m.next.Get(ctx, spaceId, envId, collectionId, itemId, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"item": item,
+		"err":  err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Response", fields...)
+
+	return item, err
+}
+
+func (m *loggingMiddleware) GetPublished(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.GetPublishedOptions) (item *items.Item, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"itemId":       itemId,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("GetPublished.Request", fields...)
+
+	item, err = m.next.GetPublished(ctx, spaceId, envId, collectionId, itemId, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"item": item,
+		"err":  err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("GetPublished.Response", fields...)
+
+	return item, err
+}
+
+func (m *loggingMiddleware) GetRevision(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, revisionId string, options ...*items.GetRevisionOptions) (item *items.Item, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"itemId":       itemId,
+		"revisionId":   revisionId,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("GetRevision.Request", fields...)
+
+	item, err = m.next.GetRevision(ctx, spaceId, envId, collectionId, itemId, revisionId, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"item": item,
+		"err":  err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("GetRevision.Response", fields...)
+
+	return item, err
+}
+
+func (m *loggingMiddleware) Introspect(ctx context.Context, item *items.Item, opts ...*items.IntrospectOptions) (itm *items.Item, sch *schema.Schema, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":  ctx,
+		"item": item,
+		"opts": opts} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Introspect.Request", fields...)
+
+	itm, sch, err = m.next.Introspect(ctx, item, opts...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"itm": itm,
+		"sch": sch,
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Introspect.Response", fields...)
+
+	return itm, sch, err
+}
+
+func (m *loggingMiddleware) ListRevisions(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.ListRevisionsOptions) (items []*items.Item, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"itemId":       itemId,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("ListRevisions.Request", fields...)
+
+	items, err = m.next.ListRevisions(ctx, spaceId, envId, collectionId, itemId, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"items": items,
+		"err":   err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("ListRevisions.Response", fields...)
+
+	return items, err
+}
+
+func (m *loggingMiddleware) Publish(ctx context.Context, item *items.Item, options ...*items.PublishOptions) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"item":    item,
+		"options": options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Publish.Request", fields...)
+
+	err = m.next.Publish(ctx, item, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Publish.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Unarchive(ctx context.Context, item *items.Item, options ...*items.UnarchiveOptions) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"item":    item,
+		"options": options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Unarchive.Request", fields...)
+
+	err = m.next.Unarchive(ctx, item, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Unarchive.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Undelete(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.UndeleteOptions) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"itemId":       itemId,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Undelete.Request", fields...)
+
+	err = m.next.Undelete(ctx, spaceId, envId, collectionId, itemId, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Undelete.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Unpublish(ctx context.Context, item *items.Item, options ...*items.UnpublishOptions) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"item":    item,
+		"options": options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Unpublish.Request", fields...)
+
+	err = m.next.Unpublish(ctx, item, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Unpublish.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Update(ctx context.Context, item *items.Item, options ...*items.UpdateOptions) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"item":    item,
+		"options": options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Update.Request", fields...)
+
+	err = m.next.Update(ctx, item, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Update.Response", fields...)
+
+	return err
+}
diff --git a/pkg/items/middleware/middleware.go b/pkg/items/middleware/middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..a1090fe5ad072cd42682c1a4fad8504f22136926
--- /dev/null
+++ b/pkg/items/middleware/middleware.go
@@ -0,0 +1,28 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/middleware
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/items -i Items -t ../../../assets/templates/middleware/middleware -o middleware.go -l ""
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"go.uber.org/zap"
+)
+
+type Middleware func(items.Items) items.Items
+
+func WithLog(s items.Items, logger *zap.Logger, log_access bool) items.Items {
+	if logger == nil {
+		logger = zap.NewNop()
+	}
+
+	logger = logger.Named("Items")
+	s = ErrorLoggingMiddleware(logger)(s)
+	if log_access {
+		s = LoggingMiddleware(logger)(s)
+	}
+	s = RecoveringMiddleware(logger)(s)
+	return s
+}
diff --git a/pkg/items/middleware/recovering_middleware.go b/pkg/items/middleware/recovering_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..244fc8a0638fa9ad812afeb3f919907171fb9b1e
--- /dev/null
+++ b/pkg/items/middleware/recovering_middleware.go
@@ -0,0 +1,248 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/recovery
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/items -i Items -t ../../../assets/templates/middleware/recovery -o recovering_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"go.uber.org/zap"
+)
+
+// recoveringMiddleware implements items.Items that is instrumented with panic recovery
+type recoveringMiddleware struct {
+	logger *zap.Logger
+	next   items.Items
+}
+
+// RecoveringMiddleware instruments an implementation of the items.Items with panic recovery and logging
+func RecoveringMiddleware(logger *zap.Logger) Middleware {
+	return func(next items.Items) items.Items {
+		return &recoveringMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *recoveringMiddleware) Aggregate(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.AggregateOptions) (result map[string]interface{}, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Aggregate(ctx, spaceId, envId, collectionId, filter, options...)
+}
+
+func (m *recoveringMiddleware) AggregatePublished(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.AggregatePublishedOptions) (result map[string]interface{}, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.AggregatePublished(ctx, spaceId, envId, collectionId, filter, options...)
+}
+
+func (m *recoveringMiddleware) Archive(ctx context.Context, item *items.Item, options ...*items.ArchiveOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Archive(ctx, item, options...)
+}
+
+func (m *recoveringMiddleware) Create(ctx context.Context, item *items.Item, opts ...*items.CreateOptions) (created *items.Item, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Create(ctx, item, opts...)
+}
+
+func (m *recoveringMiddleware) Delete(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.DeleteOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Delete(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *recoveringMiddleware) Find(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindOptions) (items []*items.Item, total int, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Find(ctx, spaceId, envId, collectionId, filter, options...)
+}
+
+func (m *recoveringMiddleware) FindArchived(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindArchivedOptions) (items []*items.Item, total int, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.FindArchived(ctx, spaceId, envId, collectionId, filter, options...)
+}
+
+func (m *recoveringMiddleware) FindPublished(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindPublishedOptions) (items []*items.Item, total int, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.FindPublished(ctx, spaceId, envId, collectionId, filter, options...)
+}
+
+func (m *recoveringMiddleware) Get(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.GetOptions) (item *items.Item, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Get(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *recoveringMiddleware) GetPublished(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.GetPublishedOptions) (item *items.Item, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.GetPublished(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *recoveringMiddleware) GetRevision(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, revisionId string, options ...*items.GetRevisionOptions) (item *items.Item, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.GetRevision(ctx, spaceId, envId, collectionId, itemId, revisionId, options...)
+}
+
+func (m *recoveringMiddleware) Introspect(ctx context.Context, item *items.Item, opts ...*items.IntrospectOptions) (itm *items.Item, sch *schema.Schema, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Introspect(ctx, item, opts...)
+}
+
+func (m *recoveringMiddleware) ListRevisions(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.ListRevisionsOptions) (items []*items.Item, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.ListRevisions(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *recoveringMiddleware) Publish(ctx context.Context, item *items.Item, options ...*items.PublishOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Publish(ctx, item, options...)
+}
+
+func (m *recoveringMiddleware) Unarchive(ctx context.Context, item *items.Item, options ...*items.UnarchiveOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Unarchive(ctx, item, options...)
+}
+
+func (m *recoveringMiddleware) Undelete(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.UndeleteOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Undelete(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *recoveringMiddleware) Unpublish(ctx context.Context, item *items.Item, options ...*items.UnpublishOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Unpublish(ctx, item, options...)
+}
+
+func (m *recoveringMiddleware) Update(ctx context.Context, item *items.Item, options ...*items.UpdateOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Update(ctx, item, options...)
+}