diff --git a/Makefile b/Makefile
index edffb61feca4e592fbd263981a3ebcf5ea682477..9a64367046d3ac93fdf4ec05437cfb15ffc43777 100644
--- a/Makefile
+++ b/Makefile
@@ -1,3 +1,7 @@
+.PHONY: proto logging mocks .FORCE
+
+SHELL = bash
+
 PROTODIR=perxis-proto/proto
 DSTDIR=./proto
 ALLPROTO?=$(shell find $(PROTODIR) -name '*.proto' )
@@ -6,6 +10,10 @@ PROTOFILES=	$(filter-out $(PROTODIR)/status/status.proto, $(ALLPROTO))
 PROTOGOFILES=$(PROTOFILES:.proto=.pb.go)
 PROTOGOGRPCFILES=$(PROTOFILES:.proto=_grpc.pb.go)
 
+PKGDIR=pkg
+ACCESSLOGGING=$(shell find $(PKGDIR) -name "logging_middleware.go" -type f)
+ERRORLOGGING=$(shell find $(PKGDIR) -name "error_logging_middleware.go" -type f)
+
 # Generate gRPC clients for Go
 proto: protoc-check protoc-gen-go-check $(PROTOGOFILES)
 	@echo "Generated all protobuf Go files"
@@ -33,11 +41,18 @@ ifeq (,$(wildcard $(GOPATH)/bin/protoc-gen-go))
 	or visit \"https://github.com/golang/protobuf/tree/v1.3.2#installation\" for more.\n")
 endif
 
+# Generate logging (access & error) middleware for all services. Expects `logging_middleware.go`/`error_logging_middleware.go`
+# files containing a go:generate directive with the code generation command in the services' `/pkg` directories.
+# To install the generation tool, run `go get -u github.com/hexdigest/gowrap/cmd/gowrap`
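+#
+# Example go:generate directives inside a service's `pkg/<service>/middleware/logging_middleware.go`
+# (the package name, interface name and template paths below are illustrative):
+#   //go:generate gowrap gen -p <package> -i <Interface> -t ../../../assets/templates/middleware/access_log -o logging_middleware.go
+#   //go:generate gowrap gen -p <package> -i <Interface> -t ../../../assets/templates/middleware/error_log -o error_logging_middleware.go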
+logging: $(ERRORLOGGING) $(ACCESSLOGGING)
 
+%/middleware/logging_middleware.go: .FORCE
+	@echo "$@"
+	@go generate "$@"
 
-#MICROGENFILES?=$(shell find $(SERVICESDIR) -name "service.go" -exec grep -Ril "microgen" {} \;)
-#SERVICEDIRS?=$(shell find $(SERVICESDIR) -name "service" -type d -exec dirname {} \;)
-#SERVICEFILES?=$(shell find $(SERVICESDIR) -name "service.go" -exec grep -Ril "go:generate" {} \;)
+%/middleware/error_logging_middleware.go: .FORCE
+	@echo "$@"
+	@go generate "$@"
 
 # Generate mocks for all interfaces found in the directory. Mock output files are stored in `./mocks`
 MOCKSDIRS?=$(shell find . -name "service.go" -exec dirname {} \;)
diff --git a/assets/templates/middleware/access_log b/assets/templates/middleware/access_log
new file mode 100644
index 0000000000000000000000000000000000000000..a8587b82d5a72130690a61c81e9f78e5eeb6e726
--- /dev/null
+++ b/assets/templates/middleware/access_log
@@ -0,0 +1,64 @@
+import (
+  "context"
+  "fmt"
+  "time"
+
+  "git.perx.ru/perxis/perxis-go/pkg/auth"
+  "go.uber.org/zap"
+  "go.uber.org/zap/zapcore"
+)
+
+{{ $funcName := (or .Vars.FuncName ("LoggingMiddleware")) }}
+{{ $decorator := (or .Vars.DecoratorName ("loggingMiddleware")) }}
+
+// {{$decorator}} implements {{.Interface.Type}} that is instrumented with logging
+type {{$decorator}} struct {
+  logger *zap.Logger
+  next {{.Interface.Type}}
+}
+
+// {{$funcName}} instruments an implementation of the {{.Interface.Type}} with simple logging
+func {{$funcName}}(logger *zap.Logger) Middleware {
+  return func(next {{.Interface.Type}}) {{.Interface.Type}} {
+    return &{{$decorator}}{
+      next: next,
+      logger: logger,
+    }
+  }
+}
+
+{{range $method := .Interface.Methods}}
+    func (m *{{$decorator}}) {{$method.Declaration}} {
+        begin := time.Now()
+        {{- if $method.HasParams}}
+        var fields []zapcore.Field
+        for k, v := range {{$method.ParamsMap}} {
+            if k == "ctx" {
+			    fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+                continue
+            }
+            fields = append(fields, zap.Reflect(k,v))
+        }
+        {{end}}
+
+		m.logger.Debug("{{$method.Name}}.Request",fields...)
+
+        {{ $method.ResultsNames }} = m.next.{{ $method.Call }}
+
+        fields = []zapcore.Field{
+			zap.Duration("time", time.Since(begin)),
+			zap.Error(err),
+        }
+
+        {{ if $method.HasResults}}
+        for k, v := range {{$method.ResultsMap}} {
+            if k == "err" {
+                continue
+            }
+            fields = append(fields, zap.Reflect(k,v))
+        }
+        {{end}}
+
+		m.logger.Debug("{{$method.Name}}.Response", fields...)
+
+        return {{ $method.ResultsNames }}
+    }
+{{end}}
diff --git a/assets/templates/middleware/error_log b/assets/templates/middleware/error_log
new file mode 100755
index 0000000000000000000000000000000000000000..9455e907b738801eb7f2d43d428d98cc620370a0
--- /dev/null
+++ b/assets/templates/middleware/error_log
@@ -0,0 +1,40 @@
+import (
+  "go.uber.org/zap"
+)
+
+{{ $funcName := (or .Vars.FuncName ("ErrorLoggingMiddleware")) }}
+{{ $decorator := (or .Vars.DecoratorName ("errorLoggingMiddleware")) }}
+
+// {{$decorator}} implements {{.Interface.Type}} that is instrumented with logging
+type {{$decorator}} struct {
+  logger *zap.Logger
+  next {{.Interface.Type}}
+}
+
+// {{$funcName}} instruments an implementation of the {{.Interface.Type}} with simple logging
+func {{$funcName}}(logger *zap.Logger) Middleware {
+  return func(next {{.Interface.Type}}) {{.Interface.Type}} {
+    return &{{$decorator}}{
+      next: next,
+      logger: logger,
+    }
+  }
+}
+
+{{range $method := .Interface.Methods}}
+    func (m *{{$decorator}}) {{$method.Declaration}} {
+        logger := m.logger
+        {{- if $method.ReturnsError}}
+            defer func() {
+                if err != nil {
+      		        logger.Warn("response error", zap.Error(err))
+      		    }
+      	    }()
+        {{end -}}
+
+        {{ $method.Pass "m.next." }}
+    }
+{{end}}
diff --git a/assets/templates/middleware/middleware b/assets/templates/middleware/middleware
new file mode 100755
index 0000000000000000000000000000000000000000..89877774c933840c2bdd569f2beed8105588aae2
--- /dev/null
+++ b/assets/templates/middleware/middleware
@@ -0,0 +1,21 @@
+import (
+	"go.uber.org/zap"
+)
+
+type Middleware func({{.Interface.Type}}) {{.Interface.Type}}
+
+func WithLog(s {{.Interface.Type}}, logger *zap.Logger, logAccess bool) {{.Interface.Type}} {
+	if logger == nil {
+		logger = zap.NewNop()
+	}
+
+	logger = logger.Named("{{ .Interface.Name }}")
+	s = ErrorLoggingMiddleware(logger)(s)
+	if logAccess {
+		s = LoggingMiddleware(logger)(s)
+	}
+	s = RecoveringMiddleware(logger)(s)
+	return s
+}
+
diff --git a/assets/templates/middleware/recovery b/assets/templates/middleware/recovery
new file mode 100644
index 0000000000000000000000000000000000000000..a84fa3f913e885a1c9b8f1ed71848856137a92fe
--- /dev/null
+++ b/assets/templates/middleware/recovery
@@ -0,0 +1,38 @@
+import (
+	"fmt"
+
+	"go.uber.org/zap"
+)
+
+{{ $funcName := (or .Vars.FuncName ("RecoveringMiddleware")) }}
+{{ $decorator := (or .Vars.DecoratorName ("recoveringMiddleware")) }}
+
+// {{$decorator}} implements {{.Interface.Type}} that is instrumented with logging
+type {{$decorator}} struct {
+  logger *zap.Logger
+  next {{.Interface.Type}}
+}
+
+// {{$funcName}} instruments an implementation of the {{.Interface.Type}} with simple logging
+func {{$funcName}}(logger *zap.Logger) Middleware {
+  return func(next {{.Interface.Type}}) {{.Interface.Type}} {
+    return &{{$decorator}}{
+      next: next,
+      logger: logger,
+    }
+  }
+}
+
+{{range $method := .Interface.Methods}}
+func (m *{{$decorator}}) {{$method.Declaration}} {
+    logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			{{- if $method.ReturnsError}}
+			err = fmt.Errorf("%v", r)
+			{{end -}}
+		}
+	}()
+
+	{{ $method.Pass "m.next." }}
+}
+{{end}}
\ No newline at end of file
diff --git a/go.mod b/go.mod
index f53323bb290e119feda2d24ebe935d02cfdc9e38..b3222c89ed7a4bd799b2d3ef72a286cec3d79ecc 100644
--- a/go.mod
+++ b/go.mod
@@ -9,30 +9,57 @@ require (
 	github.com/gosimple/slug v1.13.1
 	github.com/hashicorp/go-multierror v1.1.1
 	github.com/hashicorp/golang-lru v0.5.4
+	github.com/json-iterator/go v1.1.12
+	github.com/mitchellh/mapstructure v1.4.2
+	github.com/nats-io/nats.go v1.23.0
 	github.com/pkg/errors v0.9.1
 	github.com/rs/xid v1.4.0
-	github.com/stretchr/testify v1.8.0
+	github.com/stretchr/testify v1.8.2
 	go.mongodb.org/mongo-driver v1.11.4
+	go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.40.0
 	go.uber.org/zap v1.19.1
-	golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d
-	golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2
-	google.golang.org/grpc v1.45.0
-	google.golang.org/protobuf v1.28.0
+	golang.org/x/crypto v0.8.0
+	golang.org/x/net v0.9.0
+	golang.org/x/oauth2 v0.4.0
+	google.golang.org/grpc v1.54.0
+	google.golang.org/protobuf v1.28.1
 	gopkg.in/yaml.v3 v3.0.1
 )
 
 require (
+	cloud.google.com/go/compute v1.15.1 // indirect
+	cloud.google.com/go/compute/metadata v0.2.3 // indirect
 	github.com/davecgh/go-spew v1.1.1 // indirect
 	github.com/go-kit/log v0.2.0 // indirect
 	github.com/go-logfmt/logfmt v0.5.1 // indirect
-	github.com/google/go-cmp v0.5.7 // indirect
+	github.com/go-logr/logr v1.2.3 // indirect
+	github.com/go-logr/stdr v1.2.2 // indirect
+	github.com/golang/snappy v0.0.1 // indirect
 	github.com/gosimple/unidecode v1.0.1 // indirect
 	github.com/hashicorp/errwrap v1.0.0 // indirect
+	github.com/klauspost/compress v1.13.6 // indirect
+	github.com/kr/pretty v0.3.0 // indirect
+	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
+	github.com/modern-go/reflect2 v1.0.2 // indirect
+	github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe // indirect
+	github.com/nats-io/nkeys v0.3.0 // indirect
+	github.com/nats-io/nuid v1.0.1 // indirect
 	github.com/pmezard/go-difflib v1.0.0 // indirect
-	github.com/stretchr/objx v0.4.0 // indirect
+	github.com/rogpeppe/go-internal v1.8.1 // indirect
+	github.com/stretchr/objx v0.5.0 // indirect
+	github.com/xdg-go/pbkdf2 v1.0.0 // indirect
+	github.com/xdg-go/scram v1.1.1 // indirect
+	github.com/xdg-go/stringprep v1.0.3 // indirect
+	github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d // indirect
+	go.opentelemetry.io/otel v1.14.0 // indirect
+	go.opentelemetry.io/otel/metric v0.37.0 // indirect
+	go.opentelemetry.io/otel/trace v1.14.0 // indirect
 	go.uber.org/atomic v1.9.0 // indirect
 	go.uber.org/multierr v1.7.0 // indirect
-	golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac // indirect
-	golang.org/x/text v0.3.7 // indirect
-	google.golang.org/genproto v0.0.0-20210917145530-b395a37504d4 // indirect
+	golang.org/x/sync v0.1.0 // indirect
+	golang.org/x/sys v0.7.0 // indirect
+	golang.org/x/text v0.9.0 // indirect
+	google.golang.org/appengine v1.6.7 // indirect
+	google.golang.org/genproto v0.0.0-20230110181048-76db0878b65f // indirect
+	gopkg.in/yaml.v2 v2.3.0 // indirect
 )
diff --git a/go.sum b/go.sum
index 9dea84ab8c222f13374d0a59a405fa290feed61a..b7913ca085331be894d8e2a4e08840b4a41565a7 100644
--- a/go.sum
+++ b/go.sum
@@ -1,135 +1,137 @@
-cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
-cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
-github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
+cloud.google.com/go/compute v1.15.1 h1:7UGq3QknM33pw5xATlpzeoomNxsacIVvTqTTvbfajmE=
+cloud.google.com/go/compute v1.15.1/go.mod h1:bjjoF/NtFUrkD/urWfdHaKuOPDR5nWIs63rR+SXhcpA=
+cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY=
+cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA=
 github.com/DATA-DOG/go-sqlmock v1.3.3/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
-github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
-github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
 github.com/antonmedv/expr v1.9.0 h1:j4HI3NHEdgDnN9p6oI6Ndr0G5QryMY0FNxT4ONrFDGU=
 github.com/antonmedv/expr v1.9.0/go.mod h1:5qsM3oLGDND7sDmQGDXHkYfkjYMUX14qsgqmHhwGEk8=
 github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=
 github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
-github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
-github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
-github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
-github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
-github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
-github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
-github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI=
-github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
-github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
-github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
-github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
+github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
 github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
-github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
-github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
-github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
-github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ=
-github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0=
-github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
 github.com/gdamore/encoding v1.0.0/go.mod h1:alR0ol34c49FCSBLjhosxzcPHQbf2trDkoo5dl+VrEg=
 github.com/gdamore/tcell v1.3.0/go.mod h1:Hjvr+Ofd+gLglo7RYKxxnzCBmev3BzsS67MebKS4zMM=
-github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
 github.com/go-kit/kit v0.12.0 h1:e4o3o3IsBfAKQh5Qbbiqyfu97Ku7jrO/JbohvztANh4=
 github.com/go-kit/kit v0.12.0/go.mod h1:lHd+EkCZPIwYItmGDDRdhinkzX2A1sj+M9biaEaizzs=
 github.com/go-kit/log v0.2.0 h1:7i2K3eKTos3Vc0enKCfnVcgHh2olr/MyfboYq7cAcFw=
 github.com/go-kit/log v0.2.0/go.mod h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0=
 github.com/go-logfmt/logfmt v0.5.1 h1:otpy5pqBCBZ1ng9RQ0dPu4PN7ba75Y/aA+UpowDyNVA=
 github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs=
-github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
-github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
-github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
-github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
-github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
-github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
-github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
-github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
-github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
-github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
-github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
+github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
+github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0=
+github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
+github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
+github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
+github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
 github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
 github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=
 github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
+github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4=
 github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
-github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
-github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
-github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
-github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o=
-github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
-github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
+github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
 github.com/gosimple/slug v1.13.1 h1:bQ+kpX9Qa6tHRaK+fZR0A0M2Kd7Pa5eHPPsb1JpHD+Q=
 github.com/gosimple/slug v1.13.1/go.mod h1:UiRaFH+GEilHstLUmcBgWcI42viBN7mAb818JrYOeFQ=
 github.com/gosimple/unidecode v1.0.1 h1:hZzFTMMqSswvf0LBJZCZgThIZrpDHFXux9KeGmn6T/o=
 github.com/gosimple/unidecode v1.0.1/go.mod h1:CP0Cr1Y1kogOtx0bJblKzsVWrqYaqfNOnHzpgWw4Awc=
-github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
 github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA=
 github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
 github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
 github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
 github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc=
 github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
+github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
+github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
+github.com/klauspost/compress v1.13.6 h1:P76CopJELS0TiO2mebmnzgWaajssP/EszplttgQxcgc=
 github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
-github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
-github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
 github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
 github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
+github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
+github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
 github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
 github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
-github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
-github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
+github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
 github.com/lucasb-eyer/go-colorful v1.0.2/go.mod h1:0MS4r+7BZKSJ5mw4/S5MPN+qHFF1fYclkSPilDOKW0s=
 github.com/lucasb-eyer/go-colorful v1.0.3/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
 github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
 github.com/mattn/go-runewidth v0.0.8/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
+github.com/minio/highwayhash v1.0.2 h1:Aak5U0nElisjDCfPSG79Tgzkn2gl66NxOMspRrKnA/g=
+github.com/mitchellh/mapstructure v1.4.2 h1:6h7AQ0yhTcIsmFmnAwQls75jp2Gzs4iB8W7pjMO+rqo=
+github.com/mitchellh/mapstructure v1.4.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
+github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
+github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
+github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe h1:iruDEfMl2E6fbMZ9s0scYfZQ84/6SPL6zC8ACM2oIL0=
 github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
+github.com/nats-io/jwt/v2 v2.0.3 h1:i/O6cmIsjpcQyWDYNcq2JyZ3/VTF8SJ4JWluI5OhpvI=
+github.com/nats-io/nats-server/v2 v2.5.0 h1:wsnVaaXH9VRSg+A2MVg5Q727/CqxnmPLGFQ3YZYKTQg=
+github.com/nats-io/nats.go v1.23.0 h1:lR28r7IX44WjYgdiKz9GmUeW0uh/m33uD3yEjLZ2cOE=
+github.com/nats-io/nats.go v1.23.0/go.mod h1:ki/Scsa23edbh8IRZbCuNXR9TDcbvfaSijKtaqQgw+Q=
+github.com/nats-io/nkeys v0.3.0 h1:cgM5tL53EvYRU+2YLXIK0G2mJtK12Ft9oeooSZMA2G8=
+github.com/nats-io/nkeys v0.3.0/go.mod h1:gvUNGjVcM2IPr5rCsRsC6Wb3Hr2CQAm08dsxtV6A5y4=
+github.com/nats-io/nuid v1.0.1 h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw=
+github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c=
+github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
 github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
 github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
 github.com/rivo/tview v0.0.0-20200219210816-cd38d7432498/go.mod h1:6lkG1x+13OShEf0EaOCaTQYyB7d5nSbb181KtjlS+84=
 github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
-github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
+github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
+github.com/rogpeppe/go-internal v1.8.1 h1:geMPLpDpQOgVyCg5z5GoRwLHepNdb71NXb67XFkP+Eg=
+github.com/rogpeppe/go-internal v1.8.1/go.mod h1:JeRgkft04UBgHMgCIwADu4Pn6Mtm5d4nPKWu0nJ5d+o=
 github.com/rs/xid v1.4.0 h1:qd7wPTDkN6KQx2VmMBLrpHkiyQwgFXRnkOLacUiaSNY=
 github.com/rs/xid v1.4.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
 github.com/sanity-io/litter v1.2.0/go.mod h1:JF6pZUFgu2Q0sBZ+HSV35P8TVPI1TTzEwyu9FXAw2W4=
-github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
-github.com/stretchr/objx v0.4.0 h1:M2gUjqZET1qApGOWNSnZ49BAIMX4F/1plDv3+l31EJ4=
 github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
+github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c=
+github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
 github.com/stretchr/testify v0.0.0-20161117074351-18a02ba4a312/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
 github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
 github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
 github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
 github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
 github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
-github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk=
 github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
+github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8=
+github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
 github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4=
 github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
+github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c=
 github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=
+github.com/xdg-go/scram v1.1.1 h1:VOMT+81stJgXW3CpHyqHN3AXDYIMsx56mEFrB37Mb/E=
 github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g=
+github.com/xdg-go/stringprep v1.0.3 h1:kdwGpVNwPFtjs98xCGkHjQtGKh86rDcRZN17QEMCOIs=
 github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8=
+github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d h1:splanxYIlg+5LfHAM6xpdFEAYOk8iySO56hMFq6uLyA=
 github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA=
 github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
 go.mongodb.org/mongo-driver v1.11.4 h1:4ayjakA013OdpGyL2K3ZqylTac/rMjrJOMZ1EHizXas=
 go.mongodb.org/mongo-driver v1.11.4/go.mod h1:PTSz5yu21bkT/wXpkS7WR5f0ddqw5quethTUn9WM+2g=
-go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
+go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.40.0 h1:5jD3teb4Qh7mx/nfzq4jO2WFFpvXD0vYWFDrdvNWmXk=
+go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.40.0/go.mod h1:UMklln0+MRhZC4e3PwmN3pCtq4DyIadWw4yikh6bNrw=
+go.opentelemetry.io/otel v1.14.0 h1:/79Huy8wbf5DnIPhemGB+zEPVwnN6fuQybr/SRXa6hM=
+go.opentelemetry.io/otel v1.14.0/go.mod h1:o4buv+dJzx8rohcUeRmWUZhqupFvzWis188WlggnNeU=
+go.opentelemetry.io/otel/metric v0.37.0 h1:pHDQuLQOZwYD+Km0eb657A25NaRzy0a+eLyKfDXedEs=
+go.opentelemetry.io/otel/metric v0.37.0/go.mod h1:DmdaHfGt54iV6UKxsV9slj2bBRJcKC1B1uvDLIioc1s=
+go.opentelemetry.io/otel/trace v1.14.0 h1:wp2Mmvj41tDsyAJXiWDWpfNsOiIyd38fy85pyKcFq/M=
+go.opentelemetry.io/otel/trace v1.14.0/go.mod h1:8avnQLK+CG77yNLUae4ea2JDQ6iT+gozhnZjy/rw9G8=
 go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
 go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE=
 go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
@@ -142,114 +144,75 @@ go.uber.org/zap v1.19.1 h1:ue41HOKd1vGURxrmeKIgELGb3jPW9DMUDGtsinblHwI=
 go.uber.org/zap v1.19.1/go.mod h1:j3DNczoxDZroyBnOT1L/Q79cfUMGZxlv/9dzN7SM1rI=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
-golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
-golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d h1:sK3txAijHtOK88l68nt020reeT1ZdKLIYetKl95FzVY=
+golang.org/x/crypto v0.0.0-20210314154223-e6e6c4f2bb5b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
 golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
-golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
-golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
-golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
-golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/crypto v0.8.0 h1:pd9TJtTueMTVQXzk8E2XESSMQDj/U7OUu0PqJqPXQjQ=
+golang.org/x/crypto v0.8.0/go.mod h1:mRqEX+O9/h5TFCrQhkgjo2yKi0yYA+9ecGkdQoHrywE=
 golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
-golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
-golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
 golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
-golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
 golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
 golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
 golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
-golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2 h1:CIJ76btIcR3eFI5EgSo6k1qKw9KJexJuRLI9G7Hp5wE=
 golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
-golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
-golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
-golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/net v0.9.0 h1:aWJ/m6xSmxWBx+V0XRHTlrYrPG56jKsLdTFmsSsCzOM=
+golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
+golang.org/x/oauth2 v0.4.0 h1:NF0gk8LVPg1Ml7SSbGyySuoxdsXitj7TvgvuRxIMc/M=
+golang.org/x/oauth2 v0.4.0/go.mod h1:RznEsdpjGAINPTOF0UH/t+xJ75L18YO3Ho6Pyn+uRec=
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o=
+golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190626150813-e07cf5db2756/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac h1:oN6lz7iLW/YC7un8pq+9bOLyXrprv2+DKfkJY+2LJJw=
-golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.7.0 h1:3jlCCIQZPdOYu1h8BkNvLz8Kgwtae2cagcG/VamtZRU=
+golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
 golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
 golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE=
+golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
+golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac h1:7zkz7BUtwNFFqcowJ+RIgu2MaV/MapERkDIy+mwPyjs=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
-golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
-golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
 golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
-golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
 golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
 golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
-golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
 golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
-google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
-google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
-google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
-google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
-google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
-google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
-google.golang.org/genproto v0.0.0-20210917145530-b395a37504d4 h1:ysnBoUyeL/H6RCvNRhWHjKoDEmguI+mPU+qHgK8qv/w=
-google.golang.org/genproto v0.0.0-20210917145530-b395a37504d4/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
-google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
-google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
-google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
-google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
-google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0=
-google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
-google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34=
-google.golang.org/grpc v1.45.0 h1:NEpgUqV3Z+ZjkqMsxMg11IaDrXY4RY6CQukSGK0uI1M=
-google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ=
-google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
-google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
-google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
-google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
-google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
-google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
-google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
-google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
-google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
+google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c=
+google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
+google.golang.org/genproto v0.0.0-20230110181048-76db0878b65f h1:BWUVssLB0HVOSY78gIdvk1dTVYtT1y8SBWtPYuTJ/6w=
+google.golang.org/genproto v0.0.0-20230110181048-76db0878b65f/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM=
+google.golang.org/grpc v1.54.0 h1:EhTqbhiYeixwWQtAEZAxmV9MGqcjEU2mFx52xCzNyag=
+google.golang.org/grpc v1.54.0/go.mod h1:PUSEXI6iWghWaB6lXM4knEgpJNu2qUcKfDtNci3EC2g=
 google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
 google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
-google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
-google.golang.org/protobuf v1.28.0 h1:w43yiav+6bVFTBQFZX0r7ipe9JQ1QsbMgHwbBziscLw=
-google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
+google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w=
+google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
-gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
 gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
 gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
+gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
 gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
-honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
diff --git a/pkg/account/account.go b/pkg/account/account.go
new file mode 100644
index 0000000000000000000000000000000000000000..9eb54c317274789cb34ee30632df61c152aef93a
--- /dev/null
+++ b/pkg/account/account.go
@@ -0,0 +1,26 @@
+package account
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/auth"
+	"git.perx.ru/perxis/perxis-go/pkg/members"
+	msobserver "git.perx.ru/perxis/perxis-go/pkg/members/observer"
+	"git.perx.ru/perxis/perxis-go/pkg/organizations"
+	"git.perx.ru/perxis/perxis-go/pkg/users"
+	"git.perx.ru/perxis/perxis-go/pkg/version"
+	"google.golang.org/grpc"
+)
+
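+// Account bundles the gRPC clients of the account service (users, members,
+// organizations, versions) that share a single connection.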
+type Account struct {
+	users.Users
+	members.Members
+	organizations.Organizations
+	version.Versions
+
+	// service responsible for the `members` service tracking events from other services
+	MembersObserver msobserver.Observer
+
+	// used to add information about the user who initiated the request to the context
+	PrincipalFactory *auth.PrincipalFactory
+
+	ClientConn *grpc.ClientConn
+}
diff --git a/pkg/account/client.go b/pkg/account/client.go
new file mode 100644
index 0000000000000000000000000000000000000000..7604f1da49b21e25a1eca3a9a9f0bc7307547a54
--- /dev/null
+++ b/pkg/account/client.go
@@ -0,0 +1,138 @@
+package account
+
+import (
+	"context"
+	"crypto/tls"
+	"crypto/x509"
+	"fmt"
+	"net/url"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	serviceMembers "git.perx.ru/perxis/perxis-go/pkg/members/middleware"
+	membersObserverTransport "git.perx.ru/perxis/perxis-go/pkg/members/observer/transport/grpc"
+	membersTransport "git.perx.ru/perxis/perxis-go/pkg/members/transport/grpc"
+	serviceOrganizations "git.perx.ru/perxis/perxis-go/pkg/organizations/middleware"
+	organizationsTransport "git.perx.ru/perxis/perxis-go/pkg/organizations/transport/grpc"
+	serviceUsers "git.perx.ru/perxis/perxis-go/pkg/users/middleware"
+	usersTransport "git.perx.ru/perxis/perxis-go/pkg/users/transport/grpc"
+	"go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc"
+	"go.uber.org/zap"
+	"golang.org/x/oauth2/clientcredentials"
+	"google.golang.org/grpc"
+	"google.golang.org/grpc/credentials"
+	"google.golang.org/grpc/credentials/insecure"
+	"google.golang.org/grpc/credentials/oauth"
+)
+
+const (
+	DefaultCacheSize = 1000
+	DefaultCacheTTL  = time.Second * 10
+)
+
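+// NewClient dials the account service at addr and returns an Account whose
+// service clients (Users, Members, Organizations, MembersObserver) share the
+// returned *grpc.ClientConn. Unless disabled via options, the clients are
+// wrapped with caching and logging middleware.
+//
+// Minimal usage sketch (the address and option values are illustrative):
+//
+//	acc, conn, err := NewClient(ctx, "localhost:50051", Logger(logger), AccessLog())
+//	if err != nil {
+//		// handle error
+//	}
+//	defer conn.Close()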
+func NewClient(ctx context.Context, addr string, opts ...Option) (*Account, *grpc.ClientConn, error) {
+	client := &Account{}
+	dialOpts := make([]grpc.DialOption, 0)
+	config := &config{}
+
+	for _, o := range opts {
+		o(config)
+	}
+
+	if config.logger == nil {
+		config.logger = zap.NewNop()
+	}
+
+	dialOpts = append(dialOpts, grpc.WithUnaryInterceptor(otelgrpc.UnaryClientInterceptor()),
+		grpc.WithStreamInterceptor(otelgrpc.StreamClientInterceptor()))
+
+	if config.auth == nil {
+		dialOpts = append(dialOpts, grpc.WithTransportCredentials(insecure.NewCredentials()))
+	} else {
+		if config.auth.TLS != nil {
+
+			certPool := x509.NewCertPool()
+
+			if !certPool.AppendCertsFromPEM(config.auth.TLS.cacert) {
+				return nil, nil, fmt.Errorf("CA certificate not loaded")
+			}
+
+			clientCert, err := tls.X509KeyPair(config.auth.TLS.cert, config.auth.TLS.key)
+			if err != nil {
+				return nil, nil, err
+			}
+
+			tlsConfig := &tls.Config{
+				Certificates: []tls.Certificate{clientCert},
+				RootCAs:      certPool,
+			}
+
+			dialOpts = append(dialOpts,
+				grpc.WithTransportCredentials(credentials.NewTLS(tlsConfig)),
+			)
+		}
+
+		if config.auth.OAuth2 != nil {
+			// create external grpc client
+			conf := &clientcredentials.Config{
+				TokenURL:       config.auth.OAuth2.tokenURL,
+				ClientID:       config.auth.OAuth2.clientID,
+				ClientSecret:   config.auth.OAuth2.clientSecret,
+				EndpointParams: url.Values{"audience": {config.auth.OAuth2.audience}},
+			}
+			cred := oauth.TokenSource{
+				TokenSource: conf.TokenSource(ctx),
+			}
+			dialOpts = append(dialOpts,
+				grpc.WithPerRPCCredentials(cred),
+				grpc.WithTransportCredentials(credentials.NewTLS(&tls.Config{InsecureSkipVerify: true})), // TLS must be used with oauth.TokenSource credentials, see https://github.com/grpc/grpc-go/blob/64031cbfcf4d84c026be93ad7b74b3c290100893/credentials/oauth/oauth.go#L160
+			)
+		}
+
+	}
+
+	accountConn, err := grpc.Dial(addr, dialOpts...)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	client.Members = membersTransport.NewGRPCClient(accountConn, "", config.clientOptions...)
+	client.Organizations = organizationsTransport.NewGRPCClient(accountConn, "", config.clientOptions...)
+	client.Users = usersTransport.NewGRPCClient(accountConn, "", config.clientOptions...)
+	client.MembersObserver = membersObserverTransport.NewGRPCClient(accountConn, "", config.clientOptions...)
+
+	if !config.noCache {
+		client = WithCaching(client, DefaultCacheSize, DefaultCacheTTL)
+	}
+
+	if !config.noLog {
+		client = WithLogging(client, config.logger, config.accessLog)
+	}
+
+	return client, accountConn, nil
+}
+
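+// WithCaching returns a shallow copy of client with caching middleware applied
+// to the Members, Organizations and Users services, each using a cache of the
+// given size and TTL.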
+func WithCaching(client *Account, size int, ttl time.Duration) *Account {
+	c := *client
+
+	c.Members = serviceMembers.CachingMiddleware(cache.NewCache(size, ttl))(client.Members)
+	c.Organizations = serviceOrganizations.CachingMiddleware(cache.NewCache(size, ttl))(client.Organizations)
+	c.Users = serviceUsers.CachingMiddleware(cache.NewCache(size, ttl))(client.Users)
+
+	return &c
+}
+
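+// WithLogging returns a shallow copy of client with error-logging middleware
+// (and, when accessLog is true, access-logging middleware) applied to the
+// Members, Organizations and Users services.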
+func WithLogging(client *Account, logger *zap.Logger, accessLog bool) *Account {
+	c := *client
+
+	c.Members = serviceMembers.WithLog(c.Members, logger, accessLog)
+	c.Organizations = serviceOrganizations.WithLog(c.Organizations, logger, accessLog)
+	c.Users = serviceUsers.WithLog(c.Users, logger, accessLog)
+
+	return &c
+}
diff --git a/pkg/account/config.go b/pkg/account/config.go
new file mode 100644
index 0000000000000000000000000000000000000000..ca7218b594ace4315ef1cae578616d4eaadea7b4
--- /dev/null
+++ b/pkg/account/config.go
@@ -0,0 +1,95 @@
+package account
+
+import (
+	"github.com/go-kit/kit/transport/grpc"
+	"go.uber.org/zap"
+)
+
+type config struct {
+	auth      *authConfig
+	noCache   bool
+	noLog     bool
+	accessLog bool
+	debug     bool
+
+	clientOptions []grpc.ClientOption
+
+	logger *zap.Logger
+}
+
+type authConfig struct {
+	OAuth2 *authOAuth2Config
+	TLS    *authTLSConfig
+}
+
+type authOAuth2Config struct {
+	tokenURL     string
+	clientID     string // parameter from auth0 (a client with this id must exist in perxis)
+	clientSecret string
+	audience     string // parameter from auth0 (the name of the API associated with the Application)
+}
+
+type authTLSConfig struct {
+	cacert []byte
+	cert   []byte
+	key    []byte
+}
+
+type Option func(c *config)
+
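+// AuthOAuth2 configures authentication via the OAuth2 client credentials flow
+// (e.g. auth0) using the given token URL, client ID, client secret and audience.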
+func AuthOAuth2(tokenUrl, clientID, clientSecret, audience string) Option {
+	return func(c *config) {
+		if c.auth == nil {
+			c.auth = &authConfig{}
+		}
+		c.auth.OAuth2 = &authOAuth2Config{
+			tokenURL:     tokenUrl,
+			clientID:     clientID,
+			clientSecret: clientSecret,
+			audience:     audience,
+		}
+	}
+}
+
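+// AuthTLS configures mutual TLS using the given PEM-encoded CA certificate,
+// client certificate and private key.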
+func AuthTLS(cacert, cert, key []byte) Option {
+	return func(c *config) {
+		if c.auth == nil {
+			c.auth = &authConfig{}
+		}
+		c.auth.TLS = &authTLSConfig{
+			cacert: cacert,
+			cert:   cert,
+			key:    key,
+		}
+	}
+}
+
+func NoCache() Option {
+	return func(c *config) {
+		c.noCache = true
+	}
+}
+
+func NoLog() Option {
+	return func(c *config) {
+		c.noLog = true
+	}
+}
+
+func GrpcClientOptions(opts ...grpc.ClientOption) Option {
+	return func(c *config) {
+		c.clientOptions = opts
+	}
+}
+
+func Logger(logger *zap.Logger) Option {
+	return func(c *config) {
+		c.logger = logger
+	}
+}
+
+func AccessLog() Option {
+	return func(c *config) {
+		c.accessLog = true
+	}
+}
diff --git a/pkg/auth/anonymous.go b/pkg/auth/anonymous.go
new file mode 100644
index 0000000000000000000000000000000000000000..e000842a68b2a0a64b5ed59bc7f5bcb5b7bf6514
--- /dev/null
+++ b/pkg/auth/anonymous.go
@@ -0,0 +1,128 @@
+package auth
+
+import (
+	"context"
+	"fmt"
+
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/members"
+	"git.perx.ru/perxis/perxis-go/pkg/permission"
+	"git.perx.ru/perxis/perxis-go/pkg/roles"
+	"git.perx.ru/perxis/perxis-go/pkg/service"
+	"git.perx.ru/perxis/perxis-go/pkg/spaces"
+)
+
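+// Anonymous is the Principal used for unauthenticated requests. It is never
+// valid, is denied management access and is only granted the rules of the
+// space's AnonymousRole, if such a role exists.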
+type Anonymous struct {
+	roles        roles.Roles
+	spaces       spaces.Spaces
+	spaceID      string
+	environments environments.Environments
+}
+
+func (Anonymous) GetID(ctx context.Context) string  { return "anonymous" }
+func (Anonymous) IsValid(ctx context.Context) bool  { return false }
+func (Anonymous) IsSystem(ctx context.Context) bool { return false }
+func (Anonymous) IsManagementAllowed(ctx context.Context, spaceID string) error {
+	return service.ErrAccessDenied
+}
+
+func (a Anonymous) Space(spaceID string) SpaceAccessor {
+	a.spaceID = spaceID
+	return &a
+}
+
+func (a *Anonymous) getSpace(ctx context.Context, spaceID string) *spaces.Space {
+	if spaceID == "" {
+		return nil
+	}
+	space, _ := a.spaces.Get(WithSystem(ctx), spaceID)
+	return space
+}
+
+func (a *Anonymous) HasSpaceAccess(ctx context.Context, spaceID string) bool {
+	if a.spaceID == "" || a.spaces == nil {
+		return false
+	}
+	return a.Role(ctx, spaceID) != nil
+}
+
+func (a *Anonymous) Member(ctx context.Context) members.Role {
+	return members.NotMember
+}
+
+func (a *Anonymous) Role(ctx context.Context, spaceID string) *roles.Role {
+	if a.spaceID == "" || a.roles == nil {
+		return nil
+	}
+	role, err := a.roles.Get(WithSystem(ctx), spaceID, roles.AnonymousRole)
+	if err != nil {
+		return nil
+	}
+	return role
+}
+
+func (a *Anonymous) Rules(ctx context.Context, spaceID, envID string) permission.Ruleset {
+	role := a.Role(WithSystem(ctx), spaceID)
+	if role == nil {
+		return nil
+	}
+
+	if !a.HasEnvironmentAccess(ctx, spaceID, envID) {
+		return nil
+	}
+
+	return role.Rules
+}
+
+func (a *Anonymous) HasEnvironmentAccess(ctx context.Context, space, env string) bool {
+	return hasEnvironmentAccess(ctx, a.environments, a.Role(ctx, space), env)
+}
+
+func (Anonymous) Format(f fmt.State, verb rune) {
+	f.Write([]byte("AnonymousPrincipal{}"))
+}
+
+func (a Anonymous) HasAccess(ctx context.Context, spaceID, orgID string) error {
+	if !a.IsValid(ctx) {
+		return service.ErrAccessDenied
+	}
+
+	if a.IsSystem(ctx) {
+		return nil
+	}
+
+	if spaceID != "" {
+		hasAllow, err := a.hasRole(ctx, spaceID)
+		if err != nil {
+			return err
+		}
+
+		if hasAllow {
+			return nil
+		}
+	}
+
+	if a.Member(ctx).IsPrivileged() {
+		return nil
+	}
+
+	return service.ErrAccessDenied
+}
+
+func (a *Anonymous) hasRole(ctx context.Context, spaceID string) (bool, error) {
+	if a.spaceID == "" || a.roles == nil {
+		return false, nil
+	}
+	_, err := a.roles.Get(WithSystem(ctx), spaceID, roles.AnonymousRole)
+	if err == nil {
+		return true, nil
+	}
+
+	if errors.Is(err, service.ErrNotFound) {
+		if sp := a.getSpace(ctx, spaceID); sp == nil {
+			return false, service.ErrNotFound
+		}
+	}
+	return false, nil
+}
diff --git a/pkg/auth/client.go b/pkg/auth/client.go
new file mode 100644
index 0000000000000000000000000000000000000000..1e22b4b92c27ff35f21d4e866394363b39c96a98
--- /dev/null
+++ b/pkg/auth/client.go
@@ -0,0 +1,256 @@
+package auth
+
+import (
+	"context"
+	"fmt"
+
+	"git.perx.ru/perxis/perxis-go/pkg/clients"
+	"git.perx.ru/perxis/perxis-go/pkg/collaborators"
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/members"
+	"git.perx.ru/perxis/perxis-go/pkg/permission"
+	"git.perx.ru/perxis/perxis-go/pkg/roles"
+	"git.perx.ru/perxis/perxis-go/pkg/service"
+	"git.perx.ru/perxis/perxis-go/pkg/spaces"
+)
+
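+// ClientPrincipal is the Principal of a space client authenticated by API key,
+// OAuth client ID or TLS subject. The client record is resolved lazily through
+// the clients service and cached after the first successful lookup.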
+type ClientPrincipal struct {
+	identity *clients.GetByParams
+	spaceID  string
+	space    *spaces.Space
+
+	client  *clients.Client
+	invalid bool
+
+	spaces        spaces.Spaces
+	environments  environments.Environments
+	clients       clients.Clients
+	roles         roles.Roles
+	collaborators collaborators.Collaborators
+}
+
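+// NewClientPrincipal creates a ClientPrincipal for the given client identity parameters.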
+func NewClientPrincipal(identity *clients.GetByParams) *ClientPrincipal {
+	return &ClientPrincipal{identity: identity}
+}
+
+func (c ClientPrincipal) Format(f fmt.State, verb rune) {
+	var identity string
+	switch {
+	case c.identity == nil:
+		identity = "<nil>"
+	case c.identity.APIKey != "":
+		identity = fmt.Sprintf("APIKey: '%s'", c.identity.APIKey)
+	case c.identity.OAuthClientID != "":
+		identity = fmt.Sprintf("OAuthClientID: '%s'", c.identity.OAuthClientID)
+	case c.identity.TLSSubject != "":
+		identity = fmt.Sprintf("TLSSubject: '%s'", c.identity.TLSSubject)
+	}
+
+	var id string
+	if c.client != nil {
+		id = c.client.ID
+	}
+
+	f.Write([]byte(fmt.Sprintf("ClientPrincipal{ID: '%s', Identity: {%s}}", id, identity)))
+}
+
+func (c *ClientPrincipal) Space(spaceID string) SpaceAccessor {
+	c.spaceID = spaceID
+	c.space = nil
+	c.invalid = false
+	c.client = nil
+	return c
+}
+
+func (c *ClientPrincipal) getSpace(ctx context.Context, spaceID string) *spaces.Space {
+	if spaceID == "" {
+		return nil
+	}
+	space, _ := c.spaces.Get(WithSystem(ctx), spaceID)
+	return space
+}
+
+func (ClientPrincipal) IsSystem(ctx context.Context) bool {
+	return false
+}
+
+func (c *ClientPrincipal) IsManagementAllowed(ctx context.Context, spaceID string) error {
+	if !c.IsValid(ctx) {
+		return service.ErrAccessDenied
+	}
+
+	if role := c.Role(ctx, spaceID); role != nil && role.AllowManagement {
+		return nil
+	}
+
+	return service.ErrAccessDenied
+}
+
+func (c *ClientPrincipal) Member(ctx context.Context) members.Role {
+	return members.NotMember
+}
+
+func (c *ClientPrincipal) HasSpaceAccess(ctx context.Context, spaceID string) bool {
+	if c.spaceID == "" {
+		return false
+	}
+	client, _ := c.Client(ctx)
+	return client != nil && client.SpaceID == spaceID
+}
+
+func (c *ClientPrincipal) GetID(ctx context.Context) string {
+	client, _ := c.Client(ctx)
+	if client == nil {
+		return ""
+	}
+	return client.ID
+}
+
+func (c *ClientPrincipal) GetIdentity(ctx context.Context) *clients.GetByParams {
+	return c.identity
+}
+
+func (c *ClientPrincipal) IsValid(ctx context.Context) bool {
+	if c == nil {
+		return false
+	}
+	client, _ := c.Client(ctx)
+	return client != nil
+}
+
+func (c *ClientPrincipal) Client(ctx context.Context) (*clients.Client, error) {
+	if c.invalid {
+		return nil, nil
+	}
+
+	if c.client != nil {
+		return c.client, nil
+	}
+
+	if c.clients == nil {
+		c.invalid = true
+		return nil, nil
+	}
+
+	client, err := c.clients.GetBy(WithSystem(ctx), c.spaceID, c.identity)
+	if err != nil || client == nil || client.IsDisabled() {
+		c.invalid = true
+		return nil, err
+	}
+
+	c.client = client
+	return c.client, nil
+}
+
+func (c *ClientPrincipal) HasEnvironmentAccess(ctx context.Context, spaceID, envID string) bool {
+	return hasEnvironmentAccess(ctx, c.environments, c.Role(ctx, spaceID), envID)
+}
+
+func (c *ClientPrincipal) getRoleID(ctx context.Context, spaceID string) (string, bool) {
+
+	if c.spaceID == "" || spaceID == "" {
+		return "", false
+	}
+
+	if spaceID == c.spaceID {
+		cl, _ := c.Client(ctx)
+		if cl == nil || cl.RoleID == "" {
+			return "", false
+		}
+
+		return cl.RoleID, true
+	}
+
+	rID, err := c.collaborators.Get(WithSystem(ctx), spaceID, c.spaceID)
+	if err != nil {
+		rID = roles.ViewRole
+	}
+	return rID, true
+
+}
+
+func (c *ClientPrincipal) Role(ctx context.Context, spaceID string) *roles.Role {
+	if c.spaceID == "" {
+		return nil
+	}
+
+	rID, ok := c.getRoleID(ctx, spaceID)
+	if !ok {
+		return nil
+	}
+
+	role, err := c.roles.Get(WithSystem(ctx), spaceID, rID)
+	if err == nil {
+		//c.hasRole = true
+		//c.role = role
+		return role
+	}
+
+	return nil
+}
+
+func (c *ClientPrincipal) Rules(ctx context.Context, spaceID, envID string) permission.Ruleset {
+	if c.spaceID == "" || spaceID == "" || envID == "" {
+		return nil
+	}
+
+	role := c.Role(ctx, spaceID)
+	if role == nil {
+		return nil
+	}
+
+	if role.AllowManagement {
+		return permission.PrivilegedRuleset{}
+	}
+
+	if hasEnvironmentAccess(ctx, c.environments, role, envID) {
+		return role.Rules
+	}
+	return nil
+}
+
+func (c *ClientPrincipal) HasAccess(ctx context.Context, spaceID, orgID string) error {
+	if !c.IsValid(ctx) {
+		return service.ErrAccessDenied
+	}
+
+	if c.IsSystem(ctx) {
+		return nil
+	}
+
+	if spaceID != "" {
+		if c.spaceID == "" {
+			return service.ErrAccessDenied
+		}
+
+		client, _ := c.Client(ctx)
+		if client != nil && client.SpaceID == spaceID {
+			return nil
+		}
+	}
+
+	if c.Member(ctx).IsPrivileged() {
+		return nil
+	}
+
+	return service.ErrAccessDenied
+}
+
+func (c *ClientPrincipal) hasRole(ctx context.Context, spaceID string) (bool, error) {
+	if c.spaceID == "" {
+		return false, nil
+	}
+
+	client, err := c.Client(ctx)
+	if err != nil && errors.Is(err, service.ErrNotFound) {
+		if sp := c.getSpace(ctx, spaceID); sp == nil {
+			return false, service.ErrNotFound
+		}
+	}
+	if client != nil && client.SpaceID == spaceID {
+		return true, nil
+	}
+
+	return false, nil
+}
diff --git a/pkg/auth/context.go b/pkg/auth/context.go
new file mode 100644
index 0000000000000000000000000000000000000000..d447681068fefc974089f351d7654056314796b7
--- /dev/null
+++ b/pkg/auth/context.go
@@ -0,0 +1,27 @@
+package auth
+
+import (
+	"context"
+)
+
+type principalKey struct{}
+
+func GetPrincipal(ctx context.Context) Principal {
+	p, _ := ctx.Value(principalKey{}).(Principal)
+	if p == nil {
+		return Anonymous{}
+	}
+	return p
+}
+
+func WithPrincipal(ctx context.Context, p Principal) context.Context {
+	if ctx == nil {
+		ctx = context.Background()
+	}
+
+	return context.WithValue(ctx, principalKey{}, p)
+}
+
+func WithSystem(ctx context.Context) context.Context {
+	return WithPrincipal(ctx, &SystemPrincipal{})
+}
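+
+// Illustrative usage (a minimal sketch, not part of the package API): a handler
+// reads the principal that a transport interceptor stored in the context and
+// falls back to the anonymous principal when nothing was set.
+//
+//	func requireValid(ctx context.Context) error {
+//		p := GetPrincipal(ctx) // returns Anonymous{} when no principal is present
+//		if !p.IsValid(ctx) {
+//			return service.ErrAccessDenied
+//		}
+//		return nil
+//	}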
diff --git a/pkg/auth/factory.go b/pkg/auth/factory.go
new file mode 100644
index 0000000000000000000000000000000000000000..2394c62a2f15ca7605959b3f5b31996a5c79a164
--- /dev/null
+++ b/pkg/auth/factory.go
@@ -0,0 +1,82 @@
+package auth
+
+import (
+	"strings"
+
+	"git.perx.ru/perxis/perxis-go/pkg/clients"
+	"git.perx.ru/perxis/perxis-go/pkg/collaborators"
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	"git.perx.ru/perxis/perxis-go/pkg/members"
+	"git.perx.ru/perxis/perxis-go/pkg/roles"
+	"git.perx.ru/perxis/perxis-go/pkg/spaces"
+	"git.perx.ru/perxis/perxis-go/pkg/users"
+)
+
+type PrincipalFactory struct {
+	users.Users
+	members.Members
+	collaborators.Collaborators
+	roles.Roles
+	clients.Clients
+	spaces.Spaces
+	environments.Environments
+}
+
+func (f PrincipalFactory) User(identity string) Principal {
+	return &UserPrincipal{
+		identity:      identity,
+		users:         f.Users,
+		members:       f.Members,
+		roles:         f.Roles,
+		collaborators: f.Collaborators,
+		spaces:        f.Spaces,
+		environments:  f.Environments,
+	}
+}
+
+func (f PrincipalFactory) Client(param *clients.GetByParams) Principal {
+	return &ClientPrincipal{
+		identity: param,
+		//authID:       authID,
+		clients:       f.Clients,
+		environments:  f.Environments,
+		roles:         f.Roles,
+		spaces:        f.Spaces,
+		collaborators: f.Collaborators,
+	}
+}
+
+func (f PrincipalFactory) Anonymous() Principal {
+	return &Anonymous{
+		roles:  f.Roles,
+		spaces: f.Spaces,
+	}
+}
+
+func (f PrincipalFactory) System() Principal {
+	return &SystemPrincipal{}
+}
+
+func (f PrincipalFactory) Principal(principalId string) Principal {
+	switch {
+	case strings.Contains(principalId, "Subject="):
+		return f.Client(&clients.GetByParams{TLSSubject: getSubject(principalId)})
+	case strings.HasSuffix(principalId, "@clients"):
+		return f.Client(&clients.GetByParams{OAuthClientID: strings.TrimSuffix(principalId, "@clients")})
+	case strings.HasPrefix(principalId, "API-Key"):
+		return f.Client(&clients.GetByParams{APIKey: strings.TrimPrefix(principalId, "API-Key ")})
+	default:
+		return f.User(principalId)
+	}
+}
+
+func getSubject(header string) string {
+	var p string
+	for _, part := range strings.Split(header, ";") {
+		if strings.Contains(part, "Subject") {
+			p = strings.TrimSuffix(strings.TrimPrefix(part, "Subject=\""), "\"")
+			break
+		}
+	}
+	return p
+}
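+
+// Illustrative dispatch (input values are made up):
+//
+//	f.Principal(`Hash=xx;Subject="CN=backend"`)  // -> Client principal with TLSSubject "CN=backend"
+//	f.Principal("my-service@clients")            // -> Client principal with OAuthClientID "my-service"
+//	f.Principal("API-Key 0123456789abcdef")      // -> Client principal with APIKey "0123456789abcdef"
+//	f.Principal("user@example.com")              // -> User principal with identity "user@example.com"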
diff --git a/pkg/auth/grpc.go b/pkg/auth/grpc.go
new file mode 100644
index 0000000000000000000000000000000000000000..7a566711db76c11b22206d947ba94ecc2bc3b366
--- /dev/null
+++ b/pkg/auth/grpc.go
@@ -0,0 +1,92 @@
+package auth
+
+import (
+	"context"
+
+	kitgrpc "github.com/go-kit/kit/transport/grpc"
+	"google.golang.org/grpc"
+	"google.golang.org/grpc/metadata"
+)
+
+const (
+	OAuth2IdentityMetadata = "x-perxis-identity"
+	TLSIdentityMetadata    = "x-forwarded-client-cert"
+	AccessMetadata         = "x-perxis-access"
+
+	AuthorizationMetadata = "authorization"
+)
+
+func GRPCToContext(factory *PrincipalFactory) kitgrpc.ServerRequestFunc {
+	return func(ctx context.Context, md metadata.MD) context.Context {
+		if identity := md.Get(TLSIdentityMetadata); len(identity) > 0 {
+			return WithPrincipal(ctx, factory.Principal(identity[0]))
+		}
+
+		if identity := md.Get(OAuth2IdentityMetadata); len(identity) > 0 {
+			return WithPrincipal(ctx, factory.Principal(identity[0]))
+		}
+
+		if identity := md.Get(AuthorizationMetadata); len(identity) > 0 {
+			return WithPrincipal(ctx, factory.Principal(identity[0]))
+		}
+
+		if access := md.Get(AccessMetadata); len(access) > 0 {
+			return WithPrincipal(ctx, factory.System())
+		}
+
+		return WithPrincipal(ctx, factory.Anonymous())
+	}
+}
+
+func ContextToGRPC() kitgrpc.ClientRequestFunc {
+	return func(ctx context.Context, md *metadata.MD) context.Context {
+		p := GetPrincipal(ctx)
+
+		switch p := p.(type) {
+		case *UserPrincipal:
+			if p.GetIdentity(ctx) != "" {
+				(*md)[OAuth2IdentityMetadata] = []string{p.GetIdentity(ctx)}
+			}
+		case *ClientPrincipal:
+			if ident := p.GetIdentity(ctx); ident != nil {
+				switch {
+				case ident.OAuthClientID != "":
+					(*md)[OAuth2IdentityMetadata] = []string{ident.OAuthClientID + "@clients"}
+				case ident.TLSSubject != "":
+					(*md)[TLSIdentityMetadata] = []string{ident.TLSSubject}
+				case ident.APIKey != "":
+					(*md)[AuthorizationMetadata] = []string{"API-Key " + ident.APIKey}
+
+				}
+			}
+		case *SystemPrincipal:
+			(*md)[AccessMetadata] = []string{p.GetID(ctx)}
+		}
+
+		return ctx
+	}
+}
+
+// PrincipalServerInterceptor is a gRPC server interceptor that extracts principal data from the incoming gRPC
+// metadata and stores it in the request context. If the service does not perform 'Principal' access checks, pass an
+// empty '&PrincipalFactory{}'.
+func PrincipalServerInterceptor(factory *PrincipalFactory) grpc.UnaryServerInterceptor {
+	return func(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) {
+		if md, ok := metadata.FromIncomingContext(ctx); ok {
+			ctx = GRPCToContext(factory)(ctx, md)
+		}
+		return handler(ctx, req)
+	}
+}
+
+// PrincipalClientInterceptor is a gRPC client interceptor that propagates the principal data from the context into
+// the outgoing gRPC metadata. If the service does not perform 'Principal' access checks, pass an empty
+// '&PrincipalFactory{}'.
+func PrincipalClientInterceptor() grpc.UnaryClientInterceptor {
+	return func(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error {
+		md, ok := metadata.FromOutgoingContext(ctx)
+		if !ok {
+			md = metadata.MD{}
+		}
+		ctx = metadata.NewOutgoingContext(ContextToGRPC()(ctx, &md), md)
+		return invoker(ctx, method, req, reply, cc, opts...)
+	}
+}
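+
+// Illustrative wiring (a sketch; server and client construction happen in the application code):
+//
+//	srv := grpc.NewServer(grpc.UnaryInterceptor(auth.PrincipalServerInterceptor(factory)))
+//	conn, err := grpc.Dial(target,
+//		grpc.WithInsecure(),
+//		grpc.WithUnaryInterceptor(auth.PrincipalClientInterceptor()),
+//	)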
diff --git a/pkg/auth/principal.go b/pkg/auth/principal.go
new file mode 100644
index 0000000000000000000000000000000000000000..78da09cb27f493f034720d40d91f33b7b6ce7313
--- /dev/null
+++ b/pkg/auth/principal.go
@@ -0,0 +1,86 @@
+package auth
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/data"
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	"git.perx.ru/perxis/perxis-go/pkg/members"
+	"git.perx.ru/perxis/perxis-go/pkg/permission"
+	"git.perx.ru/perxis/perxis-go/pkg/roles"
+)
+
+type Principal interface {
+	GetID(ctx context.Context) string
+	IsValid(ctx context.Context) bool
+	IsSystem(ctx context.Context) bool
+	HasAccess(ctx context.Context, spID, orgID string) error
+
+	IsManagementAllowed(ctx context.Context, spaceID string) error
+}
+
+type SpaceAccessor interface {
+	Principal
+	Space(spaceID string) SpaceAccessor
+
+	// HasSpaceAccess reports whether the principal has read access to the space
+	// (viewing space information, environments, etc.); access to collection records
+	// is governed by a separate ruleset, see SpaceAccessor.Rules().
+	HasSpaceAccess(ctx context.Context, spaceID string) bool
+	HasEnvironmentAccess(ctx context.Context, spaceID, env string) bool
+
+	// Member returns the principal's role in the organization.
+	Member(ctx context.Context) members.Role
+
+	Role(ctx context.Context, spaceID string) *roles.Role
+
+	// Rules returns the ruleset that governs the principal's access to the
+	// space's collection records.
+	Rules(ctx context.Context, spaceID, envID string) permission.Ruleset
+}
+
+type OrganizationAccessor interface {
+	Principal
+	Organization(orgID string) OrganizationAccessor
+	Member(ctx context.Context) members.Role
+}
+
+func hasEnvironmentAccess(ctx context.Context, envsrv environments.Environments, role *roles.Role, envID string) bool {
+	if role == nil || role.SpaceID == "" || envID == "" {
+		return false
+	}
+
+	if role.AllowManagement {
+		return true
+	}
+
+	envs := role.Environments
+
+	// If no accessible environments are listed explicitly, access defaults to the master environment.
+	if len(envs) == 0 {
+		envs = []string{environments.DefaultEnvironment}
+	}
+
+	for _, ce := range envs {
+		if envID == ce || data.GlobMatch(envID, ce) {
+			return true
+		}
+	}
+
+	e, err := envsrv.Get(WithSystem(ctx), role.SpaceID, envID)
+	if err != nil || e == nil {
+		return false
+	}
+
+	aliases := append(e.Aliases, e.ID)
+
+	for _, ce := range envs {
+		for _, al := range aliases {
+			if al == ce || data.GlobMatch(al, ce) {
+				return true
+			}
+		}
+	}
+
+	return false
+}
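+
+// Illustrative outcomes (a sketch based on the logic above and the cases in principal_test.go):
+//
+//	role.AllowManagement == true            -> access to any environment
+//	role.Environments == nil                -> only environments.DefaultEnvironment (or an environment aliased to it)
+//	role.Environments == []string{"env*"}   -> any environment (or alias) matching the glob, e.g. "env1"
+//	role.Environments == []string{"qa"}     -> no access to "env1" unless "qa" is one of its aliases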
diff --git a/pkg/auth/principal_test.go b/pkg/auth/principal_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..54e04ee4a8bf96151f9dfa9cf1edff0342ab03dd
--- /dev/null
+++ b/pkg/auth/principal_test.go
@@ -0,0 +1,178 @@
+package auth
+
+import (
+	"context"
+	"testing"
+
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	mocksenvs "git.perx.ru/perxis/perxis-go/pkg/environments/mocks"
+	"git.perx.ru/perxis/perxis-go/pkg/roles"
+	"github.com/stretchr/testify/mock"
+)
+
+func Test_hasEnvironmentAccess(t *testing.T) {
+	type args struct {
+		ctx      context.Context
+		envscall func(envsservice *mocksenvs.Environments)
+		role     *roles.Role
+		envID    string
+	}
+	tests := []struct {
+		name string
+		args args
+		want bool
+	}{
+		{
+			name: "simple",
+			args: args{
+				ctx: context.Background(),
+				role: &roles.Role{
+					ID:           "1",
+					SpaceID:      "space",
+					Description:  "Current",
+					Environments: []string{"env1", "env2"},
+				},
+				envID: "env1",
+			},
+			want: true,
+		},
+		{
+			name: "glob env in role test: e*",
+			args: args{
+				ctx: context.Background(),
+				envscall: func(envsservice *mocksenvs.Environments) {
+					envsservice.On("Get", mock.Anything, mock.Anything, mock.Anything).Return(&environments.Environment{
+						ID:      "env1",
+						SpaceID: "space",
+						Aliases: []string{"master"},
+					}, nil).Once()
+				},
+				role: &roles.Role{
+					ID:           "1",
+					SpaceID:      "space",
+					Description:  "Current",
+					Environments: []string{"e*"},
+				},
+				envID: "env",
+			},
+			want: true,
+		},
+		{
+			name: "glob env in role test: *n*",
+			args: args{
+				ctx: context.Background(),
+				envscall: func(envsservice *mocksenvs.Environments) {
+					envsservice.On("Get", mock.Anything, mock.Anything, mock.Anything).Return(&environments.Environment{
+						ID:      "env1",
+						SpaceID: "space",
+						Aliases: []string{"master"},
+					}, nil).Once()
+				},
+				role: &roles.Role{
+					ID:           "1",
+					SpaceID:      "space",
+					Description:  "Current",
+					Environments: []string{"*n*"},
+				},
+				envID: "env",
+			},
+			want: true,
+		},
+		{
+			name: "glob env in role test: *1",
+			args: args{
+				ctx: context.Background(),
+				envscall: func(envsservice *mocksenvs.Environments) {
+					envsservice.On("Get", mock.Anything, mock.Anything, mock.Anything).Return(&environments.Environment{
+						ID:      "env1",
+						SpaceID: "space",
+						Aliases: []string{"master"},
+					}, nil).Once()
+				},
+				role: &roles.Role{
+					ID:           "1",
+					SpaceID:      "space",
+					Description:  "Current",
+					Environments: []string{"*1"},
+				},
+				envID: "env",
+			},
+			want: true,
+		},
+		{
+			name: "glob env in role test (alias): ma*",
+			args: args{
+				ctx: context.Background(),
+				envscall: func(envsservice *mocksenvs.Environments) {
+					envsservice.On("Get", mock.Anything, mock.Anything, mock.Anything).Return(&environments.Environment{
+						ID:      "env1",
+						SpaceID: "space",
+						Aliases: []string{"master"},
+					}, nil).Once()
+				},
+				role: &roles.Role{
+					ID:           "1",
+					SpaceID:      "space",
+					Description:  "Current",
+					Environments: []string{"ma*"},
+				},
+				envID: "env1",
+			},
+			want: true,
+		},
+		{
+			name: "glob env in role test: *",
+			args: args{
+				ctx: context.Background(),
+				envscall: func(envsservice *mocksenvs.Environments) {
+					envsservice.On("Get", mock.Anything, mock.Anything, mock.Anything).Return(&environments.Environment{
+						ID:      "env1",
+						SpaceID: "space",
+						Aliases: []string{"master"},
+					}, nil).Once()
+				},
+				role: &roles.Role{
+					ID:           "1",
+					SpaceID:      "space",
+					Description:  "Current",
+					Environments: []string{"*"},
+				},
+				envID: "env1",
+			},
+			want: true,
+		},
+		{
+			name: "glob env in role test: q*",
+			args: args{
+				ctx: context.Background(),
+				envscall: func(envsservice *mocksenvs.Environments) {
+					envsservice.On("Get", mock.Anything, mock.Anything, mock.Anything).Return(&environments.Environment{
+						ID:      "env1",
+						SpaceID: "space",
+						Aliases: []string{"master"},
+					}, nil).Once()
+				},
+				role: &roles.Role{
+					ID:           "1",
+					SpaceID:      "space",
+					Description:  "Current",
+					Environments: []string{"q*"},
+				},
+				envID: "env1",
+			},
+			want: false,
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			envsservice := &mocksenvs.Environments{}
+			if tt.args.envscall != nil {
+				tt.args.envscall(envsservice)
+			}
+
+			if got := hasEnvironmentAccess(tt.args.ctx, envsservice, tt.args.role, tt.args.envID); got != tt.want {
+				t.Errorf("hasEnvironmentAccess() = %v, want %v", got, tt.want)
+			}
+		})
+	}
+}
diff --git a/pkg/auth/system.go b/pkg/auth/system.go
new file mode 100644
index 0000000000000000000000000000000000000000..b602fe114bb53d5f82ae5f6796806dc20ed75188
--- /dev/null
+++ b/pkg/auth/system.go
@@ -0,0 +1,39 @@
+package auth
+
+import (
+	"context"
+	"fmt"
+
+	"git.perx.ru/perxis/perxis-go/pkg/members"
+	"git.perx.ru/perxis/perxis-go/pkg/permission"
+	"git.perx.ru/perxis/perxis-go/pkg/roles"
+)
+
+type SystemPrincipal struct{}
+
+const (
+	SystemID = "system"
+)
+
+func (p SystemPrincipal) GetID(ctx context.Context) string                            { return SystemID }
+func (SystemPrincipal) IsValid(ctx context.Context) bool                              { return true }
+func (SystemPrincipal) IsSystem(ctx context.Context) bool                             { return true }
+func (SystemPrincipal) IsManagementAllowed(ctx context.Context, spaceID string) error { return nil }
+
+func (p SystemPrincipal) Organization(_ string) OrganizationAccessor { return p }
+
+func (p SystemPrincipal) Space(_ string) SpaceAccessor                  { return p }
+func (SystemPrincipal) HasSpaceAccess(_ context.Context, _ string) bool { return true }
+func (SystemPrincipal) HasAccess(ctx context.Context, spaceID, orgID string) error {
+	return nil
+}
+func (SystemPrincipal) HasEnvironmentAccess(_ context.Context, _, _ string) bool { return true }
+func (SystemPrincipal) Member(_ context.Context) members.Role                    { return members.NotMember }
+func (SystemPrincipal) Role(_ context.Context, _ string) *roles.Role             { return nil }
+func (SystemPrincipal) Rules(_ context.Context, _, _ string) permission.Ruleset {
+	return &permission.PrivilegedRuleset{}
+}
+
+func (SystemPrincipal) Format(f fmt.State, verb rune) {
+	f.Write([]byte("SystemPrincipal{}"))
+}
diff --git a/pkg/auth/user.go b/pkg/auth/user.go
new file mode 100644
index 0000000000000000000000000000000000000000..a4500c140ee48d80b97000fd3f703da8be7c782d
--- /dev/null
+++ b/pkg/auth/user.go
@@ -0,0 +1,335 @@
+package auth
+
+import (
+	"context"
+	"fmt"
+
+	"git.perx.ru/perxis/perxis-go/pkg/collaborators"
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/members"
+	"git.perx.ru/perxis/perxis-go/pkg/permission"
+	"git.perx.ru/perxis/perxis-go/pkg/roles"
+	"git.perx.ru/perxis/perxis-go/pkg/service"
+	"git.perx.ru/perxis/perxis-go/pkg/spaces"
+	"git.perx.ru/perxis/perxis-go/pkg/users"
+)
+
+type UserPrincipal struct {
+	id       string
+	identity string
+
+	user    *users.User
+	invalid bool
+	spaceID string
+	orgID   string
+
+	users         users.Users
+	members       members.Members
+	hasMemberRole bool
+	memberRole    members.Role
+
+	collaborators collaborators.Collaborators
+	spaces        spaces.Spaces
+	environments  environments.Environments
+	roles         roles.Roles
+}
+
+func (u UserPrincipal) Format(f fmt.State, verb rune) {
+	f.Write([]byte(fmt.Sprintf("UserPrincipal{ID: '%s', Identity: '%s'}", u.id, u.identity)))
+}
+
+func (u *UserPrincipal) Space(spaceID string) SpaceAccessor {
+	u.spaceID = spaceID
+	u.orgID = ""
+	return u
+}
+
+func (u *UserPrincipal) getSpace(ctx context.Context, spaceID string) *spaces.Space {
+	if spaceID == "" {
+		return nil
+	}
+	space, _ := u.spaces.Get(WithSystem(ctx), spaceID)
+	return space
+}
+
+func (u UserPrincipal) Organization(orgID string) OrganizationAccessor {
+	u.orgID = orgID
+	return &u
+}
+
+func (u *UserPrincipal) GetID(ctx context.Context) string {
+	user := u.User(ctx)
+	if user == nil {
+		return ""
+	}
+	return user.ID
+}
+
+func (u *UserPrincipal) GetIdentity(ctx context.Context) string {
+	return u.identity
+}
+
+func (u *UserPrincipal) IsValid(ctx context.Context) bool {
+	if u == nil {
+		return false
+	}
+
+	return u.User(ctx) != nil
+}
+
+func (u *UserPrincipal) IsSystem(ctx context.Context) bool {
+	user := u.User(ctx)
+	if user != nil {
+		return user.IsSystem()
+	}
+	return false
+}
+
+func (u *UserPrincipal) IsManagementAllowed(ctx context.Context, spaceID string) error {
+	if !u.IsValid(ctx) {
+		return service.ErrAccessDenied
+	}
+
+	if u.IsSystem(ctx) {
+		return nil
+	}
+
+	if u.Member(ctx).IsPrivileged() {
+		return nil
+	}
+
+	if role := u.Role(ctx, spaceID); role != nil && role.AllowManagement {
+		return nil
+	}
+
+	return service.ErrAccessDenied
+}
+
+func (u *UserPrincipal) User(ctx context.Context) *users.User {
+	if u.invalid {
+		return nil
+	}
+
+	if u.user != nil {
+		return u.user
+	}
+	if u.users == nil {
+		u.invalid = true
+		return nil
+	}
+
+	var user *users.User
+	var err error
+	switch {
+	case u.id != "":
+		user, err = u.users.Get(WithSystem(ctx), u.id)
+	case u.identity != "":
+		user, err = u.users.GetByIdentity(WithSystem(ctx), u.identity)
+	}
+
+	if err != nil || user == nil {
+		u.invalid = true
+		return nil
+	}
+
+	u.user = user
+	return u.user
+}
+
+func (u *UserPrincipal) Member(ctx context.Context) members.Role {
+	if u.hasMemberRole {
+		return u.memberRole
+	}
+
+	if u.members == nil || (u.orgID == "" && u.spaceID == "") {
+		u.hasMemberRole = true
+		return members.NotMember
+	}
+
+	if u.orgID == "" && u.spaceID != "" {
+		sp := u.getSpace(ctx, u.spaceID)
+		if sp == nil {
+			u.hasMemberRole = true
+			return members.NotMember
+		}
+		u.orgID = sp.OrgID
+	}
+
+	role, err := u.members.Get(WithSystem(ctx), u.orgID, u.GetID(ctx))
+	if err != nil {
+		role = members.NotMember
+	}
+
+	u.memberRole = role
+	u.hasMemberRole = true
+	return u.memberRole
+}
+
+// HasSpaceAccess reports whether the user has access to the space.
+// The user has access to the space if:
+// - they are a space collaborator (even if the referenced role no longer exists)
+// - the space allows access for non-collaborators (the AnonymousRole/AuthorizedRole/ViewRole roles exist)
+//
+// Deprecated: use HasAccess instead.
+func (u *UserPrincipal) HasSpaceAccess(ctx context.Context, spaceID string) bool {
+	res, _ := u.hasRole(ctx, spaceID)
+	return res
+}
+
+// HasAccess reports whether the user has access to the space.
+// The user has access to the space if:
+// - they are a space collaborator (even if the referenced role no longer exists)
+// - the space allows access for non-collaborators (the AnonymousRole/AuthorizedRole/ViewRole roles exist)
+func (u *UserPrincipal) HasAccess(ctx context.Context, spaceID, orgID string) error {
+	if !u.IsValid(ctx) {
+		return service.ErrAccessDenied
+	}
+
+	if u.IsSystem(ctx) {
+		return nil
+	}
+
+	if spaceID != "" {
+		hasAllow, err := u.hasRole(ctx, spaceID)
+		if err != nil {
+			return err
+		}
+
+		if hasAllow {
+			return nil
+		}
+	}
+
+	if orgID != "" {
+		if u.Organization(orgID).Member(ctx).IsPrivileged() {
+			return nil
+		}
+	} else {
+		if u.Member(ctx).IsPrivileged() {
+			return nil
+		}
+	}
+
+	return service.ErrAccessDenied
+}
+
+func (u *UserPrincipal) hasRole(ctx context.Context, spaceID string) (bool, error) {
+
+	if u.spaceID == "" || spaceID == "" {
+		return false, nil
+	}
+
+	ctx = WithSystem(ctx)
+
+	if spaceID != u.spaceID {
+		_, cErr := u.collaborators.Get(ctx, spaceID, u.spaceID)
+		if cErr == nil {
+			return true, nil
+		}
+		_, rErr := u.roles.Get(ctx, spaceID, roles.ViewRole)
+		if rErr == nil {
+			return true, nil
+		}
+		if errors.Is(cErr, service.ErrNotFound) || errors.Is(rErr, service.ErrNotFound) {
+			if sp := u.getSpace(ctx, spaceID); sp == nil {
+				return false, service.ErrNotFound
+			}
+		}
+
+		return false, nil
+	}
+
+	_, cErr := u.collaborators.Get(ctx, spaceID, u.GetID(ctx))
+	if cErr == nil {
+		return true, nil
+	}
+
+	_, rErr := u.roles.Get(ctx, spaceID, roles.AuthorizedRole)
+	if rErr == nil {
+		return true, nil
+	}
+
+	if errors.Is(cErr, service.ErrNotFound) || errors.Is(rErr, service.ErrNotFound) {
+		if sp := u.getSpace(ctx, spaceID); sp == nil {
+			return false, service.ErrNotFound
+		}
+	}
+
+	return false, nil
+}
+
+func (u *UserPrincipal) getRoleID(ctx context.Context, spaceID string) string {
+
+	if u.spaceID == "" || spaceID == "" {
+		return ""
+	}
+
+	ctx = WithSystem(ctx)
+
+	if spaceID != u.spaceID {
+		rID, err := u.collaborators.Get(ctx, spaceID, u.spaceID)
+		if err != nil {
+			rID = roles.ViewRole
+		}
+		return rID
+	}
+
+	if roleID, err := u.collaborators.Get(ctx, spaceID, u.GetID(ctx)); err == nil {
+		return roleID
+	}
+
+	return roles.AuthorizedRole
+}
+
+func (u *UserPrincipal) Role(ctx context.Context, spaceID string) *roles.Role {
+
+	if roleID := u.getRoleID(ctx, spaceID); roleID != "" {
+		role, _ := u.roles.Get(WithSystem(ctx), spaceID, roleID)
+		return role
+	}
+
+	return nil
+}
+
+func (u *UserPrincipal) Rules(ctx context.Context, spaceID, envID string) permission.Ruleset {
+	if spaceID == "" || envID == "" {
+		return nil
+	}
+
+	if u.spaceID == spaceID && (u.IsSystem(ctx) || u.Member(ctx).IsPrivileged()) {
+		return permission.PrivilegedRuleset{}
+	}
+
+	role := u.Role(ctx, spaceID)
+	if role == nil {
+		return nil
+	}
+
+	if !hasEnvironmentAccess(ctx, u.environments, role, envID) {
+		return nil
+	}
+
+	return role.Rules
+}
+
+func IsValidUser(ctx context.Context, p Principal) bool {
+	if p == nil {
+		return false
+	}
+	if u, ok := p.(*UserPrincipal); ok {
+		return u.IsValid(ctx)
+	}
+	return false
+}
+
+func User(ctx context.Context, p Principal) *users.User {
+	if u, ok := p.(*UserPrincipal); ok {
+		return u.User(ctx)
+	}
+	return nil
+}
+
+func (u *UserPrincipal) HasEnvironmentAccess(ctx context.Context, spaceID, env string) bool {
+	return hasEnvironmentAccess(ctx, u.environments, u.Role(ctx, spaceID), env)
+}
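+
+// Illustrative flow (a sketch; identifiers are made up): resolve a user principal,
+// scope it to a space and read the rules for one environment.
+//
+//	p := factory.User("user@example.com")
+//	sa := p.(SpaceAccessor).Space("sp1")
+//	if err := sa.HasAccess(ctx, "sp1", ""); err != nil {
+//		return err
+//	}
+//	rules := sa.Rules(ctx, "sp1", "master")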
diff --git a/pkg/cache/cache.go b/pkg/cache/cache.go
index 7f7248e5208f1691d7e094eaf640f361383faab6..8d32fb4d066026d0ccab1a0c596aa54bfb18d6ba 100644
--- a/pkg/cache/cache.go
+++ b/pkg/cache/cache.go
@@ -1,10 +1,10 @@
 package cache
 
 import (
-	"errors"
 	"fmt"
 	"time"
 
+	"git.perx.ru/perxis/perxis-go/pkg/service"
 	lru "github.com/hashicorp/golang-lru"
 	"go.uber.org/zap"
 )
@@ -14,8 +14,6 @@ const (
 	defaultTTL       = 30 * time.Second
 )
 
-var ErrNotFound = errors.New("not found")
-
 type Cache struct {
 	cache  *lru.Cache
 	ttl    time.Duration
@@ -69,13 +67,13 @@ func (c *Cache) Get(key interface{}) (value interface{}, err error) {
 		if v.expiredAt.Before(time.Now()) {
 			c.Remove(key)
 			c.logger.Debug("Expired", zap.String("key", fmt.Sprintf("%v", key)), zap.String("ptr", fmt.Sprintf("%p", v.value)))
-			return nil, ErrNotFound
+			return nil, service.ErrNotFound
 		}
 		c.logger.Debug("Hit", zap.String("key", fmt.Sprintf("%v", key)), zap.String("ptr", fmt.Sprintf("%p", v.value)))
 		return v.value, nil
 	}
 	c.logger.Debug("Miss", zap.String("key", fmt.Sprintf("%v", key)))
-	return nil, ErrNotFound
+	return nil, service.ErrNotFound
 }
 
 func (c *Cache) Remove(key interface{}) (err error) {
@@ -83,7 +81,7 @@ func (c *Cache) Remove(key interface{}) (err error) {
 	c.logger.Debug("Remove", zap.String("key", fmt.Sprintf("%v", key)))
 
 	if !present {
-		err = ErrNotFound
+		err = service.ErrNotFound
 	}
 
 	return
diff --git a/pkg/cache/cache_test.go b/pkg/cache/cache_test.go
index 345a391c09044b61ebac4f869ba26bbcd9def13d..b0d4304a499f0970cc695b667429c896763c1a33 100644
--- a/pkg/cache/cache_test.go
+++ b/pkg/cache/cache_test.go
@@ -1,7 +1,6 @@
 package cache
 
 import (
-	"errors"
 	"testing"
 	"time"
 
@@ -10,14 +9,13 @@ import (
 )
 
 func TestCache(t *testing.T) {
-
 	t.Run("Simple", func(t *testing.T) {
 		c := NewCache(10, 0)
 
 		{
 			val, err := c.Get("test_key")
 			require.Error(t, err)
-			assert.True(t, errors.Is(err, ErrNotFound))
+			assert.EqualError(t, err, "not found")
 			assert.Nil(t, val)
 		}
 		{
@@ -34,7 +32,7 @@ func TestCache(t *testing.T) {
 			require.NoError(t, err)
 
 			val, err := c.Get("test_key")
-			assert.True(t, errors.Is(err, ErrNotFound))
+			assert.EqualError(t, err, "not found")
 			assert.Nil(t, val)
 		}
 	})
@@ -55,7 +53,7 @@ func TestCache(t *testing.T) {
 			require.NoError(t, err)
 
 			val, err := c.Get("test_key_1")
-			assert.True(t, errors.Is(err, ErrNotFound))
+			assert.EqualError(t, err, "not found")
 			assert.Nil(t, val)
 			val, err = c.Get("test_key_2")
 			require.NoError(t, err)
@@ -76,7 +74,7 @@ func TestCache(t *testing.T) {
 		time.Sleep(15 * time.Millisecond)
 
 		val, err = c.Get("test_key")
-		assert.True(t, errors.Is(err, ErrNotFound))
+		assert.EqualError(t, err, "not found")
 		assert.Nil(t, val)
 	})
 }
diff --git a/pkg/clients/client.go b/pkg/clients/client.go
new file mode 100644
index 0000000000000000000000000000000000000000..f38b5acc9b442be39139523bcd316947d5d54fe7
--- /dev/null
+++ b/pkg/clients/client.go
@@ -0,0 +1,87 @@
+package clients
+
+// Client is an application that has access to the API.
+type Client struct {
+	// Internal identifier of the client within the system
+	ID string `json:"id" bson:"_id"`
+
+	// Space identifier
+	SpaceID string `json:"space_id" bson:"-"`
+
+	// Application name (required)
+	Name string `json:"name" bson:"name"`
+
+	// Client authentication parameters
+	OAuth  *OAuth  `json:"oauth,omitempty" bson:"oauth,omitempty"`
+	TLS    *TLS    `json:"tls,omitempty" bson:"tls,omitempty"`
+	APIKey *APIKey `json:"api_key,omitempty" bson:"api_key,omitempty"`
+
+	// Description of the client and its purpose
+	Description string `json:"description" bson:"description"`
+
+	// The application is disabled and cannot authenticate
+	Disabled *bool `json:"disabled,omitempty" bson:"disabled,omitempty"`
+
+	// The application's role in the space
+	RoleID string `json:"role_id" bson:"role_id"`
+}
+
+type OAuth struct {
+	ClientID     string `bson:"client_id,omitempty" json:"client_id,omitempty"`         // Client ID issued by the IdP server; used to identify the client
+	AuthID       string `bson:"auth_id,omitempty" json:"auth_id,omitempty"`             // Service used to authorize the client
+	TokenURL     string `bson:"token_url,omitempty" json:"token_url,omitempty"`         // URL for obtaining/refreshing the client's access token (optional)
+	ClientSecret string `bson:"client_secret,omitempty" json:"client_secret,omitempty"` // Client secret; used to identify the client (optional)
+}
+
+type APIKey struct {
+	Key    string `bson:"key,omitempty" json:"key,omitempty"`
+	Rotate bool   `bson:"-" json:"rotate,omitempty"`
+}
+
+type TLS struct {
+	Subject string `json:"subject,omitempty"`
+	CACert  string `json:"ca_cert,omitempty"`
+	Cert    string `json:"cert,omitempty"`
+	Key     string `json:"key,omitempty"`
+}
+
+func (c *Client) SetDisabled(b bool) *Client {
+	c.Disabled = &b
+	return c
+}
+
+func (c *Client) IsDisabled() bool {
+	if c.Disabled != nil && *c.Disabled {
+		return true
+	}
+	return false
+}
+
+func (c Client) Clone() *Client {
+	clone := &Client{
+		ID:          c.ID,
+		SpaceID:     c.SpaceID,
+		Name:        c.Name,
+		Description: c.Description,
+		RoleID:      c.RoleID,
+	}
+
+	if c.OAuth != nil {
+		temp := *c.OAuth
+		clone.OAuth = &temp
+	}
+	if c.TLS != nil {
+		temp := *c.TLS
+		clone.TLS = &temp
+	}
+	if c.APIKey != nil {
+		temp := *c.APIKey
+		clone.APIKey = &temp
+	}
+	if c.Disabled != nil {
+		temp := *c.Disabled
+		clone.Disabled = &temp
+	}
+
+	return clone
+}
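+
+// Illustrative use of the helpers above (a sketch; identifiers are made up):
+//
+//	c := &Client{ID: "c1", SpaceID: "sp1", Name: "backend"}
+//	dup := c.SetDisabled(true).Clone() // deep copy: dup.IsDisabled() == true
+//	dup.SetDisabled(false)             // does not affect c, Clone copies the Disabled value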
diff --git a/pkg/clients/middleware/caching_middleware.go b/pkg/clients/middleware/caching_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..1733c0de4a8dde8f011d46b2b77005f8924e60bb
--- /dev/null
+++ b/pkg/clients/middleware/caching_middleware.go
@@ -0,0 +1,165 @@
+package middleware
+
+import (
+	"context"
+	"strings"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	service "git.perx.ru/perxis/perxis-go/pkg/clients"
+)
+
+func makeKey(ss ...string) string {
+	return strings.Join(ss, "-")
+}
+
+func CachingMiddleware(cache *cache.Cache) Middleware {
+	return func(next service.Clients) service.Clients {
+		return &cachingMiddleware{
+			cache: cache,
+			next:  next,
+		}
+	}
+}
+
+type cachingMiddleware struct {
+	cache *cache.Cache
+	next  service.Clients
+}
+
+func (m cachingMiddleware) Create(ctx context.Context, client *service.Client) (cl *service.Client, err error) {
+
+	cl, err = m.next.Create(ctx, client)
+	if err == nil {
+		m.cache.Remove(cl.SpaceID)
+	}
+	return cl, err
+}
+
+func (m cachingMiddleware) Get(ctx context.Context, spaceId string, id string) (cl *service.Client, err error) {
+
+	key := makeKey(spaceId, id)
+	value, e := m.cache.Get(key)
+	if e == nil {
+		return value.(*service.Client), err
+	}
+	cl, err = m.next.Get(ctx, spaceId, id)
+	if err == nil {
+		m.cache.Set(key, cl)
+		for _, key := range keysFromIdentities(spaceId, cl) {
+			m.cache.Set(key, cl)
+		}
+	}
+	return cl, err
+}
+
+func (m cachingMiddleware) GetBy(ctx context.Context, spaceId string, params *service.GetByParams) (cl *service.Client, err error) {
+	if params == nil {
+		return m.next.GetBy(ctx, spaceId, params)
+	}
+
+	key := getIdentKey(spaceId, params)
+	value, e := m.cache.Get(key)
+	if e == nil {
+		return value.(*service.Client), err
+	}
+	cl, err = m.next.GetBy(ctx, spaceId, params)
+	if err == nil {
+		m.cache.Set(makeKey(spaceId, cl.ID), cl)
+		for _, key := range keysFromIdentities(spaceId, cl) {
+			m.cache.Set(key, cl)
+		}
+	}
+	return cl, err
+}
+
+func (m cachingMiddleware) List(ctx context.Context, spaceId string) (clients []*service.Client, err error) {
+
+	value, e := m.cache.Get(spaceId)
+	if e == nil {
+		return value.([]*service.Client), err
+	}
+	clients, err = m.next.List(ctx, spaceId)
+	if err == nil {
+		m.cache.Set(spaceId, clients)
+	}
+	return clients, err
+}
+
+func (m cachingMiddleware) Update(ctx context.Context, client *service.Client) (err error) {
+
+	err = m.next.Update(ctx, client)
+
+	if err == nil {
+		m.cache.Remove(client.SpaceID)
+		value, e := m.cache.Get(makeKey(client.SpaceID, client.ID))
+		if e == nil {
+			client := value.(*service.Client)
+			m.cache.Remove(makeKey(client.SpaceID, client.ID))
+			for _, key := range keysFromIdentities(client.SpaceID, client) {
+				m.cache.Remove(key)
+			}
+		}
+	}
+	return err
+}
+
+func (m cachingMiddleware) Delete(ctx context.Context, spaceId string, id string) (err error) {
+
+	err = m.next.Delete(ctx, spaceId, id)
+	if err == nil {
+		value, e := m.cache.Get(makeKey(spaceId, id))
+		if e == nil {
+			client := value.(*service.Client)
+			m.cache.Remove(makeKey(client.SpaceID, client.ID))
+			for _, key := range keysFromIdentities(client.SpaceID, client) {
+				m.cache.Remove(key)
+			}
+		}
+		m.cache.Remove(spaceId)
+	}
+	return err
+}
+
+func (m cachingMiddleware) Enable(ctx context.Context, spaceId string, id string, enable bool) (err error) {
+
+	err = m.next.Enable(ctx, spaceId, id, enable)
+	if err == nil {
+		value, e := m.cache.Get(makeKey(spaceId, id))
+		if e == nil {
+			client := value.(*service.Client)
+			m.cache.Remove(makeKey(client.SpaceID, client.ID))
+			for _, key := range keysFromIdentities(client.SpaceID, client) {
+				m.cache.Remove(key)
+			}
+		}
+		m.cache.Remove(spaceId)
+	}
+	return err
+}
+
+func keysFromIdentities(spaceID string, client *service.Client) []string {
+	res := make([]string, 0)
+	if client.APIKey != nil && client.APIKey.Key != "" {
+		res = append(res, makeKey(spaceID, "api-key", client.APIKey.Key))
+	}
+	if client.TLS != nil && client.TLS.Subject != "" {
+		res = append(res, makeKey(spaceID, "tls", client.TLS.Subject))
+	}
+	if client.OAuth != nil && client.OAuth.ClientID != "" {
+		res = append(res, makeKey(spaceID, "oauth", client.OAuth.ClientID))
+	}
+	return res
+}
+
+func getIdentKey(spaceID string, params *service.GetByParams) string {
+	switch {
+	case params.APIKey != "":
+		return makeKey(spaceID, "api-key", params.APIKey)
+	case params.TLSSubject != "":
+		return makeKey(spaceID, "tls", params.TLSSubject)
+	case params.OAuthClientID != "":
+		return makeKey(spaceID, "oauth", params.OAuthClientID)
+	default:
+		return ""
+	}
+}
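+
+// Illustrative key shapes produced by the helpers above (values are made up):
+//
+//	makeKey("sp1", "c1")                 // "sp1-c1"             (by client ID)
+//	makeKey("sp1", "api-key", "secret")  // "sp1-api-key-secret" (APIKey identity)
+//	makeKey("sp1", "tls", "CN=backend")  // "sp1-tls-CN=backend" (TLS identity)
+//	makeKey("sp1", "oauth", "svc")       // "sp1-oauth-svc"      (OAuth identity)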
diff --git a/pkg/clients/middleware/caching_middleware_test.go b/pkg/clients/middleware/caching_middleware_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..b842e1183a5425a4b3aec8d59b0720e5627767ed
--- /dev/null
+++ b/pkg/clients/middleware/caching_middleware_test.go
@@ -0,0 +1,382 @@
+package middleware
+
+import (
+	"context"
+	"testing"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	"git.perx.ru/perxis/perxis-go/pkg/clients"
+	csmocks "git.perx.ru/perxis/perxis-go/pkg/clients/mocks"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/mock"
+	"github.com/stretchr/testify/require"
+)
+
+func TestClientsCache(t *testing.T) {
+
+	const (
+		cltID    = "cltID"
+		spaceID  = "spaceID"
+		clientID = "123@client"
+		size     = 5
+		ttl      = 20 * time.Millisecond
+	)
+
+	errNotFound := errors.NotFound(errors.New("not found"))
+
+	ctx := context.Background()
+
+	t.Run("Get from cache", func(t *testing.T) {
+		cs := &csmocks.Clients{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl))(cs)
+
+		cs.On("Get", mock.Anything, spaceID, cltID).Return(&clients.Client{ID: cltID, SpaceID: spaceID, Name: "client_1", OAuth: &clients.OAuth{ClientID: clientID, AuthID: "authID"}}, nil).Once()
+
+		v1, err := svc.Get(ctx, spaceID, cltID)
+		require.NoError(t, err)
+
+		v2, err := svc.Get(ctx, spaceID, cltID)
+		require.NoError(t, err)
+		assert.Same(t, v1, v2, "Ожидается получение объекта из кэша, после повторного запроса.")
+
+		v3, err := svc.GetBy(ctx, spaceID, &clients.GetByParams{OAuthClientID: clientID})
+		require.NoError(t, err)
+		assert.Same(t, v2, v3, "Ожидается получение объекта из кэша при запросе по ClientID.")
+
+		cs.AssertExpectations(t)
+	})
+
+	t.Run("GetBy from cache", func(t *testing.T) {
+		cs := &csmocks.Clients{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl))(cs)
+
+		cs.On("GetBy", mock.Anything, spaceID, &clients.GetByParams{OAuthClientID: clientID}).Return(&clients.Client{ID: cltID, SpaceID: spaceID, Name: "client_1", OAuth: &clients.OAuth{ClientID: clientID, AuthID: "authID"}}, nil).Once()
+
+		v1, err := svc.GetBy(ctx, spaceID, &clients.GetByParams{OAuthClientID: clientID})
+		require.NoError(t, err)
+
+		v2, err := svc.GetBy(ctx, spaceID, &clients.GetByParams{OAuthClientID: clientID})
+		require.NoError(t, err)
+		assert.Same(t, v1, v2, "Ожидается получение объекта из кэша, после повторного запроса.")
+
+		v3, err := svc.Get(ctx, spaceID, cltID)
+		require.NoError(t, err)
+		assert.Same(t, v2, v3, "Ожидается получение объекта из кэша, после запроса Get.")
+
+		cs.AssertExpectations(t)
+	})
+
+	t.Run("List", func(t *testing.T) {
+		cs := &csmocks.Clients{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl))(cs)
+
+		cs.On("List", mock.Anything, spaceID).Return([]*clients.Client{{ID: cltID, SpaceID: spaceID, Name: "client_1", OAuth: &clients.OAuth{ClientID: clientID, AuthID: "authID"}}}, nil).Once()
+
+		vl1, err := svc.List(ctx, spaceID)
+		require.NoError(t, err)
+
+		vl2, err := svc.List(ctx, spaceID)
+		require.NoError(t, err)
+		assert.Same(t, vl1[0], vl2[0], "Ожидается получение объектов из кэша, после повторного запроса.")
+
+		cs.AssertExpectations(t)
+	})
+
+	t.Run("Invalidate cache", func(t *testing.T) {
+
+		t.Run("After Update", func(t *testing.T) {
+			cs := &csmocks.Clients{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(cs)
+
+			cs.On("Get", mock.Anything, spaceID, cltID).Return(&clients.Client{ID: cltID, SpaceID: spaceID, Name: "client_1", OAuth: &clients.OAuth{ClientID: clientID, AuthID: "authID"}}, nil).Once()
+			cs.On("List", mock.Anything, spaceID).Return([]*clients.Client{{ID: cltID, SpaceID: spaceID, Name: "client_1", OAuth: &clients.OAuth{ClientID: clientID, AuthID: "authID"}}}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, cltID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, cltID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кэша.")
+
+			v3, err := svc.GetBy(ctx, spaceID, &clients.GetByParams{OAuthClientID: clientID})
+			require.NoError(t, err)
+			assert.Same(t, v2, v3, "Ожидается получение объекта из кэша по ClientID.")
+
+			vl1, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Len(t, vl2, 1)
+			assert.Same(t, vl1[0], vl2[0], "Ожидается получение объектов из кэша, после повторного запроса.")
+
+			cs.On("Update", mock.Anything, mock.Anything).Return(nil).Once()
+
+			err = svc.Update(ctx, &clients.Client{ID: cltID, SpaceID: spaceID, Name: "client_2", OAuth: &clients.OAuth{ClientID: clientID, AuthID: "authID"}})
+			require.NoError(t, err)
+
+			cs.On("Get", mock.Anything, spaceID, cltID).Return(&clients.Client{ID: cltID, SpaceID: spaceID, Name: "client_2", OAuth: &clients.OAuth{ClientID: clientID, AuthID: "authID"}}, nil).Once()
+			cs.On("List", mock.Anything, spaceID).Return([]*clients.Client{{ID: cltID, SpaceID: spaceID, Name: "client_2", OAuth: &clients.OAuth{ClientID: clientID, AuthID: "authID"}}}, nil).Once()
+
+			vl3, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.NotSame(t, vl2[0], vl3[0], "Ожидается получение объектов из кэша, после повторного запроса.")
+
+			v4, err := svc.Get(ctx, spaceID, cltID)
+			require.NoError(t, err)
+			assert.NotSame(t, v2, v4, "Ожидает что после обновления объект был удален из кэша и будет запрошен заново из сервиса.")
+
+			v5, err := svc.GetBy(ctx, spaceID, &clients.GetByParams{OAuthClientID: clientID})
+			require.NoError(t, err)
+			assert.NotSame(t, v3, v5)
+			assert.Same(t, v4, v5, "Ожидается что после обновления объект был удален из кеша и после запроса Get в кеш попал объект запрошенный заново из сервиса.")
+
+			cs.AssertExpectations(t)
+		})
+
+		t.Run("After Update(List)", func(t *testing.T) {
+			cs := &csmocks.Clients{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(cs)
+
+			cs.On("List", mock.Anything, spaceID).Return([]*clients.Client{{ID: cltID, SpaceID: spaceID, Name: "client_1", OAuth: &clients.OAuth{ClientID: clientID, AuthID: "authID"}}}, nil).Once()
+
+			vl1, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Len(t, vl2, 1)
+			assert.Same(t, vl1[0], vl2[0], "Ожидается получение объектов из кэша, после повторного запроса.")
+
+			cs.On("Update", mock.Anything, mock.Anything).Return(nil).Once()
+
+			err = svc.Update(ctx, &clients.Client{ID: cltID, SpaceID: spaceID, Name: "client_2", OAuth: &clients.OAuth{ClientID: clientID, AuthID: "authID"}})
+			require.NoError(t, err)
+
+			cs.On("List", mock.Anything, spaceID).Return([]*clients.Client{{ID: cltID, SpaceID: spaceID, Name: "client_2", OAuth: &clients.OAuth{ClientID: clientID, AuthID: "authID"}}}, nil).Once()
+
+			vl3, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.NotSame(t, vl2[0], vl3[0], "Ожидается получение объектов из кэша, после повторного запроса.")
+
+			cs.AssertExpectations(t)
+		})
+
+		t.Run("After Delete", func(t *testing.T) {
+			cs := &csmocks.Clients{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(cs)
+
+			cs.On("Get", mock.Anything, spaceID, cltID).Return(&clients.Client{ID: cltID, SpaceID: spaceID, Name: "client_1", OAuth: &clients.OAuth{ClientID: clientID, AuthID: "authID"}}, nil).Once()
+			cs.On("List", mock.Anything, spaceID).Return([]*clients.Client{{ID: cltID, SpaceID: spaceID, Name: "client_1", OAuth: &clients.OAuth{ClientID: clientID, AuthID: "authID"}}}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, cltID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, cltID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кэша.")
+
+			v3, err := svc.GetBy(ctx, spaceID, &clients.GetByParams{OAuthClientID: clientID})
+			require.NoError(t, err)
+			assert.Same(t, v2, v3, "Ожидается получение объекта из кэша по ClientID.")
+
+			vl1, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, vl1[0], vl2[0], "Ожидается получение объектов из кэша, после повторного запроса.")
+
+			cs.On("Delete", mock.Anything, spaceID, cltID).Return(nil).Once()
+
+			err = svc.Delete(ctx, spaceID, cltID)
+			require.NoError(t, err)
+
+			cs.On("Get", mock.Anything, spaceID, cltID).Return(nil, errNotFound).Once()
+			cs.On("GetBy", mock.Anything, spaceID, &clients.GetByParams{OAuthClientID: clientID}).Return(nil, errNotFound).Once()
+			cs.On("List", mock.Anything, spaceID).Return(nil, errNotFound).Once()
+
+			_, err = svc.Get(ctx, spaceID, cltID)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается что после удаление из хранилища объект был удален из кэша и получена ошибка из сервиса.")
+
+			_, err = svc.GetBy(ctx, spaceID, &clients.GetByParams{OAuthClientID: clientID})
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается что после удаление из хранилища объект был удален из кэша и получена ошибка из сервиса.")
+
+			_, err = svc.List(ctx, spaceID)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается что после удаление из хранилища объекты были удалены из кэша.")
+
+			cs.AssertExpectations(t)
+		})
+
+		t.Run("After Delete(List)", func(t *testing.T) {
+			cs := &csmocks.Clients{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(cs)
+
+			cs.On("List", mock.Anything, spaceID).Return([]*clients.Client{{ID: cltID, SpaceID: spaceID, Name: "client_1", OAuth: &clients.OAuth{ClientID: clientID, AuthID: "authID"}}}, nil).Once()
+
+			vl1, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, vl1[0], vl2[0], "Ожидается получение объектов из кэша, после повторного запроса.")
+
+			cs.On("Delete", mock.Anything, spaceID, cltID).Return(nil).Once()
+
+			err = svc.Delete(ctx, spaceID, cltID)
+			require.NoError(t, err)
+
+			cs.On("List", mock.Anything, spaceID).Return(nil, errNotFound).Once()
+
+			_, err = svc.List(ctx, spaceID)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается что после удаление из хранилища объекты были удалены из кэша.")
+
+			cs.AssertExpectations(t)
+		})
+
+		t.Run("After Create", func(t *testing.T) {
+			cs := &csmocks.Clients{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(cs)
+
+			cs.On("List", mock.Anything, spaceID).Return([]*clients.Client{{ID: cltID, SpaceID: spaceID, Name: "client_1"}}, nil).Once()
+
+			vl1, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, vl1[0], vl2[0], "Ожидается получение объектов из кэша, после повторного запроса.")
+			assert.Len(t, vl2, 1, "Ожидается получение объектов из кэша.")
+
+			cs.On("Create", mock.Anything, mock.Anything).Return(&clients.Client{ID: "cltID2", SpaceID: spaceID, Name: "client_2"}, nil).Once()
+
+			_, err = svc.Create(ctx, &clients.Client{ID: "cltID2", SpaceID: spaceID, Name: "client_2"})
+			require.NoError(t, err)
+
+			cs.On("List", mock.Anything, spaceID).Return([]*clients.Client{{ID: cltID, SpaceID: spaceID, Name: "client_1"}, {ID: "cltID2", SpaceID: spaceID, Name: "client_2"}}, nil).Once()
+
+			vl3, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Len(t, vl3, 2, "Ожидается что после создания нового объекта кеш будет очищен и объекты запрошены заново из сервиса.")
+
+			cs.AssertExpectations(t)
+		})
+
+		t.Run("After Enable", func(t *testing.T) {
+			cs := &csmocks.Clients{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(cs)
+
+			tr := true
+			cs.On("Get", mock.Anything, spaceID, cltID).Return(&clients.Client{ID: cltID, SpaceID: spaceID, OAuth: &clients.OAuth{ClientID: clientID, AuthID: "authID"}, Disabled: &tr}, nil).Once()
+			cs.On("List", mock.Anything, spaceID).Return([]*clients.Client{{ID: cltID, SpaceID: spaceID, OAuth: &clients.OAuth{ClientID: clientID, AuthID: "authID"}, Disabled: &tr}}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, cltID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, cltID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кэша.")
+
+			v3, err := svc.GetBy(ctx, spaceID, &clients.GetByParams{OAuthClientID: clientID})
+			require.NoError(t, err)
+			assert.Same(t, v2, v3, "Ожидается получение объекта из кэша по ClientID.")
+
+			vl1, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, vl1[0], vl2[0], "Ожидается получение объектов из кэша, после повторного запроса.")
+
+			cs.On("Enable", mock.Anything, spaceID, cltID, tr).Return(nil).Once()
+
+			err = svc.Enable(ctx, spaceID, cltID, tr)
+			require.NoError(t, err)
+
+			fl := false
+			cs.On("Get", mock.Anything, spaceID, cltID).Return(&clients.Client{ID: cltID, SpaceID: spaceID, Name: "client_1", OAuth: &clients.OAuth{ClientID: clientID, AuthID: "authID"}, Disabled: &fl}, nil).Once()
+			cs.On("List", mock.Anything, spaceID).Return([]*clients.Client{{ID: cltID, SpaceID: spaceID, OAuth: &clients.OAuth{ClientID: clientID, AuthID: "authID"}, Disabled: &fl}}, nil).Once()
+
+			v4, err := svc.Get(ctx, spaceID, cltID)
+			require.NoError(t, err)
+			assert.NotSame(t, v2, v4, "Ожидается что после активации объект был удален из кэша и запрошен у сервиса.")
+
+			v5, err := svc.GetBy(ctx, spaceID, &clients.GetByParams{OAuthClientID: clientID})
+			require.NoError(t, err)
+			assert.NotSame(t, v3, v5, "Ожидается что после активации объект был удален из кеша и после запроса Get в кеш попал объект запрошенный заново из сервиса.")
+
+			vl3, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.NotSame(t, vl2[0], vl3[0], "Ожидается что после активации объекта, кеш будет очищен и объекты будут запрошены заново из сервиса.")
+
+			cs.AssertExpectations(t)
+		})
+
+		t.Run("After Enable(List)", func(t *testing.T) {
+			cs := &csmocks.Clients{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(cs)
+
+			tr := true
+			cs.On("List", mock.Anything, spaceID).Return([]*clients.Client{{ID: cltID, SpaceID: spaceID, OAuth: &clients.OAuth{ClientID: clientID, AuthID: "authID"}, Disabled: &tr}}, nil).Once()
+
+			vl1, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, vl1[0], vl2[0], "Ожидается получение объектов из кэша, после повторного запроса.")
+
+			cs.On("Enable", mock.Anything, spaceID, cltID, tr).Return(nil).Once()
+
+			err = svc.Enable(ctx, spaceID, cltID, tr)
+			require.NoError(t, err)
+
+			fl := false
+			cs.On("List", mock.Anything, spaceID).Return([]*clients.Client{{ID: cltID, SpaceID: spaceID, OAuth: &clients.OAuth{ClientID: clientID, AuthID: "authID"}, Disabled: &fl}}, nil).Once()
+
+			vl3, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.NotSame(t, vl2[0], vl3[0], "Ожидается что после активации объекта, кеш будет очищен и объекты будут запрошены заново из сервиса.")
+
+			cs.AssertExpectations(t)
+		})
+
+		t.Run("After TTL expired", func(t *testing.T) {
+			cs := &csmocks.Clients{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(cs)
+
+			cs.On("Get", mock.Anything, spaceID, cltID).Return(&clients.Client{ID: cltID, SpaceID: spaceID, Name: "client_1", OAuth: &clients.OAuth{ClientID: clientID}}, nil).Once()
+			cs.On("Get", mock.Anything, spaceID, cltID).Return(&clients.Client{ID: cltID, SpaceID: spaceID, Name: "client_1", OAuth: &clients.OAuth{ClientID: clientID}}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, cltID)
+			require.NoError(t, err)
+			v2, err := svc.Get(ctx, spaceID, cltID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кэша после повторного запроса.")
+
+			time.Sleep(2 * ttl)
+
+			v3, err := svc.Get(ctx, spaceID, cltID)
+			require.NoError(t, err)
+			assert.NotSame(t, v2, v3, "Ожидается что элемент был удален из кэша по истечению ttl и будет запрошен заново из сервиса.")
+
+			cs.AssertExpectations(t)
+		})
+	})
+}
diff --git a/pkg/clients/middleware/error_logging_middleware.go b/pkg/clients/middleware/error_logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..0b96827a0c620ad1ca1aa5aaf6b93a821af6279d
--- /dev/null
+++ b/pkg/clients/middleware/error_logging_middleware.go
@@ -0,0 +1,100 @@
+package middleware
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/error_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/clients -i Clients -t ../../../assets/templates/middleware/error_log -o error_logging_middleware.go -l ""
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/clients"
+	"go.uber.org/zap"
+)
+
+// errorLoggingMiddleware implements clients.Clients that is instrumented with logging
+type errorLoggingMiddleware struct {
+	logger *zap.Logger
+	next   clients.Clients
+}
+
+// ErrorLoggingMiddleware instruments an implementation of the clients.Clients with simple logging
+func ErrorLoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next clients.Clients) clients.Clients {
+		return &errorLoggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *errorLoggingMiddleware) Create(ctx context.Context, client *clients.Client) (created *clients.Client, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Create(ctx, client)
+}
+
+func (m *errorLoggingMiddleware) Delete(ctx context.Context, spaceId string, id string) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Delete(ctx, spaceId, id)
+}
+
+func (m *errorLoggingMiddleware) Enable(ctx context.Context, spaceId string, id string, enable bool) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Enable(ctx, spaceId, id, enable)
+}
+
+func (m *errorLoggingMiddleware) Get(ctx context.Context, spaceId string, id string) (client *clients.Client, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Get(ctx, spaceId, id)
+}
+
+func (m *errorLoggingMiddleware) GetBy(ctx context.Context, spaceId string, params *clients.GetByParams) (client *clients.Client, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.GetBy(ctx, spaceId, params)
+}
+
+func (m *errorLoggingMiddleware) List(ctx context.Context, spaceId string) (clients []*clients.Client, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.List(ctx, spaceId)
+}
+
+func (m *errorLoggingMiddleware) Update(ctx context.Context, client *clients.Client) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Update(ctx, client)
+}
diff --git a/pkg/clients/middleware/logging_middleware.go b/pkg/clients/middleware/logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..ef3ea5947a637bfa9caffa3af64f1f7cd2b8e019
--- /dev/null
+++ b/pkg/clients/middleware/logging_middleware.go
@@ -0,0 +1,288 @@
+package middleware
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/access_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/clients -i Clients -t ../../../assets/templates/middleware/access_log -o logging_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/auth"
+	"git.perx.ru/perxis/perxis-go/pkg/clients"
+	"go.uber.org/zap"
+	"go.uber.org/zap/zapcore"
+)
+
+// loggingMiddleware implements clients.Clients that is instrumented with logging
+type loggingMiddleware struct {
+	logger *zap.Logger
+	next   clients.Clients
+}
+
+// LoggingMiddleware instruments an implementation of the clients.Clients with simple logging
+func LoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next clients.Clients) clients.Clients {
+		return &loggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *loggingMiddleware) Create(ctx context.Context, client *clients.Client) (created *clients.Client, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":    ctx,
+		"client": client} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Create.Request", fields...)
+
+	created, err = m.next.Create(ctx, client)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"created": created,
+		"err":     err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Create.Response", fields...)
+
+	return created, err
+}
+
+func (m *loggingMiddleware) Delete(ctx context.Context, spaceId string, id string) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId,
+		"id":      id} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Delete.Request", fields...)
+
+	err = m.next.Delete(ctx, spaceId, id)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Delete.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Enable(ctx context.Context, spaceId string, id string, enable bool) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId,
+		"id":      id,
+		"enable":  enable} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Enable.Request", fields...)
+
+	err = m.next.Enable(ctx, spaceId, id, enable)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Enable.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Get(ctx context.Context, spaceId string, id string) (client *clients.Client, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId,
+		"id":      id} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Request", fields...)
+
+	client, err = m.next.Get(ctx, spaceId, id)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"client": client,
+		"err":    err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Response", fields...)
+
+	return client, err
+}
+
+func (m *loggingMiddleware) GetBy(ctx context.Context, spaceId string, params *clients.GetByParams) (client *clients.Client, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId,
+		"params":  params} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("GetBy.Request", fields...)
+
+	client, err = m.next.GetBy(ctx, spaceId, params)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"client": client,
+		"err":    err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("GetBy.Response", fields...)
+
+	return client, err
+}
+
+func (m *loggingMiddleware) List(ctx context.Context, spaceId string) (clients []*clients.Client, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("List.Request", fields...)
+
+	clients, err = m.next.List(ctx, spaceId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"clients": clients,
+		"err":     err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("List.Response", fields...)
+
+	return clients, err
+}
+
+func (m *loggingMiddleware) Update(ctx context.Context, client *clients.Client) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":    ctx,
+		"client": client} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Update.Request", fields...)
+
+	err = m.next.Update(ctx, client)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Update.Response", fields...)
+
+	return err
+}
diff --git a/pkg/clients/middleware/middleware.go b/pkg/clients/middleware/middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..a49c9b3ebb0c041c23178d457ea5ddf2d2357d91
--- /dev/null
+++ b/pkg/clients/middleware/middleware.go
@@ -0,0 +1,28 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/middleware
+// gowrap: http://github.com/hexdigest/gowrap
+
+package middleware
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/clients -i Clients -t ../../../assets/templates/middleware/middleware -o middleware.go -l ""
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/clients"
+	"go.uber.org/zap"
+)
+
+type Middleware func(clients.Clients) clients.Clients
+
+func WithLog(s clients.Clients, logger *zap.Logger, log_access bool) clients.Clients {
+	if logger == nil {
+		logger = zap.NewNop()
+	}
+
+	logger = logger.Named("Clients")
+	s = ErrorLoggingMiddleware(logger)(s)
+	if log_access {
+		s = LoggingMiddleware(logger)(s)
+	}
+	s = RecoveringMiddleware(logger)(s)
+	return s
+}
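+
+// Composition sketch (illustrative note, not produced by the generator): calls made
+// through the returned service pass through recovery, then access logging (only when
+// log_access is true), then error logging, before reaching the wrapped implementation:
+//
+//	svc = WithLog(svc, logger, true)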
diff --git a/pkg/clients/middleware/recovering_middleware.go b/pkg/clients/middleware/recovering_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..2406ca994112a148e6a204589ee1844a782922fc
--- /dev/null
+++ b/pkg/clients/middleware/recovering_middleware.go
@@ -0,0 +1,115 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/recovery
+// gowrap: http://github.com/hexdigest/gowrap
+
+package middleware
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/clients -i Clients -t ../../../assets/templates/middleware/recovery -o recovering_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+
+	"git.perx.ru/perxis/perxis-go/pkg/clients"
+	"go.uber.org/zap"
+)
+
+// recoveringMiddleware implements clients.Clients that is instrumented with logging
+type recoveringMiddleware struct {
+	logger *zap.Logger
+	next   clients.Clients
+}
+
+// RecoveringMiddleware instruments an implementation of the clients.Clients with simple logging
+func RecoveringMiddleware(logger *zap.Logger) Middleware {
+	return func(next clients.Clients) clients.Clients {
+		return &recoveringMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *recoveringMiddleware) Create(ctx context.Context, client *clients.Client) (created *clients.Client, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Create(ctx, client)
+}
+
+func (m *recoveringMiddleware) Delete(ctx context.Context, spaceId string, id string) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Delete(ctx, spaceId, id)
+}
+
+func (m *recoveringMiddleware) Enable(ctx context.Context, spaceId string, id string, enable bool) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Enable(ctx, spaceId, id, enable)
+}
+
+func (m *recoveringMiddleware) Get(ctx context.Context, spaceId string, id string) (client *clients.Client, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Get(ctx, spaceId, id)
+}
+
+func (m *recoveringMiddleware) GetBy(ctx context.Context, spaceId string, params *clients.GetByParams) (client *clients.Client, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.GetBy(ctx, spaceId, params)
+}
+
+func (m *recoveringMiddleware) List(ctx context.Context, spaceId string) (clients []*clients.Client, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.List(ctx, spaceId)
+}
+
+func (m *recoveringMiddleware) Update(ctx context.Context, client *clients.Client) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Update(ctx, client)
+}
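+
+// Behavior sketch (illustrative note; panickyClients is a hypothetical implementation):
+// a panic in the wrapped service is logged and converted into an ordinary error, e.g.
+//
+//	svc := RecoveringMiddleware(logger)(panickyClients{})
+//	_, err := svc.Get(ctx, "space", "id") // err carries the recovered panic value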
diff --git a/pkg/clients/mocks/Clients.go b/pkg/clients/mocks/Clients.go
new file mode 100644
index 0000000000000000000000000000000000000000..bfeb7e946a1fe50d044479ba785aac15f484ac31
--- /dev/null
+++ b/pkg/clients/mocks/Clients.go
@@ -0,0 +1,149 @@
+// Code generated by mockery v2.7.4. DO NOT EDIT.
+
+package mocks
+
+import (
+	context "context"
+
+	clients "git.perx.ru/perxis/perxis-go/pkg/clients"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// Clients is an autogenerated mock type for the Clients type
+type Clients struct {
+	mock.Mock
+}
+
+// Create provides a mock function with given fields: ctx, client
+func (_m *Clients) Create(ctx context.Context, client *clients.Client) (*clients.Client, error) {
+	ret := _m.Called(ctx, client)
+
+	var r0 *clients.Client
+	if rf, ok := ret.Get(0).(func(context.Context, *clients.Client) *clients.Client); ok {
+		r0 = rf(ctx, client)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*clients.Client)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, *clients.Client) error); ok {
+		r1 = rf(ctx, client)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Delete provides a mock function with given fields: ctx, spaceId, id
+func (_m *Clients) Delete(ctx context.Context, spaceId string, id string) error {
+	ret := _m.Called(ctx, spaceId, id)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok {
+		r0 = rf(ctx, spaceId, id)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Enable provides a mock function with given fields: ctx, spaceId, id, enable
+func (_m *Clients) Enable(ctx context.Context, spaceId string, id string, enable bool) error {
+	ret := _m.Called(ctx, spaceId, id, enable)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, bool) error); ok {
+		r0 = rf(ctx, spaceId, id, enable)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Get provides a mock function with given fields: ctx, spaceId, id
+func (_m *Clients) Get(ctx context.Context, spaceId string, id string) (*clients.Client, error) {
+	ret := _m.Called(ctx, spaceId, id)
+
+	var r0 *clients.Client
+	if rf, ok := ret.Get(0).(func(context.Context, string, string) *clients.Client); ok {
+		r0 = rf(ctx, spaceId, id)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*clients.Client)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok {
+		r1 = rf(ctx, spaceId, id)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// GetBy provides a mock function with given fields: ctx, spaceId, params
+func (_m *Clients) GetBy(ctx context.Context, spaceId string, params *clients.GetByParams) (*clients.Client, error) {
+	ret := _m.Called(ctx, spaceId, params)
+
+	var r0 *clients.Client
+	if rf, ok := ret.Get(0).(func(context.Context, string, *clients.GetByParams) *clients.Client); ok {
+		r0 = rf(ctx, spaceId, params)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*clients.Client)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, *clients.GetByParams) error); ok {
+		r1 = rf(ctx, spaceId, params)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// List provides a mock function with given fields: ctx, spaceId
+func (_m *Clients) List(ctx context.Context, spaceId string) ([]*clients.Client, error) {
+	ret := _m.Called(ctx, spaceId)
+
+	var r0 []*clients.Client
+	if rf, ok := ret.Get(0).(func(context.Context, string) []*clients.Client); ok {
+		r0 = rf(ctx, spaceId)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*clients.Client)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string) error); ok {
+		r1 = rf(ctx, spaceId)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Update provides a mock function with given fields: ctx, client
+func (_m *Clients) Update(ctx context.Context, client *clients.Client) error {
+	ret := _m.Called(ctx, client)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *clients.Client) error); ok {
+		r0 = rf(ctx, client)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
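+
+// Test usage sketch (illustrative note; the concrete arguments and return values are
+// assumptions):
+//
+//	m := &mocks.Clients{}
+//	m.On("Get", mock.Anything, "space", "id").Return(&clients.Client{ID: "id"}, nil).Once()
+//	got, err := m.Get(ctx, "space", "id")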
diff --git a/pkg/clients/service.go b/pkg/clients/service.go
new file mode 100644
index 0000000000000000000000000000000000000000..823de82bdbe720e2a9ff24f069cb94458bf1cf25
--- /dev/null
+++ b/pkg/clients/service.go
@@ -0,0 +1,37 @@
+package clients
+
+import (
+	"context"
+)
+
+type GetByParams struct {
+	OAuthClientID string `json:"oauth_client_id,omitempty"`
+	APIKey        string `json:"api_key,omitempty"`
+	TLSSubject    string `json:"tls_subject,omitempty"`
+}
+
+// @microgen grpc, recovering, middleware
+// @protobuf git.perx.ru/perxis/perxis-go/proto/clients
+// @grpc-addr content.clients.Clients
+type Clients interface {
+	// Create creates a client (application) for working with the API
+	Create(ctx context.Context, client *Client) (created *Client, err error)
+
+	// Get returns a client by id
+	Get(ctx context.Context, spaceId, id string) (client *Client, err error)
+
+	// GetBy returns a client by its authorization-system identifier
+	GetBy(ctx context.Context, spaceId string, params *GetByParams) (client *Client, err error)
+
+	// List returns the list of clients created in the space
+	List(ctx context.Context, spaceId string) (clients []*Client, err error)
+
+	// Update updates the client's parameters
+	Update(ctx context.Context, client *Client) (err error)
+
+	// Delete removes the specified client from the space
+	Delete(ctx context.Context, spaceId, id string) (err error)
+
+	// Enable activates or deactivates the client; a disabled client cannot access the platform API
+	Enable(ctx context.Context, spaceId, id string, enable bool) (err error)
+}
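+
+// Usage sketch (illustrative note; svc, spaceID and apiKey are assumptions — any
+// implementation of Clients will do, for example the gRPC client from
+// pkg/clients/transport/grpc):
+//
+//	c, err := svc.GetBy(ctx, spaceID, &GetByParams{APIKey: apiKey})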
diff --git a/pkg/clients/transport/client.microgen.go b/pkg/clients/transport/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..633855af4d1f7fe74260ec7b4ece23db6063ebfb
--- /dev/null
+++ b/pkg/clients/transport/client.microgen.go
@@ -0,0 +1,108 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+	"errors"
+	clients "git.perx.ru/perxis/perxis-go/pkg/clients"
+	codes "google.golang.org/grpc/codes"
+	status "google.golang.org/grpc/status"
+)
+
+func (set EndpointsSet) Create(arg0 context.Context, arg1 *clients.Client) (res0 *clients.Client, res1 error) {
+	request := CreateRequest{Client: arg1}
+	response, res1 := set.CreateEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*CreateResponse).Created, res1
+}
+
+func (set EndpointsSet) Get(arg0 context.Context, arg1 string, arg2 string) (res0 *clients.Client, res1 error) {
+	request := GetRequest{
+		Id:      arg2,
+		SpaceId: arg1,
+	}
+	response, res1 := set.GetEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*GetResponse).Client, res1
+}
+
+func (set EndpointsSet) GetBy(arg0 context.Context, arg1 string, arg2 *clients.GetByParams) (res0 *clients.Client, res1 error) {
+	request := GetByRequest{
+		Config:  arg2,
+		SpaceId: arg1,
+	}
+	response, res1 := set.GetByEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*GetByResponse).Client, res1
+}
+
+func (set EndpointsSet) List(arg0 context.Context, arg1 string) (res0 []*clients.Client, res1 error) {
+	request := ListRequest{SpaceId: arg1}
+	response, res1 := set.ListEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*ListResponse).Clients, res1
+}
+
+func (set EndpointsSet) Update(arg0 context.Context, arg1 *clients.Client) (res0 error) {
+	request := UpdateRequest{Client: arg1}
+	_, res0 = set.UpdateEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Delete(arg0 context.Context, arg1 string, arg2 string) (res0 error) {
+	request := DeleteRequest{
+		Id:      arg2,
+		SpaceId: arg1,
+	}
+	_, res0 = set.DeleteEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Enable(arg0 context.Context, arg1 string, arg2 string, arg3 bool) (res0 error) {
+	request := EnableRequest{
+		Enable:  arg3,
+		Id:      arg2,
+		SpaceId: arg1,
+	}
+	_, res0 = set.EnableEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
diff --git a/pkg/clients/transport/endpoints.microgen.go b/pkg/clients/transport/endpoints.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..bf73c784e68eb95f1dc631184046a4016bcca7a1
--- /dev/null
+++ b/pkg/clients/transport/endpoints.microgen.go
@@ -0,0 +1,16 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import endpoint "github.com/go-kit/kit/endpoint"
+
+// EndpointsSet implements the Clients API and is used for transport purposes.
+type EndpointsSet struct {
+	CreateEndpoint endpoint.Endpoint
+	GetEndpoint    endpoint.Endpoint
+	GetByEndpoint  endpoint.Endpoint
+	ListEndpoint   endpoint.Endpoint
+	UpdateEndpoint endpoint.Endpoint
+	DeleteEndpoint endpoint.Endpoint
+	EnableEndpoint endpoint.Endpoint
+}
diff --git a/pkg/clients/transport/exchanges.microgen.go b/pkg/clients/transport/exchanges.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..2a1a8e65967661b117a20ff62fc6e20837afce2b
--- /dev/null
+++ b/pkg/clients/transport/exchanges.microgen.go
@@ -0,0 +1,58 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import clients "git.perx.ru/perxis/perxis-go/pkg/clients"
+
+type (
+	CreateRequest struct {
+		Client *clients.Client `json:"client"`
+	}
+	CreateResponse struct {
+		Created *clients.Client `json:"created"`
+	}
+
+	GetRequest struct {
+		SpaceId string `json:"space_id"`
+		Id      string `json:"id"`
+	}
+	GetResponse struct {
+		Client *clients.Client `json:"client"`
+	}
+
+	GetByRequest struct {
+		SpaceId string               `json:"space_id"`
+		Config  *clients.GetByParams `json:"config"`
+	}
+	GetByResponse struct {
+		Client *clients.Client `json:"client"`
+	}
+
+	ListRequest struct {
+		SpaceId string `json:"space_id"`
+	}
+	ListResponse struct {
+		Clients []*clients.Client `json:"clients"`
+	}
+
+	UpdateRequest struct {
+		Client *clients.Client `json:"client"`
+	}
+	// Formal exchange type, please do not delete.
+	UpdateResponse struct{}
+
+	DeleteRequest struct {
+		SpaceId string `json:"space_id"`
+		Id      string `json:"id"`
+	}
+	// Formal exchange type, please do not delete.
+	DeleteResponse struct{}
+
+	EnableRequest struct {
+		SpaceId string `json:"space_id"`
+		Id      string `json:"id"`
+		Enable  bool   `json:"enable"`
+	}
+	// Formal exchange type, please do not delete.
+	EnableResponse struct{}
+)
diff --git a/pkg/clients/transport/grpc/client.microgen.go b/pkg/clients/transport/grpc/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..421a0178c29b691c33085761bc1710e3c4cbe4eb
--- /dev/null
+++ b/pkg/clients/transport/grpc/client.microgen.go
@@ -0,0 +1,68 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/clients/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/clients"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	grpc "google.golang.org/grpc"
+)
+
+func NewGRPCClient(conn *grpc.ClientConn, addr string, opts ...grpckit.ClientOption) transport.EndpointsSet {
+	if addr == "" {
+		addr = "content.clients.Clients"
+	}
+	return transport.EndpointsSet{
+		CreateEndpoint: grpckit.NewClient(
+			conn, addr, "Create",
+			_Encode_Create_Request,
+			_Decode_Create_Response,
+			pb.CreateResponse{},
+			opts...,
+		).Endpoint(),
+		DeleteEndpoint: grpckit.NewClient(
+			conn, addr, "Delete",
+			_Encode_Delete_Request,
+			_Decode_Delete_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		EnableEndpoint: grpckit.NewClient(
+			conn, addr, "Enable",
+			_Encode_Enable_Request,
+			_Decode_Enable_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		GetByEndpoint: grpckit.NewClient(
+			conn, addr, "GetBy",
+			_Encode_GetBy_Request,
+			_Decode_GetBy_Response,
+			pb.GetByResponse{},
+			opts...,
+		).Endpoint(),
+		GetEndpoint: grpckit.NewClient(
+			conn, addr, "Get",
+			_Encode_Get_Request,
+			_Decode_Get_Response,
+			pb.GetResponse{},
+			opts...,
+		).Endpoint(),
+		ListEndpoint: grpckit.NewClient(
+			conn, addr, "List",
+			_Encode_List_Request,
+			_Decode_List_Response,
+			pb.ListResponse{},
+			opts...,
+		).Endpoint(),
+		UpdateEndpoint: grpckit.NewClient(
+			conn, addr, "Update",
+			_Encode_Update_Request,
+			_Decode_Update_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+	}
+}
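+
+// Wiring sketch (illustrative note; dialing the connection is assumed): the returned
+// EndpointsSet satisfies clients.Clients, so it can be wrapped with the middleware
+// from pkg/clients/middleware:
+//
+//	set := NewGRPCClient(conn, "")
+//	var svc clients.Clients = middleware.WithLog(set, logger, false)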
diff --git a/pkg/clients/transport/grpc/protobuf_endpoint_converters.microgen.go b/pkg/clients/transport/grpc/protobuf_endpoint_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..fea7765facc0f5ba2c6f07a5c7016d2e326a5d23
--- /dev/null
+++ b/pkg/clients/transport/grpc/protobuf_endpoint_converters.microgen.go
@@ -0,0 +1,295 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// Please do not change function names!
+package transportgrpc
+
+import (
+	"context"
+	"errors"
+
+	"git.perx.ru/perxis/perxis-go/pkg/clients"
+	transport "git.perx.ru/perxis/perxis-go/pkg/clients/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/clients"
+	empty "github.com/golang/protobuf/ptypes/empty"
+)
+
+func _Encode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*transport.GetRequest)
+	return &pb.GetRequest{
+		Id:      req.Id,
+		SpaceId: req.SpaceId,
+	}, nil
+}
+
+func _Encode_List_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListRequest")
+	}
+	req := request.(*transport.ListRequest)
+	return &pb.ListRequest{SpaceId: req.SpaceId}, nil
+}
+
+func _Encode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*transport.DeleteRequest)
+	return &pb.DeleteRequest{
+		Id:      req.Id,
+		SpaceId: req.SpaceId,
+	}, nil
+}
+
+func _Encode_Enable_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil EnableRequest")
+	}
+	req := request.(*transport.EnableRequest)
+	return &pb.EnableRequest{
+		Enable:  req.Enable,
+		Id:      req.Id,
+		SpaceId: req.SpaceId,
+	}, nil
+}
+
+func _Encode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*transport.GetResponse)
+	respClient, err := PtrClientToProto(resp.Client)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetResponse{Client: respClient}, nil
+}
+
+func _Encode_List_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListResponse")
+	}
+	resp := response.(*transport.ListResponse)
+	respClients, err := ListPtrClientToProto(resp.Clients)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.ListResponse{Clients: respClients}, nil
+}
+
+func _Encode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Enable_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*pb.GetRequest)
+	return &transport.GetRequest{
+		Id:      string(req.Id),
+		SpaceId: string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_List_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListRequest")
+	}
+	req := request.(*pb.ListRequest)
+	return &transport.ListRequest{SpaceId: string(req.SpaceId)}, nil
+}
+
+func _Decode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*pb.DeleteRequest)
+	return &transport.DeleteRequest{
+		Id:      string(req.Id),
+		SpaceId: string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Enable_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil EnableRequest")
+	}
+	req := request.(*pb.EnableRequest)
+	return &transport.EnableRequest{
+		Enable:  bool(req.Enable),
+		Id:      string(req.Id),
+		SpaceId: string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*pb.GetResponse)
+	respClient, err := ProtoToPtrClient(resp.Client)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetResponse{Client: respClient}, nil
+}
+
+func _Decode_List_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListResponse")
+	}
+	resp := response.(*pb.ListResponse)
+	respClients, err := ProtoToListPtrClient(resp.Clients)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.ListResponse{Clients: respClients}, nil
+}
+
+func _Decode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Enable_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*transport.CreateRequest)
+	reqClient, err := PtrClientToProto(req.Client)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateRequest{Client: reqClient}, nil
+}
+
+func _Encode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UpdateRequest")
+	}
+	req := request.(*transport.UpdateRequest)
+	reqClient, err := PtrClientToProto(req.Client)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.UpdateRequest{Client: reqClient}, nil
+}
+
+func _Encode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*transport.CreateResponse)
+	respCreated, err := PtrClientToProto(resp.Created)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateResponse{Created: respCreated}, nil
+}
+
+func _Decode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*pb.CreateRequest)
+	reqClient, err := ProtoToPtrClient(req.Client)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateRequest{Client: reqClient}, nil
+}
+
+func _Decode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UpdateRequest")
+	}
+	req := request.(*pb.UpdateRequest)
+	reqClient, err := ProtoToPtrClient(req.Client)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.UpdateRequest{Client: reqClient}, nil
+}
+
+func _Decode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*pb.CreateResponse)
+	respCreated, err := ProtoToPtrClient(resp.Created)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateResponse{Created: respCreated}, nil
+}
+
+func _Encode_GetBy_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetByRequest")
+	}
+	req := request.(*transport.GetByRequest)
+	pbreq := &pb.GetByRequest{SpaceId: req.SpaceId}
+	if req != nil && req.Config != nil {
+		pbreq.ApiKey = req.Config.APIKey
+		pbreq.TlsSubject = req.Config.TLSSubject
+		pbreq.OauthClientId = req.Config.OAuthClientID
+	}
+	return pbreq, nil
+}
+
+func _Encode_GetBy_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetByResponse")
+	}
+	resp := response.(*transport.GetByResponse)
+	respClient, err := PtrClientToProto(resp.Client)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetByResponse{Client: respClient}, nil
+}
+
+func _Decode_GetBy_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetByRequest")
+	}
+	req := request.(*pb.GetByRequest)
+	return &transport.GetByRequest{
+		Config: &clients.GetByParams{
+			OAuthClientID: req.OauthClientId,
+			APIKey:        req.ApiKey,
+			TLSSubject:    req.TlsSubject,
+		},
+		SpaceId: string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_GetBy_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetByResponse")
+	}
+	resp := response.(*pb.GetByResponse)
+	respClient, err := ProtoToPtrClient(resp.Client)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetByResponse{Client: respClient}, nil
+}
diff --git a/pkg/clients/transport/grpc/protobuf_type_converters.microgen.go b/pkg/clients/transport/grpc/protobuf_type_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..5212c0b5d2dc9c96533f5f64aee9ed58ab241b41
--- /dev/null
+++ b/pkg/clients/transport/grpc/protobuf_type_converters.microgen.go
@@ -0,0 +1,164 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// It is better for you if you do not change function names!
+// This file will never be overwritten.
+package transportgrpc
+
+import (
+	service "git.perx.ru/perxis/perxis-go/pkg/clients"
+	permission "git.perx.ru/perxis/perxis-go/pkg/permission"
+	pb "git.perx.ru/perxis/perxis-go/proto/clients"
+	commonpb "git.perx.ru/perxis/perxis-go/proto/common"
+)
+
+func ListStringToProto(environments []string) ([]string, error) {
+	return environments, nil
+}
+
+func ProtoToListString(protoEnvironments []string) ([]string, error) {
+	return protoEnvironments, nil
+}
+
+func PtrClientToProto(client *service.Client) (*pb.Client, error) {
+	if client == nil {
+		return nil, nil
+	}
+
+	var oauth *pb.Client_OAuth
+	var tls *pb.Client_TLS
+	var apikey *pb.Client_APIKey
+
+	if client.OAuth != nil {
+		oauth = &pb.Client_OAuth{
+			ClientId:     client.OAuth.ClientID,
+			AuthId:       client.OAuth.AuthID,
+			TokenUrl:     client.OAuth.TokenURL,
+			ClientSecret: client.OAuth.ClientSecret,
+		}
+	}
+	if client.TLS != nil {
+		tls = &pb.Client_TLS{
+			Subject: client.TLS.Subject,
+		}
+	}
+	if client.APIKey != nil {
+		apikey = &pb.Client_APIKey{
+			Key:    client.APIKey.Key,
+			Rotate: client.APIKey.Rotate,
+		}
+	}
+
+	return &pb.Client{
+		Id:          client.ID,
+		SpaceId:     client.SpaceID,
+		Name:        client.Name,
+		Description: client.Description,
+		Disabled:    client.Disabled,
+		RoleId:      client.RoleID,
+		//Environments: client.Environments,
+		//Rules:        rules,
+		Oauth:  oauth,
+		Tls:    tls,
+		ApiKey: apikey,
+	}, nil
+}
+
+func ProtoToPtrClient(protoClient *pb.Client) (*service.Client, error) {
+	if protoClient == nil {
+		return nil, nil
+	}
+
+	var oauth *service.OAuth
+	var tls *service.TLS
+	var apikey *service.APIKey
+
+	if protoClient.Oauth != nil {
+		oauth = &service.OAuth{
+			ClientID:     protoClient.Oauth.ClientId,
+			AuthID:       protoClient.Oauth.AuthId,
+			TokenURL:     protoClient.Oauth.TokenUrl,
+			ClientSecret: protoClient.Oauth.ClientSecret,
+		}
+	}
+	if protoClient.Tls != nil {
+		tls = &service.TLS{
+			Subject: protoClient.Tls.Subject,
+		}
+	}
+	if protoClient.ApiKey != nil {
+		apikey = &service.APIKey{
+			Key:    protoClient.ApiKey.Key,
+			Rotate: protoClient.ApiKey.Rotate,
+		}
+	}
+
+	return &service.Client{
+		ID:          protoClient.Id,
+		SpaceID:     protoClient.SpaceId,
+		Name:        protoClient.Name,
+		Description: protoClient.Description,
+		Disabled:    protoClient.Disabled,
+		RoleID:      protoClient.RoleId,
+		OAuth:       oauth,
+		TLS:         tls,
+		APIKey:      apikey,
+	}, nil
+}
+
+func ListPtrClientToProto(clients []*service.Client) ([]*pb.Client, error) {
+	protoClients := make([]*pb.Client, 0, len(clients))
+	for _, c := range clients {
+		protoClient, _ := PtrClientToProto(c)
+		protoClients = append(protoClients, protoClient)
+	}
+	return protoClients, nil
+}
+
+func ProtoToListPtrClient(protoClients []*pb.Client) ([]*service.Client, error) {
+	clients := make([]*service.Client, 0, len(protoClients))
+	for _, c := range protoClients {
+		client, _ := ProtoToPtrClient(c)
+		clients = append(clients, client)
+	}
+	return clients, nil
+}
+
+func PtrPermissionRuleToProto(rule *permission.Rule) (*commonpb.Rule, error) {
+	if rule == nil {
+		return nil, nil
+	}
+	actions := make([]commonpb.Action, 0, len(rule.Actions))
+	for _, a := range rule.Actions {
+		actions = append(actions, commonpb.Action(a))
+	}
+	return &commonpb.Rule{
+		CollectionId:    rule.CollectionID,
+		Actions:         actions,
+		Access:          commonpb.Access(rule.Access),
+		HiddenFields:    rule.HiddenFields,
+		ReadonlyFields:  rule.ReadonlyFields,
+		WriteonlyFields: rule.WriteonlyFields,
+		ReadFilter:      rule.ReadFilter,
+		WriteFilter:     rule.WriteFilter,
+	}, nil
+}
+
+func ProtoToPtrPermissionRule(protoRule *commonpb.Rule) (*permission.Rule, error) {
+	if protoRule == nil {
+		return nil, nil
+	}
+	actions := make([]permission.Action, 0, len(protoRule.Actions))
+	for _, a := range protoRule.Actions {
+		actions = append(actions, permission.Action(a))
+	}
+	return &permission.Rule{
+		CollectionID:    protoRule.CollectionId,
+		Actions:         actions,
+		Access:          permission.Access(protoRule.Access),
+		HiddenFields:    protoRule.HiddenFields,
+		ReadonlyFields:  protoRule.ReadonlyFields,
+		WriteonlyFields: protoRule.WriteonlyFields,
+		ReadFilter:      protoRule.ReadFilter,
+		WriteFilter:     protoRule.WriteFilter,
+	}, nil
+}
diff --git a/pkg/clients/transport/grpc/server.microgen.go b/pkg/clients/transport/grpc/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..7408e691dd4fb1e8e772ffe9e530fcd112d02729
--- /dev/null
+++ b/pkg/clients/transport/grpc/server.microgen.go
@@ -0,0 +1,127 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// DO NOT EDIT.
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/clients/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/clients"
+	grpc "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	context "golang.org/x/net/context"
+)
+
+type clientsServer struct {
+	create grpc.Handler
+	get    grpc.Handler
+	getBy  grpc.Handler
+	list   grpc.Handler
+	update grpc.Handler
+	delete grpc.Handler
+	enable grpc.Handler
+
+	pb.UnimplementedClientsServer
+}
+
+func NewGRPCServer(endpoints *transport.EndpointsSet, opts ...grpc.ServerOption) pb.ClientsServer {
+	return &clientsServer{
+		create: grpc.NewServer(
+			endpoints.CreateEndpoint,
+			_Decode_Create_Request,
+			_Encode_Create_Response,
+			opts...,
+		),
+		delete: grpc.NewServer(
+			endpoints.DeleteEndpoint,
+			_Decode_Delete_Request,
+			_Encode_Delete_Response,
+			opts...,
+		),
+		enable: grpc.NewServer(
+			endpoints.EnableEndpoint,
+			_Decode_Enable_Request,
+			_Encode_Enable_Response,
+			opts...,
+		),
+		get: grpc.NewServer(
+			endpoints.GetEndpoint,
+			_Decode_Get_Request,
+			_Encode_Get_Response,
+			opts...,
+		),
+		getBy: grpc.NewServer(
+			endpoints.GetByEndpoint,
+			_Decode_GetBy_Request,
+			_Encode_GetBy_Response,
+			opts...,
+		),
+		list: grpc.NewServer(
+			endpoints.ListEndpoint,
+			_Decode_List_Request,
+			_Encode_List_Response,
+			opts...,
+		),
+		update: grpc.NewServer(
+			endpoints.UpdateEndpoint,
+			_Decode_Update_Request,
+			_Encode_Update_Response,
+			opts...,
+		),
+	}
+}
+
+func (S *clientsServer) Create(ctx context.Context, req *pb.CreateRequest) (*pb.CreateResponse, error) {
+	_, resp, err := S.create.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.CreateResponse), nil
+}
+
+func (S *clientsServer) Get(ctx context.Context, req *pb.GetRequest) (*pb.GetResponse, error) {
+	_, resp, err := S.get.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetResponse), nil
+}
+
+func (S *clientsServer) GetBy(ctx context.Context, req *pb.GetByRequest) (*pb.GetByResponse, error) {
+	_, resp, err := S.getBy.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetByResponse), nil
+}
+
+func (S *clientsServer) List(ctx context.Context, req *pb.ListRequest) (*pb.ListResponse, error) {
+	_, resp, err := S.list.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.ListResponse), nil
+}
+
+func (S *clientsServer) Update(ctx context.Context, req *pb.UpdateRequest) (*empty.Empty, error) {
+	_, resp, err := S.update.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *clientsServer) Delete(ctx context.Context, req *pb.DeleteRequest) (*empty.Empty, error) {
+	_, resp, err := S.delete.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *clientsServer) Enable(ctx context.Context, req *pb.EnableRequest) (*empty.Empty, error) {
+	_, resp, err := S.enable.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
diff --git a/pkg/clients/transport/server.microgen.go b/pkg/clients/transport/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..4c8b6a3e538dd5d1f28c3e7fd2b9f56af54cae8e
--- /dev/null
+++ b/pkg/clients/transport/server.microgen.go
@@ -0,0 +1,77 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+	clients "git.perx.ru/perxis/perxis-go/pkg/clients"
+	endpoint "github.com/go-kit/kit/endpoint"
+)
+
+func Endpoints(svc clients.Clients) EndpointsSet {
+	return EndpointsSet{
+		CreateEndpoint: CreateEndpoint(svc),
+		DeleteEndpoint: DeleteEndpoint(svc),
+		EnableEndpoint: EnableEndpoint(svc),
+		GetByEndpoint:  GetByEndpoint(svc),
+		GetEndpoint:    GetEndpoint(svc),
+		ListEndpoint:   ListEndpoint(svc),
+		UpdateEndpoint: UpdateEndpoint(svc),
+	}
+}
+
+func CreateEndpoint(svc clients.Clients) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*CreateRequest)
+		res0, res1 := svc.Create(arg0, req.Client)
+		return &CreateResponse{Created: res0}, res1
+	}
+}
+
+func GetEndpoint(svc clients.Clients) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetRequest)
+		res0, res1 := svc.Get(arg0, req.SpaceId, req.Id)
+		return &GetResponse{Client: res0}, res1
+	}
+}
+
+func GetByEndpoint(svc clients.Clients) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetByRequest)
+		res0, res1 := svc.GetBy(arg0, req.SpaceId, req.Config)
+		return &GetByResponse{Client: res0}, res1
+	}
+}
+
+func ListEndpoint(svc clients.Clients) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*ListRequest)
+		res0, res1 := svc.List(arg0, req.SpaceId)
+		return &ListResponse{Clients: res0}, res1
+	}
+}
+
+func UpdateEndpoint(svc clients.Clients) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*UpdateRequest)
+		res0 := svc.Update(arg0, req.Client)
+		return &UpdateResponse{}, res0
+	}
+}
+
+func DeleteEndpoint(svc clients.Clients) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*DeleteRequest)
+		res0 := svc.Delete(arg0, req.SpaceId, req.Id)
+		return &DeleteResponse{}, res0
+	}
+}
+
+func EnableEndpoint(svc clients.Clients) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*EnableRequest)
+		res0 := svc.Enable(arg0, req.SpaceId, req.Id, req.Enable)
+		return &EnableResponse{}, res0
+	}
+}
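+
+// Server wiring sketch (illustrative note; RegisterClientsServer is the usual helper
+// generated for the proto service and is assumed here):
+//
+//	eps := Endpoints(svc)
+//	srv := transportgrpc.NewGRPCServer(&eps)
+//	pb.RegisterClientsServer(grpcServer, srv)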
diff --git a/pkg/collaborators/middleware/caching_middleware.go b/pkg/collaborators/middleware/caching_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..f57995acf8387c80edc622313ba4729ae11bc6f6
--- /dev/null
+++ b/pkg/collaborators/middleware/caching_middleware.go
@@ -0,0 +1,88 @@
+package service
+
+import (
+	"context"
+	"strings"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	service "git.perx.ru/perxis/perxis-go/pkg/collaborators"
+)
+
+func makeKey(ss ...string) string {
+	return strings.Join(ss, "-")
+}
+
+func CachingMiddleware(cache *cache.Cache) Middleware {
+	return func(next service.Collaborators) service.Collaborators {
+		return &cachingMiddleware{
+			cache: cache,
+			next:  next,
+		}
+	}
+}
+
+type cachingMiddleware struct {
+	cache *cache.Cache
+	next  service.Collaborators
+}
+
+func (m cachingMiddleware) Set(ctx context.Context, spaceId, subject, role string) (err error) {
+
+	err = m.next.Set(ctx, spaceId, subject, role)
+	if err == nil {
+		m.cache.Remove(makeKey(spaceId, subject))
+		m.cache.Remove(spaceId)
+		m.cache.Remove(subject)
+	}
+	return err
+}
+
+func (m cachingMiddleware) Get(ctx context.Context, spaceId, subject string) (role string, err error) {
+
+	key := makeKey(spaceId, subject)
+	value, e := m.cache.Get(key)
+	if e == nil {
+		return value.(string), err
+	}
+	role, err = m.next.Get(ctx, spaceId, subject)
+	if err == nil {
+		m.cache.Set(key, role)
+	}
+	return role, err
+}
+
+func (m cachingMiddleware) Remove(ctx context.Context, spaceId, subject string) (err error) {
+
+	err = m.next.Remove(ctx, spaceId, subject)
+	if err == nil {
+		m.cache.Remove(makeKey(spaceId, subject))
+		m.cache.Remove(spaceId)
+		m.cache.Remove(subject)
+	}
+	return err
+}
+
+func (m cachingMiddleware) ListCollaborators(ctx context.Context, spaceId string) (collaborators []*service.Collaborator, err error) {
+
+	value, e := m.cache.Get(spaceId)
+	if e == nil {
+		return value.([]*service.Collaborator), err
+	}
+	collaborators, err = m.next.ListCollaborators(ctx, spaceId)
+	if err == nil {
+		m.cache.Set(spaceId, collaborators)
+	}
+	return collaborators, err
+}
+
+func (m cachingMiddleware) ListSpaces(ctx context.Context, subject string) (collaborators []*service.Collaborator, err error) {
+
+	value, e := m.cache.Get(subject)
+	if e == nil {
+		return value.([]*service.Collaborator), err
+	}
+	collaborators, err = m.next.ListSpaces(ctx, subject)
+	if err == nil {
+		m.cache.Set(subject, collaborators)
+	}
+	return collaborators, err
+}
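+
+// Wiring sketch (illustrative note; the size and TTL values are assumptions):
+//
+//	col = CachingMiddleware(cache.NewCache(1000, time.Minute))(col)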
diff --git a/pkg/collaborators/middleware/caching_middleware_test.go b/pkg/collaborators/middleware/caching_middleware_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..da1d6e842f542e1c664f988f38c4797ea31c38f1
--- /dev/null
+++ b/pkg/collaborators/middleware/caching_middleware_test.go
@@ -0,0 +1,190 @@
+package service
+
+import (
+	"context"
+	"testing"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	"git.perx.ru/perxis/perxis-go/pkg/collaborators"
+	csmocks "git.perx.ru/perxis/perxis-go/pkg/collaborators/mocks"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/mock"
+	"github.com/stretchr/testify/require"
+)
+
+func TestCollaboratorsCache(t *testing.T) {
+
+	const (
+		userID    = "userID"
+		spaceID   = "spaceID"
+		spaceRole = "spaceRole"
+		size      = 5
+		ttl       = 20 * time.Millisecond
+	)
+
+	errNotFound := errors.NotFound(errors.New("not found"))
+
+	ctx := context.Background()
+
+	t.Run("Get from cache", func(t *testing.T) {
+		cs := &csmocks.Collaborators{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl))(cs)
+
+		cs.On("Get", mock.Anything, spaceID, userID).Return(spaceRole, nil).Once()
+
+		_, err := svc.Get(ctx, spaceID, userID)
+		require.NoError(t, err)
+
+		rl, err := svc.Get(ctx, spaceID, userID)
+		require.NoError(t, err)
+		assert.Equal(t, spaceRole, rl)
+
+		cs.AssertExpectations(t)
+	})
+
+	t.Run("ListCollaborators from cache", func(t *testing.T) {
+		cs := &csmocks.Collaborators{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl))(cs)
+
+		cs.On("ListCollaborators", mock.Anything, spaceID).Return([]*collaborators.Collaborator{{SpaceID: spaceID, Subject: userID, Role: spaceRole}}, nil).Once()
+
+		v1, err := svc.ListCollaborators(ctx, spaceID)
+		require.NoError(t, err)
+		v2, err := svc.ListCollaborators(ctx, spaceID)
+		require.NoError(t, err)
+		assert.Same(t, v1[0], v2[0], "Expected the objects to be served from the cache on a repeated request.")
+
+		cs.AssertExpectations(t)
+	})
+
+	t.Run("ListSpaces from cache", func(t *testing.T) {
+		cs := &csmocks.Collaborators{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl))(cs)
+
+		cs.On("ListSpaces", mock.Anything, userID).Return([]*collaborators.Collaborator{{SpaceID: spaceID, Subject: userID, Role: spaceRole}}, nil).Once()
+
+		v1, err := svc.ListSpaces(ctx, userID)
+		require.NoError(t, err)
+		v2, err := svc.ListSpaces(ctx, userID)
+		require.NoError(t, err)
+		assert.Same(t, v1[0], v2[0], "Expected the objects to be served from the cache on a repeated request.")
+
+		cs.AssertExpectations(t)
+	})
+
+	t.Run("Invalidate cache", func(t *testing.T) {
+		t.Run("After Remove", func(t *testing.T) {
+			cs := &csmocks.Collaborators{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(cs)
+
+			cs.On("Get", mock.Anything, spaceID, userID).Return(spaceRole, nil).Once()
+			cs.On("ListCollaborators", mock.Anything, spaceID).Return([]*collaborators.Collaborator{{SpaceID: spaceID, Subject: userID, Role: spaceRole}}, nil).Once()
+			cs.On("ListSpaces", mock.Anything, userID).Return([]*collaborators.Collaborator{{SpaceID: spaceID, Subject: userID, Role: spaceRole}}, nil).Once()
+
+			_, err := svc.Get(ctx, spaceID, userID)
+			require.NoError(t, err)
+
+			rl, err := svc.Get(ctx, spaceID, userID)
+			require.NoError(t, err)
+			assert.Equal(t, spaceRole, rl, "Expected the data to be served from the cache.")
+
+			lc1, err := svc.ListCollaborators(ctx, spaceID)
+			require.NoError(t, err)
+			lc2, err := svc.ListCollaborators(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, lc1[0], lc2[0], "Expected the objects to be served from the cache.")
+
+			ls1, err := svc.ListSpaces(ctx, userID)
+			require.NoError(t, err)
+			ls2, err := svc.ListSpaces(ctx, userID)
+			require.NoError(t, err)
+			assert.Same(t, ls1[0], ls2[0], "Expected the objects to be served from the cache.")
+
+			cs.On("Remove", mock.Anything, spaceID, userID).Return(nil).Once()
+
+			cs.On("Get", mock.Anything, spaceID, userID).Return("", errNotFound).Once()
+			cs.On("ListCollaborators", mock.Anything, spaceID).Return(nil, errNotFound).Once()
+			cs.On("ListSpaces", mock.Anything, userID).Return(nil, errNotFound).Once()
+
+			require.NoError(t, svc.Remove(ctx, spaceID, userID))
+
+			rl, err = svc.Get(ctx, spaceID, userID)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление данных из кеша, и получение ошибки от сервиса")
+			assert.Empty(t, rl)
+
+			lc, err := svc.ListCollaborators(ctx, spaceID)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление данных из кеша, и получение ошибки от сервиса")
+			assert.Nil(t, lc)
+
+			ls, err := svc.ListSpaces(ctx, userID)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление данных из кеша, и получение ошибки от сервиса")
+			assert.Nil(t, ls)
+
+			cs.AssertExpectations(t)
+		})
+
+		t.Run("After TTL expired", func(t *testing.T) {
+			cs := &csmocks.Collaborators{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(cs)
+
+			cs.On("Get", mock.Anything, spaceID, userID).Return(spaceRole, nil).Once()
+			cs.On("ListCollaborators", mock.Anything, spaceID).Return([]*collaborators.Collaborator{{SpaceID: spaceID, Subject: userID, Role: spaceRole}}, nil).Once()
+			cs.On("ListSpaces", mock.Anything, userID).Return([]*collaborators.Collaborator{{SpaceID: spaceID, Subject: userID, Role: spaceRole}}, nil).Once()
+
+			_, err := svc.Get(ctx, spaceID, userID)
+			require.NoError(t, err)
+
+			rl, err := svc.Get(ctx, spaceID, userID)
+			require.NoError(t, err)
+			assert.Equal(t, spaceRole, rl, "Expected the data to be served from the cache.")
+
+			lc1, err := svc.ListCollaborators(ctx, spaceID)
+			require.NoError(t, err)
+			lc2, err := svc.ListCollaborators(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, lc1[0], lc2[0], "Expected the objects to be served from the cache.")
+
+			ls1, err := svc.ListSpaces(ctx, userID)
+			require.NoError(t, err)
+			ls2, err := svc.ListSpaces(ctx, userID)
+			require.NoError(t, err)
+			assert.Same(t, ls1[0], ls2[0], "Expected the objects to be served from the cache.")
+
+			cs.On("Remove", mock.Anything, spaceID, userID).Return(nil).Once()
+
+			cs.On("Get", mock.Anything, spaceID, userID).Return("", errNotFound).Once()
+			cs.On("ListCollaborators", mock.Anything, spaceID).Return(nil, errNotFound).Once()
+			cs.On("ListSpaces", mock.Anything, userID).Return(nil, errNotFound).Once()
+
+			err = svc.Remove(ctx, spaceID, userID)
+
+			rl, err = svc.Get(ctx, spaceID, userID)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление данных из кеша, и получение ошибки от сервиса")
+			assert.Empty(t, rl)
+
+			lc, err := svc.ListCollaborators(ctx, spaceID)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление данных из кеша, и получение ошибки от сервиса")
+			assert.Nil(t, lc)
+
+			ls, err := svc.ListSpaces(ctx, userID)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление данных из кеша, и получение ошибки от сервиса")
+			assert.Nil(t, ls)
+
+			cs.AssertExpectations(t)
+		})
+	})
+
+}
diff --git a/pkg/collaborators/middleware/error_logging_middleware.go b/pkg/collaborators/middleware/error_logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..a45dfd8053e1718fdc139f1792a9f0b1547c08d1
--- /dev/null
+++ b/pkg/collaborators/middleware/error_logging_middleware.go
@@ -0,0 +1,80 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/error_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/collaborators -i Collaborators -t ../../../assets/templates/middleware/error_log -o error_logging_middleware.go -l ""
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/collaborators"
+	"go.uber.org/zap"
+)
+
+// errorLoggingMiddleware implements collaborators.Collaborators that is instrumented with logging
+type errorLoggingMiddleware struct {
+	logger *zap.Logger
+	next   collaborators.Collaborators
+}
+
+// ErrorLoggingMiddleware instruments an implementation of the collaborators.Collaborators with simple logging
+func ErrorLoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next collaborators.Collaborators) collaborators.Collaborators {
+		return &errorLoggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *errorLoggingMiddleware) Get(ctx context.Context, spaceId string, subject string) (role string, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Get(ctx, spaceId, subject)
+}
+
+func (m *errorLoggingMiddleware) ListCollaborators(ctx context.Context, spaceId string) (collaborators []*collaborators.Collaborator, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.ListCollaborators(ctx, spaceId)
+}
+
+func (m *errorLoggingMiddleware) ListSpaces(ctx context.Context, subject string) (spaces []*collaborators.Collaborator, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.ListSpaces(ctx, subject)
+}
+
+func (m *errorLoggingMiddleware) Remove(ctx context.Context, spaceId string, subject string) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Remove(ctx, spaceId, subject)
+}
+
+func (m *errorLoggingMiddleware) Set(ctx context.Context, spaceId string, subject string, role string) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Set(ctx, spaceId, subject, role)
+}
diff --git a/pkg/collaborators/middleware/logging_middleware.go b/pkg/collaborators/middleware/logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..41f541e6cc0e09f2fc6240d585159bda09c10a74
--- /dev/null
+++ b/pkg/collaborators/middleware/logging_middleware.go
@@ -0,0 +1,216 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/access_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/collaborators -i Collaborators -t ../../../assets/templates/middleware/access_log -o logging_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/auth"
+	"git.perx.ru/perxis/perxis-go/pkg/collaborators"
+	"go.uber.org/zap"
+	"go.uber.org/zap/zapcore"
+)
+
+// loggingMiddleware implements collaborators.Collaborators that is instrumented with logging
+type loggingMiddleware struct {
+	logger *zap.Logger
+	next   collaborators.Collaborators
+}
+
+// LoggingMiddleware instruments an implementation of the collaborators.Collaborators with simple logging
+func LoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next collaborators.Collaborators) collaborators.Collaborators {
+		return &loggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *loggingMiddleware) Get(ctx context.Context, spaceId string, subject string) (role string, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId,
+		"subject": subject} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Request", fields...)
+
+	role, err = m.next.Get(ctx, spaceId, subject)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"role": role,
+		"err":  err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Response", fields...)
+
+	return role, err
+}
+
+func (m *loggingMiddleware) ListCollaborators(ctx context.Context, spaceId string) (collaborators []*collaborators.Collaborator, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("ListCollaborators.Request", fields...)
+
+	collaborators, err = m.next.ListCollaborators(ctx, spaceId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"collaborators": collaborators,
+		"err":           err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("ListCollaborators.Response", fields...)
+
+	return collaborators, err
+}
+
+func (m *loggingMiddleware) ListSpaces(ctx context.Context, subject string) (spaces []*collaborators.Collaborator, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"subject": subject} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("ListSpaces.Request", fields...)
+
+	spaces, err = m.next.ListSpaces(ctx, subject)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"spaces": spaces,
+		"err":    err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("ListSpaces.Response", fields...)
+
+	return spaces, err
+}
+
+func (m *loggingMiddleware) Remove(ctx context.Context, spaceId string, subject string) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId,
+		"subject": subject} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Remove.Request", fields...)
+
+	err = m.next.Remove(ctx, spaceId, subject)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Remove.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Set(ctx context.Context, spaceId string, subject string, role string) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId,
+		"subject": subject,
+		"role":    role} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Set.Request", fields...)
+
+	err = m.next.Set(ctx, spaceId, subject, role)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Set.Response", fields...)
+
+	return err
+}
diff --git a/pkg/collaborators/middleware/middleware.go b/pkg/collaborators/middleware/middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..682fd963ae290298adaec9001b7f60215c80d4db
--- /dev/null
+++ b/pkg/collaborators/middleware/middleware.go
@@ -0,0 +1,28 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../../assets/templates/middleware/middleware
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/collaborators -i Collaborators -t ../../../assets/templates/middleware/middleware -o middleware.go -l ""
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/collaborators"
+	"go.uber.org/zap"
+)
+
+type Middleware func(collaborators.Collaborators) collaborators.Collaborators
+
+func WithLog(s collaborators.Collaborators, logger *zap.Logger, log_access bool) collaborators.Collaborators {
+	if logger == nil {
+		logger = zap.NewNop()
+	}
+
+	logger = logger.Named("Collaborators")
+	s = ErrorLoggingMiddleware(logger)(s)
+	if log_access {
+		s = LoggingMiddleware(logger)(s)
+	}
+	s = RecoveringMiddleware(logger)(s)
+	return s
+}
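
For orientation, a minimal usage sketch of the WithLog helper added above (not part of this change; `svcImpl` and the function name are illustrative). Note that the files in `pkg/collaborators/middleware` declare `package service`, so an import alias is used:

    package example

    import (
    	"go.uber.org/zap"

    	"git.perx.ru/perxis/perxis-go/pkg/collaborators"
    	collabmw "git.perx.ru/perxis/perxis-go/pkg/collaborators/middleware" // package name is `service`
    )

    // wrapCollaborators decorates a concrete Collaborators implementation with the
    // chain built by WithLog: recovery (outermost), then access logging (only when
    // logAccess is true), then error logging, then the implementation itself.
    func wrapCollaborators(svcImpl collaborators.Collaborators, logger *zap.Logger, logAccess bool) collaborators.Collaborators {
    	return collabmw.WithLog(svcImpl, logger, logAccess)
    }
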
diff --git a/pkg/collaborators/middleware/recovering_middleware.go b/pkg/collaborators/middleware/recovering_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..84e9dfb59514b1a146c96f0239cd3c8e83d8e7ba
--- /dev/null
+++ b/pkg/collaborators/middleware/recovering_middleware.go
@@ -0,0 +1,91 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../../assets/templates/middleware/recovery
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/collaborators -i Collaborators -t ../../../assets/templates/middleware/recovery -o recovering_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+
+	"git.perx.ru/perxis/perxis-go/pkg/collaborators"
+	"go.uber.org/zap"
+)
+
+// recoveringMiddleware implements collaborators.Collaborators that is instrumented with logging
+type recoveringMiddleware struct {
+	logger *zap.Logger
+	next   collaborators.Collaborators
+}
+
+// RecoveringMiddleware instruments an implementation of the collaborators.Collaborators with simple logging
+func RecoveringMiddleware(logger *zap.Logger) Middleware {
+	return func(next collaborators.Collaborators) collaborators.Collaborators {
+		return &recoveringMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *recoveringMiddleware) Get(ctx context.Context, spaceId string, subject string) (role string, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Get(ctx, spaceId, subject)
+}
+
+func (m *recoveringMiddleware) ListCollaborators(ctx context.Context, spaceId string) (collaborators []*collaborators.Collaborator, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.ListCollaborators(ctx, spaceId)
+}
+
+func (m *recoveringMiddleware) ListSpaces(ctx context.Context, subject string) (spaces []*collaborators.Collaborator, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.ListSpaces(ctx, subject)
+}
+
+func (m *recoveringMiddleware) Remove(ctx context.Context, spaceId string, subject string) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Remove(ctx, spaceId, subject)
+}
+
+func (m *recoveringMiddleware) Set(ctx context.Context, spaceId string, subject string, role string) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Set(ctx, spaceId, subject, role)
+}
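
A small illustration of what the recovering middleware provides: a panic inside the wrapped implementation is logged and surfaced as an ordinary error instead of crashing the caller. The stub type and function below are invented for the example and are not part of this change:

    package example

    import (
    	"context"

    	"go.uber.org/zap"

    	"git.perx.ru/perxis/perxis-go/pkg/collaborators"
    	collabmw "git.perx.ru/perxis/perxis-go/pkg/collaborators/middleware" // package name is `service`
    )

    // panicky is a throwaway Collaborators stub whose Get always panics.
    type panicky struct{ collaborators.Collaborators }

    func (panicky) Get(ctx context.Context, spaceId, subject string) (string, error) {
    	panic("boom")
    }

    func recoverDemo(ctx context.Context) error {
    	svc := collabmw.RecoveringMiddleware(zap.NewNop())(panicky{})
    	// The panic inside Get is recovered by the middleware, logged at Error
    	// level and converted into the returned error ("boom").
    	_, err := svc.Get(ctx, "space", "user")
    	return err // non-nil: "boom"
    }
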
diff --git a/pkg/collections/collection.go b/pkg/collections/collection.go
new file mode 100644
index 0000000000000000000000000000000000000000..5b3c5dd310ac8af32ee11e9e18c4aa1a3d4c72f3
--- /dev/null
+++ b/pkg/collections/collection.go
@@ -0,0 +1,180 @@
+package collections
+
+import (
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/permission"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+)
+
+// Config
+type Config struct {
+	SourceSpaceID      string
+	SourceEnvID        string
+	SourceCollectionID string
+	SourceSchema       *schema.Schema
+}
+
+// Access describes the current restrictions imposed on access to the collection
+// items for the current user
+type Access struct {
+	Actions         []permission.Action // Actions permitted on the collection items
+	HiddenFields    []string            // Fields hidden in the UI and not returned by the API
+	ReadonlyFields  []string            // Fields that cannot be edited and cannot be updated via the API
+	WriteonlyFields []string            // Fields shown in the UI but not returned by the API
+}
+
+func (a Access) Clone() *Access {
+
+	clone := &Access{
+		Actions:         make([]permission.Action, len(a.Actions)),
+		HiddenFields:    make([]string, len(a.HiddenFields)),
+		ReadonlyFields:  make([]string, len(a.ReadonlyFields)),
+		WriteonlyFields: make([]string, len(a.WriteonlyFields)),
+	}
+
+	copy(clone.Actions, a.Actions)
+	copy(clone.HiddenFields, a.HiddenFields)
+	copy(clone.ReadonlyFields, a.ReadonlyFields)
+	copy(clone.WriteonlyFields, a.WriteonlyFields)
+
+	return clone
+}
+
+func (a Access) Can(action permission.Action) bool {
+	for _, act := range a.Actions {
+		if act == action {
+			return true
+		}
+	}
+	return false
+}
+
+type Collection struct {
+	ID      string         `json:"id" bson:"id"`
+	SpaceID string         `json:"spaceId" bson:"-"`
+	EnvID   string         `json:"envId" bson:"-"`
+	Name    string         `json:"name" bson:"name"`
+	Single  *bool          `json:"single" bson:"single,omitempty"` // The collection may contain only a single document
+	System  *bool          `json:"system" bson:"system,omitempty"` // System collection
+	NoData  *bool          `json:"no_data" bson:"no_data"`         // The collection holds no items; its schema is only used for inclusion into other schemas
+	Hidden  bool           `json:"hidden" bson:"hidden"`           // The collection is hidden in the administrative UI
+	Schema  *schema.Schema `json:"schema" bson:"schema"`
+	Access  *Access        `json:"access" bson:"-"` // Restrictions on access to the collection items; a nil value means unrestricted access
+
+	// StateInfo reflects the collection state:
+	// - State: the collection state identifier (new/preparing/ready/error/changed)
+	// - Info: additional information about the collection state (for example, an
+	//   error that occurred while applying the schema to the collection)
+	// - StartedAt: the time at which the collection entered the `Preparing` state
+	StateInfo *StateInfo `json:"state_info" bson:"state_info,omitempty"` // todo: show as readonly in the UI
+
+	// View - if non-empty, the collection is a View (a projection of a subset of
+	// another collection's data according to View.Filter)
+	View *View `json:"view,omitempty" bson:"view,omitempty"`
+
+	// Tags - the list of collection tags; they are attached when events are published
+	Tags []string `json:"tags,omitempty" bson:"tags,omitempty"`
+
+	Config *Config `json:"-" bson:"-"`
+}
+
+type View struct {
+	SpaceID      string `json:"space_id" bson:"space_id"`             // SpaceID of the original collection
+	EnvID        string `json:"environment_id" bson:"environment_id"` // EnvID of the original collection
+	CollectionID string `json:"collection_id" bson:"collection_id"`   // CollectionID of the original collection
+	Filter       string `json:"filter" bson:"filter,omitempty"`       // Filtering rules applied to the original collection's records
+}
+
+type StateInfo struct {
+	State     State     `json:"state" bson:"state"`
+	Info      string    `json:"info" bson:"info"`
+	StartedAt time.Time `json:"started_at,omitempty" bson:"started_at,omitempty"`
+}
+
+type State int
+
+func (s State) String() string {
+	var state string
+
+	switch s {
+	case StateNew:
+		state = "New"
+	case StatePreparing:
+		state = "Preparing"
+	case StateReady:
+		state = "Ready"
+	case StateError:
+		state = "Error"
+	case StateChanged:
+		state = "Changed"
+	default:
+		state = "Unknown"
+	}
+
+	return state
+}
+
+const (
+	StateNew State = iota
+	StatePreparing
+	StateReady
+	StateError
+	StateChanged
+)
+
+func (c Collection) Clone() *Collection {
+
+	clone := &Collection{
+		ID:      c.ID,
+		SpaceID: c.SpaceID,
+		EnvID:   c.EnvID,
+		Name:    c.Name,
+		NoData:  c.NoData,
+		Hidden:  c.Hidden,
+	}
+
+	if c.Single != nil {
+		single := *c.Single
+		clone.Single = &single
+	}
+	if c.System != nil {
+		system := *c.System
+		clone.System = &system
+	}
+	if c.Schema != nil {
+		clone.Schema = c.Schema.Clone(false)
+	}
+	if c.Access != nil {
+		clone.Access = c.Access.Clone()
+	}
+	if c.StateInfo != nil {
+		info := *c.StateInfo
+		clone.StateInfo = &info
+	}
+	if c.View != nil {
+		view := *c.View
+		clone.View = &view
+	}
+	if c.Config != nil {
+		cfg := *c.Config
+		clone.Config = &cfg
+	}
+	if c.Tags != nil {
+		clone.Tags = append([]string{}, c.Tags...)
+	}
+
+	return clone
+}
+
+func (c Collection) IsSingle() bool {
+	return c.Single != nil && *c.Single
+}
+
+func (c Collection) IsNoData() bool {
+	return c.NoData != nil && *c.NoData
+}
+
+func (c Collection) IsView() bool {
+	return c.View != nil
+}
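
Since Clone mixes plain value copies with freshly allocated pointer and slice fields, a short sketch of the resulting deep-copy behaviour may help (illustrative values only, not part of the change):

    package example

    import (
    	"fmt"

    	"git.perx.ru/perxis/perxis-go/pkg/collections"
    )

    func cloneDemo() {
    	single := true
    	col := &collections.Collection{
    		ID:     "posts",
    		Name:   "Posts",
    		Single: &single,
    		Tags:   []string{"content"},
    	}

    	clone := col.Clone()
    	clone.Name = "Posts (copy)" // plain value: does not affect col
    	*clone.Single = false       // Clone copied the bool into a fresh pointer, col stays single
    	clone.Tags[0] = "draft"     // Tags slice is copied too, col.Tags stays ["content"]

    	fmt.Println(col.Name, col.IsSingle(), col.Tags[0]) // Posts true content
    	fmt.Println(clone.Name, clone.IsSingle())          // Posts (copy) false
    }
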
diff --git a/pkg/collections/middleware/caching_middleware.go b/pkg/collections/middleware/caching_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..0bb41c9e28027e8d9c4cfa5e305b91ac76f44165
--- /dev/null
+++ b/pkg/collections/middleware/caching_middleware.go
@@ -0,0 +1,136 @@
+package service
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	service "git.perx.ru/perxis/perxis-go/pkg/collections"
+	envService "git.perx.ru/perxis/perxis-go/pkg/environments"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+)
+
+func makeKey(spaceId, envId, collectionId string, disableSchemaIncludes bool) string {
+	s := spaceId + "-" + envId + "-" + collectionId + "-"
+	if disableSchemaIncludes {
+		s += "1"
+	} else {
+		s += "0"
+	}
+	return s
+}
+
+func CachingMiddleware(cache *cache.Cache, envs envService.Environments) Middleware {
+	return func(next service.Collections) service.Collections {
+		return &cachingMiddleware{
+			cache: cache,
+			next:  next,
+			envs:  envs,
+		}
+	}
+}
+
+type cachingMiddleware struct {
+	cache *cache.Cache
+	next  service.Collections
+	envs  envService.Environments
+}
+
+func (m cachingMiddleware) Create(ctx context.Context, collection *service.Collection) (coll *service.Collection, err error) {
+	return m.next.Create(ctx, collection)
+}
+
+func (m cachingMiddleware) Get(ctx context.Context, spaceId string, envId string, collectionId string, options ...*service.GetOptions) (coll *service.Collection, err error) {
+
+	opts := service.MergeGetOptions(options...)
+	value, e := m.cache.Get(makeKey(spaceId, envId, collectionId, opts.DisableSchemaIncludes))
+	if e == nil {
+		return value.(*service.Collection), nil
+	}
+	coll, err = m.next.Get(ctx, spaceId, envId, collectionId, options...)
+	if err == nil {
+		env, err := m.envs.Get(ctx, coll.SpaceID, coll.EnvID)
+		if err != nil {
+			return nil, err
+		}
+		m.cache.Set(makeKey(coll.SpaceID, env.ID, coll.ID, opts.DisableSchemaIncludes), coll)
+		for _, al := range env.Aliases {
+			m.cache.Set(makeKey(coll.SpaceID, al, coll.ID, opts.DisableSchemaIncludes), coll)
+		}
+
+	}
+	return coll, err
+}
+
+func (m cachingMiddleware) List(ctx context.Context, spaceId, envId string, filter *service.Filter) (collections []*service.Collection, err error) {
+	return m.next.List(ctx, spaceId, envId, filter)
+}
+
+func (m cachingMiddleware) Update(ctx context.Context, coll *service.Collection) (err error) {
+
+	err = m.next.Update(ctx, coll)
+	if err == nil {
+		env, err := m.envs.Get(ctx, coll.SpaceID, coll.EnvID)
+		if err != nil {
+			return err
+		}
+		m.cache.Remove(makeKey(env.SpaceID, env.ID, coll.ID, true))
+		m.cache.Remove(makeKey(env.SpaceID, env.ID, coll.ID, false))
+		for _, al := range env.Aliases {
+			m.cache.Remove(makeKey(env.SpaceID, al, coll.ID, true))
+			m.cache.Remove(makeKey(env.SpaceID, al, coll.ID, false))
+		}
+	}
+	return err
+}
+
+func (m cachingMiddleware) SetSchema(ctx context.Context, spaceId, envId, collectionId string, schema *schema.Schema) (err error) {
+	err = m.next.SetSchema(ctx, spaceId, envId, collectionId, schema)
+	if err == nil {
+		env, err := m.envs.Get(ctx, spaceId, envId)
+		if err != nil {
+			return err
+		}
+		m.cache.Remove(makeKey(env.SpaceID, env.ID, collectionId, true))
+		m.cache.Remove(makeKey(env.SpaceID, env.ID, collectionId, false))
+		for _, al := range env.Aliases {
+			m.cache.Remove(makeKey(env.SpaceID, al, collectionId, true))
+			m.cache.Remove(makeKey(env.SpaceID, al, collectionId, false))
+		}
+	}
+	return err
+}
+
+func (m cachingMiddleware) SetState(ctx context.Context, spaceId, envId, collectionId string, state *service.StateInfo) (err error) {
+	err = m.next.SetState(ctx, spaceId, envId, collectionId, state)
+	if err == nil {
+		env, err := m.envs.Get(ctx, spaceId, envId)
+		if err != nil {
+			return err
+		}
+		m.cache.Remove(makeKey(env.SpaceID, env.ID, collectionId, true))
+		m.cache.Remove(makeKey(env.SpaceID, env.ID, collectionId, false))
+		for _, al := range env.Aliases {
+			m.cache.Remove(makeKey(env.SpaceID, al, collectionId, true))
+			m.cache.Remove(makeKey(env.SpaceID, al, collectionId, false))
+		}
+	}
+	return err
+}
+
+func (m cachingMiddleware) Delete(ctx context.Context, spaceId string, envId string, collectionId string) (err error) {
+
+	err = m.next.Delete(ctx, spaceId, envId, collectionId)
+	if err == nil {
+		env, err := m.envs.Get(ctx, spaceId, envId)
+		if err != nil {
+			return err
+		}
+		m.cache.Remove(makeKey(env.SpaceID, env.ID, collectionId, true))
+		m.cache.Remove(makeKey(env.SpaceID, env.ID, collectionId, false))
+		for _, al := range env.Aliases {
+			m.cache.Remove(makeKey(env.SpaceID, al, collectionId, true))
+			m.cache.Remove(makeKey(env.SpaceID, al, collectionId, false))
+		}
+	}
+	return err
+}
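
A minimal wiring sketch for the caching middleware above, assuming already-constructed Collections and Environments clients (`colls` and `envs` are placeholder names) and illustrative cache parameters; the comment spells out the key layout produced by makeKey:

    package example

    import (
    	"context"
    	"time"

    	"git.perx.ru/perxis/perxis-go/pkg/cache"
    	"git.perx.ru/perxis/perxis-go/pkg/collections"
    	colsmw "git.perx.ru/perxis/perxis-go/pkg/collections/middleware" // package name is `service`
    	"git.perx.ru/perxis/perxis-go/pkg/environments"
    )

    func newCachedCollections(colls collections.Collections, envs environments.Environments) collections.Collections {
    	// 128 entries with a 1 minute TTL are illustrative values.
    	return colsmw.CachingMiddleware(cache.NewCache(128, time.Minute), envs)(colls)
    }

    func getExample(ctx context.Context, svc collections.Collections) (*collections.Collection, error) {
    	// The first call goes to the underlying service and is stored under the key
    	// "<spaceId>-<envId>-<collectionId>-<0|1>" for the environment ID and for every
    	// alias of the environment; later Get calls by ID or alias hit the cache until
    	// Update/SetSchema/SetState/Delete remove those keys.
    	return svc.Get(ctx, "space", "master", "posts")
    }
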
diff --git a/pkg/collections/middleware/caching_middleware_test.go b/pkg/collections/middleware/caching_middleware_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..967a75bce7fa7d933918edc9f05600f75a6c0ff5
--- /dev/null
+++ b/pkg/collections/middleware/caching_middleware_test.go
@@ -0,0 +1,458 @@
+package service
+
+import (
+	"context"
+	"testing"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	"git.perx.ru/perxis/perxis-go/pkg/collections"
+	colsmocks "git.perx.ru/perxis/perxis-go/pkg/collections/mocks"
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	envmocks "git.perx.ru/perxis/perxis-go/pkg/environments/mocks"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/mock"
+	"github.com/stretchr/testify/require"
+)
+
+func TestCollections_Cache(t *testing.T) {
+
+	const (
+		colID    = "colID"
+		spaceID  = "spaceID"
+		envID    = "envId"
+		envAlias = "envAlias"
+		size     = 5
+		ttl      = 20 * time.Millisecond
+	)
+
+	errNotFound := errors.NotFound(errors.New("not found"))
+
+	ctx := context.Background()
+
+	t.Run("Get from cache", func(t *testing.T) {
+		col := &colsmocks.Collections{}
+		env := &envmocks.Environments{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl), env)(col)
+
+		env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+		col.On("Get", mock.Anything, spaceID, envID, colID).Return(&collections.Collection{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "name"}, nil).Once()
+
+		v1, err := svc.Get(ctx, spaceID, envID, colID)
+		require.NoError(t, err)
+
+		v2, err := svc.Get(ctx, spaceID, envID, colID)
+		require.NoError(t, err)
+		assert.Same(t, v1, v2, "Ожидается получение объекта из кеша при повторном запросе по ID окружения.")
+
+		v3, err := svc.Get(ctx, spaceID, envAlias, colID)
+		require.NoError(t, err)
+		assert.Same(t, v3, v2, "Ожидается получение объекта из кеша, при запросе того же объекта по alias окружения.")
+
+		env.AssertExpectations(t)
+		col.AssertExpectations(t)
+	})
+
+	t.Run("Get from cache(by Alias)", func(t *testing.T) {
+		col := &colsmocks.Collections{}
+		env := &envmocks.Environments{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl), env)(col)
+
+		col.On("Get", mock.Anything, spaceID, envAlias, colID).Return(&collections.Collection{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "name"}, nil).Once()
+		env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+		v1, err := svc.Get(ctx, spaceID, envAlias, colID)
+		require.NoError(t, err)
+
+		v2, err := svc.Get(ctx, spaceID, envAlias, colID)
+		require.NoError(t, err)
+		assert.Same(t, v1, v2, "Ожидается получение объекта из кеша при повторном запросе по Alias окружения.")
+
+		v3, err := svc.Get(ctx, spaceID, envID, colID)
+		require.NoError(t, err)
+		assert.Same(t, v3, v2, "Ожидается получение объекта из кеша, при запросе того же объекта по ID окружения.")
+
+		env.AssertExpectations(t)
+		col.AssertExpectations(t)
+	})
+
+	t.Run("Get from cache with options", func(t *testing.T) {
+		col := &colsmocks.Collections{}
+		env := &envmocks.Environments{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl), env)(col)
+
+		env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+		col.On("Get", mock.Anything, spaceID, envID, colID, mock.Anything).Run(func(args mock.Arguments) {
+			require.Len(t, args, 5)
+			opt := args.Get(4).(*collections.GetOptions)
+			assert.True(t, opt.DisableSchemaIncludes)
+		}).Return(&collections.Collection{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "name"}, nil).Once()
+
+		_, err := svc.Get(ctx, spaceID, envID, colID, &collections.GetOptions{DisableSchemaIncludes: true})
+		require.NoError(t, err)
+
+		env.AssertExpectations(t)
+		col.AssertExpectations(t)
+	})
+
+	//t.Run("List from cache", func(t *testing.T) {
+	//	col := &colsmocks.Collections{}
+	//	env := &envmocks.Environments{}
+	//
+	//	svc := CachingMiddleware(cache.NewCache(size, ttl), env)(col)
+	//
+	//	col.On("List", mock.Anything, spaceID, envID).Return([]*collections.Collection{{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "name"}}, nil).Once()
+	//	env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+	//
+	//	vl1, err := svc.List(ctx, spaceID, envID, nil)
+	//	require.NoError(t, err)
+	//
+	//	vl2, err := svc.List(ctx, spaceID, envID, nil)
+	//	require.NoError(t, err)
+	//	assert.Len(t, vl2, 1)
+	//	assert.Same(t, vl1[0], vl2[0], "При повторном запросе по ID окружения, ожидается получение списка объектов из кеша.")
+	//
+	//	vl3, err := svc.List(ctx, spaceID, envAlias, nil)
+	//	require.NoError(t, err)
+	//	assert.Len(t, vl3, 1)
+	//	assert.Same(t, vl3[0], vl2[0], "При повторном запросе по Alias окружения, ожидается получение списка объектов из кеша.")
+	//
+	//	env.AssertExpectations(t)
+	//	col.AssertExpectations(t)
+	//})
+
+	t.Run("List", func(t *testing.T) {
+		col := &colsmocks.Collections{}
+		env := &envmocks.Environments{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl), env)(col)
+
+		col.On("List", mock.Anything, spaceID, envAlias, mock.Anything).Return([]*collections.Collection{{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "name"}}, nil).Once()
+		col.On("List", mock.Anything, spaceID, envID, mock.Anything).Return([]*collections.Collection{{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "name"}}, nil).Once()
+
+		_, err := svc.List(ctx, spaceID, envAlias, nil)
+		require.NoError(t, err)
+
+		_, err = svc.List(ctx, spaceID, envID, nil)
+		require.NoError(t, err)
+
+		env.AssertExpectations(t)
+		col.AssertExpectations(t)
+	})
+
+	t.Run("Invalidate cache", func(t *testing.T) {
+		t.Run("After Update", func(t *testing.T) {
+			col := &colsmocks.Collections{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), env)(col)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			col.On("Get", mock.Anything, spaceID, envID, colID).Return(&collections.Collection{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "name"}, nil).Once()
+			col.On("List", mock.Anything, spaceID, envID, mock.Anything).Return([]*collections.Collection{{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "name"}}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID, colID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша по ID окружения.")
+
+			v3, err := svc.Get(ctx, spaceID, envAlias, colID)
+			require.NoError(t, err)
+			assert.Same(t, v2, v3, "Ожидается получение объекта из кеша по Alias окружения.")
+
+			vl1, err := svc.List(ctx, spaceID, envID, nil)
+			require.NoError(t, err)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			col.On("Update", mock.Anything, mock.Anything).Return(nil).Once()
+			err = svc.Update(ctx, &collections.Collection{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "nameUPD"})
+			require.NoError(t, err)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			col.On("Get", mock.Anything, spaceID, envID, colID).Return(&collections.Collection{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "nameUPD"}, nil).Once()
+			col.On("List", mock.Anything, spaceID, envID, mock.Anything).Return([]*collections.Collection{{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "nameUPD"}}, nil).Once()
+
+			v4, err := svc.Get(ctx, spaceID, envID, colID)
+			require.NoError(t, err)
+			assert.NotSame(t, v3, v4, "Ожидает что элемент после обновления был удален из кэша и будет запрошен заново из сервиса.")
+
+			v5, err := svc.Get(ctx, spaceID, envAlias, colID)
+			require.NoError(t, err)
+			assert.Same(t, v4, v5, "Ожидается получение объекта из кеша по Alias окружения.")
+
+			vl2, err := svc.List(ctx, spaceID, envID, nil)
+			require.NoError(t, err)
+			assert.NotSame(t, vl1[0], vl2[0], "Ожидает что после обновления элементы будут запрошены заново из сервиса.")
+
+			env.AssertExpectations(t)
+			col.AssertExpectations(t)
+		})
+
+		t.Run("After Update(by Alias)", func(t *testing.T) {
+			col := &colsmocks.Collections{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), env)(col)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			//env.On("Get", mock.Anything, spaceID, envAlias).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			col.On("Get", mock.Anything, spaceID, envAlias, colID).Return(&collections.Collection{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "name"}, nil).Once()
+			col.On("List", mock.Anything, spaceID, envAlias, mock.Anything).Return([]*collections.Collection{{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "name"}}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envAlias, colID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envAlias, colID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша по Alias окружения.")
+
+			v3, err := svc.Get(ctx, spaceID, envID, colID)
+			require.NoError(t, err)
+			assert.Same(t, v2, v3, "Ожидается получение объекта из кеша по ID окружения.")
+
+			vl1, err := svc.List(ctx, spaceID, envAlias, nil)
+			require.NoError(t, err)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			col.On("Update", mock.Anything, mock.Anything).Return(nil).Once()
+			err = svc.Update(ctx, &collections.Collection{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "nameUPD"})
+			require.NoError(t, err)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			//env.On("Get", mock.Anything, spaceID, envAlias).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			col.On("Get", mock.Anything, spaceID, envAlias, colID).Return(&collections.Collection{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "nameUPD"}, nil).Once()
+			col.On("List", mock.Anything, spaceID, envAlias, mock.Anything).Return([]*collections.Collection{{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "nameUPD"}}, nil).Once()
+
+			v4, err := svc.Get(ctx, spaceID, envAlias, colID)
+			require.NoError(t, err)
+			assert.NotSame(t, v3, v4, "Ожидает что элемент после обновления был удален из кэша и будет запрошен заново из сервиса.")
+
+			v5, err := svc.Get(ctx, spaceID, envID, colID)
+			require.NoError(t, err)
+			assert.Same(t, v4, v5, "Ожидается получение объекта из кеша по Alias окружения.")
+
+			vl4, err := svc.List(ctx, spaceID, envAlias, nil)
+			require.NoError(t, err)
+			assert.NotSame(t, vl1[0], vl4[0], "Ожидает что после обновления элементы будут запрошены заново из сервиса.")
+
+			env.AssertExpectations(t)
+			col.AssertExpectations(t)
+		})
+
+		t.Run("After Set Schema", func(t *testing.T) {
+			col := &colsmocks.Collections{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), env)(col)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Twice()
+			col.On("Get", mock.Anything, spaceID, envID, colID).Return(&collections.Collection{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "name"}, nil).Once()
+			col.On("List", mock.Anything, spaceID, envID, mock.Anything).Return([]*collections.Collection{{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "name"}}, nil).Twice()
+			col.On("List", mock.Anything, spaceID, envAlias, mock.Anything).Return([]*collections.Collection{{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "name"}}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID, colID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша по ID окружения.")
+
+			v3, err := svc.Get(ctx, spaceID, envAlias, colID)
+			require.NoError(t, err)
+			assert.Same(t, v2, v3, "Ожидается получение объекта из кеша по Alias окружения.")
+
+			vl1, err := svc.List(ctx, spaceID, envID, nil)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, spaceID, envID, nil)
+			require.NoError(t, err)
+			assert.Len(t, vl2, 1)
+			assert.Same(t, vl1[0], vl2[0], "Ожидается получение объектов из кеша по ID окружения.")
+
+			vl3, err := svc.List(ctx, spaceID, envAlias, nil)
+			require.NoError(t, err)
+			assert.Len(t, vl3, 1)
+			assert.Equal(t, vl2[0], vl3[0], "Ожидается получение объектов из кеша по Alias окружения.")
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			col.On("SetSchema", mock.Anything, spaceID, envID, colID, mock.Anything).Return(nil).Once()
+			err = svc.SetSchema(ctx, spaceID, envID, colID, &schema.Schema{})
+			require.NoError(t, err)
+
+			//env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			col.On("Get", mock.Anything, spaceID, envID, colID).Return(&collections.Collection{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "nameUPD"}, nil).Once()
+			col.On("List", mock.Anything, spaceID, envID, mock.Anything).Return([]*collections.Collection{{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "nameUPD"}}, nil).Once()
+			col.On("List", mock.Anything, spaceID, envAlias, mock.Anything).Return([]*collections.Collection{{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "nameUPD"}}, nil).Once()
+
+			v4, err := svc.Get(ctx, spaceID, envID, colID)
+			require.NoError(t, err)
+			assert.NotSame(t, v3, v4, "Ожидает что элемент после обновления схемы был удален из кэша и будет запрошен заново из сервиса.")
+
+			v5, err := svc.Get(ctx, spaceID, envAlias, colID)
+			require.NoError(t, err)
+			assert.Same(t, v4, v5, "Ожидается получение объекта из кеша по Alias окружения.")
+
+			vl4, err := svc.List(ctx, spaceID, envID, nil)
+			require.NoError(t, err)
+			assert.NotSame(t, vl4[0], vl3[0], "Ожидает что после обновления схемы элементы будут запрошены заново из сервиса.")
+
+			vl5, err := svc.List(ctx, spaceID, envAlias, nil)
+			require.NoError(t, err)
+			assert.Equal(t, vl4[0], vl5[0], "Ожидается получение объектов из кеша по Alias окружения.")
+
+			env.AssertExpectations(t)
+			col.AssertExpectations(t)
+		})
+
+		t.Run("After Delete", func(t *testing.T) {
+			col := &colsmocks.Collections{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), env)(col)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Twice()
+			col.On("Get", mock.Anything, spaceID, envID, colID).Return(&collections.Collection{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "name"}, nil).Once()
+			col.On("List", mock.Anything, spaceID, envID, mock.Anything).Return([]*collections.Collection{{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "name"}}, nil).Twice()
+			col.On("List", mock.Anything, spaceID, envAlias, mock.Anything).Return([]*collections.Collection{{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "name"}}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID, colID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша по ID окружения.")
+
+			v3, err := svc.Get(ctx, spaceID, envAlias, colID)
+			require.NoError(t, err)
+			assert.Same(t, v2, v3, "Ожидается получение объекта из кеша по Alias окружения.")
+
+			vl1, err := svc.List(ctx, spaceID, envID, nil)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, spaceID, envID, nil)
+			require.NoError(t, err)
+			assert.Len(t, vl2, 1)
+			assert.Same(t, vl1[0], vl2[0], "Ожидается получение объектов из кеша по ID окружения.")
+
+			vl3, err := svc.List(ctx, spaceID, envAlias, nil)
+			require.NoError(t, err)
+			assert.Len(t, vl3, 1)
+			assert.Equal(t, vl2[0], vl3[0], "Ожидается получение объектов из кеша по Alias окружения.")
+
+			//env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			col.On("Delete", mock.Anything, spaceID, envID, colID).Return(nil).Once()
+			err = svc.Delete(ctx, spaceID, envID, colID)
+			require.NoError(t, err)
+
+			//env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			col.On("Get", mock.Anything, spaceID, envID, colID).Return(nil, errNotFound).Once()
+			col.On("List", mock.Anything, spaceID, envID, mock.Anything).Return([]*collections.Collection{}, nil).Once()
+
+			_, err = svc.Get(ctx, spaceID, envID, colID)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидает что элемент был удален из кэша и получена ошибка от сервиса.")
+
+			vl4, err := svc.List(ctx, spaceID, envID, nil)
+			require.NoError(t, err)
+			assert.Len(t, vl4, 0, "Ожидает что элементы были удалены из кэша.")
+
+			col.On("Get", mock.Anything, spaceID, envAlias, colID).Return(nil, errNotFound).Once()
+
+			_, err = svc.Get(ctx, spaceID, envAlias, colID)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидает что элемент был удален из кэша и получена ошибка от сервиса.")
+
+			env.AssertExpectations(t)
+			col.AssertExpectations(t)
+		})
+
+		t.Run("After Create", func(t *testing.T) {
+			col := &colsmocks.Collections{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), env)(col)
+
+			//env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			col.On("List", mock.Anything, spaceID, envID, mock.Anything).Return([]*collections.Collection{{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "name"}}, nil).Twice()
+			col.On("List", mock.Anything, spaceID, envAlias, mock.Anything).Return([]*collections.Collection{{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "name"}}, nil).Once()
+
+			vl1, err := svc.List(ctx, spaceID, envID, nil)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, spaceID, envID, nil)
+			require.NoError(t, err)
+			assert.Len(t, vl2, 1)
+			assert.Same(t, vl1[0], vl2[0], "Ожидается получение объектов из кеша по ID окружения.")
+
+			vl3, err := svc.List(ctx, spaceID, envAlias, nil)
+			require.NoError(t, err)
+			assert.Len(t, vl3, 1)
+			assert.Equal(t, vl2[0], vl3[0], "Ожидается получение объектов из кеша по Alias окружения.")
+
+			//env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			col.On("Create", mock.Anything, mock.Anything).Return(&collections.Collection{ID: "colID2", SpaceID: spaceID, EnvID: envID, Name: "name2"}, nil).Once()
+			_, err = svc.Create(ctx, &collections.Collection{ID: "colID2", SpaceID: spaceID, EnvID: envID, Name: "name2"})
+			require.NoError(t, err)
+
+			//env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			col.On("List", mock.Anything, spaceID, envID, mock.Anything).Return([]*collections.Collection{
+				{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "name"},
+				{ID: "colID2", SpaceID: spaceID, EnvID: envID, Name: "name2"},
+			}, nil).Once()
+			col.On("List", mock.Anything, spaceID, envAlias, mock.Anything).Return([]*collections.Collection{
+				{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "name"},
+				{ID: "colID2", SpaceID: spaceID, EnvID: envID, Name: "name2"},
+			}, nil).Once()
+
+			vl4, err := svc.List(ctx, spaceID, envID, nil)
+			require.NoError(t, err)
+			assert.Len(t, vl4, 2, "Ожидает что элементы были удалены из кэша и получены заново из сервиса.")
+
+			vl5, err := svc.List(ctx, spaceID, envAlias, nil)
+			require.NoError(t, err)
+			assert.Len(t, vl5, 2)
+			assert.Equal(t, vl4[0], vl5[0], "Ожидается получение объектов из кеша по Alias окружения.")
+
+			env.AssertExpectations(t)
+			col.AssertExpectations(t)
+		})
+
+		t.Run("After TTL expired", func(t *testing.T) {
+			col := &colsmocks.Collections{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), env)(col)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil)
+			col.On("Get", mock.Anything, spaceID, envID, colID).Return(&collections.Collection{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "name"}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID, colID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+			time.Sleep(2 * ttl)
+
+			col.On("Get", mock.Anything, spaceID, envID, colID).Return(&collections.Collection{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "name"}, nil).Once()
+
+			v3, err := svc.Get(ctx, spaceID, envID, colID)
+			require.NoError(t, err)
+			assert.NotSame(t, v3, v2, "Ожидает что элемент был удален из кэша и будет запрошен заново из сервиса.")
+
+			env.AssertExpectations(t)
+			col.AssertExpectations(t)
+		})
+	})
+
+}
diff --git a/pkg/collections/middleware/error_logging_middleware.go b/pkg/collections/middleware/error_logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..d1be7f66c8564cc162d93a926652c63d0a670e1d
--- /dev/null
+++ b/pkg/collections/middleware/error_logging_middleware.go
@@ -0,0 +1,101 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/error_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/collections -i Collections -t ../../../assets/templates/middleware/error_log -o error_logging_middleware.go -l ""
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/collections"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"go.uber.org/zap"
+)
+
+// errorLoggingMiddleware implements collections.Collections that is instrumented with logging
+type errorLoggingMiddleware struct {
+	logger *zap.Logger
+	next   collections.Collections
+}
+
+// ErrorLoggingMiddleware instruments an implementation of the collections.Collections with simple logging
+func ErrorLoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next collections.Collections) collections.Collections {
+		return &errorLoggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *errorLoggingMiddleware) Create(ctx context.Context, collection *collections.Collection) (created *collections.Collection, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Create(ctx, collection)
+}
+
+func (m *errorLoggingMiddleware) Delete(ctx context.Context, spaceId string, envId string, collectionId string) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Delete(ctx, spaceId, envId, collectionId)
+}
+
+func (m *errorLoggingMiddleware) Get(ctx context.Context, spaceId string, envId string, collectionId string, options ...*collections.GetOptions) (collection *collections.Collection, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Get(ctx, spaceId, envId, collectionId, options...)
+}
+
+func (m *errorLoggingMiddleware) List(ctx context.Context, spaceId string, envId string, filter *collections.Filter) (collections []*collections.Collection, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.List(ctx, spaceId, envId, filter)
+}
+
+func (m *errorLoggingMiddleware) SetSchema(ctx context.Context, spaceId string, envId string, collectionId string, schema *schema.Schema) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.SetSchema(ctx, spaceId, envId, collectionId, schema)
+}
+
+func (m *errorLoggingMiddleware) SetState(ctx context.Context, spaceId string, envId string, collectionId string, state *collections.StateInfo) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.SetState(ctx, spaceId, envId, collectionId, state)
+}
+
+func (m *errorLoggingMiddleware) Update(ctx context.Context, coll *collections.Collection) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Update(ctx, coll)
+}
diff --git a/pkg/collections/middleware/logging_middleware.go b/pkg/collections/middleware/logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..dd43cb9ffd3e662241a1bb08e7174cfd1c5677df
--- /dev/null
+++ b/pkg/collections/middleware/logging_middleware.go
@@ -0,0 +1,296 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/access_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/collections -i Collections -t ../../../assets/templates/middleware/access_log -o logging_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/auth"
+	"git.perx.ru/perxis/perxis-go/pkg/collections"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"go.uber.org/zap"
+	"go.uber.org/zap/zapcore"
+)
+
+// loggingMiddleware implements collections.Collections that is instrumented with logging
+type loggingMiddleware struct {
+	logger *zap.Logger
+	next   collections.Collections
+}
+
+// LoggingMiddleware instruments an implementation of the collections.Collections with simple logging
+func LoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next collections.Collections) collections.Collections {
+		return &loggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *loggingMiddleware) Create(ctx context.Context, collection *collections.Collection) (created *collections.Collection, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":        ctx,
+		"collection": collection} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Create.Request", fields...)
+
+	created, err = m.next.Create(ctx, collection)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"created": created,
+		"err":     err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Create.Response", fields...)
+
+	return created, err
+}
+
+func (m *loggingMiddleware) Delete(ctx context.Context, spaceId string, envId string, collectionId string) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Delete.Request", fields...)
+
+	err = m.next.Delete(ctx, spaceId, envId, collectionId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Delete.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Get(ctx context.Context, spaceId string, envId string, collectionId string, options ...*collections.GetOptions) (collection *collections.Collection, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Request", fields...)
+
+	collection, err = m.next.Get(ctx, spaceId, envId, collectionId, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"collection": collection,
+		"err":        err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Response", fields...)
+
+	return collection, err
+}
+
+func (m *loggingMiddleware) List(ctx context.Context, spaceId string, envId string, filter *collections.Filter) (collections []*collections.Collection, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId,
+		"envId":   envId,
+		"filter":  filter} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("List.Request", fields...)
+
+	collections, err = m.next.List(ctx, spaceId, envId, filter)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"collections": collections,
+		"err":         err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("List.Response", fields...)
+
+	return collections, err
+}
+
+func (m *loggingMiddleware) SetSchema(ctx context.Context, spaceId string, envId string, collectionId string, schema *schema.Schema) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"schema":       schema} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("SetSchema.Request", fields...)
+
+	err = m.next.SetSchema(ctx, spaceId, envId, collectionId, schema)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("SetSchema.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) SetState(ctx context.Context, spaceId string, envId string, collectionId string, state *collections.StateInfo) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"state":        state} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("SetState.Request", fields...)
+
+	err = m.next.SetState(ctx, spaceId, envId, collectionId, state)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("SetState.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Update(ctx context.Context, coll *collections.Collection) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":  ctx,
+		"coll": coll} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Update.Request", fields...)
+
+	err = m.next.Update(ctx, coll)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Update.Response", fields...)
+
+	return err
+}
diff --git a/pkg/collections/middleware/middleware.go b/pkg/collections/middleware/middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..911368f0f1df019e0e9b4b22afac9863cd4e523b
--- /dev/null
+++ b/pkg/collections/middleware/middleware.go
@@ -0,0 +1,28 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/middleware
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/collections -i Collections -t ../../../assets/templates/middleware/middleware -o middleware.go -l ""
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/collections"
+	"go.uber.org/zap"
+)
+
+type Middleware func(collections.Collections) collections.Collections
+
+func WithLog(s collections.Collections, logger *zap.Logger, log_access bool) collections.Collections {
+	if logger == nil {
+		logger = zap.NewNop()
+	}
+
+	logger = logger.Named("Collections")
+	s = ErrorLoggingMiddleware(logger)(s)
+	if log_access {
+		s = LoggingMiddleware(logger)(s)
+	}
+	s = RecoveringMiddleware(logger)(s)
+	return s
+}
diff --git a/pkg/collections/middleware/recovering_middleware.go b/pkg/collections/middleware/recovering_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..fb61326fd05df2473219bccacea776246a68376e
--- /dev/null
+++ b/pkg/collections/middleware/recovering_middleware.go
@@ -0,0 +1,116 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/recovery
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/collections -i Collections -t ../../../assets/templates/middleware/recovery -o recovering_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+
+	"git.perx.ru/perxis/perxis-go/pkg/collections"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"go.uber.org/zap"
+)
+
+// recoveringMiddleware implements collections.Collections that is instrumented with logging
+type recoveringMiddleware struct {
+	logger *zap.Logger
+	next   collections.Collections
+}
+
+// RecoveringMiddleware instruments an implementation of the collections.Collections with simple logging
+func RecoveringMiddleware(logger *zap.Logger) Middleware {
+	return func(next collections.Collections) collections.Collections {
+		return &recoveringMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *recoveringMiddleware) Create(ctx context.Context, collection *collections.Collection) (created *collections.Collection, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Create(ctx, collection)
+}
+
+func (m *recoveringMiddleware) Delete(ctx context.Context, spaceId string, envId string, collectionId string) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Delete(ctx, spaceId, envId, collectionId)
+}
+
+func (m *recoveringMiddleware) Get(ctx context.Context, spaceId string, envId string, collectionId string, options ...*collections.GetOptions) (collection *collections.Collection, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Get(ctx, spaceId, envId, collectionId, options...)
+}
+
+func (m *recoveringMiddleware) List(ctx context.Context, spaceId string, envId string, filter *collections.Filter) (collections []*collections.Collection, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.List(ctx, spaceId, envId, filter)
+}
+
+func (m *recoveringMiddleware) SetSchema(ctx context.Context, spaceId string, envId string, collectionId string, schema *schema.Schema) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.SetSchema(ctx, spaceId, envId, collectionId, schema)
+}
+
+func (m *recoveringMiddleware) SetState(ctx context.Context, spaceId string, envId string, collectionId string, state *collections.StateInfo) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.SetState(ctx, spaceId, envId, collectionId, state)
+}
+
+func (m *recoveringMiddleware) Update(ctx context.Context, coll *collections.Collection) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Update(ctx, coll)
+}
diff --git a/pkg/collections/mocks/Collections.go b/pkg/collections/mocks/Collections.go
new file mode 100644
index 0000000000000000000000000000000000000000..e52dffabfd0c8764ab1edd08979b873666d1935e
--- /dev/null
+++ b/pkg/collections/mocks/Collections.go
@@ -0,0 +1,163 @@
+// Code generated by mockery v2.14.0. DO NOT EDIT.
+
+package mocks
+
+import (
+	context "context"
+
+	collections "git.perx.ru/perxis/perxis-go/pkg/collections"
+	schema "git.perx.ru/perxis/perxis-go/pkg/schema"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// Collections is an autogenerated mock type for the Collections type
+type Collections struct {
+	mock.Mock
+}
+
+// Create provides a mock function with given fields: ctx, collection
+func (_m *Collections) Create(ctx context.Context, collection *collections.Collection) (*collections.Collection, error) {
+	ret := _m.Called(ctx, collection)
+
+	var r0 *collections.Collection
+	if rf, ok := ret.Get(0).(func(context.Context, *collections.Collection) *collections.Collection); ok {
+		r0 = rf(ctx, collection)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*collections.Collection)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, *collections.Collection) error); ok {
+		r1 = rf(ctx, collection)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Delete provides a mock function with given fields: ctx, spaceId, envId, collectionId
+func (_m *Collections) Delete(ctx context.Context, spaceId string, envId string, collectionId string) error {
+	ret := _m.Called(ctx, spaceId, envId, collectionId)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string) error); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Get provides a mock function with given fields: ctx, spaceId, envId, collectionId, options
+func (_m *Collections) Get(ctx context.Context, spaceId string, envId string, collectionId string, options ...*collections.GetOptions) (*collections.Collection, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 *collections.Collection
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, ...*collections.GetOptions) *collections.Collection); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*collections.Collection)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, ...*collections.GetOptions) error); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, options...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// List provides a mock function with given fields: ctx, spaceId, envId, filter
+func (_m *Collections) List(ctx context.Context, spaceId string, envId string, filter *collections.Filter) ([]*collections.Collection, error) {
+	ret := _m.Called(ctx, spaceId, envId, filter)
+
+	var r0 []*collections.Collection
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, *collections.Filter) []*collections.Collection); ok {
+		r0 = rf(ctx, spaceId, envId, filter)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*collections.Collection)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, *collections.Filter) error); ok {
+		r1 = rf(ctx, spaceId, envId, filter)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// SetSchema provides a mock function with given fields: ctx, spaceId, envId, collectionId, _a4
+func (_m *Collections) SetSchema(ctx context.Context, spaceId string, envId string, collectionId string, _a4 *schema.Schema) error {
+	ret := _m.Called(ctx, spaceId, envId, collectionId, _a4)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *schema.Schema) error); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, _a4)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// SetState provides a mock function with given fields: ctx, spaceId, envId, collectionId, state
+func (_m *Collections) SetState(ctx context.Context, spaceId string, envId string, collectionId string, state *collections.StateInfo) error {
+	ret := _m.Called(ctx, spaceId, envId, collectionId, state)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *collections.StateInfo) error); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, state)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Update provides a mock function with given fields: ctx, coll
+func (_m *Collections) Update(ctx context.Context, coll *collections.Collection) error {
+	ret := _m.Called(ctx, coll)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *collections.Collection) error); ok {
+		r0 = rf(ctx, coll)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+type mockConstructorTestingTNewCollections interface {
+	mock.TestingT
+	Cleanup(func())
+}
+
+// NewCollections creates a new instance of Collections. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+func NewCollections(t mockConstructorTestingTNewCollections) *Collections {
+	mock := &Collections{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
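+
+// Example (editorial sketch, assuming a standard testify-based test): stubbing
+// Collections.Get with this mock.
+//
+//	func TestGet(t *testing.T) {
+//		svc := NewCollections(t)
+//		svc.On("Get", mock.Anything, "space", "env", "coll").
+//			Return(&collections.Collection{ID: "coll"}, nil)
+//
+//		got, err := svc.Get(context.Background(), "space", "env", "coll")
+//		require.NoError(t, err)
+//		require.Equal(t, "coll", got.ID)
+//	}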
diff --git a/pkg/collections/options.go b/pkg/collections/options.go
new file mode 100644
index 0000000000000000000000000000000000000000..7bdf2b70bef9349d90cf012c5cf2e996698f13f0
--- /dev/null
+++ b/pkg/collections/options.go
@@ -0,0 +1,15 @@
+package collections
+
+type GetOptions struct {
+	DisableSchemaIncludes bool
+}
+
+func MergeGetOptions(opts ...*GetOptions) *GetOptions {
+	o := &GetOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		if opt.DisableSchemaIncludes {
+			o.DisableSchemaIncludes = true
+		}
+	}
+	return o
+}
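+
+// Example (editorial sketch): collapsing variadic options into a single value.
+//
+//	opts := MergeGetOptions(&GetOptions{DisableSchemaIncludes: true}, &GetOptions{})
+//	// opts.DisableSchemaIncludes == true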
diff --git a/pkg/collections/service.go b/pkg/collections/service.go
new file mode 100644
index 0000000000000000000000000000000000000000..e73133e7c5ecbf6f58d29b2f07343f05bb13d5b0
--- /dev/null
+++ b/pkg/collections/service.go
@@ -0,0 +1,30 @@
+package collections
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+)
+
+// @microgen grpc
+// @protobuf git.perx.ru/perxis/perxis-go/proto/collections
+// @grpc-addr content.collections.Collections
+type Collections interface {
+	Create(ctx context.Context, collection *Collection) (created *Collection, err error)
+	Get(ctx context.Context, spaceId, envId, collectionId string, options ...*GetOptions) (collection *Collection, err error)
+	List(ctx context.Context, spaceId, envId string, filter *Filter) (collections []*Collection, err error)
+	Update(ctx context.Context, coll *Collection) (err error)
+	SetSchema(ctx context.Context, spaceId, envId, collectionId string, schema *schema.Schema) (err error)
+
+	// @microgen -
+	SetState(ctx context.Context, spaceId, envId, collectionId string, state *StateInfo) (err error)
+	Delete(ctx context.Context, spaceId, envId, collectionId string) (err error)
+}
+
+type Filter struct {
+	IncludeNoData bool     `json:"include_no_data,omitempty"`
+	IncludeHidden bool     `json:"include_hidden,omitempty"`
+	ExcludeSystem bool     `json:"exclude_system,omitempty"`
+	Name          []string `json:"name,omitempty"`
+	ID            []string `json:"id,omitempty"`
+}
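+
+// Example (editorial sketch, svc is any Collections implementation): listing
+// non-system collections by name.
+//
+//	colls, err := svc.List(ctx, "space", "env", &Filter{
+//		ExcludeSystem: true,
+//		Name:          []string{"articles"},
+//	})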
diff --git a/pkg/collections/transport/client.microgen.go b/pkg/collections/transport/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..7fd5b53f360a927e48c00edcd3f4dc7606dd47fe
--- /dev/null
+++ b/pkg/collections/transport/client.microgen.go
@@ -0,0 +1,107 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+	"errors"
+
+	collections "git.perx.ru/perxis/perxis-go/pkg/collections"
+	schema "git.perx.ru/perxis/perxis-go/pkg/schema"
+	codes "google.golang.org/grpc/codes"
+	status "google.golang.org/grpc/status"
+)
+
+func (set EndpointsSet) Create(arg0 context.Context, arg1 *collections.Collection) (res0 *collections.Collection, res1 error) {
+	request := CreateRequest{Collection: arg1}
+	response, res1 := set.CreateEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*CreateResponse).Created, res1
+}
+
+func (set EndpointsSet) Get(arg0 context.Context, arg1 string, arg2 string, arg3 string, arg4 ...*collections.GetOptions) (res0 *collections.Collection, res1 error) {
+	request := GetRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		Options:      arg4,
+		SpaceId:      arg1,
+	}
+	response, res1 := set.GetEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*GetResponse).Collection, res1
+}
+
+func (set EndpointsSet) List(arg0 context.Context, arg1 string, arg2 string, arg3 *collections.Filter) (res0 []*collections.Collection, res1 error) {
+	request := ListRequest{
+		EnvId:   arg2,
+		Filter:  arg3,
+		SpaceId: arg1,
+	}
+	response, res1 := set.ListEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*ListResponse).Collections, res1
+}
+
+func (set EndpointsSet) Update(arg0 context.Context, arg1 *collections.Collection) (res0 error) {
+	request := UpdateRequest{Coll: arg1}
+	_, res0 = set.UpdateEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) SetSchema(arg0 context.Context, arg1 string, arg2 string, arg3 string, arg4 *schema.Schema) (res0 error) {
+	request := SetSchemaRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		Schema:       arg4,
+		SpaceId:      arg1,
+	}
+	_, res0 = set.SetSchemaEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) SetState(arg0 context.Context, arg1 string, arg2 string, arg3 string, arg4 *collections.StateInfo) (res0 error) {
+	return
+}
+
+func (set EndpointsSet) Delete(arg0 context.Context, arg1 string, arg2 string, arg3 string) (res0 error) {
+	request := DeleteRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		SpaceId:      arg1,
+	}
+	_, res0 = set.DeleteEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
diff --git a/pkg/collections/transport/endpoints.microgen.go b/pkg/collections/transport/endpoints.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..0b4643bbf83e24b8f751d5dbaac676ea2a4f2f5b
--- /dev/null
+++ b/pkg/collections/transport/endpoints.microgen.go
@@ -0,0 +1,15 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import endpoint "github.com/go-kit/kit/endpoint"
+
+// EndpointsSet implements Collections API and used for transport purposes.
+type EndpointsSet struct {
+	CreateEndpoint    endpoint.Endpoint
+	GetEndpoint       endpoint.Endpoint
+	ListEndpoint      endpoint.Endpoint
+	UpdateEndpoint    endpoint.Endpoint
+	SetSchemaEndpoint endpoint.Endpoint
+	DeleteEndpoint    endpoint.Endpoint
+}
diff --git a/pkg/collections/transport/exchanges.microgen.go b/pkg/collections/transport/exchanges.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..3aabcbc890d2e5691fe6167024254bba71f5fe2d
--- /dev/null
+++ b/pkg/collections/transport/exchanges.microgen.go
@@ -0,0 +1,59 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	collections "git.perx.ru/perxis/perxis-go/pkg/collections"
+	schema "git.perx.ru/perxis/perxis-go/pkg/schema"
+)
+
+type (
+	CreateRequest struct {
+		Collection *collections.Collection `json:"collection"`
+	}
+	CreateResponse struct {
+		Created *collections.Collection `json:"created"`
+	}
+
+	GetRequest struct {
+		SpaceId      string                    `json:"space_id"`
+		EnvId        string                    `json:"env_id"`
+		CollectionId string                    `json:"collection_id"`
+		Options      []*collections.GetOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	GetResponse struct {
+		Collection *collections.Collection `json:"collection"`
+	}
+
+	ListRequest struct {
+		SpaceId string              `json:"space_id"`
+		EnvId   string              `json:"env_id"`
+		Filter  *collections.Filter `json:"filter"`
+	}
+	ListResponse struct {
+		Collections []*collections.Collection `json:"collections"`
+	}
+
+	UpdateRequest struct {
+		Coll *collections.Collection `json:"coll"`
+	}
+	// Formal exchange type, please do not delete.
+	UpdateResponse struct{}
+
+	SetSchemaRequest struct {
+		SpaceId      string         `json:"space_id"`
+		EnvId        string         `json:"env_id"`
+		CollectionId string         `json:"collection_id"`
+		Schema       *schema.Schema `json:"schema"`
+	}
+	// Formal exchange type, please do not delete.
+	SetSchemaResponse struct{}
+
+	DeleteRequest struct {
+		SpaceId      string `json:"space_id"`
+		EnvId        string `json:"env_id"`
+		CollectionId string `json:"collection_id"`
+	}
+	// Formal exchange type, please do not delete.
+	DeleteResponse struct{}
+)
diff --git a/pkg/collections/transport/grpc/client.microgen.go b/pkg/collections/transport/grpc/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..0f41276cea0c2112aa1eb316b83d745c88208b07
--- /dev/null
+++ b/pkg/collections/transport/grpc/client.microgen.go
@@ -0,0 +1,61 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/collections/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/collections"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	grpc "google.golang.org/grpc"
+)
+
+func NewGRPCClient(conn *grpc.ClientConn, addr string, opts ...grpckit.ClientOption) transport.EndpointsSet {
+	if addr == "" {
+		addr = "content.collections.Collections"
+	}
+	return transport.EndpointsSet{
+		CreateEndpoint: grpckit.NewClient(
+			conn, addr, "Create",
+			_Encode_Create_Request,
+			_Decode_Create_Response,
+			pb.CreateResponse{},
+			opts...,
+		).Endpoint(),
+		DeleteEndpoint: grpckit.NewClient(
+			conn, addr, "Delete",
+			_Encode_Delete_Request,
+			_Decode_Delete_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		GetEndpoint: grpckit.NewClient(
+			conn, addr, "Get",
+			_Encode_Get_Request,
+			_Decode_Get_Response,
+			pb.GetResponse{},
+			opts...,
+		).Endpoint(),
+		ListEndpoint: grpckit.NewClient(
+			conn, addr, "List",
+			_Encode_List_Request,
+			_Decode_List_Response,
+			pb.ListResponse{},
+			opts...,
+		).Endpoint(),
+		SetSchemaEndpoint: grpckit.NewClient(
+			conn, addr, "SetSchema",
+			_Encode_SetSchema_Request,
+			_Decode_SetSchema_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		UpdateEndpoint: grpckit.NewClient(
+			conn, addr, "Update",
+			_Encode_Update_Request,
+			_Decode_Update_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+	}
+}
diff --git a/pkg/collections/transport/grpc/protobuf_endpoint_converters.microgen.go b/pkg/collections/transport/grpc/protobuf_endpoint_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..34c541f8f4e05556c58c2a0a8547812a3afe4e1e
--- /dev/null
+++ b/pkg/collections/transport/grpc/protobuf_endpoint_converters.microgen.go
@@ -0,0 +1,273 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// Please, do not change functions names!
+package transportgrpc
+
+import (
+	"context"
+	"errors"
+
+	transport "git.perx.ru/perxis/perxis-go/pkg/collections/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/collections"
+	empty "github.com/golang/protobuf/ptypes/empty"
+)
+
+func _Encode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*transport.CreateRequest)
+	reqCollection, err := PtrCollectionToProto(req.Collection)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateRequest{Collection: reqCollection}, nil
+}
+
+func _Encode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*transport.GetRequest)
+	return &pb.GetRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		SpaceId:      req.SpaceId,
+		Options:      GetOptionsToProto(req.Options),
+	}, nil
+}
+
+func _Encode_List_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListRequest")
+	}
+	req := request.(*transport.ListRequest)
+	reqFilter, err := PtrFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.ListRequest{
+		EnvId:   req.EnvId,
+		Filter:  reqFilter,
+		SpaceId: req.SpaceId,
+	}, nil
+}
+
+func _Encode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UpdateRequest")
+	}
+	req := request.(*transport.UpdateRequest)
+	reqColl, err := PtrCollectionToProto(req.Coll)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.UpdateRequest{Collection: reqColl}, nil
+}
+
+func _Encode_SetSchema_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil SetSchemaRequest")
+	}
+	req := request.(*transport.SetSchemaRequest)
+	reqSchema, err := PtrSchemaSchemaToProto(req.Schema)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.SetSchemaRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		Schema:       reqSchema,
+		SpaceId:      req.SpaceId,
+	}, nil
+}
+
+func _Encode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*transport.DeleteRequest)
+	return &pb.DeleteRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		SpaceId:      req.SpaceId,
+	}, nil
+}
+
+func _Encode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*transport.CreateResponse)
+	respCreated, err := PtrCollectionToProto(resp.Created)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateResponse{Created: respCreated}, nil
+}
+
+func _Encode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*transport.GetResponse)
+	respCollection, err := PtrCollectionToProto(resp.Collection)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetResponse{Collection: respCollection}, nil
+}
+
+func _Encode_List_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListResponse")
+	}
+	resp := response.(*transport.ListResponse)
+	respCollections, err := ListPtrCollectionToProto(resp.Collections)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.ListResponse{Collections: respCollections}, nil
+}
+
+func _Encode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_SetSchema_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*pb.CreateRequest)
+	reqCollection, err := ProtoToPtrCollection(req.Collection)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateRequest{Collection: reqCollection}, nil
+}
+
+func _Decode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*pb.GetRequest)
+	return &transport.GetRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		SpaceId:      string(req.SpaceId),
+		Options:      ProtoToGetOptions(req.Options),
+	}, nil
+}
+
+func _Decode_List_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListRequest")
+	}
+	req := request.(*pb.ListRequest)
+	reqFilter, err := ProtoToPtrFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.ListRequest{
+		EnvId:   string(req.EnvId),
+		Filter:  reqFilter,
+		SpaceId: string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UpdateRequest")
+	}
+	req := request.(*pb.UpdateRequest)
+	reqColl, err := ProtoToPtrCollection(req.Collection)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.UpdateRequest{Coll: reqColl}, nil
+}
+
+func _Decode_SetSchema_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil SetSchemaRequest")
+	}
+	req := request.(*pb.SetSchemaRequest)
+	reqSchema, err := ProtoToPtrSchemaSchema(req.Schema)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.SetSchemaRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		Schema:       reqSchema,
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*pb.DeleteRequest)
+	return &transport.DeleteRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*pb.CreateResponse)
+	respCreated, err := ProtoToPtrCollection(resp.Created)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateResponse{Created: respCreated}, nil
+}
+
+func _Decode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*pb.GetResponse)
+	respCollection, err := ProtoToPtrCollection(resp.Collection)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetResponse{Collection: respCollection}, nil
+}
+
+func _Decode_List_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListResponse")
+	}
+	resp := response.(*pb.ListResponse)
+	respCollections, err := ProtoToListPtrCollection(resp.Collections)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.ListResponse{Collections: respCollections}, nil
+}
+
+func _Decode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_SetSchema_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
diff --git a/pkg/collections/transport/grpc/protobuf_type_converters.microgen.go b/pkg/collections/transport/grpc/protobuf_type_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..d19bee8b078380d6af962eac8e7738ba0170cd98
--- /dev/null
+++ b/pkg/collections/transport/grpc/protobuf_type_converters.microgen.go
@@ -0,0 +1,229 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// It is better for you if you do not change functions names!
+// This file will never be overwritten.
+package transportgrpc
+
+import (
+	"fmt"
+
+	service "git.perx.ru/perxis/perxis-go/pkg/collections"
+	"git.perx.ru/perxis/perxis-go/pkg/permission"
+	schema "git.perx.ru/perxis/perxis-go/pkg/schema"
+	pb "git.perx.ru/perxis/perxis-go/proto/collections"
+	commonpb "git.perx.ru/perxis/perxis-go/proto/common"
+	jsoniter "github.com/json-iterator/go"
+	"google.golang.org/protobuf/types/known/timestamppb"
+)
+
+func PtrSchemaSchemaToProto(schema *schema.Schema) (string, error) {
+	if schema == nil {
+		return "", nil
+	}
+	res, err := jsoniter.MarshalToString(schema)
+	if err != nil {
+		return "", err
+	}
+	return res, nil
+}
+
+func ProtoToPtrSchemaSchema(protoSchema string) (*schema.Schema, error) {
+	if protoSchema == "" {
+		return nil, nil
+	}
+	sch := schema.New()
+	err := sch.UnmarshalJSON([]byte(protoSchema))
+	if err != nil {
+		return nil, fmt.Errorf("failed to decode schema. err: %s", err.Error())
+	}
+	return sch, nil
+}
+
+func PtrCollectionToProto(coll *service.Collection) (*pb.Collection, error) {
+	if coll == nil {
+		return nil, nil
+	}
+
+	var access *pb.Access
+
+	if coll.Access != nil {
+		actions := make([]commonpb.Action, len(coll.Access.Actions))
+		for i, a := range coll.Access.Actions {
+			actions[i] = commonpb.Action(a)
+		}
+		access = &pb.Access{
+			Actions:         actions,
+			HiddenFields:    coll.Access.HiddenFields,
+			ReadonlyFields:  coll.Access.ReadonlyFields,
+			WriteonlyFields: coll.Access.WriteonlyFields,
+		}
+	}
+	protoCollection := &pb.Collection{
+		Id:      coll.ID,
+		SpaceId: coll.SpaceID,
+		EnvId:   coll.EnvID,
+		Name:    coll.Name,
+		Single:  coll.Single,
+		System:  coll.System,
+		NoData:  coll.NoData,
+		Access:  access,
+		Hidden:  coll.Hidden,
+		Tags:    coll.Tags,
+	}
+
+	if coll.StateInfo != nil {
+		protoCollection.StateInfo = &pb.Collection_StateInfo{
+			State:     pb.Collection_State(coll.StateInfo.State),
+			Info:      coll.StateInfo.Info,
+			StartedAt: timestamppb.New(coll.StateInfo.StartedAt),
+		}
+	}
+
+	sch, err := PtrSchemaSchemaToProto(coll.Schema)
+	if err != nil {
+		return nil, err
+	}
+	protoCollection.Schema = sch
+
+	if coll.View != nil {
+		protoCollection.View = &pb.Collection_View{
+			SpaceId:      coll.View.SpaceID,
+			EnvId:        coll.View.EnvID,
+			CollectionId: coll.View.CollectionID,
+			Filter:       coll.View.Filter,
+		}
+	}
+
+	return protoCollection, nil
+}
+
+func ProtoToPtrCollection(protoCollection *pb.Collection) (*service.Collection, error) {
+	if protoCollection == nil {
+		return nil, nil
+	}
+
+	var access *service.Access
+
+	if protoCollection.Access != nil {
+		actions := make([]permission.Action, len(protoCollection.Access.Actions))
+		for i, a := range protoCollection.Access.Actions {
+			actions[i] = permission.Action(a)
+		}
+		access = &service.Access{
+			Actions:         actions,
+			HiddenFields:    protoCollection.Access.HiddenFields,
+			ReadonlyFields:  protoCollection.Access.ReadonlyFields,
+			WriteonlyFields: protoCollection.Access.WriteonlyFields,
+		}
+	}
+	collection := &service.Collection{
+		ID:      protoCollection.Id,
+		SpaceID: protoCollection.SpaceId,
+		EnvID:   protoCollection.EnvId,
+		Name:    protoCollection.Name,
+		Single:  protoCollection.Single,
+		System:  protoCollection.System,
+		NoData:  protoCollection.NoData,
+		Access:  access,
+		Hidden:  protoCollection.Hidden,
+		Tags:    protoCollection.Tags,
+	}
+
+	if protoCollection.StateInfo != nil {
+		collection.StateInfo = &service.StateInfo{
+			State:     service.State(protoCollection.StateInfo.State),
+			Info:      protoCollection.StateInfo.Info,
+			StartedAt: protoCollection.StateInfo.StartedAt.AsTime(),
+		}
+	}
+
+	schm, err := ProtoToPtrSchemaSchema(protoCollection.Schema)
+	if err != nil {
+		return nil, err
+	}
+	collection.Schema = schm
+
+	if protoCollection.View != nil {
+		collection.View = &service.View{
+			SpaceID:      protoCollection.View.SpaceId,
+			EnvID:        protoCollection.View.EnvId,
+			CollectionID: protoCollection.View.CollectionId,
+			Filter:       protoCollection.View.Filter,
+		}
+	}
+
+	return collection, nil
+}
+
+func ListPtrCollectionToProto(collections []*service.Collection) ([]*pb.Collection, error) {
+	protoCollections := make([]*pb.Collection, 0, len(collections))
+	for _, collection := range collections {
+		protoCollection, err := PtrCollectionToProto(collection)
+		if err != nil {
+			return nil, err
+		}
+		protoCollections = append(protoCollections, protoCollection)
+	}
+	return protoCollections, nil
+}
+
+func ProtoToListPtrCollection(protoCollections []*pb.Collection) ([]*service.Collection, error) {
+	collections := make([]*service.Collection, 0, len(protoCollections))
+	for _, protoCollection := range protoCollections {
+		collection, err := ProtoToPtrCollection(protoCollection)
+		if err != nil {
+			return nil, err
+		}
+		collections = append(collections, collection)
+	}
+	return collections, nil
+}
+
+func ProtoToPtrBool(protoSingle *bool) (*bool, error) {
+	panic("function not provided") // TODO: provide converter
+}
+
+func PtrBoolToProto(single *bool) (*bool, error) {
+	panic("function not provided") // TODO: provide converter
+}
+
+func PtrFilterToProto(filter *service.Filter) (*pb.ListRequest_Filter, error) {
+	if filter == nil {
+		return nil, nil
+	}
+	return &pb.ListRequest_Filter{
+		ExcludeSystem: filter.ExcludeSystem,
+		IncludeNoData: filter.IncludeNoData,
+		IncludeHidden: filter.IncludeHidden,
+		Name:          filter.Name,
+		Id:            filter.ID,
+	}, nil
+}
+
+func ProtoToPtrFilter(protoFilter *pb.ListRequest_Filter) (*service.Filter, error) {
+	if protoFilter == nil {
+		return nil, nil
+	}
+	return &service.Filter{
+		IncludeNoData: protoFilter.IncludeNoData,
+		IncludeHidden: protoFilter.IncludeHidden,
+		ExcludeSystem: protoFilter.ExcludeSystem,
+		Name:          protoFilter.Name,
+		ID:            protoFilter.Id,
+	}, nil
+}
+
+func GetOptionsToProto(options []*service.GetOptions) *pb.GetOptions {
+	opts := service.MergeGetOptions(options...)
+	return &pb.GetOptions{DisableSchemaIncludes: opts.DisableSchemaIncludes}
+}
+
+func ProtoToGetOptions(protoOptions *pb.GetOptions) []*service.GetOptions {
+	if protoOptions == nil {
+		return nil
+	}
+	opts := &service.GetOptions{
+		DisableSchemaIncludes: protoOptions.DisableSchemaIncludes,
+	}
+	return []*service.GetOptions{opts}
+}
diff --git a/pkg/collections/transport/grpc/server.microgen.go b/pkg/collections/transport/grpc/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..169e726d32ec7cfbbc465a7100d10aad2b80d481
--- /dev/null
+++ b/pkg/collections/transport/grpc/server.microgen.go
@@ -0,0 +1,112 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// DO NOT EDIT.
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/collections/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/collections"
+	grpc "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	context "golang.org/x/net/context"
+)
+
+type collectionsServer struct {
+	create    grpc.Handler
+	get       grpc.Handler
+	list      grpc.Handler
+	update    grpc.Handler
+	setSchema grpc.Handler
+	delete    grpc.Handler
+
+	pb.UnimplementedCollectionsServer
+}
+
+func NewGRPCServer(endpoints *transport.EndpointsSet, opts ...grpc.ServerOption) pb.CollectionsServer {
+	return &collectionsServer{
+		create: grpc.NewServer(
+			endpoints.CreateEndpoint,
+			_Decode_Create_Request,
+			_Encode_Create_Response,
+			opts...,
+		),
+		delete: grpc.NewServer(
+			endpoints.DeleteEndpoint,
+			_Decode_Delete_Request,
+			_Encode_Delete_Response,
+			opts...,
+		),
+		get: grpc.NewServer(
+			endpoints.GetEndpoint,
+			_Decode_Get_Request,
+			_Encode_Get_Response,
+			opts...,
+		),
+		list: grpc.NewServer(
+			endpoints.ListEndpoint,
+			_Decode_List_Request,
+			_Encode_List_Response,
+			opts...,
+		),
+		setSchema: grpc.NewServer(
+			endpoints.SetSchemaEndpoint,
+			_Decode_SetSchema_Request,
+			_Encode_SetSchema_Response,
+			opts...,
+		),
+		update: grpc.NewServer(
+			endpoints.UpdateEndpoint,
+			_Decode_Update_Request,
+			_Encode_Update_Response,
+			opts...,
+		),
+	}
+}
+
+func (S *collectionsServer) Create(ctx context.Context, req *pb.CreateRequest) (*pb.CreateResponse, error) {
+	_, resp, err := S.create.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.CreateResponse), nil
+}
+
+func (S *collectionsServer) Get(ctx context.Context, req *pb.GetRequest) (*pb.GetResponse, error) {
+	_, resp, err := S.get.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetResponse), nil
+}
+
+func (S *collectionsServer) List(ctx context.Context, req *pb.ListRequest) (*pb.ListResponse, error) {
+	_, resp, err := S.list.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.ListResponse), nil
+}
+
+func (S *collectionsServer) Update(ctx context.Context, req *pb.UpdateRequest) (*empty.Empty, error) {
+	_, resp, err := S.update.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *collectionsServer) SetSchema(ctx context.Context, req *pb.SetSchemaRequest) (*empty.Empty, error) {
+	_, resp, err := S.setSchema.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *collectionsServer) Delete(ctx context.Context, req *pb.DeleteRequest) (*empty.Empty, error) {
+	_, resp, err := S.delete.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
diff --git a/pkg/collections/transport/server.microgen.go b/pkg/collections/transport/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..958e28ab78913b23389469ce53316be45dd82c48
--- /dev/null
+++ b/pkg/collections/transport/server.microgen.go
@@ -0,0 +1,69 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+
+	collections "git.perx.ru/perxis/perxis-go/pkg/collections"
+	endpoint "github.com/go-kit/kit/endpoint"
+)
+
+func Endpoints(svc collections.Collections) EndpointsSet {
+	return EndpointsSet{
+		CreateEndpoint:    CreateEndpoint(svc),
+		DeleteEndpoint:    DeleteEndpoint(svc),
+		GetEndpoint:       GetEndpoint(svc),
+		ListEndpoint:      ListEndpoint(svc),
+		SetSchemaEndpoint: SetSchemaEndpoint(svc),
+		UpdateEndpoint:    UpdateEndpoint(svc),
+	}
+}
+
+func CreateEndpoint(svc collections.Collections) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*CreateRequest)
+		res0, res1 := svc.Create(arg0, req.Collection)
+		return &CreateResponse{Created: res0}, res1
+	}
+}
+
+func GetEndpoint(svc collections.Collections) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetRequest)
+		res0, res1 := svc.Get(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Options...)
+		return &GetResponse{Collection: res0}, res1
+	}
+}
+
+func ListEndpoint(svc collections.Collections) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*ListRequest)
+		res0, res1 := svc.List(arg0, req.SpaceId, req.EnvId, req.Filter)
+		return &ListResponse{Collections: res0}, res1
+	}
+}
+
+func UpdateEndpoint(svc collections.Collections) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*UpdateRequest)
+		res0 := svc.Update(arg0, req.Coll)
+		return &UpdateResponse{}, res0
+	}
+}
+
+func SetSchemaEndpoint(svc collections.Collections) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*SetSchemaRequest)
+		res0 := svc.SetSchema(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Schema)
+		return &SetSchemaResponse{}, res0
+	}
+}
+
+func DeleteEndpoint(svc collections.Collections) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*DeleteRequest)
+		res0 := svc.Delete(arg0, req.SpaceId, req.EnvId, req.CollectionId)
+		return &DeleteResponse{}, res0
+	}
+}
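+
+// Example (editorial sketch): exposing an implementation over gRPC by combining
+// this package with transport/grpc. pb.RegisterCollectionsServer is assumed to
+// be the protoc-generated registration function.
+//
+//	eps := Endpoints(svc)
+//	srv := transportgrpc.NewGRPCServer(&eps)
+//	pb.RegisterCollectionsServer(grpcServer, srv)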
diff --git a/pkg/content/client.go b/pkg/content/client.go
new file mode 100644
index 0000000000000000000000000000000000000000..a67648427576b7a9134cd4fdf3715273a5c56ac3
--- /dev/null
+++ b/pkg/content/client.go
@@ -0,0 +1,207 @@
+package content
+
+import (
+	"context"
+	"crypto/tls"
+	"crypto/x509"
+	"errors"
+	"fmt"
+	"net/url"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	clientsSvc "git.perx.ru/perxis/perxis-go/pkg/clients/middleware"
+	clientsTransportGrpc "git.perx.ru/perxis/perxis-go/pkg/clients/transport/grpc"
+	collaboratorsSvc "git.perx.ru/perxis/perxis-go/pkg/collaborators/middleware"
+	collaboratorsTransportGrpc "git.perx.ru/perxis/perxis-go/pkg/collaborators/transport/grpc"
+	collectionsSvc "git.perx.ru/perxis/perxis-go/pkg/collections/middleware"
+	collectionsTransportGrpc "git.perx.ru/perxis/perxis-go/pkg/collections/transport/grpc"
+	environmentsSvc "git.perx.ru/perxis/perxis-go/pkg/environments/middleware"
+	environmentsTransportGrpc "git.perx.ru/perxis/perxis-go/pkg/environments/transport/grpc"
+	invitationsSvc "git.perx.ru/perxis/perxis-go/pkg/invitations/middleware"
+	invitationsTransportGrpc "git.perx.ru/perxis/perxis-go/pkg/invitations/transport/grpc"
+	itemsSvc "git.perx.ru/perxis/perxis-go/pkg/items/middleware"
+	itemsTransportGrpc "git.perx.ru/perxis/perxis-go/pkg/items/transport/grpc"
+	localesSvc "git.perx.ru/perxis/perxis-go/pkg/locales/middleware"
+	localsTransportGrpc "git.perx.ru/perxis/perxis-go/pkg/locales/transport/grpc"
+	referencesSvc "git.perx.ru/perxis/perxis-go/pkg/references/middleware"
+	referencesTransportGrpc "git.perx.ru/perxis/perxis-go/pkg/references/transport/grpc"
+	rolesSvc "git.perx.ru/perxis/perxis-go/pkg/roles/middleware"
+	rolesTransportGrpc "git.perx.ru/perxis/perxis-go/pkg/roles/transport/grpc"
+	spacesSvc "git.perx.ru/perxis/perxis-go/pkg/spaces/middleware"
+	spacesTransportGrpc "git.perx.ru/perxis/perxis-go/pkg/spaces/transport/grpc"
+	"go.uber.org/zap"
+	"golang.org/x/oauth2/clientcredentials"
+	"google.golang.org/grpc"
+	"google.golang.org/grpc/credentials"
+	"google.golang.org/grpc/credentials/insecure"
+	"google.golang.org/grpc/credentials/oauth"
+	"google.golang.org/grpc/metadata"
+)
+
+const (
+	DefaultCacheSize = 1000
+	DefaultCacheTTL  = time.Second * 10
+)
+
+func NewClient(addr string, opts ...Option) (*Content, *grpc.ClientConn, error) {
+	ctx := context.Background()
+
+	client := &Content{}
+	dialOpts := make([]grpc.DialOption, 0)
+	config := &Config{}
+
+	for _, o := range opts {
+		o(config)
+	}
+
+	if config.Logger == nil {
+		config.Logger = zap.NewNop()
+	}
+
+	authDialOpts, err := config.GetAuthDialOpts(ctx)
+	if err != nil {
+		return nil, nil, err
+	}
+	dialOpts = append(dialOpts, authDialOpts...)
+
+	contentConn, err := grpc.Dial(addr, dialOpts...)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	client.Spaces = spacesTransportGrpc.NewClient(contentConn, config.ClientOptions...)
+	client.Environments = environmentsTransportGrpc.NewGRPCClient(contentConn, "", config.ClientOptions...)
+	client.Collections = collectionsTransportGrpc.NewGRPCClient(contentConn, "", config.ClientOptions...)
+	client.Items = itemsTransportGrpc.NewClient(contentConn, config.ClientOptions...)
+	client.Invitations = invitationsTransportGrpc.NewGRPCClient(contentConn, "", config.ClientOptions...)
+	client.Collaborators = collaboratorsTransportGrpc.NewGRPCClient(contentConn, "", config.ClientOptions...)
+	client.Clients = clientsTransportGrpc.NewGRPCClient(contentConn, "", config.ClientOptions...)
+	client.Locales = localsTransportGrpc.NewGRPCClient(contentConn, "", config.ClientOptions...)
+	client.Roles = rolesTransportGrpc.NewGRPCClient(contentConn, "", config.ClientOptions...)
+	client.References = referencesTransportGrpc.NewGRPCClient(contentConn, "", config.ClientOptions...)
+
+	if !config.NoDecode {
+		client.Items = itemsSvc.ClientEncodeMiddleware(client.Collections)(client.Items)
+		client.References = referencesSvc.ClientEncodeMiddleware(client.Collections)(client.References)
+	}
+
+	if !config.NoCache {
+		client = WithCaching(client, DefaultCacheSize, DefaultCacheTTL)
+	}
+
+	if !config.NoLog {
+		client = WithLogging(client, config.Logger, config.AccessLog)
+	}
+
+	return client, contentConn, nil
+}
+
+func WithCaching(client *Content, size int, ttl time.Duration) *Content {
+	c := *client
+
+	c.Clients = clientsSvc.CachingMiddleware(cache.NewCache(size, ttl))(client.Clients)
+	c.Environments = environmentsSvc.CachingMiddleware(cache.NewCache(size, ttl))(client.Environments)
+	c.Locales = localesSvc.CachingMiddleware(cache.NewCache(size, ttl))(client.Locales)
+	c.Roles = rolesSvc.CachingMiddleware(cache.NewCache(size, ttl))(client.Roles)
+	c.Spaces = spacesSvc.CachingMiddleware(cache.NewCache(size, ttl))(client.Spaces)
+	c.Items = itemsSvc.CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), c.Environments)(client.Items)
+	c.Collections = collectionsSvc.CachingMiddleware(cache.NewCache(size, ttl), c.Environments)(client.Collections)
+	c.Collaborators = collaboratorsSvc.CachingMiddleware(cache.NewCache(size, ttl))(client.Collaborators)
+	c.Invitations = invitationsSvc.CachingMiddleware(cache.NewCache(size, ttl))(client.Invitations)
+
+	return &c
+}
+
+func WithLogging(cs *Content, logger *zap.Logger, accessLog bool) *Content {
+	s := *cs
+
+	s.Collaborators = collaboratorsSvc.WithLog(s.Collaborators, logger, accessLog)
+	s.Collections = collectionsSvc.WithLog(s.Collections, logger, accessLog)
+	s.Environments = environmentsSvc.WithLog(s.Environments, logger, accessLog)
+	s.Invitations = invitationsSvc.WithLog(s.Invitations, logger, accessLog)
+	s.Items = itemsSvc.WithLog(s.Items, logger, accessLog)
+	s.Locales = localesSvc.WithLog(s.Locales, logger, accessLog)
+	s.Roles = rolesSvc.WithLog(s.Roles, logger, accessLog)
+	s.Spaces = spacesSvc.WithLog(s.Spaces, logger, accessLog)
+	s.Clients = clientsSvc.WithLog(s.Clients, logger, accessLog)
+	s.References = referencesSvc.WithLog(s.References, logger, accessLog)
+
+	return &s
+}
+
+func AddAPIKeyInterceptor(key string) grpc.UnaryClientInterceptor {
+	return func(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error {
+		ctx = metadata.AppendToOutgoingContext(ctx, "authorization", "API-Key "+key)
+		return invoker(ctx, method, req, reply, cc, opts...)
+	}
+}
+
+func (c *Config) GetAuthDialOpts(ctx context.Context) (opts []grpc.DialOption, err error) {
+
+	if c.Auth == nil {
+		opts = append(opts, grpc.WithTransportCredentials(insecure.NewCredentials()))
+		return
+	}
+
+	authConfig := c.Auth
+
+	switch {
+	case authConfig.OAuth2 != nil:
+		// create external grpc client
+		conf := &clientcredentials.Config{
+			TokenURL:       authConfig.OAuth2.TokenURL,
+			ClientID:       authConfig.OAuth2.ClientID,
+			ClientSecret:   authConfig.OAuth2.ClientSecret,
+			EndpointParams: url.Values{"audience": {authConfig.OAuth2.Audience}},
+		}
+
+		// TLS must be used for oauth.TokenSource credentials, see https://github.com/grpc/grpc-go/blob/64031cbfcf4d84c026be93ad7b74b3c290100893/credentials/oauth/oauth.go#L160
+		if authConfig.Insecure {
+			return nil, errors.New("oauth requires tls")
+		}
+		if authConfig.TLS == nil {
+			authConfig.TLS = &TLS{SkipVerify: true}
+		}
+
+		opts = append(opts,
+			grpc.WithPerRPCCredentials(oauth.TokenSource{
+				TokenSource: conf.TokenSource(ctx),
+			}),
+		)
+	case authConfig.APIKey != nil:
+
+		if !authConfig.Insecure && authConfig.TLS == nil {
+			authConfig.TLS = &TLS{SkipVerify: true}
+		}
+		opts = append(opts,
+			grpc.WithUnaryInterceptor(AddAPIKeyInterceptor(authConfig.APIKey.APIKey)),
+		)
+	}
+
+	switch {
+	case authConfig.TLS != nil && !authConfig.SkipVerify:
+		certPool := x509.NewCertPool()
+		if !certPool.AppendCertsFromPEM(authConfig.TLS.CaCert) {
+			return nil, fmt.Errorf("CA certificate not loaded")
+		}
+
+		clientCert, err := tls.X509KeyPair(authConfig.TLS.Cert, authConfig.TLS.Key)
+		if err != nil {
+			return nil, err
+		}
+
+		opts = append(opts,
+			grpc.WithTransportCredentials(credentials.NewTLS(&tls.Config{
+				Certificates: []tls.Certificate{clientCert},
+				RootCAs:      certPool,
+			})),
+		)
+	case authConfig.TLS != nil && authConfig.SkipVerify:
+		opts = append(opts, grpc.WithTransportCredentials(credentials.NewTLS(&tls.Config{InsecureSkipVerify: true})))
+	default:
+		opts = append(opts, grpc.WithTransportCredentials(insecure.NewCredentials()))
+	}
+
+	return
+}
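+
+// Example (editorial sketch; address and key are placeholders): connecting with
+// an API key and access logging enabled.
+//
+//	client, conn, err := NewClient("content.example.com:443",
+//		AuthAPIKey("my-api-key"),
+//		AccessLog(),
+//		Logger(logger),
+//	)
+//	if err != nil {
+//		// handle error
+//	}
+//	defer conn.Close()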
diff --git a/pkg/content/config.go b/pkg/content/config.go
new file mode 100644
index 0000000000000000000000000000000000000000..b77d7ddb68be20402c5b2d5807c58c325a9a4f2e
--- /dev/null
+++ b/pkg/content/config.go
@@ -0,0 +1,127 @@
+package content
+
+import (
+	kitgrpc "github.com/go-kit/kit/transport/grpc"
+	"go.uber.org/zap"
+)
+
+type Config struct {
+	Auth      *AuthConfig
+	NoCache   bool
+	NoLog     bool
+	AccessLog bool
+	Debug     bool
+	NoDecode  bool
+
+	ClientOptions []kitgrpc.ClientOption
+
+	Logger *zap.Logger
+}
+
+type AuthConfig struct {
+	*OAuth2  `json:"oauth_2,omitempty"`
+	*APIKey  `json:"api_key,omitempty"`
+	*TLS     `json:"tls,omitempty"`
+	Insecure bool `json:"insecure,omitempty"`
+}
+
+type OAuth2 struct {
+	TokenURL     string `json:"token_url,omitempty"`
+	ClientID     string `json:"client_id,omitempty"` // auth0 parameter (a client with this id must be created in perxis)
+	ClientSecret string `json:"client_secret,omitempty"`
+	Audience     string `json:"audience,omitempty"` // auth0 parameter (the name of the API linked to the Application)
+}
+
+type TLS struct {
+	CaCert     []byte `json:"tls-cacert"`
+	Cert       []byte `json:"tls-cert"`
+	Key        []byte `json:"tls-key"`
+	SkipVerify bool
+}
+
+type APIKey struct {
+	APIKey string `json:"api_key"`
+}
+
+type Option func(c *Config)
+
+func AuthOAuth2(tokenUrl, clientID, clientSecret, audience string) Option {
+	return func(c *Config) {
+		if c.Auth == nil {
+			c.Auth = &AuthConfig{}
+		}
+		c.Auth.OAuth2 = &OAuth2{
+			TokenURL:     tokenUrl,
+			ClientID:     clientID,
+			ClientSecret: clientSecret,
+			Audience:     audience,
+		}
+	}
+}
+
+func AuthTLS(cacert, cert, key []byte) Option {
+	return func(c *Config) {
+		if c.Auth == nil {
+			c.Auth = &AuthConfig{}
+		}
+		c.Auth.TLS = &TLS{
+			CaCert: cacert,
+			Cert:   cert,
+			Key:    key,
+		}
+	}
+}
+
+func AuthAPIKey(key string) Option {
+	return func(c *Config) {
+		if c.Auth == nil {
+			c.Auth = &AuthConfig{}
+		}
+		c.Auth.APIKey = &APIKey{APIKey: key}
+	}
+}
+
+func AuthInsecure() Option {
+	return func(c *Config) {
+		if c.Auth == nil {
+			c.Auth = &AuthConfig{}
+		}
+		c.Auth.Insecure = true
+	}
+}
+
+func Auth(cfg *AuthConfig) Option {
+	return func(c *Config) {
+		c.Auth = cfg
+	}
+}
+
+func NoCache() Option {
+	return func(c *Config) {
+		c.NoCache = true
+	}
+}
+
+func NoLog() Option {
+	return func(c *Config) {
+		c.NoLog = true
+	}
+}
+
+func Logger(logger *zap.Logger) Option {
+	return func(c *Config) {
+		c.Logger = logger
+	}
+}
+
+func AccessLog() Option {
+	return func(c *Config) {
+		c.AccessLog = true
+	}
+}
+
+func GrpcClientOptions(opts ...kitgrpc.ClientOption) Option {
+	return func(c *Config) {
+		c.ClientOptions = opts
+	}
+}
diff --git a/pkg/content/content.go b/pkg/content/content.go
new file mode 100644
index 0000000000000000000000000000000000000000..22acaaa7227622781e93b0a9a019acae51e76873
--- /dev/null
+++ b/pkg/content/content.go
@@ -0,0 +1,57 @@
+package content
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/auth"
+	"git.perx.ru/perxis/perxis-go/pkg/clients"
+	"git.perx.ru/perxis/perxis-go/pkg/collaborators"
+	"git.perx.ru/perxis/perxis-go/pkg/collections"
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	"git.perx.ru/perxis/perxis-go/pkg/invitations"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/locales"
+	"git.perx.ru/perxis/perxis-go/pkg/references"
+	"git.perx.ru/perxis/perxis-go/pkg/roles"
+	"git.perx.ru/perxis/perxis-go/pkg/spaces"
+	"git.perx.ru/perxis/perxis-go/pkg/version"
+)
+
+type Runnable interface {
+	Start()
+	Stop()
+}
+
+type Content struct {
+	collaborators.Collaborators
+	collections.Collections
+	environments.Environments
+	invitations.Invitations
+	items.Items
+	references.References
+	locales.Locales
+	roles.Roles
+	spaces.Spaces
+	clients.Clients
+	version.Versions
+
+	PrincipalFactory *auth.PrincipalFactory
+
+	runners []Runnable
+}
+
+func (c *Content) RegisterStart(svc interface{}) {
+	if r, ok := svc.(Runnable); ok {
+		c.runners = append(c.runners, r)
+	}
+}
+
+func (c Content) Start() {
+	for _, r := range c.runners {
+		r.Start()
+	}
+}
+
+func (c Content) Stop() {
+	for _, r := range c.runners {
+		r.Stop()
+	}
+}
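+
+// Example (editorial sketch): services implementing Runnable (e.g. a background
+// cache invalidator) can be registered so the Content facade manages their lifecycle.
+//
+//	c.RegisterStart(invalidator) // invalidator is a hypothetical Runnable
+//	c.Start()
+//	defer c.Stop()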
diff --git a/pkg/data/data.go b/pkg/data/data.go
new file mode 100644
index 0000000000000000000000000000000000000000..0540055ad4e213f666b0cf72019b9b4b9c39fbe8
--- /dev/null
+++ b/pkg/data/data.go
@@ -0,0 +1,294 @@
+package data
+
+import (
+	"strconv"
+	"strings"
+)
+
+const DefaultFieldDelimiter = "."
+
+type DeleteValueType struct{}
+
+var DeleteValue DeleteValueType
+
+// TODO: add support for arrays and arrays of objects everywhere
+
+// Based on the https://github.com/knadh/koanf library
+
+// Flatten takes a map[string]interface{} and traverses it and flattens
+// nested children into keys delimited by delim.
+//
+// It's important to note that all nested maps should be
+// map[string]interface{} and not map[interface{}]interface{}.
+// Use IntfaceKeysToStrings() to convert if necessary.
+//
+// eg: `{ "parent": { "child": 123 }}` becomes `{ "parent.child": 123 }`
+// In addition, it keeps track of and returns a map of the delimited keypaths with
+// a slice of key parts, for eg: { "parent.child": ["parent", "child"] }. This
+// parts list is used to remember the key path's original structure to
+// unflatten later.
+func Flatten(m map[string]interface{}, keys []string, delim string) (map[string]interface{}, map[string][]string) {
+	var (
+		out    = make(map[string]interface{})
+		keyMap = make(map[string][]string)
+	)
+
+	flatten(m, keys, delim, out, keyMap)
+	return out, keyMap
+}
+
+func flatten(m map[string]interface{}, keys []string, delim string, out map[string]interface{}, keyMap map[string][]string) {
+	for key, val := range m {
+		// Copy the incoming key paths into a fresh list
+		// and append the current key in the iteration.
+		kp := make([]string, 0, len(keys)+1)
+		kp = append(kp, keys...)
+		kp = append(kp, key)
+
+		switch cur := val.(type) {
+		case map[string]interface{}:
+			// Empty map.
+			if len(cur) == 0 {
+				newKey := strings.Join(kp, delim)
+				out[newKey] = val
+				keyMap[newKey] = kp
+				continue
+			}
+
+			// It's a nested map. Flatten it recursively.
+			flatten(cur, kp, delim, out, keyMap)
+		default:
+			newKey := strings.Join(kp, delim)
+			out[newKey] = val
+			keyMap[newKey] = kp
+		}
+	}
+}
+
+// Unflatten takes a flattened key:value map (non-nested with delimited keys)
+// and returns a nested map where the keys are split into hierarchies by the given
+// delimiter. For instance, `parent.child.key: 1` to `{parent: {child: {key: 1}}}`
+//
+// It's important to note that all nested maps should be
+// map[string]interface{} and not map[interface{}]interface{}.
+// Use IntfaceKeysToStrings() to convert if necessary.
+func Unflatten(m map[string]interface{}, delim string) map[string]interface{} {
+	out := make(map[string]interface{})
+
+	// Iterate through the flat conf map.
+	for k, v := range m {
+		var (
+			keys = strings.Split(k, delim)
+			next = out
+		)
+
+		// Iterate through key parts, for eg:, parent.child.key
+		// will be ["parent", "child", "key"]
+		for _, k := range keys[:len(keys)-1] {
+			sub, ok := next[k]
+			if !ok {
+				// If the key does not exist in the map, create it.
+				sub = make(map[string]interface{})
+				next[k] = sub
+			}
+			if n, ok := sub.(map[string]interface{}); ok {
+				next = n
+			}
+		}
+
+		// Assign the value.
+		next[keys[len(keys)-1]] = v
+	}
+	return out
+}
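+
+// Example (editorial sketch): a Flatten/Unflatten round trip with the "." delimiter.
+//
+//	flat, _ := Flatten(map[string]interface{}{"parent": map[string]interface{}{"child": 123}}, nil, ".")
+//	// flat == map[string]interface{}{"parent.child": 123}
+//	nested := Unflatten(flat, ".")
+//	// nested == map[string]interface{}{"parent": map[string]interface{}{"child": 123}}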
+
+// Delete removes the entry present at the given path from the interface
+// if it is an object or an array.
+// The path is a delimited string, for eg: parent.child.key.
+//
+// It's important to note that all nested maps should be
+// map[string]interface{} and not map[interface{}]interface{}.
+// Use IntfaceKeysToStrings() to convert if necessary.
+func Delete(field string, data any, delim ...string) error {
+	return set(getPath(field, delim...), data, DeleteValue)
+}
+
+// DeleteMany removes the entries present at the given paths from the interface.
+func DeleteMany(paths []string, value any, delim ...string) {
+	if value == nil || len(paths) == 0 {
+		return
+	}
+	for _, path := range paths {
+		_ = Delete(path, value, delim...)
+	}
+}
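+
+// Illustrative usage (assumption, not from the original change):
+//
+//	doc := map[string]interface{}{"a": map[string]interface{}{"b": "1", "c": "2"}}
+//	_ = Delete("a.b", doc)            // doc == {"a": {"c": "2"}}
+//	DeleteMany([]string{"a.c"}, doc)  // doc == {"a": {}}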
+
+// Search recursively searches the interface for a given path. The path is
+// the key map slice, for eg:, parent.child.key -> [parent child key].
+//
+// It's important to note that all nested maps should be
+// map[string]interface{} and not map[interface{}]interface{}.
+// Use IntfaceKeysToStrings() to convert if necessary.
+func Search(in interface{}, path []string) interface{} {
+	// Guard against an empty path: path[0] is indexed below.
+	if len(path) == 0 {
+		return nil
+	}
+	switch val := in.(type) {
+
+	case map[string]interface{}:
+		next, ok := val[path[0]]
+		if ok {
+			if len(path) == 1 {
+				return next
+			}
+			switch v := next.(type) {
+			case map[string]interface{}, []interface{}:
+				return Search(v, path[1:])
+			}
+		}
+	case []interface{}:
+		out := make([]interface{}, len(val))
+		for i, e := range val {
+			out[i] = Search(e, path)
+		}
+		return out
+	}
+	return nil
+}
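+
+// Illustrative usage (assumption): when the path runs into an array, it is
+// applied to every element, e.g.
+//
+//	in := map[string]interface{}{"a": []interface{}{
+//		map[string]interface{}{"b": "1"},
+//		map[string]interface{}{"b": "2"},
+//	}}
+//	_ = Search(in, []string{"a", "b"}) // []interface{}{"1", "2"}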
+
+func getPath(field string, delim ...string) []string {
+	if field == "" {
+		return nil
+	}
+
+	d := DefaultFieldDelimiter
+	if len(delim) > 0 {
+		d = delim[0]
+	}
+	return strings.Split(field, d)
+}
+
+func Set(field string, data, value any, delim ...string) error {
+	return set(getPath(field, delim...), data, value)
+}
+
+func set(path []string, data, value any) error {
+	if len(path) == 0 {
+		return nil
+	}
+
+	switch v := data.(type) {
+	case map[string]interface{}:
+		if len(path) == 1 {
+
+			if _, ok := value.(DeleteValueType); ok {
+				delete(v, path[0])
+				return nil
+			}
+
+			v[path[0]] = value
+			return nil
+		}
+
+		next, ok := v[path[0]]
+		if !ok {
+			next = make(map[string]interface{})
+			v[path[0]] = next
+		}
+		return set(path[1:], next, value)
+
+	case []interface{}:
+		idx, err := strconv.Atoi(path[0])
+		if err != nil {
+			// The path element is not an index: apply the same path to every element.
+			for _, vv := range v {
+				if err = set(path, vv, value); err != nil {
+					return err
+				}
+			}
+			return nil
+		}
+		if idx < 0 || idx >= len(v) {
+			return nil
+		}
+		return set(path[1:], v[idx], value)
+	}
+
+	return nil
+}
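+
+// Illustrative usage of Set (assumption): a numeric path element addresses a
+// single slice element, a non-numeric one is applied to every element, e.g.
+//
+//	doc := map[string]interface{}{"a": []interface{}{
+//		map[string]interface{}{"b": "0"},
+//		map[string]interface{}{"b": "0"},
+//	}}
+//	_ = Set("a.1.b", doc, "x") // only doc["a"][1]["b"] becomes "x"
+//	_ = Set("a.b", doc, "y")   // "b" is set to "y" in every element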
+
+func Get(field string, data any, delim ...string) (any, bool) {
+	return get(getPath(field, delim...), data)
+}
+
+func get(path []string, data any) (any, bool) {
+	if len(path) == 0 {
+		return data, true
+	}
+
+	switch v := data.(type) {
+	case map[string]interface{}:
+		val, ok := v[path[0]]
+		if !ok {
+			return nil, false
+		}
+		return get(path[1:], val)
+	case []interface{}:
+		idx, err := strconv.Atoi(path[0])
+		if err != nil || idx < 0 || idx >= len(v) {
+			return nil, false
+		}
+		return get(path[1:], v[idx])
+	}
+
+	return nil, false
+}
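+
+// Illustrative usage of Get (assumption):
+//
+//	doc := map[string]interface{}{"a": []interface{}{map[string]interface{}{"b": "1"}}}
+//	v, ok := Get("a.0.b", doc) // v == "1", ok == true
+//	_, ok = Get("a.5.b", doc)  // ok == false: index out of range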
+
+// Keep keeps the entries present at the given paths in the interface and removes
+// all other data, if it is an object or an array.
+// Each path is a delimited string, e.g. "parent.child.key".
+func Keep(paths []string, data any, delim ...string) {
+	if len(paths) == 0 {
+		return
+	}
+	switch val := data.(type) {
+	case map[string]interface{}:
+		for k, v := range val {
+			if Contains(k, paths) {
+				continue
+			}
+			p := getObjectPaths(k, paths, delim...)
+			if len(p) == 0 {
+				delete(val, k)
+				continue
+			}
+			Keep(p, v, delim...)
+		}
+		}
+	case []interface{}:
+		for _, ar := range val {
+			Keep(paths, ar, delim...)
+		}
+	}
+}
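+
+// Illustrative usage (assumption): Keep is the counterpart of DeleteMany and
+// prunes everything outside the listed paths, e.g.
+//
+//	doc := map[string]interface{}{"a": map[string]interface{}{"b": "1", "c": "2"}, "z": "3"}
+//	Keep([]string{"a.b"}, doc) // doc == {"a": {"b": "1"}}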
+
+func getObjectPaths(prefix string, arr []string, delim ...string) []string {
+	var res []string
+	d := DefaultFieldDelimiter
+	if len(delim) > 0 {
+		d = delim[0]
+	}
+	for _, v := range arr {
+		if strings.HasPrefix(v, prefix+d) {
+			res = append(res, strings.TrimPrefix(v, prefix+d))
+		}
+	}
+	return res
+}
+
+func CloneMap(m map[string]interface{}) map[string]interface{} {
+	if m == nil {
+		return m
+	}
+
+	c := make(map[string]interface{}, len(m))
+	for k, v := range m {
+		c[k] = v
+	}
+	return c
+}
diff --git a/pkg/data/data_test.go b/pkg/data/data_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..785eefbb868a68c9d8c6b2f75b8f861ab2041e11
--- /dev/null
+++ b/pkg/data/data_test.go
@@ -0,0 +1,374 @@
+package data
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestDelete(t *testing.T) {
+	tests := []struct {
+		name  string
+		in    interface{}
+		field string
+		out   interface{}
+	}{
+		{
+			"simple",
+			map[string]interface{}{"a": "1", "z": "2"},
+			"a",
+			map[string]interface{}{"z": "2"},
+		},
+		{
+			"object",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+			"a",
+			map[string]interface{}{},
+		},
+		{
+			"object field",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+			"a.a",
+			map[string]interface{}{"a": map[string]interface{}{"z": "2"}},
+		},
+		{
+			"object field from map with array",
+			map[string]interface{}{"a": []interface{}{
+				map[string]interface{}{"a": "1", "b": "2"},
+				map[string]interface{}{"a": "3", "b": "4"},
+			}, "z": "2"},
+			"a.a",
+			map[string]interface{}{"a": []interface{}{
+				map[string]interface{}{"b": "2"},
+				map[string]interface{}{"b": "4"},
+			}, "z": "2"},
+		},
+		{
+			"object field from map with array of arrays",
+			map[string]interface{}{"a": []interface{}{
+				[]interface{}{
+					map[string]interface{}{"a": "1", "b": "2"},
+				}, []interface{}{
+					map[string]interface{}{"a": "3", "b": "4"},
+				},
+			}, "z": "2"},
+			"a.a",
+			map[string]interface{}{"a": []interface{}{
+				[]interface{}{
+					map[string]interface{}{"b": "2"},
+				}, []interface{}{
+					map[string]interface{}{"b": "4"},
+				},
+			}, "z": "2"},
+		},
+		// Decided that empty objects/slices should not be deleted automatically
+		//{
+		//	"empty object",
+		//	map[string]interface{}{"a": map[string]interface{}{"a": map[string]interface{}{}}},
+		//	[]string{"a", "a"},
+		//	map[string]interface{}{},
+		//}, {
+		//	"empty array",
+		//	map[string]interface{}{"a": map[string]interface{}{"a": []interface{}{}}},
+		//	[]string{"a", "a"},
+		//	map[string]interface{}{},
+		//},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			Delete(tt.field, tt.in)
+			assert.Equal(t, tt.out, tt.in)
+		})
+	}
+}
+
+func TestDeleteMany(t *testing.T) {
+	tests := []struct {
+		name  string
+		in    interface{}
+		paths []string
+		out   interface{}
+	}{
+		{
+			"simple",
+			map[string]interface{}{"a": "1", "z": "2", "d": "2"},
+			[]string{"a", "d"},
+			map[string]interface{}{"z": "2"},
+		},
+		{
+			"object",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+			[]string{"a"},
+			map[string]interface{}{},
+		},
+		{
+			"object field",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2", "b": "4"}},
+			[]string{"a.a", "a.b"},
+			map[string]interface{}{"a": map[string]interface{}{"z": "2"}},
+		},
+		{
+			"object field from map with array",
+			map[string]interface{}{"a": []interface{}{
+				map[string]interface{}{"a": "1", "b": "2", "c": 0},
+				map[string]interface{}{"a": "3", "b": "4", "c": 0},
+			}, "z": "2"},
+			[]string{"a.a", "a.c"},
+			map[string]interface{}{"a": []interface{}{
+				map[string]interface{}{"b": "2"},
+				map[string]interface{}{"b": "4"},
+			}, "z": "2"},
+		},
+		{
+			"object field from map with array of arrays",
+			map[string]interface{}{"a": []interface{}{
+				[]interface{}{
+					map[string]interface{}{"a": "1", "b": "2"},
+				}, []interface{}{
+					map[string]interface{}{"a": "3", "b": "4"},
+				},
+			}, "z": "2"},
+			[]string{"a.a"},
+			map[string]interface{}{"a": []interface{}{
+				[]interface{}{
+					map[string]interface{}{"b": "2"},
+				}, []interface{}{
+					map[string]interface{}{"b": "4"},
+				},
+			}, "z": "2"},
+		},
+		{
+			"empty object",
+			map[string]interface{}{"a": map[string]interface{}{"a": map[string]interface{}{}}},
+			[]string{"a.a", "a"},
+			map[string]interface{}{},
+		},
+		{
+			"field not exist in object",
+			map[string]interface{}{"a": map[string]interface{}{"a": map[string]interface{}{}}},
+			[]string{"a.b"},
+			map[string]interface{}{"a": map[string]interface{}{"a": map[string]interface{}{}}},
+		},
+		{
+			"empty array",
+			map[string]interface{}{"a": map[string]interface{}{"a": []interface{}{}}},
+			[]string{"a.a", "a"},
+			map[string]interface{}{},
+		},
+		{
+			"field not exist in array",
+			map[string]interface{}{"a": map[string]interface{}{"a": []interface{}{}}},
+			[]string{"a.b"},
+			map[string]interface{}{"a": map[string]interface{}{"a": []interface{}{}}},
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			DeleteMany(tt.paths, tt.in)
+			assert.Equal(t, tt.out, tt.in)
+		})
+	}
+}
+
+func TestSearch(t *testing.T) {
+	tests := []struct {
+		name string
+		in   interface{}
+		path []string
+		out  interface{}
+	}{
+		{
+			"simple",
+			map[string]interface{}{"a": "1", "z": "2"},
+			[]string{"a"},
+			"1",
+		},
+		{
+			"object",
+			map[string]interface{}{
+				"a": map[string]interface{}{"a": "1", "z": "2"},
+				"b": map[string]interface{}{"c": "1", "d": "2"},
+			},
+			[]string{"a"},
+			map[string]interface{}{"a": "1", "z": "2"},
+		},
+		{
+			"object field",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+			[]string{"a", "a"},
+			"1",
+		},
+		{
+			"object field from map with array",
+			map[string]interface{}{"a": []interface{}{
+				map[string]interface{}{"a": "1", "b": "2"},
+				map[string]interface{}{"a": "3", "b": "4"},
+			}, "z": "2"},
+			[]string{"a", "a"},
+			[]interface{}{"1", "3"},
+		},
+		{
+			"object field from array of arrays",
+			[]interface{}{
+				[]interface{}{
+					map[string]interface{}{"a": "1", "b": "2"},
+				}, []interface{}{
+					map[string]interface{}{"a": "3", "b": "4"},
+				},
+			},
+			[]string{"a"},
+			[]interface{}{[]interface{}{"1"}, []interface{}{"3"}},
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			out := Search(tt.in, tt.path)
+			assert.Equal(t, tt.out, out)
+		})
+	}
+}
+
+func TestSet(t *testing.T) {
+	type args struct {
+		field string
+		data  any
+		value any
+	}
+	tests := []struct {
+		name     string
+		args     args
+		wantData any
+		wantErr  assert.ErrorAssertionFunc
+	}{
+		{"Simple", args{"a", map[string]interface{}{"a": "0"}, "a"}, map[string]interface{}{"a": "a"}, assert.NoError},
+		{"New key", args{"b", map[string]interface{}{"a": "0"}, "a"}, map[string]interface{}{"a": "0", "b": "a"}, assert.NoError},
+		{"Path", args{"a.b.c", map[string]interface{}{"a": map[string]any{"b": map[string]any{"c": "0"}}}, "c"}, map[string]any{"a": map[string]any{"b": map[string]any{"c": "c"}}}, assert.NoError},
+		{"Delete", args{"a.b", map[string]interface{}{"a": map[string]any{"b": map[string]any{"c": "0"}}}, DeleteValue}, map[string]any{"a": map[string]any{}}, assert.NoError},
+		{"Create map", args{"b.a", map[string]interface{}{"a": "0"}, "a"}, map[string]interface{}{"a": "0", "b": map[string]interface{}{"a": "a"}}, assert.NoError},
+		{"Map value", args{"a", map[string]interface{}{"a": "0"}, map[string]interface{}{"a": "a"}}, map[string]interface{}{"a": map[string]interface{}{"a": "a"}}, assert.NoError},
+		{"Slice", args{"a.a", map[string]interface{}{"a": []any{map[string]any{"a": "0"}, map[string]any{"a": "0", "b": "b"}}}, "a"}, map[string]interface{}{"a": []any{map[string]any{"a": "a"}, map[string]any{"a": "a", "b": "b"}}}, assert.NoError},
+		{"Slice", args{"a.0.a", map[string]interface{}{"a": []any{map[string]any{"a": "0"}, map[string]any{"a": "0", "b": "b"}}}, "a"}, map[string]interface{}{"a": []any{map[string]any{"a": "a"}, map[string]any{"a": "0", "b": "b"}}}, assert.NoError},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			data := tt.args.data
+			tt.wantErr(t, Set(tt.args.field, data, tt.args.value), fmt.Sprintf("Set(%v, %v, %v)", tt.args.field, data, tt.args.value))
+			assert.Equal(t, tt.wantData, data)
+		})
+	}
+}
+
+func TestGet(t *testing.T) {
+	type args struct {
+		field string
+		data  any
+	}
+	tests := []struct {
+		name  string
+		args  args
+		want  any
+		found bool
+	}{
+		{"Direct value", args{"", 100}, 100, true},
+		{"Not found", args{"a", 100}, nil, false},
+		{"Simple", args{"a", map[string]any{"a": "0"}}, "0", true},
+		{"Path", args{"a.b.c", map[string]any{"a": map[string]any{"b": map[string]any{"c": "c"}}}}, "c", true},
+		{"Incorrect path", args{"a.b.wrong", map[string]any{"a": map[string]any{"b": map[string]any{"c": "c"}}}}, nil, false},
+		{"Map value", args{"a.b", map[string]any{"a": map[string]any{"b": map[string]any{"c": "c"}}}}, map[string]any{"c": "c"}, true},
+		{"Slice", args{"a.1.b", map[string]any{"a": []any{map[string]any{"b": "0"}, map[string]any{"b": "1"}}}}, "1", true},
+		{"Slice out of range", args{"a.2.b", map[string]any{"a": []any{map[string]any{"b": "0"}, map[string]any{"b": "1"}}}}, nil, false},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, got1 := Get(tt.args.field, tt.args.data)
+			assert.Equalf(t, tt.want, got, "Get(%v, %v)", tt.args.field, tt.args.data)
+			assert.Equalf(t, tt.found, got1, "Get(%v, %v)", tt.args.field, tt.args.data)
+		})
+	}
+}
+
+func TestKeep(t *testing.T) {
+	tests := []struct {
+		name string
+		in   interface{}
+		path []string
+		out  interface{}
+	}{
+		{
+			"simple",
+			map[string]interface{}{"a": "1", "z": "2"},
+			[]string{"a"},
+			map[string]interface{}{"a": "1"},
+		},
+		{
+			"object",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+			[]string{"a"},
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+		},
+		{
+			"no field",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+			[]string{"z"},
+			map[string]interface{}{},
+		},
+		{
+			"object field",
+			map[string]interface{}{"a": map[string]interface{}{"a": "1", "z": "2"}},
+			[]string{"a.a"},
+			map[string]interface{}{"a": map[string]interface{}{"a": "1"}},
+		},
+		{
+			"object field from map with array",
+			map[string]interface{}{"a": []interface{}{
+				map[string]interface{}{"a": "1", "b": "2"},
+				map[string]interface{}{"a": "3", "b": "4"},
+			}, "z": "2"},
+			[]string{"a.a", "z"},
+			map[string]interface{}{"a": []interface{}{
+				map[string]interface{}{"a": "1"},
+				map[string]interface{}{"a": "3"},
+			}, "z": "2"},
+		},
+		{
+			"object field from map with array of arrays",
+			map[string]interface{}{"a": []interface{}{
+				[]interface{}{
+					map[string]interface{}{"a": "1", "b": "2"},
+				}, []interface{}{
+					map[string]interface{}{"a": "3", "b": "4"},
+				},
+			}, "z": "2"},
+			[]string{"a.b", "z"},
+			map[string]interface{}{"a": []interface{}{
+				[]interface{}{
+					map[string]interface{}{"b": "2"},
+				}, []interface{}{
+					map[string]interface{}{"b": "4"},
+				},
+			}, "z": "2"},
+		},
+		{
+			"empty object",
+			map[string]interface{}{"a": map[string]interface{}{"a": map[string]interface{}{}}},
+			[]string{"a.b"},
+			map[string]interface{}{"a": map[string]interface{}{}},
+		}, {
+			"empty array",
+			map[string]interface{}{"a": map[string]interface{}{"a": []interface{}{}}},
+			[]string{"a.b"},
+			map[string]interface{}{"a": map[string]interface{}{}},
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			Keep(tt.path, tt.in)
+			assert.Equal(t, tt.out, tt.in)
+		})
+	}
+}
diff --git a/pkg/delivery/client/adapter.go b/pkg/delivery/client/adapter.go
new file mode 100644
index 0000000000000000000000000000000000000000..df4c1722cae2ee0abb3be12e58e9f5232bac2bb7
--- /dev/null
+++ b/pkg/delivery/client/adapter.go
@@ -0,0 +1,63 @@
+package client
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/collections"
+	"git.perx.ru/perxis/perxis-go/pkg/delivery"
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/locales"
+)
+
+type envsAdapter struct {
+	environments.Environments
+	dv delivery.Delivery
+}
+
+func (a envsAdapter) Get(ctx context.Context, spaceId, envId string) (env *environments.Environment, err error) {
+	return a.dv.GetEnvironment(ctx, spaceId, envId)
+}
+
+func (a envsAdapter) List(ctx context.Context, spaceId string) (envs []*environments.Environment, err error) {
+	return a.dv.ListEnvironments(ctx, spaceId)
+}
+
+type localesAdapter struct {
+	locales.Locales
+	dv delivery.Delivery
+}
+
+func (a localesAdapter) List(ctx context.Context, spaceId string) (locales []*locales.Locale, err error) {
+	return a.dv.ListLocales(ctx, spaceId)
+}
+
+type collectionsAdapter struct {
+	collections.Collections
+	dv delivery.Delivery
+}
+
+func (a collectionsAdapter) Get(ctx context.Context, spaceId, envId, collectionId string, opts ...*collections.GetOptions) (collection *collections.Collection, err error) {
+	return a.dv.GetCollection(ctx, spaceId, envId, collectionId)
+}
+
+func (a collectionsAdapter) List(ctx context.Context, spaceId, envId string, filter *collections.Filter) (collections []*collections.Collection, err error) {
+	return a.dv.ListCollections(ctx, spaceId, envId)
+}
+
+type itemsAdapter struct {
+	items.Items
+	dv delivery.Delivery
+}
+
+func (a itemsAdapter) GetPublished(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*items.GetPublishedOptions) (item *items.Item, err error) {
+	return a.dv.GetItem(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (a itemsAdapter) FindPublished(ctx context.Context, spaceId, envId, collectionId string, filter *items.Filter, options ...*items.FindPublishedOptions) (items []*items.Item, total int, err error) {
+	return a.dv.FindItems(ctx, spaceId, envId, collectionId, filter, options...)
+}
+
+func (a itemsAdapter) AggregatePublished(ctx context.Context, spaceId, envId, collectionId string, filter *items.Filter, options ...*items.AggregatePublishedOptions) (result map[string]interface{}, err error) {
+	return a.dv.Aggregate(ctx, spaceId, envId, collectionId, filter, options...)
+}
diff --git a/pkg/delivery/client/client.go b/pkg/delivery/client/client.go
new file mode 100644
index 0000000000000000000000000000000000000000..4f7d9a97ab4a978f3fd6c5708f6b5691067b64c0
--- /dev/null
+++ b/pkg/delivery/client/client.go
@@ -0,0 +1,67 @@
+package client
+
+import (
+	"context"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	collections "git.perx.ru/perxis/perxis-go/pkg/collections/middleware"
+	"git.perx.ru/perxis/perxis-go/pkg/delivery"
+	deliveryservice "git.perx.ru/perxis/perxis-go/pkg/delivery/service"
+	deliverytransportgrpc "git.perx.ru/perxis/perxis-go/pkg/delivery/transport/grpc"
+	environments "git.perx.ru/perxis/perxis-go/pkg/environments/middleware"
+	items "git.perx.ru/perxis/perxis-go/pkg/items/middleware"
+	locales "git.perx.ru/perxis/perxis-go/pkg/locales/middleware"
+	"google.golang.org/grpc"
+)
+
+func NewClient(addr string, opts ...Option) (delivery.Delivery, error) {
+	ctx := context.Background()
+
+	c := new(Config)
+	dialOpts := make([]grpc.DialOption, 0)
+
+	for _, o := range opts {
+		o(&c.Config)
+	}
+
+	authDialOpts, err := c.GetAuthDialOpts(ctx)
+	if err != nil {
+		return nil, err
+	}
+
+	conn, err := grpc.Dial(addr, append(dialOpts, authDialOpts...)...)
+	if err != nil {
+		return nil, err
+	}
+
+	client := deliverytransportgrpc.NewGRPCClient(conn, "", c.ClientOptions...)
+
+	cfg := &deliveryservice.Config{
+		Locales:      localesAdapter{dv: client},
+		Environments: envsAdapter{dv: client},
+		Collections:  collectionsAdapter{dv: client},
+		Items:        itemsAdapter{dv: client},
+	}
+
+	if !c.NoDecode {
+		cfg.Items = items.ClientEncodeMiddleware(cfg.Collections)(cfg.Items)
+	}
+
+	if !c.NoCache {
+		cfg = WithCaching(cfg, 0, 0)
+	}
+
+	return deliveryservice.NewService(cfg), nil
+}
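+
+// Illustrative usage (assumption — the address and credentials below are
+// placeholders; the option names are those declared in config.go):
+//
+//	client, err := NewClient("delivery.example.org:443",
+//		AuthAPIKey("<api-key>"),
+//		NoCache(),
+//	)
+//	if err != nil {
+//		// handle error
+//	}
+//	envs, err := client.ListEnvironments(ctx, "<space-id>")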
+
+func WithCaching(cfg *deliveryservice.Config, size int, ttl time.Duration) *deliveryservice.Config {
+	c := *cfg
+
+	c.Environments = environments.CachingMiddleware(cache.NewCache(size, ttl))(cfg.Environments)
+	c.Locales = locales.CachingMiddleware(cache.NewCache(size, ttl))(cfg.Locales)
+	c.Items = items.CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), c.Environments)(cfg.Items)
+	c.Collections = collections.CachingMiddleware(cache.NewCache(size, ttl), c.Environments)(cfg.Collections)
+
+	return &c
+}
diff --git a/pkg/delivery/client/config.go b/pkg/delivery/client/config.go
new file mode 100644
index 0000000000000000000000000000000000000000..27f34b0bef42e7ad5aa40093a2f4188f62d1c556
--- /dev/null
+++ b/pkg/delivery/client/config.go
@@ -0,0 +1,36 @@
+package client
+
+import (
+	contentclient "git.perx.ru/perxis/perxis-go/pkg/content"
+	"github.com/go-kit/kit/transport/grpc"
+)
+
+type Option contentclient.Option
+
+type Config struct {
+	contentclient.Config
+}
+
+func AuthOAuth2(tokenUrl, clientID, clientSecret, audience string) Option {
+	return Option(contentclient.AuthOAuth2(tokenUrl, clientID, clientSecret, audience))
+}
+
+func AuthTLS(cacert, cert, key []byte) Option {
+	return Option(contentclient.AuthTLS(cacert, cert, key))
+}
+
+func AuthAPIKey(key string) Option {
+	return Option(contentclient.AuthAPIKey(key))
+}
+
+func AuthInsecure() Option {
+	return Option(contentclient.AuthInsecure())
+}
+
+func GrpcClientOptions(opts ...grpc.ClientOption) Option {
+	return Option(contentclient.GrpcClientOptions(opts...))
+}
+
+func NoCache() Option {
+	return Option(contentclient.NoCache())
+}
diff --git a/pkg/delivery/mocks/Delivery.go b/pkg/delivery/mocks/Delivery.go
new file mode 100644
index 0000000000000000000000000000000000000000..5cd13757a86e0ee7cf7c3180053e65299558178a
--- /dev/null
+++ b/pkg/delivery/mocks/Delivery.go
@@ -0,0 +1,200 @@
+// Code generated by mockery v2.7.4. DO NOT EDIT.
+
+package mocks
+
+import (
+	context "context"
+
+	collections "git.perx.ru/perxis/perxis-go/pkg/collections"
+	environments "git.perx.ru/perxis/perxis-go/pkg/environments"
+	items "git.perx.ru/perxis/perxis-go/pkg/items"
+	locales "git.perx.ru/perxis/perxis-go/pkg/locales"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// Delivery is an autogenerated mock type for the Delivery type
+type Delivery struct {
+	mock.Mock
+}
+
+// FindItems provides a mock function with given fields: ctx, spaceId, envId, collectionId, filter, options
+func (_m *Delivery) FindItems(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindPublishedOptions) ([]*items.Item, int, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, filter)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 []*items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *items.Filter, ...*items.FindPublishedOptions) []*items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*items.Item)
+		}
+	}
+
+	var r1 int
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, *items.Filter, ...*items.FindPublishedOptions) int); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r1 = ret.Get(1).(int)
+	}
+
+	var r2 error
+	if rf, ok := ret.Get(2).(func(context.Context, string, string, string, *items.Filter, ...*items.FindPublishedOptions) error); ok {
+		r2 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r2 = ret.Error(2)
+	}
+
+	return r0, r1, r2
+}
+
+// GetCollection provides a mock function with given fields: ctx, spaceId, envId, collectionId
+func (_m *Delivery) GetCollection(ctx context.Context, spaceId string, envId string, collectionId string) (*collections.Collection, error) {
+	ret := _m.Called(ctx, spaceId, envId, collectionId)
+
+	var r0 *collections.Collection
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string) *collections.Collection); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*collections.Collection)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string) error); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// GetEnvironment provides a mock function with given fields: ctx, spaceId, envId
+func (_m *Delivery) GetEnvironment(ctx context.Context, spaceId string, envId string) (*environments.Environment, error) {
+	ret := _m.Called(ctx, spaceId, envId)
+
+	var r0 *environments.Environment
+	if rf, ok := ret.Get(0).(func(context.Context, string, string) *environments.Environment); ok {
+		r0 = rf(ctx, spaceId, envId)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*environments.Environment)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok {
+		r1 = rf(ctx, spaceId, envId)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// GetItem provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, options
+func (_m *Delivery) GetItem(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.GetPublishedOptions) (*items.Item, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, itemId)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 *items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, ...*items.GetPublishedOptions) *items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*items.Item)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, string, ...*items.GetPublishedOptions) error); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// ListCollections provides a mock function with given fields: ctx, spaceId, envId
+func (_m *Delivery) ListCollections(ctx context.Context, spaceId string, envId string) ([]*collections.Collection, error) {
+	ret := _m.Called(ctx, spaceId, envId)
+
+	var r0 []*collections.Collection
+	if rf, ok := ret.Get(0).(func(context.Context, string, string) []*collections.Collection); ok {
+		r0 = rf(ctx, spaceId, envId)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*collections.Collection)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok {
+		r1 = rf(ctx, spaceId, envId)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// ListEnvironments provides a mock function with given fields: ctx, spaceId
+func (_m *Delivery) ListEnvironments(ctx context.Context, spaceId string) ([]*environments.Environment, error) {
+	ret := _m.Called(ctx, spaceId)
+
+	var r0 []*environments.Environment
+	if rf, ok := ret.Get(0).(func(context.Context, string) []*environments.Environment); ok {
+		r0 = rf(ctx, spaceId)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*environments.Environment)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string) error); ok {
+		r1 = rf(ctx, spaceId)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// ListLocales provides a mock function with given fields: ctx, spaceId
+func (_m *Delivery) ListLocales(ctx context.Context, spaceId string) ([]*locales.Locale, error) {
+	ret := _m.Called(ctx, spaceId)
+
+	var r0 []*locales.Locale
+	if rf, ok := ret.Get(0).(func(context.Context, string) []*locales.Locale); ok {
+		r0 = rf(ctx, spaceId)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*locales.Locale)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string) error); ok {
+		r1 = rf(ctx, spaceId)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
diff --git a/pkg/delivery/service.go b/pkg/delivery/service.go
new file mode 100644
index 0000000000000000000000000000000000000000..9f084740499a5d620d158c4ef97555d4ed5acca6
--- /dev/null
+++ b/pkg/delivery/service.go
@@ -0,0 +1,28 @@
+package delivery
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/collections"
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/locales"
+)
+
+// @microgen grpc
+// @protobuf git.perx.ru/perxis/perxis-go/proto/delivery
+// @grpc-addr delivery.Delivery
+type Delivery interface {
+	ListLocales(ctx context.Context, spaceId string) (locales []*locales.Locale, err error)
+
+	GetEnvironment(ctx context.Context, spaceId, envId string) (env *environments.Environment, err error)
+	ListEnvironments(ctx context.Context, spaceId string) (envs []*environments.Environment, err error)
+
+	GetCollection(ctx context.Context, spaceId, envId, collectionId string) (collection *collections.Collection, err error)
+	ListCollections(ctx context.Context, spaceId, envId string) (collections []*collections.Collection, err error)
+
+	GetItem(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*items.GetPublishedOptions) (item *items.Item, err error)
+	FindItems(ctx context.Context, spaceId, envId, collectionId string, filter *items.Filter, options ...*items.FindPublishedOptions) (items []*items.Item, total int, err error)
+
+	Aggregate(ctx context.Context, spaceId, envId, collectionId string, filter *items.Filter, options ...*items.AggregatePublishedOptions) (result map[string]interface{}, err error)
+}
diff --git a/pkg/delivery/service/service.go b/pkg/delivery/service/service.go
new file mode 100644
index 0000000000000000000000000000000000000000..ed1b5091c553853ec78f9f20bc3b54b3e0239834
--- /dev/null
+++ b/pkg/delivery/service/service.go
@@ -0,0 +1,67 @@
+package service
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/collections"
+	"git.perx.ru/perxis/perxis-go/pkg/delivery"
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/locales"
+)
+
+type Config struct {
+	locales.Locales
+	environments.Environments
+	collections.Collections
+	items.Items
+}
+
+func NewService(config *Config) delivery.Delivery {
+	return &deliveryService{
+		Locales:      config.Locales,
+		Environments: config.Environments,
+		Collections:  config.Collections,
+		Items:        config.Items,
+	}
+}
+
+type deliveryService struct {
+	locales.Locales
+	environments.Environments
+	collections.Collections
+	items.Items
+}
+
+func (s deliveryService) ListLocales(ctx context.Context, spaceId string) (locales []*locales.Locale, err error) {
+	return s.Locales.List(ctx, spaceId)
+}
+
+func (s deliveryService) GetEnvironment(ctx context.Context, spaceId, envId string) (env *environments.Environment, err error) {
+	return s.Environments.Get(ctx, spaceId, envId)
+}
+
+func (s deliveryService) ListEnvironments(ctx context.Context, spaceId string) (envs []*environments.Environment, err error) {
+	return s.Environments.List(ctx, spaceId)
+}
+
+func (s deliveryService) GetCollection(ctx context.Context, spaceId, envId, collectionId string) (collection *collections.Collection, err error) {
+	// For Delivery the collection is always loaded for subsequent use, since there is no use case for an unloaded collection
+	return s.Collections.Get(ctx, spaceId, envId, collectionId)
+}
+
+func (s deliveryService) ListCollections(ctx context.Context, spaceId, envId string) (collections []*collections.Collection, err error) {
+	return s.Collections.List(ctx, spaceId, envId, nil)
+}
+
+func (s deliveryService) GetItem(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*items.GetPublishedOptions) (item *items.Item, err error) {
+	return s.Items.GetPublished(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (s deliveryService) FindItems(ctx context.Context, spaceId, envId, collectionId string, filter *items.Filter, options ...*items.FindPublishedOptions) (items []*items.Item, total int, err error) {
+	return s.Items.FindPublished(ctx, spaceId, envId, collectionId, filter, options...)
+}
+
+func (s deliveryService) Aggregate(ctx context.Context, spaceId, envId, collectionId string, filter *items.Filter, options ...*items.AggregatePublishedOptions) (result map[string]interface{}, err error) {
+	return s.Items.AggregatePublished(ctx, spaceId, envId, collectionId, filter, options...)
+}
diff --git a/pkg/delivery/transport/client.microgen.go b/pkg/delivery/transport/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..8f2e1216f49441cf14afe4fe2fe509ba9898bca9
--- /dev/null
+++ b/pkg/delivery/transport/client.microgen.go
@@ -0,0 +1,139 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+	"errors"
+
+	collections "git.perx.ru/perxis/perxis-go/pkg/collections"
+	environments "git.perx.ru/perxis/perxis-go/pkg/environments"
+	items "git.perx.ru/perxis/perxis-go/pkg/items"
+	locales "git.perx.ru/perxis/perxis-go/pkg/locales"
+	codes "google.golang.org/grpc/codes"
+	status "google.golang.org/grpc/status"
+)
+
+func (set EndpointsSet) ListLocales(arg0 context.Context, arg1 string) (res0 []*locales.Locale, res1 error) {
+	request := ListLocalesRequest{SpaceId: arg1}
+	response, res1 := set.ListLocalesEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*ListLocalesResponse).Locales, res1
+}
+
+func (set EndpointsSet) GetEnvironment(arg0 context.Context, arg1 string, arg2 string) (res0 *environments.Environment, res1 error) {
+	request := GetEnvironmentRequest{
+		EnvId:   arg2,
+		SpaceId: arg1,
+	}
+	response, res1 := set.GetEnvironmentEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*GetEnvironmentResponse).Env, res1
+}
+
+func (set EndpointsSet) ListEnvironments(arg0 context.Context, arg1 string) (res0 []*environments.Environment, res1 error) {
+	request := ListEnvironmentsRequest{SpaceId: arg1}
+	response, res1 := set.ListEnvironmentsEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*ListEnvironmentsResponse).Envs, res1
+}
+
+func (set EndpointsSet) GetCollection(arg0 context.Context, arg1 string, arg2 string, arg3 string) (res0 *collections.Collection, res1 error) {
+	request := GetCollectionRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		SpaceId:      arg1,
+	}
+	response, res1 := set.GetCollectionEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*GetCollectionResponse).Collection, res1
+}
+
+func (set EndpointsSet) ListCollections(arg0 context.Context, arg1 string, arg2 string) (res0 []*collections.Collection, res1 error) {
+	request := ListCollectionsRequest{
+		EnvId:   arg2,
+		SpaceId: arg1,
+	}
+	response, res1 := set.ListCollectionsEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*ListCollectionsResponse).Collections, res1
+}
+
+func (set EndpointsSet) GetItem(arg0 context.Context, arg1 string, arg2 string, arg3 string, arg4 string, arg5 ...*items.GetPublishedOptions) (res0 *items.Item, res1 error) {
+	request := GetItemRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		ItemId:       arg4,
+		Options:      arg5,
+		SpaceId:      arg1,
+	}
+	response, res1 := set.GetItemEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*GetItemResponse).Item, res1
+}
+
+func (set EndpointsSet) FindItems(arg0 context.Context, arg1 string, arg2 string, arg3 string, arg4 *items.Filter, arg5 ...*items.FindPublishedOptions) (res0 []*items.Item, res1 int, res2 error) {
+	request := FindItemsRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		Filter:       arg4,
+		Options:      arg5,
+		SpaceId:      arg1,
+	}
+	response, res2 := set.FindItemsEndpoint(arg0, &request)
+	if res2 != nil {
+		if e, ok := status.FromError(res2); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res2 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*FindItemsResponse).Items, response.(*FindItemsResponse).Total, res2
+}
+
+func (set EndpointsSet) Aggregate(arg0 context.Context, arg1 string, arg2 string, arg3 string, arg4 *items.Filter, arg5 ...*items.AggregatePublishedOptions) (res0 map[string]interface{}, res1 error) {
+	request := AggregateRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		Filter:       arg4,
+		Options:      arg5,
+		SpaceId:      arg1,
+	}
+	response, res1 := set.AggregateEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*AggregateResponse).Result, res1
+}
diff --git a/pkg/delivery/transport/endpoints.microgen.go b/pkg/delivery/transport/endpoints.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..a76d9c168ad5242df0fda62d3ca7f16b324f4ae8
--- /dev/null
+++ b/pkg/delivery/transport/endpoints.microgen.go
@@ -0,0 +1,17 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import endpoint "github.com/go-kit/kit/endpoint"
+
+// EndpointsSet implements Delivery API and used for transport purposes.
+type EndpointsSet struct {
+	ListLocalesEndpoint      endpoint.Endpoint
+	GetEnvironmentEndpoint   endpoint.Endpoint
+	ListEnvironmentsEndpoint endpoint.Endpoint
+	GetCollectionEndpoint    endpoint.Endpoint
+	ListCollectionsEndpoint  endpoint.Endpoint
+	GetItemEndpoint          endpoint.Endpoint
+	FindItemsEndpoint        endpoint.Endpoint
+	AggregateEndpoint        endpoint.Endpoint
+}
diff --git a/pkg/delivery/transport/exchanges.microgen.go b/pkg/delivery/transport/exchanges.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..3e0f4c8958cc3cb58608e4e6676c8c4163a83cd0
--- /dev/null
+++ b/pkg/delivery/transport/exchanges.microgen.go
@@ -0,0 +1,85 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	collections "git.perx.ru/perxis/perxis-go/pkg/collections"
+	environments "git.perx.ru/perxis/perxis-go/pkg/environments"
+	items "git.perx.ru/perxis/perxis-go/pkg/items"
+	locales "git.perx.ru/perxis/perxis-go/pkg/locales"
+)
+
+type (
+	ListLocalesRequest struct {
+		SpaceId string `json:"space_id"`
+	}
+	ListLocalesResponse struct {
+		Locales []*locales.Locale `json:"locales"`
+	}
+
+	GetEnvironmentRequest struct {
+		SpaceId string `json:"space_id"`
+		EnvId   string `json:"env_id"`
+	}
+	GetEnvironmentResponse struct {
+		Env *environments.Environment `json:"env"`
+	}
+
+	ListEnvironmentsRequest struct {
+		SpaceId string `json:"space_id"`
+	}
+	ListEnvironmentsResponse struct {
+		Envs []*environments.Environment `json:"envs"`
+	}
+
+	GetCollectionRequest struct {
+		SpaceId      string `json:"space_id"`
+		EnvId        string `json:"env_id"`
+		CollectionId string `json:"collection_id"`
+	}
+	GetCollectionResponse struct {
+		Collection *collections.Collection `json:"collection"`
+	}
+
+	ListCollectionsRequest struct {
+		SpaceId string `json:"space_id"`
+		EnvId   string `json:"env_id"`
+	}
+	ListCollectionsResponse struct {
+		Collections []*collections.Collection `json:"collections"`
+	}
+
+	GetItemRequest struct {
+		SpaceId      string                       `json:"space_id"`
+		EnvId        string                       `json:"env_id"`
+		CollectionId string                       `json:"collection_id"`
+		ItemId       string                       `json:"item_id"`
+		Options      []*items.GetPublishedOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	GetItemResponse struct {
+		Item *items.Item `json:"item"`
+	}
+
+	FindItemsRequest struct {
+		SpaceId      string                        `json:"space_id"`
+		EnvId        string                        `json:"env_id"`
+		CollectionId string                        `json:"collection_id"`
+		Filter       *items.Filter                 `json:"filter"`
+		Options      []*items.FindPublishedOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	FindItemsResponse struct {
+		Items []*items.Item `json:"items"`
+		Total int           `json:"total"`
+	}
+
+	AggregateRequest struct {
+		SpaceId      string                             `json:"space_id"`
+		EnvId        string                             `json:"env_id"`
+		CollectionId string                             `json:"collection_id"`
+		Filter       *items.Filter                      `json:"filter"`
+		Options      []*items.AggregatePublishedOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	AggregateResponse struct {
+		Result map[string]interface{} `json:"result"`
+	}
+)
diff --git a/pkg/delivery/transport/grpc/client.microgen.go b/pkg/delivery/transport/grpc/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..6a038379e065b12934f76e25102d425930ad072d
--- /dev/null
+++ b/pkg/delivery/transport/grpc/client.microgen.go
@@ -0,0 +1,74 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/delivery/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/delivery"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+	grpc "google.golang.org/grpc"
+)
+
+func NewGRPCClient(conn *grpc.ClientConn, addr string, opts ...grpckit.ClientOption) transport.EndpointsSet {
+	if addr == "" {
+		addr = "delivery.Delivery"
+	}
+	return transport.EndpointsSet{
+		AggregateEndpoint: grpckit.NewClient(
+			conn, addr, "Aggregate",
+			_Encode_Aggregate_Request,
+			_Decode_Aggregate_Response,
+			pb.AggregateResponse{},
+			opts...,
+		).Endpoint(),
+		FindItemsEndpoint: grpckit.NewClient(
+			conn, addr, "FindItems",
+			_Encode_FindItems_Request,
+			_Decode_FindItems_Response,
+			pb.FindItemsResponse{},
+			opts...,
+		).Endpoint(),
+		GetCollectionEndpoint: grpckit.NewClient(
+			conn, addr, "GetCollection",
+			_Encode_GetCollection_Request,
+			_Decode_GetCollection_Response,
+			pb.GetCollectionResponse{},
+			opts...,
+		).Endpoint(),
+		GetEnvironmentEndpoint: grpckit.NewClient(
+			conn, addr, "GetEnvironment",
+			_Encode_GetEnvironment_Request,
+			_Decode_GetEnvironment_Response,
+			pb.GetEnvironmentResponse{},
+			opts...,
+		).Endpoint(),
+		GetItemEndpoint: grpckit.NewClient(
+			conn, addr, "GetItem",
+			_Encode_GetItem_Request,
+			_Decode_GetItem_Response,
+			pb.GetItemResponse{},
+			opts...,
+		).Endpoint(),
+		ListCollectionsEndpoint: grpckit.NewClient(
+			conn, addr, "ListCollections",
+			_Encode_ListCollections_Request,
+			_Decode_ListCollections_Response,
+			pb.ListCollectionsResponse{},
+			opts...,
+		).Endpoint(),
+		ListEnvironmentsEndpoint: grpckit.NewClient(
+			conn, addr, "ListEnvironments",
+			_Encode_ListEnvironments_Request,
+			_Decode_ListEnvironments_Response,
+			pb.ListEnvironmentsResponse{},
+			opts...,
+		).Endpoint(),
+		ListLocalesEndpoint: grpckit.NewClient(
+			conn, addr, "ListLocales",
+			_Encode_ListLocales_Request,
+			_Decode_ListLocales_Response,
+			pb.ListLocalesResponse{},
+			opts...,
+		).Endpoint(),
+	}
+}
diff --git a/pkg/delivery/transport/grpc/protobuf_endpoint_converters.microgen.go b/pkg/delivery/transport/grpc/protobuf_endpoint_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..83f2869edbd101135a854e1c5a8542ed7ba28a7b
--- /dev/null
+++ b/pkg/delivery/transport/grpc/protobuf_endpoint_converters.microgen.go
@@ -0,0 +1,434 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// Please, do not change functions names!
+package transportgrpc
+
+import (
+	"context"
+	"errors"
+
+	transport "git.perx.ru/perxis/perxis-go/pkg/delivery/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/delivery"
+)
+
+func _Encode_ListLocales_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListLocalesRequest")
+	}
+	req := request.(*transport.ListLocalesRequest)
+	return &pb.ListLocalesRequest{SpaceId: req.SpaceId}, nil
+}
+
+func _Encode_GetEnvironment_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetEnvironmentRequest")
+	}
+	req := request.(*transport.GetEnvironmentRequest)
+	return &pb.GetEnvironmentRequest{
+		EnvId:   req.EnvId,
+		SpaceId: req.SpaceId,
+	}, nil
+}
+
+func _Encode_ListEnvironments_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListEnvironmentsRequest")
+	}
+	req := request.(*transport.ListEnvironmentsRequest)
+	return &pb.ListEnvironmentsRequest{SpaceId: req.SpaceId}, nil
+}
+
+func _Encode_GetCollection_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetCollectionRequest")
+	}
+	req := request.(*transport.GetCollectionRequest)
+	return &pb.GetCollectionRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		SpaceId:      req.SpaceId,
+	}, nil
+}
+
+func _Encode_ListCollections_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListCollectionsRequest")
+	}
+	req := request.(*transport.ListCollectionsRequest)
+	return &pb.ListCollectionsRequest{
+		EnvId:   req.EnvId,
+		SpaceId: req.SpaceId,
+	}, nil
+}
+
+func _Encode_GetItem_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetItemRequest")
+	}
+	req := request.(*transport.GetItemRequest)
+	reqOptions, err := ElPtrItemsGetPublishedOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetItemRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		ItemId:       req.ItemId,
+		SpaceId:      req.SpaceId,
+		Options:      reqOptions,
+	}, nil
+}
+
+func _Encode_FindItems_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindItemsRequest")
+	}
+	req := request.(*transport.FindItemsRequest)
+	reqFilter, err := PtrItemsFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ElPtrItemsFindPublishedOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindItemsRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		Filter:       reqFilter,
+		Options:      reqOptions,
+		SpaceId:      req.SpaceId,
+	}, nil
+}
+
+func _Encode_ListLocales_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListLocalesResponse")
+	}
+	resp := response.(*transport.ListLocalesResponse)
+	respLocales, err := ListPtrLocalesLocaleToProto(resp.Locales)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.ListLocalesResponse{Locales: respLocales}, nil
+}
+
+func _Encode_GetEnvironment_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetEnvironmentResponse")
+	}
+	resp := response.(*transport.GetEnvironmentResponse)
+	respEnv, err := PtrEnvironmentsEnvironmentToProto(resp.Env)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetEnvironmentResponse{Env: respEnv}, nil
+}
+
+func _Encode_ListEnvironments_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListEnvironmentsResponse")
+	}
+	resp := response.(*transport.ListEnvironmentsResponse)
+	respEnvs, err := ListPtrEnvironmentsEnvironmentToProto(resp.Envs)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.ListEnvironmentsResponse{Envs: respEnvs}, nil
+}
+
+func _Encode_GetCollection_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetCollectionResponse")
+	}
+	resp := response.(*transport.GetCollectionResponse)
+	respCollection, err := PtrCollectionsCollectionToProto(resp.Collection)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetCollectionResponse{Collection: respCollection}, nil
+}
+
+func _Encode_ListCollections_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListCollectionsResponse")
+	}
+	resp := response.(*transport.ListCollectionsResponse)
+	respCollections, err := ListPtrCollectionsCollectionToProto(resp.Collections)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.ListCollectionsResponse{Collections: respCollections}, nil
+}
+
+func _Encode_GetItem_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetItemResponse")
+	}
+	resp := response.(*transport.GetItemResponse)
+	respItem, err := PtrItemsItemToProto(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetItemResponse{Item: respItem}, nil
+}
+
+func _Encode_FindItems_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindItemsResponse")
+	}
+	resp := response.(*transport.FindItemsResponse)
+	respItem, err := ListPtrItemsItemToProto(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindItemsResponse{
+		Items: respItem,
+		Total: int32(resp.Total),
+	}, nil
+}
+
+func _Decode_ListLocales_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListLocalesRequest")
+	}
+	req := request.(*pb.ListLocalesRequest)
+	return &transport.ListLocalesRequest{SpaceId: string(req.SpaceId)}, nil
+}
+
+func _Decode_GetEnvironment_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetEnvironmentRequest")
+	}
+	req := request.(*pb.GetEnvironmentRequest)
+	return &transport.GetEnvironmentRequest{
+		EnvId:   string(req.EnvId),
+		SpaceId: string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_ListEnvironments_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListEnvironmentsRequest")
+	}
+	req := request.(*pb.ListEnvironmentsRequest)
+	return &transport.ListEnvironmentsRequest{SpaceId: string(req.SpaceId)}, nil
+}
+
+func _Decode_GetCollection_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetCollectionRequest")
+	}
+	req := request.(*pb.GetCollectionRequest)
+	return &transport.GetCollectionRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_ListCollections_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListCollectionsRequest")
+	}
+	req := request.(*pb.ListCollectionsRequest)
+	return &transport.ListCollectionsRequest{
+		EnvId:   string(req.EnvId),
+		SpaceId: string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_GetItem_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetItemRequest")
+	}
+	req := request.(*pb.GetItemRequest)
+	reqOptions, err := ProtoToElPtrItemsGetPublishedOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetItemRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		ItemId:       string(req.ItemId),
+		SpaceId:      string(req.SpaceId),
+		Options:      reqOptions,
+	}, nil
+}
+
+func _Decode_FindItems_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindItemsRequest")
+	}
+	req := request.(*pb.FindItemsRequest)
+	reqFilter, err := ProtoToPtrItemsFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ProtoToElPtrItemsFindPublishedOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindItemsRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		Filter:       reqFilter,
+		Options:      reqOptions,
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_ListLocales_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListLocalesResponse")
+	}
+	resp := response.(*pb.ListLocalesResponse)
+	respLocales, err := ProtoToListPtrLocalesLocale(resp.Locales)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.ListLocalesResponse{Locales: respLocales}, nil
+}
+
+func _Decode_GetEnvironment_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetEnvironmentResponse")
+	}
+	resp := response.(*pb.GetEnvironmentResponse)
+	respEnv, err := ProtoToPtrEnvironmentsEnvironment(resp.Env)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetEnvironmentResponse{Env: respEnv}, nil
+}
+
+func _Decode_ListEnvironments_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListEnvironmentsResponse")
+	}
+	resp := response.(*pb.ListEnvironmentsResponse)
+	respEnvs, err := ProtoToListPtrEnvironmentsEnvironment(resp.Envs)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.ListEnvironmentsResponse{Envs: respEnvs}, nil
+}
+
+func _Decode_GetCollection_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetCollectionResponse")
+	}
+	resp := response.(*pb.GetCollectionResponse)
+	respCollection, err := ProtoToPtrCollectionsCollection(resp.Collection)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetCollectionResponse{Collection: respCollection}, nil
+}
+
+func _Decode_ListCollections_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListCollectionsResponse")
+	}
+	resp := response.(*pb.ListCollectionsResponse)
+	respCollections, err := ProtoToListPtrCollectionsCollection(resp.Collections)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.ListCollectionsResponse{Collections: respCollections}, nil
+}
+
+func _Decode_GetItem_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetItemResponse")
+	}
+	resp := response.(*pb.GetItemResponse)
+	respItem, err := ProtoToPtrItemsItem(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetItemResponse{Item: respItem}, nil
+}
+
+func _Decode_FindItems_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindItemsResponse")
+	}
+	resp := response.(*pb.FindItemsResponse)
+	respItem, err := ProtoToListPtrItemsItem(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindItemsResponse{
+		Items: respItem,
+		Total: int(resp.Total),
+	}, nil
+}
+
+func _Encode_Aggregate_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil AggregateRequest")
+	}
+	req := request.(*transport.AggregateRequest)
+	reqFilter, err := PtrItemsFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ElPtrItemsAggregateOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.AggregateRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		Filter:       reqFilter,
+		Options:      reqOptions,
+		SpaceId:      req.SpaceId,
+	}, nil
+}
+
+func _Encode_Aggregate_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil AggregateResponse")
+	}
+	resp := response.(*transport.AggregateResponse)
+	respResult, err := MapStringInterfaceToProto(resp.Result)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.AggregateResponse{Result: respResult}, nil
+}
+
+func _Decode_Aggregate_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil AggregateRequest")
+	}
+	req := request.(*pb.AggregateRequest)
+	reqFilter, err := ProtoToPtrItemsFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ProtoToElPtrItemsAggregateOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.AggregateRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		Filter:       reqFilter,
+		Options:      reqOptions,
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Aggregate_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil AggregateResponse")
+	}
+	resp := response.(*pb.AggregateResponse)
+	respResult, err := ProtoToMapStringInterface(resp.Result)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.AggregateResponse{Result: respResult}, nil
+}
diff --git a/pkg/delivery/transport/grpc/protobuf_type_converters.microgen.go b/pkg/delivery/transport/grpc/protobuf_type_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..dbbb951d484c903909bf774295ff1851a30995be
--- /dev/null
+++ b/pkg/delivery/transport/grpc/protobuf_type_converters.microgen.go
@@ -0,0 +1,486 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// It is better for you if you do not change functions names!
+// This file will never be overwritten.
+package transportgrpc
+
+import (
+	"fmt"
+
+	collections "git.perx.ru/perxis/perxis-go/pkg/collections"
+	environments "git.perx.ru/perxis/perxis-go/pkg/environments"
+	"git.perx.ru/perxis/perxis-go/pkg/filter"
+	items "git.perx.ru/perxis/perxis-go/pkg/items"
+	locales "git.perx.ru/perxis/perxis-go/pkg/locales"
+	services "git.perx.ru/perxis/perxis-go/pkg/options"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	collectionspb "git.perx.ru/perxis/perxis-go/proto/collections"
+	"git.perx.ru/perxis/perxis-go/proto/common"
+	"git.perx.ru/perxis/perxis-go/proto/delivery"
+	environmentspb "git.perx.ru/perxis/perxis-go/proto/environments"
+	itemspb "git.perx.ru/perxis/perxis-go/proto/items"
+	localespb "git.perx.ru/perxis/perxis-go/proto/locales"
+	"github.com/golang/protobuf/ptypes"
+	"google.golang.org/protobuf/types/known/structpb"
+)
+
+func ListPtrLocalesLocaleToProto(locales []*locales.Locale) ([]*localespb.Locale, error) {
+	protoLocales := make([]*localespb.Locale, 0, len(locales))
+	for _, l := range locales {
+		protoLocales = append(protoLocales, &localespb.Locale{Id: l.ID, Name: l.Name, SpaceId: l.SpaceID})
+	}
+	return protoLocales, nil
+}
+
+func ProtoToListPtrLocalesLocale(protoLocales []*localespb.Locale) ([]*locales.Locale, error) {
+	ls := make([]*locales.Locale, 0, len(protoLocales))
+	for _, pl := range protoLocales {
+		ls = append(ls, &locales.Locale{ID: pl.Id, Name: pl.Name, SpaceID: pl.SpaceId})
+	}
+	return ls, nil
+}
+
+func PtrEnvironmentsEnvironmentToProto(env *environments.Environment) (*environmentspb.Environment, error) {
+	if env == nil {
+		return nil, nil
+	}
+	protoEnvironment := &environmentspb.Environment{
+		Id:          env.ID,
+		SpaceId:     env.SpaceID,
+		Description: env.Description,
+		Aliases:     env.Aliases,
+	}
+	if env.StateInfo != nil {
+		protoEnvironment.StateInfo = &environmentspb.StateInfo{
+			State: environmentspb.StateInfo_State(env.StateInfo.State),
+			Info:  env.StateInfo.Info,
+		}
+		protoEnvironment.StateInfo.StartedAt, _ = ptypes.TimestampProto(env.StateInfo.StartedAt)
+	}
+	if env.Config != nil {
+		protoEnvironment.Config = &environmentspb.Config{
+			SourceId: env.Config.SourceID,
+			Features: env.Config.Features,
+		}
+	}
+	return protoEnvironment, nil
+}
+
+func ProtoToPtrEnvironmentsEnvironment(protoEnv *environmentspb.Environment) (*environments.Environment, error) {
+	if protoEnv == nil {
+		return nil, nil
+	}
+	env := &environments.Environment{
+		ID:          protoEnv.Id,
+		SpaceID:     protoEnv.SpaceId,
+		Description: protoEnv.Description,
+		Aliases:     protoEnv.Aliases,
+	}
+	if protoEnv.StateInfo != nil {
+		env.StateInfo = &environments.StateInfo{
+			State: environments.State(protoEnv.StateInfo.State),
+			Info:  protoEnv.StateInfo.Info,
+		}
+		env.StateInfo.StartedAt, _ = ptypes.Timestamp(protoEnv.StateInfo.StartedAt)
+	}
+	if protoEnv.Config != nil {
+		env.Config = &environments.Config{
+			SourceID: protoEnv.Config.SourceId,
+			Features: protoEnv.Config.Features,
+		}
+	}
+	return env, nil
+}
+
+func ListPtrEnvironmentsEnvironmentToProto(envs []*environments.Environment) ([]*environmentspb.Environment, error) {
+	protoEnvironments := make([]*environmentspb.Environment, 0, len(envs))
+	for _, environment := range envs {
+		protoEnvironment, err := PtrEnvironmentsEnvironmentToProto(environment)
+		if err != nil {
+			return nil, err
+		}
+		protoEnvironments = append(protoEnvironments, protoEnvironment)
+	}
+	return protoEnvironments, nil
+}
+
+func ProtoToListPtrEnvironmentsEnvironment(protoEnvs []*environmentspb.Environment) ([]*environments.Environment, error) {
+	environments := make([]*environments.Environment, 0, len(protoEnvs))
+	for _, protoEnvironment := range protoEnvs {
+		environment, err := ProtoToPtrEnvironmentsEnvironment(protoEnvironment)
+		if err != nil {
+			return nil, err
+		}
+		environments = append(environments, environment)
+	}
+	return environments, nil
+}
+
+func PtrCollectionsCollectionToProto(collection *collections.Collection) (*collectionspb.Collection, error) {
+	if collection == nil {
+		return nil, nil
+	}
+	protoCollection := &collectionspb.Collection{
+		Id:      collection.ID,
+		SpaceId: collection.SpaceID,
+		EnvId:   collection.EnvID,
+		Name:    collection.Name,
+		Single:  collection.Single,
+		System:  collection.System,
+	}
+	if collection.Schema != nil {
+		b, err := collection.Schema.MarshalJSON()
+		if err != nil {
+			return nil, err
+		}
+		protoCollection.Schema = string(b)
+	}
+	return protoCollection, nil
+}
+
+func ProtoToPtrCollectionsCollection(protoCollection *collectionspb.Collection) (*collections.Collection, error) {
+	if protoCollection == nil {
+		return nil, nil
+	}
+	collection := &collections.Collection{
+		ID:      protoCollection.Id,
+		SpaceID: protoCollection.SpaceId,
+		EnvID:   protoCollection.EnvId,
+		Name:    protoCollection.Name,
+		Single:  protoCollection.Single,
+		System:  protoCollection.System,
+	}
+	if protoCollection.Schema != "" {
+		sch := schema.New()
+		err := sch.UnmarshalJSON([]byte(protoCollection.Schema))
+		if err != nil {
+			return nil, fmt.Errorf("failed to decode schema. err: %s", err.Error())
+		}
+		collection.Schema = sch
+	}
+	return collection, nil
+}
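+
+// Illustrative sketch only (not part of the generated converters): collections
+// carry their schema as a JSON string on the wire. schemaJSON is a placeholder
+// for valid schema JSON; nothing here is prescribed by this change.
+//
+//	sch := schema.New()
+//	if err := sch.UnmarshalJSON(schemaJSON); err != nil { /* handle error */ }
+//	col := &collections.Collection{ID: "posts", SpaceID: "space", EnvID: "master", Schema: sch}
+//	pbCol, err := PtrCollectionsCollectionToProto(col) // Schema is marshalled back into a string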
+
+func ListPtrCollectionsCollectionToProto(collections []*collections.Collection) ([]*collectionspb.Collection, error) {
+	protoCollections := make([]*collectionspb.Collection, 0, len(collections))
+	for _, collection := range collections {
+		protoCollection, err := PtrCollectionsCollectionToProto(collection)
+		if err != nil {
+			return nil, err
+		}
+		protoCollections = append(protoCollections, protoCollection)
+	}
+	return protoCollections, nil
+}
+
+func ProtoToListPtrCollectionsCollection(protoCollections []*collectionspb.Collection) ([]*collections.Collection, error) {
+	collections := make([]*collections.Collection, 0, len(protoCollections))
+	for _, protoCollection := range protoCollections {
+		collection, err := ProtoToPtrCollectionsCollection(protoCollection)
+		if err != nil {
+			return nil, err
+		}
+		collections = append(collections, collection)
+	}
+	return collections, nil
+}
+
+func PtrItemsItemToProto(item *items.Item) (*itemspb.Item, error) {
+	if item == nil {
+		return nil, nil
+	}
+
+	protoItem := &itemspb.Item{
+		Id:           item.ID,
+		SpaceId:      item.SpaceID,
+		EnvId:        item.EnvID,
+		CollectionId: item.CollectionID,
+		State:        itemspb.Item_State(item.State),
+		CreatedBy:    item.CreatedBy,
+		UpdatedBy:    item.UpdatedBy,
+		RevisionId:   item.RevisionID,
+		PublishedBy:  item.PublishedBy,
+		ArchivedBy:   item.ArchivedBy,
+		Locale:       item.Locale,
+		// Hidden, Template, Deleted are not passed for delivery
+	}
+
+	var err error
+	protoItem.Data, err = MapStringInterfaceToProto(item.Data)
+	if err != nil {
+		return nil, err
+	}
+	protoItem.Translations, err = MapStringMapStringInterfaceToProto(item.Translations)
+	if err != nil {
+		return nil, err
+	}
+	// protoItem.Permissions is not passed for delivery
+
+	protoItem.CreatedRevAt, _ = ptypes.TimestampProto(item.CreatedRevAt)
+	protoItem.PublishedAt, _ = ptypes.TimestampProto(item.PublishedAt)
+	protoItem.ArchivedAt, _ = ptypes.TimestampProto(item.ArchivedAt)
+	protoItem.CreatedAt, _ = ptypes.TimestampProto(item.CreatedAt)
+	protoItem.UpdatedAt, _ = ptypes.TimestampProto(item.UpdatedAt)
+
+	return protoItem, nil
+}
+
+func ProtoToPtrItemsItem(protoItem *itemspb.Item) (*items.Item, error) {
+	if protoItem == nil {
+		return nil, nil
+	}
+
+	item := &items.Item{
+		ID:           protoItem.Id,
+		SpaceID:      protoItem.SpaceId,
+		EnvID:        protoItem.EnvId,
+		CollectionID: protoItem.CollectionId,
+		State:        items.State(protoItem.State),
+		CreatedBy:    protoItem.CreatedBy,
+		UpdatedBy:    protoItem.UpdatedBy,
+		RevisionID:   protoItem.RevisionId,
+		PublishedBy:  protoItem.PublishedBy,
+		ArchivedBy:   protoItem.ArchivedBy,
+		Locale:       protoItem.Locale,
+		// Hidden, Template, Deleted are not passed for delivery
+	}
+
+	item.Data, _ = ProtoToMapStringInterface(protoItem.Data)
+	item.Translations, _ = ProtoToMapStringMapStringInterface(protoItem.Translations)
+	// item.Permissions is not passed for delivery
+
+	item.CreatedRevAt, _ = ptypes.Timestamp(protoItem.CreatedRevAt)
+	item.PublishedAt, _ = ptypes.Timestamp(protoItem.PublishedAt)
+	item.ArchivedAt, _ = ptypes.Timestamp(protoItem.ArchivedAt)
+	item.CreatedAt, _ = ptypes.Timestamp(protoItem.CreatedAt)
+	item.UpdatedAt, _ = ptypes.Timestamp(protoItem.UpdatedAt)
+
+	return item, nil
+}
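+
+// Illustrative sketch only: a minimal delivery-side item as these converters
+// see it (all field values are made up).
+//
+//	itm := &items.Item{
+//		ID:           "item-1",
+//		SpaceID:      "space",
+//		EnvID:        "master",
+//		CollectionID: "posts",
+//		Data:         map[string]interface{}{"title": "Hello"},
+//	}
+//	pbItem, err := PtrItemsItemToProto(itm) // Data and Translations go through structpb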
+
+func PtrItemsFilterToProto(filter *items.Filter) (*itemspb.Filter, error) {
+	if filter == nil {
+		return nil, nil
+	}
+
+	dt := make([]*common.Filter, 0, len(filter.Data))
+	for _, f := range filter.Data {
+		pf := &common.Filter{
+			Op:    string(f.Op),
+			Field: f.Field,
+		}
+
+		val, err := structpb.NewValue(f.Value)
+		if err != nil {
+			return nil, err
+		}
+		pf.Value = val
+		dt = append(dt, pf)
+	}
+
+	return &itemspb.Filter{
+		Id:   filter.ID,
+		Data: dt,
+		Q:    filter.Q,
+	}, nil
+}
+
+func ProtoToPtrItemsFilter(protoFilter *itemspb.Filter) (*items.Filter, error) {
+	if protoFilter == nil {
+		return nil, nil
+	}
+
+	dt := make([]*filter.Filter, 0, len(protoFilter.Data))
+	for _, pf := range protoFilter.Data {
+
+		f := &filter.Filter{
+			Op:    filter.Op(pf.Op),
+			Field: pf.Field,
+			Value: pf.Value.AsInterface(),
+		}
+
+		dt = append(dt, f)
+	}
+
+	return &items.Filter{
+		ID:   protoFilter.Id,
+		Data: dt,
+		Q:    protoFilter.Q,
+	}, nil
+}
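+
+// Illustrative sketch only: the domain-side filter these converters serialize.
+// The operator "eq" is a hypothetical example and is not taken from this change.
+//
+//	f := &items.Filter{
+//		Data: []*filter.Filter{
+//			{Op: filter.Op("eq"), Field: "status", Value: "published"},
+//		},
+//	}
+//	pbFilter, err := PtrItemsFilterToProto(f) // each Value is converted via structpb.NewValue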
+
+func PtrServicesFindOptionsToProto(options *services.FindOptions) (*common.FindOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+	return &common.FindOptions{
+		Sort:     options.Sort,
+		PageNum:  int32(options.PageNum),
+		PageSize: int32(options.PageSize),
+	}, nil
+}
+
+func ProtoToPtrServicesFindOptions(protoOptions *common.FindOptions) (*services.FindOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+	return &services.FindOptions{
+		SortOptions: services.SortOptions{
+			Sort: protoOptions.Sort,
+		},
+		PaginationOptions: services.PaginationOptions{
+			PageNum:  int(protoOptions.PageNum),
+			PageSize: int(protoOptions.PageSize),
+		},
+	}, nil
+}
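+
+// Illustrative sketch only: the domain FindOptions embeds SortOptions and
+// PaginationOptions, while the proto message flattens them into
+// Sort/PageNum/PageSize; Sort is copied through unchanged.
+//
+//	opts := &services.FindOptions{
+//		PaginationOptions: services.PaginationOptions{PageNum: 1, PageSize: 50},
+//	}
+//	pbOpts, _ := PtrServicesFindOptionsToProto(opts)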
+
+func ListPtrItemsItemToProto(itms []*items.Item) ([]*itemspb.Item, error) {
+	protoItems := make([]*itemspb.Item, 0, len(itms))
+	for _, itm := range itms {
+		pi, err := PtrItemsItemToProto(itm)
+		if err != nil {
+			return nil, err
+		}
+		protoItems = append(protoItems, pi)
+	}
+	return protoItems, nil
+}
+
+func ProtoToListPtrItemsItem(protoItems []*itemspb.Item) ([]*items.Item, error) {
+	items := make([]*items.Item, 0, len(protoItems))
+	for _, itm := range protoItems {
+		pi, err := ProtoToPtrItemsItem(itm)
+		if err != nil {
+			return nil, err
+		}
+		items = append(items, pi)
+	}
+	return items, nil
+}
+
+func MapStringInterfaceToProto(data map[string]interface{}) (*structpb.Struct, error) {
+	if data == nil {
+		return nil, nil
+	}
+	return structpb.NewStruct(data)
+}
+
+func ProtoToMapStringInterface(protoData *structpb.Struct) (map[string]interface{}, error) {
+	if protoData == nil {
+		return nil, nil
+	}
+	return protoData.AsMap(), nil
+}
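+
+// Illustrative sketch only: item data is a free-form map, so it crosses the
+// wire as a structpb.Struct. Note that numeric values round-trip as float64.
+//
+//	data := map[string]interface{}{"title": "Hello", "views": 42}
+//	pbData, err := MapStringInterfaceToProto(data) // -> *structpb.Struct
+//	back, _ := ProtoToMapStringInterface(pbData)   // -> map[string]interface{}, "views" is float64(42)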
+
+func MapStringMapStringInterfaceToProto(translations map[string]map[string]interface{}) (map[string]*structpb.Struct, error) {
+	if translations == nil {
+		return nil, nil
+	}
+	res := make(map[string]*structpb.Struct, len(translations))
+	for k, v := range translations {
+		res[k], _ = MapStringInterfaceToProto(v)
+	}
+	return res, nil
+}
+
+func ProtoToMapStringMapStringInterface(protoTranslations map[string]*structpb.Struct) (map[string]map[string]interface{}, error) {
+	if protoTranslations == nil {
+		return nil, nil
+	}
+	res := make(map[string]map[string]interface{}, len(protoTranslations))
+	for k, v := range protoTranslations {
+		res[k], _ = ProtoToMapStringInterface(v)
+	}
+	return res, nil
+}
+
+func ElPtrItemsGetPublishedOptionsToProto(options []*items.GetPublishedOptions) (*itemspb.GetPublishedOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := items.MergeGetPublishedOptions(options...)
+
+	return &itemspb.GetPublishedOptions{LocaleId: opts.LocaleID}, nil
+}
+
+func ProtoToElPtrItemsGetPublishedOptions(protoOptions *itemspb.GetPublishedOptions) ([]*items.GetPublishedOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+
+	return []*items.GetPublishedOptions{{LocaleID: protoOptions.LocaleId}}, nil
+}
+
+func ElPtrItemsFindPublishedOptionsToProto(options []*items.FindPublishedOptions) (*itemspb.FindPublishedOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := items.MergeFindPublishedOptions(options...)
+
+	var err error
+
+	fo := &itemspb.FindPublishedOptions{}
+
+	fo.Options, err = PtrServicesFindOptionsToProto(&opts.FindOptions)
+	if err != nil {
+		return nil, err
+	}
+
+	fo.LocaleId = opts.LocaleID
+
+	return fo, nil
+}
+
+func ProtoToElPtrItemsFindPublishedOptions(protoOptions *itemspb.FindPublishedOptions) ([]*items.FindPublishedOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+
+	var err error
+	fo := &items.FindPublishedOptions{}
+
+	o, err := ProtoToPtrServicesFindOptions(protoOptions.Options)
+	if err != nil {
+		return nil, err
+	}
+	if o != nil {
+		fo.FindOptions = *o
+	}
+
+	fo.LocaleID = protoOptions.LocaleId
+
+	return []*items.FindPublishedOptions{fo}, nil
+}
+
+func PtrServicesAggregateOptionsToProto(options *items.AggregatePublishedOptions) (*delivery.AggregateOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+	return &delivery.AggregateOptions{Fields: options.Fields}, nil
+}
+
+func ProtoToPtrServicesAggregateOptions(protoOptions *delivery.AggregateOptions) ([]*items.AggregatePublishedOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+	return []*items.AggregatePublishedOptions{{Fields: protoOptions.Fields}}, nil
+}
+
+func ElPtrItemsAggregateOptionsToProto(options []*items.AggregatePublishedOptions) (*delivery.AggregateOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := items.MergeAggregatePublishedOptions(options...)
+	return PtrServicesAggregateOptionsToProto(opts)
+}
+
+func ProtoToElPtrItemsAggregateOptions(protoOptions *delivery.AggregateOptions) ([]*items.AggregatePublishedOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+	return []*items.AggregatePublishedOptions{{Fields: protoOptions.Fields}}, nil
+}
diff --git a/pkg/delivery/transport/grpc/server.microgen.go b/pkg/delivery/transport/grpc/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..d815b136f2218cafecf4fac406d35d1ca6464aa5
--- /dev/null
+++ b/pkg/delivery/transport/grpc/server.microgen.go
@@ -0,0 +1,142 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// DO NOT EDIT.
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/delivery/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/delivery"
+	grpc "github.com/go-kit/kit/transport/grpc"
+	context "golang.org/x/net/context"
+)
+
+type deliveryServer struct {
+	listLocales        grpc.Handler
+	getEnvironment     grpc.Handler
+	listEnvironments   grpc.Handler
+	getCollection      grpc.Handler
+	listCollections    grpc.Handler
+	getItem            grpc.Handler
+	findItems          grpc.Handler
+	aggregate          grpc.Handler
+	aggregatePublished grpc.Handler
+
+	pb.UnimplementedDeliveryServer
+}
+
+func NewGRPCServer(endpoints *transport.EndpointsSet, opts ...grpc.ServerOption) pb.DeliveryServer {
+	return &deliveryServer{
+		aggregate: grpc.NewServer(
+			endpoints.AggregateEndpoint,
+			_Decode_Aggregate_Request,
+			_Encode_Aggregate_Response,
+			opts...,
+		),
+		findItems: grpc.NewServer(
+			endpoints.FindItemsEndpoint,
+			_Decode_FindItems_Request,
+			_Encode_FindItems_Response,
+			opts...,
+		),
+		getCollection: grpc.NewServer(
+			endpoints.GetCollectionEndpoint,
+			_Decode_GetCollection_Request,
+			_Encode_GetCollection_Response,
+			opts...,
+		),
+		getEnvironment: grpc.NewServer(
+			endpoints.GetEnvironmentEndpoint,
+			_Decode_GetEnvironment_Request,
+			_Encode_GetEnvironment_Response,
+			opts...,
+		),
+		getItem: grpc.NewServer(
+			endpoints.GetItemEndpoint,
+			_Decode_GetItem_Request,
+			_Encode_GetItem_Response,
+			opts...,
+		),
+		listCollections: grpc.NewServer(
+			endpoints.ListCollectionsEndpoint,
+			_Decode_ListCollections_Request,
+			_Encode_ListCollections_Response,
+			opts...,
+		),
+		listEnvironments: grpc.NewServer(
+			endpoints.ListEnvironmentsEndpoint,
+			_Decode_ListEnvironments_Request,
+			_Encode_ListEnvironments_Response,
+			opts...,
+		),
+		listLocales: grpc.NewServer(
+			endpoints.ListLocalesEndpoint,
+			_Decode_ListLocales_Request,
+			_Encode_ListLocales_Response,
+			opts...,
+		),
+	}
+}
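+
+// Wiring sketch (assumptions: pb.RegisterDeliveryServer is the standard
+// protoc-generated registration function, svc implements delivery.Delivery,
+// and srv is a *grpc.Server from google.golang.org/grpc; none of them are
+// defined in this change):
+//
+//	endpoints := transport.Endpoints(svc)
+//	pb.RegisterDeliveryServer(srv, NewGRPCServer(&endpoints))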
+
+func (S *deliveryServer) ListLocales(ctx context.Context, req *pb.ListLocalesRequest) (*pb.ListLocalesResponse, error) {
+	_, resp, err := S.listLocales.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.ListLocalesResponse), nil
+}
+
+func (S *deliveryServer) GetEnvironment(ctx context.Context, req *pb.GetEnvironmentRequest) (*pb.GetEnvironmentResponse, error) {
+	_, resp, err := S.getEnvironment.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetEnvironmentResponse), nil
+}
+
+func (S *deliveryServer) ListEnvironments(ctx context.Context, req *pb.ListEnvironmentsRequest) (*pb.ListEnvironmentsResponse, error) {
+	_, resp, err := S.listEnvironments.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.ListEnvironmentsResponse), nil
+}
+
+func (S *deliveryServer) GetCollection(ctx context.Context, req *pb.GetCollectionRequest) (*pb.GetCollectionResponse, error) {
+	_, resp, err := S.getCollection.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetCollectionResponse), nil
+}
+
+func (S *deliveryServer) ListCollections(ctx context.Context, req *pb.ListCollectionsRequest) (*pb.ListCollectionsResponse, error) {
+	_, resp, err := S.listCollections.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.ListCollectionsResponse), nil
+}
+
+func (S *deliveryServer) GetItem(ctx context.Context, req *pb.GetItemRequest) (*pb.GetItemResponse, error) {
+	_, resp, err := S.getItem.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetItemResponse), nil
+}
+
+func (S *deliveryServer) FindItems(ctx context.Context, req *pb.FindItemsRequest) (*pb.FindItemsResponse, error) {
+	_, resp, err := S.findItems.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.FindItemsResponse), nil
+}
+
+func (S *deliveryServer) Aggregate(ctx context.Context, req *pb.AggregateRequest) (*pb.AggregateResponse, error) {
+	_, resp, err := S.aggregate.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.AggregateResponse), nil
+}
diff --git a/pkg/delivery/transport/server.microgen.go b/pkg/delivery/transport/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..a50b5316226d1b030a5ddfa05a21e1a4044c1ee5
--- /dev/null
+++ b/pkg/delivery/transport/server.microgen.go
@@ -0,0 +1,90 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+
+	delivery "git.perx.ru/perxis/perxis-go/pkg/delivery"
+	endpoint "github.com/go-kit/kit/endpoint"
+)
+
+func Endpoints(svc delivery.Delivery) EndpointsSet {
+	return EndpointsSet{
+		AggregateEndpoint:        AggregateEndpoint(svc),
+		FindItemsEndpoint:        FindItemsEndpoint(svc),
+		GetCollectionEndpoint:    GetCollectionEndpoint(svc),
+		GetEnvironmentEndpoint:   GetEnvironmentEndpoint(svc),
+		GetItemEndpoint:          GetItemEndpoint(svc),
+		ListCollectionsEndpoint:  ListCollectionsEndpoint(svc),
+		ListEnvironmentsEndpoint: ListEnvironmentsEndpoint(svc),
+		ListLocalesEndpoint:      ListLocalesEndpoint(svc),
+	}
+}
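+
+// Each endpoint below follows the same go-kit pattern: assert the typed
+// request, call the corresponding delivery.Delivery method, and wrap the
+// results into a typed response. A minimal sketch of calling one endpoint
+// directly, e.g. in a test (svc and ctx are assumed, and err must be checked
+// in real code):
+//
+//	resp, err := ListLocalesEndpoint(svc)(ctx, &ListLocalesRequest{SpaceId: "space"})
+//	locales := resp.(*ListLocalesResponse).Locales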
+
+func ListLocalesEndpoint(svc delivery.Delivery) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*ListLocalesRequest)
+		res0, res1 := svc.ListLocales(arg0, req.SpaceId)
+		return &ListLocalesResponse{Locales: res0}, res1
+	}
+}
+
+func GetEnvironmentEndpoint(svc delivery.Delivery) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetEnvironmentRequest)
+		res0, res1 := svc.GetEnvironment(arg0, req.SpaceId, req.EnvId)
+		return &GetEnvironmentResponse{Env: res0}, res1
+	}
+}
+
+func ListEnvironmentsEndpoint(svc delivery.Delivery) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*ListEnvironmentsRequest)
+		res0, res1 := svc.ListEnvironments(arg0, req.SpaceId)
+		return &ListEnvironmentsResponse{Envs: res0}, res1
+	}
+}
+
+func GetCollectionEndpoint(svc delivery.Delivery) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetCollectionRequest)
+		res0, res1 := svc.GetCollection(arg0, req.SpaceId, req.EnvId, req.CollectionId)
+		return &GetCollectionResponse{Collection: res0}, res1
+	}
+}
+
+func ListCollectionsEndpoint(svc delivery.Delivery) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*ListCollectionsRequest)
+		res0, res1 := svc.ListCollections(arg0, req.SpaceId, req.EnvId)
+		return &ListCollectionsResponse{Collections: res0}, res1
+	}
+}
+
+func GetItemEndpoint(svc delivery.Delivery) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetItemRequest)
+		res0, res1 := svc.GetItem(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.Options...)
+		return &GetItemResponse{Item: res0}, res1
+	}
+}
+
+func FindItemsEndpoint(svc delivery.Delivery) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*FindItemsRequest)
+		res0, res1, res2 := svc.FindItems(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Filter, req.Options...)
+		return &FindItemsResponse{
+			Items: res0,
+			Total: res1,
+		}, res2
+	}
+}
+
+func AggregateEndpoint(svc delivery.Delivery) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*AggregateRequest)
+		res0, res1 := svc.Aggregate(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Filter, req.Options...)
+		return &AggregateResponse{Result: res0}, res1
+	}
+}
diff --git a/pkg/environments/environment.go b/pkg/environments/environment.go
new file mode 100644
index 0000000000000000000000000000000000000000..cbd468dc361951e048a0840f4158a42c962e7af1
--- /dev/null
+++ b/pkg/environments/environment.go
@@ -0,0 +1,114 @@
+package environments
+
+import "time"
+
+const (
+	DefaultEnvironment = "master"
+)
+
+type State int
+
+const (
+	StateUnknown State = iota
+	StateNew
+	StatePreparing
+	StateReady
+	StateError
+
+	StateInfoEmpty = "EMPTY"
+)
+
+func (s State) String() string {
+	switch s {
+	case StateNew:
+		return "new"
+	case StatePreparing:
+		return "preparing"
+	case StateReady:
+		return "ready"
+	case StateError:
+		return "error"
+	default:
+		return "unknown"
+	}
+}
+
+type StateInfo struct {
+	State     State     `json:"state" bson:"state"`
+	StartedAt time.Time `json:"started_at,omitempty" bson:"started_at,omitempty"`
+	Info      string    `json:"info,omitempty" bson:"info,omitempty"`
+}
+
+type Config struct {
+	SourceID string
+
+	// Deprecated
+	Features []string
+}
+
+// Environment represents a working environment of a space.
+// Each environment can have its own set of collections and data and can be
+// used independently of the others.
+type Environment struct {
+	ID          string `json:"id" bson:"_id"` // Environment identifier, set by the user at creation. Unique within the space `SpaceID`
+	SpaceID     string `json:"spaceID" bson:"-"`
+	Description string `json:"description" bson:"desc,omitempty"` // Environment description
+	//State       State  `json:"state" bson:"state"`                // Environment state (Preparing/Ready/Failed)
+	//StateInfo   string   `json:"state_info,omitempty" bson:"state_info,omitempty"`
+
+	// StateInfo reflects the state of the environment:
+	// - State: environment state identifier (unknown/new/preparing/ready/error)
+	// - Info: additional information about the state (for example, an error that
+	//   occurred while applying the schema to a collection)
+	// - StartedAt: the time at which the environment entered the `Preparing` state
+	StateInfo *StateInfo `json:"state_info" bson:"state_info,omitempty"`
+
+	Aliases []string `json:"aliases" bson:"aliases,omitempty"` // Environment aliases (read-only)
+	Config  *Config  `json:"config,omitempty" bson:"config,omitempty"`
+}
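+
+// Illustrative value only (all field values are made up):
+//
+//	env := &Environment{
+//		ID:          "master",
+//		SpaceID:     "my-space",
+//		Description: "Primary environment",
+//		Aliases:     []string{"prod"},
+//		StateInfo:   &StateInfo{State: StateReady},
+//	}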
+
+func (e Environment) Clone() *Environment {
+	clone := &Environment{
+		ID:          e.ID,
+		SpaceID:     e.SpaceID,
+		Description: e.Description,
+		Aliases:     append([]string(nil), e.Aliases...),
+		Config:      nil,
+	}
+
+	if e.StateInfo != nil {
+		clone.StateInfo = &StateInfo{
+			State:     e.StateInfo.State,
+			Info:      e.StateInfo.Info,
+			StartedAt: e.StateInfo.StartedAt,
+		}
+	}
+
+	if e.Config != nil {
+		clone.Config = &Config{
+			SourceID: e.Config.SourceID,
+		}
+	}
+
+	return clone
+}
+
+func (e Environment) Fetch(i interface{}) interface{} {
+	p, _ := i.(string)
+	switch p {
+	case "ID":
+		return e.ID
+	case "SpaceID":
+		return e.SpaceID
+	case "Description":
+		return e.Description
+	case "StateInfo":
+		return e.StateInfo
+	case "Aliases":
+		return e.Aliases
+	case "Config":
+		return e.Config
+	default:
+		panic("unknown parameter")
+	}
+}
diff --git a/pkg/environments/middleware/caching_middleware.go b/pkg/environments/middleware/caching_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..18b594bc3a193d962d63ef3d5eae6217408d61bd
--- /dev/null
+++ b/pkg/environments/middleware/caching_middleware.go
@@ -0,0 +1,167 @@
+package service
+
+import (
+	"context"
+	"strings"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	service "git.perx.ru/perxis/perxis-go/pkg/environments"
+)
+
+func makeKey(ss ...string) string {
+	return strings.Join(ss, "-")
+}
+
+func CachingMiddleware(cache *cache.Cache) Middleware {
+	return func(next service.Environments) service.Environments {
+		return &cachingMiddleware{
+			cache: cache,
+			next:  next,
+		}
+	}
+}
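+
+// Composition sketch (mirrors the accompanying tests): wrap any
+// environments.Environments implementation so reads go through the cache.
+// The size and TTL values are illustrative.
+//
+//	envs = CachingMiddleware(cache.NewCache(100, time.Minute))(envs)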
+
+type cachingMiddleware struct {
+	cache *cache.Cache
+	next  service.Environments
+}
+
+func (m cachingMiddleware) Create(ctx context.Context, env *service.Environment) (environment *service.Environment, err error) {
+
+	environment, err = m.next.Create(ctx, env)
+	if err == nil {
+		m.cache.Remove(environment.SpaceID)
+	}
+	return environment, err
+}
+
+func (m cachingMiddleware) Get(ctx context.Context, spaceId string, envId string) (environment *service.Environment, err error) {
+
+	value, e := m.cache.Get(makeKey(spaceId, envId))
+	if e == nil {
+		return value.(*service.Environment), err
+	}
+	environment, err = m.next.Get(ctx, spaceId, envId)
+	if err == nil {
+		m.cache.Set(makeKey(spaceId, environment.ID), environment)
+		for _, a := range environment.Aliases {
+			m.cache.Set(makeKey(spaceId, a), environment)
+		}
+	}
+	return environment, err
+}
+
+func (m cachingMiddleware) List(ctx context.Context, spaceId string) (environments []*service.Environment, err error) {
+
+	value, e := m.cache.Get(spaceId)
+	if e == nil {
+		return value.([]*service.Environment), err
+	}
+	environments, err = m.next.List(ctx, spaceId)
+	if err == nil {
+		m.cache.Set(spaceId, environments)
+	}
+	return environments, err
+}
+
+func (m cachingMiddleware) Update(ctx context.Context, env *service.Environment) (err error) {
+
+	err = m.next.Update(ctx, env)
+	if err == nil {
+		value, e := m.cache.Get(makeKey(env.SpaceID, env.ID))
+		if e == nil {
+			env := value.(*service.Environment)
+			m.cache.Remove(makeKey(env.SpaceID, env.ID))
+			for _, a := range env.Aliases {
+				m.cache.Remove(makeKey(env.SpaceID, a))
+			}
+		}
+		m.cache.Remove(env.SpaceID)
+	}
+	return err
+}
+
+func (m cachingMiddleware) Delete(ctx context.Context, spaceId string, envId string) (err error) {
+
+	err = m.next.Delete(ctx, spaceId, envId)
+	if err == nil {
+		value, e := m.cache.Get(makeKey(spaceId, envId))
+		if e == nil {
+			env := value.(*service.Environment)
+			m.cache.Remove(makeKey(env.SpaceID, env.ID))
+			for _, a := range env.Aliases {
+				m.cache.Remove(makeKey(env.SpaceID, a))
+			}
+		}
+		m.cache.Remove(spaceId)
+	}
+	return err
+}
+
+func (m cachingMiddleware) SetAlias(ctx context.Context, spaceId string, envId string, alias string) (err error) {
+
+	err = m.next.SetAlias(ctx, spaceId, envId, alias)
+	if err == nil {
+		value, e := m.cache.Get(makeKey(spaceId, alias))
+		if e == nil {
+			env := value.(*service.Environment)
+			m.cache.Remove(makeKey(env.SpaceID, env.ID))
+			for _, a := range env.Aliases {
+				m.cache.Remove(makeKey(env.SpaceID, a))
+			}
+		}
+
+		value, e = m.cache.Get(makeKey(spaceId, envId))
+		if e == nil {
+			env := value.(*service.Environment)
+			m.cache.Remove(makeKey(env.SpaceID, env.ID))
+			for _, a := range env.Aliases {
+				m.cache.Remove(makeKey(env.SpaceID, a))
+			}
+		}
+		m.cache.Remove(spaceId)
+	}
+	return err
+}
+
+func (m cachingMiddleware) RemoveAlias(ctx context.Context, spaceId string, envId string, alias string) (err error) {
+
+	err = m.next.RemoveAlias(ctx, spaceId, envId, alias)
+	if err == nil {
+		m.cache.Remove(spaceId)
+		value, e := m.cache.Get(makeKey(spaceId, alias))
+		if e == nil {
+			env := value.(*service.Environment)
+			m.cache.Remove(makeKey(env.SpaceID, env.ID))
+			for _, a := range env.Aliases {
+				m.cache.Remove(makeKey(env.SpaceID, a))
+			}
+		}
+
+		value, e = m.cache.Get(makeKey(spaceId, envId))
+		if e == nil {
+			env := value.(*service.Environment)
+			m.cache.Remove(makeKey(env.SpaceID, env.ID))
+			for _, a := range env.Aliases {
+				m.cache.Remove(makeKey(env.SpaceID, a))
+			}
+		}
+	}
+	return err
+}
+
+func (m cachingMiddleware) Migrate(ctx context.Context, spaceId, envId string, options ...*service.MigrateOptions) (err error) {
+	err = m.next.Migrate(ctx, spaceId, envId, options...)
+
+	// Remove the value from the cache regardless of whether an error occurred, since the environment state may have changed
+	value, e := m.cache.Get(makeKey(spaceId, envId))
+	if e == nil {
+		env := value.(*service.Environment)
+		m.cache.Remove(makeKey(env.SpaceID, env.ID))
+		for _, a := range env.Aliases {
+			m.cache.Remove(makeKey(env.SpaceID, a))
+		}
+	}
+	m.cache.Remove(spaceId)
+	return err
+}
diff --git a/pkg/environments/middleware/caching_middleware_test.go b/pkg/environments/middleware/caching_middleware_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..f5bfa7a9b5d207ed0c00d60cd126950a6da5aba0
--- /dev/null
+++ b/pkg/environments/middleware/caching_middleware_test.go
@@ -0,0 +1,387 @@
+package service
+
+import (
+	"context"
+	"testing"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	mocksenvironments "git.perx.ru/perxis/perxis-go/pkg/environments/mocks"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/mock"
+	"github.com/stretchr/testify/require"
+)
+
+func TestEnvironmentsCache(t *testing.T) {
+
+	const (
+		envID    = "envID"
+		spaceID  = "spaceID"
+		envAlias = "envAlias"
+		size     = 5
+		ttl      = 20 * time.Millisecond
+	)
+
+	errNotFound := errors.NotFound(errors.New("not found"))
+
+	ctx := context.Background()
+
+	t.Run("Get from cache", func(t *testing.T) {
+		envs := &mocksenvironments.Environments{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl))(envs)
+
+		envs.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Description: "Environment", Aliases: []string{envAlias}}, nil).Once()
+
+		v1, err := svc.Get(ctx, spaceID, envID)
+		require.NoError(t, err)
+
+		v2, err := svc.Get(ctx, spaceID, envID)
+		require.NoError(t, err)
+		assert.Same(t, v1, v2, "Expected to get the object from the cache on a repeated request by ID.")
+
+		v3, err := svc.Get(ctx, spaceID, envAlias)
+		require.NoError(t, err)
+		assert.Same(t, v3, v2, "Expected to get the same object from the cache when it is requested by the environment alias.")
+
+		envs.AssertExpectations(t)
+	})
+
+	t.Run("Get from cache(by Alias)", func(t *testing.T) {
+		envs := &mocksenvironments.Environments{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl))(envs)
+
+		envs.On("Get", mock.Anything, spaceID, envAlias).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Description: "Environment", Aliases: []string{envAlias}}, nil).Once()
+
+		v1, err := svc.Get(ctx, spaceID, envAlias)
+		require.NoError(t, err)
+
+		v2, err := svc.Get(ctx, spaceID, envAlias)
+		require.NoError(t, err)
+		assert.Same(t, v1, v2, "Expected to get the object from the cache by alias.")
+
+		v3, err := svc.Get(ctx, spaceID, envID)
+		require.NoError(t, err)
+		assert.Same(t, v3, v2, "Expected to get the same object from the cache when it is requested by the environment ID.")
+
+		envs.AssertExpectations(t)
+	})
+
+	t.Run("List from cache", func(t *testing.T) {
+		envs := &mocksenvironments.Environments{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl))(envs)
+
+		envs.On("List", mock.Anything, spaceID).Return([]*environments.Environment{{ID: envID, SpaceID: spaceID, Description: "Environment"}}, nil).Once()
+
+		vl1, err := svc.List(ctx, spaceID)
+		require.NoError(t, err)
+
+		vl2, err := svc.List(ctx, spaceID)
+		require.NoError(t, err)
+		assert.Same(t, vl1[0], vl2[0], "Expected to get the objects from the cache.")
+
+		envs.AssertExpectations(t)
+	})
+
+	t.Run("Invalidate cache", func(t *testing.T) {
+		t.Run("After SetAlias", func(t *testing.T) {
+			envs := &mocksenvironments.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(envs)
+
+			envs.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Description: "Environment", Aliases: []string{"envID2"}}, nil).Once()
+			envs.On("List", mock.Anything, spaceID).Return([]*environments.Environment{{ID: envID, SpaceID: spaceID, Description: "Environment", Aliases: []string{"envID2"}}}, nil).Once()
+			envs.On("SetAlias", mock.Anything, spaceID, envID, envAlias).Return(nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Expected to get the object from the cache.")
+
+			vl1, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, vl1[0], vl2[0], "Expected to get the objects from the cache.")
+
+			err = svc.SetAlias(ctx, spaceID, envID, envAlias)
+			require.NoError(t, err)
+
+			envs.On("Get", mock.Anything, spaceID, envAlias).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Description: "Environment", Aliases: []string{"envID2", envAlias}}, nil).Once()
+			envs.On("List", mock.Anything, spaceID).Return([]*environments.Environment{{ID: envID, SpaceID: spaceID, Description: "Environment", Aliases: []string{"envID2", envAlias}}}, nil).Once()
+
+			v4, err := svc.Get(ctx, spaceID, envAlias)
+			require.NoError(t, err)
+			assert.Contains(t, v4.Aliases, envAlias, "Expected the item to be requested from the service by Alias.")
+
+			v5, err := svc.Get(ctx, spaceID, envID)
+			require.NoError(t, err)
+			assert.Same(t, v4, v5, "Expected to get the object from the cache by ID.")
+
+			vl3, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.NotSame(t, vl2[0], vl3[0], "Expected the objects to be removed from the cache and requested from the service.")
+
+			envs.AssertExpectations(t)
+		})
+
+		t.Run("After RemoveAlias", func(t *testing.T) {
+			envs := &mocksenvironments.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(envs)
+
+			envs.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Description: "Environment", Aliases: []string{"envID2", envAlias}}, nil).Once()
+			envs.On("List", mock.Anything, spaceID).Return([]*environments.Environment{{ID: envID, SpaceID: spaceID, Description: "Environment", Aliases: []string{"envID2", envAlias}}}, nil).Once()
+			envs.On("RemoveAlias", mock.Anything, spaceID, envID, envAlias).Return(nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Expected to get the object from the cache by ID.")
+
+			v3, err := svc.Get(ctx, spaceID, envAlias)
+			require.NoError(t, err)
+			assert.Same(t, v2, v3, "Expected to get the object from the cache by Alias.")
+
+			vl1, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, vl1[0], vl2[0], "Expected to get the objects from the cache.")
+
+			err = svc.RemoveAlias(ctx, spaceID, envID, envAlias)
+			require.NoError(t, err)
+
+			envs.On("Get", mock.Anything, spaceID, envAlias).Return(nil, errNotFound).Once()
+			envs.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Description: "Environment", Aliases: []string{"envID2"}}, nil).Once()
+			envs.On("List", mock.Anything, spaceID).Return([]*environments.Environment{{ID: envID, SpaceID: spaceID, Description: "Environment", Aliases: []string{"envID2"}}}, nil).Once()
+
+			_, err = svc.Get(ctx, spaceID, envAlias)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Expected the item to be removed from the cache and the service to return an error for the non-existent Alias.")
+
+			v4, err := svc.Get(ctx, spaceID, envID)
+			require.NoError(t, err)
+			assert.NotSame(t, v3, v4, "Expected the item to be removed from the cache and fetched from the service by ID.")
+
+			vl3, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.NotSame(t, vl2[0], vl3[0], "Expected the objects to be removed from the cache and requested from the service.")
+
+			envs.AssertExpectations(t)
+		})
+
+		t.Run("After Update", func(t *testing.T) {
+			envs := &mocksenvironments.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(envs)
+
+			envs.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Description: "Environment", Aliases: []string{envAlias}}, nil).Once()
+			envs.On("List", mock.Anything, spaceID).Return([]*environments.Environment{{ID: envID, SpaceID: spaceID, Description: "Environment", Aliases: []string{envAlias}}}, nil).Once()
+			envs.On("Update", mock.Anything, mock.Anything).Return(nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Expected to get the object from the cache.")
+
+			v3, err := svc.Get(ctx, spaceID, envAlias)
+			require.NoError(t, err)
+			assert.Same(t, v2, v3, "Expected to get the object from the cache by Alias.")
+
+			vl1, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, vl1[0], vl2[0], "Expected to get the objects from the cache.")
+
+			err = svc.Update(ctx, &environments.Environment{ID: envID, SpaceID: spaceID, Description: "EnvironmentUPD", Aliases: []string{envAlias}})
+			require.NoError(t, err)
+
+			envs.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Description: "EnvironmentUPD", Aliases: []string{envAlias}}, nil).Once()
+			envs.On("List", mock.Anything, spaceID).Return([]*environments.Environment{{ID: envID, SpaceID: spaceID, Description: "EnvironmentUPD", Aliases: []string{envAlias}}}, nil).Once()
+
+			_, err = svc.Get(ctx, spaceID, envID)
+			require.NoError(t, err)
+
+			v4, err := svc.Get(ctx, spaceID, envID)
+			require.NoError(t, err)
+			assert.NotSame(t, v2, v4, "Expected the item to be removed from the cache and requested from the service again.")
+
+			v5, err := svc.Get(ctx, spaceID, envAlias)
+			require.NoError(t, err)
+			assert.Same(t, v4, v5, "Expected to get the object from the cache by Alias after the object was updated and fetched by ID.")
+
+			vl3, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.NotSame(t, vl2[0], vl3[0], "Expected the objects to be removed from the cache and requested from the service.")
+
+			envs.AssertExpectations(t)
+		})
+
+		t.Run("After Update(List)", func(t *testing.T) {
+			envs := &mocksenvironments.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(envs)
+
+			envs.On("List", mock.Anything, spaceID).Return([]*environments.Environment{{ID: envID, SpaceID: spaceID, Description: "Environment", Aliases: []string{envAlias}}}, nil).Once()
+			envs.On("Update", mock.Anything, mock.Anything).Return(nil).Once()
+
+			vl1, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, vl1[0], vl2[0], "Expected to get the objects from the cache.")
+
+			err = svc.Update(ctx, &environments.Environment{ID: envID, SpaceID: spaceID, Description: "EnvironmentUPD", Aliases: []string{envAlias}})
+			require.NoError(t, err)
+
+			envs.On("List", mock.Anything, spaceID).Return([]*environments.Environment{{ID: envID, SpaceID: spaceID, Description: "EnvironmentUPD", Aliases: []string{envAlias}}}, nil).Once()
+
+			vl3, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.NotSame(t, vl2[0], vl3[0], "Expected the objects to be removed from the cache and requested from the service.")
+
+			envs.AssertExpectations(t)
+		})
+
+		t.Run("After Delete", func(t *testing.T) {
+			envs := &mocksenvironments.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(envs)
+
+			envs.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Description: "Environment", Aliases: []string{envAlias}}, nil).Once()
+			envs.On("List", mock.Anything, spaceID).Return([]*environments.Environment{{ID: envID, SpaceID: spaceID, Description: "Environment", Aliases: []string{envAlias}}}, nil).Once()
+			envs.On("Delete", mock.Anything, spaceID, envID).Return(nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Expected to get the object from the cache.")
+
+			v3, err := svc.Get(ctx, spaceID, envAlias)
+			require.NoError(t, err)
+			assert.Same(t, v2, v3, "Expected to get the object from the cache by Alias.")
+
+			vl1, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, vl1[0], vl2[0], "Expected to get the objects from the cache.")
+
+			err = svc.Delete(ctx, spaceID, envID)
+			require.NoError(t, err)
+
+			envs.On("Get", mock.Anything, spaceID, envID).Return(nil, errNotFound).Once()
+			envs.On("Get", mock.Anything, spaceID, envAlias).Return(nil, errNotFound).Once()
+			envs.On("List", mock.Anything, spaceID).Return([]*environments.Environment{}, nil).Once()
+
+			_, err = svc.Get(ctx, spaceID, envID)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Expected the item to be removed from the cache by ID and the service to return an error.")
+
+			_, err = svc.Get(ctx, spaceID, envAlias)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Expected the item to be removed from the cache by Alias and the service to return an error.")
+
+			vl3, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Len(t, vl3, 0, "Expected the objects to be removed from the cache and requested from the service.")
+
+			envs.AssertExpectations(t)
+		})
+
+		t.Run("After Create", func(t *testing.T) {
+			envs := &mocksenvironments.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(envs)
+
+			envs.On("List", mock.Anything, spaceID).Return([]*environments.Environment{{ID: envID, SpaceID: spaceID, Description: "Environment"}}, nil).Once()
+
+			vl1, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, vl1[0], vl2[0], "Expected to get the objects from the cache.")
+
+			envs.On("Create", mock.Anything, mock.Anything).Return(&environments.Environment{ID: "envID2", SpaceID: spaceID, Description: "Environment2"}, nil).Once()
+			_, err = svc.Create(ctx, &environments.Environment{ID: "envID2", SpaceID: spaceID, Description: "Environment2"})
+			require.NoError(t, err)
+
+			envs.On("List", mock.Anything, spaceID).Return([]*environments.Environment{{ID: envID, SpaceID: spaceID, Description: "Environment"}, {ID: "envID2", SpaceID: spaceID, Description: "Environment2"}}, nil).Once()
+
+			vl3, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Len(t, vl3, 2, "Expected the objects to be removed from the cache and requested from the service again.")
+
+			envs.AssertExpectations(t)
+		})
+
+		t.Run("After size exceeded", func(t *testing.T) {
+			envs := &mocksenvironments.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(1, ttl))(envs)
+
+			envs.On("Get", mock.Anything, spaceID, "envID2").Return(&environments.Environment{ID: "envID2", SpaceID: spaceID, Description: "Environment2"}, nil).Once()
+			envs.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Description: "Environment"}, nil).Once()
+			envs.On("Get", mock.Anything, spaceID, "envID2").Return(&environments.Environment{ID: "envID2", SpaceID: spaceID, Description: "Environment2"}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, "envID2")
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, "envID2")
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Expected to get the object from the cache.")
+
+			_, err = svc.Get(ctx, spaceID, envID)
+			require.NoError(t, err)
+
+			v5, err := svc.Get(ctx, spaceID, "envID2")
+			require.NoError(t, err)
+			assert.NotSame(t, v2, v5, "Expected the object to be removed from the cache and requested from the service again.")
+
+			envs.AssertExpectations(t)
+		})
+
+		t.Run("After TTL expired", func(t *testing.T) {
+			envs := &mocksenvironments.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(envs)
+
+			envs.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Description: "Environment"}, nil).Once()
+			v1, err := svc.Get(ctx, spaceID, envID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Expected to get the object from the cache.")
+
+			time.Sleep(2 * ttl)
+
+			envs.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Description: "Environment"}, nil).Once()
+			v3, err := svc.Get(ctx, spaceID, envID)
+			require.NoError(t, err)
+			assert.NotSame(t, v2, v3, "Expected the object to be removed from the cache and requested from the service again.")
+
+			envs.AssertExpectations(t)
+		})
+	})
+}
diff --git a/pkg/environments/middleware/error_logging_middleware.go b/pkg/environments/middleware/error_logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..6d6cb544f129f2925d33be91f6666dcf8ebb8936
--- /dev/null
+++ b/pkg/environments/middleware/error_logging_middleware.go
@@ -0,0 +1,110 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/error_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/environments -i Environments -t ../../../assets/templates/middleware/error_log -o error_logging_middleware.go -l ""
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	"go.uber.org/zap"
+)
+
+// errorLoggingMiddleware implements environments.Environments that is instrumented with logging
+type errorLoggingMiddleware struct {
+	logger *zap.Logger
+	next   environments.Environments
+}
+
+// ErrorLoggingMiddleware instruments an implementation of the environments.Environments with simple logging
+func ErrorLoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next environments.Environments) environments.Environments {
+		return &errorLoggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *errorLoggingMiddleware) Create(ctx context.Context, env *environments.Environment) (created *environments.Environment, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Create(ctx, env)
+}
+
+func (m *errorLoggingMiddleware) Delete(ctx context.Context, spaceId string, envId string) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Delete(ctx, spaceId, envId)
+}
+
+func (m *errorLoggingMiddleware) Get(ctx context.Context, spaceId string, envId string) (env *environments.Environment, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Get(ctx, spaceId, envId)
+}
+
+func (m *errorLoggingMiddleware) List(ctx context.Context, spaceId string) (envs []*environments.Environment, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.List(ctx, spaceId)
+}
+
+func (m *errorLoggingMiddleware) Migrate(ctx context.Context, spaceId string, envId string, options ...*environments.MigrateOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Migrate(ctx, spaceId, envId, options...)
+}
+
+func (m *errorLoggingMiddleware) RemoveAlias(ctx context.Context, spaceId string, envId string, alias string) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.RemoveAlias(ctx, spaceId, envId, alias)
+}
+
+func (m *errorLoggingMiddleware) SetAlias(ctx context.Context, spaceId string, envId string, alias string) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.SetAlias(ctx, spaceId, envId, alias)
+}
+
+func (m *errorLoggingMiddleware) Update(ctx context.Context, env *environments.Environment) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Update(ctx, env)
+}
diff --git a/pkg/environments/middleware/logging_middleware.go b/pkg/environments/middleware/logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..26f4eb279126e6a008db8998b83012c8a316ca2f
--- /dev/null
+++ b/pkg/environments/middleware/logging_middleware.go
@@ -0,0 +1,325 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/access_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/environments -i Environments -t ../../../assets/templates/middleware/access_log -o logging_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/auth"
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	"go.uber.org/zap"
+	"go.uber.org/zap/zapcore"
+)
+
+// loggingMiddleware implements environments.Environments that is instrumented with logging
+type loggingMiddleware struct {
+	logger *zap.Logger
+	next   environments.Environments
+}
+
+// LoggingMiddleware instruments an implementation of the environments.Environments with simple logging
+func LoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next environments.Environments) environments.Environments {
+		return &loggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
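+
+// Chaining sketch (the order is a choice, not prescribed by this file): access
+// logging outermost, error logging next, then the real implementation. logger
+// is an existing *zap.Logger and envs any environments.Environments value.
+//
+//	envs = LoggingMiddleware(logger)(ErrorLoggingMiddleware(logger)(envs))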
+
+func (m *loggingMiddleware) Create(ctx context.Context, env *environments.Environment) (created *environments.Environment, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx": ctx,
+		"env": env} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Create.Request", fields...)
+
+	created, err = m.next.Create(ctx, env)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"created": created,
+		"err":     err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Create.Response", fields...)
+
+	return created, err
+}
+
+func (m *loggingMiddleware) Delete(ctx context.Context, spaceId string, envId string) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId,
+		"envId":   envId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Delete.Request", fields...)
+
+	err = m.next.Delete(ctx, spaceId, envId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Delete.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Get(ctx context.Context, spaceId string, envId string) (env *environments.Environment, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId,
+		"envId":   envId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Request", fields...)
+
+	env, err = m.next.Get(ctx, spaceId, envId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"env": env,
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Response", fields...)
+
+	return env, err
+}
+
+func (m *loggingMiddleware) List(ctx context.Context, spaceId string) (envs []*environments.Environment, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("List.Request", fields...)
+
+	envs, err = m.next.List(ctx, spaceId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"envs": envs,
+		"err":  err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("List.Response", fields...)
+
+	return envs, err
+}
+
+func (m *loggingMiddleware) Migrate(ctx context.Context, spaceId string, envId string, options ...*environments.MigrateOptions) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId,
+		"envId":   envId,
+		"options": options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Migrate.Request", fields...)
+
+	err = m.next.Migrate(ctx, spaceId, envId, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Migrate.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) RemoveAlias(ctx context.Context, spaceId string, envId string, alias string) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId,
+		"envId":   envId,
+		"alias":   alias} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("RemoveAlias.Request", fields...)
+
+	err = m.next.RemoveAlias(ctx, spaceId, envId, alias)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("RemoveAlias.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) SetAlias(ctx context.Context, spaceId string, envId string, alias string) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId,
+		"envId":   envId,
+		"alias":   alias} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("SetAlias.Request", fields...)
+
+	err = m.next.SetAlias(ctx, spaceId, envId, alias)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("SetAlias.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Update(ctx context.Context, env *environments.Environment) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx": ctx,
+		"env": env} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Update.Request", fields...)
+
+	err = m.next.Update(ctx, env)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Update.Response", fields...)
+
+	return err
+}
diff --git a/pkg/environments/middleware/middleware.go b/pkg/environments/middleware/middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..b5e29a99db170df7d2cc5003b2d9a206a423fef2
--- /dev/null
+++ b/pkg/environments/middleware/middleware.go
@@ -0,0 +1,28 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/middleware
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/environments -i Environments -t ../../../assets/templates/middleware/middleware -o middleware.go -l ""
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	"go.uber.org/zap"
+)
+
+type Middleware func(environments.Environments) environments.Environments
+
+func WithLog(s environments.Environments, logger *zap.Logger, log_access bool) environments.Environments {
+	if logger == nil {
+		logger = zap.NewNop()
+	}
+
+	logger = logger.Named("Environments")
+	s = ErrorLoggingMiddleware(logger)(s)
+	if log_access {
+		s = LoggingMiddleware(logger)(s)
+	}
+	s = RecoveringMiddleware(logger)(s)
+	return s
+}
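
Usage sketch (not part of the generated files): WithLog composes the three wrappers so that panic recovery is outermost, access logging (when enabled) sits in the middle, and error logging runs closest to the wrapped service. The package name, import alias and wire function below are hypothetical.

package example

import (
	"git.perx.ru/perxis/perxis-go/pkg/environments"
	envmw "git.perx.ru/perxis/perxis-go/pkg/environments/middleware"
	"go.uber.org/zap"
)

// wire applies the full chain: recovery (outermost), access logging, error logging, service.
func wire(svc environments.Environments, logger *zap.Logger) environments.Environments {
	return envmw.WithLog(svc, logger, true) // true enables request/response logging
}
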
diff --git a/pkg/environments/middleware/recovering_middleware.go b/pkg/environments/middleware/recovering_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..bf4bec7d46fa8b3e963c65330c6883c9c7fb0c20
--- /dev/null
+++ b/pkg/environments/middleware/recovering_middleware.go
@@ -0,0 +1,127 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/recovery
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/environments -i Environments -t ../../../assets/templates/middleware/recovery -o recovering_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	"go.uber.org/zap"
+)
+
+// recoveringMiddleware implements environments.Environments that is instrumented with logging
+type recoveringMiddleware struct {
+	logger *zap.Logger
+	next   environments.Environments
+}
+
+// RecoveringMiddleware instruments an implementation of the environments.Environments with simple logging
+func RecoveringMiddleware(logger *zap.Logger) Middleware {
+	return func(next environments.Environments) environments.Environments {
+		return &recoveringMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *recoveringMiddleware) Create(ctx context.Context, env *environments.Environment) (created *environments.Environment, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Create(ctx, env)
+}
+
+func (m *recoveringMiddleware) Delete(ctx context.Context, spaceId string, envId string) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Delete(ctx, spaceId, envId)
+}
+
+func (m *recoveringMiddleware) Get(ctx context.Context, spaceId string, envId string) (env *environments.Environment, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Get(ctx, spaceId, envId)
+}
+
+func (m *recoveringMiddleware) List(ctx context.Context, spaceId string) (envs []*environments.Environment, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.List(ctx, spaceId)
+}
+
+func (m *recoveringMiddleware) Migrate(ctx context.Context, spaceId string, envId string, options ...*environments.MigrateOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Migrate(ctx, spaceId, envId, options...)
+}
+
+func (m *recoveringMiddleware) RemoveAlias(ctx context.Context, spaceId string, envId string, alias string) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.RemoveAlias(ctx, spaceId, envId, alias)
+}
+
+func (m *recoveringMiddleware) SetAlias(ctx context.Context, spaceId string, envId string, alias string) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.SetAlias(ctx, spaceId, envId, alias)
+}
+
+func (m *recoveringMiddleware) Update(ctx context.Context, env *environments.Environment) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Update(ctx, env)
+}
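
Behavioural sketch: a panic inside the wrapped implementation is logged and returned as an ordinary error instead of reaching the caller. The panicking stub is hypothetical and is assumed to live in the same `service` package as the middleware, so the imports above apply.

// panickingEnvs embeds the interface so only Update needs a body for this illustration.
type panickingEnvs struct{ environments.Environments }

func (panickingEnvs) Update(ctx context.Context, env *environments.Environment) error {
	panic("boom")
}

func exampleRecover(logger *zap.Logger) {
	svc := RecoveringMiddleware(logger)(panickingEnvs{})
	err := svc.Update(context.Background(), &environments.Environment{})
	fmt.Println(err) // prints "boom": the panic was converted into an error
}
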
diff --git a/pkg/environments/mocks/Environments.go b/pkg/environments/mocks/Environments.go
new file mode 100644
index 0000000000000000000000000000000000000000..8c8d099f32cbcbd7cd1edf377b3d46e6756a1d72
--- /dev/null
+++ b/pkg/environments/mocks/Environments.go
@@ -0,0 +1,176 @@
+// Code generated by mockery v2.14.0. DO NOT EDIT.
+
+package mocks
+
+import (
+	context "context"
+
+	environments "git.perx.ru/perxis/perxis-go/pkg/environments"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// Environments is an autogenerated mock type for the Environments type
+type Environments struct {
+	mock.Mock
+}
+
+// Create provides a mock function with given fields: ctx, env
+func (_m *Environments) Create(ctx context.Context, env *environments.Environment) (*environments.Environment, error) {
+	ret := _m.Called(ctx, env)
+
+	var r0 *environments.Environment
+	if rf, ok := ret.Get(0).(func(context.Context, *environments.Environment) *environments.Environment); ok {
+		r0 = rf(ctx, env)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*environments.Environment)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, *environments.Environment) error); ok {
+		r1 = rf(ctx, env)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Delete provides a mock function with given fields: ctx, spaceId, envId
+func (_m *Environments) Delete(ctx context.Context, spaceId string, envId string) error {
+	ret := _m.Called(ctx, spaceId, envId)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok {
+		r0 = rf(ctx, spaceId, envId)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Get provides a mock function with given fields: ctx, spaceId, envId
+func (_m *Environments) Get(ctx context.Context, spaceId string, envId string) (*environments.Environment, error) {
+	ret := _m.Called(ctx, spaceId, envId)
+
+	var r0 *environments.Environment
+	if rf, ok := ret.Get(0).(func(context.Context, string, string) *environments.Environment); ok {
+		r0 = rf(ctx, spaceId, envId)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*environments.Environment)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok {
+		r1 = rf(ctx, spaceId, envId)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// List provides a mock function with given fields: ctx, spaceId
+func (_m *Environments) List(ctx context.Context, spaceId string) ([]*environments.Environment, error) {
+	ret := _m.Called(ctx, spaceId)
+
+	var r0 []*environments.Environment
+	if rf, ok := ret.Get(0).(func(context.Context, string) []*environments.Environment); ok {
+		r0 = rf(ctx, spaceId)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*environments.Environment)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string) error); ok {
+		r1 = rf(ctx, spaceId)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Migrate provides a mock function with given fields: ctx, spaceId, envId, options
+func (_m *Environments) Migrate(ctx context.Context, spaceId string, envId string, options ...*environments.MigrateOptions) error {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, ...*environments.MigrateOptions) error); ok {
+		r0 = rf(ctx, spaceId, envId, options...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// RemoveAlias provides a mock function with given fields: ctx, spaceId, envId, alias
+func (_m *Environments) RemoveAlias(ctx context.Context, spaceId string, envId string, alias string) error {
+	ret := _m.Called(ctx, spaceId, envId, alias)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string) error); ok {
+		r0 = rf(ctx, spaceId, envId, alias)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// SetAlias provides a mock function with given fields: ctx, spaceId, envId, alias
+func (_m *Environments) SetAlias(ctx context.Context, spaceId string, envId string, alias string) error {
+	ret := _m.Called(ctx, spaceId, envId, alias)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string) error); ok {
+		r0 = rf(ctx, spaceId, envId, alias)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Update provides a mock function with given fields: ctx, env
+func (_m *Environments) Update(ctx context.Context, env *environments.Environment) error {
+	ret := _m.Called(ctx, env)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *environments.Environment) error); ok {
+		r0 = rf(ctx, env)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+type mockConstructorTestingTNewEnvironments interface {
+	mock.TestingT
+	Cleanup(func())
+}
+
+// NewEnvironments creates a new instance of Environments. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+func NewEnvironments(t mockConstructorTestingTNewEnvironments) *Environments {
+	mock := &Environments{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
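
Test sketch using the generated mock (standard mockery/testify pattern; imports of testing, context, testify's mock and require packages are omitted, and the values are illustrative):

func TestGetEnvironment(t *testing.T) {
	want := &environments.Environment{ID: "master", SpaceID: "space-1"}

	m := mocks.NewEnvironments(t) // expectations are asserted automatically via t.Cleanup
	m.On("Get", mock.Anything, "space-1", "master").Return(want, nil).Once()

	got, err := m.Get(context.Background(), "space-1", "master")
	require.NoError(t, err)
	require.Equal(t, want, got)
}
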
diff --git a/pkg/environments/options.go b/pkg/environments/options.go
new file mode 100644
index 0000000000000000000000000000000000000000..02f1a1fea3229ccfe702401c2dfa06e5094d9671
--- /dev/null
+++ b/pkg/environments/options.go
@@ -0,0 +1,37 @@
+package environments
+
+type MigrateOptions struct {
+
+	// Wait for the migration to complete (synchronous mode)
+	Wait bool
+}
+
+func MergeMigrateOptions(opts ...*MigrateOptions) *MigrateOptions {
+	o := &MigrateOptions{}
+	for _, opt := range opts {
+		if opt != nil && opt.Wait {
+			o.Wait = true
+		}
+	}
+	return o
+}
+
+type UpdateOptions struct {
+
+	// The state will be updated only if the specified condition holds
+	// Cond is specified using the `expr` syntax
+	Cond string
+}
+
+func MergeUpdateOptions(opts ...*UpdateOptions) *UpdateOptions {
+	o := &UpdateOptions{}
+	for _, opt := range opts {
+		if opt != nil && opt.Cond != "" {
+			if o.Cond != "" {
+				o.Cond += " && "
+			}
+			o.Cond += opt.Cond
+		}
+	}
+	return o
+}
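
Merge semantics in short: Wait is OR-ed across the passed options, and non-empty Cond expressions are joined with "&&". A worked sketch in the same package (the condition strings are hypothetical expr expressions):

func exampleMergeOptions() {
	m := MergeMigrateOptions(&MigrateOptions{}, &MigrateOptions{Wait: true})
	_ = m.Wait // true

	u := MergeUpdateOptions(
		&UpdateOptions{Cond: `env.ID == "master"`},
		&UpdateOptions{Cond: `space.ID != ""`},
	)
	_ = u.Cond // env.ID == "master" && space.ID != ""
}
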
diff --git a/pkg/environments/service.go b/pkg/environments/service.go
new file mode 100644
index 0000000000000000000000000000000000000000..cd0a37f449819a4b92239f8d8b38a3a0036ec8d8
--- /dev/null
+++ b/pkg/environments/service.go
@@ -0,0 +1,20 @@
+package environments
+
+import (
+	"context"
+)
+
+// Environments is the service for managing space environments
+// @microgen grpc
+// @protobuf git.perx.ru/perxis/perxis-go/proto/environments
+// @grpc-addr content.environments.Environments
+type Environments interface {
+	Create(ctx context.Context, env *Environment) (created *Environment, err error)
+	Get(ctx context.Context, spaceId, envId string) (env *Environment, err error)
+	List(ctx context.Context, spaceId string) (envs []*Environment, err error)
+	Update(ctx context.Context, env *Environment) (err error)
+	Delete(ctx context.Context, spaceId, envId string) (err error)
+	SetAlias(ctx context.Context, spaceId, envId, alias string) (err error)
+	RemoveAlias(ctx context.Context, spaceId, envId, alias string) (err error)
+	Migrate(ctx context.Context, spaceId, envId string, options ...*MigrateOptions) (err error)
+}
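
Illustrative call sequence against the interface (a sketch only; the "staging"/"live" names and the promote workflow are assumptions, not part of the service contract):

func promote(ctx context.Context, svc Environments, spaceId string) error {
	env, err := svc.Get(ctx, spaceId, "staging")
	if err != nil {
		return err
	}
	// Re-point the "live" alias and run migrations, waiting for completion.
	if err := svc.SetAlias(ctx, spaceId, env.ID, "live"); err != nil {
		return err
	}
	return svc.Migrate(ctx, spaceId, env.ID, &MigrateOptions{Wait: true})
}
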
diff --git a/pkg/environments/transport/client.microgen.go b/pkg/environments/transport/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..96094fda943e407d0264a8abc738fd5c3afe440e
--- /dev/null
+++ b/pkg/environments/transport/client.microgen.go
@@ -0,0 +1,126 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+	"errors"
+
+	environments "git.perx.ru/perxis/perxis-go/pkg/environments"
+	codes "google.golang.org/grpc/codes"
+	status "google.golang.org/grpc/status"
+)
+
+func (set EndpointsSet) Create(arg0 context.Context, arg1 *environments.Environment) (res0 *environments.Environment, res1 error) {
+	request := CreateRequest{Env: arg1}
+	response, res1 := set.CreateEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*CreateResponse).Created, res1
+}
+
+func (set EndpointsSet) Get(arg0 context.Context, arg1 string, arg2 string) (res0 *environments.Environment, res1 error) {
+	request := GetRequest{
+		EnvId:   arg2,
+		SpaceId: arg1,
+	}
+	response, res1 := set.GetEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*GetResponse).Env, res1
+}
+
+func (set EndpointsSet) List(arg0 context.Context, arg1 string) (res0 []*environments.Environment, res1 error) {
+	request := ListRequest{SpaceId: arg1}
+	response, res1 := set.ListEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*ListResponse).Envs, res1
+}
+
+func (set EndpointsSet) Update(arg0 context.Context, arg1 *environments.Environment) (res0 error) {
+	request := UpdateRequest{Env: arg1}
+	_, res0 = set.UpdateEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Delete(arg0 context.Context, arg1 string, arg2 string) (res0 error) {
+	request := DeleteRequest{
+		EnvId:   arg2,
+		SpaceId: arg1,
+	}
+	_, res0 = set.DeleteEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) SetAlias(arg0 context.Context, arg1 string, arg2 string, arg3 string) (res0 error) {
+	request := SetAliasRequest{
+		Alias:   arg3,
+		EnvId:   arg2,
+		SpaceId: arg1,
+	}
+	_, res0 = set.SetAliasEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) RemoveAlias(arg0 context.Context, arg1 string, arg2 string, arg3 string) (res0 error) {
+	request := RemoveAliasRequest{
+		Alias:   arg3,
+		EnvId:   arg2,
+		SpaceId: arg1,
+	}
+	_, res0 = set.RemoveAliasEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Migrate(arg0 context.Context, arg1 string, arg2 string, arg3 ...*environments.MigrateOptions) (res0 error) {
+	request := MigrateRequest{
+		EnvId:   arg2,
+		Options: arg3,
+		SpaceId: arg1,
+	}
+	_, res0 = set.MigrateEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
diff --git a/pkg/environments/transport/endpoints.microgen.go b/pkg/environments/transport/endpoints.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..5639637d2b479721e21d5c3fe4ea097d2f2a6cde
--- /dev/null
+++ b/pkg/environments/transport/endpoints.microgen.go
@@ -0,0 +1,17 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import endpoint "github.com/go-kit/kit/endpoint"
+
+// EndpointsSet implements the Environments API and is used for transport purposes.
+type EndpointsSet struct {
+	CreateEndpoint      endpoint.Endpoint
+	GetEndpoint         endpoint.Endpoint
+	ListEndpoint        endpoint.Endpoint
+	UpdateEndpoint      endpoint.Endpoint
+	DeleteEndpoint      endpoint.Endpoint
+	SetAliasEndpoint    endpoint.Endpoint
+	RemoveAliasEndpoint endpoint.Endpoint
+	MigrateEndpoint     endpoint.Endpoint
+}
diff --git a/pkg/environments/transport/exchanges.microgen.go b/pkg/environments/transport/exchanges.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..a1a0ab068d575767c543d8db652c5d8c9a7e02f4
--- /dev/null
+++ b/pkg/environments/transport/exchanges.microgen.go
@@ -0,0 +1,66 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import environments "git.perx.ru/perxis/perxis-go/pkg/environments"
+
+type (
+	CreateRequest struct {
+		Env *environments.Environment `json:"env"`
+	}
+	CreateResponse struct {
+		Created *environments.Environment `json:"created"`
+	}
+
+	GetRequest struct {
+		SpaceId string `json:"space_id"`
+		EnvId   string `json:"env_id"`
+	}
+	GetResponse struct {
+		Env *environments.Environment `json:"env"`
+	}
+
+	ListRequest struct {
+		SpaceId string `json:"space_id"`
+	}
+	ListResponse struct {
+		Envs []*environments.Environment `json:"envs"`
+	}
+
+	UpdateRequest struct {
+		Env *environments.Environment `json:"env"`
+	}
+	// Formal exchange type, please do not delete.
+	UpdateResponse struct{}
+
+	DeleteRequest struct {
+		SpaceId string `json:"space_id"`
+		EnvId   string `json:"env_id"`
+	}
+	// Formal exchange type, please do not delete.
+	DeleteResponse struct{}
+
+	SetAliasRequest struct {
+		SpaceId string `json:"space_id"`
+		EnvId   string `json:"env_id"`
+		Alias   string `json:"alias"`
+	}
+	// Formal exchange type, please do not delete.
+	SetAliasResponse struct{}
+
+	RemoveAliasRequest struct {
+		SpaceId string `json:"space_id"`
+		EnvId   string `json:"env_id"`
+		Alias   string `json:"alias"`
+	}
+	// Formal exchange type, please do not delete.
+	RemoveAliasResponse struct{}
+
+	MigrateRequest struct {
+		SpaceId string                         `json:"space_id"`
+		EnvId   string                         `json:"env_id"`
+		Options []*environments.MigrateOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	// Formal exchange type, please do not delete.
+	MigrateResponse struct{}
+)
diff --git a/pkg/environments/transport/grpc/client.microgen.go b/pkg/environments/transport/grpc/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..34e0fa4db1cf8149475dde562b3179091089f70f
--- /dev/null
+++ b/pkg/environments/transport/grpc/client.microgen.go
@@ -0,0 +1,75 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/environments/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/environments"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	grpc "google.golang.org/grpc"
+)
+
+func NewGRPCClient(conn *grpc.ClientConn, addr string, opts ...grpckit.ClientOption) transport.EndpointsSet {
+	if addr == "" {
+		addr = "content.environments.Environments"
+	}
+	return transport.EndpointsSet{
+		CreateEndpoint: grpckit.NewClient(
+			conn, addr, "Create",
+			_Encode_Create_Request,
+			_Decode_Create_Response,
+			pb.CreateResponse{},
+			opts...,
+		).Endpoint(),
+		DeleteEndpoint: grpckit.NewClient(
+			conn, addr, "Delete",
+			_Encode_Delete_Request,
+			_Decode_Delete_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		GetEndpoint: grpckit.NewClient(
+			conn, addr, "Get",
+			_Encode_Get_Request,
+			_Decode_Get_Response,
+			pb.GetResponse{},
+			opts...,
+		).Endpoint(),
+		ListEndpoint: grpckit.NewClient(
+			conn, addr, "List",
+			_Encode_List_Request,
+			_Decode_List_Response,
+			pb.ListResponse{},
+			opts...,
+		).Endpoint(),
+		MigrateEndpoint: grpckit.NewClient(
+			conn, addr, "Migrate",
+			_Encode_Migrate_Request,
+			_Decode_Migrate_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		RemoveAliasEndpoint: grpckit.NewClient(
+			conn, addr, "RemoveAlias",
+			_Encode_RemoveAlias_Request,
+			_Decode_RemoveAlias_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		SetAliasEndpoint: grpckit.NewClient(
+			conn, addr, "SetAlias",
+			_Encode_SetAlias_Request,
+			_Decode_SetAlias_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		UpdateEndpoint: grpckit.NewClient(
+			conn, addr, "Update",
+			_Encode_Update_Request,
+			_Decode_Update_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+	}
+}
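
Client wiring sketch (imports omitted; the dial target is illustrative):

func listEnvironments(ctx context.Context, spaceID string) ([]*environments.Environment, error) {
	conn, err := grpc.Dial("localhost:9000", grpc.WithInsecure())
	if err != nil {
		return nil, err
	}
	defer conn.Close()

	// The returned EndpointsSet satisfies environments.Environments;
	// an empty addr falls back to "content.environments.Environments".
	var client environments.Environments = transportgrpc.NewGRPCClient(conn, "")
	return client.List(ctx, spaceID)
}
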
diff --git a/pkg/environments/transport/grpc/protobuf_endpoint_converters.microgen.go b/pkg/environments/transport/grpc/protobuf_endpoint_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..6411216cd4f54cedd9bf2a3c61ff77b98d70cc35
--- /dev/null
+++ b/pkg/environments/transport/grpc/protobuf_endpoint_converters.microgen.go
@@ -0,0 +1,307 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// Please, do not change functions names!
+package transportgrpc
+
+import (
+	"context"
+	"errors"
+
+	transport "git.perx.ru/perxis/perxis-go/pkg/environments/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/environments"
+	empty "github.com/golang/protobuf/ptypes/empty"
+)
+
+func _Encode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*transport.CreateRequest)
+	reqEnv, err := PtrEnvironmentToProto(req.Env)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateRequest{Env: reqEnv}, nil
+}
+
+func _Encode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*transport.GetRequest)
+	return &pb.GetRequest{
+		EnvId:   req.EnvId,
+		SpaceId: req.SpaceId,
+	}, nil
+}
+
+func _Encode_List_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListRequest")
+	}
+	req := request.(*transport.ListRequest)
+	return &pb.ListRequest{SpaceId: req.SpaceId}, nil
+}
+
+func _Encode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UpdateRequest")
+	}
+	req := request.(*transport.UpdateRequest)
+	reqEnv, err := PtrEnvironmentToProto(req.Env)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.UpdateRequest{Env: reqEnv}, nil
+}
+
+func _Encode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*transport.DeleteRequest)
+	return &pb.DeleteRequest{
+		EnvId:   req.EnvId,
+		SpaceId: req.SpaceId,
+	}, nil
+}
+
+func _Encode_SetAlias_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil SetAliasRequest")
+	}
+	req := request.(*transport.SetAliasRequest)
+	return &pb.SetAliasRequest{
+		Alias:   req.Alias,
+		EnvId:   req.EnvId,
+		SpaceId: req.SpaceId,
+	}, nil
+}
+
+func _Encode_RemoveAlias_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil RemoveAliasRequest")
+	}
+	req := request.(*transport.RemoveAliasRequest)
+	return &pb.RemoveAliasRequest{
+		Alias:   req.Alias,
+		EnvId:   req.EnvId,
+		SpaceId: req.SpaceId,
+	}, nil
+}
+
+func _Encode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*transport.CreateResponse)
+	respCreated, err := PtrEnvironmentToProto(resp.Created)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateResponse{Created: respCreated}, nil
+}
+
+func _Encode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*transport.GetResponse)
+	respEnv, err := PtrEnvironmentToProto(resp.Env)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetResponse{Env: respEnv}, nil
+}
+
+func _Encode_List_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListResponse")
+	}
+	resp := response.(*transport.ListResponse)
+	respEnvs, err := ListPtrEnvironmentToProto(resp.Envs)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.ListResponse{Envs: respEnvs}, nil
+}
+
+func _Encode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_SetAlias_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_RemoveAlias_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*pb.CreateRequest)
+	reqEnv, err := ProtoToPtrEnvironment(req.Env)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateRequest{Env: reqEnv}, nil
+}
+
+func _Decode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*pb.GetRequest)
+	return &transport.GetRequest{
+		EnvId:   string(req.EnvId),
+		SpaceId: string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_List_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListRequest")
+	}
+	req := request.(*pb.ListRequest)
+	return &transport.ListRequest{SpaceId: string(req.SpaceId)}, nil
+}
+
+func _Decode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UpdateRequest")
+	}
+	req := request.(*pb.UpdateRequest)
+	reqEnv, err := ProtoToPtrEnvironment(req.Env)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.UpdateRequest{Env: reqEnv}, nil
+}
+
+func _Decode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*pb.DeleteRequest)
+	return &transport.DeleteRequest{
+		EnvId:   string(req.EnvId),
+		SpaceId: string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_SetAlias_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil SetAliasRequest")
+	}
+	req := request.(*pb.SetAliasRequest)
+	return &transport.SetAliasRequest{
+		Alias:   string(req.Alias),
+		EnvId:   string(req.EnvId),
+		SpaceId: string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_RemoveAlias_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil RemoveAliasRequest")
+	}
+	req := request.(*pb.RemoveAliasRequest)
+	return &transport.RemoveAliasRequest{
+		Alias:   string(req.Alias),
+		EnvId:   string(req.EnvId),
+		SpaceId: string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*pb.CreateResponse)
+	respCreated, err := ProtoToPtrEnvironment(resp.Created)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateResponse{Created: respCreated}, nil
+}
+
+func _Decode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*pb.GetResponse)
+	respEnv, err := ProtoToPtrEnvironment(resp.Env)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetResponse{Env: respEnv}, nil
+}
+
+func _Decode_List_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListResponse")
+	}
+	resp := response.(*pb.ListResponse)
+	respEnvs, err := ProtoToListPtrEnvironment(resp.Envs)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.ListResponse{Envs: respEnvs}, nil
+}
+
+func _Decode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_SetAlias_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_RemoveAlias_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Migrate_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil MigrateRequest")
+	}
+	req := request.(*transport.MigrateRequest)
+	opts, _ := ElPtrMigrateOptionsToProto(req.Options)
+	return &pb.MigrateRequest{
+		EnvId:   req.EnvId,
+		SpaceId: req.SpaceId,
+		Options: opts,
+	}, nil
+}
+
+func _Encode_Migrate_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Migrate_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil MigrateRequest")
+	}
+	req := request.(*pb.MigrateRequest)
+	opts, _ := ProtoToElPtrMigrateOptions(req.Options)
+	return &transport.MigrateRequest{
+		EnvId:   string(req.EnvId),
+		SpaceId: string(req.SpaceId),
+		Options: opts,
+	}, nil
+}
+
+func _Decode_Migrate_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
diff --git a/pkg/environments/transport/grpc/protobuf_type_converters.microgen.go b/pkg/environments/transport/grpc/protobuf_type_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..5402c513bc7ecbf3a7dc3b4a291282eed03f554e
--- /dev/null
+++ b/pkg/environments/transport/grpc/protobuf_type_converters.microgen.go
@@ -0,0 +1,99 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// It is better for you if you do not change functions names!
+// This file will never be overwritten.
+package transportgrpc
+
+import (
+	pb "git.perx.ru/perxis/perxis-go/proto/environments"
+	service "git.perx.ru/perxis/perxis-go/pkg/environments"
+	"github.com/golang/protobuf/ptypes"
+)
+
+func PtrEnvironmentToProto(env *service.Environment) (*pb.Environment, error) {
+	if env == nil {
+		return nil, nil
+	}
+	protoEnvironment := &pb.Environment{
+		Id:          env.ID,
+		SpaceId:     env.SpaceID,
+		Description: env.Description,
+		Aliases:     env.Aliases,
+	}
+	if env.StateInfo != nil {
+		protoEnvironment.StateInfo = &pb.StateInfo{
+			State: pb.StateInfo_State(env.StateInfo.State),
+			Info:  env.StateInfo.Info,
+		}
+		protoEnvironment.StateInfo.StartedAt, _ = ptypes.TimestampProto(env.StateInfo.StartedAt)
+	}
+	if env.Config != nil {
+		protoEnvironment.Config = &pb.Config{
+			SourceId: env.Config.SourceID,
+			Features: env.Config.Features,
+		}
+	}
+	return protoEnvironment, nil
+}
+
+func ProtoToPtrEnvironment(protoEnv *pb.Environment) (*service.Environment, error) {
+	if protoEnv == nil {
+		return nil, nil
+	}
+	env := &service.Environment{
+		ID:          protoEnv.Id,
+		SpaceID:     protoEnv.SpaceId,
+		Description: protoEnv.Description,
+		Aliases:     protoEnv.Aliases,
+	}
+	if protoEnv.StateInfo != nil {
+		env.StateInfo = &service.StateInfo{
+			State: service.State(protoEnv.StateInfo.State),
+			Info:  protoEnv.StateInfo.Info,
+		}
+		env.StateInfo.StartedAt, _ = ptypes.Timestamp(protoEnv.StateInfo.StartedAt)
+	}
+	if protoEnv.Config != nil {
+		env.Config = &service.Config{
+			SourceID: protoEnv.Config.SourceId,
+			Features: protoEnv.Config.Features,
+		}
+	}
+	return env, nil
+}
+
+func ListPtrEnvironmentToProto(envs []*service.Environment) ([]*pb.Environment, error) {
+	protoEnvironments := make([]*pb.Environment, 0, len(envs))
+	for _, environment := range envs {
+		protoEnvironment, err := PtrEnvironmentToProto(environment)
+		if err != nil {
+			return nil, err
+		}
+		protoEnvironments = append(protoEnvironments, protoEnvironment)
+	}
+	return protoEnvironments, nil
+}
+
+func ProtoToListPtrEnvironment(protoEnvs []*pb.Environment) ([]*service.Environment, error) {
+	environments := make([]*service.Environment, 0, len(protoEnvs))
+	for _, protoEnvironment := range protoEnvs {
+		environment, err := ProtoToPtrEnvironment(protoEnvironment)
+		if err != nil {
+			return nil, err
+		}
+		environments = append(environments, environment)
+	}
+	return environments, nil
+}
+
+func ElPtrMigrateOptionsToProto(options []*service.MigrateOptions) (*pb.MigrateOptions, error) {
+	opts := service.MergeMigrateOptions(options...)
+	return &pb.MigrateOptions{Wait: opts.Wait}, nil
+}
+
+func ProtoToElPtrMigrateOptions(protoOptions *pb.MigrateOptions) ([]*service.MigrateOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+	return []*service.MigrateOptions{{Wait: protoOptions.Wait}}, nil
+}
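
The converters are symmetric; a quick round-trip sketch in the same package, with illustrative field values:

func exampleRoundTrip() {
	orig := &service.Environment{ID: "master", SpaceID: "space-1", Aliases: []string{"live"}}
	protoEnv, _ := PtrEnvironmentToProto(orig)
	back, _ := ProtoToPtrEnvironment(protoEnv)
	_ = back // matches orig field-for-field (ID, SpaceID, Description, Aliases, StateInfo, Config)
}
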
diff --git a/pkg/environments/transport/grpc/server.microgen.go b/pkg/environments/transport/grpc/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..1389d32692eaf27f5068073d330696d1ad221bd3
--- /dev/null
+++ b/pkg/environments/transport/grpc/server.microgen.go
@@ -0,0 +1,142 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// DO NOT EDIT.
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/environments/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/environments"
+	grpc "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	context "golang.org/x/net/context"
+)
+
+type environmentsServer struct {
+	create      grpc.Handler
+	get         grpc.Handler
+	list        grpc.Handler
+	update      grpc.Handler
+	delete      grpc.Handler
+	setAlias    grpc.Handler
+	removeAlias grpc.Handler
+	migrate     grpc.Handler
+
+	pb.UnimplementedEnvironmentsServer
+}
+
+func NewGRPCServer(endpoints *transport.EndpointsSet, opts ...grpc.ServerOption) pb.EnvironmentsServer {
+	return &environmentsServer{
+		create: grpc.NewServer(
+			endpoints.CreateEndpoint,
+			_Decode_Create_Request,
+			_Encode_Create_Response,
+			opts...,
+		),
+		delete: grpc.NewServer(
+			endpoints.DeleteEndpoint,
+			_Decode_Delete_Request,
+			_Encode_Delete_Response,
+			opts...,
+		),
+		get: grpc.NewServer(
+			endpoints.GetEndpoint,
+			_Decode_Get_Request,
+			_Encode_Get_Response,
+			opts...,
+		),
+		list: grpc.NewServer(
+			endpoints.ListEndpoint,
+			_Decode_List_Request,
+			_Encode_List_Response,
+			opts...,
+		),
+		migrate: grpc.NewServer(
+			endpoints.MigrateEndpoint,
+			_Decode_Migrate_Request,
+			_Encode_Migrate_Response,
+			opts...,
+		),
+		removeAlias: grpc.NewServer(
+			endpoints.RemoveAliasEndpoint,
+			_Decode_RemoveAlias_Request,
+			_Encode_RemoveAlias_Response,
+			opts...,
+		),
+		setAlias: grpc.NewServer(
+			endpoints.SetAliasEndpoint,
+			_Decode_SetAlias_Request,
+			_Encode_SetAlias_Response,
+			opts...,
+		),
+		update: grpc.NewServer(
+			endpoints.UpdateEndpoint,
+			_Decode_Update_Request,
+			_Encode_Update_Response,
+			opts...,
+		),
+	}
+}
+
+func (S *environmentsServer) Create(ctx context.Context, req *pb.CreateRequest) (*pb.CreateResponse, error) {
+	_, resp, err := S.create.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.CreateResponse), nil
+}
+
+func (S *environmentsServer) Get(ctx context.Context, req *pb.GetRequest) (*pb.GetResponse, error) {
+	_, resp, err := S.get.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetResponse), nil
+}
+
+func (S *environmentsServer) List(ctx context.Context, req *pb.ListRequest) (*pb.ListResponse, error) {
+	_, resp, err := S.list.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.ListResponse), nil
+}
+
+func (S *environmentsServer) Update(ctx context.Context, req *pb.UpdateRequest) (*empty.Empty, error) {
+	_, resp, err := S.update.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *environmentsServer) Delete(ctx context.Context, req *pb.DeleteRequest) (*empty.Empty, error) {
+	_, resp, err := S.delete.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *environmentsServer) SetAlias(ctx context.Context, req *pb.SetAliasRequest) (*empty.Empty, error) {
+	_, resp, err := S.setAlias.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *environmentsServer) RemoveAlias(ctx context.Context, req *pb.RemoveAliasRequest) (*empty.Empty, error) {
+	_, resp, err := S.removeAlias.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *environmentsServer) Migrate(ctx context.Context, req *pb.MigrateRequest) (*empty.Empty, error) {
+	_, resp, err := S.migrate.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
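
Server wiring sketch (the listener address and the service value are assumptions; grpc here is google.golang.org/grpc, imports omitted):

func serve(svc environments.Environments) error {
	lis, err := net.Listen("tcp", ":9000")
	if err != nil {
		return err
	}
	eps := transport.Endpoints(svc)
	srv := grpc.NewServer()
	pb.RegisterEnvironmentsServer(srv, transportgrpc.NewGRPCServer(&eps))
	return srv.Serve(lis)
}
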
diff --git a/pkg/environments/transport/server.microgen.go b/pkg/environments/transport/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..3a0b20a55cdf3aff82beb803efb59c5833e5afd7
--- /dev/null
+++ b/pkg/environments/transport/server.microgen.go
@@ -0,0 +1,88 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	endpoint "github.com/go-kit/kit/endpoint"
+)
+
+func Endpoints(svc environments.Environments) EndpointsSet {
+	return EndpointsSet{
+		CreateEndpoint:      CreateEndpoint(svc),
+		DeleteEndpoint:      DeleteEndpoint(svc),
+		GetEndpoint:         GetEndpoint(svc),
+		ListEndpoint:        ListEndpoint(svc),
+		MigrateEndpoint:     MigrateEndpoint(svc),
+		RemoveAliasEndpoint: RemoveAliasEndpoint(svc),
+		SetAliasEndpoint:    SetAliasEndpoint(svc),
+		UpdateEndpoint:      UpdateEndpoint(svc),
+	}
+}
+
+func CreateEndpoint(svc environments.Environments) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*CreateRequest)
+		res0, res1 := svc.Create(arg0, req.Env)
+		return &CreateResponse{Created: res0}, res1
+	}
+}
+
+func GetEndpoint(svc environments.Environments) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetRequest)
+		res0, res1 := svc.Get(arg0, req.SpaceId, req.EnvId)
+		return &GetResponse{Env: res0}, res1
+	}
+}
+
+func ListEndpoint(svc environments.Environments) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*ListRequest)
+		res0, res1 := svc.List(arg0, req.SpaceId)
+		return &ListResponse{Envs: res0}, res1
+	}
+}
+
+func UpdateEndpoint(svc environments.Environments) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*UpdateRequest)
+		res0 := svc.Update(arg0, req.Env)
+		return &UpdateResponse{}, res0
+	}
+}
+
+func DeleteEndpoint(svc environments.Environments) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*DeleteRequest)
+		res0 := svc.Delete(arg0, req.SpaceId, req.EnvId)
+		return &DeleteResponse{}, res0
+	}
+}
+
+func SetAliasEndpoint(svc environments.Environments) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*SetAliasRequest)
+		res0 := svc.SetAlias(arg0, req.SpaceId, req.EnvId, req.Alias)
+		return &SetAliasResponse{}, res0
+	}
+}
+
+func RemoveAliasEndpoint(svc environments.Environments) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*RemoveAliasRequest)
+		res0 := svc.RemoveAlias(arg0, req.SpaceId, req.EnvId, req.Alias)
+		return &RemoveAliasResponse{}, res0
+	}
+}
+
+func MigrateEndpoint(svc environments.Environments) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*MigrateRequest)
+		res0 := svc.Migrate(arg0, req.SpaceId, req.EnvId, req.Options...)
+		return &MigrateResponse{}, res0
+	}
+}
diff --git a/pkg/events/events.go b/pkg/events/events.go
new file mode 100644
index 0000000000000000000000000000000000000000..454f690a196b08782d8110b0c2c13b186853a776
--- /dev/null
+++ b/pkg/events/events.go
@@ -0,0 +1,55 @@
+package events
+
+type Subscription interface {
+	Unsubscribe() error
+}
+
+type Connection interface {
+	Publish(subject string, msg any, opts ...PublishOption) error
+	Subscribe(subject string, handler any, opts ...SubscribeOption) (Subscription, error)
+	Close() error
+}
+
+type PublishOptions struct {
+	Tags []string
+}
+
+func NewPublishOptions(opts ...PublishOption) *PublishOptions {
+	o := &PublishOptions{}
+	for _, opt := range opts {
+		if opt != nil {
+			opt(o)
+		}
+	}
+	return o
+}
+
+type PublishOption func(options *PublishOptions)
+
+func Tag(tag ...string) PublishOption {
+	return func(o *PublishOptions) {
+		o.Tags = tag
+	}
+}
+
+type SubscribeOptions struct {
+	FilterTags []string
+}
+
+func NewSubscribeOptions(opts ...SubscribeOption) *SubscribeOptions {
+	o := &SubscribeOptions{}
+	for _, opt := range opts {
+		if opt != nil {
+			opt(o)
+		}
+	}
+	return o
+}
+
+type SubscribeOption func(options *SubscribeOptions)
+
+func FilterTag(tag ...string) SubscribeOption {
+	return func(o *SubscribeOptions) {
+		o.FilterTags = tag
+	}
+}
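
Usage sketch for the Connection API (URL, subject and tag values are illustrative; the NATS-backed implementation follows in nats.go, imports omitted):

func exampleEvents() error {
	conn, err := events.Open("nats://localhost:4222", "perxis")
	if err != nil {
		return err
	}
	defer conn.Close()

	// Handlers that take *nats.Msg receive the raw message; FilterTag is intended
	// to restrict delivery to messages published with a matching tag.
	sub, err := conn.Subscribe("environments.updated", func(msg *nats.Msg) {
		log.Printf("event: %s", msg.Data)
	}, events.FilterTag("env"))
	if err != nil {
		return err
	}
	defer sub.Unsubscribe()

	return conn.Publish("environments.updated", []byte("master"), events.Tag("env"))
}
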
diff --git a/pkg/events/mocks/Connection.go b/pkg/events/mocks/Connection.go
new file mode 100644
index 0000000000000000000000000000000000000000..a295924662ba10243a788c219359d60720789759
--- /dev/null
+++ b/pkg/events/mocks/Connection.go
@@ -0,0 +1,96 @@
+// Code generated by mockery v2.20.0. DO NOT EDIT.
+
+package mocks
+
+import (
+	events "git.perx.ru/perxis/perxis-go/pkg/events"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// Connection is an autogenerated mock type for the Connection type
+type Connection struct {
+	mock.Mock
+}
+
+// Close provides a mock function with given fields:
+func (_m *Connection) Close() error {
+	ret := _m.Called()
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func() error); ok {
+		r0 = rf()
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Publish provides a mock function with given fields: subject, msg, opts
+func (_m *Connection) Publish(subject string, msg interface{}, opts ...events.PublishOption) error {
+	_va := make([]interface{}, len(opts))
+	for _i := range opts {
+		_va[_i] = opts[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, subject, msg)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(string, interface{}, ...events.PublishOption) error); ok {
+		r0 = rf(subject, msg, opts...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Subscribe provides a mock function with given fields: subject, handler, opts
+func (_m *Connection) Subscribe(subject string, handler interface{}, opts ...events.SubscribeOption) (events.Subscription, error) {
+	_va := make([]interface{}, len(opts))
+	for _i := range opts {
+		_va[_i] = opts[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, subject, handler)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 events.Subscription
+	var r1 error
+	if rf, ok := ret.Get(0).(func(string, interface{}, ...events.SubscribeOption) (events.Subscription, error)); ok {
+		return rf(subject, handler, opts...)
+	}
+	if rf, ok := ret.Get(0).(func(string, interface{}, ...events.SubscribeOption) events.Subscription); ok {
+		r0 = rf(subject, handler, opts...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(events.Subscription)
+		}
+	}
+
+	if rf, ok := ret.Get(1).(func(string, interface{}, ...events.SubscribeOption) error); ok {
+		r1 = rf(subject, handler, opts...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+type mockConstructorTestingTNewConnection interface {
+	mock.TestingT
+	Cleanup(func())
+}
+
+// NewConnection creates a new instance of Connection. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+func NewConnection(t mockConstructorTestingTNewConnection) *Connection {
+	mock := &Connection{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
diff --git a/pkg/events/mocks/MsgFilter.go b/pkg/events/mocks/MsgFilter.go
new file mode 100644
index 0000000000000000000000000000000000000000..8e1340743309bfc3097e478e7aac7f1880bfb157
--- /dev/null
+++ b/pkg/events/mocks/MsgFilter.go
@@ -0,0 +1,44 @@
+// Code generated by mockery v2.20.0. DO NOT EDIT.
+
+package mocks
+
+import (
+	nats "github.com/nats-io/nats.go"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// MsgFilter is an autogenerated mock type for the MsgFilter type
+type MsgFilter struct {
+	mock.Mock
+}
+
+// Execute provides a mock function with given fields: _a0
+func (_m *MsgFilter) Execute(_a0 *nats.Msg) *nats.Msg {
+	ret := _m.Called(_a0)
+
+	var r0 *nats.Msg
+	if rf, ok := ret.Get(0).(func(*nats.Msg) *nats.Msg); ok {
+		r0 = rf(_a0)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*nats.Msg)
+		}
+	}
+
+	return r0
+}
+
+type mockConstructorTestingTNewMsgFilter interface {
+	mock.TestingT
+	Cleanup(func())
+}
+
+// NewMsgFilter creates a new instance of MsgFilter. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+func NewMsgFilter(t mockConstructorTestingTNewMsgFilter) *MsgFilter {
+	mock := &MsgFilter{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
diff --git a/pkg/events/mocks/ProtoEncoder.go b/pkg/events/mocks/ProtoEncoder.go
new file mode 100644
index 0000000000000000000000000000000000000000..f0916a3c720b41de88ba029c2d44e23bffbb42c7
--- /dev/null
+++ b/pkg/events/mocks/ProtoEncoder.go
@@ -0,0 +1,68 @@
+// Code generated by mockery v2.20.0. DO NOT EDIT.
+
+package mocks
+
+import (
+	mock "github.com/stretchr/testify/mock"
+	protoiface "google.golang.org/protobuf/runtime/protoiface"
+)
+
+// ProtoEncoder is an autogenerated mock type for the ProtoEncoder type
+type ProtoEncoder struct {
+	mock.Mock
+}
+
+// FromProto provides a mock function with given fields: message
+func (_m *ProtoEncoder) FromProto(message protoiface.MessageV1) error {
+	ret := _m.Called(message)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(protoiface.MessageV1) error); ok {
+		r0 = rf(message)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// ToProto provides a mock function with given fields:
+func (_m *ProtoEncoder) ToProto() (protoiface.MessageV1, error) {
+	ret := _m.Called()
+
+	var r0 protoiface.MessageV1
+	var r1 error
+	if rf, ok := ret.Get(0).(func() (protoiface.MessageV1, error)); ok {
+		return rf()
+	}
+	if rf, ok := ret.Get(0).(func() protoiface.MessageV1); ok {
+		r0 = rf()
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(protoiface.MessageV1)
+		}
+	}
+
+	if rf, ok := ret.Get(1).(func() error); ok {
+		r1 = rf()
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+type mockConstructorTestingTNewProtoEncoder interface {
+	mock.TestingT
+	Cleanup(func())
+}
+
+// NewProtoEncoder creates a new instance of ProtoEncoder. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+func NewProtoEncoder(t mockConstructorTestingTNewProtoEncoder) *ProtoEncoder {
+	mock := &ProtoEncoder{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
diff --git a/pkg/events/mocks/PublishOption.go b/pkg/events/mocks/PublishOption.go
new file mode 100644
index 0000000000000000000000000000000000000000..f3517b7602f5b9714cc9de98ac6413bf324f91e8
--- /dev/null
+++ b/pkg/events/mocks/PublishOption.go
@@ -0,0 +1,33 @@
+// Code generated by mockery v2.20.0. DO NOT EDIT.
+
+package mocks
+
+import (
+	events "git.perx.ru/perxis/perxis-go/pkg/events"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// PublishOption is an autogenerated mock type for the PublishOption type
+type PublishOption struct {
+	mock.Mock
+}
+
+// Execute provides a mock function with given fields: options
+func (_m *PublishOption) Execute(options *events.PublishOptions) {
+	_m.Called(options)
+}
+
+type mockConstructorTestingTNewPublishOption interface {
+	mock.TestingT
+	Cleanup(func())
+}
+
+// NewPublishOption creates a new instance of PublishOption. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+func NewPublishOption(t mockConstructorTestingTNewPublishOption) *PublishOption {
+	mock := &PublishOption{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
diff --git a/pkg/events/mocks/SubscribeOption.go b/pkg/events/mocks/SubscribeOption.go
new file mode 100644
index 0000000000000000000000000000000000000000..5b2a9449f517c4d0881a53a64194139d50203961
--- /dev/null
+++ b/pkg/events/mocks/SubscribeOption.go
@@ -0,0 +1,33 @@
+// Code generated by mockery v2.20.0. DO NOT EDIT.
+
+package mocks
+
+import (
+	events "git.perx.ru/perxis/perxis-go/pkg/events"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// SubscribeOption is an autogenerated mock type for the SubscribeOption type
+type SubscribeOption struct {
+	mock.Mock
+}
+
+// Execute provides a mock function with given fields: options
+func (_m *SubscribeOption) Execute(options *events.SubscribeOptions) {
+	_m.Called(options)
+}
+
+type mockConstructorTestingTNewSubscribeOption interface {
+	mock.TestingT
+	Cleanup(func())
+}
+
+// NewSubscribeOption creates a new instance of SubscribeOption. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+func NewSubscribeOption(t mockConstructorTestingTNewSubscribeOption) *SubscribeOption {
+	mock := &SubscribeOption{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
diff --git a/pkg/events/mocks/Subscription.go b/pkg/events/mocks/Subscription.go
new file mode 100644
index 0000000000000000000000000000000000000000..b43ed0fa5e55b3d026a6d682b44facb0f60c1ed3
--- /dev/null
+++ b/pkg/events/mocks/Subscription.go
@@ -0,0 +1,39 @@
+// Code generated by mockery v2.20.0. DO NOT EDIT.
+
+package mocks
+
+import mock "github.com/stretchr/testify/mock"
+
+// Subscription is an autogenerated mock type for the Subscription type
+type Subscription struct {
+	mock.Mock
+}
+
+// Unsubscribe provides a mock function with given fields:
+func (_m *Subscription) Unsubscribe() error {
+	ret := _m.Called()
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func() error); ok {
+		r0 = rf()
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+type mockConstructorTestingTNewSubscription interface {
+	mock.TestingT
+	Cleanup(func())
+}
+
+// NewSubscription creates a new instance of Subscription. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+func NewSubscription(t mockConstructorTestingTNewSubscription) *Subscription {
+	mock := &Subscription{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
diff --git a/pkg/events/nats.go b/pkg/events/nats.go
new file mode 100644
index 0000000000000000000000000000000000000000..4540a48216432f2332c4a9913051b3d8934127ed
--- /dev/null
+++ b/pkg/events/nats.go
@@ -0,0 +1,197 @@
+package events
+
+import (
+	"reflect"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"github.com/nats-io/nats.go"
+)
+
+type natsConnection struct {
+	Conn *nats.Conn
+	enc  nats.Encoder
+	// prefix prepended to all subjects
+	prefix string
+}
+
+func Open(url string, prefix string) (Connection, error) {
+	var err error
+	b := new(natsConnection)
+	b.Conn, err = nats.Connect(url)
+	if err != nil {
+		return nil, err
+	}
+	b.enc = &ProtobufEncoder{}
+	b.prefix = prefix
+	return b, nil
+}
+
+func (c *natsConnection) getSubject(subject string) string {
+	if c.prefix != "" {
+		subject = c.prefix + "." + subject
+	}
+	return subject
+}
+
+func (c *natsConnection) Publish(subject string, msg any, opts ...PublishOption) error {
+	m := &nats.Msg{Subject: c.getSubject(subject)}
+	switch v := msg.(type) {
+	case *nats.Msg:
+		m = v
+	case []byte:
+		m.Data = v
+	default:
+		data, err := c.enc.Encode(subject, v)
+		if err != nil {
+			return err
+		}
+		m.Data = data
+	}
+
+	filters := PublishFilters(NewPublishOptions(opts...))
+	if len(filters) > 0 {
+		for _, f := range filters {
+			if m = f(m); m == nil {
+				return nil
+			}
+		}
+	}
+
+	return c.Conn.PublishMsg(m)
+}
+
+func (c *natsConnection) Subscribe(subject string, handler any, opts ...SubscribeOption) (Subscription, error) {
+
+	subject = c.getSubject(subject)
+	return c.subscribe(subject, handler, SubscribeFilters(NewSubscribeOptions(opts...)))
+}
+
+func (c *natsConnection) Close() (err error) {
+	if err = c.Conn.Drain(); err != nil {
+		return err
+	}
+	c.Conn.Close()
+	return
+}
+
+// Dissect the cb Handler's signature
+func argInfo(cb nats.Handler) (reflect.Type, int) {
+	cbType := reflect.TypeOf(cb)
+	if cbType.Kind() != reflect.Func {
+		panic("handler needs to be a func")
+	}
+	numArgs := cbType.NumIn()
+	if numArgs == 0 {
+		return nil, numArgs
+	}
+	return cbType.In(numArgs - 1), numArgs
+}
+
+var emptyMsgType = reflect.TypeOf(&nats.Msg{})
+
+type MsgFilter func(*nats.Msg) *nats.Msg
+
+// Internal implementation that all public functions will use.
+func (c *natsConnection) subscribe(subject string, cb nats.Handler, filters []MsgFilter) (*nats.Subscription, error) {
+	if cb == nil {
+		return nil, errors.New("handler required for subscription")
+	}
+	argType, numArgs := argInfo(cb)
+	if argType == nil {
+		return nil, errors.New("handler requires at least one argument")
+	}
+
+	cbValue := reflect.ValueOf(cb)
+	wantsRaw := (argType == emptyMsgType)
+
+	natsCB := func(m *nats.Msg) {
+		if len(filters) > 0 {
+			for _, f := range filters {
+				if m = f(m); m == nil {
+					return
+				}
+			}
+		}
+
+		var oV []reflect.Value
+		if wantsRaw {
+			oV = []reflect.Value{reflect.ValueOf(m)}
+		} else {
+			var oPtr reflect.Value
+			if argType.Kind() != reflect.Ptr {
+				oPtr = reflect.New(argType)
+			} else {
+				oPtr = reflect.New(argType.Elem())
+			}
+			if err := c.enc.Decode(m.Subject, m.Data, oPtr.Interface()); err != nil {
+				if errorHandler := c.Conn.ErrorHandler(); errorHandler != nil {
+					errorHandler(c.Conn, m.Sub, errors.Wrap(err, "Got an unmarshal error"))
+				}
+				return
+			}
+			if argType.Kind() != reflect.Ptr {
+				oPtr = reflect.Indirect(oPtr)
+			}
+
+			switch numArgs {
+			case 1:
+				oV = []reflect.Value{oPtr}
+			case 2:
+				subV := reflect.ValueOf(m.Subject)
+				oV = []reflect.Value{subV, oPtr}
+			case 3:
+				subV := reflect.ValueOf(m.Subject)
+				replyV := reflect.ValueOf(m.Reply)
+				oV = []reflect.Value{subV, replyV, oPtr}
+			}
+
+		}
+		cbValue.Call(oV)
+	}
+
+	return c.Conn.Subscribe(subject, natsCB)
+}
+
+func PublishFilters(opts *PublishOptions) []MsgFilter {
+	if opts == nil {
+		return nil
+	}
+	var filters []MsgFilter
+
+	if len(opts.Tags) > 0 {
+		filters = append(filters, func(msg *nats.Msg) *nats.Msg {
+			if msg.Header == nil {
+				msg.Header = make(nats.Header)
+			}
+			for _, v := range opts.Tags {
+				msg.Header.Add("Tag", v)
+			}
+			return msg
+		})
+	}
+
+	return filters
+}
+
+func SubscribeFilters(opts *SubscribeOptions) []MsgFilter {
+	if opts == nil {
+		return nil
+	}
+	var filters []MsgFilter
+
+	if len(opts.FilterTags) > 0 {
+		filters = append(filters, func(msg *nats.Msg) *nats.Msg {
+			tags := msg.Header.Values("Tag")
+			for _, tag := range tags {
+				for _, v := range opts.FilterTags {
+					if v == tag {
+						return msg
+					}
+				}
+			}
+			return nil
+		})
+	}
+
+	return filters
+}
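+
+// Example (sketch, not part of the package API): typical external usage of the
+// events connection with tag-based filtering. The subject names and the Payload
+// type are illustrative assumptions; Payload is expected to implement
+// ProtoEncoder so the handler argument can be decoded.
+//
+//	conn, err := events.Open("nats://localhost:4222", "perxis")
+//	if err != nil {
+//		log.Fatal(err)
+//	}
+//	defer conn.Close()
+//
+//	// The subscriber only receives messages published with Tag("audit").
+//	sub, _ := conn.Subscribe("items.>", func(p *Payload) {
+//		fmt.Println(p)
+//	}, events.FilterTag("audit"))
+//	defer sub.Unsubscribe()
+//
+//	_ = conn.Publish("items.created", &Payload{}, events.Tag("audit"))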
diff --git a/pkg/events/nats_integration_test.go b/pkg/events/nats_integration_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..8fe38dbe1080d6a1ec7acdc9c565afd4c3852cdc
--- /dev/null
+++ b/pkg/events/nats_integration_test.go
@@ -0,0 +1,82 @@
+//go:build integration
+
+package events
+
+import (
+	"testing"
+	"time"
+
+	pb "git.perx.ru/perxis/perxis-go/pkg/events/test_proto"
+	"github.com/golang/protobuf/proto"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+type Test struct {
+	Text string
+}
+
+func (t *Test) ToProto() (proto.Message, error) {
+	return &pb.Test{Text: t.Text}, nil
+}
+
+func (t *Test) FromProto(message proto.Message) error {
+	t.Text = message.(*pb.Test).Text
+	return nil
+}
+
+func TestNatsBroker(t *testing.T) {
+
+	b, err := Open("nats://localhost:4222", "")
+	require.NoError(t, err)
+
+	resCh := make(chan string, 3)
+	_, err = b.Subscribe("a.*.c.>", func(t *Test) { resCh <- t.Text })
+	require.NoError(t, err)
+
+	require.NoError(t, b.Publish("a.b.c", &Test{Text: "1"}))
+	require.NoError(t, b.Publish("a.b.c.d", &Test{Text: "2"}))
+	require.NoError(t, b.Publish("a.b.c.d.e", &Test{Text: "3"}))
+	require.NoError(t, b.Publish("a.x.c", &Test{Text: "4"}))
+	require.NoError(t, b.Publish("a.x.c.d", &Test{Text: "5"}))
+
+	time.Sleep(200 * time.Millisecond)
+	require.NoError(t, b.Close())
+	close(resCh)
+	assert.ElementsMatch(t, []string{"2", "3", "5"}, func() []string {
+		var res []string
+		for v := range resCh {
+			res = append(res, v)
+		}
+		return res
+	}())
+}
+
+func TestTags(t *testing.T) {
+
+	b, err := Open("nats://localhost:4222", "")
+	require.NoError(t, err)
+
+	resCh := make(chan string, 3)
+	_, err = b.Subscribe("a.*.c.>", func(t *Test) { resCh <- t.Text }, FilterTag("one", "two", "three"))
+	require.NoError(t, err)
+
+	require.NoError(t, b.Publish("a.b.c", &Test{Text: "1"}))
+	require.NoError(t, b.Publish("a.b.c.d", &Test{Text: "2"}))
+	require.NoError(t, b.Publish("a.b.c.d.e", &Test{Text: "3"}, Tag("one")))
+	require.NoError(t, b.Publish("a.x.c", &Test{Text: "4"}))
+	require.NoError(t, b.Publish("a.x.c.d", &Test{Text: "5"}, Tag("two")))
+	require.NoError(t, b.Publish("a.x.c.d", &Test{Text: "6"}, Tag("two", "one")))
+	require.NoError(t, b.Publish("a.x.c.d", &Test{Text: "7"}, Tag("four")))
+
+	time.Sleep(200 * time.Millisecond)
+	require.NoError(t, b.Close())
+	close(resCh)
+	assert.ElementsMatch(t, []string{"3", "5", "6"}, func() []string {
+		var res []string
+		for v := range resCh {
+			res = append(res, v)
+		}
+		return res
+	}())
+}
diff --git a/pkg/events/proto_encoder.go b/pkg/events/proto_encoder.go
new file mode 100644
index 0000000000000000000000000000000000000000..f18c2f576daddd4af61c9ca8138bbbc1903ad280
--- /dev/null
+++ b/pkg/events/proto_encoder.go
@@ -0,0 +1,63 @@
+package events
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"github.com/golang/protobuf/proto"
+	"github.com/nats-io/nats.go"
+	"github.com/nats-io/nats.go/encoders/protobuf"
+)
+
+type ProtoEncoder interface {
+	ToProto() (proto.Message, error)
+	FromProto(message proto.Message) error
+}
+
+const (
+	ProtobufEncoderName = "protobuf"
+)
+
+func init() {
+	nats.RegisterEncoder(ProtobufEncoderName, &ProtobufEncoder{})
+}
+
+type ProtobufEncoder struct {
+	protobuf.ProtobufEncoder
+}
+
+var (
+	ErrInvalidProtoMsgEncode = errors.New("events: object passed to encode must implement ProtoEncoder")
+	ErrInvalidProtoMsgDecode = errors.New("events: object passed to decode must implement ProtoEncoder")
+)
+
+func (pb *ProtobufEncoder) Encode(subject string, v interface{}) ([]byte, error) {
+	if v == nil {
+		return nil, nil
+	}
+	e, ok := v.(ProtoEncoder)
+	if !ok {
+		return nil, ErrInvalidProtoMsgEncode
+	}
+
+	m, err := e.ToProto()
+	if err != nil {
+		return nil, errors.Wrap(err, "nats: encode to proto")
+	}
+
+	return pb.ProtobufEncoder.Encode(subject, m)
+}
+
+func (pb *ProtobufEncoder) Decode(subject string, data []byte, vPtr interface{}) error {
+
+	enc, ok := vPtr.(ProtoEncoder)
+	if !ok {
+		return ErrInvalidProtoMsgDecode
+	}
+
+	msg, err := enc.ToProto()
+	if err != nil {
+		return errors.Wrap(err, "nats: decode to proto")
+	}
+
+	if err := pb.ProtobufEncoder.Decode(subject, data, msg); err != nil {
+		return err
+	}
+
+	return enc.FromProto(msg)
+}
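+
+// Example (sketch): a domain type satisfying ProtoEncoder so that it can be
+// sent through ProtobufEncoder. The Item type and the itempb package are
+// illustrative assumptions; see pkg/events/nats_integration_test.go for a
+// working implementation over the test_proto package.
+//
+//	type Item struct{ Name string }
+//
+//	func (i *Item) ToProto() (proto.Message, error) {
+//		return &itempb.Item{Name: i.Name}, nil
+//	}
+//
+//	func (i *Item) FromProto(m proto.Message) error {
+//		i.Name = m.(*itempb.Item).Name
+//		return nil
+//	}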
diff --git a/pkg/events/test_proto/test.pb.go b/pkg/events/test_proto/test.pb.go
new file mode 100644
index 0000000000000000000000000000000000000000..de333160b391355e2d56b0976e547d58f63a62e1
--- /dev/null
+++ b/pkg/events/test_proto/test.pb.go
@@ -0,0 +1,143 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// versions:
+// 	protoc-gen-go v1.27.1
+// 	protoc        v3.21.5
+// source: test.proto
+
+package test_proto
+
+import (
+	protoreflect "google.golang.org/protobuf/reflect/protoreflect"
+	protoimpl "google.golang.org/protobuf/runtime/protoimpl"
+	reflect "reflect"
+	sync "sync"
+)
+
+const (
+	// Verify that this generated code is sufficiently up-to-date.
+	_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
+	// Verify that runtime/protoimpl is sufficiently up-to-date.
+	_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
+)
+
+type Test struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"`
+}
+
+func (x *Test) Reset() {
+	*x = Test{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_test_proto_msgTypes[0]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *Test) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*Test) ProtoMessage() {}
+
+func (x *Test) ProtoReflect() protoreflect.Message {
+	mi := &file_test_proto_msgTypes[0]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use Test.ProtoReflect.Descriptor instead.
+func (*Test) Descriptor() ([]byte, []int) {
+	return file_test_proto_rawDescGZIP(), []int{0}
+}
+
+func (x *Test) GetText() string {
+	if x != nil {
+		return x.Text
+	}
+	return ""
+}
+
+var File_test_proto protoreflect.FileDescriptor
+
+var file_test_proto_rawDesc = []byte{
+	0x0a, 0x0a, 0x74, 0x65, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x04, 0x74, 0x65,
+	0x73, 0x74, 0x22, 0x1a, 0x0a, 0x04, 0x54, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x65,
+	0x78, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, 0x42, 0x38,
+	0x5a, 0x36, 0x67, 0x69, 0x74, 0x2e, 0x70, 0x65, 0x72, 0x78, 0x2e, 0x72, 0x75, 0x2f, 0x70, 0x65,
+	0x72, 0x78, 0x69, 0x73, 0x2f, 0x70, 0x65, 0x72, 0x78, 0x69, 0x73, 0x2f, 0x62, 0x72, 0x6f, 0x6b,
+	0x65, 0x72, 0x2f, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x3b, 0x74, 0x65,
+	0x73, 0x74, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
+}
+
+var (
+	file_test_proto_rawDescOnce sync.Once
+	file_test_proto_rawDescData = file_test_proto_rawDesc
+)
+
+func file_test_proto_rawDescGZIP() []byte {
+	file_test_proto_rawDescOnce.Do(func() {
+		file_test_proto_rawDescData = protoimpl.X.CompressGZIP(file_test_proto_rawDescData)
+	})
+	return file_test_proto_rawDescData
+}
+
+var file_test_proto_msgTypes = make([]protoimpl.MessageInfo, 1)
+var file_test_proto_goTypes = []interface{}{
+	(*Test)(nil), // 0: test.Test
+}
+var file_test_proto_depIdxs = []int32{
+	0, // [0:0] is the sub-list for method output_type
+	0, // [0:0] is the sub-list for method input_type
+	0, // [0:0] is the sub-list for extension type_name
+	0, // [0:0] is the sub-list for extension extendee
+	0, // [0:0] is the sub-list for field type_name
+}
+
+func init() { file_test_proto_init() }
+func file_test_proto_init() {
+	if File_test_proto != nil {
+		return
+	}
+	if !protoimpl.UnsafeEnabled {
+		file_test_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
+			switch v := v.(*Test); i {
+			case 0:
+				return &v.state
+			case 1:
+				return &v.sizeCache
+			case 2:
+				return &v.unknownFields
+			default:
+				return nil
+			}
+		}
+	}
+	type x struct{}
+	out := protoimpl.TypeBuilder{
+		File: protoimpl.DescBuilder{
+			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
+			RawDescriptor: file_test_proto_rawDesc,
+			NumEnums:      0,
+			NumMessages:   1,
+			NumExtensions: 0,
+			NumServices:   0,
+		},
+		GoTypes:           file_test_proto_goTypes,
+		DependencyIndexes: file_test_proto_depIdxs,
+		MessageInfos:      file_test_proto_msgTypes,
+	}.Build()
+	File_test_proto = out.File
+	file_test_proto_rawDesc = nil
+	file_test_proto_goTypes = nil
+	file_test_proto_depIdxs = nil
+}
diff --git a/pkg/events/test_proto/test.proto b/pkg/events/test_proto/test.proto
new file mode 100644
index 0000000000000000000000000000000000000000..fecbc9d39bf39c65d97dc8d21cba8933a4243450
--- /dev/null
+++ b/pkg/events/test_proto/test.proto
@@ -0,0 +1,9 @@
+syntax = "proto3";
+
+option go_package = "git.perx.ru/perxis/perxis-go/broker/test_proto;test_proto";
+
+package test;
+
+message Test {
+  string text = 1;
+}
diff --git a/pkg/expr/bench.txt b/pkg/expr/bench.txt
deleted file mode 100644
index 0a8b9b81854561ae9a0ea3484a3fe8c9a544565b..0000000000000000000000000000000000000000
--- a/pkg/expr/bench.txt
+++ /dev/null
@@ -1,30 +0,0 @@
-------
-BenchmarkConvertToMongo
-------
-Конвертация выражения expr `id in [ ...ids ]` в формат bson.
-
-test 1:
-
-Количество идентификаторов в фильтре: 10_000
-Размер выражения: 230007b (0,2Mb)
-
-goos: darwin
-goarch: amd64
-pkg: github.com/perxteam/perxis/pkg/expr
-cpu: Intel(R) Core(TM) i7-9750H CPU @ 2.60GHz
-BenchmarkConvertToMongo-12            27          44312238 ns/op        13374293 B/op      81067 allocs/op
-PASS
-ok      github.com/perxteam/perxis/pkg/expr     2.572s
-
-test 2:
-
-Количество идентификаторов в фильтре: 1_000_000
-Размер выражения: 23000007b (21,9Mb)
-
-goos: darwin
-goarch: amd64
-pkg: github.com/perxteam/perxis/pkg/expr
-cpu: Intel(R) Core(TM) i7-9750H CPU @ 2.60GHz
-BenchmarkConvertToMongo-12             1        4142071283 ns/op        1064427296 B/op  7135952 allocs/op
-PASS
-ok      github.com/perxteam/perxis/pkg/expr     4.646s
diff --git a/pkg/filter/filter.go b/pkg/filter/filter.go
new file mode 100644
index 0000000000000000000000000000000000000000..ea2f1d436aba0ecced0a6473440e9fb4a782664d
--- /dev/null
+++ b/pkg/filter/filter.go
@@ -0,0 +1,410 @@
+package filter
+
+import (
+	"fmt"
+	"reflect"
+	"strings"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/validate"
+	"github.com/hashicorp/go-multierror"
+	"github.com/mitchellh/mapstructure"
+	"go.mongodb.org/mongo-driver/bson"
+	"go.mongodb.org/mongo-driver/x/bsonx"
+)
+
+type Op string
+
+const (
+	Equal          Op = "eq"
+	NotEqual       Op = "neq"
+	Less           Op = "lt"
+	LessOrEqual    Op = "lte"
+	Greater        Op = "gt"
+	GreaterOrEqual Op = "gte"
+	In             Op = "in"
+	NotIn          Op = "nin"
+	Contains       Op = "contains"
+	NotContains    Op = "ncontains"
+	Or             Op = "or"
+	And            Op = "and"
+	Near           Op = "near"
+)
+
+type Filter struct {
+	Op    Op
+	Field string
+	Value interface{}
+}
+
+func (f Filter) Format(s fmt.State, verb rune) {
+	fmt.Fprintf(s, "{Op:%s Field:%s Value:%+v}", f.Op, f.Field, f.Value)
+}
+
+func NewFilter(op Op, field string, val interface{}) *Filter {
+	return &Filter{
+		Op:    op,
+		Field: field,
+		Value: val,
+	}
+}
+
+type FilterHandler struct {
+	schemas  []*schema.Schema
+	qbuilder QueryBuilder
+	prefix   string
+}
+
+func NewFilterHandler(sch ...*schema.Schema) *FilterHandler {
+	return &FilterHandler{
+		schemas: sch,
+		//qbuilder: qb,
+	}
+}
+
+func (h *FilterHandler) SetTrimPrefix(prefix string) *FilterHandler {
+	h.prefix = prefix
+	return h
+}
+
+func (h *FilterHandler) removeFieldPrefix(f string) string {
+	if h.prefix != "" {
+		return strings.TrimPrefix(f, h.prefix+".")
+	}
+	return f
+}
+
+func (h *FilterHandler) AddSchema(sch ...*schema.Schema) *FilterHandler {
+	h.schemas = append(h.schemas, sch...)
+	return h
+}
+
+func (h *FilterHandler) SetQueryBuilder(qb QueryBuilder) {
+	h.qbuilder = qb
+}
+
+func (h *FilterHandler) Validate(filter ...*Filter) (err error) {
+	if len(h.schemas) == 0 {
+		return errors.New("no schema provided")
+	}
+
+	for _, sch := range h.schemas {
+		var merr *multierror.Error
+
+		for _, f := range filter {
+			if err := h.validate(sch, f); err != nil {
+				merr = multierror.Append(merr, err)
+			}
+		}
+		if merr != nil {
+			merr.ErrorFormat = func(i []error) string {
+				return fmt.Sprintf("%d validation error(s)", len(i))
+			}
+			return errors.WithField(merr, "filter")
+		}
+	}
+	return nil
+}
+
+// todo: '$elemMatch' - queries against an array field matching a condition: '{ results: { $elemMatch: { $gte: 80, $lt: 85 } }' ?
+
+func (h *FilterHandler) validate(sch *schema.Schema, f *Filter) (err error) {
+	if f == nil {
+		return
+	}
+
+	fld := h.removeFieldPrefix(f.Field)
+
+	switch f.Op {
+	case Equal, NotEqual, Less, LessOrEqual, Greater, GreaterOrEqual:
+		fld := sch.GetField(fld)
+		if fld == nil {
+			return h.formatErr(f.Field, f.Op, errors.New("field not found in collection schema"))
+		}
+
+		if f.Value, err = schema.Decode(nil, fld, f.Value); err != nil {
+			return h.formatErr(f.Field, f.Op, err)
+		}
+		if err = validate.Validate(nil, fld, f.Value); err != nil {
+			return h.formatErr(f.Field, f.Op, err)
+		}
+	case In, NotIn:
+		fld := sch.GetField(fld)
+		if fld == nil {
+			return h.formatErr(f.Field, f.Op, errors.New("field not found in collection schema"))
+		}
+		val := reflect.ValueOf(f.Value)
+		if val.IsZero() || (val.Kind() != reflect.Array && val.Kind() != reflect.Slice) {
+			return h.formatErr(f.Field, f.Op, errors.New("\"IN/NOT IN\" operations require array type for value"))
+		}
+
+		switch fld.GetType().(type) {
+		case *field.ArrayType:
+			f.Value, err = schema.Decode(nil, fld, f.Value)
+			if err != nil {
+				return h.formatErr(f.Field, f.Op, err)
+			}
+		default:
+			decodedVal := make([]interface{}, 0, val.Len())
+			for i := 0; i < val.Len(); i++ {
+				v, err := schema.Decode(nil, fld, val.Index(i).Interface())
+				if err != nil {
+					return h.formatErr(f.Field, f.Op, err)
+				}
+				decodedVal = append(decodedVal, v)
+			}
+
+			f.Value = decodedVal
+		}
+
+	case Contains, NotContains:
+		fld := sch.GetField(fld)
+		if fld == nil {
+			return h.formatErr(f.Field, f.Op, errors.New("field not found in collection schema"))
+		}
+
+		typ := fld.GetType()
+
+		if typ.Name() != "string" && typ.Name() != "array" {
+			return h.formatErr(f.Field, f.Op, errors.New("\"CONTAINS/NOT CONTAINS\" operations require field to be 'string' or 'string array'"))
+		}
+		if typ.Name() == "array" {
+			params := fld.Params.(*field.ArrayParameters)
+			if params.Item == nil || params.Item.GetType().Name() != "string" {
+				return h.formatErr(f.Field, f.Op, errors.New("\"CONTAINS/NOT CONTAINS\" operations require field to be 'string' or 'string array'"))
+			}
+		}
+
+		if reflect.TypeOf(f.Value).Kind() != reflect.String {
+			return h.formatErr(f.Field, f.Op, errors.New("\"CONTAINS/NOT CONTAINS\" operations require value to be 'string'"))
+		}
+
+	case Or, And:
+		fltrs, ok := f.Value.([]*Filter)
+		if !ok {
+			return h.formatErr(f.Field, f.Op, errors.New("array of filters should be provided for operations "))
+		}
+		for _, f := range fltrs {
+			err = h.validate(sch, f)
+			if err != nil {
+				return err
+			}
+		}
+
+	case Near:
+		fld := sch.GetField(fld)
+		if fld == nil {
+			return h.formatErr(f.Field, f.Op, errors.New("field not found in collection schema"))
+		}
+
+		_, ok := fld.Params.(*field.LocationParameters)
+		if !ok {
+			return h.formatErr(f.Field, f.Op, errors.New("field must be a location"))
+		}
+
+		value, ok := f.Value.(map[string]interface{})
+		if !ok {
+			return h.formatErr(f.Field, f.Op, errors.New("filter value should be map"))
+		}
+
+		point, ok := value["point"]
+		if !ok {
+			return h.formatErr(f.Field, f.Op, errors.New("filter value should have location"))
+		}
+
+		var p field.GeoJSON
+		if err := mapstructure.Decode(map[string]interface{}{"type": "Point", "coordinates": point}, &p); err != nil {
+			return h.formatErr(f.Field, f.Op, err)
+		}
+
+		maxD, ok := value["distance"]
+		if ok {
+			v := reflect.ValueOf(maxD)
+			if !v.Type().ConvertibleTo(reflect.TypeOf(float64(0))) {
+				return h.formatErr(f.Field, f.Op, errors.New("filter value distance must be a number"))
+			}
+			val := v.Convert(reflect.TypeOf(float64(0)))
+			if val.Float() < 0 {
+				return h.formatErr(f.Field, f.Op, errors.New("filter value distance should not be negative"))
+			}
+		}
+
+	default:
+		return h.formatErr(f.Field, f.Op, errors.New("unknown operation"))
+	}
+
+	return nil
+}
+
+func (*FilterHandler) formatErr(args ...interface{}) error {
+	var (
+		f   string
+		op  Op
+		err error
+	)
+	for _, arg := range args {
+		switch v := arg.(type) {
+		case string:
+			f = v
+		case Op:
+			op = v
+		case error:
+			err = v
+		}
+	}
+	return errors.WithField(fmt.Errorf("op: '%s' %s", op, err), f)
+}
+
+func (h *FilterHandler) Query(filter ...*Filter) interface{} {
+	return h.qbuilder.Query(filter...)
+}
+
+type QueryBuilder interface {
+	Query(filter ...*Filter) interface{}
+	SetFieldPrefix(string)
+}
+
+type mongoQueryBuilder struct {
+	m      map[Op]string
+	prefix string
+}
+
+func NewMongoQueryBuilder() QueryBuilder {
+	b := new(mongoQueryBuilder)
+	b.m = map[Op]string{
+		Equal:          "$eq",
+		NotEqual:       "$ne",
+		Less:           "$lt",
+		LessOrEqual:    "$lte",
+		Greater:        "$gt",
+		GreaterOrEqual: "$gte",
+		In:             "$in",
+		NotIn:          "$nin",
+		Contains:       "$regex",
+		NotContains:    "$not",
+		Or:             "$or",
+		And:            "$and",
+		Near:           "$near",
+	}
+	return b
+}
+
+func (b *mongoQueryBuilder) getOp(op Op) string {
+	return b.m[op]
+}
+
+func (b *mongoQueryBuilder) SetFieldPrefix(prefix string) {
+	b.prefix = prefix
+}
+
+func (b *mongoQueryBuilder) Query(filters ...*Filter) interface{} {
+	if len(filters) == 0 {
+		return bson.M{}
+	}
+	filter := &Filter{Op: And, Value: filters}
+	return b.query(filter)
+}
+
+func (b *mongoQueryBuilder) query(f *Filter) bson.M {
+	if f == nil {
+		return nil
+	}
+
+	switch f.Op {
+	case Equal, NotEqual, Less, LessOrEqual, Greater, GreaterOrEqual, In, NotIn:
+		return bson.M{
+			b.field(f.Field): bson.M{
+				b.getOp(f.Op): f.Value,
+			},
+		}
+	case Contains, NotContains:
+
+		val, _ := f.Value.(string)
+		return bson.M{
+			b.field(f.Field): bson.M{
+				b.getOp(f.Op): bsonx.Regex(val, ""),
+			},
+		}
+
+	case Or, And:
+		fltrs, ok := f.Value.([]*Filter)
+		if !ok {
+			return nil
+		}
+
+		arr := bson.A{}
+		for _, fltr := range fltrs {
+			arr = append(arr, b.query(fltr))
+		}
+		return bson.M{
+			b.getOp(f.Op): arr,
+		}
+	case Near:
+		val, ok := f.Value.(map[string]interface{})
+		if ok {
+			var p field.GeoJSON
+			c, ok := val["point"]
+			if !ok {
+				return nil
+			}
+			if err := mapstructure.Decode(map[string]interface{}{"type": "Point", "coordinates": c}, &p); err != nil {
+				return nil
+			}
+			q := bson.D{{Key: "$geometry", Value: p}}
+
+			if maxD, ok := val["distance"]; ok {
+				q = append(q, bson.E{Key: "$maxDistance", Value: maxD})
+			}
+
+			return bson.M{
+				b.field(f.Field + ".geometry"): bson.M{b.getOp(f.Op): q},
+			}
+		}
+	}
+
+	return nil
+}
+
+func (b *mongoQueryBuilder) field(f string) string {
+	if b.prefix == "" || strings.HasPrefix(f, b.prefix) {
+		return f
+	}
+	return b.prefix + "." + f
+}
+
+// $text search ??
+//func (b *mongoQueryBuilder) textSearchQuery(filters ...*Filter) string {
+//	cnt, notcnt := "", ""
+//	for _, f := range filters {
+//		val, ok := f.Value.(string)
+//		if !ok {
+//			continue
+//		}
+//		switch f.Op {
+//		case Contains:
+//			if len(cnt) > 0 {
+//				cnt += " "
+//			}
+//			cnt += val
+//		case NotContains:
+//			words := strings.Split(val, " ")
+//			for _, w := range words {
+//				if len(notcnt) > 0 {
+//					notcnt += " "
+//				}
+//				notcnt += "-" + w
+//			}
+//		}
+//	}
+//	if len(cnt) == 0 {
+//		return ""
+//	}
+//	if len(notcnt) > 0 {
+//		cnt += " " + notcnt
+//	}
+//	return cnt
+//}
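+
+// Example (sketch): validating filters against a collection schema and building
+// a MongoDB query. The schema fields are illustrative; see filter_test.go for
+// complete cases.
+//
+//	sch := schema.New(
+//		"name", field.String(),
+//		"qty", field.Number(field.NumberFormatInt),
+//	)
+//	h := NewFilterHandler(sch)
+//	h.SetQueryBuilder(NewMongoQueryBuilder())
+//
+//	f := NewFilter(Greater, "qty", 10)
+//	if err := h.Validate(f); err != nil {
+//		// the field is unknown or the value cannot be decoded to the field type
+//	}
+//	query := h.Query(f) // roughly bson.M{"$and": bson.A{bson.M{"qty": bson.M{"$gt": ...}}}}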
diff --git a/pkg/filter/filter_test.go b/pkg/filter/filter_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..7283e26a0dfebcc5159211e49c30dc29150f12db
--- /dev/null
+++ b/pkg/filter/filter_test.go
@@ -0,0 +1,473 @@
+package filter
+
+import (
+	"testing"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+	"go.mongodb.org/mongo-driver/bson/primitive"
+)
+
+func TestFilterHandler(t *testing.T) {
+
+	sch := schema.New(
+		"str", field.String(),
+		"num", field.Number(field.NumberFormatInt),
+		"obj", field.Object(
+			"bool", field.Bool(),
+			"arr", field.Array(field.Time()),
+			"list", field.Array(
+				field.Object(
+					"num1", field.Number(field.NumberFormatFloat),
+					"str1", field.String(),
+				),
+			),
+		),
+		"date", field.Time(),
+		"geo", field.Location(),
+	)
+	h := NewFilterHandler(sch)
+	ph := NewFilterHandler(sch).SetTrimPrefix("data")
+
+	h.SetQueryBuilder(NewMongoQueryBuilder())
+	ph.SetQueryBuilder(NewMongoQueryBuilder())
+
+	var err error
+
+	t.Run("Validate", func(t *testing.T) {
+		t.Run("Simple", func(t *testing.T) {
+			t.Run("String", func(t *testing.T) {
+				f := &Filter{Op: Equal, Field: "str", Value: "zzz"}
+				err = h.Validate(f)
+				require.NoError(t, err)
+
+				f = &Filter{Op: Equal, Field: "data.str", Value: "zzz"}
+				err = ph.Validate(f)
+				require.NoError(t, err)
+			})
+			t.Run("Int", func(t *testing.T) {
+				f := &Filter{Op: NotEqual, Field: "num", Value: 5.0}
+				err = h.Validate(f)
+				require.NoError(t, err)
+				assert.IsType(t, int64(0), f.Value)
+
+				f = &Filter{Op: NotEqual, Field: "data.num", Value: 5.0}
+				err = ph.Validate(f)
+				require.NoError(t, err)
+				assert.IsType(t, int64(0), f.Value)
+			})
+			t.Run("Time", func(t *testing.T) {
+				f := &Filter{Op: LessOrEqual, Field: "date", Value: "22 Dec 1997"}
+				err = h.Validate(f)
+				require.Error(t, err)
+
+				f = &Filter{Op: LessOrEqual, Field: "data.date", Value: "22 Dec 1997"}
+				err = ph.Validate(f)
+				require.Error(t, err)
+			})
+			t.Run("Location", func(t *testing.T) {
+				f := &Filter{Op: Near, Field: "geo", Value: ""}
+				err = h.Validate(f)
+				require.Error(t, err)
+
+				f = &Filter{Op: Near, Field: "data.geo", Value: ""}
+				err = ph.Validate(f)
+				require.Error(t, err)
+
+				fv := map[string]interface{}{
+					"point":    []float64{55, 55},
+					"distance": 1000,
+				}
+
+				f = &Filter{Op: Near, Field: "data.geo", Value: fv}
+				err = ph.Validate(f)
+				require.NoError(t, err)
+
+				fv["distance"] = -1
+				f = &Filter{Op: Near, Field: "data.geo", Value: fv}
+				err = ph.Validate(f)
+				require.Error(t, err)
+
+			})
+		})
+		t.Run("Embedded array field", func(t *testing.T) {
+			w, _ := time.Parse(time.RFC3339, "2012-11-01T22:08:41Z")
+			f := &Filter{Op: In, Field: "obj.arr", Value: []interface{}{"2012-11-01T22:08:41Z"}}
+			err = h.Validate(f)
+			require.NoError(t, err)
+			assert.Equal(t, w, f.Value.([]interface{})[0])
+
+			f = &Filter{Op: In, Field: "data.obj.arr", Value: []interface{}{"2012-11-01T22:08:41Z"}}
+			err = ph.Validate(f)
+			require.NoError(t, err)
+			assert.Equal(t, w, f.Value.([]interface{})[0])
+		})
+		t.Run("Embedded string contains", func(t *testing.T) {
+			f := &Filter{Op: Contains, Field: "obj.list.str1", Value: "zzz"}
+			err = h.Validate(f)
+			require.NoError(t, err)
+
+			f = &Filter{Op: Contains, Field: "data.obj.list.str1", Value: "zzz"}
+			err = ph.Validate(f)
+			require.NoError(t, err)
+		})
+		t.Run("Compound filter with 'OR' operation", func(t *testing.T) {
+			t.Run("No Err", func(t *testing.T) {
+				w1, _ := time.Parse(time.RFC3339, "2012-11-01T22:08:41Z")
+				w2, _ := time.Parse(time.RFC3339, "2015-12-01T22:08:41Z")
+
+				ff := []*Filter{
+					{Op: In, Field: "date", Value: []interface{}{"2012-11-01T22:08:41Z", "2015-12-01T22:08:41Z"}},
+					{Op: Or, Field: "", Value: []*Filter{
+						{Op: And, Field: "", Value: []*Filter{
+							{Op: GreaterOrEqual, Field: "date", Value: "2012-11-01T22:08:41Z"},
+							{Op: LessOrEqual, Field: "date", Value: "2015-12-01T22:08:41Z"},
+						}},
+						{Op: Equal, Field: "obj.bool", Value: true},
+					}},
+				}
+				err = h.Validate(ff...)
+				require.NoError(t, err)
+				assert.ElementsMatch(t, []interface{}{w1, w2}, ff[0].Value.([]interface{}))
+				assert.Equal(t, w1, ff[1].Value.([]*Filter)[0].Value.([]*Filter)[0].Value)
+				assert.Equal(t, w2, ff[1].Value.([]*Filter)[0].Value.([]*Filter)[1].Value)
+
+				ff = []*Filter{
+					{Op: In, Field: "data.date", Value: []interface{}{"2012-11-01T22:08:41Z", "2015-12-01T22:08:41Z"}},
+					{Op: Or, Field: "", Value: []*Filter{
+						{Op: And, Field: "", Value: []*Filter{
+							{Op: GreaterOrEqual, Field: "data.date", Value: "2012-11-01T22:08:41Z"},
+							{Op: LessOrEqual, Field: "data.date", Value: "2015-12-01T22:08:41Z"},
+						}},
+						{Op: Equal, Field: "data.obj.bool", Value: true},
+					}},
+				}
+				err = ph.Validate(ff...)
+				require.NoError(t, err)
+				assert.ElementsMatch(t, []interface{}{w1, w2}, ff[0].Value.([]interface{}))
+				assert.Equal(t, w1, ff[1].Value.([]*Filter)[0].Value.([]*Filter)[0].Value)
+				assert.Equal(t, w2, ff[1].Value.([]*Filter)[0].Value.([]*Filter)[1].Value)
+			})
+			t.Run("Multiple Errors", func(t *testing.T) {
+				ff := []*Filter{
+					{Op: In, Field: "date", Value: []interface{}{"5 Jan 2020", "10 June 2020"}},
+					{Op: Or, Field: "", Value: []*Filter{
+						{Op: And, Field: "", Value: []*Filter{
+							{Op: GreaterOrEqual, Field: "date", Value: "2012-11-01T22:08:41Z"},
+							{Op: LessOrEqual, Field: "date", Value: "2015-12-01T22:08:41Z"},
+						}},
+						{Op: Equal, Field: "obj.bool", Value: 15},
+					}},
+				}
+				err = h.Validate(ff...)
+				require.Error(t, err)
+				assert.Equal(t, err.Error(), "2 validation error(s)")
+
+				ff = []*Filter{
+					{Op: In, Field: "data.date", Value: []interface{}{"5 Jan 2020", "10 June 2020"}},
+					{Op: Or, Field: "", Value: []*Filter{
+						{Op: And, Field: "", Value: []*Filter{
+							{Op: GreaterOrEqual, Field: "data.date", Value: "2012-11-01T22:08:41Z"},
+							{Op: LessOrEqual, Field: "data.date", Value: "2015-12-01T22:08:41Z"},
+						}},
+						{Op: Equal, Field: "data.obj.bool", Value: 15},
+					}},
+				}
+				err = h.Validate(ff...)
+				require.Error(t, err)
+				assert.Equal(t, err.Error(), "2 validation error(s)")
+			})
+		})
+	})
+
+	t.Run("Build Query", func(t *testing.T) {
+		t.Run("No Filters", func(t *testing.T) {
+			res := h.Query()
+			require.IsType(t, res, primitive.M{})
+
+			pres := ph.Query()
+			assert.Equal(t, res, pres, "пустой запрос с префиксом и без должны быть одинаковые")
+		})
+		t.Run("Equal String", func(t *testing.T) {
+			f := &Filter{Op: Equal, Field: "data.str", Value: "zzz"}
+			res := h.Query(f)
+			b, ok := res.(primitive.M)
+			require.True(t, ok)
+			assert.Equal(t, primitive.M{"$and": primitive.A{primitive.M{"data.str": primitive.M{"$eq": "zzz"}}}}, b)
+
+			pf := &Filter{Op: Equal, Field: "data.str", Value: "zzz"}
+			pres := ph.Query(pf)
+			assert.Equal(t, res, pres, "запрос в БД с полями с префиксом и без должны быть одинаковые")
+		})
+		t.Run("In Array", func(t *testing.T) {
+			w, _ := time.Parse(time.RFC3339, "2012-11-01T22:08:41Z")
+			f := &Filter{Op: In, Field: "obj.arr", Value: []interface{}{w}}
+			res := h.Query(f)
+			b, ok := res.(primitive.M)
+			require.True(t, ok)
+			assert.Equal(t, primitive.M{"$and": primitive.A{primitive.M{"obj.arr": primitive.M{"$in": []interface{}{w}}}}}, b)
+		})
+		t.Run("Several ops for one field", func(t *testing.T) {
+			w, _ := time.Parse(time.RFC3339, "2012-11-01T22:08:41Z")
+			f := &Filter{Op: In, Field: "obj.arr", Value: []interface{}{w}}
+			res := h.Query(f)
+			b, ok := res.(primitive.M)
+			require.True(t, ok)
+			assert.Equal(t, primitive.M{"$and": primitive.A{primitive.M{"obj.arr": primitive.M{"$in": []interface{}{w}}}}}, b)
+		})
+	})
+}
+
+//func TestFilterHandler_Integration(t *testing.T) {
+//	ctx := context.Background()
+//
+//	uri := os.Getenv("MONGO_URL")
+//	if uri == "" {
+//		uri = "mongodb://localhost:27017"
+//	}
+//	opts := options.Client().SetConnectTimeout(15 * time.Second).ApplyURI(uri)
+//	client, err := mongo.Connect(context.Background(), opts)
+//	require.NoError(t, err)
+//	err = client.Ping(ctx, nil)
+//	require.NoError(t, err)
+//
+//	sch := schema.New(
+//		"name", field.String(validate.Required()),
+//		"color", field.String(),
+//		"qty", field.Number(field.NumberFormatInt),
+//		"info", field.Object(
+//			"is_fruit", field.Bool(),
+//			"similar", field.Array(
+//				field.Object(
+//					"name", field.Number(field.NumberFormatFloat),
+//					"color", field.String(),
+//				),
+//			),
+//			"desc", field.String(),
+//		),
+//		"produced", field.Time(),
+//		"shipment", field.Array(field.String()),
+//	)
+//
+//	w1, _ := time.Parse(time.RFC3339, "2020-01-01T10:08:41Z")
+//	w2, _ := time.Parse(time.RFC3339, "2020-05-01T10:08:41Z")
+//	w3, _ := time.Parse(time.RFC3339, "2020-10-01T10:08:41Z")
+//
+//	items := []map[string]interface{}{
+//		{
+//			"name":  "apple",
+//			"color": "red",
+//			"qty":   25,
+//			"info": map[string]interface{}{
+//				"is_fruit": true,
+//				"similar": []interface{}{
+//					map[string]interface{}{"name": "pear", "color": "yellow"},
+//					map[string]interface{}{"name": "lemon", "color": "yellow"},
+//				},
+//				"desc": "An apple is the edible fruit . Apple trees are cultivated worldwide and have religious and mythological " +
+//					"significance in many cultures. Apples are eaten with honey at the Jewish New Year of Rosh Hashanah to symbolize a sweet new year.",
+//			},
+//			"produced":   w1,
+//			"shipment":   []interface{}{"Russia", "Iran"},
+//			"storepoint": map[string]interface{}{"type": "Point", "coordinates": []float64{55.751472, 37.618727}},
+//		},
+//		{
+//			"name":  "orange",
+//			"color": "orange",
+//			"qty":   10,
+//			"info": map[string]interface{}{
+//				"is_fruit": true,
+//				"similar": []interface{}{
+//					map[string]interface{}{"name": "lemon", "color": "yellow"},
+//					map[string]interface{}{"name": "grapefruit", "color": "red"},
+//				},
+//				"desc": "The orange is the edible fruit of various citrus species; a hybrid between pomelo and mandarin. Orange trees are widely grown" +
+//					" in tropical and subtropical climates for their sweet fruit. The fruit of the orange tree can be eaten fresh, or processed for its juice or fragrant peel.",
+//			},
+//			"produced":   w2,
+//			"shipment":   []interface{}{"Egypt", "Iran"},
+//			"storepoint": map[string]interface{}{"type": "Point", "coordinates": []float64{55.716797, 37.552809}},
+//		},
+//		{
+//			"name":  "tomato",
+//			"color": "red",
+//			"qty":   1,
+//			"info": map[string]interface{}{
+//				"is_fruit": false,
+//				"similar": []interface{}{
+//					map[string]interface{}{"name": "cucumber", "color": "green"},
+//					map[string]interface{}{"name": "apple", "color": "yellow"},
+//				},
+//				"desc": "The tomato is the edible red berry. The tomato is consumed in diverse ways, raw or cooked, in many dishes, " +
+//					"sauces, salads, and drinks. Numerous varieties of the tomato plant are widely grown in temperate climates across the world.",
+//			},
+//			"produced":   w3,
+//			"shipment":   []interface{}{"Russia", "Italy"},
+//			"storepoint": map[string]interface{}{"type": "Point", "coordinates": []float64{55.760688, 37.619125}},
+//		},
+//	}
+//
+//	db := client.Database("perxis_test_filter")
+//	coll := db.Collection("items")
+//	coll.Drop(ctx)
+//
+//	for _, item := range items {
+//		_, err = coll.InsertOne(ctx, item)
+//		require.NoError(t, err)
+//	}
+//
+//	h := NewFilterHandler(sch)
+//	h.SetQueryBuilder(NewMongoQueryBuilder())
+//
+//	t.Run("By Color [Equal/NotEqual]", func(t *testing.T) {
+//		t.Run("Red", func(t *testing.T) {
+//			query := h.Query(&Filter{Op: Equal, Field: "color", Value: "red"})
+//			res, err := coll.Find(ctx, query)
+//			require.NoError(t, err)
+//
+//			var data []map[string]interface{}
+//			err = res.All(ctx, &data)
+//			require.NoError(t, err)
+//			require.Len(t, data, 2)
+//			assert.ElementsMatch(t, []interface{}{"apple", "tomato"}, []interface{}{data[0]["name"], data[1]["name"]})
+//		})
+//		t.Run("Not Red", func(t *testing.T) {
+//			query := h.Query(&Filter{Op: NotEqual, Field: "color", Value: "red"})
+//			res, err := coll.Find(ctx, query)
+//			require.NoError(t, err)
+//
+//			var data []map[string]interface{}
+//			err = res.All(ctx, &data)
+//			require.NoError(t, err)
+//			require.Len(t, data, 1)
+//			assert.Equal(t, "orange", data[0]["name"])
+//		})
+//	})
+//	t.Run("By Quantity [Less/Greater]", func(t *testing.T) {
+//		query := h.Query(&Filter{Op: LessOrEqual, Field: "qty", Value: 25}, &Filter{Op: Greater, Field: "qty", Value: 1})
+//		res, err := coll.Find(ctx, query)
+//		require.NoError(t, err)
+//
+//		var data []map[string]interface{}
+//		err = res.All(ctx, &data)
+//		require.NoError(t, err)
+//		require.Len(t, data, 2)
+//		assert.ElementsMatch(t, []interface{}{"apple", "orange"}, []interface{}{data[0]["name"], data[1]["name"]})
+//	})
+//	t.Run("Not Fruit [Equal embedded field]", func(t *testing.T) {
+//		query := h.Query(&Filter{Op: Equal, Field: "info.is_fruit", Value: false})
+//		res, err := coll.Find(ctx, query)
+//		require.NoError(t, err)
+//
+//		var data []map[string]interface{}
+//		err = res.All(ctx, &data)
+//		require.NoError(t, err)
+//		require.Len(t, data, 1)
+//		assert.Equal(t, "tomato", data[0]["name"])
+//	})
+//	t.Run("By Similar [In/NotIn]", func(t *testing.T) {
+//		t.Run("Similar to cucumber, pear", func(t *testing.T) {
+//			query := h.Query(&Filter{Op: In, Field: "info.similar.name", Value: []string{"cucumber", "pear"}})
+//			res, err := coll.Find(ctx, query)
+//			require.NoError(t, err)
+//
+//			var data []map[string]interface{}
+//			err = res.All(ctx, &data)
+//			require.NoError(t, err)
+//			require.Len(t, data, 2)
+//			assert.ElementsMatch(t, []interface{}{"apple", "tomato"}, []interface{}{data[0]["name"], data[1]["name"]})
+//		})
+//		t.Run("Not Similar to cucumber, pear", func(t *testing.T) {
+//			query := h.Query(&Filter{Op: NotIn, Field: "info.similar.name", Value: []string{"cucumber", "grapefruit"}})
+//			res, err := coll.Find(ctx, query)
+//			require.NoError(t, err)
+//
+//			var data []map[string]interface{}
+//			err = res.All(ctx, &data)
+//			require.NoError(t, err)
+//			require.Len(t, data, 1)
+//			assert.Equal(t, "apple", data[0]["name"])
+//		})
+//	})
+//	t.Run("By Description [Contains/NotContains]", func(t *testing.T) {
+//		t.Run("Contains", func(t *testing.T) {
+//			query := h.Query(&Filter{Op: And, Value: []*Filter{
+//				&Filter{Op: In, Field: "info.similar.color", Value: []string{"yellow"}},
+//				&Filter{Op: Contains, Field: "info.desc", Value: "edible fruit"},
+//			}})
+//			res, err := coll.Find(ctx, query)
+//			require.NoError(t, err)
+//			var data []map[string]interface{}
+//			err = res.All(ctx, &data)
+//			require.NoError(t, err)
+//			require.Len(t, data, 2)
+//			assert.ElementsMatch(t, []interface{}{"apple", "orange"}, []interface{}{data[0]["name"], data[1]["name"]})
+//		})
+//		t.Run("Not Contains", func(t *testing.T) {
+//			query := h.Query(&Filter{Op: NotContains, Field: "info.desc", Value: "fruit"})
+//			res, err := coll.Find(ctx, query)
+//			require.NoError(t, err)
+//
+//			var data []map[string]interface{}
+//			err = res.All(ctx, &data)
+//			require.NoError(t, err)
+//			for _, d := range data {
+//				fmt.Println(d["name"])
+//			}
+//			require.Len(t, data, 1)
+//			assert.Equal(t, "tomato", data[0]["name"])
+//		})
+//	})
+//	t.Run("By Shipment [Contains/NotContains]", func(t *testing.T) {
+//		t.Run("Contains", func(t *testing.T) {
+//			query := h.Query(
+//				&Filter{Op: Contains, Field: "shipment", Value: "Russia"},
+//			)
+//			res, err := coll.Find(ctx, query)
+//			require.NoError(t, err)
+//			var data []map[string]interface{}
+//			err = res.All(ctx, &data)
+//			require.NoError(t, err)
+//			require.Len(t, data, 2)
+//			assert.ElementsMatch(t, []interface{}{"apple", "tomato"}, []interface{}{data[0]["name"], data[1]["name"]})
+//		})
+//		t.Run("Not Contains", func(t *testing.T) {
+//			query := h.Query(&Filter{Op: NotContains, Field: "shipment", Value: "Iran"})
+//			res, err := coll.Find(ctx, query)
+//			require.NoError(t, err)
+//
+//			var data []map[string]interface{}
+//			err = res.All(ctx, &data)
+//			require.NoError(t, err)
+//			for _, d := range data {
+//				fmt.Println(d["name"])
+//			}
+//			require.Len(t, data, 1)
+//			assert.Equal(t, "tomato", data[0]["name"])
+//		})
+//	})
+//	t.Run("Compound Query", func(t *testing.T) {
+//		query := h.Query(&Filter{Op: Or, Value: []*Filter{
+//			&Filter{Op: And, Value: []*Filter{
+//				&Filter{Op: In, Field: "color", Value: []interface{}{"red", "yellow", "green"}},
+//				&Filter{Op: Less, Field: "qty", Value: 10},
+//			}}, // 1 - tomato
+//			&Filter{Op: Equal, Field: "name", Value: "pepper"}, // 0
+//			&Filter{Op: And, Value: []*Filter{
+//				&Filter{Op: GreaterOrEqual, Field: "produced", Value: w1},
+//				&Filter{Op: Less, Field: "produced", Value: w2}, // 1 - apple
+//			}},
+//		}})
+//		res, err := coll.Find(ctx, query)
+//		require.NoError(t, err)
+//
+//		var data []map[string]interface{}
+//		err = res.All(ctx, &data)
+//		require.NoError(t, err)
+//		require.Len(t, data, 2)
+//		assert.ElementsMatch(t, []interface{}{"apple", "tomato"}, []interface{}{data[0]["name"], data[1]["name"]})
+//	})
+//}
diff --git a/pkg/invitations/invitation.go b/pkg/invitations/invitation.go
new file mode 100644
index 0000000000000000000000000000000000000000..5dc5913774fc5774076823309abdf52b195c87c2
--- /dev/null
+++ b/pkg/invitations/invitation.go
@@ -0,0 +1,16 @@
+package invitations
+
+import "time"
+
+const InvitationTTL = 7 * 24 * time.Hour
+
+type Invitation struct {
+	ID         string     `bson:"_id"`
+	Email      string     `bson:"email"`
+	OrgID      string     `bson:"orgId"`
+	SpaceID    string     `bson:"spaceId"`
+	OwnerID    string     `bson:"ownerId"` // Invitation owner
+	Role       string     `bson:"role"`
+	CreatedAt  *time.Time `bson:"createdAt"`
+	ValidUntil *time.Time `bson:"validUntil"`
+}
diff --git a/pkg/invitations/middleware/caching_middleware.go b/pkg/invitations/middleware/caching_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..97a1bcb3367fa0cf0ab0f1bc7c638191cb72911c
--- /dev/null
+++ b/pkg/invitations/middleware/caching_middleware.go
@@ -0,0 +1,62 @@
+package service
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	service "git.perx.ru/perxis/perxis-go/pkg/invitations"
+	services "git.perx.ru/perxis/perxis-go/pkg/options"
+)
+
+func CachingMiddleware(cache *cache.Cache) Middleware {
+	return func(next service.Invitations) service.Invitations {
+		return &cachingMiddleware{
+			cache: cache,
+			next:  next,
+		}
+	}
+}
+
+type cachingMiddleware struct {
+	cache *cache.Cache
+	next  service.Invitations
+}
+
+func (m cachingMiddleware) Create(ctx context.Context, invitation *service.Invitation) (inv *service.Invitation, err error) {
+	return m.next.Create(ctx, invitation)
+}
+
+func (m cachingMiddleware) Get(ctx context.Context, invitationId string) (inv *service.Invitation, err error) {
+
+	value, e := m.cache.Get(invitationId)
+	if e == nil {
+		return value.(*service.Invitation), nil
+	}
+	inv, err = m.next.Get(ctx, invitationId)
+	if err == nil {
+		m.cache.Set(invitationId, inv)
+	}
+	return inv, err
+}
+
+func (m cachingMiddleware) Accept(ctx context.Context, invitationId string, userId string) (err error) {
+
+	err = m.next.Accept(ctx, invitationId, userId)
+	if err == nil {
+		m.cache.Remove(invitationId)
+	}
+	return err
+}
+
+func (m cachingMiddleware) Find(ctx context.Context, filter *service.Filter, opts *services.FindOptions) (invitations []*service.Invitation, total int, err error) {
+	return m.next.Find(ctx, filter, opts)
+}
+
+func (m cachingMiddleware) Delete(ctx context.Context, invitationId string) (err error) {
+
+	err = m.next.Delete(ctx, invitationId)
+	if err == nil {
+		m.cache.Remove(invitationId)
+	}
+	return err
+}
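+
+// Example (sketch): composing the invitations service with the middleware from
+// this package. The wrapping order is illustrative - each call wraps the
+// previous result, so the last middleware applied is the outermost one.
+//
+//	var svc invitations.Invitations = client // transport client or implementation
+//	svc = ErrorLoggingMiddleware(logger)(svc)                        // innermost
+//	svc = LoggingMiddleware(logger)(svc)
+//	svc = CachingMiddleware(cache.NewCache(1000, time.Minute))(svc)  // outermost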
diff --git a/pkg/invitations/middleware/caching_middleware_test.go b/pkg/invitations/middleware/caching_middleware_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..b635ee0cd09b28245a8f29c67d7902a08392eba7
--- /dev/null
+++ b/pkg/invitations/middleware/caching_middleware_test.go
@@ -0,0 +1,129 @@
+package service
+
+import (
+	"context"
+	"testing"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/invitations"
+	invmocks "git.perx.ru/perxis/perxis-go/pkg/invitations/mocks"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/mock"
+	"github.com/stretchr/testify/require"
+)
+
+func TestInvitationsCache(t *testing.T) {
+
+	const (
+		orgID = "orgID"
+		email = "123@321.ru"
+		invID = "invID"
+		usrID = "usrID"
+		size  = 5
+		ttl   = 20 * time.Millisecond
+	)
+
+	errNotFound := errors.NotFound(errors.New("not found"))
+
+	ctx := context.Background()
+
+	t.Run("Get from Cache", func(t *testing.T) {
+		inv := &invmocks.Invitations{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl))(inv)
+
+		inv.On("Get", mock.Anything, invID).Return(&invitations.Invitation{ID: invID, Email: email, OrgID: orgID}, nil).Once()
+
+		v1, err := svc.Get(ctx, invID)
+		require.NoError(t, err)
+
+		v2, err := svc.Get(ctx, invID)
+		require.NoError(t, err)
+		assert.Same(t, v1, v2, "Ожидается что при повторном запросе объект будет получен из кэша.")
+
+		inv.AssertExpectations(t)
+	})
+
+	t.Run("Invalidate cache", func(t *testing.T) {
+		t.Run("Get from Accept", func(t *testing.T) {
+			inv := &invmocks.Invitations{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(inv)
+
+			inv.On("Get", mock.Anything, invID).Return(&invitations.Invitation{ID: invID, Email: email, OrgID: orgID}, nil).Once()
+
+			v1, err := svc.Get(ctx, invID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, invID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается что при повторном запросе объект будет получен из кэша.")
+
+			inv.On("Accept", mock.Anything, invID, usrID).Return(nil).Once()
+			inv.On("Get", mock.Anything, invID).Return(nil, errNotFound).Once()
+
+			err = svc.Accept(ctx, invID, usrID)
+			require.NoError(t, err)
+
+			_, err = svc.Get(ctx, invID)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается что после подтверждения объект будет удален из кэша и получена ошибка от сервиса.")
+
+			inv.AssertExpectations(t)
+		})
+
+		t.Run("Get from Delete", func(t *testing.T) {
+			inv := &invmocks.Invitations{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(inv)
+
+			inv.On("Get", mock.Anything, invID).Return(&invitations.Invitation{ID: invID, Email: email, OrgID: orgID}, nil).Once()
+
+			v1, err := svc.Get(ctx, invID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, invID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается что при повторном запросе объект будет получен из кэша.")
+
+			inv.On("Delete", mock.Anything, invID).Return(nil).Once()
+			inv.On("Get", mock.Anything, invID).Return(nil, errNotFound).Once()
+
+			err = svc.Delete(ctx, invID)
+			require.NoError(t, err)
+
+			_, err = svc.Get(ctx, invID)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается что после удаления кэша будет очищен и получена ошибка от сервиса.")
+
+			inv.AssertExpectations(t)
+		})
+
+		t.Run("After TTL expired", func(t *testing.T) {
+			inv := &invmocks.Invitations{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(inv)
+
+			inv.On("Get", mock.Anything, invID).Return(&invitations.Invitation{ID: invID, Email: email, OrgID: orgID}, nil).Once()
+
+			v1, err := svc.Get(ctx, invID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, invID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается что при повторном запросе объект будет получен из кэша.")
+
+			time.Sleep(2 * ttl)
+
+			inv.On("Get", mock.Anything, invID).Return(&invitations.Invitation{ID: invID, Email: email, OrgID: orgID}, nil).Once()
+
+			v3, err := svc.Get(ctx, invID)
+			require.NoError(t, err)
+			assert.NotSame(t, v2, v3, "Ожидается что при истечении ttl кеш будет очищен..")
+
+			inv.AssertExpectations(t)
+		})
+	})
+}
diff --git a/pkg/invitations/middleware/error_logging_middleware.go b/pkg/invitations/middleware/error_logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..39c823ae6fdf7fb7cb5c181c42099ca16c1a2bad
--- /dev/null
+++ b/pkg/invitations/middleware/error_logging_middleware.go
@@ -0,0 +1,81 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/error_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/invitations -i Invitations -t ../../../assets/templates/middleware/error_log -o error_logging_middleware.go -l ""
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/invitations"
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+	"go.uber.org/zap"
+)
+
+// errorLoggingMiddleware implements invitations.Invitations that is instrumented with logging
+type errorLoggingMiddleware struct {
+	logger *zap.Logger
+	next   invitations.Invitations
+}
+
+// ErrorLoggingMiddleware instruments an implementation of the invitations.Invitations with simple logging
+func ErrorLoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next invitations.Invitations) invitations.Invitations {
+		return &errorLoggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *errorLoggingMiddleware) Accept(ctx context.Context, invitationId string, userId string) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Accept(ctx, invitationId, userId)
+}
+
+func (m *errorLoggingMiddleware) Create(ctx context.Context, invitation *invitations.Invitation) (created *invitations.Invitation, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Create(ctx, invitation)
+}
+
+func (m *errorLoggingMiddleware) Delete(ctx context.Context, invitationId string) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Delete(ctx, invitationId)
+}
+
+func (m *errorLoggingMiddleware) Find(ctx context.Context, filter *invitations.Filter, opts *options.FindOptions) (invitations []*invitations.Invitation, total int, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Find(ctx, filter, opts)
+}
+
+func (m *errorLoggingMiddleware) Get(ctx context.Context, invitationId string) (invitation *invitations.Invitation, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Get(ctx, invitationId)
+}
diff --git a/pkg/invitations/middleware/logging_middleware.go b/pkg/invitations/middleware/logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..8f1ceb9959497794cc4bc7a2d6a963a949d9b1a3
--- /dev/null
+++ b/pkg/invitations/middleware/logging_middleware.go
@@ -0,0 +1,216 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/access_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/invitations -i Invitations -t ../../../assets/templates/middleware/access_log -o logging_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/auth"
+	"git.perx.ru/perxis/perxis-go/pkg/invitations"
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+	"go.uber.org/zap"
+	"go.uber.org/zap/zapcore"
+)
+
+// loggingMiddleware implements invitations.Invitations that is instrumented with logging
+type loggingMiddleware struct {
+	logger *zap.Logger
+	next   invitations.Invitations
+}
+
+// LoggingMiddleware instruments an implementation of the invitations.Invitations with simple logging
+func LoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next invitations.Invitations) invitations.Invitations {
+		return &loggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *loggingMiddleware) Accept(ctx context.Context, invitationId string, userId string) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"invitationId": invitationId,
+		"userId":       userId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Accept.Request", fields...)
+
+	err = m.next.Accept(ctx, invitationId, userId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Accept.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Create(ctx context.Context, invitation *invitations.Invitation) (created *invitations.Invitation, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":        ctx,
+		"invitation": invitation} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Create.Request", fields...)
+
+	created, err = m.next.Create(ctx, invitation)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"created": created,
+		"err":     err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Create.Response", fields...)
+
+	return created, err
+}
+
+func (m *loggingMiddleware) Delete(ctx context.Context, invitationId string) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"invitationId": invitationId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Delete.Request", fields...)
+
+	err = m.next.Delete(ctx, invitationId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Delete.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Find(ctx context.Context, filter *invitations.Filter, opts *options.FindOptions) (invitations []*invitations.Invitation, total int, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":    ctx,
+		"filter": filter,
+		"opts":   opts} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Find.Request", fields...)
+
+	invitations, total, err = m.next.Find(ctx, filter, opts)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"invitations": invitations,
+		"total":       total,
+		"err":         err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Find.Response", fields...)
+
+	return invitations, total, err
+}
+
+func (m *loggingMiddleware) Get(ctx context.Context, invitationId string) (invitation *invitations.Invitation, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"invitationId": invitationId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Request", fields...)
+
+	invitation, err = m.next.Get(ctx, invitationId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"invitation": invitation,
+		"err":        err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Response", fields...)
+
+	return invitation, err
+}
diff --git a/pkg/invitations/middleware/middleware.go b/pkg/invitations/middleware/middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..1c054d8ae96ab5d45f1dd83af9bb440b3d429817
--- /dev/null
+++ b/pkg/invitations/middleware/middleware.go
@@ -0,0 +1,28 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/middleware
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/invitations -i Invitations -t ../../../assets/templates/middleware/middleware -o middleware.go -l ""
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/invitations"
+	"go.uber.org/zap"
+)
+
+type Middleware func(invitations.Invitations) invitations.Invitations
+
+func WithLog(s invitations.Invitations, logger *zap.Logger, log_access bool) invitations.Invitations {
+	if logger == nil {
+		logger = zap.NewNop()
+	}
+
+	logger = logger.Named("Invitations")
+	s = ErrorLoggingMiddleware(logger)(s)
+	if log_access {
+		s = LoggingMiddleware(logger)(s)
+	}
+	s = RecoveringMiddleware(logger)(s)
+	return s
+}
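
The generated WithLog helper above composes the error, access and recovery wrappers, so callers only hand over the concrete service and a logger. A minimal wiring sketch; newInvitationsImpl is a hypothetical constructor, and the middleware package is imported under an alias because its package name is service:

    import (
        "git.perx.ru/perxis/perxis-go/pkg/invitations"
        invitationsmw "git.perx.ru/perxis/perxis-go/pkg/invitations/middleware"
        "go.uber.org/zap"
    )

    func newService(logger *zap.Logger) invitations.Invitations {
        var svc invitations.Invitations = newInvitationsImpl() // hypothetical concrete implementation
        // ErrorLogging -> access Logging -> Recovering; recovery ends up as the outermost wrapper.
        return invitationsmw.WithLog(svc, logger, true)
    }
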
diff --git a/pkg/invitations/middleware/recovering_middleware.go b/pkg/invitations/middleware/recovering_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..195933ecce0d9766d8a92bbe1e52df6dc4d7064e
--- /dev/null
+++ b/pkg/invitations/middleware/recovering_middleware.go
@@ -0,0 +1,92 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/recovery
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/invitations -i Invitations -t ../../../assets/templates/middleware/recovery -o recovering_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+
+	"git.perx.ru/perxis/perxis-go/pkg/invitations"
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+	"go.uber.org/zap"
+)
+
+// recoveringMiddleware implements invitations.Invitations instrumented with panic recovery
+type recoveringMiddleware struct {
+	logger *zap.Logger
+	next   invitations.Invitations
+}
+
+// RecoveringMiddleware instruments an implementation of the invitations.Invitations with panic recovery
+func RecoveringMiddleware(logger *zap.Logger) Middleware {
+	return func(next invitations.Invitations) invitations.Invitations {
+		return &recoveringMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *recoveringMiddleware) Accept(ctx context.Context, invitationId string, userId string) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Accept(ctx, invitationId, userId)
+}
+
+func (m *recoveringMiddleware) Create(ctx context.Context, invitation *invitations.Invitation) (created *invitations.Invitation, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Create(ctx, invitation)
+}
+
+func (m *recoveringMiddleware) Delete(ctx context.Context, invitationId string) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Delete(ctx, invitationId)
+}
+
+func (m *recoveringMiddleware) Find(ctx context.Context, filter *invitations.Filter, opts *options.FindOptions) (invitations []*invitations.Invitation, total int, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Find(ctx, filter, opts)
+}
+
+func (m *recoveringMiddleware) Get(ctx context.Context, invitationId string) (invitation *invitations.Invitation, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Get(ctx, invitationId)
+}
diff --git a/pkg/invitations/mocks/Invitations.go b/pkg/invitations/mocks/Invitations.go
new file mode 100644
index 0000000000000000000000000000000000000000..610f9fff80d4867b35b80ff464bcf2a8a9d0e764
--- /dev/null
+++ b/pkg/invitations/mocks/Invitations.go
@@ -0,0 +1,120 @@
+// Code generated by mockery v2.7.4. DO NOT EDIT.
+
+package mocks
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/invitations"
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+	"github.com/stretchr/testify/mock"
+)
+
+// Invitations is an autogenerated mock type for the Invitations type
+type Invitations struct {
+	mock.Mock
+}
+
+// Accept provides a mock function with given fields: ctx, invitationId, userId
+func (_m *Invitations) Accept(ctx context.Context, invitationId string, userId string) error {
+	ret := _m.Called(ctx, invitationId, userId)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok {
+		r0 = rf(ctx, invitationId, userId)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Create provides a mock function with given fields: ctx, invitation
+func (_m *Invitations) Create(ctx context.Context, invitation *invitations.Invitation) (*invitations.Invitation, error) {
+	ret := _m.Called(ctx, invitation)
+
+	var r0 *invitations.Invitation
+	if rf, ok := ret.Get(0).(func(context.Context, *invitations.Invitation) *invitations.Invitation); ok {
+		r0 = rf(ctx, invitation)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*invitations.Invitation)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, *invitations.Invitation) error); ok {
+		r1 = rf(ctx, invitation)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Delete provides a mock function with given fields: ctx, invitationId
+func (_m *Invitations) Delete(ctx context.Context, invitationId string) error {
+	ret := _m.Called(ctx, invitationId)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string) error); ok {
+		r0 = rf(ctx, invitationId)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Find provides a mock function with given fields: ctx, filter, opts
+func (_m *Invitations) Find(ctx context.Context, filter *invitations.Filter, opts *options.FindOptions) ([]*invitations.Invitation, int, error) {
+	ret := _m.Called(ctx, filter, opts)
+
+	var r0 []*invitations.Invitation
+	if rf, ok := ret.Get(0).(func(context.Context, *invitations.Filter, *options.FindOptions) []*invitations.Invitation); ok {
+		r0 = rf(ctx, filter, opts)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*invitations.Invitation)
+		}
+	}
+
+	var r1 int
+	if rf, ok := ret.Get(1).(func(context.Context, *invitations.Filter, *options.FindOptions) int); ok {
+		r1 = rf(ctx, filter, opts)
+	} else {
+		r1 = ret.Get(1).(int)
+	}
+
+	var r2 error
+	if rf, ok := ret.Get(2).(func(context.Context, *invitations.Filter, *options.FindOptions) error); ok {
+		r2 = rf(ctx, filter, opts)
+	} else {
+		r2 = ret.Error(2)
+	}
+
+	return r0, r1, r2
+}
+
+// Get provides a mock function with given fields: ctx, invitationId
+func (_m *Invitations) Get(ctx context.Context, invitationId string) (*invitations.Invitation, error) {
+	ret := _m.Called(ctx, invitationId)
+
+	var r0 *invitations.Invitation
+	if rf, ok := ret.Get(0).(func(context.Context, string) *invitations.Invitation); ok {
+		r0 = rf(ctx, invitationId)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*invitations.Invitation)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string) error); ok {
+		r1 = rf(ctx, invitationId)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
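
The mockery-generated mock satisfies the invitations.Invitations interface, so it can replace the real service in unit tests; expectations are declared through testify's mock API. A short, illustrative test:

    package mocks_test

    import (
        "context"
        "testing"

        "git.perx.ru/perxis/perxis-go/pkg/invitations"
        "git.perx.ru/perxis/perxis-go/pkg/invitations/mocks"
        "github.com/stretchr/testify/mock"
        "github.com/stretchr/testify/require"
    )

    func TestAccept(t *testing.T) {
        svc := &mocks.Invitations{}
        svc.On("Get", mock.Anything, "inv-1").
            Return(&invitations.Invitation{ID: "inv-1"}, nil).Once()
        svc.On("Accept", mock.Anything, "inv-1", "user-1").Return(nil).Once()

        inv, err := svc.Get(context.Background(), "inv-1")
        require.NoError(t, err)
        require.NoError(t, svc.Accept(context.Background(), inv.ID, "user-1"))
        svc.AssertExpectations(t)
    }
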
diff --git a/pkg/invitations/service.go b/pkg/invitations/service.go
new file mode 100644
index 0000000000000000000000000000000000000000..26426d67462cdcb51fc7fe8f1bd60dc1fde69260
--- /dev/null
+++ b/pkg/invitations/service.go
@@ -0,0 +1,27 @@
+package invitations
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+)
+
+type Filter struct {
+	ID      []string
+	Email   []string
+	OrgID   []string
+	SpaceID []string
+	OwnerID []string
+	Role    []string
+}
+
+// @microgen grpc
+// @protobuf git.perx.ru/perxis/perxis-go/proto/invitations
+// @grpc-addr content.invitations.Invitations
+type Invitations interface {
+	Create(ctx context.Context, invitation *Invitation) (created *Invitation, err error)
+	Get(ctx context.Context, invitationId string) (invitation *Invitation, err error)
+	Accept(ctx context.Context, invitationId, userId string) (err error)
+	Find(ctx context.Context, filter *Filter, opts *options.FindOptions) (invitations []*Invitation, total int, err error)
+	Delete(ctx context.Context, invitationId string) (err error)
+}
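
For reference, a typical consumer call against this interface looks as follows (the helper name is illustrative):

    import (
        "context"

        "git.perx.ru/perxis/perxis-go/pkg/invitations"
        "git.perx.ru/perxis/perxis-go/pkg/options"
    )

    // pendingForSpace is a hypothetical helper that lists invitations issued for a space.
    func pendingForSpace(ctx context.Context, svc invitations.Invitations, spaceID string) ([]*invitations.Invitation, int, error) {
        filter := &invitations.Filter{SpaceID: []string{spaceID}}
        return svc.Find(ctx, filter, &options.FindOptions{})
    }
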
diff --git a/pkg/invitations/transport/client.microgen.go b/pkg/invitations/transport/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..6f896b0e67fa88a1eaa539c305a47fada11c9c8b
--- /dev/null
+++ b/pkg/invitations/transport/client.microgen.go
@@ -0,0 +1,79 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+	"errors"
+
+	invitations "git.perx.ru/perxis/perxis-go/pkg/invitations"
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+	codes "google.golang.org/grpc/codes"
+	status "google.golang.org/grpc/status"
+)
+
+func (set EndpointsSet) Create(arg0 context.Context, arg1 *invitations.Invitation) (res0 *invitations.Invitation, res1 error) {
+	request := CreateRequest{Invitation: arg1}
+	response, res1 := set.CreateEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*CreateResponse).Created, res1
+}
+
+func (set EndpointsSet) Get(arg0 context.Context, arg1 string) (res0 *invitations.Invitation, res1 error) {
+	request := GetRequest{InvitationId: arg1}
+	response, res1 := set.GetEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*GetResponse).Invitation, res1
+}
+
+func (set EndpointsSet) Accept(arg0 context.Context, arg1 string, arg2 string) (res0 error) {
+	request := AcceptRequest{
+		InvitationId: arg1,
+		UserId:       arg2,
+	}
+	_, res0 = set.AcceptEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Find(arg0 context.Context, arg1 *invitations.Filter, arg2 *options.FindOptions) (res0 []*invitations.Invitation, res1 int, res2 error) {
+	request := FindRequest{
+		Filter: arg1,
+		Opts:   arg2,
+	}
+	response, res2 := set.FindEndpoint(arg0, &request)
+	if res2 != nil {
+		if e, ok := status.FromError(res2); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res2 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*FindResponse).Invitations, response.(*FindResponse).Total, res2
+}
+
+func (set EndpointsSet) Delete(arg0 context.Context, arg1 string) (res0 error) {
+	request := DeleteRequest{InvitationId: arg1}
+	_, res0 = set.DeleteEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
diff --git a/pkg/invitations/transport/endpoints.microgen.go b/pkg/invitations/transport/endpoints.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..70d78bbcf04e68ad97c36d90b4f66f3287f108d2
--- /dev/null
+++ b/pkg/invitations/transport/endpoints.microgen.go
@@ -0,0 +1,14 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import endpoint "github.com/go-kit/kit/endpoint"
+
+// EndpointsSet implements Invitations API and used for transport purposes.
+type EndpointsSet struct {
+	CreateEndpoint endpoint.Endpoint
+	GetEndpoint    endpoint.Endpoint
+	AcceptEndpoint endpoint.Endpoint
+	FindEndpoint   endpoint.Endpoint
+	DeleteEndpoint endpoint.Endpoint
+}
diff --git a/pkg/invitations/transport/exchanges.microgen.go b/pkg/invitations/transport/exchanges.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..bb36e466cbcb25584c462931a3eab55ffd90732c
--- /dev/null
+++ b/pkg/invitations/transport/exchanges.microgen.go
@@ -0,0 +1,46 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/invitations"
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+)
+
+type (
+	CreateRequest struct {
+		Invitation *invitations.Invitation `json:"invitation"`
+	}
+	CreateResponse struct {
+		Created *invitations.Invitation `json:"created"`
+	}
+
+	GetRequest struct {
+		InvitationId string `json:"invitation_id"`
+	}
+	GetResponse struct {
+		Invitation *invitations.Invitation `json:"invitation"`
+	}
+
+	AcceptRequest struct {
+		InvitationId string `json:"invitation_id"`
+		UserId       string `json:"user_id"`
+	}
+	// Formal exchange type, please do not delete.
+	AcceptResponse struct{}
+
+	FindRequest struct {
+		Filter *invitations.Filter  `json:"filter"`
+		Opts   *options.FindOptions `json:"opts"`
+	}
+	FindResponse struct {
+		Invitations []*invitations.Invitation `json:"invitations"`
+		Total       int                       `json:"total"`
+	}
+
+	DeleteRequest struct {
+		InvitationId string `json:"invitation_id"`
+	}
+	// Formal exchange type, please do not delete.
+	DeleteResponse struct{}
+)
diff --git a/pkg/invitations/transport/grpc/client.microgen.go b/pkg/invitations/transport/grpc/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..e319e19f9d32b9d3efa1e0d6fe485372e177efa2
--- /dev/null
+++ b/pkg/invitations/transport/grpc/client.microgen.go
@@ -0,0 +1,54 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/invitations/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/invitations"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	grpc "google.golang.org/grpc"
+)
+
+func NewGRPCClient(conn *grpc.ClientConn, addr string, opts ...grpckit.ClientOption) transport.EndpointsSet {
+	if addr == "" {
+		addr = "content.invitations.Invitations"
+	}
+	return transport.EndpointsSet{
+		AcceptEndpoint: grpckit.NewClient(
+			conn, addr, "Accept",
+			_Encode_Accept_Request,
+			_Decode_Accept_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		CreateEndpoint: grpckit.NewClient(
+			conn, addr, "Create",
+			_Encode_Create_Request,
+			_Decode_Create_Response,
+			pb.CreateResponse{},
+			opts...,
+		).Endpoint(),
+		DeleteEndpoint: grpckit.NewClient(
+			conn, addr, "Delete",
+			_Encode_Delete_Request,
+			_Decode_Delete_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		FindEndpoint: grpckit.NewClient(
+			conn, addr, "Find",
+			_Encode_Find_Request,
+			_Decode_Find_Response,
+			pb.FindResponse{},
+			opts...,
+		).Endpoint(),
+		GetEndpoint: grpckit.NewClient(
+			conn, addr, "Get",
+			_Encode_Get_Request,
+			_Decode_Get_Response,
+			pb.GetResponse{},
+			opts...,
+		).Endpoint(),
+	}
+}
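
NewGRPCClient returns a transport.EndpointsSet, which itself implements invitations.Invitations (its methods are defined in client.microgen.go above), so the remote client can be wrapped by the same middleware chain. A sketch assuming an already established *grpc.ClientConn:

    import (
        "git.perx.ru/perxis/perxis-go/pkg/invitations"
        invitationsmw "git.perx.ru/perxis/perxis-go/pkg/invitations/middleware"
        transportgrpc "git.perx.ru/perxis/perxis-go/pkg/invitations/transport/grpc"
        "go.uber.org/zap"
        "google.golang.org/grpc"
    )

    func newRemote(conn *grpc.ClientConn, logger *zap.Logger) invitations.Invitations {
        // An empty addr falls back to "content.invitations.Invitations".
        var svc invitations.Invitations = transportgrpc.NewGRPCClient(conn, "")
        return invitationsmw.WithLog(svc, logger, false)
    }
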
diff --git a/pkg/invitations/transport/grpc/protobuf_endpoint_converters.microgen.go b/pkg/invitations/transport/grpc/protobuf_endpoint_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..85403cab8c13fb71612e275a2851c02a904d8abc
--- /dev/null
+++ b/pkg/invitations/transport/grpc/protobuf_endpoint_converters.microgen.go
@@ -0,0 +1,223 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// Please, do not change functions names!
+package transportgrpc
+
+import (
+	"context"
+	"errors"
+
+	transport "git.perx.ru/perxis/perxis-go/pkg/invitations/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/invitations"
+	empty "github.com/golang/protobuf/ptypes/empty"
+)
+
+func _Encode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*transport.CreateRequest)
+	pbInvitation, err := PtrInvitationToProto(req.Invitation)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateRequest{Invitation: pbInvitation}, nil
+}
+
+func _Encode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*transport.GetRequest)
+	return &pb.GetRequest{InvitationId: req.InvitationId}, nil
+}
+
+func _Encode_Accept_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil AcceptRequest")
+	}
+	req := request.(*transport.AcceptRequest)
+	return &pb.AcceptRequest{
+		InvitationId: req.InvitationId,
+		UserId:       req.UserId,
+	}, nil
+}
+
+func _Encode_Find_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindRequest")
+	}
+	req := request.(*transport.FindRequest)
+	reqFilter, err := PtrFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOpts, err := PtrServicesFindOptionsToProto(req.Opts)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindRequest{
+		Filter: reqFilter,
+		Opts:   reqOpts,
+	}, nil
+}
+
+func _Encode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*transport.DeleteRequest)
+	return &pb.DeleteRequest{InvitationId: req.InvitationId}, nil
+}
+
+func _Encode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*transport.CreateResponse)
+	respInvitation, err := PtrInvitationToProto(resp.Created)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateResponse{Invitation: respInvitation}, nil
+}
+
+func _Encode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*transport.GetResponse)
+	respInvitation, err := PtrInvitationToProto(resp.Invitation)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetResponse{Invitation: respInvitation}, nil
+}
+
+func _Encode_Accept_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Find_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindResponse")
+	}
+	resp := response.(*transport.FindResponse)
+	respInvitations, err := ListPtrInvitationToProto(resp.Invitations)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindResponse{
+		Invitations: respInvitations,
+		Total:       int64(resp.Total),
+	}, nil
+}
+
+func _Encode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*pb.CreateRequest)
+	invitation, err := ProtoToPtrInvitation(req.Invitation)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateRequest{Invitation: invitation}, nil
+}
+
+func _Decode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*pb.GetRequest)
+	return &transport.GetRequest{InvitationId: string(req.InvitationId)}, nil
+}
+
+func _Decode_Accept_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil AcceptRequest")
+	}
+	req := request.(*pb.AcceptRequest)
+	return &transport.AcceptRequest{
+		InvitationId: string(req.InvitationId),
+		UserId:       string(req.UserId),
+	}, nil
+}
+
+func _Decode_Find_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindRequest")
+	}
+	req := request.(*pb.FindRequest)
+	reqFilter, err := ProtoToPtrFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOpts, err := ProtoToPtrServicesFindOptions(req.Opts)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindRequest{
+		Filter: reqFilter,
+		Opts:   reqOpts,
+	}, nil
+}
+
+func _Decode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*pb.DeleteRequest)
+	return &transport.DeleteRequest{InvitationId: string(req.InvitationId)}, nil
+}
+
+func _Decode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*pb.CreateResponse)
+	respInvitation, err := ProtoToPtrInvitation(resp.Invitation)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateResponse{Created: respInvitation}, nil
+}
+
+func _Decode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*pb.GetResponse)
+	respInvitation, err := ProtoToPtrInvitation(resp.Invitation)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetResponse{Invitation: respInvitation}, nil
+}
+
+func _Decode_Accept_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Find_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindResponse")
+	}
+	resp := response.(*pb.FindResponse)
+	respInvitations, err := ProtoToListPtrInvitation(resp.Invitations)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindResponse{
+		Invitations: respInvitations,
+		Total:       int(resp.Total),
+	}, nil
+}
+
+func _Decode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
diff --git a/pkg/invitations/transport/grpc/protobuf_type_converters.microgen.go b/pkg/invitations/transport/grpc/protobuf_type_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..9899e39885a8ca07de15f74f2eeda24d65296928
--- /dev/null
+++ b/pkg/invitations/transport/grpc/protobuf_type_converters.microgen.go
@@ -0,0 +1,161 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// It is better for you if you do not change functions names!
+// This file will never be overwritten.
+package transportgrpc
+
+import (
+	"time"
+
+	service "git.perx.ru/perxis/perxis-go/pkg/invitations"
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+	pb "git.perx.ru/perxis/perxis-go/proto/invitations"
+	"github.com/golang/protobuf/ptypes"
+	timestamp "github.com/golang/protobuf/ptypes/timestamp"
+)
+
+func PtrTimeTimeToProto(validUntil *time.Time) (*timestamp.Timestamp, error) {
+	if validUntil == nil {
+		return nil, nil
+	}
+	t, err := ptypes.TimestampProto(*validUntil)
+	if err != nil {
+		return nil, err
+	}
+	return t, err
+}
+
+func ProtoToPtrTimeTime(protoValidUntil *timestamp.Timestamp) (*time.Time, error) {
+	if protoValidUntil == nil {
+		return nil, nil
+	}
+	t, err := ptypes.Timestamp(protoValidUntil)
+	if err != nil {
+		return nil, err
+	}
+	return &t, nil
+}
+
+func PtrInvitationToProto(invitation *service.Invitation) (*pb.Invitation, error) {
+	if invitation == nil {
+		return nil, nil
+	}
+	pi := &pb.Invitation{
+		Id:      invitation.ID,
+		Email:   invitation.Email,
+		OrgId:   invitation.OrgID,
+		SpaceId: invitation.SpaceID,
+		OwnerId: invitation.OwnerID,
+		Role:    invitation.Role,
+	}
+	if invitation.CreatedAt != nil && !invitation.CreatedAt.IsZero() {
+		t, _ := ptypes.TimestampProto(*invitation.CreatedAt)
+		pi.CreatedAt = t
+	}
+	if invitation.ValidUntil != nil && !invitation.ValidUntil.IsZero() {
+		t, _ := ptypes.TimestampProto(*invitation.ValidUntil)
+		pi.ValidUntil = t
+	}
+	return pi, nil
+}
+
+func ProtoToPtrInvitation(protoInvitation *pb.Invitation) (*service.Invitation, error) {
+	if protoInvitation == nil {
+		return nil, nil
+	}
+	i := &service.Invitation{
+		ID:      protoInvitation.Id,
+		Email:   protoInvitation.Email,
+		OrgID:   protoInvitation.OrgId,
+		SpaceID: protoInvitation.SpaceId,
+		OwnerID: protoInvitation.OwnerId,
+		Role:    protoInvitation.Role,
+	}
+	if protoInvitation.CreatedAt != nil {
+		t, _ := ptypes.Timestamp(protoInvitation.CreatedAt)
+		i.CreatedAt = &t
+	}
+	if protoInvitation.ValidUntil != nil {
+		t, _ := ptypes.Timestamp(protoInvitation.ValidUntil)
+		i.ValidUntil = &t
+	}
+	return i, nil
+}
+
+func PtrFilterToProto(filter *service.Filter) (*pb.Filter, error) {
+	if filter == nil {
+		return nil, nil
+	}
+	return &pb.Filter{
+		Id:      filter.ID,
+		Email:   filter.Email,
+		OrgId:   filter.OrgID,
+		SpaceId: filter.SpaceID,
+		OwnerId: filter.OwnerID,
+		Role:    filter.Role,
+	}, nil
+}
+
+func ProtoToPtrFilter(protoFilter *pb.Filter) (*service.Filter, error) {
+	if protoFilter == nil {
+		return nil, nil
+	}
+	return &service.Filter{
+		ID:      protoFilter.Id,
+		Email:   protoFilter.Email,
+		OrgID:   protoFilter.OrgId,
+		SpaceID: protoFilter.SpaceId,
+		OwnerID: protoFilter.OwnerId,
+		Role:    protoFilter.Role,
+	}, nil
+}
+
+func PtrServicesFindOptionsToProto(opts *options.FindOptions) (*pb.FindOptions, error) {
+	if opts == nil {
+		return nil, nil
+	}
+	return &pb.FindOptions{
+		Sort:     opts.Sort,
+		PageNum:  int32(opts.PageNum),
+		PageSize: int32(opts.PageSize),
+	}, nil
+}
+
+func ProtoToPtrServicesFindOptions(protoOpts *pb.FindOptions) (*options.FindOptions, error) {
+	if protoOpts == nil {
+		return nil, nil
+	}
+	return &options.FindOptions{
+		SortOptions: options.SortOptions{
+			Sort: protoOpts.Sort,
+		},
+		PaginationOptions: options.PaginationOptions{
+			PageNum:  int(protoOpts.PageNum),
+			PageSize: int(protoOpts.PageSize),
+		},
+	}, nil
+}
+
+func ListPtrInvitationToProto(invitations []*service.Invitation) ([]*pb.Invitation, error) {
+	protoInvitations := make([]*pb.Invitation, 0, len(invitations))
+	for _, i := range invitations {
+		pi, err := PtrInvitationToProto(i)
+		if err != nil {
+			return nil, err
+		}
+		protoInvitations = append(protoInvitations, pi)
+	}
+	return protoInvitations, nil
+}
+
+func ProtoToListPtrInvitation(protoInvitations []*pb.Invitation) ([]*service.Invitation, error) {
+	invitations := make([]*service.Invitation, 0, len(protoInvitations))
+	for _, pi := range protoInvitations {
+		p, err := ProtoToPtrInvitation(pi)
+		if err != nil {
+			return nil, err
+		}
+		invitations = append(invitations, p)
+	}
+	return invitations, nil
+}
diff --git a/pkg/invitations/transport/grpc/server.microgen.go b/pkg/invitations/transport/grpc/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..fa7cf241737173d3c41cadaa2ae9fa17ecb43fe7
--- /dev/null
+++ b/pkg/invitations/transport/grpc/server.microgen.go
@@ -0,0 +1,97 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// DO NOT EDIT.
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/invitations/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/invitations"
+	grpc "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	context "golang.org/x/net/context"
+)
+
+type invitationsServer struct {
+	create grpc.Handler
+	get    grpc.Handler
+	accept grpc.Handler
+	find   grpc.Handler
+	delete grpc.Handler
+
+	pb.UnimplementedInvitationsServer
+}
+
+func NewGRPCServer(endpoints *transport.EndpointsSet, opts ...grpc.ServerOption) pb.InvitationsServer {
+	return &invitationsServer{
+		accept: grpc.NewServer(
+			endpoints.AcceptEndpoint,
+			_Decode_Accept_Request,
+			_Encode_Accept_Response,
+			opts...,
+		),
+		create: grpc.NewServer(
+			endpoints.CreateEndpoint,
+			_Decode_Create_Request,
+			_Encode_Create_Response,
+			opts...,
+		),
+		delete: grpc.NewServer(
+			endpoints.DeleteEndpoint,
+			_Decode_Delete_Request,
+			_Encode_Delete_Response,
+			opts...,
+		),
+		find: grpc.NewServer(
+			endpoints.FindEndpoint,
+			_Decode_Find_Request,
+			_Encode_Find_Response,
+			opts...,
+		),
+		get: grpc.NewServer(
+			endpoints.GetEndpoint,
+			_Decode_Get_Request,
+			_Encode_Get_Response,
+			opts...,
+		),
+	}
+}
+
+func (S *invitationsServer) Create(ctx context.Context, req *pb.CreateRequest) (*pb.CreateResponse, error) {
+	_, resp, err := S.create.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.CreateResponse), nil
+}
+
+func (S *invitationsServer) Get(ctx context.Context, req *pb.GetRequest) (*pb.GetResponse, error) {
+	_, resp, err := S.get.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetResponse), nil
+}
+
+func (S *invitationsServer) Accept(ctx context.Context, req *pb.AcceptRequest) (*empty.Empty, error) {
+	_, resp, err := S.accept.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *invitationsServer) Find(ctx context.Context, req *pb.FindRequest) (*pb.FindResponse, error) {
+	_, resp, err := S.find.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.FindResponse), nil
+}
+
+func (S *invitationsServer) Delete(ctx context.Context, req *pb.DeleteRequest) (*empty.Empty, error) {
+	_, resp, err := S.delete.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
diff --git a/pkg/invitations/transport/server.microgen.go b/pkg/invitations/transport/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..326659f2b813d8f6b306d8b9498c0590c6434011
--- /dev/null
+++ b/pkg/invitations/transport/server.microgen.go
@@ -0,0 +1,63 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+
+	invitations "git.perx.ru/perxis/perxis-go/pkg/invitations"
+	endpoint "github.com/go-kit/kit/endpoint"
+)
+
+func Endpoints(svc invitations.Invitations) EndpointsSet {
+	return EndpointsSet{
+		AcceptEndpoint: AcceptEndpoint(svc),
+		CreateEndpoint: CreateEndpoint(svc),
+		DeleteEndpoint: DeleteEndpoint(svc),
+		FindEndpoint:   FindEndpoint(svc),
+		GetEndpoint:    GetEndpoint(svc),
+	}
+}
+
+func CreateEndpoint(svc invitations.Invitations) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*CreateRequest)
+		res0, res1 := svc.Create(arg0, req.Invitation)
+		return &CreateResponse{Created: res0}, res1
+	}
+}
+
+func GetEndpoint(svc invitations.Invitations) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetRequest)
+		res0, res1 := svc.Get(arg0, req.InvitationId)
+		return &GetResponse{Invitation: res0}, res1
+	}
+}
+
+func AcceptEndpoint(svc invitations.Invitations) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*AcceptRequest)
+		res0 := svc.Accept(arg0, req.InvitationId, req.UserId)
+		return &AcceptResponse{}, res0
+	}
+}
+
+func FindEndpoint(svc invitations.Invitations) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*FindRequest)
+		res0, res1, res2 := svc.Find(arg0, req.Filter, req.Opts)
+		return &FindResponse{
+			Invitations: res0,
+			Total:       res1,
+		}, res2
+	}
+}
+
+func DeleteEndpoint(svc invitations.Invitations) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*DeleteRequest)
+		res0 := svc.Delete(arg0, req.InvitationId)
+		return &DeleteResponse{}, res0
+	}
+}
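
On the server side the same pieces are assembled in reverse: build the endpoint set from a concrete service and hand it to the generated gRPC server. A sketch; RegisterInvitationsServer follows the standard protoc-gen-go-grpc naming for this service and is assumed to exist in the proto package:

    import (
        "git.perx.ru/perxis/perxis-go/pkg/invitations"
        "git.perx.ru/perxis/perxis-go/pkg/invitations/transport"
        transportgrpc "git.perx.ru/perxis/perxis-go/pkg/invitations/transport/grpc"
        pb "git.perx.ru/perxis/perxis-go/proto/invitations"
        "google.golang.org/grpc"
    )

    func registerInvitations(srv *grpc.Server, svc invitations.Invitations) {
        eps := transport.Endpoints(svc)
        pb.RegisterInvitationsServer(srv, transportgrpc.NewGRPCServer(&eps))
    }
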
diff --git a/pkg/items/codec.go b/pkg/items/codec.go
new file mode 100644
index 0000000000000000000000000000000000000000..6264c3b582a2af08c1746c763807b315c0ae2fa9
--- /dev/null
+++ b/pkg/items/codec.go
@@ -0,0 +1,9 @@
+package items
+
+type Encoder interface {
+	Encode(item *Item) (any, error)
+}
+
+type Decoder interface {
+	Decode(value any, item *Item) error
+}
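
Encoder and Decoder are small adapter interfaces for converting an *Item to and from a storage- or transport-specific representation. A toy, purely illustrative implementation in the same package that uses the raw data map as the encoded form:

    // mapCodec is a hypothetical codec that round-trips only the item data.
    type mapCodec struct{}

    func (mapCodec) Encode(item *Item) (any, error) {
        if item == nil {
            return nil, nil
        }
        return item.GetData(""), nil
    }

    func (mapCodec) Decode(value any, item *Item) error {
        m, ok := value.(map[string]interface{})
        if !ok {
            return ErrIncorrectValue
        }
        item.SetData("", m)
        return nil
    }
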
diff --git a/pkg/items/context.go b/pkg/items/context.go
new file mode 100644
index 0000000000000000000000000000000000000000..87e600e5b40da50381245a626e8228ab20485de8
--- /dev/null
+++ b/pkg/items/context.go
@@ -0,0 +1,71 @@
+package items
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/clients"
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	"git.perx.ru/perxis/perxis-go/pkg/spaces"
+)
+
+type Context struct {
+	Items
+	Clients clients.Clients
+
+	SpaceID      string
+	EnvID        string
+	CollectionID string
+	ItemID       string
+	Item         *Item
+	Space        *spaces.Space
+	Environment  *environments.Environment
+
+	ViewSpaceID       string
+	ViewEnvironmentID string
+	ViewCollectionID  string
+	ViewSpace         *spaces.Space
+	ViewEnvironment   *environments.Environment
+}
+
+type itemsCtx struct{}
+
+func WithContext(ctx context.Context, itmCtx *Context) context.Context {
+	if ctx == nil {
+		ctx = context.Background()
+	}
+
+	if itmCtx.ViewSpaceID == "" {
+		itmCtx.ViewSpaceID = itmCtx.SpaceID
+	}
+	if itmCtx.ViewEnvironmentID == "" {
+		itmCtx.ViewEnvironmentID = itmCtx.EnvID
+	}
+	if itmCtx.ViewCollectionID == "" {
+		itmCtx.ViewCollectionID = itmCtx.CollectionID
+	}
+	if itmCtx.ViewSpace == nil {
+		itmCtx.ViewSpace = itmCtx.Space
+	}
+	if itmCtx.ViewEnvironment == nil {
+		itmCtx.ViewEnvironment = itmCtx.Environment
+	}
+
+	p, _ := ctx.Value(itemsCtx{}).(*Context)
+	if p != nil {
+		*p = *itmCtx
+		return ctx
+	}
+
+	return context.WithValue(ctx, itemsCtx{}, itmCtx)
+}
+
+func GetContext(ctx context.Context) *Context {
+	if ctx == nil {
+		return new(Context)
+	}
+	p, _ := ctx.Value(itemsCtx{}).(*Context)
+	if p == nil {
+		return new(Context)
+	}
+	return p
+}
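
WithContext stores the request-scoped *Context (reusing an already stored value when present), and GetContext always returns a non-nil *Context; the View* fields default to the primary identifiers. A usage sketch:

    package main

    import (
        "context"
        "fmt"

        "git.perx.ru/perxis/perxis-go/pkg/items"
    )

    func main() {
        ctx := items.WithContext(context.Background(), &items.Context{
            SpaceID:      "space",
            EnvID:        "master",
            CollectionID: "articles",
            ItemID:       "article-1",
        })
        itmCtx := items.GetContext(ctx)
        // The View* fields fall back to the primary identifiers when left empty.
        fmt.Println(itmCtx.ViewSpaceID, itmCtx.ViewEnvironmentID, itmCtx.ViewCollectionID)
    }
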
diff --git a/pkg/items/events.go b/pkg/items/events.go
new file mode 100644
index 0000000000000000000000000000000000000000..14ff72216edb875ed3ca15cc879913fdb21a7ed7
--- /dev/null
+++ b/pkg/items/events.go
@@ -0,0 +1,140 @@
+package items
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	pb "git.perx.ru/perxis/perxis-go/proto/items"
+	"github.com/golang/protobuf/proto"
+)
+
+const (
+	EventCreateItem    = "create_item"
+	EventUpdateItem    = "update_item"
+	EventPublishItem   = "publish_item"
+	EventUnpublishItem = "unpublish_item"
+	EventDeleteItem    = "delete_item"
+
+	DefaultEventSubject = "content.{{.EventType}}.{{.SpaceID}}.{{.EnvID}}.{{.CollectionID}}.{{.ItemID}}"
+)
+
+var (
+	ErrInvalidEventType = func(expected string, got any) error {
+		return errors.Errorf("invalid message type: expected '%s', got '%T'", expected, got)
+	}
+)
+
+type EventCreate struct {
+	SpaceID      string
+	EnvID        string
+	CollectionID string
+	ItemID       string
+}
+
+func (e EventCreate) ToProto() (proto.Message, error) {
+	return &pb.EventCreate{SpaceId: e.SpaceID, EnvId: e.EnvID, CollectionId: e.CollectionID, ItemId: e.ItemID}, nil
+}
+
+func (e *EventCreate) FromProto(message proto.Message) error {
+	p, ok := message.(*pb.EventCreate)
+	if !ok {
+		return ErrInvalidEventType("*pb.EventCreate", message)
+	}
+
+	e.SpaceID = p.SpaceId
+	e.EnvID = p.EnvId
+	e.CollectionID = p.CollectionId
+	e.ItemID = p.ItemId
+	return nil
+}
+
+type EventUpdate struct {
+	SpaceID      string
+	EnvID        string
+	CollectionID string
+	ItemID       string
+}
+
+func (e EventUpdate) ToProto() (proto.Message, error) {
+	return &pb.EventUpdate{SpaceId: e.SpaceID, EnvId: e.EnvID, CollectionId: e.CollectionID, ItemId: e.ItemID}, nil
+}
+
+func (e *EventUpdate) FromProto(message proto.Message) error {
+	p, ok := message.(*pb.EventUpdate)
+	if !ok {
+		return ErrInvalidEventType("*pb.EventUpdate", message)
+	}
+
+	e.SpaceID = p.SpaceId
+	e.EnvID = p.EnvId
+	e.CollectionID = p.CollectionId
+	e.ItemID = p.ItemId
+	return nil
+}
+
+type EventPublish struct {
+	SpaceID      string
+	EnvID        string
+	CollectionID string
+	ItemID       string
+}
+
+func (e EventPublish) ToProto() (proto.Message, error) {
+	return &pb.EventPublish{SpaceId: e.SpaceID, EnvId: e.EnvID, CollectionId: e.CollectionID, ItemId: e.ItemID}, nil
+}
+
+func (e *EventPublish) FromProto(message proto.Message) error {
+	p, ok := message.(*pb.EventPublish)
+	if !ok {
+		return ErrInvalidEventType("*pb.EventPublish", message)
+	}
+	e.SpaceID = p.SpaceId
+	e.EnvID = p.EnvId
+	e.CollectionID = p.CollectionId
+	e.ItemID = p.ItemId
+	return nil
+}
+
+type EventUnpublish struct {
+	SpaceID      string
+	EnvID        string
+	CollectionID string
+	ItemID       string
+}
+
+func (e EventUnpublish) ToProto() (proto.Message, error) {
+	return &pb.EventUnpublish{SpaceId: e.SpaceID, EnvId: e.EnvID, CollectionId: e.CollectionID, ItemId: e.ItemID}, nil
+}
+
+func (e *EventUnpublish) FromProto(message proto.Message) error {
+	p, ok := message.(*pb.EventUnpublish)
+	if !ok {
+		return ErrInvalidEventType("*pb.EventUnpublish", message)
+	}
+	e.SpaceID = p.SpaceId
+	e.EnvID = p.EnvId
+	e.CollectionID = p.CollectionId
+	e.ItemID = p.ItemId
+	return nil
+}
+
+type EventDelete struct {
+	SpaceID      string
+	EnvID        string
+	CollectionID string
+	ItemID       string
+}
+
+func (e EventDelete) ToProto() (proto.Message, error) {
+	return &pb.EventDelete{SpaceId: e.SpaceID, EnvId: e.EnvID, CollectionId: e.CollectionID, ItemId: e.ItemID}, nil
+}
+
+func (e *EventDelete) FromProto(message proto.Message) error {
+	p, ok := message.(*pb.EventDelete)
+	if !ok {
+		return ErrInvalidEventType("*pb.EventDelete", message)
+	}
+	e.SpaceID = p.SpaceId
+	e.EnvID = p.EnvId
+	e.CollectionID = p.CollectionId
+	e.ItemID = p.ItemId
+	return nil
+}
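
Every event type converts to its protobuf counterpart and back; FromProto checks the concrete message type and reports ErrInvalidEventType on a mismatch. An illustrative round trip for EventCreate (the other events behave the same way):

    import "git.perx.ru/perxis/perxis-go/pkg/items"

    // roundTrip is an illustrative helper converting an event to proto and back.
    func roundTrip(ev items.EventCreate) (items.EventCreate, error) {
        msg, err := ev.ToProto()
        if err != nil {
            return items.EventCreate{}, err
        }
        var decoded items.EventCreate
        // FromProto fails with ErrInvalidEventType for anything other than *pb.EventCreate.
        err = decoded.FromProto(msg)
        return decoded, err
    }
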
diff --git a/pkg/items/item.go b/pkg/items/item.go
new file mode 100644
index 0000000000000000000000000000000000000000..fc3a5154f621b601c02761c126be3ce72aa979ca
--- /dev/null
+++ b/pkg/items/item.go
@@ -0,0 +1,566 @@
+package items
+
+import (
+	"context"
+	"fmt"
+	"reflect"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/data"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	pb "git.perx.ru/perxis/perxis-go/proto/items"
+	"google.golang.org/protobuf/types/known/structpb"
+	"google.golang.org/protobuf/types/known/timestamppb"
+)
+
+var (
+	ErrNotSystemField = errors.New("not a system field")
+	ErrIncorrectValue = errors.New("incorrect value")
+	ErrIncorrectField = errors.New("incorrect field")
+)
+
+type State int
+
+func (s State) String() string {
+	switch s {
+	case StateDraft:
+		return "Draft"
+	case StateArchived:
+		return "Archived"
+	case StateChanged:
+		return "Changed"
+	case StatePublished:
+		return "Published"
+	}
+	return "Unknown"
+}
+
+const (
+	StateDraft State = iota
+	StatePublished
+	StateChanged
+	StateArchived
+
+	StateMax = StateArchived
+
+	SoftDeleteSeparator = "___"
+)
+
+var PermissionsAllowAny = &Permissions{
+	Edit:       true,
+	Archive:    true,
+	Publish:    true,
+	SoftDelete: true,
+	HardDelete: true,
+}
+
+// SystemFields lists the names of the Item system fields
+var SystemFields = []string{
+	"id",
+	"space_id",
+	"env_id",
+	"collection_id",
+	"state",
+	"created_rev_at",
+	"created_by",
+	"created_at",
+	"updated_at",
+	"updated_by",
+	"revision_id",
+	"published_at",
+	"published_by",
+	"archived_at",
+	"archived_by",
+	"data",
+	"translations",
+	"locale",
+	"deleted",
+	"hidden",
+	"template",
+}
+
+type Permissions struct {
+	Edit       bool
+	Archive    bool
+	Publish    bool
+	SoftDelete bool
+	HardDelete bool
+}
+
+type Item struct {
+	ID           string                            `json:"id" bson:"_id"` // ID is the record identifier; generated automatically by the system when the first revision is saved.
+	SpaceID      string                            `json:"spaceId" bson:"-"`
+	EnvID        string                            `json:"envId" bson:"-"`
+	CollectionID string                            `json:"collectionId" bson:"-"`
+	State        State                             `json:"state" bson:"state"`
+	CreatedRevAt time.Time                         `json:"createdRevAt,omitempty" bson:"created_rev_at,omitempty"`
+	CreatedBy    string                            `json:"createdBy,omitempty" bson:"created_by,omitempty"`
+	CreatedAt    time.Time                         `json:"createdAt,omitempty" bson:"created_at,omitempty"`
+	UpdatedAt    time.Time                         `json:"updatedAt,omitempty" bson:"updated_at,omitempty"`
+	UpdatedBy    string                            `json:"updatedBy,omitempty" bson:"updated_by,omitempty"`
+	Data         map[string]interface{}            `json:"data" bson:"data"`
+	Locale       string                            `json:"locale" bson:"-"`
+	Translations map[string]map[string]interface{} `json:"translations" bson:"translations,omitempty"`
+	RevisionID   string                            `json:"revId,omitempty" bson:"revision_id"`
+	PublishedAt  time.Time                         `json:"publishedAt,omitempty" bson:"published_at,omitempty"`
+	PublishedBy  string                            `json:"publishedBy,omitempty" bson:"published_by,omitempty"`
+	ArchivedAt   time.Time                         `json:"archivedAt,omitempty" bson:"archived_at,omitempty"`
+	ArchivedBy   string                            `json:"archivedBy,omitempty" bson:"archived_by,omitempty"`
+	Permissions  *Permissions                      `json:"permissions,omitempty" bson:"-"`
+
+	// Record flags
+	Deleted  bool `json:"deleted" bson:"deleted,omitempty"`
+	Hidden   bool `json:"hidden" bson:"hidden,omitempty"`
+	Template bool `json:"template" bson:"template,omitempty"`
+}
+
+func NewItem(spaceID, envID, collID, id string, data map[string]interface{}, translations map[string]map[string]interface{}) *Item {
+	return &Item{
+		ID:           id,
+		SpaceID:      spaceID,
+		EnvID:        envID,
+		CollectionID: collID,
+		Data:         data,
+		Translations: translations,
+	}
+}
+
+func (i *Item) Clone() *Item {
+	itm := *i
+	itm.Data = data.CloneMap(i.Data)
+
+	if i.Translations != nil {
+		itm.Translations = make(map[string]map[string]interface{}, len(i.Translations))
+		for t, m := range i.Translations {
+			itm.Translations[t] = data.CloneMap(m)
+		}
+	}
+
+	return &itm
+}
+
+func (i *Item) ToMap() map[string]interface{} {
+	return map[string]interface{}{
+		"id":             i.ID,
+		"space_id":       i.SpaceID,
+		"env_id":         i.EnvID,
+		"collection_id":  i.CollectionID,
+		"state":          i.State,
+		"created_rev_at": i.CreatedRevAt,
+		"created_by":     i.CreatedBy,
+		"created_at":     i.CreatedAt,
+		"updated_at":     i.UpdatedAt,
+		"updated_by":     i.UpdatedBy,
+		"revision_id":    i.RevisionID,
+		"published_at":   i.PublishedAt,
+		"published_by":   i.PublishedBy,
+		"archived_at":    i.ArchivedAt,
+		"archived_by":    i.ArchivedBy,
+		"data":           i.Data,
+		"translations":   i.Translations,
+		"locale":         i.Locale,
+		"deleted":        i.Deleted,
+		"hidden":         i.Hidden,
+		"template":       i.Template,
+	}
+}
+
+func (i *Item) SetData(locale string, data map[string]interface{}) {
+	if locale != "" {
+		if i.Translations == nil {
+			i.Translations = make(map[string]map[string]interface{})
+		}
+		i.Translations[locale] = data
+		return
+	}
+	i.Data = data
+}
+
+func (i *Item) GetData(locale string) map[string]interface{} {
+	if locale != "" && i.Translations != nil {
+		translation := i.Translations[locale]
+		return MergeData(i.Data, translation)
+	}
+	return i.Data
+}
+
+func (i Item) Encode(ctx context.Context, s *schema.Schema) (*Item, error) {
+	if i.Data != nil {
+		dt, err := schema.Encode(nil, s, i.Data)
+		if err != nil {
+			//return errors.WithField(err, "data")
+			return nil, err
+		}
+		i.Data = dt.(map[string]interface{})
+	}
+	if len(i.Translations) > 0 {
+		for l, v := range i.Translations {
+			dt, err := schema.Encode(nil, s, v)
+			if err != nil {
+				//return errors.WithField(err, fmt.Sprintf("translations.%s", l))
+				return nil, err
+			}
+			i.Translations[l] = dt.(map[string]interface{})
+		}
+	}
+	return &i, nil
+}
+
+func (i Item) Decode(ctx context.Context, s *schema.Schema) (res *Item, err error) {
+
+	if i.Data != nil {
+		i.Data, err = s.Decode(ctx, i.Data)
+		if err != nil {
+			return nil, err
+			//return errors.WithField(err, "data")
+		}
+	}
+
+	return &i, nil
+}
+
+// MergeData merges the given maps; later maps override earlier ones, so values missing from a translation fall back to the original data
+func MergeData(data ...map[string]interface{}) map[string]interface{} {
+	merge := make(map[string]interface{})
+	for _, d := range data {
+		for k, v := range d {
+			merge[k] = v
+		}
+	}
+	return merge
+}
+
+// ClearData removes values that are unchanged compared to the original data
+func ClearData(data ...map[string]interface{}) map[string]interface{} {
+	var clear map[string]interface{}
+
+	for _, d := range data {
+		if clear == nil {
+			clear = d
+			continue
+		}
+
+		for k, v := range d {
+			if reflect.DeepEqual(clear[k], v) {
+				delete(clear, k)
+			}
+		}
+	}
+
+	return clear
+}
+
+type ProcessDataFunc func(ctx context.Context, sch *schema.Schema, data map[string]interface{}) (map[string]interface{}, error)
+
+func (i Item) ProcessData(ctx context.Context, sch *schema.Schema, fn ProcessDataFunc, locales ...string) (*Item, error) {
+	if i.Data != nil {
+		dt, err := fn(ctx, sch, i.Data)
+		if err != nil {
+			return nil, errors.WithField(err, "data")
+		}
+		i.Data = dt
+	}
+
+	tr := make(map[string]map[string]interface{})
+	for _, l := range locales {
+
+		data := i.GetData(l)
+
+		dt, err := fn(ctx, sch, data)
+		if err != nil {
+			return nil, errors.WithField(err, fmt.Sprintf("translations.%s", l))
+		}
+		tr[l] = dt
+
+	}
+
+	i.Translations = nil
+	if len(tr) > 0 {
+		i.Translations = tr
+	}
+
+	return &i, nil
+}
+
+// IsSystemField reports whether the given field name is an Item system field
+func IsSystemField(field string) bool {
+	if data.Contains(field, SystemFields) {
+		return true
+	}
+	return false
+}
+
+// SetSystemField sets the value of a system field
+func (i *Item) SetSystemField(field string, value interface{}) error {
+	ok := true
+	switch field {
+	case "id":
+		i.ID, ok = value.(string)
+	case "space_id":
+		i.SpaceID, ok = value.(string)
+	case "env_id":
+		i.EnvID, ok = value.(string)
+	case "collection_id":
+		i.CollectionID, ok = value.(string)
+	case "created_rev_at":
+		i.CreatedRevAt, ok = value.(time.Time)
+	case "created_by":
+		i.CreatedBy, ok = value.(string)
+	case "created_at":
+		i.CreatedAt, ok = value.(time.Time)
+	case "updated_by":
+		i.UpdatedBy, ok = value.(string)
+	case "updated_at":
+		i.UpdatedAt, ok = value.(time.Time)
+	case "revision_id":
+		i.RevisionID, ok = value.(string)
+	case "published_by":
+		i.PublishedBy, ok = value.(string)
+	case "published_at":
+		i.PublishedAt, ok = value.(time.Time)
+	case "hidden":
+		i.Hidden, ok = value.(bool)
+	case "deleted":
+		i.Deleted, ok = value.(bool)
+	case "template":
+		i.Template, ok = value.(bool)
+	default:
+		return ErrNotSystemField
+	}
+
+	if !ok {
+		return ErrIncorrectValue
+	}
+
+	return nil
+}
+
+// GetSystem returns the value of a system field
+func (i *Item) GetSystem(field string) (any, error) {
+	switch field {
+	case "id":
+		return i.ID, nil
+	case "space_id":
+		return i.SpaceID, nil
+	case "env_id":
+		return i.EnvID, nil
+	case "collection_id":
+		return i.CollectionID, nil
+	case "created_rev_at":
+		return i.CreatedRevAt, nil
+	case "created_by":
+		return i.CreatedBy, nil
+	case "created_at":
+		return i.CreatedAt, nil
+	case "updated_by":
+		return i.UpdatedBy, nil
+	case "updated_at":
+		return i.UpdatedAt, nil
+	case "revision_id":
+		return i.RevisionID, nil
+	case "published_by":
+		return i.PublishedBy, nil
+	case "published_at":
+		return i.PublishedAt, nil
+	case "hidden":
+		return i.Hidden, nil
+	case "deleted":
+		return i.Deleted, nil
+	case "template":
+		return i.Template, nil
+	}
+
+	return nil, ErrNotSystemField
+}
+
+func (i *Item) setItemData(field string, value interface{}) error {
+	if i.Data == nil {
+		i.Data = make(map[string]any)
+	}
+
+	return data.Set(field, i.Data, value)
+}
+
+func (i *Item) getItemData(field string) (any, error) {
+	if i.Data != nil {
+		if v, ok := data.Get(field, i.Data); ok {
+			return v, nil
+		}
+	}
+
+	return nil, ErrIncorrectField
+}
+
+// Set sets the value of the given field (system fields first, then Data)
+func (i *Item) Set(field string, value interface{}) error {
+	if err := i.SetSystemField(field, value); !errors.Is(err, ErrNotSystemField) {
+		return errors.Wrapf(err, "fail to set system field '%s' value", field)
+	}
+
+	return i.setItemData(field, value)
+}
+
+// Get returns the value of the given field (system fields first, then Data)
+func (i *Item) Get(field string) (any, error) {
+	if v, err := i.GetSystem(field); err == nil {
+		return v, err
+	}
+
+	return i.getItemData(field)
+}
+
+// GetSystemField returns the field definition for an Item system attribute
+func GetSystemField(fld string) (*field.Field, error) {
+	switch fld {
+	case "id", "space_id", "env_id", "collection_id", "revision_id":
+		return field.String(), nil
+	case "created_rev_at", "created_at", "updated_at", "published_at":
+		return field.Time(), nil
+	case "created_by", "updated_by", "published_by":
+		return field.String(), nil
+	case "hidden", "deleted", "template":
+		return field.Bool(), nil
+	}
+
+	return nil, ErrNotSystemField
+}
+
+// GetField returns the field definition, checking system fields first and then the schema
+func GetField(field string, sch *schema.Schema) (*field.Field, error) {
+	if f, err := GetSystemField(field); err == nil {
+		return f, err
+	}
+
+	f := sch.GetField(field)
+	if f == nil {
+		return nil, ErrIncorrectField
+	}
+
+	return f, nil
+}
+
+// GetSystemNamedFields returns the definitions of all Item system fields
+func GetSystemNamedFields() []field.NamedField {
+	fields := make([]field.NamedField, 0, len(SystemFields))
+	for _, n := range SystemFields {
+		f := field.NamedField{Name: n}
+		f.Field, _ = GetSystemField(n)
+		fields = append(fields, f)
+	}
+
+	return fields
+}
+
+func ItemToProto(item *Item) *pb.Item {
+	if item == nil {
+		return nil
+	}
+
+	protoItem := &pb.Item{
+		Id:           item.ID,
+		SpaceId:      item.SpaceID,
+		EnvId:        item.EnvID,
+		CollectionId: item.CollectionID,
+		State:        pb.Item_State(item.State),
+		CreatedBy:    item.CreatedBy,
+		UpdatedBy:    item.UpdatedBy,
+		RevisionId:   item.RevisionID,
+		PublishedBy:  item.PublishedBy,
+		ArchivedBy:   item.ArchivedBy,
+		Locale:       item.Locale,
+		Hidden:       item.Hidden,
+		Template:     item.Template,
+		Deleted:      item.Deleted,
+	}
+
+	if item.Data != nil {
+		protoItem.Data, _ = structpb.NewStruct(item.Data)
+	}
+	if item.Translations != nil {
+		protoItem.Translations = make(map[string]*structpb.Struct, len(item.Translations))
+		for k, v := range item.Translations {
+			protoItem.Translations[k], _ = structpb.NewStruct(v)
+		}
+	}
+
+	protoItem.CreatedRevAt = timestamppb.New(item.CreatedRevAt)
+	protoItem.PublishedAt = timestamppb.New(item.PublishedAt)
+	protoItem.ArchivedAt = timestamppb.New(item.ArchivedAt)
+	protoItem.CreatedAt = timestamppb.New(item.CreatedAt)
+	protoItem.UpdatedAt = timestamppb.New(item.UpdatedAt)
+
+	if item.Permissions != nil {
+		protoItem.Permissions = &pb.Permissions{
+			Edit:       item.Permissions.Edit,
+			Archive:    item.Permissions.Archive,
+			Publish:    item.Permissions.Publish,
+			SoftDelete: item.Permissions.SoftDelete,
+			HardDelete: item.Permissions.HardDelete,
+		}
+	}
+
+	return protoItem
+}
+
+func ItemFromProto(protoItem *pb.Item) *Item {
+
+	if protoItem == nil {
+		return nil
+	}
+
+	item := &Item{
+		ID:           protoItem.Id,
+		SpaceID:      protoItem.SpaceId,
+		EnvID:        protoItem.EnvId,
+		CollectionID: protoItem.CollectionId,
+		State:        State(protoItem.State),
+		CreatedBy:    protoItem.CreatedBy,
+		UpdatedBy:    protoItem.UpdatedBy,
+		RevisionID:   protoItem.RevisionId,
+		PublishedBy:  protoItem.PublishedBy,
+		ArchivedBy:   protoItem.ArchivedBy,
+		Locale:       protoItem.Locale,
+		Hidden:       protoItem.Hidden,
+		Template:     protoItem.Template,
+		Deleted:      protoItem.Deleted,
+	}
+
+	if protoItem.Data != nil {
+		item.Data = protoItem.Data.AsMap()
+	}
+
+	if protoItem.Translations != nil {
+		item.Translations = make(map[string]map[string]interface{}, len(protoItem.Translations))
+		for k, v := range protoItem.Translations {
+			item.Translations[k] = v.AsMap()
+		}
+	}
+
+	if protoItem.Permissions != nil {
+		item.Permissions = &Permissions{
+			Edit:       protoItem.Permissions.Edit,
+			Archive:    protoItem.Permissions.Archive,
+			Publish:    protoItem.Permissions.Publish,
+			SoftDelete: protoItem.Permissions.SoftDelete,
+			HardDelete: protoItem.Permissions.HardDelete,
+		}
+	}
+
+	item.CreatedRevAt = protoItem.CreatedRevAt.AsTime()
+	item.PublishedAt = protoItem.PublishedAt.AsTime()
+	item.ArchivedAt = protoItem.ArchivedAt.AsTime()
+	item.CreatedAt = protoItem.CreatedAt.AsTime()
+	item.UpdatedAt = protoItem.UpdatedAt.AsTime()
+
+	return item
+}
+
+func GetItemIDs(arr []*Item) []string {
+	res := make([]string, len(arr))
+	for i, e := range arr {
+		res[i] = e.ID
+	}
+	return res
+}
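
Translations only store per-locale overrides; GetData merges an override map over the base Data via MergeData, so untouched fields fall back to the default locale. A small sketch:

    import "git.perx.ru/perxis/perxis-go/pkg/items"

    // localizedData demonstrates the per-locale override behaviour of SetData/GetData.
    func localizedData() map[string]interface{} {
        itm := items.NewItem("space", "master", "articles", "article-1",
            map[string]interface{}{"title": "Hello", "slug": "hello"}, nil)
        itm.SetData("ru", map[string]interface{}{"title": "Localized title"})
        // Returns {"title": "Localized title", "slug": "hello"}.
        return itm.GetData("ru")
    }
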
diff --git a/pkg/items/item_test.go b/pkg/items/item_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..fb54fc501f45281bbafd37983de3b8638d5692d4
--- /dev/null
+++ b/pkg/items/item_test.go
@@ -0,0 +1,61 @@
+package items
+
+import (
+	"fmt"
+	"testing"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"github.com/stretchr/testify/assert"
+)
+
+func TestItem_Set(t *testing.T) {
+	item := &Item{}
+
+	item.Set("id", "id")
+	assert.Equal(t, "id", item.ID)
+	now := time.Now()
+
+	item.Set("created_at", now)
+	assert.Equal(t, now, item.CreatedAt)
+
+	item.Set("a.b.c", 101)
+	assert.Equal(t, map[string]any{"a": map[string]any{"b": map[string]any{"c": 101}}}, item.Data)
+
+}
+
+func TestGetField(t *testing.T) {
+	sch := schema.New(
+		"a", field.String(),
+		"obj", field.Object(
+			"a", field.Number(field.NumberFormatFloat),
+			"b", field.String(),
+		),
+		"arr", field.Array(field.Object("a", field.Time())),
+	)
+
+	tests := []struct {
+		name    string
+		field   string
+		want    *field.Field
+		wantErr assert.ErrorAssertionFunc
+	}{
+		{"Simple", "a", field.String(), assert.NoError},
+		{"Incorrect field", "b", nil, assert.Error},
+		{"Object", "obj", field.Object("a", field.Number(field.NumberFormatFloat), "b", field.String()), assert.NoError},
+		{"Object path", "obj.a", field.Number(field.NumberFormatFloat), assert.NoError},
+		{"Array", "arr", field.Array(field.Object("a", field.Time())), assert.NoError},
+		{"Array path", "arr.a", field.Time(), assert.NoError},
+		{"Array item", "arr.", field.Object("a", field.Time()), assert.NoError},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := GetField(tt.field, sch)
+			if !tt.wantErr(t, err, fmt.Sprintf("GetField(%v, sch)", tt.field)) {
+				return
+			}
+			assert.Equalf(t, tt.want, got, "GetField(%v, sch)", tt.field)
+		})
+	}
+}
diff --git a/pkg/items/middleware/caching_middleware.go b/pkg/items/middleware/caching_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..0455cb276c5ccae86aeb27619fe55369a735550a
--- /dev/null
+++ b/pkg/items/middleware/caching_middleware.go
@@ -0,0 +1,176 @@
+package service
+
+import (
+	"context"
+	"strings"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	envService "git.perx.ru/perxis/perxis-go/pkg/environments"
+	service "git.perx.ru/perxis/perxis-go/pkg/items"
+)
+
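+// makeKey builds a cache key by joining the given identifiers with "-".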
+func makeKey(ss ...string) string {
+	return strings.Join(ss, "-")
+}
+
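+// CachingMiddleware returns a middleware that caches items returned by Get and
+// GetPublished. Entries are keyed by space, environment, collection and item ID
+// and are stored both under the environment ID and under each of its aliases.
+// Update, Delete, Publish, Unpublish and Archive invalidate the corresponding
+// entries in both caches. Published items are cached per locale.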
+func CachingMiddleware(cache, cachePublished *cache.Cache, envs envService.Environments) Middleware {
+	return func(next service.Items) service.Items {
+		return &cachingMiddleware{
+			cache:          cache,
+			cachePublished: cachePublished,
+			Items:          next,
+			envs:           envs,
+		}
+	}
+}
+
+type cachingMiddleware struct {
+	cache          *cache.Cache
+	cachePublished *cache.Cache
+	envs           envService.Environments
+	service.Items
+}
+
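+// Get returns the item from the cache when present; otherwise it fetches the
+// item from the underlying service and stores it under the environment ID and
+// every alias of the environment.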
+func (m cachingMiddleware) Get(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*service.GetOptions) (itm *service.Item, err error) {
+
+	value, e := m.cache.Get(makeKey(spaceId, envId, collectionId, itemId))
+	if e == nil {
+		return value.(*service.Item), nil
+	}
+	itm, err = m.Items.Get(ctx, spaceId, envId, collectionId, itemId, options...)
+	if err == nil {
+		env, err := m.envs.Get(ctx, itm.SpaceID, itm.EnvID)
+		if err != nil {
+			return nil, err
+		}
+		m.cache.Set(makeKey(itm.SpaceID, env.ID, itm.CollectionID, itm.ID), itm)
+		for _, al := range env.Aliases {
+			m.cache.Set(makeKey(itm.SpaceID, al, itm.CollectionID, itm.ID), itm)
+		}
+	}
+	return itm, err
+}
+
+func (m cachingMiddleware) Update(ctx context.Context, item *service.Item, options ...*service.UpdateOptions) (err error) {
+
+	err = m.Items.Update(ctx, item, options...)
+	if err == nil {
+		env, err := m.envs.Get(ctx, item.SpaceID, item.EnvID)
+		if err != nil {
+			return err
+		}
+		m.cache.Remove(makeKey(item.SpaceID, env.ID, item.CollectionID, item.ID))
+		m.cachePublished.Remove(makeKey(item.SpaceID, env.ID, item.CollectionID, item.ID))
+		for _, al := range env.Aliases {
+			m.cache.Remove(makeKey(item.SpaceID, al, item.CollectionID, item.ID))
+			m.cachePublished.Remove(makeKey(item.SpaceID, al, item.CollectionID, item.ID))
+		}
+	}
+	return err
+}
+
+func (m cachingMiddleware) Delete(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*service.DeleteOptions) (err error) {
+
+	err = m.Items.Delete(ctx, spaceId, envId, collectionId, itemId, options...)
+	if err == nil {
+		env, err := m.envs.Get(ctx, spaceId, envId)
+		if err != nil {
+			return err
+		}
+		m.cache.Remove(makeKey(spaceId, env.ID, collectionId, itemId))
+		m.cachePublished.Remove(makeKey(spaceId, env.ID, collectionId, itemId))
+		for _, al := range env.Aliases {
+			m.cache.Remove(makeKey(spaceId, al, collectionId, itemId))
+			m.cachePublished.Remove(makeKey(spaceId, al, collectionId, itemId))
+		}
+
+	}
+	return err
+}
+
+func (m cachingMiddleware) Publish(ctx context.Context, item *service.Item, options ...*service.PublishOptions) (err error) {
+
+	err = m.Items.Publish(ctx, item, options...)
+	if err == nil {
+		env, err := m.envs.Get(ctx, item.SpaceID, item.EnvID)
+		if err != nil {
+			return err
+		}
+		m.cache.Remove(makeKey(item.SpaceID, env.ID, item.CollectionID, item.ID))
+		m.cachePublished.Remove(makeKey(item.SpaceID, env.ID, item.CollectionID, item.ID))
+		for _, al := range env.Aliases {
+			m.cache.Remove(makeKey(item.SpaceID, al, item.CollectionID, item.ID))
+			m.cachePublished.Remove(makeKey(item.SpaceID, al, item.CollectionID, item.ID))
+		}
+	}
+	return err
+}
+
+func (m cachingMiddleware) Unpublish(ctx context.Context, item *service.Item, options ...*service.UnpublishOptions) (err error) {
+
+	err = m.Items.Unpublish(ctx, item, options...)
+	if err == nil {
+		env, err := m.envs.Get(ctx, item.SpaceID, item.EnvID)
+		if err != nil {
+			return err
+		}
+		m.cache.Remove(makeKey(item.SpaceID, env.ID, item.CollectionID, item.ID))
+		m.cachePublished.Remove(makeKey(item.SpaceID, env.ID, item.CollectionID, item.ID))
+		for _, al := range env.Aliases {
+			m.cache.Remove(makeKey(item.SpaceID, al, item.CollectionID, item.ID))
+			m.cachePublished.Remove(makeKey(item.SpaceID, al, item.CollectionID, item.ID))
+		}
+	}
+	return err
+}
+
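+// GetPublished caches published items as a map keyed by locale, so one cache
+// entry serves requests for the same item in different locales.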
+func (m cachingMiddleware) GetPublished(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*service.GetPublishedOptions) (itm *service.Item, err error) {
+
+	opts := service.MergeGetPublishedOptions(options...)
+
+	val, e := m.cachePublished.Get(makeKey(spaceId, envId, collectionId, itemId))
+	if e == nil {
+		value := val.(map[string]*service.Item)
+		if i, ok := value[opts.LocaleID]; ok {
+			return i, nil
+		}
+	}
+
+	itm, err = m.Items.GetPublished(ctx, spaceId, envId, collectionId, itemId, opts)
+
+	if err == nil {
+		env, err := m.envs.Get(ctx, itm.SpaceID, itm.EnvID)
+		if err != nil {
+			return nil, err
+		}
+		var value = make(map[string]*service.Item)
+		if val != nil {
+			value = val.(map[string]*service.Item)
+		}
+		value[opts.LocaleID] = itm
+		m.cachePublished.Set(makeKey(itm.SpaceID, env.ID, itm.CollectionID, itm.ID), value)
+		for _, al := range env.Aliases {
+			m.cachePublished.Set(makeKey(itm.SpaceID, al, itm.CollectionID, itm.ID), value)
+		}
+	}
+
+	return itm, err
+}
+
+func (m cachingMiddleware) Archive(ctx context.Context, item *service.Item, options ...*service.ArchiveOptions) (err error) {
+
+	err = m.Items.Archive(ctx, item, options...)
+	if err == nil {
+		env, err := m.envs.Get(ctx, item.SpaceID, item.EnvID)
+		if err != nil {
+			return err
+		}
+		m.cache.Remove(makeKey(item.SpaceID, env.ID, item.CollectionID, item.ID))
+		m.cachePublished.Remove(makeKey(item.SpaceID, env.ID, item.CollectionID, item.ID))
+		for _, al := range env.Aliases {
+			m.cache.Remove(makeKey(item.SpaceID, al, item.CollectionID, item.ID))
+			m.cachePublished.Remove(makeKey(item.SpaceID, al, item.CollectionID, item.ID))
+		}
+	}
+	return err
+}
diff --git a/pkg/items/middleware/caching_middleware_test.go b/pkg/items/middleware/caching_middleware_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..9e02b49beb80507ef7fbf25d3c67d5c8a022ce1b
--- /dev/null
+++ b/pkg/items/middleware/caching_middleware_test.go
@@ -0,0 +1,685 @@
+package service
+
+import (
+	"context"
+	"testing"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	envmocks "git.perx.ru/perxis/perxis-go/pkg/environments/mocks"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	itmsmocks "git.perx.ru/perxis/perxis-go/pkg/items/mocks"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/mock"
+	"github.com/stretchr/testify/require"
+)
+
+func TestItemsCache(t *testing.T) {
+
+	const (
+		colID    = "colID"
+		spaceID  = "spaceID"
+		envID    = "envID"
+		envAlias = "envAlias"
+		itemID   = "itemID"
+		locID    = "locID"
+		size     = 5
+		ttl      = 20 * time.Millisecond
+	)
+
+	errNotFound := errors.NotFound(errors.New("not found"))
+
+	ctx := context.Background()
+
+	t.Run("Get from cache", func(t *testing.T) {
+		itms := &itmsmocks.Items{}
+		env := &envmocks.Environments{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+		env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+		itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+
+		v1, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+		require.NoError(t, err)
+
+		v2, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+		require.NoError(t, err)
+		assert.Same(t, v1, v2, "Ожидается получение объекта из кеша, при повторном запросе.")
+
+		v3, err := svc.Get(ctx, spaceID, envAlias, colID, itemID)
+		assert.Same(t, v3, v2, "Ожидается получение объекта из кеша, при запросе того же объекта по alias окружения.")
+		require.NoError(t, err)
+
+		env.AssertExpectations(t)
+		itms.AssertExpectations(t)
+	})
+
+	t.Run("Get from cache(by Alias)", func(t *testing.T) {
+		itms := &itmsmocks.Items{}
+		env := &envmocks.Environments{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+		env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+		itms.On("Get", mock.Anything, spaceID, envAlias, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+
+		v1, err := svc.Get(ctx, spaceID, envAlias, colID, itemID)
+		require.NoError(t, err)
+
+		v2, err := svc.Get(ctx, spaceID, envAlias, colID, itemID)
+		require.NoError(t, err)
+		assert.Same(t, v1, v2, "Ожидается получение объекта из кеша, при повторном запросе.")
+
+		v3, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+		assert.Same(t, v3, v2, "Ожидается получение объекта из кеша, при запросе того же объекта по ID окружения.")
+		require.NoError(t, err)
+
+		env.AssertExpectations(t)
+		itms.AssertExpectations(t)
+	})
+
+	t.Run("GetPublished from cache", func(t *testing.T) {
+		itms := &itmsmocks.Items{}
+		env := &envmocks.Environments{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+		env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+		itms.On("GetPublished", mock.Anything, spaceID, envID, colID, itemID, mock.Anything).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+
+		v1, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+		require.NoError(t, err)
+
+		v2, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+		require.NoError(t, err)
+		assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+		v3, err := svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+		require.NoError(t, err)
+		assert.Same(t, v2, v3, "Ожидается получение объекта из кеша, при запросе того же объекта по alias окружения.")
+
+		env.AssertExpectations(t)
+		itms.AssertExpectations(t)
+	})
+
+	t.Run("GetPublished from cache(by Alias)", func(t *testing.T) {
+		itms := &itmsmocks.Items{}
+		env := &envmocks.Environments{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+		env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+		itms.On("GetPublished", mock.Anything, spaceID, envAlias, colID, itemID, mock.Anything).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+
+		v1, err := svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+		require.NoError(t, err)
+
+		v2, err := svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+		require.NoError(t, err)
+		assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+		v3, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+		require.NoError(t, err)
+		assert.Same(t, v2, v3, "Ожидается получение объекта из кеша, при запросе того же объекта по ID окружения.")
+
+		env.AssertExpectations(t)
+		itms.AssertExpectations(t)
+	})
+
+	t.Run("GetPublished from cache (with different locales)", func(t *testing.T) {
+		const (
+			loc1 = "loc1"
+			loc2 = "loc2"
+		)
+
+		itms := &itmsmocks.Items{}
+		env := &envmocks.Environments{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+		env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Twice()
+		itms.On("GetPublished", mock.Anything, spaceID, envAlias, colID, itemID, mock.Anything).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+		itms.On("GetPublished", mock.Anything, spaceID, envAlias, colID, itemID, mock.Anything).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+
+		v1loc1, err := svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: loc1})
+		require.NoError(t, err, "Ожидается получение объекта из сервиса и добавление его в кеш с loc1.")
+
+		v1loc2, err := svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: loc2})
+		require.NoError(t, err, "Ожидается получение объекта из сервиса и добавление его в кеш с loc2 вместе с loc1.")
+
+		v2loc1, err := svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: loc1})
+		require.NoError(t, err)
+		assert.Same(t, v1loc1, v2loc1, "Ожидается получение объекта c локализацией loc1 из кеша.")
+
+		v2loc2, err := svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: loc2})
+		require.NoError(t, err)
+		assert.Same(t, v1loc2, v2loc2, "Ожидается получение объекта c локализацией loc2 из кеша.")
+
+		v3loc1, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: loc1})
+		require.NoError(t, err)
+		assert.Same(t, v2loc1, v3loc1, "Ожидается получение объекта c локализацией loc1 из кеша, при запросе того же объекта по ID окружения.")
+
+		v3loc2, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: loc2})
+		require.NoError(t, err)
+		assert.Same(t, v2loc2, v3loc2, "Ожидается получение объекта c локализацией loc2 из кеша, при запросе того же объекта по ID окружения.")
+
+		env.AssertExpectations(t)
+		itms.AssertExpectations(t)
+	})
+
+	t.Run("Invalidate cache", func(t *testing.T) {
+		t.Run("After Update(Get)", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, Data: map[string]interface{}{"f1": "d1"}}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+			itms.On("Update", mock.Anything, mock.Anything).Return(nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			err = svc.Update(ctx, &items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, Data: map[string]interface{}{"f1": "d2"}})
+			require.NoError(t, err)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, Data: map[string]interface{}{"f1": "d2"}}, nil).Once()
+
+			v3, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.NotSame(t, v3, v2, "Ожидается удаление объекта из кэша после обновления и получение его заново из сервиса.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Archive(Get)", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+			itms.On("Archive", mock.Anything, mock.Anything).Return(nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			err = svc.Archive(ctx, &items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft})
+			require.NoError(t, err)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateArchived}, nil).Once()
+
+			v3, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.NotSame(t, v3, v2, "Ожидается удаление объекта из кэша после архивации и получение из сервиса.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Publish(Get)", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+			itms.On("Publish", mock.Anything, mock.Anything).Return(nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			err = svc.Publish(ctx, &items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft})
+			require.NoError(t, err)
+
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished}, nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			v3, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.NotSame(t, v3, v2, "Ожидается удаление объекта из кэша после публикации и получение заново из сервиса.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Delete", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша при повторном запросе.")
+
+			v3, err := svc.Get(ctx, spaceID, envAlias, colID, itemID)
+			require.NoError(t, err)
+			assert.Same(t, v2, v3, "Ожидается получение объекта из кеша по alias окружения.")
+
+			itms.On("Delete", mock.Anything, spaceID, envID, colID, itemID).Return(nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			err = svc.Delete(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(nil, errNotFound).Once()
+			itms.On("Get", mock.Anything, spaceID, envAlias, colID, itemID).Return(nil, errNotFound).Once()
+			_, err = svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление из кэша после удаления объекта и получение ошибки от сервиса.")
+
+			_, err = svc.Get(ctx, spaceID, envAlias, colID, itemID)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление из кэша после удаления объекта и получение ошибки от сервиса.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Unpublish(Get)", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Unpublish", mock.Anything, mock.Anything).Return(nil).Once()
+
+			err = svc.Unpublish(ctx, &items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished})
+			require.NoError(t, err)
+
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			v3, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.NotSame(t, v3, v2, "Ожидается удаление объекта из кэша после снятия с публикации и получение заново из сервиса.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Publish(Get by Alias)", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envAlias, colID, itemID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша по alias окружения.")
+
+			itms.On("Publish", mock.Anything, mock.Anything).Return(nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			err = svc.Publish(ctx, &items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft})
+			require.NoError(t, err)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envAlias, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished}, nil).Once()
+
+			v3, err := svc.Get(ctx, spaceID, envAlias, colID, itemID)
+			require.NoError(t, err)
+			assert.NotSame(t, v3, v2, "Ожидается удаление объекта из кэша после публикации и получение из сервиса по alias окружения.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Update(Get by Alias)", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, Data: map[string]interface{}{"f1": "d1"}}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envAlias, colID, itemID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша по alias окружения.")
+
+			itms.On("Update", mock.Anything, mock.Anything).Return(nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			err = svc.Update(ctx, &items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, Data: map[string]interface{}{"f1": "d2"}})
+			require.NoError(t, err)
+
+			itms.On("Get", mock.Anything, spaceID, envAlias, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, Data: map[string]interface{}{"f1": "d2"}}, nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			v3, err := svc.Get(ctx, spaceID, envAlias, colID, itemID)
+			require.NoError(t, err)
+			assert.NotSame(t, v3, v2, "Ожидается удаление объекта из кэша при обновлении и получение из сервиса по alias окружения.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Unpublish(Get by Alias)", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envAlias, colID, itemID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша по alias окружения.")
+
+			itms.On("Unpublish", mock.Anything, mock.Anything).Return(nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			err = svc.Unpublish(ctx, &items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished})
+			require.NoError(t, err)
+
+			itms.On("Get", mock.Anything, spaceID, envAlias, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			v3, err := svc.Get(ctx, spaceID, envAlias, colID, itemID)
+			require.NoError(t, err)
+			assert.NotSame(t, v3, v2, "Ожидается удаление объекта из кэша после снятия с публикации и получение из сервиса по alias окружения.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Update(GetPublished)", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("GetPublished", mock.Anything, spaceID, envID, colID, itemID, mock.Anything).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished, Data: map[string]interface{}{"f1": "d1"}}, nil).Once()
+
+			v1, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+
+			v2, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+			v3, err := svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+			assert.Same(t, v2, v3, "Ожидается получение объекта из кеша по alias окружения.")
+
+			itms.On("Update", mock.Anything, mock.Anything).Return(nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			err = svc.Update(ctx, &items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished, Data: map[string]interface{}{"f1": "d2"}})
+			require.NoError(t, err)
+
+			itms.On("GetPublished", mock.Anything, spaceID, envID, colID, itemID, mock.Anything).Return(nil, errNotFound).Once()
+			itms.On("GetPublished", mock.Anything, spaceID, envAlias, colID, itemID, mock.Anything).Return(nil, errNotFound).Once()
+
+			_, err = svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление объекта из кэша по ID окружения после его обновления и получение ошибки от сервиса.")
+
+			_, err = svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление объекта из кэша по alias окружения после его обновления и получение ошибки от сервиса.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Archive(GetPublished)", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("GetPublished", mock.Anything, spaceID, envID, colID, itemID, mock.Anything).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+
+			v1, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+
+			v2, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+			v3, err := svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+			assert.Same(t, v2, v3, "Ожидается получение объекта из кеша по alias окружения.")
+
+			itms.On("Archive", mock.Anything, mock.Anything).Return(nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			err = svc.Archive(ctx, &items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft})
+			require.NoError(t, err)
+
+			itms.On("GetPublished", mock.Anything, spaceID, envID, colID, itemID, mock.Anything).Return(nil, errNotFound).Once()
+			itms.On("GetPublished", mock.Anything, spaceID, envAlias, colID, itemID, mock.Anything).Return(nil, errNotFound).Once()
+
+			_, err = svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление объекта из кэша по ID окружения после его архивации и получение ошибки от сервиса.")
+
+			_, err = svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление объекта из кэша по alias окружения после его архивации и получение ошибки от сервиса.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Delete(GetPublished)", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("GetPublished", mock.Anything, spaceID, envID, colID, itemID, mock.Anything).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished}, nil).Once()
+
+			v1, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+
+			v2, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+			v3, err := svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+			assert.Same(t, v2, v3, "Ожидается получение объекта из кеша по alias окружения.")
+
+			itms.On("Delete", mock.Anything, spaceID, envID, colID, itemID).Return(nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			err = svc.Delete(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			itms.On("GetPublished", mock.Anything, spaceID, envID, colID, itemID, mock.Anything).Return(nil, errNotFound).Once()
+			itms.On("GetPublished", mock.Anything, spaceID, envAlias, colID, itemID, mock.Anything).Return(nil, errNotFound).Once()
+
+			_, err = svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление объекта из кэша после удаления из хранилища и получение ошибки от сервиса.")
+
+			_, err = svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается очистка кеша по alias окружения после удаления объекта и получение ошибки от сервиса.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Unpublish(GetPublished)", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("GetPublished", mock.Anything, spaceID, envID, colID, itemID, mock.Anything).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished}, nil).Once()
+
+			v1, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+
+			v2, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+			v3, err := svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+			assert.Same(t, v2, v3, "Ожидается получение объекта из кеша по alias окружения.")
+
+			itms.On("Unpublish", mock.Anything, mock.Anything).Return(nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			err = svc.Unpublish(ctx, &items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished})
+			require.NoError(t, err)
+
+			itms.On("GetPublished", mock.Anything, spaceID, envID, colID, itemID, mock.Anything).Return(nil, errNotFound).Once()
+			itms.On("GetPublished", mock.Anything, spaceID, envAlias, colID, itemID, mock.Anything).Return(nil, errNotFound).Once()
+
+			_, err = svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление объекта из кэша по ID окружения после снятия с публикации и получение ошибки от сервиса.")
+
+			_, err = svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление объекта из кэша по alias окружения после снятия с публикации и получение ошибки от сервиса.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After Unpublish by Alias", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			env.On("Get", mock.Anything, spaceID, envAlias).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished}, nil).Once()
+			itms.On("GetPublished", mock.Anything, spaceID, envID, colID, itemID, mock.Anything).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StatePublished}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+			v3, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+
+			v4, err := svc.GetPublished(ctx, spaceID, envID, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.NoError(t, err)
+			assert.Same(t, v3, v4, "Ожидается получение опубликованного объекта из кеша.")
+
+			itms.On("Unpublish", mock.Anything, mock.Anything).Return(nil).Once()
+			err = svc.Unpublish(ctx, &items.Item{ID: itemID, SpaceID: spaceID, EnvID: envAlias, CollectionID: colID, State: items.StatePublished})
+			require.NoError(t, err)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+			itms.On("GetPublished", mock.Anything, spaceID, envAlias, colID, itemID, mock.Anything).Return(nil, errNotFound).Once()
+
+			v5, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.NotSame(t, v5, v2, "Ожидается удаление объекта из кэша и получение заново из сервиса.")
+
+			_, err = svc.GetPublished(ctx, spaceID, envAlias, colID, itemID, &items.GetPublishedOptions{LocaleID: locID})
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается удаление объекта из кэша и получение ошибки от сервиса.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+
+		t.Run("After TTL expired", func(t *testing.T) {
+			itms := &itmsmocks.Items{}
+			env := &envmocks.Environments{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl), cache.NewCache(size, ttl), env)(itms)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается получение объекта из кеша.")
+
+			time.Sleep(2 * ttl)
+
+			env.On("Get", mock.Anything, spaceID, envID).Return(&environments.Environment{ID: envID, SpaceID: spaceID, Aliases: []string{envAlias}}, nil).Once()
+			itms.On("Get", mock.Anything, spaceID, envID, colID, itemID).Return(&items.Item{ID: itemID, SpaceID: spaceID, EnvID: envID, CollectionID: colID, State: items.StateDraft}, nil).Once()
+
+			v3, err := svc.Get(ctx, spaceID, envID, colID, itemID)
+			require.NoError(t, err)
+			assert.NotSame(t, v2, v3, "Ожидается удаление объекта из кэша и получение из сервиса.")
+
+			env.AssertExpectations(t)
+			itms.AssertExpectations(t)
+		})
+	})
+}
diff --git a/pkg/items/middleware/client_encode_middleware.go b/pkg/items/middleware/client_encode_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..3aaa6b94badd9726e74d1cb3b0abbd02893f5838
--- /dev/null
+++ b/pkg/items/middleware/client_encode_middleware.go
@@ -0,0 +1,317 @@
+package service
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/collections"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+)
+
+// ClientEncodeMiddleware performs encode/decode operations on the transmitted data using the collection schema.
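+//
+// A minimal wiring sketch (variable names are illustrative only):
+//
+//	var raw items.Items // client or service implementation obtained elsewhere
+//	wrapped := ClientEncodeMiddleware(colls)(raw)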
+func ClientEncodeMiddleware(colls collections.Collections) Middleware {
+	return func(items items.Items) items.Items {
+		return &encodeDecodeMiddleware{
+			next:  items,
+			colls: colls,
+		}
+
+	}
+}
+
+type encodeDecodeMiddleware struct {
+	next  items.Items
+	colls collections.Collections
+}
+
+func (m *encodeDecodeMiddleware) Introspect(ctx context.Context, item *items.Item, opts ...*items.IntrospectOptions) (itm *items.Item, sch *schema.Schema, err error) {
+	coll, err := m.colls.Get(ctx, item.SpaceID, item.EnvID, item.CollectionID)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	if item, err = item.Encode(ctx, coll.Schema); err != nil {
+		return
+	}
+
+	itm, sch, err = m.next.Introspect(ctx, item, opts...)
+	if itm != nil && sch != nil {
+		var err error
+		if itm, err = itm.Decode(ctx, sch); err != nil {
+			return nil, nil, err
+		}
+	}
+	return itm, sch, err
+
+}
+
+func (m *encodeDecodeMiddleware) Create(ctx context.Context, item *items.Item, opts ...*items.CreateOptions) (created *items.Item, err error) {
+
+	var col *collections.Collection
+
+	if item != nil && (item.Data != nil || item.Translations != nil) {
+
+		col, err = m.colls.Get(ctx, item.SpaceID, item.EnvID, item.CollectionID)
+		if err != nil {
+			return nil, err
+		}
+
+		if item, err = item.Encode(ctx, col.Schema); err != nil {
+			return nil, err
+		}
+	}
+
+	res, err := m.next.Create(ctx, item, opts...)
+	if err == nil && (res.Data != nil || res.Translations != nil) {
+
+		if col == nil {
+			col, err = m.colls.Get(ctx, item.SpaceID, item.EnvID, item.CollectionID)
+			if err != nil {
+				return nil, err
+			}
+		}
+
+		res, err = res.Decode(ctx, col.Schema)
+	}
+
+	return res, err
+}
+
+func (m *encodeDecodeMiddleware) Update(ctx context.Context, upd *items.Item, options ...*items.UpdateOptions) (err error) {
+	var col *collections.Collection
+	if upd != nil && (upd.Data != nil || upd.Translations != nil) {
+		col, err = m.colls.Get(ctx, upd.SpaceID, upd.EnvID, upd.CollectionID)
+		if err != nil {
+			return err
+		}
+		if upd, err = upd.Encode(ctx, col.Schema); err != nil {
+			return err
+		}
+	}
+	return m.next.Update(ctx, upd, options...)
+}
+
+func (m *encodeDecodeMiddleware) Find(ctx context.Context, spaceId, envId, collectionId string, filter *items.Filter, options ...*items.FindOptions) (items []*items.Item, total int, err error) {
+	items, total, err = m.next.Find(ctx, spaceId, envId, collectionId, filter, options...)
+	if err == nil && total > 0 {
+		col, err := m.colls.Get(ctx, spaceId, envId, collectionId)
+		if err != nil {
+			return nil, 0, err
+		}
+		for i, itm := range items {
+			itm, err = itm.Decode(ctx, col.Schema)
+			if err != nil {
+				return nil, 0, err
+			}
+
+			items[i] = itm
+		}
+	}
+	return
+}
+
+func (m *encodeDecodeMiddleware) Get(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*items.GetOptions) (item *items.Item, err error) {
+	item, err = m.next.Get(ctx, spaceId, envId, collectionId, itemId, options...)
+	if err == nil && item != nil {
+		col, err := m.colls.Get(ctx, spaceId, envId, collectionId)
+		if err != nil {
+			return nil, err
+		}
+		item, err = item.Decode(ctx, col.Schema)
+		if err != nil {
+			return nil, err
+		}
+	}
+	return
+}
+
+func (m *encodeDecodeMiddleware) Publish(ctx context.Context, item *items.Item, opts ...*items.PublishOptions) (err error) {
+	if item != nil && (item.Data != nil || item.Translations != nil) {
+		col, err := m.colls.Get(ctx, item.SpaceID, item.EnvID, item.CollectionID)
+		if err != nil {
+			return err
+		}
+
+		if item, err = item.Encode(ctx, col.Schema); err != nil {
+			return err
+		}
+	}
+
+	return m.next.Publish(ctx, item, opts...)
+}
+
+func (m *encodeDecodeMiddleware) Unpublish(ctx context.Context, item *items.Item, opts ...*items.UnpublishOptions) (err error) {
+	if item != nil && (item.Data != nil || item.Translations != nil) {
+		col, err := m.colls.Get(ctx, item.SpaceID, item.EnvID, item.CollectionID)
+		if err != nil {
+			return err
+		}
+
+		if item, err = item.Encode(ctx, col.Schema); err != nil {
+			return err
+		}
+	}
+
+	return m.next.Unpublish(ctx, item, opts...)
+}
+
+func (m *encodeDecodeMiddleware) GetPublished(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*items.GetPublishedOptions) (item *items.Item, err error) {
+	item, err = m.next.GetPublished(ctx, spaceId, envId, collectionId, itemId, options...)
+	if err == nil && item != nil {
+		col, err := m.colls.Get(ctx, spaceId, envId, collectionId)
+		if err != nil {
+			return nil, err
+		}
+		item, err = item.Decode(ctx, col.Schema)
+		if err != nil {
+			return nil, err
+		}
+	}
+	return
+}
+
+func (m *encodeDecodeMiddleware) FindPublished(ctx context.Context, spaceId, envId, collectionId string, filter *items.Filter, options ...*items.FindPublishedOptions) (items []*items.Item, total int, err error) {
+	items, total, err = m.next.FindPublished(ctx, spaceId, envId, collectionId, filter, options...)
+	if err == nil && total > 0 {
+		col, err := m.colls.Get(ctx, spaceId, envId, collectionId)
+		if err != nil {
+			return nil, 0, err
+		}
+		for i, itm := range items {
+			itm, err = itm.Decode(ctx, col.Schema)
+			if err != nil {
+				return nil, 0, err
+			}
+
+			items[i] = itm
+		}
+	}
+	return
+}
+
+func (m *encodeDecodeMiddleware) GetRevision(ctx context.Context, spaceId, envId, collectionId, itemId, revisionId string, options ...*items.GetRevisionOptions) (item *items.Item, err error) {
+	item, err = m.next.GetRevision(ctx, spaceId, envId, collectionId, itemId, revisionId, options...)
+	if err == nil && item != nil {
+		col, err := m.colls.Get(ctx, spaceId, envId, collectionId)
+		if err != nil {
+			return nil, err
+		}
+		item, err = item.Decode(ctx, col.Schema)
+		if err != nil {
+			return nil, err
+		}
+	}
+	return
+}
+
+func (m *encodeDecodeMiddleware) ListRevisions(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*items.ListRevisionsOptions) (items []*items.Item, err error) {
+	items, err = m.next.ListRevisions(ctx, spaceId, envId, collectionId, itemId, options...)
+	if err == nil && len(items) > 0 {
+		col, err := m.colls.Get(ctx, spaceId, envId, collectionId)
+		if err != nil {
+			return nil, err
+		}
+		for i, itm := range items {
+			itm, err = itm.Decode(ctx, col.Schema)
+			if err != nil {
+				return nil, err
+			}
+
+			items[i] = itm
+		}
+	}
+	return
+}
+
+func (m *encodeDecodeMiddleware) FindArchived(ctx context.Context, spaceId, envId, collectionId string, filter *items.Filter, options ...*items.FindArchivedOptions) (items []*items.Item, total int, err error) {
+	items, total, err = m.next.FindArchived(ctx, spaceId, envId, collectionId, filter, options...)
+	if err == nil && total > 0 {
+		col, err := m.colls.Get(ctx, spaceId, envId, collectionId)
+		if err != nil {
+			return nil, 0, err
+		}
+		for i, itm := range items {
+			itm, err = itm.Decode(ctx, col.Schema)
+			if err != nil {
+				return nil, 0, err
+			}
+
+			items[i] = itm
+		}
+	}
+	return
+}
+
+func (m *encodeDecodeMiddleware) Archive(ctx context.Context, item *items.Item, opts ...*items.ArchiveOptions) (err error) {
+	if item != nil && (item.Data != nil || item.Translations != nil) {
+		col, err := m.colls.Get(ctx, item.SpaceID, item.EnvID, item.CollectionID)
+		if err != nil {
+			return err
+		}
+
+		if item, err = item.Encode(ctx, col.Schema); err != nil {
+			return err
+		}
+	}
+
+	return m.next.Archive(ctx, item, opts...)
+}
+
+func (m *encodeDecodeMiddleware) Unarchive(ctx context.Context, item *items.Item, opts ...*items.UnarchiveOptions) (err error) {
+	if item != nil && (item.Data != nil || item.Translations != nil) {
+		col, err := m.colls.Get(ctx, item.SpaceID, item.EnvID, item.CollectionID)
+		if err != nil {
+			return err
+		}
+
+		if item, err = item.Encode(ctx, col.Schema); err != nil {
+			return err
+		}
+	}
+
+	return m.next.Unarchive(ctx, item, opts...)
+}
+
+func (m *encodeDecodeMiddleware) Delete(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*items.DeleteOptions) (err error) {
+	return m.next.Delete(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *encodeDecodeMiddleware) Undelete(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*items.UndeleteOptions) (err error) {
+	return m.next.Undelete(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *encodeDecodeMiddleware) Aggregate(ctx context.Context, spaceId, envId, collectionId string, filter *items.Filter, options ...*items.AggregateOptions) (result map[string]interface{}, err error) {
+	res, err := m.next.Aggregate(ctx, spaceId, envId, collectionId, filter, options...)
+	if len(res) > 0 && len(options) > 0 {
+		col, err := m.colls.Get(ctx, spaceId, envId, collectionId)
+		if err != nil {
+			return nil, errors.Wrap(err, "get collection")
+		}
+		o := items.MergeAggregateOptions(options...)
+		res, err = items.DecodeAggregateResult(ctx, o.Fields, res, col.Schema)
+		if err != nil {
+			return nil, errors.Wrap(err, "decode aggregate result")
+		}
+	}
+	return res, err
+}
+
+func (m *encodeDecodeMiddleware) AggregatePublished(ctx context.Context, spaceId, envId, collectionId string, filter *items.Filter, options ...*items.AggregatePublishedOptions) (result map[string]interface{}, err error) {
+	res, err := m.next.AggregatePublished(ctx, spaceId, envId, collectionId, filter, options...)
+	if len(res) > 0 && len(options) > 0 {
+		col, err := m.colls.Get(ctx, spaceId, envId, collectionId)
+		if err != nil {
+			return nil, errors.Wrap(err, "get collection")
+		}
+		o := items.MergeAggregatePublishedOptions(options...)
+		res, err = items.DecodeAggregateResult(ctx, o.Fields, res, col.Schema)
+		if err != nil {
+			return nil, err
+		}
+	}
+	return res, err
+}
diff --git a/pkg/items/middleware/error_logging_middleware.go b/pkg/items/middleware/error_logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..97967808d150cf951f3fb22b16b2836765fc611f
--- /dev/null
+++ b/pkg/items/middleware/error_logging_middleware.go
@@ -0,0 +1,211 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/error_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/items -i Items -t ../../../assets/templates/middleware/error_log -o error_logging_middleware.go -l ""
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"go.uber.org/zap"
+)
+
+// errorLoggingMiddleware implements items.Items that is instrumented with logging
+type errorLoggingMiddleware struct {
+	logger *zap.Logger
+	next   items.Items
+}
+
+// ErrorLoggingMiddleware instruments an implementation of the items.Items with simple logging
+func ErrorLoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next items.Items) items.Items {
+		return &errorLoggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *errorLoggingMiddleware) Aggregate(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.AggregateOptions) (result map[string]interface{}, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Aggregate(ctx, spaceId, envId, collectionId, filter, options...)
+}
+
+func (m *errorLoggingMiddleware) AggregatePublished(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.AggregatePublishedOptions) (result map[string]interface{}, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.AggregatePublished(ctx, spaceId, envId, collectionId, filter, options...)
+}
+
+func (m *errorLoggingMiddleware) Archive(ctx context.Context, item *items.Item, options ...*items.ArchiveOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Archive(ctx, item, options...)
+}
+
+func (m *errorLoggingMiddleware) Create(ctx context.Context, item *items.Item, opts ...*items.CreateOptions) (created *items.Item, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Create(ctx, item, opts...)
+}
+
+func (m *errorLoggingMiddleware) Delete(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.DeleteOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Delete(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *errorLoggingMiddleware) Find(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindOptions) (items []*items.Item, total int, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Find(ctx, spaceId, envId, collectionId, filter, options...)
+}
+
+func (m *errorLoggingMiddleware) FindArchived(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindArchivedOptions) (items []*items.Item, total int, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.FindArchived(ctx, spaceId, envId, collectionId, filter, options...)
+}
+
+func (m *errorLoggingMiddleware) FindPublished(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindPublishedOptions) (items []*items.Item, total int, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.FindPublished(ctx, spaceId, envId, collectionId, filter, options...)
+}
+
+func (m *errorLoggingMiddleware) Get(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.GetOptions) (item *items.Item, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Get(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *errorLoggingMiddleware) GetPublished(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.GetPublishedOptions) (item *items.Item, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.GetPublished(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *errorLoggingMiddleware) GetRevision(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, revisionId string, options ...*items.GetRevisionOptions) (item *items.Item, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.GetRevision(ctx, spaceId, envId, collectionId, itemId, revisionId, options...)
+}
+
+func (m *errorLoggingMiddleware) Introspect(ctx context.Context, item *items.Item, opts ...*items.IntrospectOptions) (itm *items.Item, sch *schema.Schema, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Introspect(ctx, item, opts...)
+}
+
+func (m *errorLoggingMiddleware) ListRevisions(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.ListRevisionsOptions) (items []*items.Item, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.ListRevisions(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *errorLoggingMiddleware) Publish(ctx context.Context, item *items.Item, options ...*items.PublishOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Publish(ctx, item, options...)
+}
+
+func (m *errorLoggingMiddleware) Unarchive(ctx context.Context, item *items.Item, options ...*items.UnarchiveOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Unarchive(ctx, item, options...)
+}
+
+func (m *errorLoggingMiddleware) Undelete(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.UndeleteOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Undelete(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *errorLoggingMiddleware) Unpublish(ctx context.Context, item *items.Item, options ...*items.UnpublishOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Unpublish(ctx, item, options...)
+}
+
+func (m *errorLoggingMiddleware) Update(ctx context.Context, item *items.Item, options ...*items.UpdateOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Update(ctx, item, options...)
+}
diff --git a/pkg/items/middleware/logging_middleware.go b/pkg/items/middleware/logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..102b91874c63655c169e5c101b10490e375833b9
--- /dev/null
+++ b/pkg/items/middleware/logging_middleware.go
@@ -0,0 +1,732 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/access_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/items -i Items -t ../../../assets/templates/middleware/access_log -o logging_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/auth"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"go.uber.org/zap"
+	"go.uber.org/zap/zapcore"
+)
+
+// loggingMiddleware implements items.Items that is instrumented with logging
+type loggingMiddleware struct {
+	logger *zap.Logger
+	next   items.Items
+}
+
+// LoggingMiddleware instruments an implementation of the items.Items with simple logging
+func LoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next items.Items) items.Items {
+		return &loggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *loggingMiddleware) Aggregate(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.AggregateOptions) (result map[string]interface{}, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"filter":       filter,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Aggregate.Request", fields...)
+
+	result, err = m.next.Aggregate(ctx, spaceId, envId, collectionId, filter, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"result": result,
+		"err":    err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Aggregate.Response", fields...)
+
+	return result, err
+}
+
+func (m *loggingMiddleware) AggregatePublished(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.AggregatePublishedOptions) (result map[string]interface{}, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"filter":       filter,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("AggregatePublished.Request", fields...)
+
+	result, err = m.next.AggregatePublished(ctx, spaceId, envId, collectionId, filter, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"result": result,
+		"err":    err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("AggregatePublished.Response", fields...)
+
+	return result, err
+}
+
+func (m *loggingMiddleware) Archive(ctx context.Context, item *items.Item, options ...*items.ArchiveOptions) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"item":    item,
+		"options": options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Archive.Request", fields...)
+
+	err = m.next.Archive(ctx, item, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Archive.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Create(ctx context.Context, item *items.Item, opts ...*items.CreateOptions) (created *items.Item, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":  ctx,
+		"item": item,
+		"opts": opts} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Create.Request", fields...)
+
+	created, err = m.next.Create(ctx, item, opts...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"created": created,
+		"err":     err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Create.Response", fields...)
+
+	return created, err
+}
+
+func (m *loggingMiddleware) Delete(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.DeleteOptions) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"itemId":       itemId,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Delete.Request", fields...)
+
+	err = m.next.Delete(ctx, spaceId, envId, collectionId, itemId, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Delete.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Find(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindOptions) (items []*items.Item, total int, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"filter":       filter,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Find.Request", fields...)
+
+	items, total, err = m.next.Find(ctx, spaceId, envId, collectionId, filter, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"items": items,
+		"total": total,
+		"err":   err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Find.Response", fields...)
+
+	return items, total, err
+}
+
+func (m *loggingMiddleware) FindArchived(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindArchivedOptions) (items []*items.Item, total int, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"filter":       filter,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("FindArchived.Request", fields...)
+
+	items, total, err = m.next.FindArchived(ctx, spaceId, envId, collectionId, filter, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"items": items,
+		"total": total,
+		"err":   err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("FindArchived.Response", fields...)
+
+	return items, total, err
+}
+
+func (m *loggingMiddleware) FindPublished(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindPublishedOptions) (items []*items.Item, total int, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"filter":       filter,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("FindPublished.Request", fields...)
+
+	items, total, err = m.next.FindPublished(ctx, spaceId, envId, collectionId, filter, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"items": items,
+		"total": total,
+		"err":   err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("FindPublished.Response", fields...)
+
+	return items, total, err
+}
+
+func (m *loggingMiddleware) Get(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.GetOptions) (item *items.Item, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"itemId":       itemId,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Request", fields...)
+
+	item, err = m.next.Get(ctx, spaceId, envId, collectionId, itemId, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"item": item,
+		"err":  err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Response", fields...)
+
+	return item, err
+}
+
+func (m *loggingMiddleware) GetPublished(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.GetPublishedOptions) (item *items.Item, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"itemId":       itemId,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("GetPublished.Request", fields...)
+
+	item, err = m.next.GetPublished(ctx, spaceId, envId, collectionId, itemId, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"item": item,
+		"err":  err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("GetPublished.Response", fields...)
+
+	return item, err
+}
+
+func (m *loggingMiddleware) GetRevision(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, revisionId string, options ...*items.GetRevisionOptions) (item *items.Item, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"itemId":       itemId,
+		"revisionId":   revisionId,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("GetRevision.Request", fields...)
+
+	item, err = m.next.GetRevision(ctx, spaceId, envId, collectionId, itemId, revisionId, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"item": item,
+		"err":  err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("GetRevision.Response", fields...)
+
+	return item, err
+}
+
+func (m *loggingMiddleware) Introspect(ctx context.Context, item *items.Item, opts ...*items.IntrospectOptions) (itm *items.Item, sch *schema.Schema, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":  ctx,
+		"item": item,
+		"opts": opts} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Introspect.Request", fields...)
+
+	itm, sch, err = m.next.Introspect(ctx, item, opts...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"itm": itm,
+		"sch": sch,
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Introspect.Response", fields...)
+
+	return itm, sch, err
+}
+
+func (m *loggingMiddleware) ListRevisions(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.ListRevisionsOptions) (items []*items.Item, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"itemId":       itemId,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("ListRevisions.Request", fields...)
+
+	items, err = m.next.ListRevisions(ctx, spaceId, envId, collectionId, itemId, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"items": items,
+		"err":   err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("ListRevisions.Response", fields...)
+
+	return items, err
+}
+
+func (m *loggingMiddleware) Publish(ctx context.Context, item *items.Item, options ...*items.PublishOptions) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"item":    item,
+		"options": options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Publish.Request", fields...)
+
+	err = m.next.Publish(ctx, item, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Publish.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Unarchive(ctx context.Context, item *items.Item, options ...*items.UnarchiveOptions) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"item":    item,
+		"options": options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Unarchive.Request", fields...)
+
+	err = m.next.Unarchive(ctx, item, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Unarchive.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Undelete(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.UndeleteOptions) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":          ctx,
+		"spaceId":      spaceId,
+		"envId":        envId,
+		"collectionId": collectionId,
+		"itemId":       itemId,
+		"options":      options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Undelete.Request", fields...)
+
+	err = m.next.Undelete(ctx, spaceId, envId, collectionId, itemId, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Undelete.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Unpublish(ctx context.Context, item *items.Item, options ...*items.UnpublishOptions) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"item":    item,
+		"options": options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Unpublish.Request", fields...)
+
+	err = m.next.Unpublish(ctx, item, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Unpublish.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Update(ctx context.Context, item *items.Item, options ...*items.UpdateOptions) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"item":    item,
+		"options": options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Update.Request", fields...)
+
+	err = m.next.Update(ctx, item, options...)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Update.Response", fields...)
+
+	return err
+}
diff --git a/pkg/items/middleware/middleware.go b/pkg/items/middleware/middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..a1090fe5ad072cd42682c1a4fad8504f22136926
--- /dev/null
+++ b/pkg/items/middleware/middleware.go
@@ -0,0 +1,28 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/middleware
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/items -i Items -t ../../../assets/templates/middleware/middleware -o middleware.go -l ""
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"go.uber.org/zap"
+)
+
+type Middleware func(items.Items) items.Items
+
+func WithLog(s items.Items, logger *zap.Logger, log_access bool) items.Items {
+	if logger == nil {
+		logger = zap.NewNop()
+	}
+
+	logger = logger.Named("Items")
+	s = ErrorLoggingMiddleware(logger)(s)
+	if log_access {
+		s = LoggingMiddleware(logger)(s)
+	}
+	s = RecoveringMiddleware(logger)(s)
+	return s
+}
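A minimal usage sketch for WithLog, assuming a concrete items.Items implementation supplied by the caller (the implementation name is hypothetical). Note that the files in this directory declare `package service`, so the import below uses an explicit alias:

// Hedged usage sketch (not part of the generated code). The package in
// pkg/items/middleware is declared as `package service`, so it is imported
// under an explicit alias; `impl` stands for any concrete items.Items
// implementation and is hypothetical.
package example

import (
	"go.uber.org/zap"

	"git.perx.ru/perxis/perxis-go/pkg/items"
	middleware "git.perx.ru/perxis/perxis-go/pkg/items/middleware"
)

func newLoggedItems(impl items.Items, logger *zap.Logger) items.Items {
	// With log_access=true the service is wrapped as
	// RecoveringMiddleware(LoggingMiddleware(ErrorLoggingMiddleware(impl))):
	// panics are recovered outermost, access logging happens at Debug level,
	// and errors are reported via logger.Warn.
	return middleware.WithLog(impl, logger, true)
}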
diff --git a/pkg/items/middleware/recovering_middleware.go b/pkg/items/middleware/recovering_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..244fc8a0638fa9ad812afeb3f919907171fb9b1e
--- /dev/null
+++ b/pkg/items/middleware/recovering_middleware.go
@@ -0,0 +1,248 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/recovery
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/items -i Items -t ../../../assets/templates/middleware/recovery -o recovering_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"go.uber.org/zap"
+)
+
+// recoveringMiddleware implements items.Items that is instrumented with logging
+type recoveringMiddleware struct {
+	logger *zap.Logger
+	next   items.Items
+}
+
+// RecoveringMiddleware instruments an implementation of the items.Items with simple logging
+func RecoveringMiddleware(logger *zap.Logger) Middleware {
+	return func(next items.Items) items.Items {
+		return &recoveringMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *recoveringMiddleware) Aggregate(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.AggregateOptions) (result map[string]interface{}, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Aggregate(ctx, spaceId, envId, collectionId, filter, options...)
+}
+
+func (m *recoveringMiddleware) AggregatePublished(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.AggregatePublishedOptions) (result map[string]interface{}, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.AggregatePublished(ctx, spaceId, envId, collectionId, filter, options...)
+}
+
+func (m *recoveringMiddleware) Archive(ctx context.Context, item *items.Item, options ...*items.ArchiveOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Archive(ctx, item, options...)
+}
+
+func (m *recoveringMiddleware) Create(ctx context.Context, item *items.Item, opts ...*items.CreateOptions) (created *items.Item, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Create(ctx, item, opts...)
+}
+
+func (m *recoveringMiddleware) Delete(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.DeleteOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Delete(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *recoveringMiddleware) Find(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindOptions) (items []*items.Item, total int, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Find(ctx, spaceId, envId, collectionId, filter, options...)
+}
+
+func (m *recoveringMiddleware) FindArchived(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindArchivedOptions) (items []*items.Item, total int, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.FindArchived(ctx, spaceId, envId, collectionId, filter, options...)
+}
+
+func (m *recoveringMiddleware) FindPublished(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindPublishedOptions) (items []*items.Item, total int, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.FindPublished(ctx, spaceId, envId, collectionId, filter, options...)
+}
+
+func (m *recoveringMiddleware) Get(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.GetOptions) (item *items.Item, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Get(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *recoveringMiddleware) GetPublished(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.GetPublishedOptions) (item *items.Item, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.GetPublished(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *recoveringMiddleware) GetRevision(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, revisionId string, options ...*items.GetRevisionOptions) (item *items.Item, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.GetRevision(ctx, spaceId, envId, collectionId, itemId, revisionId, options...)
+}
+
+func (m *recoveringMiddleware) Introspect(ctx context.Context, item *items.Item, opts ...*items.IntrospectOptions) (itm *items.Item, sch *schema.Schema, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Introspect(ctx, item, opts...)
+}
+
+func (m *recoveringMiddleware) ListRevisions(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.ListRevisionsOptions) (items []*items.Item, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.ListRevisions(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *recoveringMiddleware) Publish(ctx context.Context, item *items.Item, options ...*items.PublishOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Publish(ctx, item, options...)
+}
+
+func (m *recoveringMiddleware) Unarchive(ctx context.Context, item *items.Item, options ...*items.UnarchiveOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Unarchive(ctx, item, options...)
+}
+
+func (m *recoveringMiddleware) Undelete(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.UndeleteOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Undelete(ctx, spaceId, envId, collectionId, itemId, options...)
+}
+
+func (m *recoveringMiddleware) Unpublish(ctx context.Context, item *items.Item, options ...*items.UnpublishOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Unpublish(ctx, item, options...)
+}
+
+func (m *recoveringMiddleware) Update(ctx context.Context, item *items.Item, options ...*items.UpdateOptions) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Update(ctx, item, options...)
+}
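The recovery template relies on Go named return values: the deferred closure can overwrite err after recover(), so a panic in the wrapped call reaches the caller as an ordinary error. A standalone sketch of that pattern (names are illustrative only):

// Standalone illustration (hypothetical names) of the pattern used by
// recovering_middleware.go: recover() inside a deferred closure rewrites
// the named return value err, turning a panic into a normal error.
package example

import "fmt"

func callSafely(f func() error) (err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("%v", r) // overwrite the named result
		}
	}()
	return f()
}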
diff --git a/pkg/items/mocks/Items.go b/pkg/items/mocks/Items.go
new file mode 100644
index 0000000000000000000000000000000000000000..1d3ea35f22d13e65afc70e6c7cc847c60aa0f8a7
--- /dev/null
+++ b/pkg/items/mocks/Items.go
@@ -0,0 +1,538 @@
+// Code generated by mockery v2.14.0. DO NOT EDIT.
+
+package mocks
+
+import (
+	context "context"
+
+	items "git.perx.ru/perxis/perxis-go/pkg/items"
+	schema "git.perx.ru/perxis/perxis-go/pkg/schema"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// Items is an autogenerated mock type for the Items type
+type Items struct {
+	mock.Mock
+}
+
+// Aggregate provides a mock function with given fields: ctx, spaceId, envId, collectionId, filter, options
+func (_m *Items) Aggregate(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.AggregateOptions) (map[string]interface{}, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, filter)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 map[string]interface{}
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *items.Filter, ...*items.AggregateOptions) map[string]interface{}); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(map[string]interface{})
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, *items.Filter, ...*items.AggregateOptions) error); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// AggregatePublished provides a mock function with given fields: ctx, spaceId, envId, collectionId, filter, options
+func (_m *Items) AggregatePublished(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.AggregatePublishedOptions) (map[string]interface{}, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, filter)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 map[string]interface{}
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *items.Filter, ...*items.AggregatePublishedOptions) map[string]interface{}); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(map[string]interface{})
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, *items.Filter, ...*items.AggregatePublishedOptions) error); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Archive provides a mock function with given fields: ctx, item, options
+func (_m *Items) Archive(ctx context.Context, item *items.Item, options ...*items.ArchiveOptions) error {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, item)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.ArchiveOptions) error); ok {
+		r0 = rf(ctx, item, options...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Create provides a mock function with given fields: ctx, item, opts
+func (_m *Items) Create(ctx context.Context, item *items.Item, opts ...*items.CreateOptions) (*items.Item, error) {
+	_va := make([]interface{}, len(opts))
+	for _i := range opts {
+		_va[_i] = opts[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, item)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 *items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.CreateOptions) *items.Item); ok {
+		r0 = rf(ctx, item, opts...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*items.Item)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, *items.Item, ...*items.CreateOptions) error); ok {
+		r1 = rf(ctx, item, opts...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Delete provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, options
+func (_m *Items) Delete(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.DeleteOptions) error {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, itemId)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, ...*items.DeleteOptions) error); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Find provides a mock function with given fields: ctx, spaceId, envId, collectionId, filter, options
+func (_m *Items) Find(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindOptions) ([]*items.Item, int, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, filter)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 []*items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *items.Filter, ...*items.FindOptions) []*items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*items.Item)
+		}
+	}
+
+	var r1 int
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, *items.Filter, ...*items.FindOptions) int); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r1 = ret.Get(1).(int)
+	}
+
+	var r2 error
+	if rf, ok := ret.Get(2).(func(context.Context, string, string, string, *items.Filter, ...*items.FindOptions) error); ok {
+		r2 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r2 = ret.Error(2)
+	}
+
+	return r0, r1, r2
+}
+
+// FindArchived provides a mock function with given fields: ctx, spaceId, envId, collectionId, filter, options
+func (_m *Items) FindArchived(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindArchivedOptions) ([]*items.Item, int, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, filter)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 []*items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *items.Filter, ...*items.FindArchivedOptions) []*items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*items.Item)
+		}
+	}
+
+	var r1 int
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, *items.Filter, ...*items.FindArchivedOptions) int); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r1 = ret.Get(1).(int)
+	}
+
+	var r2 error
+	if rf, ok := ret.Get(2).(func(context.Context, string, string, string, *items.Filter, ...*items.FindArchivedOptions) error); ok {
+		r2 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r2 = ret.Error(2)
+	}
+
+	return r0, r1, r2
+}
+
+// FindPublished provides a mock function with given fields: ctx, spaceId, envId, collectionId, filter, options
+func (_m *Items) FindPublished(ctx context.Context, spaceId string, envId string, collectionId string, filter *items.Filter, options ...*items.FindPublishedOptions) ([]*items.Item, int, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, filter)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 []*items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *items.Filter, ...*items.FindPublishedOptions) []*items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*items.Item)
+		}
+	}
+
+	var r1 int
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, *items.Filter, ...*items.FindPublishedOptions) int); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r1 = ret.Get(1).(int)
+	}
+
+	var r2 error
+	if rf, ok := ret.Get(2).(func(context.Context, string, string, string, *items.Filter, ...*items.FindPublishedOptions) error); ok {
+		r2 = rf(ctx, spaceId, envId, collectionId, filter, options...)
+	} else {
+		r2 = ret.Error(2)
+	}
+
+	return r0, r1, r2
+}
+
+// Get provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, options
+func (_m *Items) Get(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.GetOptions) (*items.Item, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, itemId)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 *items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, ...*items.GetOptions) *items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*items.Item)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, string, ...*items.GetOptions) error); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// GetPublished provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, options
+func (_m *Items) GetPublished(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.GetPublishedOptions) (*items.Item, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, itemId)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 *items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, ...*items.GetPublishedOptions) *items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*items.Item)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, string, ...*items.GetPublishedOptions) error); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// GetRevision provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, revisionId, options
+func (_m *Items) GetRevision(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, revisionId string, options ...*items.GetRevisionOptions) (*items.Item, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, itemId, revisionId)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 *items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, string, ...*items.GetRevisionOptions) *items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, itemId, revisionId, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*items.Item)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, string, string, ...*items.GetRevisionOptions) error); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, itemId, revisionId, options...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Introspect provides a mock function with given fields: ctx, item, opts
+func (_m *Items) Introspect(ctx context.Context, item *items.Item, opts ...*items.IntrospectOptions) (*items.Item, *schema.Schema, error) {
+	_va := make([]interface{}, len(opts))
+	for _i := range opts {
+		_va[_i] = opts[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, item)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 *items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.IntrospectOptions) *items.Item); ok {
+		r0 = rf(ctx, item, opts...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*items.Item)
+		}
+	}
+
+	var r1 *schema.Schema
+	if rf, ok := ret.Get(1).(func(context.Context, *items.Item, ...*items.IntrospectOptions) *schema.Schema); ok {
+		r1 = rf(ctx, item, opts...)
+	} else {
+		if ret.Get(1) != nil {
+			r1 = ret.Get(1).(*schema.Schema)
+		}
+	}
+
+	var r2 error
+	if rf, ok := ret.Get(2).(func(context.Context, *items.Item, ...*items.IntrospectOptions) error); ok {
+		r2 = rf(ctx, item, opts...)
+	} else {
+		r2 = ret.Error(2)
+	}
+
+	return r0, r1, r2
+}
+
+// ListRevisions provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, options
+func (_m *Items) ListRevisions(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.ListRevisionsOptions) ([]*items.Item, error) {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, itemId)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 []*items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, ...*items.ListRevisionsOptions) []*items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*items.Item)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, string, ...*items.ListRevisionsOptions) error); ok {
+		r1 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Publish provides a mock function with given fields: ctx, item, options
+func (_m *Items) Publish(ctx context.Context, item *items.Item, options ...*items.PublishOptions) error {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, item)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.PublishOptions) error); ok {
+		r0 = rf(ctx, item, options...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Unarchive provides a mock function with given fields: ctx, item, options
+func (_m *Items) Unarchive(ctx context.Context, item *items.Item, options ...*items.UnarchiveOptions) error {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, item)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.UnarchiveOptions) error); ok {
+		r0 = rf(ctx, item, options...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Undelete provides a mock function with given fields: ctx, spaceId, envId, collectionId, itemId, options
+func (_m *Items) Undelete(ctx context.Context, spaceId string, envId string, collectionId string, itemId string, options ...*items.UndeleteOptions) error {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, spaceId, envId, collectionId, itemId)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, ...*items.UndeleteOptions) error); ok {
+		r0 = rf(ctx, spaceId, envId, collectionId, itemId, options...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Unpublish provides a mock function with given fields: ctx, item, options
+func (_m *Items) Unpublish(ctx context.Context, item *items.Item, options ...*items.UnpublishOptions) error {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, item)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.UnpublishOptions) error); ok {
+		r0 = rf(ctx, item, options...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Update provides a mock function with given fields: ctx, item, options
+func (_m *Items) Update(ctx context.Context, item *items.Item, options ...*items.UpdateOptions) error {
+	_va := make([]interface{}, len(options))
+	for _i := range options {
+		_va[_i] = options[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, ctx, item)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *items.Item, ...*items.UpdateOptions) error); ok {
+		r0 = rf(ctx, item, options...)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+type mockConstructorTestingTNewItems interface {
+	mock.TestingT
+	Cleanup(func())
+}
+
+// NewItems creates a new instance of Items. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+func NewItems(t mockConstructorTestingTNewItems) *Items {
+	mock := &Items{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
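A hedged test sketch for the generated mock; the IDs, the returned empty item, and the asserted call are illustrative only, not taken from this diff:

// Illustrative-only test using the mockery-generated mock. Because Get is
// variadic, a call without options is matched by an expectation with the
// five fixed arguments; expectations are asserted automatically via t.Cleanup.
package mocks_test

import (
	"context"
	"testing"

	"git.perx.ru/perxis/perxis-go/pkg/items"
	"git.perx.ru/perxis/perxis-go/pkg/items/mocks"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"
)

func TestGetWithMock(t *testing.T) {
	m := mocks.NewItems(t)

	m.On("Get", mock.Anything, "space", "env", "coll", "item").
		Return(&items.Item{}, nil)

	got, err := m.Get(context.Background(), "space", "env", "coll", "item")
	require.NoError(t, err)
	require.NotNil(t, got)
}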
diff --git a/pkg/items/mocks/PreSaver.go b/pkg/items/mocks/PreSaver.go
new file mode 100644
index 0000000000000000000000000000000000000000..6010e17bd14db83518507ac53ab35076a6cbc5bf
--- /dev/null
+++ b/pkg/items/mocks/PreSaver.go
@@ -0,0 +1,62 @@
+// Code generated by mockery v2.14.0. DO NOT EDIT.
+
+package mocks
+
+import (
+	context "context"
+
+	items "git.perx.ru/perxis/perxis-go/pkg/items"
+	field "git.perx.ru/perxis/perxis-go/pkg/schema/field"
+
+	mock "github.com/stretchr/testify/mock"
+)
+
+// PreSaver is an autogenerated mock type for the PreSaver type
+type PreSaver struct {
+	mock.Mock
+}
+
+// PreSave provides a mock function with given fields: ctx, f, v, itemCtx
+func (_m *PreSaver) PreSave(ctx context.Context, f *field.Field, v interface{}, itemCtx *items.Context) (interface{}, bool, error) {
+	ret := _m.Called(ctx, f, v, itemCtx)
+
+	var r0 interface{}
+	if rf, ok := ret.Get(0).(func(context.Context, *field.Field, interface{}, *items.Context) interface{}); ok {
+		r0 = rf(ctx, f, v, itemCtx)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(interface{})
+		}
+	}
+
+	var r1 bool
+	if rf, ok := ret.Get(1).(func(context.Context, *field.Field, interface{}, *items.Context) bool); ok {
+		r1 = rf(ctx, f, v, itemCtx)
+	} else {
+		r1 = ret.Get(1).(bool)
+	}
+
+	var r2 error
+	if rf, ok := ret.Get(2).(func(context.Context, *field.Field, interface{}, *items.Context) error); ok {
+		r2 = rf(ctx, f, v, itemCtx)
+	} else {
+		r2 = ret.Error(2)
+	}
+
+	return r0, r1, r2
+}
+
+type mockConstructorTestingTNewPreSaver interface {
+	mock.TestingT
+	Cleanup(func())
+}
+
+// NewPreSaver creates a new instance of PreSaver. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+func NewPreSaver(t mockConstructorTestingTNewPreSaver) *PreSaver {
+	mock := &PreSaver{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
diff --git a/pkg/items/options.go b/pkg/items/options.go
new file mode 100644
index 0000000000000000000000000000000000000000..d48a1cdaacb157e46c1185a37602c1703bb3b59e
--- /dev/null
+++ b/pkg/items/options.go
@@ -0,0 +1,422 @@
+package items
+
+import "git.perx.ru/perxis/perxis-go/pkg/options"
+
+type Options struct {
+	Env               map[string]interface{}
+	Filter            []string
+	PermissionsFilter []string
+}
+
+func MergeOptions(opts ...Options) Options {
+	o := Options{
+		Env:    make(map[string]interface{}),
+		Filter: make([]string, 0),
+	}
+
+	for _, opt := range opts {
+
+		for k, v := range opt.Env {
+			o.Env[k] = v
+		}
+
+		o.Filter = append(o.Filter, opt.Filter...)
+		o.PermissionsFilter = append(o.PermissionsFilter, opt.PermissionsFilter...)
+	}
+
+	return o
+}
+
+type CreateOptions struct {
+	Options
+
+	UpdateAttrs bool
+}
+
+func MergeCreateOptions(opts ...*CreateOptions) *CreateOptions {
+	o := &CreateOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		if opt.UpdateAttrs {
+			o.UpdateAttrs = true
+		}
+
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type IntrospectOptions struct {
+	Options
+	Locale string
+}
+
+func MergeIntrospectOptions(opts ...*IntrospectOptions) *IntrospectOptions {
+	o := &IntrospectOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type GetOptions struct {
+	Options
+}
+
+func MergeGetOptions(opts ...*GetOptions) *GetOptions {
+	o := &GetOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type FindOptions struct {
+	Options
+	options.FindOptions
+	Deleted   bool
+	Regular   bool
+	Hidden    bool
+	Templates bool
+}
+
+func NewFindOptions(opts ...interface{}) *FindOptions {
+	fo := &FindOptions{}
+	fo.FindOptions = *options.MergeFindOptions(opts...)
+	return fo
+}
+
+func MergeFindOptions(opts ...*FindOptions) *FindOptions {
+	o := NewFindOptions()
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Regular = o.Regular || opt.Regular
+		o.Templates = o.Templates || opt.Templates
+		o.Hidden = o.Hidden || opt.Hidden
+		o.Deleted = o.Deleted || opt.Deleted
+		o.Options = MergeOptions(o.Options, opt.Options)
+		o.FindOptions = *options.MergeFindOptions(&o.FindOptions, &opt.FindOptions)
+	}
+	return o
+}
+
+type UpdateOptions struct {
+	Options
+
+	UpdateAttrs bool
+}
+
+func MergeUpdateOptions(opts ...*UpdateOptions) *UpdateOptions {
+	o := &UpdateOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		if opt.UpdateAttrs {
+			o.UpdateAttrs = true
+		}
+
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type DeleteOptions struct {
+	Options
+
+	Erase bool
+}
+
+func MergeDeleteOptions(opts ...*DeleteOptions) *DeleteOptions {
+	o := &DeleteOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		if opt.Erase {
+			o.Erase = true
+		}
+
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type SoftDeleteOptions struct {
+	Options
+}
+
+func MergeSoftDeleteOptions(opts ...*SoftDeleteOptions) *SoftDeleteOptions {
+	o := &SoftDeleteOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type UndeleteOptions struct {
+	Options
+}
+
+func MergeUndeleteOptions(opts ...*UndeleteOptions) *UndeleteOptions {
+	o := &UndeleteOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type PublishOptions struct {
+	Options
+
+	UpdateAttrs bool
+}
+
+func MergePublishOptions(opts ...*PublishOptions) *PublishOptions {
+	o := &PublishOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		if opt.UpdateAttrs {
+			o.UpdateAttrs = true
+		}
+
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type UnpublishOptions struct {
+	Options
+}
+
+func MergeUnpublishOptions(opts ...*UnpublishOptions) *UnpublishOptions {
+	o := &UnpublishOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type GetPublishedOptions struct {
+	Options
+	LocaleID string
+}
+
+func NewGetPublishedOptions(oo ...interface{}) *GetPublishedOptions {
+	fo := &GetPublishedOptions{}
+	for _, o := range oo {
+		switch o := o.(type) {
+		case string:
+			fo.LocaleID = o
+		}
+	}
+	return fo
+}
+
+func MergeGetPublishedOptions(opts ...*GetPublishedOptions) *GetPublishedOptions {
+	o := &GetPublishedOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+		if opt.LocaleID != "" {
+			o.LocaleID = opt.LocaleID
+		}
+	}
+	return o
+}
+
+type FindPublishedOptions struct {
+	Options
+	options.FindOptions
+	LocaleID  string
+	Regular   bool
+	Hidden    bool
+	Templates bool
+}
+
+func NewFindPublishedOptions(opts ...interface{}) *FindPublishedOptions {
+	fo := &FindPublishedOptions{}
+	for _, o := range opts {
+		switch o := o.(type) {
+		case string:
+			fo.LocaleID = o
+		}
+	}
+
+	fo.FindOptions = *options.MergeFindOptions(opts...)
+	return fo
+}
+
+func MergeFindPublishedOptions(opts ...*FindPublishedOptions) *FindPublishedOptions {
+	o := NewFindPublishedOptions()
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Regular = o.Regular || opt.Regular
+		o.Templates = o.Templates || opt.Templates
+		o.Hidden = o.Hidden || opt.Hidden
+		o.Options = MergeOptions(o.Options, opt.Options)
+		o.FindOptions = *options.MergeFindOptions(&o.FindOptions, &opt.FindOptions)
+
+		if opt.LocaleID != "" {
+			o.LocaleID = opt.LocaleID
+		}
+	}
+	return o
+}
+
+type GetRevisionOptions struct {
+	Options
+}
+
+func MergeGetRevisionOptions(opts ...*GetRevisionOptions) *GetRevisionOptions {
+	o := &GetRevisionOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type ListRevisionsOptions struct {
+	Options
+	options.FindOptions
+}
+
+func MergeListRevisionsOptions(opts ...*ListRevisionsOptions) *ListRevisionsOptions {
+	o := &ListRevisionsOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+		o.FindOptions = *options.MergeFindOptions(&o.FindOptions, &opt.FindOptions)
+	}
+	return o
+}
+
+type ArchiveOptions struct {
+	Options
+}
+
+func MergeArchiveOptions(opts ...*ArchiveOptions) *ArchiveOptions {
+	o := &ArchiveOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type FindArchivedOptions struct {
+	Options
+	options.FindOptions
+}
+
+func NewFindArchivedOptions(oo ...interface{}) *FindArchivedOptions {
+	fo := &FindArchivedOptions{}
+	fo.FindOptions = *options.MergeFindOptions(oo...)
+	return fo
+}
+
+func MergeFindArchivedOptions(opts ...*FindArchivedOptions) *FindArchivedOptions {
+	o := NewFindArchivedOptions()
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+		o.FindOptions = *options.MergeFindOptions(&o.FindOptions, &opt.FindOptions)
+	}
+	return o
+}
+
+type UnarchiveOptions struct {
+	Options
+}
+
+func MergeUnarchiveOptions(opts ...*UnarchiveOptions) *UnarchiveOptions {
+	o := &UnarchiveOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+	}
+	return o
+}
+
+type AggregateOptions struct {
+	Options
+	options.SortOptions
+
+	// Fields lists the fields to be returned or computed in the result.
+	// Key (string) - the name under which the computed value is added to the result.
+	// Value (string) - an expression whose evaluation produces the value.
+	// Expression functions (for a field F of type T):
+	// - distinct(F) - all values of the field, result type []T
+	// - min(F) - minimum value of the field, result type T
+	// - max(F) - maximum value of the field, result type T
+	// - avg(F) - average value of the field, result type T
+	// - sum(F) - sum of the field values, result type T
+	// - count() - number of records, result type int
+	Fields map[string]string
+}
+
+func MergeAggregateOptions(opts ...*AggregateOptions) *AggregateOptions {
+	o := &AggregateOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+		o.Options = MergeOptions(o.Options, opt.Options)
+
+		if o.Fields == nil {
+			o.Fields = opt.Fields
+			continue
+		}
+		for k, v := range opt.Fields {
+			o.Fields[k] = v
+		}
+	}
+	return o
+}
+
+type AggregatePublishedOptions AggregateOptions
+
+func MergeAggregatePublishedOptions(opts ...*AggregatePublishedOptions) *AggregatePublishedOptions {
+	ao := make([]*AggregateOptions, len(opts))
+	for i, opt := range opts {
+		ao[i] = (*AggregateOptions)(opt)
+	}
+	merged := MergeAggregateOptions(ao...)
+	return (*AggregatePublishedOptions)(merged)
+}
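
To illustrate the merge semantics above, a small sketch of composing AggregateOptions; the field names and expressions are illustrative and follow the Fields comment (count(), min(F), distinct(F), ...):

```go
package main

import (
	"fmt"

	"git.perx.ru/perxis/perxis-go/pkg/items"
)

func main() {
	// Aggregation request: output name -> expression, as documented on Fields.
	base := &items.AggregateOptions{Fields: map[string]string{
		"total":  "count()",
		"minAge": "min(age)",
	}}
	extra := &items.AggregateOptions{Fields: map[string]string{
		"tags": "distinct(tags)",
	}}

	// nil options are skipped; overlapping Fields keys are overwritten by later options.
	merged := items.MergeAggregateOptions(base, nil, extra)
	fmt.Println(merged.Fields) // map[minAge:min(age) tags:distinct(tags) total:count()]
}
```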
diff --git a/pkg/items/pagination.go b/pkg/items/pagination.go
new file mode 100644
index 0000000000000000000000000000000000000000..6fe197d7d36c1e191e300dfb2a7799d4eac58a94
--- /dev/null
+++ b/pkg/items/pagination.go
@@ -0,0 +1,136 @@
+package items
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/data"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+	"google.golang.org/grpc/codes"
+)
+
+type BatchProcessor struct {
+	Items                        Items
+	SpaceID, EnvID, CollectionID string
+	FindOptions                  *FindOptions
+	FindPublishedOptions         *FindPublishedOptions
+	Filter                       *Filter
+
+	pageSize, pageNum int
+	sort              []string
+	processed         int
+}
+
+func (b *BatchProcessor) getBatch(ctx context.Context) ([]*Item, bool, error) {
+	var res []*Item
+	var err error
+	var total int
+
+	if b.FindPublishedOptions != nil {
+		res, total, err = b.Items.FindPublished(
+			ctx,
+			b.SpaceID,
+			b.EnvID,
+			b.CollectionID,
+			b.Filter,
+			&FindPublishedOptions{
+				Regular:     b.FindPublishedOptions.Regular,
+				Hidden:      b.FindPublishedOptions.Hidden,
+				Templates:   b.FindPublishedOptions.Templates,
+				FindOptions: *options.NewFindOptions(b.pageNum, b.pageSize, b.sort...),
+			},
+		)
+	} else {
+		res, total, err = b.Items.Find(
+			ctx,
+			b.SpaceID,
+			b.EnvID,
+			b.CollectionID,
+			b.Filter,
+			&FindOptions{
+				Deleted:     b.FindOptions.Deleted,
+				Regular:     b.FindOptions.Regular,
+				Hidden:      b.FindOptions.Hidden,
+				Templates:   b.FindOptions.Templates,
+				FindOptions: *options.NewFindOptions(b.pageNum, b.pageSize, b.sort...),
+			},
+		)
+	}
+
+	if err == nil {
+		b.processed += len(res)
+		b.pageNum++
+	}
+
+	return res, b.processed != total, err
+}
+
+func (b *BatchProcessor) next(ctx context.Context) (res []*Item, next bool, err error) {
+
+	for {
+		res, next, err = b.getBatch(ctx)
+		if err != nil {
+			if errors.GetStatusCode(err) == codes.ResourceExhausted && b.reducePageSize() {
+				continue
+			}
+
+			return nil, false, err
+		}
+
+		break
+	}
+
+	return res, next, nil
+}
+
+func (b *BatchProcessor) reducePageSize() bool {
+	if b.pageSize == 1 {
+		return false
+	}
+
+	b.pageNum = 2 * b.pageNum
+	b.pageSize = b.pageSize / 2
+
+	return true
+}
+
+func (b *BatchProcessor) Do(ctx context.Context, f func(batch []*Item) error) (int, error) {
+
+	if b.FindOptions == nil && b.FindPublishedOptions == nil {
+		b.FindOptions = new(FindOptions)
+	}
+	if b.FindOptions != nil {
+		b.pageSize = b.FindOptions.PageSize
+		b.sort = b.FindOptions.Sort
+	}
+	if b.FindPublishedOptions != nil {
+		b.pageSize = b.FindPublishedOptions.PageSize
+		b.sort = b.FindPublishedOptions.Sort
+	}
+
+	if b.pageSize == 0 {
+		b.pageSize = 128
+	}
+
+	if b.Filter != nil && (len(b.Filter.ID) > 0 || len(b.Filter.Q) > 0) && !data.Contains("_id", b.sort) {
+		b.sort = append(b.sort, "_id")
+	}
+
+	var err error
+
+	next := true
+	for next {
+
+		var batch []*Item
+
+		batch, next, err = b.next(ctx)
+		if err != nil {
+			return 0, err
+		}
+
+		if err = f(batch); err != nil {
+			return 0, err
+		}
+	}
+	return b.processed, nil
+}
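
BatchProcessor pages through Find/FindPublished results and halves the page size when the backend answers with ResourceExhausted. A minimal usage sketch, assuming svc is any Items implementation (for example the gRPC client); the IDs are illustrative:

```go
package example

import (
	"context"
	"fmt"
	"log"

	"git.perx.ru/perxis/perxis-go/pkg/items"
)

func processAll(ctx context.Context, svc items.Items) {
	bp := &items.BatchProcessor{
		Items:        svc,
		SpaceID:      "space",
		EnvID:        "master",
		CollectionID: "articles",
		// Leaving FindOptions/FindPublishedOptions nil falls back to Find with page size 128.
	}

	total, err := bp.Do(ctx, func(batch []*items.Item) error {
		for range batch {
			// handle each item here
		}
		return nil
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("processed", total, "items")
}
```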
diff --git a/pkg/items/service.go b/pkg/items/service.go
new file mode 100644
index 0000000000000000000000000000000000000000..c10a69c55878a1fbfe571dc0fad594c338dccb99
--- /dev/null
+++ b/pkg/items/service.go
@@ -0,0 +1,151 @@
+package items
+
+import (
+	"context"
+	"regexp"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/filter"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+)
+
+// @microgen grpc
+// @protobuf git.perx.ru/perxis/perxis-go/proto/items
+// @grpc-addr content.items.Items
+type Items interface {
+	Create(ctx context.Context, item *Item, opts ...*CreateOptions) (created *Item, err error)
+	Introspect(ctx context.Context, item *Item, opts ...*IntrospectOptions) (itm *Item, sch *schema.Schema, err error)
+	Get(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*GetOptions) (item *Item, err error)
+	Find(ctx context.Context, spaceId, envId, collectionId string, filter *Filter, options ...*FindOptions) (items []*Item, total int, err error)
+	Update(ctx context.Context, item *Item, options ...*UpdateOptions) (err error)
+
+	// Delete removes an item.
+	// If the DeleteOptions.Erase flag is set, the data is permanently removed from the system.
+	// Otherwise a "soft delete" is performed: the item is marked as deleted, can be restored with Items.Undelete and is still returned by Items.Get/Find.
+	Delete(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*DeleteOptions) (err error)
+
+	// Undelete restores items after a "soft delete".
+	Undelete(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*UndeleteOptions) (err error)
+
+	Publish(ctx context.Context, item *Item, options ...*PublishOptions) (err error)
+	Unpublish(ctx context.Context, item *Item, options ...*UnpublishOptions) (err error)
+	GetPublished(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*GetPublishedOptions) (item *Item, err error)
+	FindPublished(ctx context.Context, spaceId, envId, collectionId string, filter *Filter, options ...*FindPublishedOptions) (items []*Item, total int, err error)
+
+	GetRevision(ctx context.Context, spaceId, envId, collectionId, itemId, revisionId string, options ...*GetRevisionOptions) (item *Item, err error)
+	ListRevisions(ctx context.Context, spaceId, envId, collectionId, itemId string, options ...*ListRevisionsOptions) (items []*Item, err error)
+
+	Archive(ctx context.Context, item *Item, options ...*ArchiveOptions) (err error)
+	FindArchived(ctx context.Context, spaceId, envId, collectionId string, filter *Filter, options ...*FindArchivedOptions) (items []*Item, total int, err error)
+	Unarchive(ctx context.Context, item *Item, options ...*UnarchiveOptions) (err error)
+
+	// Aggregate performs data aggregation.
+	Aggregate(ctx context.Context, spaceId, envId, collectionId string, filter *Filter, options ...*AggregateOptions) (result map[string]interface{}, err error)
+	// AggregatePublished performs aggregation over published data.
+	AggregatePublished(ctx context.Context, spaceId, envId, collectionId string, filter *Filter, options ...*AggregatePublishedOptions) (result map[string]interface{}, err error)
+}
+
+// PreSaver is an interface that a field can implement to receive the PreSave event before an Item is saved to Storage.
+type PreSaver interface {
+	PreSave(ctx context.Context, f *field.Field, v interface{}, itemCtx *Context) (interface{}, bool, error)
+}
+
+type Filter struct {
+	ID     []string
+	Data   []*filter.Filter
+	Search string // Search query; only one query is supported at a time
+	Q      []string
+}
+
+func NewFilter(params ...interface{}) *Filter {
+	f := &Filter{}
+	for _, p := range params {
+		switch v := p.(type) {
+		case *filter.Filter:
+			f.Data = append(f.Data, v)
+		case string:
+			f.Q = append(f.Q, v)
+		}
+	}
+	return f
+}
+
+// AggregateExpRe is the format that an aggregation expression must match.
+var AggregateExpRe = regexp.MustCompile(`([a-zA-Z]+)\((.*)\)`)
+
+func ParseAggregateExp(exp string) (string, string, bool) {
+	ss := AggregateExpRe.FindAllStringSubmatch(exp, -1)
+	if len(ss) == 0 || len(ss[0]) < 2 {
+		return "", "", false
+	}
+	return ss[0][1], ss[0][2], true
+}
+
+func DecodeAggregateResult(ctx context.Context, request map[string]string, r map[string]interface{}, s *schema.Schema) (map[string]interface{}, error) {
+	result := make(map[string]interface{}, len(r))
+	for outputField, exp := range request {
+
+		funcName, fldName, ok := ParseAggregateExp(exp)
+		if !ok || fldName == "" {
+			if v, ok := r[outputField]; ok {
+				result[outputField] = v
+			}
+			continue
+		}
+
+		schemaFld := s.GetField(fldName)
+		if schemaFld == nil {
+			if v, ok := r[outputField]; ok {
+				result[outputField] = v
+			}
+			continue
+		}
+
+		if funcName == "distinct" {
+			schemaFld = field.Array(schemaFld)
+		}
+
+		data, err := schema.Decode(ctx, schemaFld, r[outputField])
+		if err != nil {
+			return nil, errors.Wrapf(err, "decode data for field '%s'", outputField)
+		}
+		result[outputField] = data
+	}
+
+	return result, nil
+}
+
+func EncodeAggregateResult(ctx context.Context, request map[string]string, r map[string]interface{}, s *schema.Schema) (map[string]interface{}, error) {
+	result := make(map[string]interface{}, len(r))
+	for outputField, exp := range request {
+
+		funcName, fldName, ok := ParseAggregateExp(exp)
+		if !ok || fldName == "" {
+			if v, ok := r[outputField]; ok {
+				result[outputField] = v
+			}
+			continue
+		}
+
+		schemaFld := s.GetField(fldName)
+		if schemaFld == nil {
+			if v, ok := r[outputField]; ok {
+				result[outputField] = v
+			}
+			continue
+		}
+
+		if funcName == "distinct" {
+			schemaFld = field.Array(schemaFld)
+		}
+
+		data, err := schema.Encode(ctx, schemaFld, r[outputField])
+		if err != nil {
+			return nil, errors.Wrapf(err, "encode data for field '%s'", outputField)
+		}
+		result[outputField] = data
+	}
+
+	return result, nil
+}
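
A short sketch of the helpers defined above; the query strings passed to NewFilter are illustrative, their concrete syntax is defined by the filter package:

```go
package main

import (
	"fmt"

	"git.perx.ru/perxis/perxis-go/pkg/items"
)

func main() {
	// Expressions must match AggregateExpRe: name(field).
	fn, fld, ok := items.ParseAggregateExp("min(age)")
	fmt.Println(fn, fld, ok) // min age true

	// count() has an empty field name; callers treat that case separately.
	fn, fld, ok = items.ParseAggregateExp("count()")
	fmt.Println(fn, fld, ok) // count  true

	// Plain strings become Q entries, *filter.Filter values go to Data.
	f := items.NewFilter("age > 18", `status == "active"`)
	fmt.Println(f.Q)
}
```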
diff --git a/pkg/items/transport/client.go b/pkg/items/transport/client.go
new file mode 100644
index 0000000000000000000000000000000000000000..3f6bd04ceab90dad415d963c6db3d1a9f4fb4b47
--- /dev/null
+++ b/pkg/items/transport/client.go
@@ -0,0 +1,266 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"github.com/hashicorp/go-multierror"
+	"google.golang.org/grpc/status"
+)
+
+func (set EndpointsSet) Create(arg0 context.Context, arg1 *items.Item, arg2 ...*items.CreateOptions) (res0 *items.Item, res1 error) {
+	request := CreateRequest{
+		Item: arg1,
+		Opts: arg2,
+	}
+	response, res1 := set.CreateEndpoint(arg0, &request)
+	if res1 != nil {
+		return
+	}
+	return response.(*CreateResponse).Created, res1
+}
+
+func (set EndpointsSet) Introspect(arg0 context.Context, arg1 *items.Item, arg2 ...*items.IntrospectOptions) (res0 *items.Item, res1 *schema.Schema, res2 error) {
+	request := IntrospectRequest{
+		Item: arg1,
+		Opts: arg2,
+	}
+	response, res2 := set.IntrospectEndpoint(arg0, &request)
+	if res2 != nil {
+		return
+	}
+	resp := response.(*IntrospectResponse)
+
+	if len(resp.ValidationErrors) > 0 {
+		var merr *multierror.Error
+		for _, err := range resp.ValidationErrors {
+			var fieldErr errors.FieldError
+			if errors.As(err, &fieldErr) {
+				merr = multierror.Append(merr, fieldErr)
+			}
+		}
+
+		res2 = errors.Wrap(merr, "validation error")
+
+	}
+	return resp.Item, resp.Schema, res2
+}
+
+func (set EndpointsSet) Get(arg0 context.Context, arg1, arg2, arg3, arg4 string, arg5 ...*items.GetOptions) (res0 *items.Item, res1 error) {
+	request := GetRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		ItemId:       arg4,
+		SpaceId:      arg1,
+		Options:      arg5,
+	}
+	response, res1 := set.GetEndpoint(arg0, &request)
+	if res1 != nil {
+		return
+	}
+	return response.(*GetResponse).Item, res1
+}
+
+func (set EndpointsSet) Find(arg0 context.Context, arg1, arg2, arg3 string, arg4 *items.Filter, arg5 ...*items.FindOptions) (res0 []*items.Item, res1 int, res2 error) {
+	request := FindRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		Filter:       arg4,
+		Options:      arg5,
+		SpaceId:      arg1,
+	}
+	response, res2 := set.FindEndpoint(arg0, &request)
+	if res2 != nil {
+		return
+	}
+	return response.(*FindResponse).Items, response.(*FindResponse).Total, res2
+}
+
+func (set EndpointsSet) Update(arg0 context.Context, arg1 *items.Item, arg2 ...*items.UpdateOptions) (res0 error) {
+	request := UpdateRequest{Item: arg1, Options: arg2}
+	_, res0 = set.UpdateEndpoint(arg0, &request)
+	if res0 != nil {
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Delete(arg0 context.Context, arg1, arg2, arg3, arg4 string, options ...*items.DeleteOptions) (res0 error) {
+	request := DeleteRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		ItemId:       arg4,
+		SpaceId:      arg1,
+		Options:      options,
+	}
+	_, res0 = set.DeleteEndpoint(arg0, &request)
+	if res0 != nil {
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Undelete(arg0 context.Context, arg1, arg2, arg3, arg4 string, options ...*items.UndeleteOptions) (res0 error) {
+	request := UndeleteRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		ItemId:       arg4,
+		SpaceId:      arg1,
+		Options:      options,
+	}
+	_, res0 = set.UndeleteEndpoint(arg0, &request)
+	if res0 != nil {
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Publish(arg0 context.Context, arg1 *items.Item, arg2 ...*items.PublishOptions) (res0 error) {
+	request := PublishRequest{Item: arg1, Options: arg2}
+	_, res0 = set.PublishEndpoint(arg0, &request)
+	if res0 != nil {
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Unpublish(arg0 context.Context, arg1 *items.Item, arg2 ...*items.UnpublishOptions) (res0 error) {
+	request := UnpublishRequest{Item: arg1, Options: arg2}
+	_, res0 = set.UnpublishEndpoint(arg0, &request)
+	if res0 != nil {
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) GetPublished(arg0 context.Context, arg1, arg2, arg3, arg4 string, arg5 ...*items.GetPublishedOptions) (res0 *items.Item, res1 error) {
+	request := GetPublishedRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		ItemId:       arg4,
+		SpaceId:      arg1,
+		Options:      arg5,
+	}
+	response, res1 := set.GetPublishedEndpoint(arg0, &request)
+	if res1 != nil {
+		return
+	}
+	return response.(*GetPublishedResponse).Item, res1
+}
+
+func (set EndpointsSet) FindPublished(arg0 context.Context, arg1, arg2, arg3 string, arg4 *items.Filter, arg5 ...*items.FindPublishedOptions) (res0 []*items.Item, res1 int, res2 error) {
+	request := FindPublishedRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		Filter:       arg4,
+		Options:      arg5,
+		SpaceId:      arg1,
+	}
+	response, res2 := set.FindPublishedEndpoint(arg0, &request)
+	if res2 != nil {
+		return
+	}
+	return response.(*FindPublishedResponse).Items, response.(*FindPublishedResponse).Total, res2
+}
+
+func (set EndpointsSet) GetRevision(arg0 context.Context, arg1 string, arg2 string, arg3 string, arg4 string, arg5 string, arg6 ...*items.GetRevisionOptions) (res0 *items.Item, res1 error) {
+	request := GetRevisionRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		ItemId:       arg4,
+		RevisionId:   arg5,
+		SpaceId:      arg1,
+		Options:      arg6,
+	}
+	response, res1 := set.GetRevisionEndpoint(arg0, &request)
+	if res1 != nil {
+		return
+	}
+	return response.(*GetRevisionResponse).Item, res1
+}
+
+func (set EndpointsSet) ListRevisions(arg0 context.Context, arg1, arg2, arg3, arg4 string, arg5 ...*items.ListRevisionsOptions) (res0 []*items.Item, res1 error) {
+	request := ListRevisionsRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		ItemId:       arg4,
+		SpaceId:      arg1,
+		Options:      arg5,
+	}
+	response, res1 := set.ListRevisionsEndpoint(arg0, &request)
+	if res1 != nil {
+		return
+	}
+	return response.(*ListRevisionsResponse).Items, res1
+}
+
+func (set EndpointsSet) Archive(arg0 context.Context, arg1 *items.Item, arg2 ...*items.ArchiveOptions) (res0 error) {
+	request := ArchiveRequest{Item: arg1, Options: arg2}
+	_, res0 = set.ArchiveEndpoint(arg0, &request)
+	if res0 != nil {
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) FindArchived(arg0 context.Context, arg1, arg2, arg3 string, arg4 *items.Filter, arg5 ...*items.FindArchivedOptions) (res0 []*items.Item, res1 int, res2 error) {
+	request := FindArchivedRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		Options:      arg5,
+		Filter:       arg4,
+		SpaceId:      arg1,
+	}
+	response, res2 := set.FindArchivedEndpoint(arg0, &request)
+	if res2 != nil {
+		return
+	}
+	return response.(*FindArchivedResponse).Items, response.(*FindArchivedResponse).Total, res2
+}
+
+func (set EndpointsSet) Unarchive(arg0 context.Context, arg1 *items.Item, arg2 ...*items.UnarchiveOptions) (res0 error) {
+	request := UnarchiveRequest{Item: arg1, Options: arg2}
+	_, res0 = set.UnarchiveEndpoint(arg0, &request)
+	if res0 != nil {
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Aggregate(arg0 context.Context, arg1, arg2, arg3 string, arg4 *items.Filter, arg5 ...*items.AggregateOptions) (res0 map[string]interface{}, res1 error) {
+	request := AggregateRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		Filter:       arg4,
+		Options:      arg5,
+		SpaceId:      arg1,
+	}
+	response, res1 := set.AggregateEndpoint(arg0, &request)
+	if res1 != nil {
+		return
+	}
+	return response.(*AggregateResponse).Result, res1
+}
+
+func (set EndpointsSet) AggregatePublished(arg0 context.Context, arg1, arg2, arg3 string, arg4 *items.Filter, arg5 ...*items.AggregatePublishedOptions) (res0 map[string]interface{}, res1 error) {
+	request := AggregatePublishedRequest{
+		CollectionId: arg3,
+		EnvId:        arg2,
+		Filter:       arg4,
+		Options:      arg5,
+		SpaceId:      arg1,
+	}
+	response, res1 := set.AggregatePublishedEndpoint(arg0, &request)
+
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*AggregatePublishedResponse).Result, res1
+}
diff --git a/pkg/items/transport/endpoints.microgen.go b/pkg/items/transport/endpoints.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..5a6e8d5a678cd7180deca17a97f615fe7793ff6e
--- /dev/null
+++ b/pkg/items/transport/endpoints.microgen.go
@@ -0,0 +1,27 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import endpoint "github.com/go-kit/kit/endpoint"
+
+// EndpointsSet implements Items API and used for transport purposes.
+type EndpointsSet struct {
+	CreateEndpoint             endpoint.Endpoint
+	IntrospectEndpoint         endpoint.Endpoint
+	GetEndpoint                endpoint.Endpoint
+	FindEndpoint               endpoint.Endpoint
+	UpdateEndpoint             endpoint.Endpoint
+	DeleteEndpoint             endpoint.Endpoint
+	UndeleteEndpoint           endpoint.Endpoint
+	PublishEndpoint            endpoint.Endpoint
+	UnpublishEndpoint          endpoint.Endpoint
+	GetPublishedEndpoint       endpoint.Endpoint
+	FindPublishedEndpoint      endpoint.Endpoint
+	GetRevisionEndpoint        endpoint.Endpoint
+	ListRevisionsEndpoint      endpoint.Endpoint
+	ArchiveEndpoint            endpoint.Endpoint
+	FindArchivedEndpoint       endpoint.Endpoint
+	UnarchiveEndpoint          endpoint.Endpoint
+	AggregateEndpoint          endpoint.Endpoint
+	AggregatePublishedEndpoint endpoint.Endpoint
+}
diff --git a/pkg/items/transport/exchanges.microgen.go b/pkg/items/transport/exchanges.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..b601946f74837d41df38b07e3c5887ba8698b183
--- /dev/null
+++ b/pkg/items/transport/exchanges.microgen.go
@@ -0,0 +1,186 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	items "git.perx.ru/perxis/perxis-go/pkg/items"
+	schema "git.perx.ru/perxis/perxis-go/pkg/schema"
+)
+
+type (
+	CreateRequest struct {
+		Item *items.Item            `json:"item"`
+		Opts []*items.CreateOptions `json:"opts"` // This field was defined with ellipsis (...).
+	}
+	CreateResponse struct {
+		Created *items.Item `json:"created"`
+	}
+
+	IntrospectRequest struct {
+		Item *items.Item                `json:"item"`
+		Opts []*items.IntrospectOptions `json:"opts"` // This field was defined with ellipsis (...).
+	}
+	IntrospectResponse struct {
+		Item             *items.Item    `json:"item"`
+		Schema           *schema.Schema `json:"schema"`
+		ValidationErrors []error        `json:"validation_errors"`
+	}
+
+	GetRequest struct {
+		SpaceId      string              `json:"space_id"`
+		EnvId        string              `json:"env_id"`
+		CollectionId string              `json:"collection_id"`
+		ItemId       string              `json:"item_id"`
+		Options      []*items.GetOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	GetResponse struct {
+		Item *items.Item `json:"item"`
+	}
+
+	FindRequest struct {
+		SpaceId      string               `json:"space_id"`
+		EnvId        string               `json:"env_id"`
+		CollectionId string               `json:"collection_id"`
+		Filter       *items.Filter        `json:"filter"`
+		Options      []*items.FindOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	FindResponse struct {
+		Items []*items.Item `json:"items"`
+		Total int           `json:"total"`
+	}
+
+	UpdateRequest struct {
+		Item    *items.Item            `json:"item"`
+		Options []*items.UpdateOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	// Formal exchange type, please do not delete.
+	UpdateResponse struct{}
+
+	DeleteRequest struct {
+		SpaceId      string                 `json:"space_id"`
+		EnvId        string                 `json:"env_id"`
+		CollectionId string                 `json:"collection_id"`
+		ItemId       string                 `json:"item_id"`
+		Options      []*items.DeleteOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	// Formal exchange type, please do not delete.
+	DeleteResponse struct{}
+
+	UndeleteRequest struct {
+		SpaceId      string                   `json:"space_id"`
+		EnvId        string                   `json:"env_id"`
+		CollectionId string                   `json:"collection_id"`
+		ItemId       string                   `json:"item_id"`
+		Options      []*items.UndeleteOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	// Formal exchange type, please do not delete.
+	UndeleteResponse struct{}
+
+	PublishRequest struct {
+		Item    *items.Item             `json:"item"`
+		Options []*items.PublishOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	// Formal exchange type, please do not delete.
+	PublishResponse struct{}
+
+	UnpublishRequest struct {
+		Item    *items.Item               `json:"item"`
+		Options []*items.UnpublishOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	// Formal exchange type, please do not delete.
+	UnpublishResponse struct{}
+
+	GetPublishedRequest struct {
+		SpaceId      string                       `json:"space_id"`
+		EnvId        string                       `json:"env_id"`
+		CollectionId string                       `json:"collection_id"`
+		ItemId       string                       `json:"item_id"`
+		Options      []*items.GetPublishedOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	GetPublishedResponse struct {
+		Item *items.Item `json:"item"`
+	}
+
+	FindPublishedRequest struct {
+		SpaceId      string                        `json:"space_id"`
+		EnvId        string                        `json:"env_id"`
+		CollectionId string                        `json:"collection_id"`
+		Filter       *items.Filter                 `json:"filter"`
+		Options      []*items.FindPublishedOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	FindPublishedResponse struct {
+		Items []*items.Item `json:"items"`
+		Total int           `json:"total"`
+	}
+
+	GetRevisionRequest struct {
+		SpaceId      string                      `json:"space_id"`
+		EnvId        string                      `json:"env_id"`
+		CollectionId string                      `json:"collection_id"`
+		ItemId       string                      `json:"item_id"`
+		RevisionId   string                      `json:"revision_id"`
+		Options      []*items.GetRevisionOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	GetRevisionResponse struct {
+		Item *items.Item `json:"item"`
+	}
+
+	ListRevisionsRequest struct {
+		SpaceId      string                        `json:"space_id"`
+		EnvId        string                        `json:"env_id"`
+		CollectionId string                        `json:"collection_id"`
+		ItemId       string                        `json:"item_id"`
+		Options      []*items.ListRevisionsOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	ListRevisionsResponse struct {
+		Items []*items.Item `json:"items"`
+	}
+
+	ArchiveRequest struct {
+		Item    *items.Item             `json:"item"`
+		Options []*items.ArchiveOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	// Formal exchange type, please do not delete.
+	ArchiveResponse struct{}
+
+	FindArchivedRequest struct {
+		SpaceId      string                       `json:"space_id"`
+		EnvId        string                       `json:"env_id"`
+		CollectionId string                       `json:"collection_id"`
+		Filter       *items.Filter                `json:"filter"`
+		Options      []*items.FindArchivedOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	FindArchivedResponse struct {
+		Items []*items.Item `json:"items"`
+		Total int           `json:"total"`
+	}
+
+	UnarchiveRequest struct {
+		Item    *items.Item               `json:"item"`
+		Options []*items.UnarchiveOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	// Formal exchange type, please do not delete.
+	UnarchiveResponse struct{}
+
+	AggregateRequest struct {
+		SpaceId      string                    `json:"space_id"`
+		EnvId        string                    `json:"env_id"`
+		CollectionId string                    `json:"collection_id"`
+		Filter       *items.Filter             `json:"filter"`
+		Options      []*items.AggregateOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	AggregateResponse struct {
+		Result map[string]interface{} `json:"result"`
+	}
+
+	AggregatePublishedRequest struct {
+		SpaceId      string                             `json:"space_id"`
+		EnvId        string                             `json:"env_id"`
+		CollectionId string                             `json:"collection_id"`
+		Filter       *items.Filter                      `json:"filter"`
+		Options      []*items.AggregatePublishedOptions `json:"options"` // This field was defined with ellipsis (...).
+	}
+	AggregatePublishedResponse struct {
+		Result map[string]interface{} `json:"result"`
+	}
+)
diff --git a/pkg/items/transport/grpc/client.go b/pkg/items/transport/grpc/client.go
new file mode 100644
index 0000000000000000000000000000000000000000..faea7cc6703746ba91b0af0e831431ffd76044fc
--- /dev/null
+++ b/pkg/items/transport/grpc/client.go
@@ -0,0 +1,34 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	grpcerr "git.perx.ru/perxis/perxis-go/pkg/errors/grpc"
+	transport "git.perx.ru/perxis/perxis-go/pkg/items/transport"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+	grpc "google.golang.org/grpc"
+)
+
+func NewClient(conn *grpc.ClientConn, opts ...grpckit.ClientOption) transport.EndpointsSet {
+	c := NewGRPCClient(conn, "", opts...)
+	return transport.EndpointsSet{
+		CreateEndpoint:             grpcerr.ClientMiddleware(c.CreateEndpoint),
+		IntrospectEndpoint:         grpcerr.ClientMiddleware(c.IntrospectEndpoint),
+		GetEndpoint:                grpcerr.ClientMiddleware(c.GetEndpoint),
+		FindEndpoint:               grpcerr.ClientMiddleware(c.FindEndpoint),
+		UpdateEndpoint:             grpcerr.ClientMiddleware(c.UpdateEndpoint),
+		DeleteEndpoint:             grpcerr.ClientMiddleware(c.DeleteEndpoint),
+		UndeleteEndpoint:           grpcerr.ClientMiddleware(c.UndeleteEndpoint),
+		PublishEndpoint:            grpcerr.ClientMiddleware(c.PublishEndpoint),
+		UnpublishEndpoint:          grpcerr.ClientMiddleware(c.UnpublishEndpoint),
+		GetPublishedEndpoint:       grpcerr.ClientMiddleware(c.GetPublishedEndpoint),
+		FindPublishedEndpoint:      grpcerr.ClientMiddleware(c.FindPublishedEndpoint),
+		GetRevisionEndpoint:        grpcerr.ClientMiddleware(c.GetRevisionEndpoint),
+		ListRevisionsEndpoint:      grpcerr.ClientMiddleware(c.ListRevisionsEndpoint),
+		ArchiveEndpoint:            grpcerr.ClientMiddleware(c.ArchiveEndpoint),
+		FindArchivedEndpoint:       grpcerr.ClientMiddleware(c.FindArchivedEndpoint),
+		UnarchiveEndpoint:          grpcerr.ClientMiddleware(c.UnarchiveEndpoint),
+		AggregateEndpoint:          grpcerr.ClientMiddleware(c.AggregateEndpoint),
+		AggregatePublishedEndpoint: grpcerr.ClientMiddleware(c.AggregatePublishedEndpoint),
+	}
+}
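
A minimal sketch of constructing the client added above; the target address and the insecure credentials are illustrative:

```go
package main

import (
	"context"
	"log"

	transportgrpc "git.perx.ru/perxis/perxis-go/pkg/items/transport/grpc"
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
)

func main() {
	conn, err := grpc.Dial("localhost:50051",
		grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	// NewClient wraps every endpoint with grpcerr.ClientMiddleware so that
	// transport errors are converted back into perxis-go errors.
	svc := transportgrpc.NewClient(conn)

	item, err := svc.Get(context.Background(), "space", "master", "articles", "item-1")
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("got item: %+v", item)
}
```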
diff --git a/pkg/items/transport/grpc/client.microgen.go b/pkg/items/transport/grpc/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..a91c8d16495333a356ebeb3e48100c40e0f7bc91
--- /dev/null
+++ b/pkg/items/transport/grpc/client.microgen.go
@@ -0,0 +1,145 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/items/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/items"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	grpc "google.golang.org/grpc"
+)
+
+func NewGRPCClient(conn *grpc.ClientConn, addr string, opts ...grpckit.ClientOption) transport.EndpointsSet {
+	if addr == "" {
+		addr = "content.items.Items"
+	}
+	return transport.EndpointsSet{
+		ArchiveEndpoint: grpckit.NewClient(
+			conn, addr, "Archive",
+			_Encode_Archive_Request,
+			_Decode_Archive_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		CreateEndpoint: grpckit.NewClient(
+			conn, addr, "Create",
+			_Encode_Create_Request,
+			_Decode_Create_Response,
+			pb.CreateResponse{},
+			opts...,
+		).Endpoint(),
+		DeleteEndpoint: grpckit.NewClient(
+			conn, addr, "Delete",
+			_Encode_Delete_Request,
+			_Decode_Delete_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		UndeleteEndpoint: grpckit.NewClient(
+			conn, addr, "Undelete",
+			_Encode_Undelete_Request,
+			_Decode_Undelete_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		FindArchivedEndpoint: grpckit.NewClient(
+			conn, addr, "FindArchived",
+			_Encode_FindArchived_Request,
+			_Decode_FindArchived_Response,
+			pb.FindArchivedResponse{},
+			opts...,
+		).Endpoint(),
+		FindEndpoint: grpckit.NewClient(
+			conn, addr, "Find",
+			_Encode_Find_Request,
+			_Decode_Find_Response,
+			pb.FindResponse{},
+			opts...,
+		).Endpoint(),
+		FindPublishedEndpoint: grpckit.NewClient(
+			conn, addr, "FindPublished",
+			_Encode_FindPublished_Request,
+			_Decode_FindPublished_Response,
+			pb.FindPublishedResponse{},
+			opts...,
+		).Endpoint(),
+		GetEndpoint: grpckit.NewClient(
+			conn, addr, "Get",
+			_Encode_Get_Request,
+			_Decode_Get_Response,
+			pb.GetResponse{},
+			opts...,
+		).Endpoint(),
+		GetPublishedEndpoint: grpckit.NewClient(
+			conn, addr, "GetPublished",
+			_Encode_GetPublished_Request,
+			_Decode_GetPublished_Response,
+			pb.GetPublishedResponse{},
+			opts...,
+		).Endpoint(),
+		GetRevisionEndpoint: grpckit.NewClient(
+			conn, addr, "GetRevision",
+			_Encode_GetRevision_Request,
+			_Decode_GetRevision_Response,
+			pb.GetRevisionResponse{},
+			opts...,
+		).Endpoint(),
+		IntrospectEndpoint: grpckit.NewClient(
+			conn, addr, "Introspect",
+			_Encode_Introspect_Request,
+			_Decode_Introspect_Response,
+			pb.IntrospectResponse{},
+			opts...,
+		).Endpoint(),
+		ListRevisionsEndpoint: grpckit.NewClient(
+			conn, addr, "ListRevisions",
+			_Encode_ListRevisions_Request,
+			_Decode_ListRevisions_Response,
+			pb.ListRevisionsResponse{},
+			opts...,
+		).Endpoint(),
+		PublishEndpoint: grpckit.NewClient(
+			conn, addr, "Publish",
+			_Encode_Publish_Request,
+			_Decode_Publish_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		UnarchiveEndpoint: grpckit.NewClient(
+			conn, addr, "Unarchive",
+			_Encode_Unarchive_Request,
+			_Decode_Unarchive_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		UnpublishEndpoint: grpckit.NewClient(
+			conn, addr, "Unpublish",
+			_Encode_Unpublish_Request,
+			_Decode_Unpublish_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		UpdateEndpoint: grpckit.NewClient(
+			conn, addr, "Update",
+			_Encode_Update_Request,
+			_Decode_Update_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		AggregateEndpoint: grpckit.NewClient(
+			conn, addr, "Aggregate",
+			_Encode_Aggregate_Request,
+			_Decode_Aggregate_Response,
+			pb.AggregateResponse{},
+			opts...,
+		).Endpoint(),
+		AggregatePublishedEndpoint: grpckit.NewClient(
+			conn, addr, "AggregatePublished",
+			_Encode_AggregatePublished_Request,
+			_Decode_AggregatePublished_Response,
+			pb.AggregatePublishedResponse{},
+			opts...,
+		).Endpoint(),
+	}
+}
diff --git a/pkg/items/transport/grpc/protobuf_endpoint_converters.microgen.go b/pkg/items/transport/grpc/protobuf_endpoint_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..69a696df329a6e28e5912af8815df9852c0c504c
--- /dev/null
+++ b/pkg/items/transport/grpc/protobuf_endpoint_converters.microgen.go
@@ -0,0 +1,1010 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// Please, do not change functions names!
+package transportgrpc
+
+import (
+	"context"
+	"errors"
+
+	transport "git.perx.ru/perxis/perxis-go/pkg/items/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/items"
+	empty "github.com/golang/protobuf/ptypes/empty"
+)
+
+func _Encode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*transport.CreateRequest)
+	reqItem, err := PtrItemToProto(req.Item)
+	if err != nil {
+		return nil, err
+	}
+
+	opts, err := CreateOptionsToProto(req.Opts)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateRequest{
+		Item:    reqItem,
+		Options: opts,
+	}, nil
+}
+
+func _Encode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*transport.GetRequest)
+	return &pb.GetRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		ItemId:       req.ItemId,
+		SpaceId:      req.SpaceId,
+	}, nil
+}
+
+func _Encode_Find_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindRequest")
+	}
+	req := request.(*transport.FindRequest)
+	reqFilter, err := PtrFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ElPtrFindOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		SpaceId:      req.SpaceId,
+		Options:      reqOptions,
+		Filter:       reqFilter,
+	}, nil
+}
+
+func _Encode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UpdateRequest")
+	}
+	req := request.(*transport.UpdateRequest)
+	reqItem, err := PtrItemToProto(req.Item)
+	if err != nil {
+		return nil, err
+	}
+
+	opts, err := UpdateOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.UpdateRequest{
+		Item:    reqItem,
+		Options: opts,
+	}, nil
+}
+
+func _Encode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*transport.DeleteRequest)
+
+	opts, err := DeleteOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.DeleteRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		ItemId:       req.ItemId,
+		SpaceId:      req.SpaceId,
+		Options:      opts,
+	}, nil
+}
+
+func _Encode_Undelete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UndeleteRequest")
+	}
+	req := request.(*transport.UndeleteRequest)
+	return &pb.UndeleteRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		ItemId:       req.ItemId,
+		SpaceId:      req.SpaceId,
+	}, nil
+}
+
+func _Encode_Publish_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil PublishRequest")
+	}
+	req := request.(*transport.PublishRequest)
+	reqItem, err := PtrItemToProto(req.Item)
+	if err != nil {
+		return nil, err
+	}
+
+	opts, err := PublishOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.PublishRequest{
+		Item:    reqItem,
+		Options: opts,
+	}, nil
+}
+
+func _Encode_Unpublish_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UnpublishRequest")
+	}
+	req := request.(*transport.UnpublishRequest)
+	reqItem, err := PtrItemToProto(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.UnpublishRequest{Item: reqItem}, nil
+}
+
+func _Encode_GetPublished_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetPublishedRequest")
+	}
+	req := request.(*transport.GetPublishedRequest)
+	reqOptions, err := ElPtrGetPublishedOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetPublishedRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		ItemId:       req.ItemId,
+		SpaceId:      req.SpaceId,
+		Options:      reqOptions,
+	}, nil
+}
+
+func _Encode_FindPublished_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindPublishedRequest")
+	}
+	req := request.(*transport.FindPublishedRequest)
+	reqFilter, err := PtrFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ElPtrFindPublishedOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindPublishedRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		Options:      reqOptions,
+		Filter:       reqFilter,
+		SpaceId:      req.SpaceId,
+	}, nil
+}
+
+func _Encode_GetRevision_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRevisionRequest")
+	}
+	req := request.(*transport.GetRevisionRequest)
+	return &pb.GetRevisionRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		ItemId:       req.ItemId,
+		RevisionId:   req.RevisionId,
+		SpaceId:      req.SpaceId,
+	}, nil
+}
+
+func _Encode_ListRevisions_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListRevisionsRequest")
+	}
+	req := request.(*transport.ListRevisionsRequest)
+	reqOptions, err := ElPtrListRevisionsOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.ListRevisionsRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		ItemId:       req.ItemId,
+		SpaceId:      req.SpaceId,
+		Options:      reqOptions,
+	}, nil
+}
+
+func _Encode_Archive_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ArchiveRequest")
+	}
+	req := request.(*transport.ArchiveRequest)
+	reqItem, err := PtrItemToProto(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.ArchiveRequest{Item: reqItem}, nil
+}
+
+func _Encode_FindArchived_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindArchivedRequest")
+	}
+	req := request.(*transport.FindArchivedRequest)
+	reqFilter, err := PtrFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ElPtrFindArchivedOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindArchivedRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		Options:      reqOptions,
+		Filter:       reqFilter,
+		SpaceId:      req.SpaceId,
+	}, nil
+}
+
+func _Encode_Unarchive_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UnarchiveRequest")
+	}
+	req := request.(*transport.UnarchiveRequest)
+	reqItem, err := PtrItemToProto(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.UnarchiveRequest{Item: reqItem}, nil
+}
+
+func _Encode_Aggregate_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil AggregateRequest")
+	}
+	req := request.(*transport.AggregateRequest)
+	reqFilter, err := PtrFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ElPtrAggregateOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.AggregateRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		SpaceId:      req.SpaceId,
+		Options:      reqOptions,
+		Filter:       reqFilter,
+	}, nil
+}
+
+func _Encode_AggregatePublished_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil AggregatePublishedRequest")
+	}
+	req := request.(*transport.AggregatePublishedRequest)
+	reqFilter, err := PtrFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ElPtrAggregatePublishedOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.AggregatePublishedRequest{
+		CollectionId: req.CollectionId,
+		EnvId:        req.EnvId,
+		SpaceId:      req.SpaceId,
+		Options:      reqOptions,
+		Filter:       reqFilter,
+	}, nil
+}
+
+func _Encode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*transport.CreateResponse)
+	respCreated, err := PtrItemToProto(resp.Created)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateResponse{Created: respCreated}, nil
+}
+
+func _Encode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*transport.GetResponse)
+	respItem, err := PtrItemToProto(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetResponse{Item: respItem}, nil
+}
+
+func _Encode_Find_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindResponse")
+	}
+	resp := response.(*transport.FindResponse)
+	respItems, err := ListPtrItemToProto(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindResponse{
+		Items: respItems,
+		Total: int32(resp.Total),
+	}, nil
+}
+
+func _Encode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Undelete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Publish_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Unpublish_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_GetPublished_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetPublishedResponse")
+	}
+	resp := response.(*transport.GetPublishedResponse)
+	respItem, err := PtrItemToProto(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetPublishedResponse{Item: respItem}, nil
+}
+
+func _Encode_FindPublished_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindPublishedResponse")
+	}
+	resp := response.(*transport.FindPublishedResponse)
+	respItems, err := ListPtrItemToProto(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindPublishedResponse{
+		Items: respItems,
+		Total: int32(resp.Total),
+	}, nil
+}
+
+func _Encode_GetRevision_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetRevisionResponse")
+	}
+	resp := response.(*transport.GetRevisionResponse)
+	respItem, err := PtrItemToProto(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetRevisionResponse{Item: respItem}, nil
+}
+
+func _Encode_ListRevisions_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListRevisionsResponse")
+	}
+	resp := response.(*transport.ListRevisionsResponse)
+	respItems, err := ListPtrItemToProto(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.ListRevisionsResponse{Items: respItems}, nil
+}
+
+func _Encode_Archive_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_FindArchived_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindArchivedResponse")
+	}
+	resp := response.(*transport.FindArchivedResponse)
+	respItems, err := ListPtrItemToProto(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindArchivedResponse{
+		Items: respItems,
+		Total: int32(resp.Total),
+	}, nil
+}
+
+func _Encode_Unarchive_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Aggregate_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil AggregateResponse")
+	}
+	resp := response.(*transport.AggregateResponse)
+	result, err := MapStringInterfaceToProto(resp.Result)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.AggregateResponse{
+		Result: result,
+	}, nil
+}
+
+func _Encode_AggregatePublished_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil AggregatePublishedResponse")
+	}
+	resp := response.(*transport.AggregatePublishedResponse)
+	result, err := MapStringInterfaceToProto(resp.Result)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.AggregatePublishedResponse{
+		Result: result,
+	}, nil
+}
+
+func _Decode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*pb.CreateRequest)
+	reqItem, err := ProtoToPtrItem(req.Item)
+	if err != nil {
+		return nil, err
+	}
+
+	opts, err := ProtoToCreateOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateRequest{
+		Item: reqItem,
+		Opts: opts,
+	}, nil
+}
+
+func _Decode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*pb.GetRequest)
+	return &transport.GetRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		ItemId:       string(req.ItemId),
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Aggregate_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil AggregateRequest")
+	}
+	req := request.(*pb.AggregateRequest)
+	reqFilter, err := ProtoToPtrFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ProtoToPtrServicesAggregateOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.AggregateRequest{
+		SpaceId:      string(req.SpaceId),
+		EnvId:        string(req.EnvId),
+		CollectionId: string(req.CollectionId),
+		Filter:       reqFilter,
+		Options:      reqOptions,
+	}, nil
+}
+
+func _Decode_AggregatePublished_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil AggregatePublishedRequest")
+	}
+	req := request.(*pb.AggregatePublishedRequest)
+	reqFilter, err := ProtoToPtrFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ProtoToPtrServicesAggregatePublishedOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.AggregatePublishedRequest{
+		SpaceId:      string(req.SpaceId),
+		EnvId:        string(req.EnvId),
+		CollectionId: string(req.CollectionId),
+		Filter:       reqFilter,
+		Options:      reqOptions,
+	}, nil
+}
+
+func _Decode_Find_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindRequest")
+	}
+	req := request.(*pb.FindRequest)
+	reqFilter, err := ProtoToPtrFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ProtoToElPtrFindOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		Options:      reqOptions,
+		Filter:       reqFilter,
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UpdateRequest")
+	}
+	req := request.(*pb.UpdateRequest)
+	reqItem, err := ProtoToPtrItem(req.Item)
+	if err != nil {
+		return nil, err
+	}
+
+	opts, err := ProtoToUpdateOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.UpdateRequest{
+		Item:    reqItem,
+		Options: opts,
+	}, nil
+}
+
+func _Decode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*pb.DeleteRequest)
+
+	opts, err := ProtoToDeleteOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.DeleteRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		ItemId:       string(req.ItemId),
+		SpaceId:      string(req.SpaceId),
+		Options:      opts,
+	}, nil
+}
+
+func _Decode_Undelete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UndeleteRequest")
+	}
+	req := request.(*pb.UndeleteRequest)
+	return &transport.UndeleteRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		ItemId:       string(req.ItemId),
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Publish_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil PublishRequest")
+	}
+	req := request.(*pb.PublishRequest)
+	reqItem, err := ProtoToPtrItem(req.Item)
+	if err != nil {
+		return nil, err
+	}
+
+	opts, err := ProtoToPublishOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.PublishRequest{
+		Item:    reqItem,
+		Options: opts,
+	}, nil
+}
+
+func _Decode_Unpublish_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UnpublishRequest")
+	}
+	req := request.(*pb.UnpublishRequest)
+	reqItem, err := ProtoToPtrItem(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.UnpublishRequest{Item: reqItem}, nil
+}
+
+func _Decode_GetPublished_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetPublishedRequest")
+	}
+	req := request.(*pb.GetPublishedRequest)
+	reqOptions, err := ProtoToElPtrGetPublishedOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetPublishedRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		ItemId:       string(req.ItemId),
+		SpaceId:      string(req.SpaceId),
+		Options:      reqOptions,
+	}, nil
+}
+
+func _Decode_FindPublished_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindPublishedRequest")
+	}
+	req := request.(*pb.FindPublishedRequest)
+	reqFilter, err := ProtoToPtrFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ProtoToElPtrFindPublishedOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindPublishedRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		Options:      reqOptions,
+		Filter:       reqFilter,
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_GetRevision_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRevisionRequest")
+	}
+	req := request.(*pb.GetRevisionRequest)
+	return &transport.GetRevisionRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		ItemId:       string(req.ItemId),
+		RevisionId:   string(req.RevisionId),
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_ListRevisions_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListRevisionsRequest")
+	}
+	req := request.(*pb.ListRevisionsRequest)
+	reqOptions, err := ProtoToElPtrListRevisionsOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.ListRevisionsRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		ItemId:       string(req.ItemId),
+		SpaceId:      string(req.SpaceId),
+		Options:      reqOptions,
+	}, nil
+}
+
+func _Decode_Archive_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ArchiveRequest")
+	}
+	req := request.(*pb.ArchiveRequest)
+	reqItem, err := ProtoToPtrItem(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.ArchiveRequest{Item: reqItem}, nil
+}
+
+func _Decode_FindArchived_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindArchivedRequest")
+	}
+	req := request.(*pb.FindArchivedRequest)
+	reqFilter, err := ProtoToPtrFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ProtoToElPtrFindArchivedOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindArchivedRequest{
+		CollectionId: string(req.CollectionId),
+		EnvId:        string(req.EnvId),
+		Options:      reqOptions,
+		Filter:       reqFilter,
+		SpaceId:      string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Unarchive_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UnarchiveRequest")
+	}
+	req := request.(*pb.UnarchiveRequest)
+	reqItem, err := ProtoToPtrItem(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.UnarchiveRequest{Item: reqItem}, nil
+}
+
+func _Decode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*pb.CreateResponse)
+	respCreated, err := ProtoToPtrItem(resp.Created)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateResponse{Created: respCreated}, nil
+}
+
+func _Decode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*pb.GetResponse)
+	respItem, err := ProtoToPtrItem(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetResponse{Item: respItem}, nil
+}
+
+func _Decode_Find_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindResponse")
+	}
+	resp := response.(*pb.FindResponse)
+	respItems, err := ProtoToListPtrItem(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindResponse{
+		Items: respItems,
+		Total: int(resp.Total),
+	}, nil
+}
+
+func _Decode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Undelete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Publish_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Unpublish_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_GetPublished_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetPublishedResponse")
+	}
+	resp := response.(*pb.GetPublishedResponse)
+	respItem, err := ProtoToPtrItem(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetPublishedResponse{Item: respItem}, nil
+}
+
+func _Decode_FindPublished_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindPublishedResponse")
+	}
+	resp := response.(*pb.FindPublishedResponse)
+	respItems, err := ProtoToListPtrItem(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindPublishedResponse{
+		Items: respItems,
+		Total: int(resp.Total),
+	}, nil
+}
+
+func _Decode_GetRevision_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetRevisionResponse")
+	}
+	resp := response.(*pb.GetRevisionResponse)
+	respItem, err := ProtoToPtrItem(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetRevisionResponse{Item: respItem}, nil
+}
+
+func _Decode_ListRevisions_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListRevisionsResponse")
+	}
+	resp := response.(*pb.ListRevisionsResponse)
+	respItems, err := ProtoToListPtrItem(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.ListRevisionsResponse{Items: respItems}, nil
+}
+
+func _Decode_Archive_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_FindArchived_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindArchivedResponse")
+	}
+	resp := response.(*pb.FindArchivedResponse)
+	respItems, err := ProtoToListPtrItem(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindArchivedResponse{
+		Items: respItems,
+		Total: int(resp.Total),
+	}, nil
+}
+
+func _Decode_Unarchive_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Introspect_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil IntrospectRequest")
+	}
+	req := request.(*transport.IntrospectRequest)
+	reqItem, err := PtrItemToProto(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.IntrospectRequest{
+		Item: reqItem,
+	}, nil
+}
+
+func _Encode_Introspect_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil IntrospectResponse")
+	}
+	resp := response.(*transport.IntrospectResponse)
+	respItm, err := PtrItemToProto(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	respSch, err := PtrSchemaSchemaToProto(resp.Schema)
+	if err != nil {
+		return nil, err
+	}
+	respErrors, err := ValidationErrorsToProto(resp.ValidationErrors)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.IntrospectResponse{
+		Item:             respItm,
+		Schema:           respSch,
+		ValidationErrors: respErrors,
+	}, nil
+}
+
+func _Decode_Introspect_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil IntrospectRequest")
+	}
+	req := request.(*pb.IntrospectRequest)
+	reqItem, err := ProtoToPtrItem(req.Item)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.IntrospectRequest{
+		Item: reqItem,
+	}, nil
+}
+
+func _Decode_Introspect_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil IntrospectResponse")
+	}
+	resp := response.(*pb.IntrospectResponse)
+	respItm, err := ProtoToPtrItem(resp.Item)
+	if err != nil {
+		return nil, err
+	}
+	respSch, err := ProtoToPtrSchemaSchema(resp.Schema)
+	if err != nil {
+		return nil, err
+	}
+	respErrs, err := ProtoToValidationErrors(resp.ValidationErrors)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.IntrospectResponse{
+		Item:             respItm,
+		Schema:           respSch,
+		ValidationErrors: respErrs,
+	}, nil
+}
+
+func _Decode_Aggregate_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil AggregateResponse")
+	}
+	resp := response.(*pb.AggregateResponse)
+	result, err := ProtoToMapStringInterface(resp.Result)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.AggregateResponse{
+		Result: result,
+	}, nil
+}
+
+func _Decode_AggregatePublished_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil AggregatePublishedResponse")
+	}
+	resp := response.(*pb.AggregatePublishedResponse)
+	result, err := ProtoToMapStringInterface(resp.Result)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.AggregatePublishedResponse{
+		Result: result,
+	}, nil
+}
diff --git a/pkg/items/transport/grpc/protobuf_type_converters.microgen.go b/pkg/items/transport/grpc/protobuf_type_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..7eae996594e266ddfc712183bd3e1d7c4a39c78a
--- /dev/null
+++ b/pkg/items/transport/grpc/protobuf_type_converters.microgen.go
@@ -0,0 +1,627 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// It is better for you if you do not change functions names!
+// This file will never be overwritten.
+package transportgrpc
+
+import (
+	"fmt"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/filter"
+	service "git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	pbcommon "git.perx.ru/perxis/perxis-go/proto/common"
+	pb "git.perx.ru/perxis/perxis-go/proto/items"
+	jsoniter "github.com/json-iterator/go"
+	"google.golang.org/protobuf/types/known/structpb"
+)
+
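+// Arbitrary map[string]interface{} payloads are carried over gRPC as
+// structpb.Struct values.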
+func MapStringInterfaceToProto(data map[string]interface{}) (*structpb.Struct, error) {
+	if data == nil {
+		return nil, nil
+	}
+	return structpb.NewStruct(data)
+}
+
+func ProtoToMapStringInterface(protoData *structpb.Struct) (map[string]interface{}, error) {
+	if protoData == nil {
+		return nil, nil
+	}
+	return protoData.AsMap(), nil
+}
+
+func MapStringMapStringInterfaceToProto(translations map[string]map[string]interface{}) (map[string]*structpb.Struct, error) {
+	if translations == nil {
+		return nil, nil
+	}
+	res := make(map[string]*structpb.Struct, len(translations))
+	for k, v := range translations {
+		res[k], _ = MapStringInterfaceToProto(v)
+	}
+	return res, nil
+}
+
+func PtrPermissionsToProto(permissions *service.Permissions) (*pb.Permissions, error) {
+	if permissions == nil {
+		return nil, nil
+	}
+
+	return &pb.Permissions{
+			Edit:       permissions.Edit,
+			Archive:    permissions.Archive,
+			Publish:    permissions.Publish,
+			SoftDelete: permissions.SoftDelete,
+			HardDelete: permissions.HardDelete,
+		},
+		nil
+}
+
+func ProtoToPtrPermissions(protoPermissions *pb.Permissions) (*service.Permissions, error) {
+	if protoPermissions == nil {
+		return nil, nil
+	}
+
+	return &service.Permissions{
+			Edit:       protoPermissions.Edit,
+			Archive:    protoPermissions.Archive,
+			Publish:    protoPermissions.Publish,
+			SoftDelete: protoPermissions.SoftDelete,
+			HardDelete: protoPermissions.HardDelete,
+		},
+		nil
+}
+
+func ProtoToMapStringMapStringInterface(protoTranslations map[string]*structpb.Struct) (map[string]map[string]interface{}, error) {
+	if protoTranslations == nil {
+		return nil, nil
+	}
+	res := make(map[string]map[string]interface{}, len(protoTranslations))
+	for k, v := range protoTranslations {
+		res[k], _ = ProtoToMapStringInterface(v)
+	}
+	return res, nil
+}
+
+func PtrItemToProto(item *service.Item) (*pb.Item, error) {
+	return service.ItemToProto(item), nil
+}
+
+func ProtoToPtrItem(protoItem *pb.Item) (*service.Item, error) {
+	return service.ItemFromProto(protoItem), nil
+}
+
+func PtrFilterToProto(filter *service.Filter) (*pb.Filter, error) {
+	if filter == nil {
+		return nil, nil
+	}
+
+	dt := make([]*pbcommon.Filter, 0, len(filter.Data))
+	for _, f := range filter.Data {
+		pf := &pbcommon.Filter{
+			Op:    string(f.Op),
+			Field: f.Field,
+		}
+
+		val, err := structpb.NewValue(f.Value)
+		if err != nil {
+			return nil, err
+		}
+		pf.Value = val
+		dt = append(dt, pf)
+	}
+
+	return &pb.Filter{
+		Id:   filter.ID,
+		Data: dt,
+		Q:    filter.Q,
+	}, nil
+}
+
+func ProtoToPtrFilter(protoFilter *pb.Filter) (*service.Filter, error) {
+	if protoFilter == nil {
+		return nil, nil
+	}
+
+	dt := make([]*filter.Filter, 0, len(protoFilter.Data))
+	for _, pf := range protoFilter.Data {
+
+		f := &filter.Filter{
+			Op:    filter.Op(pf.Op),
+			Field: pf.Field,
+			Value: pf.Value.AsInterface(),
+		}
+
+		dt = append(dt, f)
+	}
+
+	return &service.Filter{
+		ID:   protoFilter.Id,
+		Data: dt,
+		Q:    protoFilter.Q,
+	}, nil
+}
+
+func PtrServicesFindOptionsToProto(opts *options.FindOptions) (*pbcommon.FindOptions, error) {
+	if opts == nil {
+		return nil, nil
+	}
+	return &pbcommon.FindOptions{
+		Sort:          opts.Sort,
+		PageNum:       int32(opts.PageNum),
+		PageSize:      int32(opts.PageSize),
+		Fields:        opts.Fields,
+		ExcludeFields: opts.ExcludeFields,
+	}, nil
+}
+
+func ProtoToPtrServicesFindOptions(protoOpts *pbcommon.FindOptions) (*options.FindOptions, error) {
+	if protoOpts == nil {
+		return nil, nil
+	}
+	return &options.FindOptions{
+		SortOptions: options.SortOptions{
+			Sort: protoOpts.Sort,
+		},
+		PaginationOptions: options.PaginationOptions{
+			PageNum:  int(protoOpts.PageNum),
+			PageSize: int(protoOpts.PageSize),
+		},
+		FieldOptions: options.FieldOptions{
+			Fields:        protoOpts.Fields,
+			ExcludeFields: protoOpts.ExcludeFields,
+		},
+	}, nil
+}
+
+func ListPtrItemToProto(items []*service.Item) ([]*pb.Item, error) {
+	protoItems := make([]*pb.Item, 0, len(items))
+	for _, itm := range items {
+		pi, err := PtrItemToProto(itm)
+		if err != nil {
+			return nil, err
+		}
+		protoItems = append(protoItems, pi)
+	}
+	return protoItems, nil
+}
+
+func ProtoToListPtrItem(protoItems []*pb.Item) ([]*service.Item, error) {
+	items := make([]*service.Item, 0, len(protoItems))
+	for _, itm := range protoItems {
+		pi, err := ProtoToPtrItem(itm)
+		if err != nil {
+			return nil, err
+		}
+		items = append(items, pi)
+	}
+	return items, nil
+}
+
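+// The service methods accept variadic options, so the *OptionsToProto
+// converters merge them into a single proto message and the ProtoTo*
+// converters return a one-element slice.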
+func ProtoToCreateOptions(protoOptions *pb.CreateOptions) ([]*service.CreateOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+	return []*service.CreateOptions{
+		{UpdateAttrs: protoOptions.UpdateAttrs},
+	}, nil
+}
+
+func CreateOptionsToProto(options []*service.CreateOptions) (*pb.CreateOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeCreateOptions(options...)
+
+	return &pb.CreateOptions{
+		UpdateAttrs: opts.UpdateAttrs,
+	}, nil
+}
+
+func ElPtrGetOptionsToProto() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ProtoToElPtrGetOptions() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ElPtrFindOptionsToProto(options []*service.FindOptions) (*pb.FindOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeFindOptions(options...)
+
+	var err error
+
+	fo := &pb.FindOptions{
+		Deleted:   opts.Deleted,
+		Regular:   opts.Regular,
+		Hidden:    opts.Hidden,
+		Templates: opts.Templates,
+	}
+
+	fo.Options, err = PtrServicesFindOptionsToProto(&opts.FindOptions)
+	if err != nil {
+		return nil, err
+	}
+
+	return fo, nil
+}
+
+func ProtoToElPtrFindOptions(protoOptions *pb.FindOptions) ([]*service.FindOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+
+	var err error
+	fo := &service.FindOptions{
+		Deleted:   protoOptions.Deleted,
+		Regular:   protoOptions.Regular,
+		Hidden:    protoOptions.Hidden,
+		Templates: protoOptions.Templates,
+	}
+
+	o, err := ProtoToPtrServicesFindOptions(protoOptions.Options)
+	if err != nil {
+		return nil, err
+	}
+	if o != nil {
+		fo.FindOptions = *o
+	}
+
+	return []*service.FindOptions{fo}, nil
+}
+
+func ProtoToUpdateOptions(protoOptions *pb.UpdateOptions) ([]*service.UpdateOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+	return []*service.UpdateOptions{
+		{UpdateAttrs: protoOptions.UpdateAttrs},
+	}, nil
+}
+
+func UpdateOptionsToProto(options []*service.UpdateOptions) (*pb.UpdateOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeUpdateOptions(options...)
+
+	return &pb.UpdateOptions{
+		UpdateAttrs: opts.UpdateAttrs,
+	}, nil
+}
+
+func ProtoToDeleteOptions(protoOptions *pb.DeleteOptions) ([]*service.DeleteOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+	return []*service.DeleteOptions{
+		{Erase: protoOptions.Erase},
+	}, nil
+}
+
+func DeleteOptionsToProto(options []*service.DeleteOptions) (*pb.DeleteOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeDeleteOptions(options...)
+
+	return &pb.DeleteOptions{
+		Erase: opts.Erase,
+	}, nil
+}
+
+func ProtoToPublishOptions(protoOptions *pb.PublishOptions) ([]*service.PublishOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+	return []*service.PublishOptions{
+		{UpdateAttrs: protoOptions.UpdateAttrs},
+	}, nil
+}
+
+func PublishOptionsToProto(options []*service.PublishOptions) (*pb.PublishOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergePublishOptions(options...)
+
+	return &pb.PublishOptions{
+		UpdateAttrs: opts.UpdateAttrs,
+	}, nil
+}
+
+func ElPtrUnpublishOptionsToProto() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ProtoToElPtrUnpublishOptions() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ElPtrGetPublishedOptionsToProto(options []*service.GetPublishedOptions) (*pb.GetPublishedOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeGetPublishedOptions(options...)
+
+	return &pb.GetPublishedOptions{LocaleId: opts.LocaleID}, nil
+}
+
+func ProtoToElPtrGetPublishedOptions(protoOptions *pb.GetPublishedOptions) ([]*service.GetPublishedOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+
+	return []*service.GetPublishedOptions{{LocaleID: protoOptions.LocaleId}}, nil
+}
+
+func ElPtrFindPublishedOptionsToProto(options []*service.FindPublishedOptions) (*pb.FindPublishedOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeFindPublishedOptions(options...)
+
+	var err error
+
+	fo := &pb.FindPublishedOptions{
+		Regular:   opts.Regular,
+		Hidden:    opts.Hidden,
+		Templates: opts.Templates,
+	}
+	fo.Options, err = PtrServicesFindOptionsToProto(&opts.FindOptions)
+	if err != nil {
+		return nil, err
+	}
+
+	fo.LocaleId = opts.LocaleID
+
+	return fo, nil
+}
+
+func ProtoToElPtrFindPublishedOptions(protoOptions *pb.FindPublishedOptions) ([]*service.FindPublishedOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+
+	var err error
+	fo := &service.FindPublishedOptions{
+		Regular:   protoOptions.Regular,
+		Hidden:    protoOptions.Hidden,
+		Templates: protoOptions.Templates,
+	}
+
+	o, err := ProtoToPtrServicesFindOptions(protoOptions.Options)
+	if err != nil {
+		return nil, err
+	}
+	if o != nil {
+		fo.FindOptions = *o
+	}
+
+	fo.LocaleID = protoOptions.LocaleId
+
+	return []*service.FindPublishedOptions{fo}, nil
+}
+
+func ElPtrGetRevisionOptionsToProto() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ProtoToElPtrGetRevisionOptions() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ElPtrListRevisionsOptionsToProto(options []*service.ListRevisionsOptions) (*pb.ListRevisionsOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeListRevisionsOptions(options...)
+
+	var err error
+
+	fo := &pb.ListRevisionsOptions{}
+
+	fo.Options, err = PtrServicesFindOptionsToProto(&opts.FindOptions)
+	if err != nil {
+		return nil, err
+	}
+
+	return fo, nil
+}
+
+func ProtoToElPtrListRevisionsOptions(protoOptions *pb.ListRevisionsOptions) ([]*service.ListRevisionsOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+
+	var err error
+	fo := &service.ListRevisionsOptions{}
+
+	o, err := ProtoToPtrServicesFindOptions(protoOptions.Options)
+	if err != nil {
+		return nil, err
+	}
+	if o != nil {
+		fo.FindOptions = *o
+	}
+
+	return []*service.ListRevisionsOptions{fo}, nil
+}
+
+func ElPtrArchiveOptionsToProto() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ProtoToElPtrArchiveOptions() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ElPtrFindArchivedOptionsToProto(options []*service.FindArchivedOptions) (*pb.FindArchivedOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+
+	opts := service.MergeFindArchivedOptions(options...)
+
+	var err error
+
+	fo := &pb.FindArchivedOptions{}
+
+	fo.Options, err = PtrServicesFindOptionsToProto(&opts.FindOptions)
+	if err != nil {
+		return nil, err
+	}
+
+	return fo, nil
+}
+
+func ProtoToElPtrFindArchivedOptions(protoOptions *pb.FindArchivedOptions) ([]*service.FindArchivedOptions, error) {
+	if protoOptions == nil {
+		return nil, nil
+	}
+
+	var err error
+	fo := &service.FindArchivedOptions{}
+
+	o, err := ProtoToPtrServicesFindOptions(protoOptions.Options)
+	if err != nil {
+		return nil, err
+	}
+	if o != nil {
+		fo.FindOptions = *o
+	}
+
+	return []*service.FindArchivedOptions{fo}, nil
+}
+
+func ElPtrUnarchiveOptionsToProto() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ProtoToElPtrUnarchiveOptions() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ElPtrIntrospectOptionsToProto() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ProtoToElPtrIntrospectOptions() {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ProtoToPtrServicesAggregateOptions(protoOpts *pb.AggregateOptions) ([]*service.AggregateOptions, error) {
+	if protoOpts == nil {
+		return nil, nil
+	}
+	return []*service.AggregateOptions{{Fields: protoOpts.Fields}}, nil
+}
+
+func PtrServicesAggregateOptionsToProto(opts *service.AggregateOptions) (*pb.AggregateOptions, error) {
+	if opts == nil {
+		return nil, nil
+	}
+	return &pb.AggregateOptions{
+		Fields: opts.Fields,
+	}, nil
+}
+
+func ElPtrAggregateOptionsToProto(options []*service.AggregateOptions) (*pb.AggregateOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+	opts := service.MergeAggregateOptions(options...)
+	return PtrServicesAggregateOptionsToProto(opts)
+}
+
+func ProtoToPtrServicesAggregatePublishedOptions(protoOpts *pb.AggregatePublishedOptions) ([]*service.AggregatePublishedOptions, error) {
+	if protoOpts == nil {
+		return nil, nil
+	}
+	return []*service.AggregatePublishedOptions{{Fields: protoOpts.Fields}}, nil
+}
+
+func PtrServicesAggregatePublishedOptionsToProto(opts *service.AggregatePublishedOptions) (*pb.AggregatePublishedOptions, error) {
+	if opts == nil {
+		return nil, nil
+	}
+	return &pb.AggregatePublishedOptions{
+		Fields: opts.Fields,
+	}, nil
+}
+
+func ElPtrAggregatePublishedOptionsToProto(options []*service.AggregatePublishedOptions) (*pb.AggregatePublishedOptions, error) {
+	if options == nil {
+		return nil, nil
+	}
+	opts := service.MergeAggregatePublishedOptions(options...)
+	return PtrServicesAggregatePublishedOptionsToProto(opts)
+}
+
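+// Schemas are transported as JSON strings: encoding marshals the schema with
+// jsoniter, decoding unmarshals it back into a schema.Schema.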
+func PtrSchemaSchemaToProto(sch *schema.Schema) (string, error) {
+	if sch == nil {
+		return "", nil
+	}
+	res, err := jsoniter.MarshalToString(sch)
+	if err != nil {
+		return "", err
+	}
+	return res, nil
+}
+
+func ProtoToPtrSchemaSchema(protoSch string) (*schema.Schema, error) {
+	if protoSch == "" {
+		return nil, nil
+	}
+	sch := schema.New()
+	err := sch.UnmarshalJSON([]byte(protoSch))
+	if err != nil {
+		return nil, fmt.Errorf("failed to decode schema. err: %s", err.Error())
+	}
+	return sch, nil
+}
+
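+// ValidationErrorsToProto converts field-level validation errors into proto
+// FieldViolation messages; errors that are not errors.FieldError are skipped.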
+func ValidationErrorsToProto(errs []error) ([]*pbcommon.Error_BadRequest_FieldViolation, error) {
+	if errs == nil {
+		return nil, nil
+	}
+
+	var validationErrors []*pbcommon.Error_BadRequest_FieldViolation
+	for _, err := range errs {
+
+		var fieldError errors.FieldError
+		if errors.As(err, &fieldError) {
+			validationErrors = append(validationErrors, &pbcommon.Error_BadRequest_FieldViolation{
+				Description: errors.Unwrap(fieldError).Error(),
+				Field:       fieldError.Field(),
+			})
+		}
+	}
+
+	return validationErrors, nil
+}
+
+func ProtoToValidationErrors(protoErrs []*pbcommon.Error_BadRequest_FieldViolation) ([]error, error) {
+	if protoErrs == nil {
+		return nil, nil
+	}
+
+	var validationErrors []error
+	for _, err := range protoErrs {
+		validationErrors = append(validationErrors, errors.WithField(errors.New(err.Description), err.Field))
+	}
+
+	return validationErrors, nil
+}
diff --git a/pkg/items/transport/grpc/server.go b/pkg/items/transport/grpc/server.go
new file mode 100644
index 0000000000000000000000000000000000000000..4ac8a3b02dd4991518d8b132707bb2dd0ce3c362
--- /dev/null
+++ b/pkg/items/transport/grpc/server.go
@@ -0,0 +1,34 @@
+package transportgrpc
+
+import (
+	grpcerr "git.perx.ru/perxis/perxis-go/pkg/errors/grpc"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/items/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/items"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+)
+
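+// NewServer wraps every endpoint with grpcerr.ServerMiddleware (the gRPC error
+// handling middleware) before building the transport server.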
+func NewServer(svc items.Items, opts ...grpckit.ServerOption) pb.ItemsServer {
+	eps := transport.Endpoints(svc)
+	eps = transport.EndpointsSet{
+		CreateEndpoint:             grpcerr.ServerMiddleware(eps.CreateEndpoint),
+		IntrospectEndpoint:         grpcerr.ServerMiddleware(eps.IntrospectEndpoint),
+		GetEndpoint:                grpcerr.ServerMiddleware(eps.GetEndpoint),
+		FindEndpoint:               grpcerr.ServerMiddleware(eps.FindEndpoint),
+		UpdateEndpoint:             grpcerr.ServerMiddleware(eps.UpdateEndpoint),
+		DeleteEndpoint:             grpcerr.ServerMiddleware(eps.DeleteEndpoint),
+		UndeleteEndpoint:           grpcerr.ServerMiddleware(eps.UndeleteEndpoint),
+		PublishEndpoint:            grpcerr.ServerMiddleware(eps.PublishEndpoint),
+		UnpublishEndpoint:          grpcerr.ServerMiddleware(eps.UnpublishEndpoint),
+		GetPublishedEndpoint:       grpcerr.ServerMiddleware(eps.GetPublishedEndpoint),
+		FindPublishedEndpoint:      grpcerr.ServerMiddleware(eps.FindPublishedEndpoint),
+		GetRevisionEndpoint:        grpcerr.ServerMiddleware(eps.GetRevisionEndpoint),
+		ListRevisionsEndpoint:      grpcerr.ServerMiddleware(eps.ListRevisionsEndpoint),
+		ArchiveEndpoint:            grpcerr.ServerMiddleware(eps.ArchiveEndpoint),
+		FindArchivedEndpoint:       grpcerr.ServerMiddleware(eps.FindArchivedEndpoint),
+		UnarchiveEndpoint:          grpcerr.ServerMiddleware(eps.UnarchiveEndpoint),
+		AggregateEndpoint:          grpcerr.ServerMiddleware(eps.AggregateEndpoint),
+		AggregatePublishedEndpoint: grpcerr.ServerMiddleware(eps.AggregatePublishedEndpoint),
+	}
+	return NewGRPCServer(&eps, opts...)
+}
diff --git a/pkg/items/transport/grpc/server.microgen.go b/pkg/items/transport/grpc/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..a904b1e5a610dc12e38768d88b92eee392a6d7af
--- /dev/null
+++ b/pkg/items/transport/grpc/server.microgen.go
@@ -0,0 +1,292 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// DO NOT EDIT.
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/items/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/items"
+	grpc "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	context "golang.org/x/net/context"
+)
+
+type itemsServer struct {
+	create             grpc.Handler
+	introspect         grpc.Handler
+	get                grpc.Handler
+	find               grpc.Handler
+	update             grpc.Handler
+	delete             grpc.Handler
+	undelete           grpc.Handler
+	publish            grpc.Handler
+	unpublish          grpc.Handler
+	getPublished       grpc.Handler
+	findPublished      grpc.Handler
+	getRevision        grpc.Handler
+	listRevisions      grpc.Handler
+	archive            grpc.Handler
+	findArchived       grpc.Handler
+	unarchive          grpc.Handler
+	aggregate          grpc.Handler
+	aggregatePublished grpc.Handler
+
+	pb.UnimplementedItemsServer
+}
+
+func NewGRPCServer(endpoints *transport.EndpointsSet, opts ...grpc.ServerOption) pb.ItemsServer {
+	return &itemsServer{
+		archive: grpc.NewServer(
+			endpoints.ArchiveEndpoint,
+			_Decode_Archive_Request,
+			_Encode_Archive_Response,
+			opts...,
+		),
+		create: grpc.NewServer(
+			endpoints.CreateEndpoint,
+			_Decode_Create_Request,
+			_Encode_Create_Response,
+			opts...,
+		),
+		delete: grpc.NewServer(
+			endpoints.DeleteEndpoint,
+			_Decode_Delete_Request,
+			_Encode_Delete_Response,
+			opts...,
+		),
+		undelete: grpc.NewServer(
+			endpoints.UndeleteEndpoint,
+			_Decode_Undelete_Request,
+			_Encode_Undelete_Response,
+			opts...,
+		),
+		find: grpc.NewServer(
+			endpoints.FindEndpoint,
+			_Decode_Find_Request,
+			_Encode_Find_Response,
+			opts...,
+		),
+		findArchived: grpc.NewServer(
+			endpoints.FindArchivedEndpoint,
+			_Decode_FindArchived_Request,
+			_Encode_FindArchived_Response,
+			opts...,
+		),
+		findPublished: grpc.NewServer(
+			endpoints.FindPublishedEndpoint,
+			_Decode_FindPublished_Request,
+			_Encode_FindPublished_Response,
+			opts...,
+		),
+		get: grpc.NewServer(
+			endpoints.GetEndpoint,
+			_Decode_Get_Request,
+			_Encode_Get_Response,
+			opts...,
+		),
+		getPublished: grpc.NewServer(
+			endpoints.GetPublishedEndpoint,
+			_Decode_GetPublished_Request,
+			_Encode_GetPublished_Response,
+			opts...,
+		),
+		getRevision: grpc.NewServer(
+			endpoints.GetRevisionEndpoint,
+			_Decode_GetRevision_Request,
+			_Encode_GetRevision_Response,
+			opts...,
+		),
+		introspect: grpc.NewServer(
+			endpoints.IntrospectEndpoint,
+			_Decode_Introspect_Request,
+			_Encode_Introspect_Response,
+			opts...,
+		),
+		listRevisions: grpc.NewServer(
+			endpoints.ListRevisionsEndpoint,
+			_Decode_ListRevisions_Request,
+			_Encode_ListRevisions_Response,
+			opts...,
+		),
+		publish: grpc.NewServer(
+			endpoints.PublishEndpoint,
+			_Decode_Publish_Request,
+			_Encode_Publish_Response,
+			opts...,
+		),
+		unarchive: grpc.NewServer(
+			endpoints.UnarchiveEndpoint,
+			_Decode_Unarchive_Request,
+			_Encode_Unarchive_Response,
+			opts...,
+		),
+		unpublish: grpc.NewServer(
+			endpoints.UnpublishEndpoint,
+			_Decode_Unpublish_Request,
+			_Encode_Unpublish_Response,
+			opts...,
+		),
+		update: grpc.NewServer(
+			endpoints.UpdateEndpoint,
+			_Decode_Update_Request,
+			_Encode_Update_Response,
+			opts...,
+		),
+		aggregate: grpc.NewServer(
+			endpoints.AggregateEndpoint,
+			_Decode_Aggregate_Request,
+			_Encode_Aggregate_Response,
+			opts...,
+		),
+		aggregatePublished: grpc.NewServer(
+			endpoints.AggregatePublishedEndpoint,
+			_Decode_AggregatePublished_Request,
+			_Encode_AggregatePublished_Response,
+			opts...,
+		),
+	}
+}
+
+func (S *itemsServer) Create(ctx context.Context, req *pb.CreateRequest) (*pb.CreateResponse, error) {
+	_, resp, err := S.create.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.CreateResponse), nil
+}
+
+func (S *itemsServer) Introspect(ctx context.Context, req *pb.IntrospectRequest) (*pb.IntrospectResponse, error) {
+	_, resp, err := S.introspect.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.IntrospectResponse), nil
+}
+
+func (S *itemsServer) Get(ctx context.Context, req *pb.GetRequest) (*pb.GetResponse, error) {
+	_, resp, err := S.get.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetResponse), nil
+}
+
+func (S *itemsServer) Find(ctx context.Context, req *pb.FindRequest) (*pb.FindResponse, error) {
+	_, resp, err := S.find.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.FindResponse), nil
+}
+
+func (S *itemsServer) Update(ctx context.Context, req *pb.UpdateRequest) (*empty.Empty, error) {
+	_, resp, err := S.update.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *itemsServer) Delete(ctx context.Context, req *pb.DeleteRequest) (*empty.Empty, error) {
+	_, resp, err := S.delete.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *itemsServer) Undelete(ctx context.Context, req *pb.UndeleteRequest) (*empty.Empty, error) {
+	_, resp, err := S.undelete.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *itemsServer) Publish(ctx context.Context, req *pb.PublishRequest) (*empty.Empty, error) {
+	_, resp, err := S.publish.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *itemsServer) Unpublish(ctx context.Context, req *pb.UnpublishRequest) (*empty.Empty, error) {
+	_, resp, err := S.unpublish.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *itemsServer) GetPublished(ctx context.Context, req *pb.GetPublishedRequest) (*pb.GetPublishedResponse, error) {
+	_, resp, err := S.getPublished.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetPublishedResponse), nil
+}
+
+func (S *itemsServer) FindPublished(ctx context.Context, req *pb.FindPublishedRequest) (*pb.FindPublishedResponse, error) {
+	_, resp, err := S.findPublished.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.FindPublishedResponse), nil
+}
+
+func (S *itemsServer) GetRevision(ctx context.Context, req *pb.GetRevisionRequest) (*pb.GetRevisionResponse, error) {
+	_, resp, err := S.getRevision.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetRevisionResponse), nil
+}
+
+func (S *itemsServer) ListRevisions(ctx context.Context, req *pb.ListRevisionsRequest) (*pb.ListRevisionsResponse, error) {
+	_, resp, err := S.listRevisions.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.ListRevisionsResponse), nil
+}
+
+func (S *itemsServer) Archive(ctx context.Context, req *pb.ArchiveRequest) (*empty.Empty, error) {
+	_, resp, err := S.archive.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *itemsServer) FindArchived(ctx context.Context, req *pb.FindArchivedRequest) (*pb.FindArchivedResponse, error) {
+	_, resp, err := S.findArchived.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.FindArchivedResponse), nil
+}
+
+func (S *itemsServer) Unarchive(ctx context.Context, req *pb.UnarchiveRequest) (*empty.Empty, error) {
+	_, resp, err := S.unarchive.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *itemsServer) Aggregate(ctx context.Context, req *pb.AggregateRequest) (*pb.AggregateResponse, error) {
+	_, resp, err := S.aggregate.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.AggregateResponse), nil
+}
+
+func (S *itemsServer) AggregatePublished(ctx context.Context, req *pb.AggregatePublishedRequest) (*pb.AggregatePublishedResponse, error) {
+	_, resp, err := S.aggregatePublished.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.AggregatePublishedResponse), nil
+}
diff --git a/pkg/items/transport/server.microgen.go b/pkg/items/transport/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..4ba5f4a265125ea7b7168fab8d9c7c0f747f23bf
--- /dev/null
+++ b/pkg/items/transport/server.microgen.go
@@ -0,0 +1,220 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+	"strings"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	endpoint "github.com/go-kit/kit/endpoint"
+	"github.com/hashicorp/go-multierror"
+)
+
+func Endpoints(svc items.Items) EndpointsSet {
+	return EndpointsSet{
+		ArchiveEndpoint:            ArchiveEndpoint(svc),
+		CreateEndpoint:             CreateEndpoint(svc),
+		DeleteEndpoint:             DeleteEndpoint(svc),
+		UndeleteEndpoint:           UndeleteEndpoint(svc),
+		FindArchivedEndpoint:       FindArchivedEndpoint(svc),
+		FindEndpoint:               FindEndpoint(svc),
+		FindPublishedEndpoint:      FindPublishedEndpoint(svc),
+		GetEndpoint:                GetEndpoint(svc),
+		GetPublishedEndpoint:       GetPublishedEndpoint(svc),
+		GetRevisionEndpoint:        GetRevisionEndpoint(svc),
+		IntrospectEndpoint:         IntrospectEndpoint(svc),
+		ListRevisionsEndpoint:      ListRevisionsEndpoint(svc),
+		PublishEndpoint:            PublishEndpoint(svc),
+		UnarchiveEndpoint:          UnarchiveEndpoint(svc),
+		UnpublishEndpoint:          UnpublishEndpoint(svc),
+		UpdateEndpoint:             UpdateEndpoint(svc),
+		AggregateEndpoint:          AggregateEndpoint(svc),
+		AggregatePublishedEndpoint: AggregatePublishedEndpoint(svc),
+	}
+}
+
+func CreateEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*CreateRequest)
+		res0, res1 := svc.Create(arg0, req.Item, req.Opts...)
+		return &CreateResponse{Created: res0}, res1
+	}
+}
+
+func IntrospectEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*IntrospectRequest)
+		res0, res1, res2 := svc.Introspect(arg0, req.Item, req.Opts...)
+		resp := &IntrospectResponse{
+			Item:   res0,
+			Schema: res1,
+		}
+		if res2 != nil {
+
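+			// Validation, modification, decode and encode errors may be wrapped in a
+			// multierror; field-level errors are moved into ValidationErrors and the
+			// transport error is cleared so the response is still returned.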
+			err := res2
+
+			var merr *multierror.Error
+			if (strings.Contains(err.Error(), "validation error") ||
+				strings.Contains(err.Error(), "modification error") ||
+				strings.Contains(err.Error(), "decode error") ||
+				strings.Contains(err.Error(), "encode error")) && errors.As(err, &merr) {
+
+				errs := make([]error, 0)
+				for _, e := range merr.WrappedErrors() {
+					var errField errors.FieldError
+					if errors.As(e, &errField) {
+						errs = append(errs, e)
+					}
+				}
+
+				if len(errs) > 0 {
+					resp.ValidationErrors = errs
+					res2 = nil
+				}
+			}
+		}
+		return resp, res2
+	}
+}
+
+func GetEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetRequest)
+		res0, res1 := svc.Get(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.Options...)
+		return &GetResponse{Item: res0}, res1
+	}
+}
+
+func FindEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*FindRequest)
+		res0, res1, res2 := svc.Find(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Filter, req.Options...)
+		return &FindResponse{
+			Items: res0,
+			Total: res1,
+		}, res2
+	}
+}
+
+func UpdateEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*UpdateRequest)
+		res0 := svc.Update(arg0, req.Item, req.Options...)
+		return &UpdateResponse{}, res0
+	}
+}
+
+func DeleteEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*DeleteRequest)
+		res0 := svc.Delete(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.Options...)
+		return &DeleteResponse{}, res0
+	}
+}
+
+func UndeleteEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*UndeleteRequest)
+		res0 := svc.Undelete(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.Options...)
+		return &UndeleteResponse{}, res0
+	}
+}
+
+func PublishEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*PublishRequest)
+		res0 := svc.Publish(arg0, req.Item, req.Options...)
+		return &PublishResponse{}, res0
+	}
+}
+
+func UnpublishEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*UnpublishRequest)
+		res0 := svc.Unpublish(arg0, req.Item, req.Options...)
+		return &UnpublishResponse{}, res0
+	}
+}
+
+func GetPublishedEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetPublishedRequest)
+		res0, res1 := svc.GetPublished(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.Options...)
+		return &GetPublishedResponse{Item: res0}, res1
+	}
+}
+
+func FindPublishedEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*FindPublishedRequest)
+		res0, res1, res2 := svc.FindPublished(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Filter, req.Options...)
+		return &FindPublishedResponse{
+			Items: res0,
+			Total: res1,
+		}, res2
+	}
+}
+
+func GetRevisionEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetRevisionRequest)
+		res0, res1 := svc.GetRevision(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.RevisionId, req.Options...)
+		return &GetRevisionResponse{Item: res0}, res1
+	}
+}
+
+func ListRevisionsEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*ListRevisionsRequest)
+		res0, res1 := svc.ListRevisions(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.ItemId, req.Options...)
+		return &ListRevisionsResponse{Items: res0}, res1
+	}
+}
+
+func ArchiveEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*ArchiveRequest)
+		res0 := svc.Archive(arg0, req.Item, req.Options...)
+		return &ArchiveResponse{}, res0
+	}
+}
+
+func FindArchivedEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*FindArchivedRequest)
+		res0, res1, res2 := svc.FindArchived(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Filter, req.Options...)
+		return &FindArchivedResponse{
+			Items: res0,
+			Total: res1,
+		}, res2
+	}
+}
+
+func UnarchiveEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*UnarchiveRequest)
+		res0 := svc.Unarchive(arg0, req.Item, req.Options...)
+		return &UnarchiveResponse{}, res0
+	}
+}
+
+func AggregateEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*AggregateRequest)
+		res0, res1 := svc.Aggregate(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Filter, req.Options...)
+		return &AggregateResponse{
+			Result: res0,
+		}, res1
+	}
+}
+
+func AggregatePublishedEndpoint(svc items.Items) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*AggregatePublishedRequest)
+		res0, res1 := svc.AggregatePublished(arg0, req.SpaceId, req.EnvId, req.CollectionId, req.Filter, req.Options...)
+		return &AggregatePublishedResponse{
+			Result: res0,
+		}, res1
+	}
+}
diff --git a/pkg/locales/locale.go b/pkg/locales/locale.go
new file mode 100644
index 0000000000000000000000000000000000000000..d3cc43c6c625a5090d975409d7aa2beb16eefc3a
--- /dev/null
+++ b/pkg/locales/locale.go
@@ -0,0 +1,7 @@
+package locales
+
+type Locale struct {
+	ID      string `json:"id" bson:"_id"` // (Пример: "en", "en-US")
+	SpaceID string `json:"spaceId" bson:"-"`
+	Name    string `json:"name" bson:"name"` // (Пример: "English", "English (US)" )
+}
diff --git a/pkg/locales/middleware/caching_middleware.go b/pkg/locales/middleware/caching_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..8b4635ab4a0214f99407b67020737724ebf3c841
--- /dev/null
+++ b/pkg/locales/middleware/caching_middleware.go
@@ -0,0 +1,53 @@
+package service
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	service "git.perx.ru/perxis/perxis-go/pkg/locales"
+)
+
+func CachingMiddleware(cache *cache.Cache) Middleware {
+	return func(next service.Locales) service.Locales {
+		return &cachingMiddleware{
+			cache: cache,
+			next:  next,
+		}
+	}
+}
+
+type cachingMiddleware struct {
+	cache *cache.Cache
+	next  service.Locales
+}
+
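+// Create invalidates the cached locale list for the affected space so the next
+// List call is served by the underlying service.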
+func (m cachingMiddleware) Create(ctx context.Context, locale *service.Locale) (loc *service.Locale, err error) {
+
+	loc, err = m.next.Create(ctx, locale)
+	if err == nil {
+		m.cache.Remove(loc.SpaceID)
+	}
+	return loc, err
+}
+
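+// List returns the locale list from the cache when present; otherwise it asks
+// the underlying service and stores the result for subsequent calls.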
+func (m cachingMiddleware) List(ctx context.Context, spaceId string) (locales []*service.Locale, err error) {
+
+	value, e := m.cache.Get(spaceId)
+	if e == nil {
+		return value.([]*service.Locale), nil
+	}
+	locales, err = m.next.List(ctx, spaceId)
+	if err == nil {
+		m.cache.Set(spaceId, locales)
+	}
+	return locales, err
+}
+
+func (m cachingMiddleware) Delete(ctx context.Context, spaceId string, localeId string) (err error) {
+
+	err = m.next.Delete(ctx, spaceId, localeId)
+	if err == nil {
+		m.cache.Remove(spaceId)
+	}
+	return err
+}
diff --git a/pkg/locales/middleware/caching_middleware_test.go b/pkg/locales/middleware/caching_middleware_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..de5e7a9f742b6336ecd5a67d529ed468236838f4
--- /dev/null
+++ b/pkg/locales/middleware/caching_middleware_test.go
@@ -0,0 +1,130 @@
+package service
+
+import (
+	"context"
+	"testing"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	"git.perx.ru/perxis/perxis-go/pkg/locales"
+	locmocks "git.perx.ru/perxis/perxis-go/pkg/locales/mocks"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/mock"
+	"github.com/stretchr/testify/require"
+)
+
+func TestLocalesCache(t *testing.T) {
+
+	const (
+		loc1    = "loc1"
+		loc2    = "loc2"
+		spaceID = "spaceID"
+		size    = 5
+		ttl     = 20 * time.Millisecond
+	)
+
+	ctx := context.Background()
+
+	t.Run("List from Cache", func(t *testing.T) {
+		loc := &locmocks.Locales{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl))(loc)
+
+		loc.On("List", mock.Anything, spaceID).Return([]*locales.Locale{{ID: loc1, Name: "name1", SpaceID: spaceID}}, nil).Once()
+
+		vl1, err := svc.List(ctx, spaceID)
+		require.NoError(t, err)
+
+		vl2, err := svc.List(ctx, spaceID)
+		require.NoError(t, err)
+		assert.Same(t, vl1[0], vl2[0], "Ожидается что при повторном запросе объекты будут получены из кэша.")
+
+		loc.AssertExpectations(t)
+	})
+
+	t.Run("Invalidate cache", func(t *testing.T) {
+		t.Run("After Delete", func(t *testing.T) {
+			loc := &locmocks.Locales{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(loc)
+
+			loc.On("List", mock.Anything, spaceID).Return([]*locales.Locale{{ID: loc1, Name: "name1", SpaceID: spaceID}}, nil).Once()
+
+			vl1, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, vl1[0], vl2[0], "Ожидается что при повторном запросе объекты будут получены из кэша.")
+
+			loc.On("Delete", mock.Anything, spaceID, loc1).Return(nil).Once()
+
+			err = svc.Delete(ctx, spaceID, loc1)
+			require.NoError(t, err)
+
+			loc.On("List", mock.Anything, spaceID).Return([]*locales.Locale{}, nil).Once()
+
+			vl3, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Len(t, vl3, 0, "Ожидается что после удаление объекты будут удалены из кеша.")
+
+			loc.AssertExpectations(t)
+		})
+
+		t.Run("After Create", func(t *testing.T) {
+			loc := &locmocks.Locales{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(loc)
+
+			loc.On("List", mock.Anything, spaceID).Return([]*locales.Locale{{ID: loc1, Name: "name1", SpaceID: spaceID}}, nil).Once()
+
+			vl1, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, vl1[0], vl2[0], "Ожидается что при повторном запросе объекты будут получены из кэша.")
+
+			loc.On("Create", mock.Anything, mock.Anything).Return(&locales.Locale{ID: loc2, Name: "name2", SpaceID: spaceID}, nil).Once()
+
+			_, err = svc.Create(ctx, &locales.Locale{ID: loc2, Name: "name2", SpaceID: spaceID})
+			require.NoError(t, err)
+
+			loc.On("List", mock.Anything, spaceID).
+				Return([]*locales.Locale{
+					{ID: loc1, Name: "name1", SpaceID: spaceID},
+					{ID: loc2, Name: "name2", SpaceID: spaceID},
+				}, nil).Once()
+
+			vl3, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Len(t, vl3, 2, "Ожидается что после создания нового объекта данные будут удалены из кеша и получены из сервиса.")
+
+			loc.AssertExpectations(t)
+		})
+
+		t.Run("After TTL expired", func(t *testing.T) {
+			loc := &locmocks.Locales{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(loc)
+
+			loc.On("List", mock.Anything, spaceID).Return([]*locales.Locale{{ID: loc1, Name: "name1", SpaceID: spaceID}}, nil).Once()
+
+			vl1, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, vl1[0], vl2[0], "Ожидается что при повторном запросе объекты будут получены из кэша.")
+
+			time.Sleep(2 * ttl)
+			loc.On("List", mock.Anything, spaceID).Return([]*locales.Locale{{ID: loc1, Name: "name1", SpaceID: spaceID}}, nil).Once()
+
+			vl3, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.NotSame(t, vl2[0], vl3[0], "Ожидается что после истечения TTL объекты будут повторно получены из сервиса, а не из кэша.")
+
+			loc.AssertExpectations(t)
+		})
+	})
+}
diff --git a/pkg/locales/middleware/error_logging_middleware.go b/pkg/locales/middleware/error_logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..695c91128d6f093d93f022468d464bedbd571e04
--- /dev/null
+++ b/pkg/locales/middleware/error_logging_middleware.go
@@ -0,0 +1,60 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/error_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/locales -i Locales -t ../../../assets/templates/middleware/error_log -o error_logging_middleware.go -l ""
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/locales"
+	"go.uber.org/zap"
+)
+
+// errorLoggingMiddleware implements locales.Locales that is instrumented with logging
+type errorLoggingMiddleware struct {
+	logger *zap.Logger
+	next   locales.Locales
+}
+
+// ErrorLoggingMiddleware instruments an implementation of the locales.Locales with simple logging
+func ErrorLoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next locales.Locales) locales.Locales {
+		return &errorLoggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *errorLoggingMiddleware) Create(ctx context.Context, locale *locales.Locale) (created *locales.Locale, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Create(ctx, locale)
+}
+
+func (m *errorLoggingMiddleware) Delete(ctx context.Context, spaceId string, localeId string) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Delete(ctx, spaceId, localeId)
+}
+
+func (m *errorLoggingMiddleware) List(ctx context.Context, spaceId string) (locales []*locales.Locale, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.List(ctx, spaceId)
+}
diff --git a/pkg/locales/middleware/logging_middleware.go b/pkg/locales/middleware/logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..fb98d25759e87d199e8c65a204f30b3acb48c1f4
--- /dev/null
+++ b/pkg/locales/middleware/logging_middleware.go
@@ -0,0 +1,142 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/access_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/locales -i Locales -t ../../../assets/templates/middleware/access_log -o logging_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/auth"
+	"git.perx.ru/perxis/perxis-go/pkg/locales"
+	"go.uber.org/zap"
+	"go.uber.org/zap/zapcore"
+)
+
+// loggingMiddleware implements locales.Locales that is instrumented with logging
+type loggingMiddleware struct {
+	logger *zap.Logger
+	next   locales.Locales
+}
+
+// LoggingMiddleware instruments an implementation of the locales.Locales with simple logging
+func LoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next locales.Locales) locales.Locales {
+		return &loggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *loggingMiddleware) Create(ctx context.Context, locale *locales.Locale) (created *locales.Locale, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":    ctx,
+		"locale": locale} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Create.Request", fields...)
+
+	created, err = m.next.Create(ctx, locale)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"created": created,
+		"err":     err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Create.Response", fields...)
+
+	return created, err
+}
+
+func (m *loggingMiddleware) Delete(ctx context.Context, spaceId string, localeId string) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":      ctx,
+		"spaceId":  spaceId,
+		"localeId": localeId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Delete.Request", fields...)
+
+	err = m.next.Delete(ctx, spaceId, localeId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Delete.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) List(ctx context.Context, spaceId string) (locales []*locales.Locale, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("List.Request", fields...)
+
+	locales, err = m.next.List(ctx, spaceId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"locales": locales,
+		"err":     err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("List.Response", fields...)
+
+	return locales, err
+}
diff --git a/pkg/locales/middleware/middleware.go b/pkg/locales/middleware/middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..726b535247256a109f0fc5aa100e0a61cc928555
--- /dev/null
+++ b/pkg/locales/middleware/middleware.go
@@ -0,0 +1,28 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/middleware
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/locales -i Locales -t ../../../assets/templates/middleware/middleware -o middleware.go -l ""
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/locales"
+	"go.uber.org/zap"
+)
+
+type Middleware func(locales.Locales) locales.Locales
+
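+// WithLog wraps the Locales service with error logging, optional access logging
+// and panic recovery. Recovery is applied last, making it the outermost layer so
+// it also catches panics raised inside the logging middlewares.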
+func WithLog(s locales.Locales, logger *zap.Logger, log_access bool) locales.Locales {
+	if logger == nil {
+		logger = zap.NewNop()
+	}
+
+	logger = logger.Named("Locales")
+	s = ErrorLoggingMiddleware(logger)(s)
+	if log_access {
+		s = LoggingMiddleware(logger)(s)
+	}
+	s = RecoveringMiddleware(logger)(s)
+	return s
+}
diff --git a/pkg/locales/middleware/recovering_middleware.go b/pkg/locales/middleware/recovering_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..44b198550418034e1963fec5ccbd405e0ea12ef4
--- /dev/null
+++ b/pkg/locales/middleware/recovering_middleware.go
@@ -0,0 +1,67 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/recovery
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/locales -i Locales -t ../../../assets/templates/middleware/recovery -o recovering_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+
+	"git.perx.ru/perxis/perxis-go/pkg/locales"
+	"go.uber.org/zap"
+)
+
+// recoveringMiddleware implements locales.Locales that is instrumented with logging
+type recoveringMiddleware struct {
+	logger *zap.Logger
+	next   locales.Locales
+}
+
+// RecoveringMiddleware instruments an implementation of the locales.Locales with simple logging
+func RecoveringMiddleware(logger *zap.Logger) Middleware {
+	return func(next locales.Locales) locales.Locales {
+		return &recoveringMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *recoveringMiddleware) Create(ctx context.Context, locale *locales.Locale) (created *locales.Locale, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Create(ctx, locale)
+}
+
+func (m *recoveringMiddleware) Delete(ctx context.Context, spaceId string, localeId string) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Delete(ctx, spaceId, localeId)
+}
+
+func (m *recoveringMiddleware) List(ctx context.Context, spaceId string) (locales []*locales.Locale, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.List(ctx, spaceId)
+}
diff --git a/pkg/locales/mocks/Locales.go b/pkg/locales/mocks/Locales.go
new file mode 100644
index 0000000000000000000000000000000000000000..3e63dfedfff975f52f886d19e1e8982bccb96b7e
--- /dev/null
+++ b/pkg/locales/mocks/Locales.go
@@ -0,0 +1,75 @@
+// Code generated by mockery v2.7.4. DO NOT EDIT.
+
+package mocks
+
+import (
+	context "context"
+
+	locales "git.perx.ru/perxis/perxis-go/pkg/locales"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// Locales is an autogenerated mock type for the Locales type
+type Locales struct {
+	mock.Mock
+}
+
+// Create provides a mock function with given fields: ctx, locale
+func (_m *Locales) Create(ctx context.Context, locale *locales.Locale) (*locales.Locale, error) {
+	ret := _m.Called(ctx, locale)
+
+	var r0 *locales.Locale
+	if rf, ok := ret.Get(0).(func(context.Context, *locales.Locale) *locales.Locale); ok {
+		r0 = rf(ctx, locale)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*locales.Locale)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, *locales.Locale) error); ok {
+		r1 = rf(ctx, locale)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Delete provides a mock function with given fields: ctx, spaceId, localeId
+func (_m *Locales) Delete(ctx context.Context, spaceId string, localeId string) error {
+	ret := _m.Called(ctx, spaceId, localeId)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok {
+		r0 = rf(ctx, spaceId, localeId)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// List provides a mock function with given fields: ctx, spaceId
+func (_m *Locales) List(ctx context.Context, spaceId string) ([]*locales.Locale, error) {
+	ret := _m.Called(ctx, spaceId)
+
+	var r0 []*locales.Locale
+	if rf, ok := ret.Get(0).(func(context.Context, string) []*locales.Locale); ok {
+		r0 = rf(ctx, spaceId)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*locales.Locale)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string) error); ok {
+		r1 = rf(ctx, spaceId)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
diff --git a/pkg/locales/service.go b/pkg/locales/service.go
new file mode 100644
index 0000000000000000000000000000000000000000..7724d7f7ecdb2f4733d97bff01113d105adabdeb
--- /dev/null
+++ b/pkg/locales/service.go
@@ -0,0 +1,14 @@
+package locales
+
+import (
+	"context"
+)
+
+// @microgen grpc
+// @protobuf git.perx.ru/perxis/perxis-go/proto/locales
+// @grpc-addr content.locales.Locales
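+// Locales manages the locales of a space: it creates a locale, lists the
+// locales of a space and deletes a locale by ID.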
+type Locales interface {
+	Create(ctx context.Context, locale *Locale) (created *Locale, err error)
+	List(ctx context.Context, spaceId string) (locales []*Locale, err error)
+	Delete(ctx context.Context, spaceId, localeId string) (err error)
+}
diff --git a/pkg/locales/transport/client.microgen.go b/pkg/locales/transport/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..f8cd9dee23dafeff1d635b3f5ee42f6e5c8667f1
--- /dev/null
+++ b/pkg/locales/transport/client.microgen.go
@@ -0,0 +1,51 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+	"errors"
+
+	locales "git.perx.ru/perxis/perxis-go/pkg/locales"
+	codes "google.golang.org/grpc/codes"
+	status "google.golang.org/grpc/status"
+)
+
+func (set EndpointsSet) Create(arg0 context.Context, arg1 *locales.Locale) (res0 *locales.Locale, res1 error) {
+	request := CreateRequest{Locale: arg1}
+	response, res1 := set.CreateEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*CreateResponse).Created, res1
+}
+
+func (set EndpointsSet) List(arg0 context.Context, arg1 string) (res0 []*locales.Locale, res1 error) {
+	request := ListRequest{SpaceId: arg1}
+	response, res1 := set.ListEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*ListResponse).Locales, res1
+}
+
+func (set EndpointsSet) Delete(arg0 context.Context, arg1 string, arg2 string) (res0 error) {
+	request := DeleteRequest{
+		LocaleId: arg2,
+		SpaceId:  arg1,
+	}
+	_, res0 = set.DeleteEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
diff --git a/pkg/locales/transport/endpoints.microgen.go b/pkg/locales/transport/endpoints.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..ffca7318747104f8b58af332e90646f8cc6a8b9c
--- /dev/null
+++ b/pkg/locales/transport/endpoints.microgen.go
@@ -0,0 +1,12 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import endpoint "github.com/go-kit/kit/endpoint"
+
+// EndpointsSet implements Locales API and used for transport purposes.
+type EndpointsSet struct {
+	CreateEndpoint endpoint.Endpoint
+	ListEndpoint   endpoint.Endpoint
+	DeleteEndpoint endpoint.Endpoint
+}
diff --git a/pkg/locales/transport/exchanges.microgen.go b/pkg/locales/transport/exchanges.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..a07204e13a233fd7bae40c74e9984061871a92b7
--- /dev/null
+++ b/pkg/locales/transport/exchanges.microgen.go
@@ -0,0 +1,28 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import locales "git.perx.ru/perxis/perxis-go/pkg/locales"
+
+type (
+	CreateRequest struct {
+		Locale *locales.Locale `json:"locale"`
+	}
+	CreateResponse struct {
+		Created *locales.Locale `json:"created"`
+	}
+
+	ListRequest struct {
+		SpaceId string `json:"space_id"`
+	}
+	ListResponse struct {
+		Locales []*locales.Locale `json:"locales"`
+	}
+
+	DeleteRequest struct {
+		SpaceId  string `json:"space_id"`
+		LocaleId string `json:"locale_id"`
+	}
+	// Formal exchange type, please do not delete.
+	DeleteResponse struct{}
+)
diff --git a/pkg/locales/transport/grpc/client.microgen.go b/pkg/locales/transport/grpc/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..3af5bbae0ed86bf079d93622cc8f1aa4f7fbdfd8
--- /dev/null
+++ b/pkg/locales/transport/grpc/client.microgen.go
@@ -0,0 +1,40 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/locales/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/locales"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	grpc "google.golang.org/grpc"
+)
+
+func NewGRPCClient(conn *grpc.ClientConn, addr string, opts ...grpckit.ClientOption) transport.EndpointsSet {
+	if addr == "" {
+		addr = "content.locales.Locales"
+	}
+	return transport.EndpointsSet{
+		CreateEndpoint: grpckit.NewClient(
+			conn, addr, "Create",
+			_Encode_Create_Request,
+			_Decode_Create_Response,
+			pb.CreateResponse{},
+			opts...,
+		).Endpoint(),
+		DeleteEndpoint: grpckit.NewClient(
+			conn, addr, "Delete",
+			_Encode_Delete_Request,
+			_Decode_Delete_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		ListEndpoint: grpckit.NewClient(
+			conn, addr, "List",
+			_Encode_List_Request,
+			_Decode_List_Response,
+			pb.ListResponse{},
+			opts...,
+		).Endpoint(),
+	}
+}
diff --git a/pkg/locales/transport/grpc/protobuf_endpoint_converters.microgen.go b/pkg/locales/transport/grpc/protobuf_endpoint_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..5d06e8232c89f226e4f7c0a80ca6d52eb219d48f
--- /dev/null
+++ b/pkg/locales/transport/grpc/protobuf_endpoint_converters.microgen.go
@@ -0,0 +1,131 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// Please, do not change functions names!
+package transportgrpc
+
+import (
+	"context"
+	"errors"
+
+	transport "git.perx.ru/perxis/perxis-go/pkg/locales/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/locales"
+	empty "github.com/golang/protobuf/ptypes/empty"
+)
+
+func _Encode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*transport.CreateRequest)
+	pbLocale, err := PtrLocaleToProto(req.Locale)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateRequest{Locale: pbLocale}, nil
+}
+
+func _Encode_List_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListRequest")
+	}
+	req := request.(*transport.ListRequest)
+	return &pb.ListRequest{SpaceId: req.SpaceId}, nil
+}
+
+func _Encode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*transport.DeleteRequest)
+	return &pb.DeleteRequest{
+		LocaleId: req.LocaleId,
+		SpaceId:  req.SpaceId,
+	}, nil
+}
+
+func _Encode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*transport.CreateResponse)
+	respLocale, err := PtrLocaleToProto(resp.Created)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateResponse{Locale: respLocale}, nil
+}
+
+func _Encode_List_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListResponse")
+	}
+	resp := response.(*transport.ListResponse)
+	respLocales, err := ListPtrLocaleToProto(resp.Locales)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.ListResponse{Locales: respLocales}, nil
+}
+
+func _Encode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*pb.CreateRequest)
+	locale, err := ProtoToPtrLocale(req.Locale)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateRequest{Locale: locale}, nil
+}
+
+func _Decode_List_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListRequest")
+	}
+	req := request.(*pb.ListRequest)
+	return &transport.ListRequest{SpaceId: string(req.SpaceId)}, nil
+}
+
+func _Decode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*pb.DeleteRequest)
+	return &transport.DeleteRequest{
+		LocaleId: string(req.LocaleId),
+		SpaceId:  string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*pb.CreateResponse)
+	respLocale, err := ProtoToPtrLocale(resp.Locale)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateResponse{Created: respLocale}, nil
+}
+
+func _Decode_List_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListResponse")
+	}
+	resp := response.(*pb.ListResponse)
+	respLocales, err := ProtoToListPtrLocale(resp.Locales)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.ListResponse{Locales: respLocales}, nil
+}
+
+func _Decode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
diff --git a/pkg/locales/transport/grpc/protobuf_type_converters.microgen.go b/pkg/locales/transport/grpc/protobuf_type_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..6dca0bb8afa68dfe3fa17b53d315f3716c48ae36
--- /dev/null
+++ b/pkg/locales/transport/grpc/protobuf_type_converters.microgen.go
@@ -0,0 +1,48 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// It is better for you if you do not change functions names!
+// This file will never be overwritten.
+package transportgrpc
+
+import (
+	service "git.perx.ru/perxis/perxis-go/pkg/locales"
+	pb "git.perx.ru/perxis/perxis-go/proto/locales"
+)
+
+func PtrLocaleToProto(locale *service.Locale) (*pb.Locale, error) {
+	if locale == nil {
+		return nil, nil
+	}
+	return &pb.Locale{Id: locale.ID, Name: locale.Name, SpaceId: locale.SpaceID}, nil
+}
+
+func ProtoToPtrLocale(protoLocale *pb.Locale) (*service.Locale, error) {
+	if protoLocale == nil {
+		return nil, nil
+	}
+	return &service.Locale{ID: protoLocale.Id, Name: protoLocale.Name, SpaceID: protoLocale.SpaceId}, nil
+}
+
+func ListPtrLocaleToProto(locales []*service.Locale) ([]*pb.Locale, error) {
+	protoLocales := make([]*pb.Locale, 0, len(locales))
+	for _, l := range locales {
+		pl, err := PtrLocaleToProto(l)
+		if err != nil {
+			return nil, err
+		}
+		protoLocales = append(protoLocales, pl)
+	}
+	return protoLocales, nil
+}
+
+func ProtoToListPtrLocale(protoLocales []*pb.Locale) ([]*service.Locale, error) {
+	locales := make([]*service.Locale, 0, len(protoLocales))
+	for _, pl := range protoLocales {
+		l, err := ProtoToPtrLocale(pl)
+		if err != nil {
+			return nil, err
+		}
+		locales = append(locales, l)
+	}
+	return locales, nil
+}
diff --git a/pkg/locales/transport/grpc/server.microgen.go b/pkg/locales/transport/grpc/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..88e549f6e23e99a04d037fcefb4603a13821d24a
--- /dev/null
+++ b/pkg/locales/transport/grpc/server.microgen.go
@@ -0,0 +1,67 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// DO NOT EDIT.
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/locales/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/locales"
+	grpc "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	context "golang.org/x/net/context"
+)
+
+type localesServer struct {
+	create grpc.Handler
+	list   grpc.Handler
+	delete grpc.Handler
+
+	pb.UnimplementedLocalesServer
+}
+
+func NewGRPCServer(endpoints *transport.EndpointsSet, opts ...grpc.ServerOption) pb.LocalesServer {
+	return &localesServer{
+		create: grpc.NewServer(
+			endpoints.CreateEndpoint,
+			_Decode_Create_Request,
+			_Encode_Create_Response,
+			opts...,
+		),
+		delete: grpc.NewServer(
+			endpoints.DeleteEndpoint,
+			_Decode_Delete_Request,
+			_Encode_Delete_Response,
+			opts...,
+		),
+		list: grpc.NewServer(
+			endpoints.ListEndpoint,
+			_Decode_List_Request,
+			_Encode_List_Response,
+			opts...,
+		),
+	}
+}
+
+func (S *localesServer) Create(ctx context.Context, req *pb.CreateRequest) (*pb.CreateResponse, error) {
+	_, resp, err := S.create.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.CreateResponse), nil
+}
+
+func (S *localesServer) List(ctx context.Context, req *pb.ListRequest) (*pb.ListResponse, error) {
+	_, resp, err := S.list.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.ListResponse), nil
+}
+
+func (S *localesServer) Delete(ctx context.Context, req *pb.DeleteRequest) (*empty.Empty, error) {
+	_, resp, err := S.delete.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
diff --git a/pkg/locales/transport/server.microgen.go b/pkg/locales/transport/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..5ce815dcb52415a93ddfeb095f99e22fb14c4492
--- /dev/null
+++ b/pkg/locales/transport/server.microgen.go
@@ -0,0 +1,42 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+
+	locales "git.perx.ru/perxis/perxis-go/pkg/locales"
+	endpoint "github.com/go-kit/kit/endpoint"
+)
+
+func Endpoints(svc locales.Locales) EndpointsSet {
+	return EndpointsSet{
+		CreateEndpoint: CreateEndpoint(svc),
+		DeleteEndpoint: DeleteEndpoint(svc),
+		ListEndpoint:   ListEndpoint(svc),
+	}
+}
+
+func CreateEndpoint(svc locales.Locales) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*CreateRequest)
+		res0, res1 := svc.Create(arg0, req.Locale)
+		return &CreateResponse{Created: res0}, res1
+	}
+}
+
+func ListEndpoint(svc locales.Locales) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*ListRequest)
+		res0, res1 := svc.List(arg0, req.SpaceId)
+		return &ListResponse{Locales: res0}, res1
+	}
+}
+
+func DeleteEndpoint(svc locales.Locales) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*DeleteRequest)
+		res0 := svc.Delete(arg0, req.SpaceId, req.LocaleId)
+		return &DeleteResponse{}, res0
+	}
+}
diff --git a/pkg/members/members.go b/pkg/members/members.go
new file mode 100644
index 0000000000000000000000000000000000000000..0993b6fddfe34cf7f257e6b70433893485a60dba
--- /dev/null
+++ b/pkg/members/members.go
@@ -0,0 +1,35 @@
+package members
+
+import (
+	"fmt"
+)
+
+type Member struct {
+	OrgId  string `bson:"orgId"`
+	UserId string `bson:"userId"`
+	Role   Role   `bson:"role"`
+}
+
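+// Role is a user's membership level within an organization. The zero value,
+// NotMember, means the user does not belong to the organization.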
+type Role uint
+
+const (
+	NotMember Role = iota
+	RoleMember
+	RoleOwner
+	RoleAdmin
+)
+
+func (r Role) IsPrivileged() bool {
+	return r == RoleOwner || r == RoleAdmin
+}
+
+func (r Role) Format(s fmt.State, verb rune) {
+	switch r {
+	case RoleOwner:
+		fmt.Fprint(s, "owner")
+	case RoleAdmin:
+		fmt.Fprint(s, "admin")
+	case RoleMember:
+		fmt.Fprint(s, "member")
+	}
+}
diff --git a/pkg/members/middleware/caching_middleware.go b/pkg/members/middleware/caching_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..2faa5ce433281d8d396bb5c912e4e91cfaff727f
--- /dev/null
+++ b/pkg/members/middleware/caching_middleware.go
@@ -0,0 +1,102 @@
+package service
+
+import (
+	"context"
+	"strings"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	service "git.perx.ru/perxis/perxis-go/pkg/members"
+)
+
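+// The cache holds three kinds of entries: a user's role under "<orgId>-<userId>",
+// an organization's member list under "<orgId>" and a user's organization
+// memberships under "<userId>". Mutating operations drop every key they may affect.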
+func makeKey(ss ...string) string {
+	return strings.Join(ss, "-")
+}
+
+func CachingMiddleware(cache *cache.Cache) Middleware {
+	return func(next service.Members) service.Members {
+		return &cachingMiddleware{
+			cache: cache,
+			next:  next,
+		}
+	}
+}
+
+type cachingMiddleware struct {
+	cache *cache.Cache
+	next  service.Members
+}
+
+func (m cachingMiddleware) Set(ctx context.Context, orgId string, userId string, role service.Role) (err error) {
+
+	err = m.next.Set(ctx, orgId, userId, role)
+	if err == nil {
+		m.cache.Remove(makeKey(orgId, userId))
+		m.cache.Remove(makeKey(orgId))
+		m.cache.Remove(makeKey(userId))
+	}
+	return err
+}
+
+func (m cachingMiddleware) Get(ctx context.Context, orgId string, userId string) (role service.Role, err error) {
+
+	key := makeKey(orgId, userId)
+	value, e := m.cache.Get(key)
+	if e == nil {
+		return value.(service.Role), err
+	}
+	role, err = m.next.Get(ctx, orgId, userId)
+	if err == nil {
+		m.cache.Set(key, role)
+	}
+	return role, err
+}
+
+func (m cachingMiddleware) Remove(ctx context.Context, orgId string, userId string) (err error) {
+
+	err = m.next.Remove(ctx, orgId, userId)
+	if err == nil {
+		m.cache.Remove(makeKey(orgId, userId))
+		m.cache.Remove(makeKey(orgId))
+		m.cache.Remove(makeKey(userId))
+	}
+	return err
+}
+
+func (m cachingMiddleware) RemoveAll(ctx context.Context, orgId string) (err error) {
+
+	// Capture the member list before removal: once next.RemoveAll has run, the
+	// service no longer returns the members whose cached entries must be dropped.
+	members, _ := m.ListMembers(ctx, orgId)
+
+	err = m.next.RemoveAll(ctx, orgId)
+	if err == nil {
+		for _, member := range members {
+			m.cache.Remove(member.UserId)
+			m.cache.Remove(makeKey(orgId, member.UserId))
+		}
+		m.cache.Remove(makeKey(orgId))
+	}
+	return err
+}
+
+func (m cachingMiddleware) ListMembers(ctx context.Context, orgId string) (members []*service.Member, err error) {
+
+	value, e := m.cache.Get(makeKey(orgId))
+	if e == nil {
+		return value.([]*service.Member), err
+	}
+	members, err = m.next.ListMembers(ctx, orgId)
+	if err == nil {
+		m.cache.Set(makeKey(orgId), members)
+	}
+	return members, err
+}
+
+func (m cachingMiddleware) ListOrganizations(ctx context.Context, userId string) (members []*service.Member, err error) {
+
+	value, e := m.cache.Get(makeKey(userId))
+	if e == nil {
+		return value.([]*service.Member), err
+	}
+	members, err = m.next.ListOrganizations(ctx, userId)
+	if err == nil {
+		m.cache.Set(makeKey(userId), members)
+	}
+	return members, err
+}
diff --git a/pkg/members/middleware/caching_middleware_test.go b/pkg/members/middleware/caching_middleware_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..1844dc58ebb97b228800a95f9c9cb8b142407fb4
--- /dev/null
+++ b/pkg/members/middleware/caching_middleware_test.go
@@ -0,0 +1,147 @@
+package service
+
+import (
+	"context"
+	"testing"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	"git.perx.ru/perxis/perxis-go/pkg/members"
+	mocksmembers "git.perx.ru/perxis/perxis-go/pkg/members/mocks"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/mock"
+	"github.com/stretchr/testify/require"
+)
+
+func TestMembersCache(t *testing.T) {
+
+	const (
+		orgId  = "orgId"
+		userId = "userId"
+		size   = 5
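+		// deliberately short so the TTL-expiry case, which sleeps for 2*ttl, stays fast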
+		ttl    = 20 * time.Millisecond
+	)
+
+	ctx := context.Background()
+
+	t.Run("Get from cache", func(t *testing.T) {
+		mbrs := &mocksmembers.Members{}
+		svc := CachingMiddleware(cache.NewCache(size, ttl))(mbrs)
+
+		mbrs.On("Get", mock.Anything, orgId, userId).Return(members.RoleOwner, nil).Once()
+
+		v1, err := svc.Get(ctx, orgId, userId)
+		require.NoError(t, err)
+
+		v2, err := svc.Get(ctx, orgId, userId)
+		require.NoError(t, err)
+		assert.Equal(t, v1, v2, "Expected the repeated Get request to return the value from the cache.")
+
+		mbrs.AssertExpectations(t)
+	})
+
+	t.Run("Invalidate cache", func(t *testing.T) {
+		t.Run("After Set", func(t *testing.T) {
+			mbrs := &mocksmembers.Members{}
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(mbrs)
+
+			mbrs.On("Get", mock.Anything, orgId, userId).Return(members.RoleOwner, nil).Once()
+
+			v1, err := svc.Get(ctx, orgId, userId)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, orgId, userId)
+			require.NoError(t, err)
+			assert.Equal(t, v1, v2, "Expected the value to be returned from the cache.")
+
+			mbrs.On("Set", mock.Anything, orgId, userId, members.RoleMember).Return(nil).Once()
+
+			err = svc.Set(ctx, orgId, userId, members.RoleMember)
+			require.NoError(t, err)
+
+			mbrs.On("Get", mock.Anything, orgId, userId).Return(members.RoleMember, nil).Once()
+
+			v3, err := svc.Get(ctx, orgId, userId)
+			require.NoError(t, err)
+			assert.NotEqual(t, v2, v3, "Expected the cache entry to be invalidated and the value to be fetched from the service again.")
+			mbrs.AssertExpectations(t)
+		})
+
+		t.Run("After Remove", func(t *testing.T) {
+			mbrs := &mocksmembers.Members{}
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(mbrs)
+
+			mbrs.On("Get", mock.Anything, orgId, userId).Return(members.RoleOwner, nil).Once()
+
+			v1, err := svc.Get(ctx, orgId, userId)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, orgId, userId)
+			require.NoError(t, err)
+			assert.Equal(t, v1, v2, "Expected the value to be returned from the cache.")
+
+			mbrs.On("Remove", mock.Anything, orgId, userId).Return(nil).Once()
+
+			err = svc.Remove(ctx, orgId, userId)
+			require.NoError(t, err)
+
+			mbrs.On("Get", mock.Anything, orgId, userId).Return(members.NotMember, nil).Once()
+
+			v3, err := svc.Get(ctx, orgId, userId)
+			require.NoError(t, err)
+			assert.NotEqual(t, v2, v3, "Expected the cache entry to be invalidated after removal from the store and the value to be fetched from the service again.")
+
+			mbrs.AssertExpectations(t)
+		})
+
+		t.Run("After RemoveAll", func(t *testing.T) {
+			mbrs := &mocksmembers.Members{}
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(mbrs)
+
+			mbrs.On("Get", mock.Anything, orgId, userId).Return(members.RoleOwner, nil).Once()
+			mbrs.On("ListMembers", mock.Anything, orgId).Return([]*members.Member{{OrgId: orgId, UserId: userId, Role: members.RoleOwner}}, nil)
+
+			v1, err := svc.Get(ctx, orgId, userId)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, orgId, userId)
+			require.NoError(t, err)
+			assert.Equal(t, v1, v2, "Expected the value to be returned from the cache.")
+
+			mbrs.On("RemoveAll", mock.Anything, orgId).Return(nil).Once()
+
+			err = svc.RemoveAll(ctx, orgId)
+			require.NoError(t, err)
+
+			mbrs.On("Get", mock.Anything, orgId, userId).Return(members.NotMember, nil).Once()
+
+			v3, err := svc.Get(ctx, orgId, userId)
+			require.NoError(t, err)
+			assert.NotEqual(t, v2, v3, "Expected the cache entry to be invalidated after removal from the store and the value to be fetched from the service again.")
+
+			mbrs.AssertExpectations(t)
+		})
+
+		t.Run("After TTL expired", func(t *testing.T) {
+			mbrs := &mocksmembers.Members{}
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(mbrs)
+
+			mbrs.On("Get", mock.Anything, orgId, userId).Return(members.RoleOwner, nil).Once()
+
+			v1, err := svc.Get(ctx, orgId, userId)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, orgId, userId)
+			require.NoError(t, err)
+			assert.Equal(t, v1, v2, "Expected the value to be returned from the cache.")
+
+			time.Sleep(2 * ttl)
+
+			mbrs.On("Get", mock.Anything, orgId, userId).Return(members.RoleMember, nil).Once()
+
+			v3, err := svc.Get(ctx, orgId, userId)
+			require.NoError(t, err)
+			assert.NotEqual(t, v2, v3, "Expected the cache entry to expire after the TTL and the value to be fetched from the service again.")
+			mbrs.AssertExpectations(t)
+		})
+	})
+}
diff --git a/pkg/members/middleware/error_logging_middleware.go b/pkg/members/middleware/error_logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..08d2814bf4fc8f16f0df57462770342a93e191c8
--- /dev/null
+++ b/pkg/members/middleware/error_logging_middleware.go
@@ -0,0 +1,90 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/error_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/members -i Members -t ../../../assets/templates/middleware/error_log -o error_logging_middleware.go -l ""
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/members"
+	"go.uber.org/zap"
+)
+
+// errorLoggingMiddleware implements members.Members that is instrumented with logging
+type errorLoggingMiddleware struct {
+	logger *zap.Logger
+	next   members.Members
+}
+
+// ErrorLoggingMiddleware instruments an implementation of the members.Members with simple logging
+func ErrorLoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next members.Members) members.Members {
+		return &errorLoggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *errorLoggingMiddleware) Get(ctx context.Context, orgId string, userId string) (role members.Role, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Get(ctx, orgId, userId)
+}
+
+func (m *errorLoggingMiddleware) ListMembers(ctx context.Context, orgId string) (members []*members.Member, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.ListMembers(ctx, orgId)
+}
+
+func (m *errorLoggingMiddleware) ListOrganizations(ctx context.Context, userId string) (organizations []*members.Member, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.ListOrganizations(ctx, userId)
+}
+
+func (m *errorLoggingMiddleware) Remove(ctx context.Context, orgId string, userId string) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Remove(ctx, orgId, userId)
+}
+
+func (m *errorLoggingMiddleware) RemoveAll(ctx context.Context, orgId string) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.RemoveAll(ctx, orgId)
+}
+
+func (m *errorLoggingMiddleware) Set(ctx context.Context, orgId string, userId string, role members.Role) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Set(ctx, orgId, userId, role)
+}
diff --git a/pkg/members/middleware/logging_middleware.go b/pkg/members/middleware/logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..1b9ead6d06d694945c89638c52d4bce07e4ee938
--- /dev/null
+++ b/pkg/members/middleware/logging_middleware.go
@@ -0,0 +1,251 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/access_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/members -i Members -t ../../../assets/templates/middleware/access_log -o logging_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/auth"
+	"git.perx.ru/perxis/perxis-go/pkg/members"
+	"go.uber.org/zap"
+	"go.uber.org/zap/zapcore"
+)
+
+// loggingMiddleware implements members.Members that is instrumented with logging
+type loggingMiddleware struct {
+	logger *zap.Logger
+	next   members.Members
+}
+
+// LoggingMiddleware instruments an implementation of the members.Members with simple logging
+func LoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next members.Members) members.Members {
+		return &loggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *loggingMiddleware) Get(ctx context.Context, orgId string, userId string) (role members.Role, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":    ctx,
+		"orgId":  orgId,
+		"userId": userId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Request", fields...)
+
+	role, err = m.next.Get(ctx, orgId, userId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"role": role,
+		"err":  err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Response", fields...)
+
+	return role, err
+}
+
+func (m *loggingMiddleware) ListMembers(ctx context.Context, orgId string) (members []*members.Member, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":   ctx,
+		"orgId": orgId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("ListMembers.Request", fields...)
+
+	members, err = m.next.ListMembers(ctx, orgId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"members": members,
+		"err":     err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("ListMembers.Response", fields...)
+
+	return members, err
+}
+
+func (m *loggingMiddleware) ListOrganizations(ctx context.Context, userId string) (organizations []*members.Member, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":    ctx,
+		"userId": userId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("ListOrganizations.Request", fields...)
+
+	organizations, err = m.next.ListOrganizations(ctx, userId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"organizations": organizations,
+		"err":           err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("ListOrganizations.Response", fields...)
+
+	return organizations, err
+}
+
+func (m *loggingMiddleware) Remove(ctx context.Context, orgId string, userId string) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":    ctx,
+		"orgId":  orgId,
+		"userId": userId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Remove.Request", fields...)
+
+	err = m.next.Remove(ctx, orgId, userId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Remove.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) RemoveAll(ctx context.Context, orgId string) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":   ctx,
+		"orgId": orgId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("RemoveAll.Request", fields...)
+
+	err = m.next.RemoveAll(ctx, orgId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("RemoveAll.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Set(ctx context.Context, orgId string, userId string, role members.Role) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":    ctx,
+		"orgId":  orgId,
+		"userId": userId,
+		"role":   role} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Set.Request", fields...)
+
+	err = m.next.Set(ctx, orgId, userId, role)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Set.Response", fields...)
+
+	return err
+}
diff --git a/pkg/members/middleware/middleware.go b/pkg/members/middleware/middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..1aa0cfbe798f5688587ab8e4f5d4e166383d8d92
--- /dev/null
+++ b/pkg/members/middleware/middleware.go
@@ -0,0 +1,28 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/middleware
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/members -i Members -t ../../../assets/templates/middleware/middleware -o middleware.go -l ""
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/members"
+	"go.uber.org/zap"
+)
+
+type Middleware func(members.Members) members.Members
+
+func WithLog(s members.Members, logger *zap.Logger, log_access bool) members.Members {
+	if logger == nil {
+		logger = zap.NewNop()
+	}
+
+	logger = logger.Named("Members")
+	s = ErrorLoggingMiddleware(logger)(s)
+	if log_access {
+		s = LoggingMiddleware(logger)(s)
+	}
+	s = RecoveringMiddleware(logger)(s)
+	return s
+}
diff --git a/pkg/members/middleware/recovering_middleware.go b/pkg/members/middleware/recovering_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..ec6db9f480d35215c47bff6fceef8c0e01a77447
--- /dev/null
+++ b/pkg/members/middleware/recovering_middleware.go
@@ -0,0 +1,103 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/recovery
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/members -i Members -t ../../../assets/templates/middleware/recovery -o recovering_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+
+	"git.perx.ru/perxis/perxis-go/pkg/members"
+	"go.uber.org/zap"
+)
+
+// recoveringMiddleware implements members.Members that is instrumented with logging
+type recoveringMiddleware struct {
+	logger *zap.Logger
+	next   members.Members
+}
+
+// RecoveringMiddleware instruments an implementation of the members.Members with simple logging
+func RecoveringMiddleware(logger *zap.Logger) Middleware {
+	return func(next members.Members) members.Members {
+		return &recoveringMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *recoveringMiddleware) Get(ctx context.Context, orgId string, userId string) (role members.Role, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Get(ctx, orgId, userId)
+}
+
+func (m *recoveringMiddleware) ListMembers(ctx context.Context, orgId string) (members []*members.Member, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.ListMembers(ctx, orgId)
+}
+
+func (m *recoveringMiddleware) ListOrganizations(ctx context.Context, userId string) (organizations []*members.Member, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.ListOrganizations(ctx, userId)
+}
+
+func (m *recoveringMiddleware) Remove(ctx context.Context, orgId string, userId string) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Remove(ctx, orgId, userId)
+}
+
+func (m *recoveringMiddleware) RemoveAll(ctx context.Context, orgId string) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.RemoveAll(ctx, orgId)
+}
+
+func (m *recoveringMiddleware) Set(ctx context.Context, orgId string, userId string, role members.Role) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Set(ctx, orgId, userId, role)
+}
diff --git a/pkg/members/mocks/Members.go b/pkg/members/mocks/Members.go
new file mode 100644
index 0000000000000000000000000000000000000000..2adeaac705bf18a17eb06f7932d5d3769de1b50f
--- /dev/null
+++ b/pkg/members/mocks/Members.go
@@ -0,0 +1,124 @@
+// Code generated by mockery v2.7.4. DO NOT EDIT.
+
+package mocks
+
+import (
+	context "context"
+
+	members "git.perx.ru/perxis/perxis-go/pkg/members"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// Members is an autogenerated mock type for the Members type
+type Members struct {
+	mock.Mock
+}
+
+// Get provides a mock function with given fields: ctx, orgId, userId
+func (_m *Members) Get(ctx context.Context, orgId string, userId string) (members.Role, error) {
+	ret := _m.Called(ctx, orgId, userId)
+
+	var r0 members.Role
+	if rf, ok := ret.Get(0).(func(context.Context, string, string) members.Role); ok {
+		r0 = rf(ctx, orgId, userId)
+	} else {
+		r0 = ret.Get(0).(members.Role)
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok {
+		r1 = rf(ctx, orgId, userId)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// ListMembers provides a mock function with given fields: ctx, orgId
+func (_m *Members) ListMembers(ctx context.Context, orgId string) ([]*members.Member, error) {
+	ret := _m.Called(ctx, orgId)
+
+	var r0 []*members.Member
+	if rf, ok := ret.Get(0).(func(context.Context, string) []*members.Member); ok {
+		r0 = rf(ctx, orgId)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*members.Member)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string) error); ok {
+		r1 = rf(ctx, orgId)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// ListOrganizations provides a mock function with given fields: ctx, userId
+func (_m *Members) ListOrganizations(ctx context.Context, userId string) ([]*members.Member, error) {
+	ret := _m.Called(ctx, userId)
+
+	var r0 []*members.Member
+	if rf, ok := ret.Get(0).(func(context.Context, string) []*members.Member); ok {
+		r0 = rf(ctx, userId)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*members.Member)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string) error); ok {
+		r1 = rf(ctx, userId)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Remove provides a mock function with given fields: ctx, orgId, userId
+func (_m *Members) Remove(ctx context.Context, orgId string, userId string) error {
+	ret := _m.Called(ctx, orgId, userId)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok {
+		r0 = rf(ctx, orgId, userId)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// RemoveAll provides a mock function with given fields: ctx, orgId
+func (_m *Members) RemoveAll(ctx context.Context, orgId string) error {
+	ret := _m.Called(ctx, orgId)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string) error); ok {
+		r0 = rf(ctx, orgId)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Set provides a mock function with given fields: ctx, orgId, userId, role
+func (_m *Members) Set(ctx context.Context, orgId string, userId string, role members.Role) error {
+	ret := _m.Called(ctx, orgId, userId, role)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, members.Role) error); ok {
+		r0 = rf(ctx, orgId, userId, role)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
diff --git a/pkg/members/observer/mocks/Observer.go b/pkg/members/observer/mocks/Observer.go
new file mode 100644
index 0000000000000000000000000000000000000000..dd236a90fcc00410e57457eddb57e16a40245b92
--- /dev/null
+++ b/pkg/members/observer/mocks/Observer.go
@@ -0,0 +1,29 @@
+// Code generated by mockery v2.7.4. DO NOT EDIT.
+
+package mocks
+
+import (
+	context "context"
+
+	collaborators "git.perx.ru/perxis/perxis-go/pkg/collaborators"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// Observer is an autogenerated mock type for the Observer type
+type Observer struct {
+	mock.Mock
+}
+
+// OnCollaboratorSet provides a mock function with given fields: ctx, collaborator
+func (_m *Observer) OnCollaboratorSet(ctx context.Context, collaborator *collaborators.Collaborator) (string, error) {
+	ret := _m.Called(ctx, collaborator)
+
+	var r0 string
+	if rf, ok := ret.Get(0).(func(context.Context, *collaborators.Collaborator) string); ok {
+		r0 = rf(ctx, collaborator)
+	} else {
+		r0 = ret.Get(0).(string)
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, *collaborators.Collaborator) error); ok {
+		r1 = rf(ctx, collaborator)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
diff --git a/pkg/members/observer/service.go b/pkg/members/observer/service.go
new file mode 100644
index 0000000000000000000000000000000000000000..2a4ecb7828ff828baa36d5eb09589f9727dfe1ac
--- /dev/null
+++ b/pkg/members/observer/service.go
@@ -0,0 +1,14 @@
+package observer
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/collaborators"
+)
+
+// @microgen grpc
+// @protobuf git.perx.ru/perxis/perxis-go/proto/members
+// @grpc-addr account.members.Observer
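+// Observer is notified when a collaborator is set on a space and may schedule
+// follow-up work, returning the identifier of any delayed task it creates.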
+type Observer interface {
+	OnCollaboratorSet(ctx context.Context, collaborator *collaborators.Collaborator) (delayedTaskID string, err error)
+}
diff --git a/pkg/members/observer/transport/client.microgen.go b/pkg/members/observer/transport/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..28ca2a88bdbb5e5095b8c2f7d2e9a5e85b67cc09
--- /dev/null
+++ b/pkg/members/observer/transport/client.microgen.go
@@ -0,0 +1,24 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+	"errors"
+
+	collaborators "git.perx.ru/perxis/perxis-go/pkg/collaborators"
+	codes "google.golang.org/grpc/codes"
+	status "google.golang.org/grpc/status"
+)
+
+func (set EndpointsSet) OnCollaboratorSet(arg0 context.Context, arg1 *collaborators.Collaborator) (res0 string, res1 error) {
+	request := OnCollaboratorSetRequest{Collaborator: arg1}
+	response, res1 := set.OnCollaboratorSetEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*OnCollaboratorSetResponse).DelayedTaskID, res1
+}
diff --git a/pkg/members/observer/transport/endpoints.microgen.go b/pkg/members/observer/transport/endpoints.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..859f2e0c3edd4edcf67e21acca5545b927b07f32
--- /dev/null
+++ b/pkg/members/observer/transport/endpoints.microgen.go
@@ -0,0 +1,10 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import endpoint "github.com/go-kit/kit/endpoint"
+
+// EndpointsSet implements Observer API and used for transport purposes.
+type EndpointsSet struct {
+	OnCollaboratorSetEndpoint endpoint.Endpoint
+}
diff --git a/pkg/members/observer/transport/exchanges.microgen.go b/pkg/members/observer/transport/exchanges.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..56b68bd77675c788e11605b58a6e2ef9c1cffd7e
--- /dev/null
+++ b/pkg/members/observer/transport/exchanges.microgen.go
@@ -0,0 +1,14 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import collaborators "git.perx.ru/perxis/perxis-go/pkg/collaborators"
+
+type (
+	OnCollaboratorSetRequest struct {
+		Collaborator *collaborators.Collaborator `json:"collaborator"`
+	}
+	OnCollaboratorSetResponse struct {
+		DelayedTaskID string `json:"delayed_task_id"`
+	}
+)
diff --git a/pkg/members/observer/transport/grpc/client.microgen.go b/pkg/members/observer/transport/grpc/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..6e1e21c700a08ab5738950f6b8b95aa8ed34e732
--- /dev/null
+++ b/pkg/members/observer/transport/grpc/client.microgen.go
@@ -0,0 +1,23 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/members/observer/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/members"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+	grpc "google.golang.org/grpc"
+)
+
+func NewGRPCClient(conn *grpc.ClientConn, addr string, opts ...grpckit.ClientOption) transport.EndpointsSet {
+	if addr == "" {
+		addr = "account.members.Observer"
+	}
+	return transport.EndpointsSet{OnCollaboratorSetEndpoint: grpckit.NewClient(
+		conn, addr, "OnCollaboratorSet",
+		_Encode_OnCollaboratorSet_Request,
+		_Decode_OnCollaboratorSet_Response,
+		pb.OnCollaboratorSetResponse{},
+		opts...,
+	).Endpoint()}
+}
diff --git a/pkg/members/observer/transport/grpc/protobuf_endpoint_converters.microgen.go b/pkg/members/observer/transport/grpc/protobuf_endpoint_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..57c7b9d9e66969a356a1fcd8ad180731059206ab
--- /dev/null
+++ b/pkg/members/observer/transport/grpc/protobuf_endpoint_converters.microgen.go
@@ -0,0 +1,52 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// Please, do not change functions names!
+package transportgrpc
+
+import (
+	"context"
+	"errors"
+
+	transport "git.perx.ru/perxis/perxis-go/pkg/members/observer/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/members"
+)
+
+func _Encode_OnCollaboratorSet_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil OnCollaboratorSetRequest")
+	}
+	req := request.(*transport.OnCollaboratorSetRequest)
+	reqCollaborator, err := PtrCollaboratorsCollaboratorToProto(req.Collaborator)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.OnCollaboratorSetRequest{Collaborator: reqCollaborator}, nil
+}
+
+func _Decode_OnCollaboratorSet_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil OnCollaboratorSetRequest")
+	}
+	req := request.(*pb.OnCollaboratorSetRequest)
+	reqCollaborator, err := ProtoToPtrCollaboratorsCollaborator(req.Collaborator)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.OnCollaboratorSetRequest{Collaborator: reqCollaborator}, nil
+}
+
+func _Decode_OnCollaboratorSet_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil OnCollaboratorSetResponse")
+	}
+	resp := response.(*pb.OnCollaboratorSetResponse)
+	return &transport.OnCollaboratorSetResponse{DelayedTaskID: string(resp.DelayedTaskId)}, nil
+}
+
+func _Encode_OnCollaboratorSet_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil OnCollaboratorSetResponse")
+	}
+	resp := response.(*transport.OnCollaboratorSetResponse)
+	return &pb.OnCollaboratorSetResponse{DelayedTaskId: resp.DelayedTaskID}, nil
+}
diff --git a/pkg/members/observer/transport/grpc/protobuf_type_converters.microgen.go b/pkg/members/observer/transport/grpc/protobuf_type_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..9016b6319c0747adcb032e119623a19aa3696ca6
--- /dev/null
+++ b/pkg/members/observer/transport/grpc/protobuf_type_converters.microgen.go
@@ -0,0 +1,32 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// It is better for you if you do not change function names!
+// This file will never be overwritten.
+package transportgrpc
+
+import (
+	collaborators "git.perx.ru/perxis/perxis-go/pkg/collaborators"
+	pbcommon "git.perx.ru/perxis/perxis-go/proto/common"
+)
+
+func PtrCollaboratorsCollaboratorToProto(collaborator *collaborators.Collaborator) (*pbcommon.Collaborator, error) {
+	if collaborator == nil {
+		return nil, nil
+	}
+	return &pbcommon.Collaborator{
+		SpaceId: collaborator.SpaceID,
+		Subject: collaborator.Subject,
+		Role:    collaborator.Role,
+	}, nil
+}
+
+func ProtoToPtrCollaboratorsCollaborator(protoCollaborator *pbcommon.Collaborator) (*collaborators.Collaborator, error) {
+	if protoCollaborator == nil {
+		return nil, nil
+	}
+	return &collaborators.Collaborator{
+		SpaceID: protoCollaborator.SpaceId,
+		Subject: protoCollaborator.Subject,
+		Role:    protoCollaborator.Role,
+	}, nil
+}
diff --git a/pkg/members/observer/transport/grpc/server.microgen.go b/pkg/members/observer/transport/grpc/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..68d0069d06b5c3b34ccb98507e01a860672b00d2
--- /dev/null
+++ b/pkg/members/observer/transport/grpc/server.microgen.go
@@ -0,0 +1,34 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// DO NOT EDIT.
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/members/observer/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/members"
+	grpc "github.com/go-kit/kit/transport/grpc"
+	context "golang.org/x/net/context"
+)
+
+type observerServer struct {
+	onCollaboratorSet grpc.Handler
+
+	pb.UnimplementedObserverServer
+}
+
+func NewGRPCServer(endpoints *transport.EndpointsSet, opts ...grpc.ServerOption) pb.ObserverServer {
+	return &observerServer{onCollaboratorSet: grpc.NewServer(
+		endpoints.OnCollaboratorSetEndpoint,
+		_Decode_OnCollaboratorSet_Request,
+		_Encode_OnCollaboratorSet_Response,
+		opts...,
+	)}
+}
+
+func (S *observerServer) OnCollaboratorSet(ctx context.Context, req *pb.OnCollaboratorSetRequest) (*pb.OnCollaboratorSetResponse, error) {
+	_, resp, err := S.onCollaboratorSet.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.OnCollaboratorSetResponse), nil
+}
diff --git a/pkg/members/observer/transport/server.microgen.go b/pkg/members/observer/transport/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..db97b834f222926dd4b5f5bbf6df56bb47a93845
--- /dev/null
+++ b/pkg/members/observer/transport/server.microgen.go
@@ -0,0 +1,22 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+
+	observer "git.perx.ru/perxis/perxis-go/pkg/members/observer"
+	endpoint "github.com/go-kit/kit/endpoint"
+)
+
+func Endpoints(svc observer.Observer) EndpointsSet {
+	return EndpointsSet{OnCollaboratorSetEndpoint: OnCollaboratorSetEndpoint(svc)}
+}
+
+func OnCollaboratorSetEndpoint(svc observer.Observer) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*OnCollaboratorSetRequest)
+		res0, res1 := svc.OnCollaboratorSet(arg0, req.Collaborator)
+		return &OnCollaboratorSetResponse{DelayedTaskID: res0}, res1
+	}
+}
diff --git a/pkg/members/service.go b/pkg/members/service.go
new file mode 100644
index 0000000000000000000000000000000000000000..5b88239b9b8509d8c1f65ca1a1a353cfe7281503
--- /dev/null
+++ b/pkg/members/service.go
@@ -0,0 +1,23 @@
+package members
+
+import (
+	"context"
+)
+
+// @microgen grpc
+// @protobuf git.perx.ru/perxis/perxis-go/proto/members
+// @grpc-addr account.members.Members
+type Members interface {
+	Set(ctx context.Context, orgId, userId string, role Role) (err error)
+
+	Get(ctx context.Context, orgId, userId string) (role Role, err error)
+
+	Remove(ctx context.Context, orgId, userId string) (err error)
+
+	// @microgen -
+	RemoveAll(ctx context.Context, orgId string) (err error)
+
+	ListMembers(ctx context.Context, orgId string) (members []*Member, err error)
+
+	ListOrganizations(ctx context.Context, userId string) (organizations []*Member, err error)
+}
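For orientation, a minimal sketch of how a Members implementation is typically exposed through the generated go-kit transport in the files that follow; `svc`, `lis` and the `RegisterMembersServer` call are illustrative assumptions, not part of this patch:

```go
package example

import (
	"net"

	"git.perx.ru/perxis/perxis-go/pkg/members"
	"git.perx.ru/perxis/perxis-go/pkg/members/transport"
	transportgrpc "git.perx.ru/perxis/perxis-go/pkg/members/transport/grpc"
	pb "git.perx.ru/perxis/perxis-go/proto/members"
	"google.golang.org/grpc"
)

// serve exposes a Members implementation over gRPC via the generated transport.
func serve(svc members.Members, lis net.Listener) error {
	eps := transport.Endpoints(svc) // wrap the service into go-kit endpoints
	srv := grpc.NewServer()
	// RegisterMembersServer is assumed to be the standard registration function
	// produced by protoc-gen-go-grpc for the Members service.
	pb.RegisterMembersServer(srv, transportgrpc.NewGRPCServer(&eps))
	return srv.Serve(lis)
}
```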
diff --git a/pkg/members/transport/client.microgen.go b/pkg/members/transport/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..4cd10cad5b1b970f30a9380eae77a5d706748a08
--- /dev/null
+++ b/pkg/members/transport/client.microgen.go
@@ -0,0 +1,86 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+	"errors"
+
+	members "git.perx.ru/perxis/perxis-go/pkg/members"
+	codes "google.golang.org/grpc/codes"
+	status "google.golang.org/grpc/status"
+)
+
+func (set EndpointsSet) Set(arg0 context.Context, arg1 string, arg2 string, arg3 members.Role) (res0 error) {
+	request := SetRequest{
+		OrgId:  arg1,
+		Role:   arg3,
+		UserId: arg2,
+	}
+	_, res0 = set.SetEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Get(arg0 context.Context, arg1 string, arg2 string) (res0 members.Role, res1 error) {
+	request := GetRequest{
+		OrgId:  arg1,
+		UserId: arg2,
+	}
+	response, res1 := set.GetEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*GetResponse).Role, res1
+}
+
+func (set EndpointsSet) Remove(arg0 context.Context, arg1 string, arg2 string) (res0 error) {
+	request := RemoveRequest{
+		OrgId:  arg1,
+		UserId: arg2,
+	}
+	_, res0 = set.RemoveEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) RemoveAll(arg0 context.Context, arg1 string) (res0 error) {
+	return
+}
+
+func (set EndpointsSet) ListMembers(arg0 context.Context, arg1 string) (res0 []*members.Member, res1 error) {
+	request := ListMembersRequest{OrgId: arg1}
+	response, res1 := set.ListMembersEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*ListMembersResponse).Members, res1
+}
+
+func (set EndpointsSet) ListOrganizations(arg0 context.Context, arg1 string) (res0 []*members.Member, res1 error) {
+	request := ListOrganizationsRequest{UserId: arg1}
+	response, res1 := set.ListOrganizationsEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*ListOrganizationsResponse).Organizations, res1
+}
diff --git a/pkg/members/transport/endpoints.microgen.go b/pkg/members/transport/endpoints.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..88740bb07c63a46ff111527fdeae40dfc89a6167
--- /dev/null
+++ b/pkg/members/transport/endpoints.microgen.go
@@ -0,0 +1,14 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import endpoint "github.com/go-kit/kit/endpoint"
+
+// EndpointsSet implements the Members API and is used for transport purposes.
+type EndpointsSet struct {
+	SetEndpoint               endpoint.Endpoint
+	GetEndpoint               endpoint.Endpoint
+	RemoveEndpoint            endpoint.Endpoint
+	ListMembersEndpoint       endpoint.Endpoint
+	ListOrganizationsEndpoint endpoint.Endpoint
+}
diff --git a/pkg/members/transport/exchanges.microgen.go b/pkg/members/transport/exchanges.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..afa03b65ed508022e89565e2f0881e6afb3c5678
--- /dev/null
+++ b/pkg/members/transport/exchanges.microgen.go
@@ -0,0 +1,44 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import members "git.perx.ru/perxis/perxis-go/pkg/members"
+
+type (
+	SetRequest struct {
+		OrgId  string       `json:"org_id"`
+		UserId string       `json:"user_id"`
+		Role   members.Role `json:"role"`
+	}
+	// Formal exchange type, please do not delete.
+	SetResponse struct{}
+
+	GetRequest struct {
+		OrgId  string `json:"org_id"`
+		UserId string `json:"user_id"`
+	}
+	GetResponse struct {
+		Role members.Role `json:"role"`
+	}
+
+	RemoveRequest struct {
+		OrgId  string `json:"org_id"`
+		UserId string `json:"user_id"`
+	}
+	// Formal exchange type, please do not delete.
+	RemoveResponse struct{}
+
+	ListMembersRequest struct {
+		OrgId string `json:"org_id"`
+	}
+	ListMembersResponse struct {
+		Members []*members.Member `json:"members"`
+	}
+
+	ListOrganizationsRequest struct {
+		UserId string `json:"user_id"`
+	}
+	ListOrganizationsResponse struct {
+		Organizations []*members.Member `json:"organizations"`
+	}
+)
diff --git a/pkg/members/transport/grpc/client.microgen.go b/pkg/members/transport/grpc/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..6df6b7951b031d7125b2a93174cf38117671ec7a
--- /dev/null
+++ b/pkg/members/transport/grpc/client.microgen.go
@@ -0,0 +1,54 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/members/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/members"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	grpc "google.golang.org/grpc"
+)
+
+func NewGRPCClient(conn *grpc.ClientConn, addr string, opts ...grpckit.ClientOption) transport.EndpointsSet {
+	if addr == "" {
+		addr = "account.members.Members"
+	}
+	return transport.EndpointsSet{
+		GetEndpoint: grpckit.NewClient(
+			conn, addr, "Get",
+			_Encode_Get_Request,
+			_Decode_Get_Response,
+			pb.GetResponse{},
+			opts...,
+		).Endpoint(),
+		ListMembersEndpoint: grpckit.NewClient(
+			conn, addr, "ListMembers",
+			_Encode_ListMembers_Request,
+			_Decode_ListMembers_Response,
+			pb.ListMembersResponse{},
+			opts...,
+		).Endpoint(),
+		ListOrganizationsEndpoint: grpckit.NewClient(
+			conn, addr, "ListOrganizations",
+			_Encode_ListOrganizations_Request,
+			_Decode_ListOrganizations_Response,
+			pb.ListOrganizationsResponse{},
+			opts...,
+		).Endpoint(),
+		RemoveEndpoint: grpckit.NewClient(
+			conn, addr, "Remove",
+			_Encode_Remove_Request,
+			_Decode_Remove_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		SetEndpoint: grpckit.NewClient(
+			conn, addr, "Set",
+			_Encode_Set_Request,
+			_Decode_Set_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+	}
+}
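On the caller side, the generated EndpointsSet doubles as a members.Members implementation, so a client only needs a connection. A sketch under assumptions (the dial options and target are illustrative):

```go
package example

import (
	"git.perx.ru/perxis/perxis-go/pkg/members"
	transportgrpc "git.perx.ru/perxis/perxis-go/pkg/members/transport/grpc"
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
)

// newMembersClient dials the target and returns the generated client set,
// which satisfies members.Members (see client.microgen.go above).
func newMembersClient(target string) (members.Members, error) {
	conn, err := grpc.Dial(target, grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		return nil, err
	}
	// An empty addr falls back to the default "account.members.Members" service name.
	return transportgrpc.NewGRPCClient(conn, ""), nil
}
```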
diff --git a/pkg/members/transport/grpc/protobuf_endpoint_converters.microgen.go b/pkg/members/transport/grpc/protobuf_endpoint_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..9ee491a71335a89beb14582f02b8b26fd3a264d6
--- /dev/null
+++ b/pkg/members/transport/grpc/protobuf_endpoint_converters.microgen.go
@@ -0,0 +1,209 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// Please do not change function names!
+package transportgrpc
+
+import (
+	"context"
+	"errors"
+
+	"git.perx.ru/perxis/perxis-go/pkg/members/transport"
+	"git.perx.ru/perxis/perxis-go/proto/members"
+	"github.com/golang/protobuf/ptypes/empty"
+)
+
+func _Encode_Set_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil SetRequest")
+	}
+	req := request.(*transport.SetRequest)
+	reqRole, err := RoleToProto(req.Role)
+	if err != nil {
+		return nil, err
+	}
+	return &members.SetRequest{
+		OrgId:  req.OrgId,
+		Role:   reqRole,
+		UserId: req.UserId,
+	}, nil
+}
+
+func _Encode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*transport.GetRequest)
+	return &members.GetRequest{
+		OrgId:  req.OrgId,
+		UserId: req.UserId,
+	}, nil
+}
+
+func _Encode_Remove_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil RemoveRequest")
+	}
+	req := request.(*transport.RemoveRequest)
+	return &members.RemoveRequest{
+		OrgId:  req.OrgId,
+		UserId: req.UserId,
+	}, nil
+}
+
+func _Encode_ListMembers_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListMembersRequest")
+	}
+	req := request.(*transport.ListMembersRequest)
+	return &members.ListMembersRequest{OrgId: req.OrgId}, nil
+}
+
+func _Encode_ListOrganizations_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListOrganizationsRequest")
+	}
+	req := request.(*transport.ListOrganizationsRequest)
+	return &members.ListOrganizationsRequest{UserId: req.UserId}, nil
+}
+
+func _Encode_Set_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*transport.GetResponse)
+	respRole, err := RoleToProto(resp.Role)
+	if err != nil {
+		return nil, err
+	}
+	return &members.GetResponse{Role: respRole}, nil
+}
+
+func _Encode_Remove_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_ListMembers_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListMembersResponse")
+	}
+	resp := response.(*transport.ListMembersResponse)
+	respMembers, err := ListPtrMemberToProto(resp.Members)
+	if err != nil {
+		return nil, err
+	}
+	return &members.ListMembersResponse{Members: respMembers}, nil
+}
+
+func _Encode_ListOrganizations_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListOrganizationsResponse")
+	}
+	resp := response.(*transport.ListOrganizationsResponse)
+	respOrganizations, err := ListPtrMemberToProto(resp.Organizations)
+	if err != nil {
+		return nil, err
+	}
+	return &members.ListOrganizationsResponse{Organizations: respOrganizations}, nil
+}
+
+func _Decode_Set_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil SetRequest")
+	}
+	req := request.(*members.SetRequest)
+	reqRole, err := ProtoToRole(req.Role)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.SetRequest{
+		OrgId:  string(req.OrgId),
+		Role:   reqRole,
+		UserId: string(req.UserId),
+	}, nil
+}
+
+func _Decode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*members.GetRequest)
+	return &transport.GetRequest{
+		OrgId:  string(req.OrgId),
+		UserId: string(req.UserId),
+	}, nil
+}
+
+func _Decode_Remove_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil RemoveRequest")
+	}
+	req := request.(*members.RemoveRequest)
+	return &transport.RemoveRequest{
+		OrgId:  string(req.OrgId),
+		UserId: string(req.UserId),
+	}, nil
+}
+
+func _Decode_ListMembers_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListMembersRequest")
+	}
+	req := request.(*members.ListMembersRequest)
+	return &transport.ListMembersRequest{OrgId: string(req.OrgId)}, nil
+}
+
+func _Decode_ListOrganizations_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListOrganizationsRequest")
+	}
+	req := request.(*members.ListOrganizationsRequest)
+	return &transport.ListOrganizationsRequest{UserId: string(req.UserId)}, nil
+}
+
+func _Decode_Set_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*members.GetResponse)
+	respRole, err := ProtoToRole(resp.Role)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetResponse{Role: respRole}, nil
+}
+
+func _Decode_Remove_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_ListMembers_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListMembersResponse")
+	}
+	resp := response.(*members.ListMembersResponse)
+	respMembers, err := ProtoToListPtrMember(resp.Members)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.ListMembersResponse{Members: respMembers}, nil
+}
+
+func _Decode_ListOrganizations_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListOrganizationsResponse")
+	}
+	resp := response.(*members.ListOrganizationsResponse)
+	respOrganizations, err := ProtoToListPtrMember(resp.Organizations)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.ListOrganizationsResponse{Organizations: respOrganizations}, nil
+}
diff --git a/pkg/members/transport/grpc/protobuf_type_converters.microgen.go b/pkg/members/transport/grpc/protobuf_type_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..70d4628a570042ecad26a9adc557f948a8098778
--- /dev/null
+++ b/pkg/members/transport/grpc/protobuf_type_converters.microgen.go
@@ -0,0 +1,42 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// It is better for you if you do not change function names!
+// This file will never be overwritten.
+package transportgrpc
+
+import (
+	service "git.perx.ru/perxis/perxis-go/pkg/members"
+	pb "git.perx.ru/perxis/perxis-go/proto/members"
+)
+
+func RoleToProto(role service.Role) (pb.Role, error) {
+	return pb.Role(role), nil
+}
+
+func ProtoToRole(protoRole pb.Role) (service.Role, error) {
+	return service.Role(protoRole), nil
+}
+
+func ListPtrMemberToProto(members []*service.Member) ([]*pb.Member, error) {
+	res := make([]*pb.Member, 0, len(members))
+	for _, m := range members {
+		res = append(res, &pb.Member{
+			OrgId:  m.OrgId,
+			UserId: m.UserId,
+			Role:   pb.Role(m.Role),
+		})
+	}
+	return res, nil
+}
+
+func ProtoToListPtrMember(protoMembers []*pb.Member) ([]*service.Member, error) {
+	res := make([]*service.Member, 0, len(protoMembers))
+	for _, m := range protoMembers {
+		res = append(res, &service.Member{
+			OrgId:  m.OrgId,
+			UserId: m.UserId,
+			Role:   service.Role(m.Role),
+		})
+	}
+	return res, nil
+}
diff --git a/pkg/members/transport/grpc/server.microgen.go b/pkg/members/transport/grpc/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..1d11e1c0990df144d99f511d20b4f844d4431b7a
--- /dev/null
+++ b/pkg/members/transport/grpc/server.microgen.go
@@ -0,0 +1,97 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// DO NOT EDIT.
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/members/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/members"
+	grpc "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	context "golang.org/x/net/context"
+)
+
+type membersServer struct {
+	set               grpc.Handler
+	get               grpc.Handler
+	remove            grpc.Handler
+	listMembers       grpc.Handler
+	listOrganizations grpc.Handler
+
+	pb.UnimplementedMembersServer
+}
+
+func NewGRPCServer(endpoints *transport.EndpointsSet, opts ...grpc.ServerOption) pb.MembersServer {
+	return &membersServer{
+		get: grpc.NewServer(
+			endpoints.GetEndpoint,
+			_Decode_Get_Request,
+			_Encode_Get_Response,
+			opts...,
+		),
+		listMembers: grpc.NewServer(
+			endpoints.ListMembersEndpoint,
+			_Decode_ListMembers_Request,
+			_Encode_ListMembers_Response,
+			opts...,
+		),
+		listOrganizations: grpc.NewServer(
+			endpoints.ListOrganizationsEndpoint,
+			_Decode_ListOrganizations_Request,
+			_Encode_ListOrganizations_Response,
+			opts...,
+		),
+		remove: grpc.NewServer(
+			endpoints.RemoveEndpoint,
+			_Decode_Remove_Request,
+			_Encode_Remove_Response,
+			opts...,
+		),
+		set: grpc.NewServer(
+			endpoints.SetEndpoint,
+			_Decode_Set_Request,
+			_Encode_Set_Response,
+			opts...,
+		),
+	}
+}
+
+func (S *membersServer) Set(ctx context.Context, req *pb.SetRequest) (*empty.Empty, error) {
+	_, resp, err := S.set.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *membersServer) Get(ctx context.Context, req *pb.GetRequest) (*pb.GetResponse, error) {
+	_, resp, err := S.get.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetResponse), nil
+}
+
+func (S *membersServer) Remove(ctx context.Context, req *pb.RemoveRequest) (*empty.Empty, error) {
+	_, resp, err := S.remove.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *membersServer) ListMembers(ctx context.Context, req *pb.ListMembersRequest) (*pb.ListMembersResponse, error) {
+	_, resp, err := S.listMembers.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.ListMembersResponse), nil
+}
+
+func (S *membersServer) ListOrganizations(ctx context.Context, req *pb.ListOrganizationsRequest) (*pb.ListOrganizationsResponse, error) {
+	_, resp, err := S.listOrganizations.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.ListOrganizationsResponse), nil
+}
diff --git a/pkg/members/transport/server.microgen.go b/pkg/members/transport/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..b1c40ac3039b2ce46685880e93651f20d4fe7d16
--- /dev/null
+++ b/pkg/members/transport/server.microgen.go
@@ -0,0 +1,60 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+
+	members "git.perx.ru/perxis/perxis-go/pkg/members"
+	endpoint "github.com/go-kit/kit/endpoint"
+)
+
+func Endpoints(svc members.Members) EndpointsSet {
+	return EndpointsSet{
+		GetEndpoint:               GetEndpoint(svc),
+		ListMembersEndpoint:       ListMembersEndpoint(svc),
+		ListOrganizationsEndpoint: ListOrganizationsEndpoint(svc),
+		RemoveEndpoint:            RemoveEndpoint(svc),
+		SetEndpoint:               SetEndpoint(svc),
+	}
+}
+
+func SetEndpoint(svc members.Members) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*SetRequest)
+		res0 := svc.Set(arg0, req.OrgId, req.UserId, req.Role)
+		return &SetResponse{}, res0
+	}
+}
+
+func GetEndpoint(svc members.Members) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetRequest)
+		res0, res1 := svc.Get(arg0, req.OrgId, req.UserId)
+		return &GetResponse{Role: res0}, res1
+	}
+}
+
+func RemoveEndpoint(svc members.Members) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*RemoveRequest)
+		res0 := svc.Remove(arg0, req.OrgId, req.UserId)
+		return &RemoveResponse{}, res0
+	}
+}
+
+func ListMembersEndpoint(svc members.Members) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*ListMembersRequest)
+		res0, res1 := svc.ListMembers(arg0, req.OrgId)
+		return &ListMembersResponse{Members: res0}, res1
+	}
+}
+
+func ListOrganizationsEndpoint(svc members.Members) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*ListOrganizationsRequest)
+		res0, res1 := svc.ListOrganizations(arg0, req.UserId)
+		return &ListOrganizationsResponse{Organizations: res0}, res1
+	}
+}
diff --git a/pkg/optional/optional.go b/pkg/optional/optional.go
new file mode 100644
index 0000000000000000000000000000000000000000..94e89bf6a04708abf853f2e8aaf8d7dbd9e99371
--- /dev/null
+++ b/pkg/optional/optional.go
@@ -0,0 +1,10 @@
+package optional
+
+var (
+	True  *bool = Bool(true)
+	False *bool = Bool(false)
+)
+
+func Bool(v bool) *bool {
+	return &v
+}
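A quick usage sketch for the helpers above, with a hypothetical Settings type whose pointer field distinguishes "not specified" from an explicit false:

```go
package main

import (
	"fmt"

	"git.perx.ru/perxis/perxis-go/pkg/optional"
)

// Settings is a hypothetical type: a nil Enabled means "not specified".
type Settings struct {
	Enabled *bool
}

func main() {
	s := Settings{}                 // Enabled is unset (nil)
	s.Enabled = optional.False      // explicitly false
	s.Enabled = optional.Bool(true) // equivalent to optional.True
	fmt.Println(s.Enabled != nil, *s.Enabled) // true true
}
```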
diff --git a/pkg/options/options.go b/pkg/options/options.go
new file mode 100644
index 0000000000000000000000000000000000000000..7f8b0cd624db7e6c4c35fe62333a67ba633fbe7e
--- /dev/null
+++ b/pkg/options/options.go
@@ -0,0 +1,122 @@
+package options
+
+import "time"
+
+// SortOptions defines how results are sorted
+type SortOptions struct {
+	Sort []string
+}
+
+// PaginationOptions controls pagination of the returned results
+type PaginationOptions struct {
+	PageNum  int
+	PageSize int
+}
+
+// FieldOptions controls which fields are included in or excluded from query results
+type FieldOptions struct {
+	// Fields lists the field names to include in or exclude from the query results.
+	// If `ExcludeFields` is not set, the result contains only the listed fields.
+	// If `ExcludeFields` is set, the result contains all fields except the listed ones.
+	Fields []string
+
+	// ExcludeFields, when set, means the fields listed in `Fields` are excluded from the results
+	ExcludeFields bool
+}
+
+// FindOptions combines the options applied to a find query
+type FindOptions struct {
+	SortOptions
+	PaginationOptions
+	FieldOptions
+}
+
+// NewFindOptions creates a FindOptions with the given pagination and sort settings
+func NewFindOptions(pageNum, pageSize int, sort ...string) *FindOptions {
+	return &FindOptions{
+		PaginationOptions: PaginationOptions{
+			PageNum:  pageNum,
+			PageSize: pageSize,
+		},
+		SortOptions: SortOptions{
+			Sort: sort,
+		},
+	}
+}
+
+// MergeFindOptions merges assorted option values (FindOptions, SortOptions, PaginationOptions, FieldOptions) into a single FindOptions
+func MergeFindOptions(opts ...interface{}) *FindOptions {
+	fo := &FindOptions{}
+	for _, opt := range opts {
+		if opt == nil {
+			continue
+		}
+
+		switch o := opt.(type) {
+		case FindOptions:
+			fo.SortOptions = MergeSortOptions(fo.SortOptions, o.SortOptions)
+			fo.PaginationOptions = MergePaginationOptions(fo.PaginationOptions, o.PaginationOptions)
+			fo.FieldOptions = MergeFieldOptions(fo.FieldOptions, o.FieldOptions)
+		case *FindOptions:
+			fo.SortOptions = MergeSortOptions(fo.SortOptions, o.SortOptions)
+			fo.PaginationOptions = MergePaginationOptions(fo.PaginationOptions, o.PaginationOptions)
+			fo.FieldOptions = MergeFieldOptions(fo.FieldOptions, o.FieldOptions)
+		case SortOptions:
+			fo.SortOptions = MergeSortOptions(fo.SortOptions, o)
+		case *SortOptions:
+			fo.SortOptions = MergeSortOptions(fo.SortOptions, *o)
+		case PaginationOptions:
+			fo.PaginationOptions = MergePaginationOptions(fo.PaginationOptions, o)
+		case *PaginationOptions:
+			fo.PaginationOptions = MergePaginationOptions(fo.PaginationOptions, *o)
+		case FieldOptions:
+			fo.FieldOptions = o
+		case *FieldOptions:
+			fo.FieldOptions = *o
+		}
+	}
+	return fo
+}
+
+type TimeFilter struct {
+	Before, After time.Time
+}
+
+// MergeSortOptions merges sort options by concatenating their sort keys
+func MergeSortOptions(options ...SortOptions) SortOptions {
+	fo := SortOptions{}
+	for _, opt := range options {
+		if len(opt.Sort) == 0 {
+			continue
+		}
+		fo.Sort = append(fo.Sort, opt.Sort...)
+	}
+	return fo
+}
+
+// MergePaginationOptions merges pagination options; the last non-empty one wins
+func MergePaginationOptions(options ...PaginationOptions) PaginationOptions {
+	fo := PaginationOptions{}
+	for _, opt := range options {
+		if opt.PageSize == 0 && opt.PageNum == 0 {
+			continue
+		}
+		fo.PageNum = opt.PageNum
+		fo.PageSize = opt.PageSize
+	}
+	return fo
+}
+
+// MergeFieldOptions merges field selection options.
+// The first option with a non-empty Fields list is used.
+func MergeFieldOptions(options ...FieldOptions) FieldOptions {
+	fo := FieldOptions{}
+	for _, opt := range options {
+		if len(opt.Fields) > 0 {
+			fo.Fields = opt.Fields
+			fo.ExcludeFields = opt.ExcludeFields
+			return fo
+		}
+	}
+	return fo
+}
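A short sketch of how the merge helpers above combine options coming from different call sites; the values are illustrative. Sort keys are concatenated, the last non-empty pagination wins, and a supplied FieldOptions replaces the current field selection:

```go
package main

import (
	"fmt"

	"git.perx.ru/perxis/perxis-go/pkg/options"
)

func main() {
	defaults := options.NewFindOptions(1, 20, "name") // page 1, 20 per page, sort by name
	page := &options.PaginationOptions{PageNum: 3, PageSize: 50}
	fields := options.FieldOptions{Fields: []string{"id", "name"}}

	fo := options.MergeFindOptions(defaults, page, fields)

	fmt.Println(fo.Sort)                 // [name]
	fmt.Println(fo.PageNum, fo.PageSize) // 3 50
	fmt.Println(fo.Fields)               // [id name]
}
```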
diff --git a/pkg/options/options_test.go b/pkg/options/options_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..981849a0e2625a8bd7e796a22eaa8529d606ef01
--- /dev/null
+++ b/pkg/options/options_test.go
@@ -0,0 +1,60 @@
+package options
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestOptions_MergePaginationOptions(t *testing.T) {
+
+	var tt = []struct {
+		name     string
+		options  []PaginationOptions
+		expected PaginationOptions
+	}{
+		{
+			name:     "Nil option",
+			options:  nil,
+			expected: PaginationOptions{},
+		},
+		{
+			name:     "Empty options",
+			options:  []PaginationOptions{},
+			expected: PaginationOptions{},
+		},
+		{
+			name:     "One option",
+			options:  []PaginationOptions{{PageNum: 10, PageSize: 100}},
+			expected: PaginationOptions{PageNum: 10, PageSize: 100},
+		},
+		{
+			name:     "Merge #1",
+			options:  []PaginationOptions{{PageNum: 0, PageSize: 0}, {PageNum: 10, PageSize: 100}},
+			expected: PaginationOptions{PageNum: 10, PageSize: 100},
+		},
+		{
+			name:     "Merge #2",
+			options:  []PaginationOptions{{PageNum: 10, PageSize: 100}, {PageNum: 0, PageSize: 0}},
+			expected: PaginationOptions{PageNum: 10, PageSize: 100},
+		},
+		{
+			name:     "Merge #3",
+			options:  []PaginationOptions{{PageNum: 0, PageSize: 0}, {PageNum: 10, PageSize: 100}, {PageNum: 0, PageSize: 0}},
+			expected: PaginationOptions{PageNum: 10, PageSize: 100},
+		},
+		{
+			name:     "Merge #4",
+			options:  []PaginationOptions{{PageNum: 10, PageSize: 100}, {}},
+			expected: PaginationOptions{PageNum: 10, PageSize: 100},
+		},
+	}
+
+	for _, v := range tt {
+
+		t.Run(v.name, func(t *testing.T) {
+			actual := MergePaginationOptions(v.options...)
+			assert.Equal(t, v.expected, actual)
+		})
+	}
+}
diff --git a/pkg/organizations/middleware/caching_middleware.go b/pkg/organizations/middleware/caching_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..2017c9913b41ae8fbeb05e116abdda037fb5667e
--- /dev/null
+++ b/pkg/organizations/middleware/caching_middleware.go
@@ -0,0 +1,62 @@
+package service
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+	service "git.perx.ru/perxis/perxis-go/pkg/organizations"
+)
+
+func CachingMiddleware(cache *cache.Cache) Middleware {
+	return func(next service.Organizations) service.Organizations {
+		return &cachingMiddleware{
+			cache: cache,
+			next:  next,
+		}
+	}
+}
+
+type cachingMiddleware struct {
+	cache *cache.Cache
+	next  service.Organizations
+}
+
+func (m cachingMiddleware) Create(ctx context.Context, org *service.Organization) (organization *service.Organization, err error) {
+	return m.next.Create(ctx, org)
+}
+
+func (m cachingMiddleware) Get(ctx context.Context, orgId string) (organization *service.Organization, err error) {
+
+	value, e := m.cache.Get(orgId)
+	if e == nil {
+		return value.(*service.Organization), err
+	}
+	organization, err = m.next.Get(ctx, orgId)
+	if err == nil {
+		m.cache.Set(orgId, organization)
+	}
+	return organization, err
+}
+
+func (m cachingMiddleware) Update(ctx context.Context, org *service.Organization) (err error) {
+
+	err = m.next.Update(ctx, org)
+	if err == nil {
+		m.cache.Remove(org.ID)
+	}
+	return err
+}
+
+func (m cachingMiddleware) Delete(ctx context.Context, orgId string) (err error) {
+
+	err = m.next.Delete(ctx, orgId)
+	if err == nil {
+		m.cache.Remove(orgId)
+	}
+	return err
+}
+
+func (m cachingMiddleware) Find(ctx context.Context, filter *service.Filter, opts *options.FindOptions) (organizations []*service.Organization, total int, err error) {
+	return m.next.Find(ctx, filter, opts)
+}
diff --git a/pkg/organizations/middleware/caching_middleware_test.go b/pkg/organizations/middleware/caching_middleware_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..59248ded4d85f904bf67e21956ad2df355706cdf
--- /dev/null
+++ b/pkg/organizations/middleware/caching_middleware_test.go
@@ -0,0 +1,119 @@
+package service
+
+import (
+	"context"
+	"testing"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/organizations"
+	mocksorgs "git.perx.ru/perxis/perxis-go/pkg/organizations/mocks"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/mock"
+	"github.com/stretchr/testify/require"
+)
+
+func TestOrganizationsCache(t *testing.T) {
+
+	const (
+		orgId = "orgId"
+		size  = 5
+		ttl   = 20 * time.Millisecond
+	)
+
+	errNotFound := errors.NotFound(errors.New("not found"))
+
+	ctx := context.Background()
+
+	t.Run("Get from cache", func(t *testing.T) {
+		orgs := &mocksorgs.Organizations{}
+		svc := CachingMiddleware(cache.NewCache(size, ttl))(orgs)
+
+		orgs.On("Get", mock.Anything, orgId).Return(&organizations.Organization{ID: orgId, Name: "Organization"}, nil).Once()
+
+		v1, err := svc.Get(ctx, orgId)
+		require.NoError(t, err)
+
+		v2, err := svc.Get(ctx, orgId)
+		require.NoError(t, err)
+		assert.Same(t, v1, v2, "expected the object to be served from the cache")
+
+		orgs.AssertExpectations(t)
+	})
+
+	t.Run("Invalidate cache", func(t *testing.T) {
+		t.Run("After Update", func(t *testing.T) {
+			orgs := &mocksorgs.Organizations{}
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(orgs)
+
+			orgs.On("Get", mock.Anything, orgId).Return(&organizations.Organization{ID: orgId, Name: "Organization"}, nil).Once()
+
+			v1, err := svc.Get(ctx, orgId)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, orgId)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "expected the object to be served from the cache")
+
+			orgs.On("Update", mock.Anything, mock.Anything).Return(nil).Once()
+			err = svc.Update(ctx, &organizations.Organization{ID: orgId, Name: "OrganizationUPD"})
+
+			orgs.On("Get", mock.Anything, orgId).Return(&organizations.Organization{ID: orgId, Name: "OrganizationUPD"}, nil).Once()
+
+			v3, err := svc.Get(ctx, orgId)
+			require.NoError(t, err)
+			assert.NotSame(t, v2, v3, "expected the cached object to be dropped after Update and re-fetched from the service")
+
+			orgs.AssertExpectations(t)
+		})
+
+		t.Run("After Delete", func(t *testing.T) {
+			orgs := &mocksorgs.Organizations{}
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(orgs)
+
+			orgs.On("Get", mock.Anything, orgId).Return(&organizations.Organization{ID: orgId, Name: "Organization"}, nil).Once()
+
+			v1, err := svc.Get(ctx, orgId)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, orgId)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "expected the object to be served from the cache")
+
+			orgs.On("Delete", mock.Anything, mock.Anything).Return(nil).Once()
+			err = svc.Delete(ctx, orgId)
+
+			orgs.On("Get", mock.Anything, orgId).Return(nil, errNotFound).Once()
+
+			_, err = svc.Get(ctx, orgId)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "expected the cached object to be dropped after Delete and the service error to be returned")
+
+		})
+
+		t.Run("After TTL expired", func(t *testing.T) {
+			orgs := &mocksorgs.Organizations{}
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(orgs)
+
+			orgs.On("Get", mock.Anything, orgId).Return(&organizations.Organization{ID: orgId, Name: "Organization"}, nil).Once()
+
+			v1, err := svc.Get(ctx, orgId)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, orgId)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "expected the object to be served from the cache")
+
+			time.Sleep(2 * ttl)
+
+			orgs.On("Get", mock.Anything, orgId).Return(&organizations.Organization{ID: orgId, Name: "Organization"}, nil).Once()
+
+			v3, err := svc.Get(ctx, orgId)
+			require.NoError(t, err)
+			assert.NotSame(t, v2, v3, "expected the cache entry to expire and the object to be re-fetched from the service")
+
+			orgs.AssertExpectations(t)
+		})
+	})
+}
diff --git a/pkg/organizations/middleware/error_logging_middleware.go b/pkg/organizations/middleware/error_logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..c9631f9144db6244044379219250ca334893d0d6
--- /dev/null
+++ b/pkg/organizations/middleware/error_logging_middleware.go
@@ -0,0 +1,81 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/error_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/organizations -i Organizations -t ../../../assets/templates/middleware/error_log -o error_logging_middleware.go -l ""
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+	"git.perx.ru/perxis/perxis-go/pkg/organizations"
+	"go.uber.org/zap"
+)
+
+// errorLoggingMiddleware implements organizations.Organizations that is instrumented with logging
+type errorLoggingMiddleware struct {
+	logger *zap.Logger
+	next   organizations.Organizations
+}
+
+// ErrorLoggingMiddleware instruments an implementation of the organizations.Organizations with simple logging
+func ErrorLoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next organizations.Organizations) organizations.Organizations {
+		return &errorLoggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *errorLoggingMiddleware) Create(ctx context.Context, org *organizations.Organization) (created *organizations.Organization, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Create(ctx, org)
+}
+
+func (m *errorLoggingMiddleware) Delete(ctx context.Context, orgId string) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Delete(ctx, orgId)
+}
+
+func (m *errorLoggingMiddleware) Find(ctx context.Context, filter *organizations.Filter, opts *options.FindOptions) (orgs []*organizations.Organization, total int, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Find(ctx, filter, opts)
+}
+
+func (m *errorLoggingMiddleware) Get(ctx context.Context, orgId string) (org *organizations.Organization, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Get(ctx, orgId)
+}
+
+func (m *errorLoggingMiddleware) Update(ctx context.Context, org *organizations.Organization) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Update(ctx, org)
+}
diff --git a/pkg/organizations/middleware/logging_middleware.go b/pkg/organizations/middleware/logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..6f33296d5cecbee19be4bff65da4280486cd1958
--- /dev/null
+++ b/pkg/organizations/middleware/logging_middleware.go
@@ -0,0 +1,215 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/access_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/organizations -i Organizations -t ../../../assets/templates/middleware/access_log -o logging_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/auth"
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+	"git.perx.ru/perxis/perxis-go/pkg/organizations"
+	"go.uber.org/zap"
+	"go.uber.org/zap/zapcore"
+)
+
+// loggingMiddleware implements organizations.Organizations that is instrumented with logging
+type loggingMiddleware struct {
+	logger *zap.Logger
+	next   organizations.Organizations
+}
+
+// LoggingMiddleware instruments an implementation of the organizations.Organizations with simple logging
+func LoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next organizations.Organizations) organizations.Organizations {
+		return &loggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *loggingMiddleware) Create(ctx context.Context, org *organizations.Organization) (created *organizations.Organization, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx": ctx,
+		"org": org} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Create.Request", fields...)
+
+	created, err = m.next.Create(ctx, org)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"created": created,
+		"err":     err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Create.Response", fields...)
+
+	return created, err
+}
+
+func (m *loggingMiddleware) Delete(ctx context.Context, orgId string) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":   ctx,
+		"orgId": orgId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Delete.Request", fields...)
+
+	err = m.next.Delete(ctx, orgId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Delete.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Find(ctx context.Context, filter *organizations.Filter, opts *options.FindOptions) (orgs []*organizations.Organization, total int, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":    ctx,
+		"filter": filter,
+		"opts":   opts} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Find.Request", fields...)
+
+	orgs, total, err = m.next.Find(ctx, filter, opts)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"orgs":  orgs,
+		"total": total,
+		"err":   err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Find.Response", fields...)
+
+	return orgs, total, err
+}
+
+func (m *loggingMiddleware) Get(ctx context.Context, orgId string) (org *organizations.Organization, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":   ctx,
+		"orgId": orgId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Request", fields...)
+
+	org, err = m.next.Get(ctx, orgId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"org": org,
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Response", fields...)
+
+	return org, err
+}
+
+func (m *loggingMiddleware) Update(ctx context.Context, org *organizations.Organization) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx": ctx,
+		"org": org} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Update.Request", fields...)
+
+	err = m.next.Update(ctx, org)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Update.Response", fields...)
+
+	return err
+}
diff --git a/pkg/organizations/middleware/middleware.go b/pkg/organizations/middleware/middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..fe3c3d645e19cc9a0c46742be61edec6bda3882e
--- /dev/null
+++ b/pkg/organizations/middleware/middleware.go
@@ -0,0 +1,28 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/middleware
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/organizations -i Organizations -t ../../../assets/templates/middleware/middleware -o middleware.go -l ""
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/organizations"
+	"go.uber.org/zap"
+)
+
+type Middleware func(organizations.Organizations) organizations.Organizations
+
+func WithLog(s organizations.Organizations, logger *zap.Logger, log_access bool) organizations.Organizations {
+	if logger == nil {
+		logger = zap.NewNop()
+	}
+
+	logger = logger.Named("Organizations")
+	s = ErrorLoggingMiddleware(logger)(s)
+	if log_access {
+		s = LoggingMiddleware(logger)(s)
+	}
+	s = RecoveringMiddleware(logger)(s)
+	return s
+}
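WithLog composes the generated middlewares so that recovery is outermost, access logging next, and error logging closest to the service. A sketch of stacking the caching middleware from this package on top; `impl`, `logger` and the cache parameters are illustrative assumptions:

```go
package example

import (
	"time"

	"git.perx.ru/perxis/perxis-go/pkg/cache"
	"git.perx.ru/perxis/perxis-go/pkg/organizations"
	middleware "git.perx.ru/perxis/perxis-go/pkg/organizations/middleware"
	"go.uber.org/zap"
)

// wire decorates a concrete Organizations implementation with logging,
// recovery and caching.
func wire(impl organizations.Organizations, logger *zap.Logger) organizations.Organizations {
	svc := middleware.WithLog(impl, logger, true) // recovery(access log(error log(impl)))
	// Caching sits on top so repeated Get calls for the same org skip the transport.
	return middleware.CachingMiddleware(cache.NewCache(100, 5*time.Minute))(svc)
}
```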
diff --git a/pkg/organizations/middleware/recovering_middleware.go b/pkg/organizations/middleware/recovering_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..35f3a6c5f7333f7fa3664c7101d31b72be270d33
--- /dev/null
+++ b/pkg/organizations/middleware/recovering_middleware.go
@@ -0,0 +1,92 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/recovery
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/organizations -i Organizations -t ../../../assets/templates/middleware/recovery -o recovering_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+	"git.perx.ru/perxis/perxis-go/pkg/organizations"
+	"go.uber.org/zap"
+)
+
+// recoveringMiddleware implements organizations.Organizations that is instrumented with logging
+type recoveringMiddleware struct {
+	logger *zap.Logger
+	next   organizations.Organizations
+}
+
+// RecoveringMiddleware instruments an implementation of the organizations.Organizations with simple logging
+func RecoveringMiddleware(logger *zap.Logger) Middleware {
+	return func(next organizations.Organizations) organizations.Organizations {
+		return &recoveringMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *recoveringMiddleware) Create(ctx context.Context, org *organizations.Organization) (created *organizations.Organization, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Create(ctx, org)
+}
+
+func (m *recoveringMiddleware) Delete(ctx context.Context, orgId string) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Delete(ctx, orgId)
+}
+
+func (m *recoveringMiddleware) Find(ctx context.Context, filter *organizations.Filter, opts *options.FindOptions) (orgs []*organizations.Organization, total int, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Find(ctx, filter, opts)
+}
+
+func (m *recoveringMiddleware) Get(ctx context.Context, orgId string) (org *organizations.Organization, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Get(ctx, orgId)
+}
+
+func (m *recoveringMiddleware) Update(ctx context.Context, org *organizations.Organization) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Update(ctx, org)
+}
diff --git a/pkg/organizations/mocks/Organizations.go b/pkg/organizations/mocks/Organizations.go
new file mode 100644
index 0000000000000000000000000000000000000000..7b7255897b5f33f44202ef8e79255a93ea7ee429
--- /dev/null
+++ b/pkg/organizations/mocks/Organizations.go
@@ -0,0 +1,120 @@
+// Code generated by mockery v2.7.4. DO NOT EDIT.
+
+package mocks
+
+import (
+	context "context"
+
+	organizations "git.perx.ru/perxis/perxis-go/pkg/organizations"
+	services "git.perx.ru/perxis/perxis-go/pkg/options"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// Organizations is an autogenerated mock type for the Organizations type
+type Organizations struct {
+	mock.Mock
+}
+
+// Create provides a mock function with given fields: ctx, org
+func (_m *Organizations) Create(ctx context.Context, org *organizations.Organization) (*organizations.Organization, error) {
+	ret := _m.Called(ctx, org)
+
+	var r0 *organizations.Organization
+	if rf, ok := ret.Get(0).(func(context.Context, *organizations.Organization) *organizations.Organization); ok {
+		r0 = rf(ctx, org)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*organizations.Organization)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, *organizations.Organization) error); ok {
+		r1 = rf(ctx, org)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Delete provides a mock function with given fields: ctx, orgId
+func (_m *Organizations) Delete(ctx context.Context, orgId string) error {
+	ret := _m.Called(ctx, orgId)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string) error); ok {
+		r0 = rf(ctx, orgId)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Find provides a mock function with given fields: ctx, filter, opts
+func (_m *Organizations) Find(ctx context.Context, filter *organizations.Filter, opts *services.FindOptions) ([]*organizations.Organization, int, error) {
+	ret := _m.Called(ctx, filter, opts)
+
+	var r0 []*organizations.Organization
+	if rf, ok := ret.Get(0).(func(context.Context, *organizations.Filter, *services.FindOptions) []*organizations.Organization); ok {
+		r0 = rf(ctx, filter, opts)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*organizations.Organization)
+		}
+	}
+
+	var r1 int
+	if rf, ok := ret.Get(1).(func(context.Context, *organizations.Filter, *services.FindOptions) int); ok {
+		r1 = rf(ctx, filter, opts)
+	} else {
+		r1 = ret.Get(1).(int)
+	}
+
+	var r2 error
+	if rf, ok := ret.Get(2).(func(context.Context, *organizations.Filter, *services.FindOptions) error); ok {
+		r2 = rf(ctx, filter, opts)
+	} else {
+		r2 = ret.Error(2)
+	}
+
+	return r0, r1, r2
+}
+
+// Get provides a mock function with given fields: ctx, orgId
+func (_m *Organizations) Get(ctx context.Context, orgId string) (*organizations.Organization, error) {
+	ret := _m.Called(ctx, orgId)
+
+	var r0 *organizations.Organization
+	if rf, ok := ret.Get(0).(func(context.Context, string) *organizations.Organization); ok {
+		r0 = rf(ctx, orgId)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*organizations.Organization)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string) error); ok {
+		r1 = rf(ctx, orgId)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Update provides a mock function with given fields: ctx, org
+func (_m *Organizations) Update(ctx context.Context, org *organizations.Organization) error {
+	ret := _m.Called(ctx, org)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *organizations.Organization) error); ok {
+		r0 = rf(ctx, org)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
diff --git a/pkg/organizations/organization.go b/pkg/organizations/organization.go
new file mode 100644
index 0000000000000000000000000000000000000000..b95daa6988710d26fcda200eebef8afeff9158df
--- /dev/null
+++ b/pkg/organizations/organization.go
@@ -0,0 +1,14 @@
+package organizations
+
+type Organization struct {
+	ID          string  `bson:"_id"`
+	Name        string  `bson:"name"`
+	Description string  `bson:"description"`
+	LogoURL     string  `bson:"logoUrl"`
+	OwnerID     *string `bson:"-"`
+}
+
+func (o *Organization) SetOwnerID(s string) *Organization {
+	o.OwnerID = &s
+	return o
+}
diff --git a/pkg/organizations/service.go b/pkg/organizations/service.go
new file mode 100644
index 0000000000000000000000000000000000000000..9ba3b9a91e6acd2353b1cebff342fa6a329547c9
--- /dev/null
+++ b/pkg/organizations/service.go
@@ -0,0 +1,25 @@
+package organizations
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+)
+
+// @microgen grpc
+// @protobuf git.perx.ru/perxis/perxis-go/proto/organizations
+// @grpc-addr account.organizations.Organizations
+type Organizations interface {
+	Create(ctx context.Context, org *Organization) (created *Organization, err error)
+
+	Get(ctx context.Context, orgId string) (org *Organization, err error)
+	Update(ctx context.Context, org *Organization) (err error)
+	Delete(ctx context.Context, orgId string) (err error)
+	Find(ctx context.Context, filter *Filter, opts *options.FindOptions) (orgs []*Organization, total int, err error)
+}
+
+// Filter defines criteria for finding organizations
+type Filter struct {
+	ID   []string
+	Name []string
+}
diff --git a/pkg/organizations/transport/client.microgen.go b/pkg/organizations/transport/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..00b14af754877e02fe3936269d72cb13c7b138da
--- /dev/null
+++ b/pkg/organizations/transport/client.microgen.go
@@ -0,0 +1,76 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+	"errors"
+
+	options "git.perx.ru/perxis/perxis-go/pkg/options"
+	organizations "git.perx.ru/perxis/perxis-go/pkg/organizations"
+	codes "google.golang.org/grpc/codes"
+	status "google.golang.org/grpc/status"
+)
+
+func (set EndpointsSet) Create(arg0 context.Context, arg1 *organizations.Organization) (res0 *organizations.Organization, res1 error) {
+	request := CreateRequest{Org: arg1}
+	response, res1 := set.CreateEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*CreateResponse).Created, res1
+}
+
+func (set EndpointsSet) Get(arg0 context.Context, arg1 string) (res0 *organizations.Organization, res1 error) {
+	request := GetRequest{OrgId: arg1}
+	response, res1 := set.GetEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*GetResponse).Org, res1
+}
+
+func (set EndpointsSet) Update(arg0 context.Context, arg1 *organizations.Organization) (res0 error) {
+	request := UpdateRequest{Org: arg1}
+	_, res0 = set.UpdateEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Delete(arg0 context.Context, arg1 string) (res0 error) {
+	request := DeleteRequest{OrgId: arg1}
+	_, res0 = set.DeleteEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Find(arg0 context.Context, arg1 *organizations.Filter, arg2 *options.FindOptions) (res0 []*organizations.Organization, res1 int, res2 error) {
+	request := FindRequest{
+		Filter: arg1,
+		Opts:   arg2,
+	}
+	response, res2 := set.FindEndpoint(arg0, &request)
+	if res2 != nil {
+		if e, ok := status.FromError(res2); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res2 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*FindResponse).Orgs, response.(*FindResponse).Total, res2
+}
diff --git a/pkg/organizations/transport/endpoints.microgen.go b/pkg/organizations/transport/endpoints.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..7e15457fef975b2f07611ef46786cf7b73f5bbee
--- /dev/null
+++ b/pkg/organizations/transport/endpoints.microgen.go
@@ -0,0 +1,14 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import endpoint "github.com/go-kit/kit/endpoint"
+
+// EndpointsSet implements the Organizations API and is used for transport purposes.
+type EndpointsSet struct {
+	CreateEndpoint endpoint.Endpoint
+	GetEndpoint    endpoint.Endpoint
+	UpdateEndpoint endpoint.Endpoint
+	DeleteEndpoint endpoint.Endpoint
+	FindEndpoint   endpoint.Endpoint
+}
diff --git a/pkg/organizations/transport/exchanges.microgen.go b/pkg/organizations/transport/exchanges.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..7ae098e5b5b555cfe834c55e2fe73c79805fdbfd
--- /dev/null
+++ b/pkg/organizations/transport/exchanges.microgen.go
@@ -0,0 +1,45 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	options "git.perx.ru/perxis/perxis-go/pkg/options"
+	organizations "git.perx.ru/perxis/perxis-go/pkg/organizations"
+)
+
+type (
+	CreateRequest struct {
+		Org *organizations.Organization `json:"org"`
+	}
+	CreateResponse struct {
+		Created *organizations.Organization `json:"created"`
+	}
+
+	GetRequest struct {
+		OrgId string `json:"org_id"`
+	}
+	GetResponse struct {
+		Org *organizations.Organization `json:"org"`
+	}
+
+	UpdateRequest struct {
+		Org *organizations.Organization `json:"org"`
+	}
+	// Formal exchange type, please do not delete.
+	UpdateResponse struct{}
+
+	DeleteRequest struct {
+		OrgId string `json:"org_id"`
+	}
+	// Formal exchange type, please do not delete.
+	DeleteResponse struct{}
+
+	FindRequest struct {
+		Filter *organizations.Filter `json:"filter"`
+		Opts   *options.FindOptions `json:"opts"`
+	}
+	FindResponse struct {
+		Orgs  []*organizations.Organization `json:"orgs"`
+		Total int                           `json:"total"`
+	}
+)
diff --git a/pkg/organizations/transport/grpc/client.microgen.go b/pkg/organizations/transport/grpc/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..92918a88884de0a621858b7e09c49c66d83e5cab
--- /dev/null
+++ b/pkg/organizations/transport/grpc/client.microgen.go
@@ -0,0 +1,54 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/organizations/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/organizations"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	grpc "google.golang.org/grpc"
+)
+
+func NewGRPCClient(conn *grpc.ClientConn, addr string, opts ...grpckit.ClientOption) transport.EndpointsSet {
+	if addr == "" {
+		addr = "account.organizations.Organizations"
+	}
+	return transport.EndpointsSet{
+		CreateEndpoint: grpckit.NewClient(
+			conn, addr, "Create",
+			_Encode_Create_Request,
+			_Decode_Create_Response,
+			pb.CreateResponse{},
+			opts...,
+		).Endpoint(),
+		DeleteEndpoint: grpckit.NewClient(
+			conn, addr, "Delete",
+			_Encode_Delete_Request,
+			_Decode_Delete_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		FindEndpoint: grpckit.NewClient(
+			conn, addr, "Find",
+			_Encode_Find_Request,
+			_Decode_Find_Response,
+			pb.FindResponse{},
+			opts...,
+		).Endpoint(),
+		GetEndpoint: grpckit.NewClient(
+			conn, addr, "Get",
+			_Encode_Get_Request,
+			_Decode_Get_Response,
+			pb.GetResponse{},
+			opts...,
+		).Endpoint(),
+		UpdateEndpoint: grpckit.NewClient(
+			conn, addr, "Update",
+			_Encode_Update_Request,
+			_Decode_Update_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+	}
+}
diff --git a/pkg/organizations/transport/grpc/protobuf_endpoint_converters.microgen.go b/pkg/organizations/transport/grpc/protobuf_endpoint_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..966189093b756b558406ebacf758da6c2a3f4839
--- /dev/null
+++ b/pkg/organizations/transport/grpc/protobuf_endpoint_converters.microgen.go
@@ -0,0 +1,225 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// Please, do not change functions names!
+package transportgrpc
+
+import (
+	"context"
+	"errors"
+
+	transport "git.perx.ru/perxis/perxis-go/pkg/organizations/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/organizations"
+	empty "github.com/golang/protobuf/ptypes/empty"
+)
+
+func _Encode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*transport.CreateRequest)
+	reqOrg, err := PtrOrganizationToProto(req.Org)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateRequest{Org: reqOrg}, nil
+}
+
+func _Encode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*transport.GetRequest)
+	return &pb.GetRequest{OrgId: req.OrgId}, nil
+}
+
+func _Encode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*transport.DeleteRequest)
+	return &pb.DeleteRequest{OrgId: req.OrgId}, nil
+}
+
+func _Encode_Find_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindRequest")
+	}
+	req := request.(*transport.FindRequest)
+	reqFilter, err := PtrFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOpts, err := PtrServicesFindOptionsToProto(req.Opts)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindRequest{
+		Filter: reqFilter,
+		Opts:   reqOpts,
+	}, nil
+}
+
+func _Encode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*transport.CreateResponse)
+	respCreated, err := PtrOrganizationToProto(resp.Created)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateResponse{Created: respCreated}, nil
+}
+
+func _Encode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*transport.GetResponse)
+	respOrg, err := PtrOrganizationToProto(resp.Org)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetResponse{Org: respOrg}, nil
+}
+
+func _Encode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Find_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindResponse")
+	}
+	resp := response.(*transport.FindResponse)
+	respOrgs, err := ListPtrOrganizationToProto(resp.Orgs)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindResponse{
+		Orgs:  respOrgs,
+		Total: int64(resp.Total),
+	}, nil
+}
+
+func _Decode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*pb.CreateRequest)
+	reqOrg, err := ProtoToPtrOrganization(req.Org)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateRequest{Org: reqOrg}, nil
+}
+
+func _Decode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*pb.GetRequest)
+	return &transport.GetRequest{OrgId: string(req.OrgId)}, nil
+}
+
+func _Decode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*pb.DeleteRequest)
+	return &transport.DeleteRequest{OrgId: string(req.OrgId)}, nil
+}
+
+func _Decode_Find_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindRequest")
+	}
+	req := request.(*pb.FindRequest)
+	reqFilter, err := ProtoToPtrFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOpts, err := ProtoToPtrServicesFindOptions(req.Opts)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindRequest{
+		Filter: reqFilter,
+		Opts:   reqOpts,
+	}, nil
+}
+
+func _Decode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*pb.CreateResponse)
+	respCreated, err := ProtoToPtrOrganization(resp.Created)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateResponse{Created: respCreated}, nil
+}
+
+func _Decode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*pb.GetResponse)
+	respOrg, err := ProtoToPtrOrganization(resp.Org)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetResponse{Org: respOrg}, nil
+}
+
+func _Decode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Find_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindResponse")
+	}
+	resp := response.(*pb.FindResponse)
+	respOrgs, err := ProtoToListPtrOrganization(resp.Orgs)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindResponse{
+		Orgs:  respOrgs,
+		Total: int(resp.Total),
+	}, nil
+}
+
+func _Encode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UpdateRequest")
+	}
+	req := request.(*transport.UpdateRequest)
+	reqOrg, err := PtrOrganizationToProto(req.Org)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.UpdateRequest{Org: reqOrg}, nil
+}
+
+func _Decode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UpdateRequest")
+	}
+	req := request.(*pb.UpdateRequest)
+	reqOrg, err := ProtoToPtrOrganization(req.Org)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.UpdateRequest{Org: reqOrg}, nil
+}
diff --git a/pkg/organizations/transport/grpc/protobuf_type_converters.microgen.go b/pkg/organizations/transport/grpc/protobuf_type_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..15fce8f5a7966f8fa2958fef30f62805923f512b
--- /dev/null
+++ b/pkg/organizations/transport/grpc/protobuf_type_converters.microgen.go
@@ -0,0 +1,111 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// It is better for you if you do not change functions names!
+// This file will never be overwritten.
+package transportgrpc
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+	"git.perx.ru/perxis/perxis-go/pkg/organizations"
+	"git.perx.ru/perxis/perxis-go/proto/common"
+	pb "git.perx.ru/perxis/perxis-go/proto/organizations"
+)
+
+func PtrOrganizationToProto(org *organizations.Organization) (*pb.Organization, error) {
+	if org == nil {
+		return nil, nil
+	}
+	po := &pb.Organization{
+		Id:          org.ID,
+		Name:        org.Name,
+		Description: org.Description,
+		LogoUrl:     org.LogoURL,
+		OwnerId:     org.OwnerID,
+	}
+
+	return po, nil
+}
+
+func ProtoToPtrOrganization(protoOrg *pb.Organization) (*organizations.Organization, error) {
+	if protoOrg == nil {
+		return nil, nil
+	}
+	o := &organizations.Organization{
+		ID:          protoOrg.Id,
+		Name:        protoOrg.Name,
+		Description: protoOrg.Description,
+		LogoURL:     protoOrg.LogoUrl,
+		OwnerID:     protoOrg.OwnerId,
+	}
+	return o, nil
+}
+
+func PtrFilterToProto(filter *organizations.Filter) (*pb.Filter, error) {
+	if filter == nil {
+		return nil, nil
+	}
+	return &pb.Filter{
+		Ids:   filter.ID,
+		Names: filter.Name,
+	}, nil
+}
+
+func ProtoToPtrFilter(protoFilter *pb.Filter) (*organizations.Filter, error) {
+	if protoFilter == nil {
+		return nil, nil
+	}
+	return &organizations.Filter{
+		ID:   protoFilter.Ids,
+		Name: protoFilter.Names,
+	}, nil
+}
+
+func PtrServicesFindOptionsToProto(opts *options.FindOptions) (*common.FindOptions, error) {
+	if opts == nil {
+		return nil, nil
+	}
+	return &common.FindOptions{
+		Sort:     opts.Sort,
+		PageNum:  int32(opts.PageNum),
+		PageSize: int32(opts.PageSize),
+	}, nil
+}
+
+func ProtoToPtrServicesFindOptions(protoOpts *common.FindOptions) (*options.FindOptions, error) {
+	if protoOpts == nil {
+		return nil, nil
+	}
+	return &options.FindOptions{
+		SortOptions: options.SortOptions{
+			Sort: protoOpts.Sort,
+		},
+		PaginationOptions: options.PaginationOptions{
+			PageNum:  int(protoOpts.PageNum),
+			PageSize: int(protoOpts.PageSize),
+		},
+	}, nil
+}
+
+func ListPtrOrganizationToProto(orgs []*organizations.Organization) ([]*pb.Organization, error) {
+	protoOrgs := make([]*pb.Organization, 0, len(orgs))
+	for _, o := range orgs {
+		op, err := PtrOrganizationToProto(o)
+		if err != nil {
+			return nil, err
+		}
+		protoOrgs = append(protoOrgs, op)
+	}
+	return protoOrgs, nil
+}
+
+func ProtoToListPtrOrganization(protoOrgs []*pb.Organization) ([]*organizations.Organization, error) {
+	orgs := make([]*organizations.Organization, 0, len(protoOrgs))
+	for _, op := range protoOrgs {
+		o, err := ProtoToPtrOrganization(op)
+		if err != nil {
+			return nil, err
+		}
+		orgs = append(orgs, o)
+	}
+	return orgs, nil
+}
diff --git a/pkg/organizations/transport/grpc/server.microgen.go b/pkg/organizations/transport/grpc/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..1cf24d998b6422ad418cfbb0a47a51eba6fe801d
--- /dev/null
+++ b/pkg/organizations/transport/grpc/server.microgen.go
@@ -0,0 +1,97 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// DO NOT EDIT.
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/organizations/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/organizations"
+	grpc "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	context "golang.org/x/net/context"
+)
+
+type organizationsServer struct {
+	create grpc.Handler
+	get    grpc.Handler
+	update grpc.Handler
+	delete grpc.Handler
+	find   grpc.Handler
+
+	pb.UnimplementedOrganizationsServer
+}
+
+func NewGRPCServer(endpoints *transport.EndpointsSet, opts ...grpc.ServerOption) pb.OrganizationsServer {
+	return &organizationsServer{
+		create: grpc.NewServer(
+			endpoints.CreateEndpoint,
+			_Decode_Create_Request,
+			_Encode_Create_Response,
+			opts...,
+		),
+		delete: grpc.NewServer(
+			endpoints.DeleteEndpoint,
+			_Decode_Delete_Request,
+			_Encode_Delete_Response,
+			opts...,
+		),
+		find: grpc.NewServer(
+			endpoints.FindEndpoint,
+			_Decode_Find_Request,
+			_Encode_Find_Response,
+			opts...,
+		),
+		get: grpc.NewServer(
+			endpoints.GetEndpoint,
+			_Decode_Get_Request,
+			_Encode_Get_Response,
+			opts...,
+		),
+		update: grpc.NewServer(
+			endpoints.UpdateEndpoint,
+			_Decode_Update_Request,
+			_Encode_Update_Response,
+			opts...,
+		),
+	}
+}
+
+func (S *organizationsServer) Create(ctx context.Context, req *pb.CreateRequest) (*pb.CreateResponse, error) {
+	_, resp, err := S.create.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.CreateResponse), nil
+}
+
+func (S *organizationsServer) Get(ctx context.Context, req *pb.GetRequest) (*pb.GetResponse, error) {
+	_, resp, err := S.get.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetResponse), nil
+}
+
+func (S *organizationsServer) Update(ctx context.Context, req *pb.UpdateRequest) (*empty.Empty, error) {
+	_, resp, err := S.update.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *organizationsServer) Delete(ctx context.Context, req *pb.DeleteRequest) (*empty.Empty, error) {
+	_, resp, err := S.delete.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *organizationsServer) Find(ctx context.Context, req *pb.FindRequest) (*pb.FindResponse, error) {
+	_, resp, err := S.find.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.FindResponse), nil
+}
diff --git a/pkg/organizations/transport/server.microgen.go b/pkg/organizations/transport/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..2e08f8f534f862552ecc5008b2f76277e702a9ab
--- /dev/null
+++ b/pkg/organizations/transport/server.microgen.go
@@ -0,0 +1,63 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+
+	organizations "git.perx.ru/perxis/perxis-go/pkg/organizations"
+	endpoint "github.com/go-kit/kit/endpoint"
+)
+
+func Endpoints(svc organizations.Organizations) EndpointsSet {
+	return EndpointsSet{
+		CreateEndpoint: CreateEndpoint(svc),
+		DeleteEndpoint: DeleteEndpoint(svc),
+		FindEndpoint:   FindEndpoint(svc),
+		GetEndpoint:    GetEndpoint(svc),
+		UpdateEndpoint: UpdateEndpoint(svc),
+	}
+}
+
+func CreateEndpoint(svc organizations.Organizations) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*CreateRequest)
+		res0, res1 := svc.Create(arg0, req.Org)
+		return &CreateResponse{Created: res0}, res1
+	}
+}
+
+func GetEndpoint(svc organizations.Organizations) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetRequest)
+		res0, res1 := svc.Get(arg0, req.OrgId)
+		return &GetResponse{Org: res0}, res1
+	}
+}
+
+func UpdateEndpoint(svc organizations.Organizations) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*UpdateRequest)
+		res0 := svc.Update(arg0, req.Org)
+		return &UpdateResponse{}, res0
+	}
+}
+
+func DeleteEndpoint(svc organizations.Organizations) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*DeleteRequest)
+		res0 := svc.Delete(arg0, req.OrgId)
+		return &DeleteResponse{}, res0
+	}
+}
+
+func FindEndpoint(svc organizations.Organizations) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*FindRequest)
+		res0, res1, res2 := svc.Find(arg0, req.Filter, req.Opts)
+		return &FindResponse{
+			Orgs:  res0,
+			Total: res1,
+		}, res2
+	}
+}
diff --git a/pkg/references/field.go b/pkg/references/field.go
new file mode 100644
index 0000000000000000000000000000000000000000..c98ed2298150e012545d55d5364f62c4f44eac1b
--- /dev/null
+++ b/pkg/references/field.go
@@ -0,0 +1,143 @@
+package references
+
+import (
+	"context"
+	"fmt"
+	"reflect"
+
+	"git.perx.ru/perxis/perxis-go/pkg/data"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+)
+
+const ReferenceTypeName = "reference"
+
+type ReferenceParameters struct {
+	AllowedCollections []string `json:"allowedCollections"`
+}
+
+func (p ReferenceParameters) Type() field.Type { return &ReferenceType{} }
+
+func (p ReferenceParameters) Clone(reset bool) field.Parameters {
+	if p.AllowedCollections != nil {
+		cols := make([]string, 0, len(p.AllowedCollections))
+		for _, c := range p.AllowedCollections {
+			cols = append(cols, c)
+		}
+		p.AllowedCollections = cols
+	}
+	return &p
+}
+
+type ReferenceType struct{}
+
+func NewReferenceType() *ReferenceType {
+	return &ReferenceType{}
+}
+
+func (t ReferenceType) Name() string { return ReferenceTypeName }
+
+func (t *ReferenceType) NewParameters() field.Parameters {
+	return &ReferenceParameters{}
+}
+
+func (t ReferenceType) Decode(_ context.Context, fld *field.Field, v interface{}) (interface{}, error) {
+	if v == nil {
+		return nil, nil
+	}
+
+	r, ok := v.(map[string]interface{})
+	if !ok {
+		return nil, errors.Errorf("ReferenceField decode error: incorrect type: \"%s\", expected \"map[string]interface{}\"", reflect.ValueOf(v).Kind())
+	}
+	id, ok1 := r["id"].(string)
+	if !ok1 || id == "" {
+		return nil, errors.Errorf("ReferenceField decode error: field \"id\" required")
+	}
+	collID, ok2 := r["collection_id"].(string)
+	if !ok2 || collID == "" {
+		return nil, errors.Errorf("ReferenceField decode error: field \"collection_id\" required")
+	}
+	disabled, _ := r["disabled"].(bool)
+
+	return &Reference{ID: id, CollectionID: collID, Disabled: disabled}, nil
+}
+
+func (t ReferenceType) Encode(_ context.Context, fld *field.Field, v interface{}) (interface{}, error) {
+	if v == nil {
+		return nil, nil
+	}
+
+	val, ok := v.(*Reference)
+
+	if !ok {
+		return nil, errors.Errorf("ReferenceField encode error: incorrect type: \"%s\", expected \"*Reference\"", reflect.ValueOf(v).Kind())
+	}
+	if val == nil {
+		return nil, nil
+	}
+	ref := map[string]interface{}{
+		"id":            val.ID,
+		"collection_id": val.CollectionID,
+		"disabled":      val.Disabled,
+	}
+	return ref, nil
+}
+
+func (t *ReferenceType) PreSave(ctx context.Context, f *field.Field, v interface{}, itemCtx *items.Context) (interface{}, bool, error) {
+	params, ok := f.Params.(*ReferenceParameters)
+	if !ok {
+		return nil, false, errors.New("field parameters required")
+	}
+
+	if v == nil {
+		return nil, false, nil
+	}
+	ref, ok := v.(*Reference)
+	if !ok {
+		return nil, false, fmt.Errorf("ReferenceField presave error: incorrect type: \"%s\", expected \"*Reference\"", reflect.ValueOf(v).Kind())
+	}
+
+	// Check that the reference targets one of the allowed collection types
+	if len(params.AllowedCollections) > 0 {
+		ok := false
+		for _, allowed := range params.AllowedCollections {
+
+			if data.GlobMatch(ref.CollectionID, allowed) {
+				ok = true
+				break
+			}
+		}
+		if !ok {
+			return nil, false, errors.Errorf("usage of collection \"%s\" not allowed", ref.CollectionID)
+		}
+	}
+
+	return ref, false, nil
+}
+
+// Field creates a new Field of type ReferenceType.
+// The ReferenceType must be created beforehand via `NewReferenceType` and registered with `field.Register`.
+func Field(allowedColls []string, o ...interface{}) *field.Field {
+	_, ok := field.GetType(ReferenceTypeName)
+	if !ok {
+		panic("field reference type not registered")
+	}
+
+	return field.NewField(&ReferenceParameters{AllowedCollections: allowedColls}, o...)
+}
+
+func (t *ReferenceType) IsEmpty(v interface{}) bool {
+	if v == nil {
+		return true
+	}
+
+	ref, ok := v.(*Reference)
+
+	return !ok || (ref.ID == "" && ref.CollectionID == "")
+}
+
+func init() {
+	field.Register(NewReferenceType())
+}
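+
+// Usage sketch (mirrors the tests below; ctx and the raw payload are assumed):
+// a reference field restricted to the "media" collection, decoded through the
+// schema package:
+//
+//	f := Field([]string{"media"}, validate.Required())
+//	v, err := schema.Decode(ctx, f, map[string]interface{}{
+//		"id": "11111111", "collection_id": "media",
+//	})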
diff --git a/pkg/references/field_test.go b/pkg/references/field_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..a51cb2ac65a3483cbe58169421734f17b6dce20c
--- /dev/null
+++ b/pkg/references/field_test.go
@@ -0,0 +1,300 @@
+package references
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	"testing"
+
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/items/mocks"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/validate"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestReferenceField_Decode(t *testing.T) {
+
+	tests := []struct {
+		name    string
+		field   *field.Field
+		data    interface{}
+		want    interface{}
+		wantErr bool
+	}{
+		{
+			"Correct",
+			Field(nil),
+			map[string]interface{}{"collection_id": "media", "id": "11111111"},
+			&Reference{ID: "11111111", CollectionID: "media"},
+			false,
+		},
+		{
+			"Invalid CollectionID",
+			Field(nil),
+			map[string]interface{}{"collection_id": "media", "id": 11111111},
+			"decode error: ReferenceField decode error: field \"id\" required",
+			true,
+		},
+		{
+			"Extra Field",
+			Field(nil),
+			map[string]interface{}{"collection_id": "media", "id": "11111111", "extra": true},
+			&Reference{ID: "11111111", CollectionID: "media"},
+			false,
+		},
+		{
+			"Enabled",
+			Field(nil),
+			map[string]interface{}{"collection_id": "media", "id": "11111111", "disabled": true},
+			&Reference{ID: "11111111", CollectionID: "media", Disabled: true},
+			false,
+		},
+		{
+			"Disabled",
+			Field(nil),
+			map[string]interface{}{"collection_id": "media", "id": "11111111", "disabled": false},
+			&Reference{ID: "11111111", CollectionID: "media", Disabled: false},
+			false,
+		},
+		{
+			"Disabled wrong type",
+			Field(nil),
+			map[string]interface{}{"collection_id": "media", "id": "11111111", "disabled": 3},
+			&Reference{ID: "11111111", CollectionID: "media", Disabled: false},
+			false,
+		},
+		{
+			"Missing Field",
+			Field(nil),
+			map[string]interface{}{"id": "11111111"},
+			"decode error: ReferenceField decode error: field \"collection_id\" required",
+			true,
+		},
+		{
+			"Invalid type",
+			Field(nil),
+			"string",
+			"decode error: ReferenceField decode error: incorrect type: \"string\", expected \"map[string]interface{}\"",
+			true,
+		},
+		{
+			"Invalid  element type",
+			Field(nil),
+			[]interface{}{"string"},
+			"decode error: ReferenceField decode error: incorrect type: \"slice\", expected \"map[string]interface{}\"",
+			true,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := schema.Decode(nil, tt.field, tt.data)
+			if tt.wantErr {
+				require.Error(t, err)
+				assert.EqualError(t, err, tt.want.(string), fmt.Sprintf("Decode() error = %v, want %v", err, tt.want.(string)))
+			}
+			if !tt.wantErr {
+				require.NoError(t, err)
+				assert.Equal(t, tt.want, got, fmt.Sprintf("Decode() got = %v, want %v", got, tt.want))
+			}
+		})
+	}
+}
+
+func TestReferenceField_Encode(t *testing.T) {
+
+	tests := []struct {
+		name    string
+		field   *field.Field
+		data    interface{}
+		want    interface{}
+		wantErr bool
+	}{
+		{
+			"Correct",
+			Field(nil),
+			&Reference{ID: "11111111", CollectionID: "media"},
+			map[string]interface{}{"collection_id": "media", "id": "11111111", "disabled": false},
+			false,
+		},
+		{
+			"Enabled",
+			Field(nil),
+			&Reference{ID: "11111111", CollectionID: "media", Disabled: true},
+			map[string]interface{}{"collection_id": "media", "id": "11111111", "disabled": true},
+			false,
+		},
+		{
+			"Disabled",
+			Field(nil),
+			&Reference{ID: "11111111", CollectionID: "media", Disabled: false},
+			map[string]interface{}{"collection_id": "media", "id": "11111111", "disabled": false},
+			false,
+		},
+		{
+			"From Map",
+			Field(nil),
+			map[string]interface{}{"id": "11111111", "collection_id": "media"},
+			"encode error: ReferenceField encode error: incorrect type: \"map\", expected \"*Reference\"",
+			true,
+		},
+		{
+			"Invalid type",
+			Field(nil),
+			"string",
+			"encode error: ReferenceField encode error: incorrect type: \"string\", expected \"*Reference\"",
+			true,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := schema.Encode(nil, tt.field, tt.data)
+			if tt.wantErr {
+				require.Error(t, err)
+				assert.EqualError(t, err, tt.want.(string), fmt.Sprintf("Encode() error = %v, want %v", err, tt.want.(string)))
+			}
+			if !tt.wantErr {
+				require.NoError(t, err)
+				assert.Equal(t, tt.want, got, fmt.Sprintf("Encode() got = %v, want %v", got, tt.want))
+			}
+		})
+	}
+}
+
+func TestReferenceField_PreSave(t *testing.T) {
+	ctx := context.Background()
+	rt := NewReferenceType()
+
+	t.Run("Nil Value", func(t *testing.T) {
+		svc := &mocks.Items{}
+		itemCtx := &items.Context{Items: svc, SpaceID: "sp", EnvID: "env"}
+		f := Field(nil)
+		_, _, err := rt.PreSave(ctx, f, nil, itemCtx)
+		require.NoError(t, err)
+		svc.AssertExpectations(t)
+	})
+	t.Run("Nil Context", func(t *testing.T) {
+		svc := &mocks.Items{}
+		f := Field(nil)
+		ref := &Reference{
+			ID: "111", CollectionID: "media",
+		}
+		_, _, err := rt.PreSave(ctx, f, ref, nil)
+		require.NoError(t, err)
+		svc.AssertExpectations(t)
+	})
+	t.Run("Referenced Items Exist", func(t *testing.T) {
+		t.Run("Correct", func(t *testing.T) {
+			svc := &mocks.Items{}
+			itemCtx := &items.Context{Items: svc, SpaceID: "sp", EnvID: "env"}
+
+			f := Field(nil)
+			ref := &Reference{
+				ID: "111", CollectionID: "media",
+			}
+			_, _, err := rt.PreSave(ctx, f, ref, itemCtx)
+			require.NoError(t, err)
+			svc.AssertExpectations(t)
+		})
+		t.Run("Item Not Found", func(t *testing.T) {
+			svc := &mocks.Items{}
+
+			itemCtx := &items.Context{Items: svc, SpaceID: "sp", EnvID: "env"}
+
+			f := Field(nil)
+			ref := &Reference{
+				ID: "111", CollectionID: "media",
+			}
+			_, _, err := rt.PreSave(ctx, f, ref, itemCtx)
+			require.NoError(t, err)
+			svc.AssertExpectations(t)
+		})
+	})
+	t.Run("Allowed Types", func(t *testing.T) {
+		t.Run("Correct", func(t *testing.T) {
+			svc := &mocks.Items{}
+
+			itemCtx := &items.Context{Items: svc, SpaceID: "sp", EnvID: "env"}
+
+			f := Field([]string{"media"})
+
+			ref := &Reference{
+				ID: "111", CollectionID: "media",
+			}
+
+			_, _, err := rt.PreSave(ctx, f, ref, itemCtx)
+			require.NoError(t, err)
+			svc.AssertExpectations(t)
+		})
+		t.Run("Not Allowed", func(t *testing.T) {
+			svc := &mocks.Items{}
+			f := Field([]string{"cars", "motorbikes"})
+			itemCtx := &items.Context{Items: svc, SpaceID: "sp", EnvID: "env"}
+			ref := &Reference{
+				ID: "111", CollectionID: "media",
+			}
+			_, _, err := rt.PreSave(ctx, f, ref, itemCtx)
+			require.Error(t, err)
+			assert.Equal(t, "usage of collection \"media\" not allowed", err.Error())
+			svc.AssertExpectations(t)
+		})
+	})
+}
+
+func TestReferenceField_Validate(t *testing.T) {
+	rt := NewReferenceType()
+	field.Register(rt)
+
+	t.Run("Max Elements", func(t *testing.T) {
+		t.Run("Correct", func(t *testing.T) {
+			f := Field(nil, validate.MaxItems(1))
+			refs := []*Reference{
+				{ID: "111", CollectionID: "media"},
+			}
+			err := validate.Validate(nil, f, refs)
+			require.NoError(t, err)
+		})
+		t.Run("Limit exceeded", func(t *testing.T) {
+			f := Field(nil, validate.MaxItems(1))
+			refs := []*Reference{
+				{ID: "111", CollectionID: "media"},
+				{ID: "222", CollectionID: "media"},
+			}
+			err := validate.Validate(nil, f, refs)
+			require.Error(t, err)
+			require.Contains(t, err.Error(), "validation error: maximum elements number is 1")
+		})
+	})
+	t.Run("Required", func(t *testing.T) {
+		t.Run("Correct", func(t *testing.T) {
+			f := Field(nil, validate.Required())
+			ref := &Reference{ID: "111", CollectionID: "media"}
+			err := validate.Validate(nil, f, ref)
+			require.NoError(t, err)
+		})
+		t.Run("Empty", func(t *testing.T) {
+			f := Field(nil, validate.Required())
+			ref := &Reference{}
+			err := validate.Validate(nil, f, ref)
+			require.Error(t, err)
+			require.Contains(t, err.Error(), "validation error: value is required")
+		})
+	})
+}
+
+func TestReference_JSON(t *testing.T) {
+	fld := Field([]string{"col1"}).AddOptions(validate.MaxItems(2))
+
+	b, err := json.MarshalIndent(fld, "", "  ")
+	require.NoError(t, err)
+
+	res := field.NewField(nil)
+	err = json.Unmarshal(b, res)
+	require.NoError(t, err)
+
+	assert.Equal(t, fld, res)
+}
diff --git a/pkg/references/middleware/client_encode_middleware.go b/pkg/references/middleware/client_encode_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..1cd9cb2f22340a5d90feb95bd00accb1bae39e0f
--- /dev/null
+++ b/pkg/references/middleware/client_encode_middleware.go
@@ -0,0 +1,42 @@
+package service
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/collections"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/references"
+)
+
+// ClientEncodeMiddleware performs encode/decode operations on the data passed through the client
+func ClientEncodeMiddleware(colls collections.Collections) Middleware {
+	return func(refs references.References) references.References {
+		return &encodeDecodeMiddleware{
+			next:  refs,
+			colls: colls,
+		}
+	}
+}
+
+type encodeDecodeMiddleware struct {
+	next  references.References
+	colls collections.Collections
+}
+
+func (m *encodeDecodeMiddleware) Get(ctx context.Context, spaceId, envId string, refs []*references.Reference) (items []*items.Item, notfound []*references.Reference, err error) {
+	items, notfound, err = m.next.Get(ctx, spaceId, envId, refs)
+	if err == nil && len(items) > 0 {
+		for i, item := range items {
+			col, err := m.colls.Get(ctx, item.SpaceID, item.EnvID, item.CollectionID)
+			if err != nil {
+				return nil, nil, err
+			}
+
+			if item, err = item.Decode(ctx, col.Schema); err != nil {
+				return nil, nil, err
+			}
+			items[i] = item
+		}
+	}
+	return
+}
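+
+// Wiring sketch (assumes collections and references gRPC clients are already
+// constructed as collsClient and refsClient):
+//
+//	refs := ClientEncodeMiddleware(collsClient)(refsClient)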
diff --git a/pkg/references/middleware/error_logging_middleware.go b/pkg/references/middleware/error_logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..9a62947f240842c6afb099a9b03cc966d9bd99b5
--- /dev/null
+++ b/pkg/references/middleware/error_logging_middleware.go
@@ -0,0 +1,41 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/error_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/references -i References -t ../../../assets/templates/middleware/error_log -o error_logging_middleware.go -l ""
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/references"
+	"go.uber.org/zap"
+)
+
+// errorLoggingMiddleware implements references.References that is instrumented with logging
+type errorLoggingMiddleware struct {
+	logger *zap.Logger
+	next   references.References
+}
+
+// ErrorLoggingMiddleware instruments an implementation of the references.References with simple logging
+func ErrorLoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next references.References) references.References {
+		return &errorLoggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *errorLoggingMiddleware) Get(ctx context.Context, spaceId string, envId string, references []*references.Reference) (items []*items.Item, notfound []*references.Reference, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Get(ctx, spaceId, envId, references)
+}
diff --git a/pkg/references/middleware/logging_middleware.go b/pkg/references/middleware/logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..62a0e9d5cf26c6ab0d131fff2fd443cffd8fb8f3
--- /dev/null
+++ b/pkg/references/middleware/logging_middleware.go
@@ -0,0 +1,74 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/access_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/references -i References -t ../../../assets/templates/middleware/access_log -o logging_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/auth"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/references"
+	"go.uber.org/zap"
+	"go.uber.org/zap/zapcore"
+)
+
+// loggingMiddleware implements references.References that is instrumented with logging
+type loggingMiddleware struct {
+	logger *zap.Logger
+	next   references.References
+}
+
+// LoggingMiddleware instruments an implementation of the references.References with simple logging
+func LoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next references.References) references.References {
+		return &loggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *loggingMiddleware) Get(ctx context.Context, spaceId string, envId string, references []*references.Reference) (items []*items.Item, notfound []*references.Reference, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":        ctx,
+		"spaceId":    spaceId,
+		"envId":      envId,
+		"references": references} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Request", fields...)
+
+	items, notfound, err = m.next.Get(ctx, spaceId, envId, references)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"items":    items,
+		"notfound": notfound,
+		"err":      err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Response", fields...)
+
+	return items, notfound, err
+}
diff --git a/pkg/references/middleware/middleware.go b/pkg/references/middleware/middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..dfed8dc0c821998df97d414876b6839a9961f6b0
--- /dev/null
+++ b/pkg/references/middleware/middleware.go
@@ -0,0 +1,28 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/middleware
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/references -i References -t ../../../assets/templates/middleware/middleware -o middleware.go -l ""
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/references"
+	"go.uber.org/zap"
+)
+
+type Middleware func(references.References) references.References
+
+func WithLog(s references.References, logger *zap.Logger, log_access bool) references.References {
+	if logger == nil {
+		logger = zap.NewNop()
+	}
+
+	logger = logger.Named("References")
+	s = ErrorLoggingMiddleware(logger)(s)
+	if log_access {
+		s = LoggingMiddleware(logger)(s)
+	}
+	s = RecoveringMiddleware(logger)(s)
+	return s
+}
diff --git a/pkg/references/middleware/recovering_middleware.go b/pkg/references/middleware/recovering_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..b1a4eb6b629e5a48f7508edbe34f5450d5f52b10
--- /dev/null
+++ b/pkg/references/middleware/recovering_middleware.go
@@ -0,0 +1,44 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/recovery
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/references -i References -t ../../../assets/templates/middleware/recovery -o recovering_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	"git.perx.ru/perxis/perxis-go/pkg/references"
+	"go.uber.org/zap"
+)
+
+// recoveringMiddleware implements references.References that is instrumented with logging
+type recoveringMiddleware struct {
+	logger *zap.Logger
+	next   references.References
+}
+
+// RecoveringMiddleware instruments an implementation of the references.References with simple logging
+func RecoveringMiddleware(logger *zap.Logger) Middleware {
+	return func(next references.References) references.References {
+		return &recoveringMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *recoveringMiddleware) Get(ctx context.Context, spaceId string, envId string, references []*references.Reference) (items []*items.Item, notfound []*references.Reference, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Get(ctx, spaceId, envId, references)
+}
diff --git a/pkg/references/mocks/References.go b/pkg/references/mocks/References.go
new file mode 100644
index 0000000000000000000000000000000000000000..c1ead2b49f11fd6492e3ce8f9eb5319cf3b208bc
--- /dev/null
+++ b/pkg/references/mocks/References.go
@@ -0,0 +1,63 @@
+// Code generated by mockery v2.14.0. DO NOT EDIT.
+
+package mocks
+
+import (
+	context "context"
+
+	items "git.perx.ru/perxis/perxis-go/pkg/items"
+	references "git.perx.ru/perxis/perxis-go/pkg/references"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// References is an autogenerated mock type for the References type
+type References struct {
+	mock.Mock
+}
+
+// Get provides a mock function with given fields: ctx, spaceId, envId, _a3
+func (_m *References) Get(ctx context.Context, spaceId string, envId string, _a3 []*references.Reference) ([]*items.Item, []*references.Reference, error) {
+	ret := _m.Called(ctx, spaceId, envId, _a3)
+
+	var r0 []*items.Item
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, []*references.Reference) []*items.Item); ok {
+		r0 = rf(ctx, spaceId, envId, _a3)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*items.Item)
+		}
+	}
+
+	var r1 []*references.Reference
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, []*references.Reference) []*references.Reference); ok {
+		r1 = rf(ctx, spaceId, envId, _a3)
+	} else {
+		if ret.Get(1) != nil {
+			r1 = ret.Get(1).([]*references.Reference)
+		}
+	}
+
+	var r2 error
+	if rf, ok := ret.Get(2).(func(context.Context, string, string, []*references.Reference) error); ok {
+		r2 = rf(ctx, spaceId, envId, _a3)
+	} else {
+		r2 = ret.Error(2)
+	}
+
+	return r0, r1, r2
+}
+
+type mockConstructorTestingTNewReferences interface {
+	mock.TestingT
+	Cleanup(func())
+}
+
+// NewReferences creates a new instance of References. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+func NewReferences(t mockConstructorTestingTNewReferences) *References {
+	mock := &References{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
diff --git a/pkg/references/reference.go b/pkg/references/reference.go
new file mode 100644
index 0000000000000000000000000000000000000000..12dac1865a17a39ac6876aa4390f89d408e4f76c
--- /dev/null
+++ b/pkg/references/reference.go
@@ -0,0 +1,102 @@
+package references
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	pb "git.perx.ru/perxis/perxis-go/proto/references"
+	"go.mongodb.org/mongo-driver/bson"
+)
+
+type Reference struct {
+	ID           string `json:"id" bson:"id" mapstructure:"id"`
+	CollectionID string `json:"collection_id" bson:"collection_id" mapstructure:"collection_id"`
+	Disabled     bool   `json:"disabled,omitempty" bson:"disabled,omitempty" mapstructure:"disabled"`
+}
+
+func (r *Reference) MarshalBSON() ([]byte, error) {
+	d := bson.D{
+		{Key: "id", Value: r.ID},
+		{Key: "collection_id", Value: r.CollectionID},
+	}
+
+	if r.Disabled {
+		d = append(d, bson.E{Key: "disabled", Value: true})
+	}
+
+	return bson.Marshal(d)
+}
+
+func ReferenceFromPB(refPB *pb.Reference) *Reference {
+	return &Reference{
+		ID:           refPB.Id,
+		CollectionID: refPB.CollectionId,
+		Disabled:     refPB.Disabled,
+	}
+}
+
+func ReferenceFromItem(item *items.Item) *Reference {
+	if item == nil {
+		return nil
+	}
+
+	return &Reference{
+		ID:           item.ID,
+		CollectionID: item.CollectionID,
+	}
+}
+
+func ReferenceToPB(ref *Reference) *pb.Reference {
+	return &pb.Reference{
+		Id:           ref.ID,
+		CollectionId: ref.CollectionID,
+		Disabled:     ref.Disabled,
+	}
+}
+
+func ReferenceListFromPB(listPB []*pb.Reference) []*Reference {
+	list := make([]*Reference, 0, len(listPB))
+	for _, refPB := range listPB {
+		list = append(list, ReferenceFromPB(refPB))
+	}
+	return list
+}
+
+func (r *Reference) String() string {
+	if r == nil {
+		return ""
+	}
+	return r.CollectionID + "." + r.ID
+}
+
+func (r *Reference) Equal(r1 *Reference) bool {
+	return r == r1 || (r != nil && r1 != nil && r.CollectionID == r1.CollectionID && r.ID == r1.ID && r.Disabled == r1.Disabled)
+}
+
+func EqualArrays(sr1, sr2 []*Reference) bool {
+	if len(sr1) != len(sr2) {
+		return false
+	}
+	for i, r := range sr1 {
+		if !r.Equal(sr2[i]) {
+			return false
+		}
+	}
+	return true
+}
+
+func (r *Reference) IsValid() bool {
+	return r != nil && r.ID != "" && r.CollectionID != "" && !r.Disabled
+}
+
+func (r *Reference) Fetch(i interface{}) interface{} {
+	p, _ := i.(string)
+	switch p {
+	case "id":
+		return r.ID
+	case "collection_id":
+		return r.CollectionID
+	case "disabled":
+		return r.Disabled
+	default:
+		panic("unknown parameter")
+	}
+}
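+
+// Fetch sketch (illustrative): only the three known keys are supported and
+// anything else panics, so callers should pass field names verbatim:
+//
+//	r := &Reference{ID: "111", CollectionID: "media"}
+//	_ = r.Fetch("collection_id") // "media"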
diff --git a/pkg/references/service.go b/pkg/references/service.go
new file mode 100644
index 0000000000000000000000000000000000000000..14e2e8eb6f6eb0558033046840a5e9c2cda7f387
--- /dev/null
+++ b/pkg/references/service.go
@@ -0,0 +1,14 @@
+package references
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+)
+
+// @microgen grpc
+// @protobuf git.perx.ru/perxis/perxis-go/proto/references
+// @grpc-addr content.references.References
+type References interface {
+	Get(ctx context.Context, spaceId, envId string, references []*Reference) (items []*items.Item, notfound []*Reference, err error)
+}
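+
+// Call sketch (illustrative; refs is any References implementation, e.g. the
+// generated gRPC client):
+//
+//	found, notfound, err := refs.Get(ctx, "space", "env", []*Reference{
+//		{ID: "11111111", CollectionID: "media"},
+//	})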
diff --git a/pkg/references/transport/client.microgen.go b/pkg/references/transport/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..a79d9192b857e3360087ddae86298e880df93be9
--- /dev/null
+++ b/pkg/references/transport/client.microgen.go
@@ -0,0 +1,29 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+	"errors"
+
+	items "git.perx.ru/perxis/perxis-go/pkg/items"
+	references "git.perx.ru/perxis/perxis-go/pkg/references"
+	codes "google.golang.org/grpc/codes"
+	status "google.golang.org/grpc/status"
+)
+
+func (set EndpointsSet) Get(arg0 context.Context, arg1 string, arg2 string, arg3 []*references.Reference) (res0 []*items.Item, res1 []*references.Reference, res2 error) {
+	request := GetRequest{
+		EnvId:      arg2,
+		References: arg3,
+		SpaceId:    arg1,
+	}
+	response, res2 := set.GetEndpoint(arg0, &request)
+	if res2 != nil {
+		if e, ok := status.FromError(res2); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res2 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*GetResponse).Items, response.(*GetResponse).Notfound, res2
+}
diff --git a/pkg/references/transport/endpoints.microgen.go b/pkg/references/transport/endpoints.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..737752a45beef781518c38424e8fe8c31ac47d6a
--- /dev/null
+++ b/pkg/references/transport/endpoints.microgen.go
@@ -0,0 +1,10 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import endpoint "github.com/go-kit/kit/endpoint"
+
+// EndpointsSet implements the References API and is used for transport purposes.
+type EndpointsSet struct {
+	GetEndpoint endpoint.Endpoint
+}
diff --git a/pkg/references/transport/exchanges.microgen.go b/pkg/references/transport/exchanges.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..bd31aa2d3f78b6e0d321eb7ce602501056842d16
--- /dev/null
+++ b/pkg/references/transport/exchanges.microgen.go
@@ -0,0 +1,20 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	items "git.perx.ru/perxis/perxis-go/pkg/items"
+	references "git.perx.ru/perxis/perxis-go/pkg/references"
+)
+
+type (
+	GetRequest struct {
+		SpaceId    string                  `json:"space_id"`
+		EnvId      string                  `json:"env_id"`
+		References []*references.Reference `json:"references"`
+	}
+	GetResponse struct {
+		Items    []*items.Item           `json:"items"`
+		Notfound []*references.Reference `json:"notfound"`
+	}
+)
diff --git a/pkg/references/transport/grpc/client.microgen.go b/pkg/references/transport/grpc/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..f475ea9cb27f040fbcdb32a06b8893f7975c46ec
--- /dev/null
+++ b/pkg/references/transport/grpc/client.microgen.go
@@ -0,0 +1,23 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/references/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/references"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+	grpc "google.golang.org/grpc"
+)
+
+func NewGRPCClient(conn *grpc.ClientConn, addr string, opts ...grpckit.ClientOption) transport.EndpointsSet {
+	if addr == "" {
+		addr = "content.references.References"
+	}
+	return transport.EndpointsSet{GetEndpoint: grpckit.NewClient(
+		conn, addr, "Get",
+		_Encode_Get_Request,
+		_Decode_Get_Response,
+		pb.GetResponse{},
+		opts...,
+	).Endpoint()}
+}
diff --git a/pkg/references/transport/grpc/protobuf_endpoint_converters.microgen.go b/pkg/references/transport/grpc/protobuf_endpoint_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..d47c0574bb55f654778bafac1035fbb6ba018d0d
--- /dev/null
+++ b/pkg/references/transport/grpc/protobuf_endpoint_converters.microgen.go
@@ -0,0 +1,82 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// Please, do not change functions names!
+package transportgrpc
+
+import (
+	"context"
+	"errors"
+
+	transport "git.perx.ru/perxis/perxis-go/pkg/references/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/references"
+)
+
+func _Encode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*transport.GetRequest)
+	reqReferences, err := ListPtrReferenceToProto(req.References)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetRequest{
+		EnvId:      req.EnvId,
+		References: reqReferences,
+		SpaceId:    req.SpaceId,
+	}, nil
+}
+
+func _Encode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*transport.GetResponse)
+	respItems, err := ListPtrItemsItemToProto(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	respNotfound, err := ListPtrReferenceToProto(resp.Notfound)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetResponse{
+		Items:    respItems,
+		Notfound: respNotfound,
+	}, nil
+}
+
+func _Decode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*pb.GetRequest)
+	reqReferences, err := ProtoToListPtrReference(req.References)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetRequest{
+		EnvId:      string(req.EnvId),
+		References: reqReferences,
+		SpaceId:    string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*pb.GetResponse)
+	respItems, err := ProtoToListPtrItemsItem(resp.Items)
+	if err != nil {
+		return nil, err
+	}
+	respNotfound, err := ProtoToListPtrReference(resp.Notfound)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetResponse{
+		Items:    respItems,
+		Notfound: respNotfound,
+	}, nil
+}
diff --git a/pkg/references/transport/grpc/protobuf_type_converters.microgen.go b/pkg/references/transport/grpc/protobuf_type_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..549d11725e085c4a35aaa047a2a4b1e9c43355df
--- /dev/null
+++ b/pkg/references/transport/grpc/protobuf_type_converters.microgen.go
@@ -0,0 +1,71 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// It is better for you if you do not change functions names!
+// This file will never be overwritten.
+package transportgrpc
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	itemstransportgrpc "git.perx.ru/perxis/perxis-go/pkg/items/transport/grpc"
+	service "git.perx.ru/perxis/perxis-go/pkg/references"
+	itemspb "git.perx.ru/perxis/perxis-go/proto/items"
+	pb "git.perx.ru/perxis/perxis-go/proto/references"
+)
+
+func PtrItemToReference(ref *service.Reference) (*pb.Reference, error) {
+	if ref == nil {
+		return nil, nil
+	}
+
+	protoRef := &pb.Reference{
+		Id:           ref.ID,
+		CollectionId: ref.CollectionID,
+	}
+
+	return protoRef, nil
+}
+
+func ListPtrReferenceToProto(refs []*service.Reference) ([]*pb.Reference, error) {
+	protoRefs := make([]*pb.Reference, 0, len(refs))
+	for _, ref := range refs {
+		pr, err := PtrItemToReference(ref)
+		if err != nil {
+			return nil, err
+		}
+		protoRefs = append(protoRefs, pr)
+	}
+	return protoRefs, nil
+}
+
+func ProtoToPtrReference(protoRef *pb.Reference) (*service.Reference, error) {
+	if protoRef == nil {
+		return nil, nil
+	}
+
+	ref := &service.Reference{
+		ID:           protoRef.Id,
+		CollectionID: protoRef.CollectionId,
+	}
+
+	return ref, nil
+}
+
+func ProtoToListPtrReference(protoRefs []*pb.Reference) ([]*service.Reference, error) {
+	refs := make([]*service.Reference, 0, len(protoRefs))
+	for _, ref := range protoRefs {
+		r, err := ProtoToPtrReference(ref)
+		if err != nil {
+			return nil, err
+		}
+		refs = append(refs, r)
+	}
+	return refs, nil
+}
+
+func ListPtrItemsItemToProto(items []*items.Item) ([]*itemspb.Item, error) {
+	return itemstransportgrpc.ListPtrItemToProto(items)
+}
+
+func ProtoToListPtrItemsItem(protoItems []*itemspb.Item) ([]*items.Item, error) {
+	return itemstransportgrpc.ProtoToListPtrItem(protoItems)
+}
diff --git a/pkg/references/transport/grpc/server.microgen.go b/pkg/references/transport/grpc/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..960cf20350072d36498b2bb57edb8c594d193d28
--- /dev/null
+++ b/pkg/references/transport/grpc/server.microgen.go
@@ -0,0 +1,34 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// DO NOT EDIT.
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/references/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/references"
+	grpc "github.com/go-kit/kit/transport/grpc"
+	context "golang.org/x/net/context"
+)
+
+type referencesServer struct {
+	get grpc.Handler
+
+	pb.UnsafeReferencesServer
+}
+
+func NewGRPCServer(endpoints *transport.EndpointsSet, opts ...grpc.ServerOption) pb.ReferencesServer {
+	return &referencesServer{get: grpc.NewServer(
+		endpoints.GetEndpoint,
+		_Decode_Get_Request,
+		_Encode_Get_Response,
+		opts...,
+	)}
+}
+
+func (S *referencesServer) Get(ctx context.Context, req *pb.GetRequest) (*pb.GetResponse, error) {
+	_, resp, err := S.get.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetResponse), nil
+}
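+
+// Usage sketch (hypothetical wiring; pb.RegisterReferencesServer is assumed to come from the
+// generated protobuf code for the References service):
+//
+//	eps := transport.Endpoints(svc) // svc implements references.References
+//	pb.RegisterReferencesServer(grpcServer, NewGRPCServer(&eps))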
diff --git a/pkg/references/transport/server.microgen.go b/pkg/references/transport/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..f66b4c6b4b8ff529bb498f11cec2bfce8f79c2bf
--- /dev/null
+++ b/pkg/references/transport/server.microgen.go
@@ -0,0 +1,25 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+
+	references "git.perx.ru/perxis/perxis-go/pkg/references"
+	endpoint "github.com/go-kit/kit/endpoint"
+)
+
+func Endpoints(svc references.References) EndpointsSet {
+	return EndpointsSet{GetEndpoint: GetEndpoint(svc)}
+}
+
+func GetEndpoint(svc references.References) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetRequest)
+		res0, res1, res2 := svc.Get(arg0, req.SpaceId, req.EnvId, req.References)
+		return &GetResponse{
+			Items:    res0,
+			Notfound: res1,
+		}, res2
+	}
+}
diff --git a/pkg/roles/middleware/caching_middleware.go b/pkg/roles/middleware/caching_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..19bdfe6b890806a98ac4aed81485f2f5314020b6
--- /dev/null
+++ b/pkg/roles/middleware/caching_middleware.go
@@ -0,0 +1,80 @@
+package service
+
+import (
+	"context"
+	"strings"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	service "git.perx.ru/perxis/perxis-go/pkg/roles"
+)
+
+func makeKey(ss ...string) string {
+	return strings.Join(ss, "-")
+}
+
+func CachingMiddleware(cache *cache.Cache) Middleware {
+	return func(next service.Roles) service.Roles {
+		return &cachingMiddleware{
+			cache: cache,
+			next:  next,
+		}
+	}
+}
+
+type cachingMiddleware struct {
+	cache *cache.Cache
+	next  service.Roles
+}
+
+func (m cachingMiddleware) Create(ctx context.Context, role *service.Role) (rl *service.Role, err error) {
+	rl, err = m.next.Create(ctx, role)
+	if err == nil {
+		m.cache.Remove(rl.SpaceID)
+	}
+	return rl, err
+}
+
+func (m cachingMiddleware) Get(ctx context.Context, spaceId string, roleId string) (rl *service.Role, err error) {
+	key := makeKey(spaceId, roleId)
+	value, e := m.cache.Get(key)
+	if e == nil {
+		return value.(*service.Role), err
+	}
+	rl, err = m.next.Get(ctx, spaceId, roleId)
+	if err == nil {
+		m.cache.Set(key, rl)
+	}
+	return rl, err
+}
+
+func (m cachingMiddleware) List(ctx context.Context, spaceId string) (roles []*service.Role, err error) {
+	value, e := m.cache.Get(spaceId)
+	if e == nil {
+		return value.([]*service.Role), err
+	}
+	roles, err = m.next.List(ctx, spaceId)
+	if err == nil {
+		m.cache.Set(spaceId, roles)
+	}
+	return roles, err
+}
+
+func (m cachingMiddleware) Update(ctx context.Context, role *service.Role) (err error) {
+	err = m.next.Update(ctx, role)
+	if err == nil {
+		key := makeKey(role.SpaceID, role.ID)
+		m.cache.Remove(key)
+		m.cache.Remove(role.SpaceID)
+	}
+	return err
+}
+
+func (m cachingMiddleware) Delete(ctx context.Context, spaceId string, roleId string) (err error) {
+	err = m.next.Delete(ctx, spaceId, roleId)
+	if err == nil {
+		key := makeKey(spaceId, roleId)
+		m.cache.Remove(key)
+		m.cache.Remove(spaceId)
+	}
+	return err
+}
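+
+// Usage sketch (hypothetical wiring; assumes an existing roles.Roles implementation and the
+// cache package used above):
+//
+//	var svc service.Roles // e.g. a gRPC client or storage-backed implementation
+//	svc = CachingMiddleware(cache.NewCache(100, time.Minute))(svc)
+//	role, err := svc.Get(ctx, "spaceID", "roleID") // repeated calls are served from the cache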
diff --git a/pkg/roles/middleware/caching_middleware_test.go b/pkg/roles/middleware/caching_middleware_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..bb5496b55fae164ea3681e644c7625ab9a2b9407
--- /dev/null
+++ b/pkg/roles/middleware/caching_middleware_test.go
@@ -0,0 +1,201 @@
+package service
+
+import (
+	"context"
+	"testing"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/roles"
+	rsmocks "git.perx.ru/perxis/perxis-go/pkg/roles/mocks"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/mock"
+	"github.com/stretchr/testify/require"
+)
+
+func TestRolesCache(t *testing.T) {
+
+	const (
+		roleID  = "roleID"
+		spaceID = "spaceID"
+		size    = 5
+		ttl     = 20 * time.Millisecond
+	)
+
+	errNotFound := errors.NotFound(errors.New("not found"))
+
+	ctx := context.Background()
+
+	t.Run("Get from cache", func(t *testing.T) {
+		rl := &rsmocks.Roles{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl))(rl)
+
+		rl.On("Get", mock.Anything, spaceID, roleID).Return(&roles.Role{ID: roleID, SpaceID: spaceID, Description: "Role"}, nil).Once()
+
+		v1, err := svc.Get(ctx, spaceID, roleID)
+		require.NoError(t, err)
+
+		v2, err := svc.Get(ctx, spaceID, roleID)
+		require.NoError(t, err)
+		assert.Same(t, v1, v2, "Expected the object to be served from the cache on a repeated request.")
+
+		rl.AssertExpectations(t)
+	})
+
+	t.Run("List from cache", func(t *testing.T) {
+		rl := &rsmocks.Roles{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl))(rl)
+
+		rl.On("List", mock.Anything, spaceID).Return([]*roles.Role{{ID: roleID, SpaceID: spaceID, Description: "Role"}}, nil).Once()
+
+		vl1, err := svc.List(ctx, spaceID)
+		require.NoError(t, err)
+
+		vl2, err := svc.List(ctx, spaceID)
+		require.NoError(t, err)
+		assert.Same(t, vl1[0], vl2[0], "Expected the objects to be served from the cache on a repeated request.")
+
+		rl.AssertExpectations(t)
+	})
+
+	t.Run("Invalidate cache", func(t *testing.T) {
+		t.Run("After Update", func(t *testing.T) {
+			rl := &rsmocks.Roles{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(rl)
+
+			rl.On("Get", mock.Anything, spaceID, roleID).Return(&roles.Role{ID: roleID, SpaceID: spaceID, Description: "Role"}, nil).Once()
+			rl.On("List", mock.Anything, spaceID).Return([]*roles.Role{{ID: roleID, SpaceID: spaceID, Description: "Role"}}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, roleID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, roleID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Expected the object to be served from the cache on a repeated request.")
+
+			vl1, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, vl1[0], vl2[0], "Expected the objects to be served from the cache on a repeated request.")
+
+			rl.On("Update", mock.Anything, mock.Anything).Return(nil).Once()
+
+			err = svc.Update(ctx, &roles.Role{ID: roleID, SpaceID: spaceID, Description: "RoleUPD"})
+			require.NoError(t, err)
+
+			rl.On("Get", mock.Anything, spaceID, roleID).Return(&roles.Role{ID: roleID, SpaceID: spaceID, Description: "RoleUPD"}, nil).Once()
+			rl.On("List", mock.Anything, spaceID).Return([]*roles.Role{{ID: roleID, SpaceID: spaceID, Description: "RoleUPD"}}, nil).Once()
+
+			v3, err := svc.Get(ctx, spaceID, roleID)
+			require.NoError(t, err)
+			assert.NotSame(t, v2, v3, "Expected the cached object to be invalidated after the update and the object to be fetched from the service again.")
+
+			vl3, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.NotSame(t, vl2[0], vl3[0], "Expected the cached list to be invalidated after the object was updated.")
+
+			rl.AssertExpectations(t)
+		})
+
+		t.Run("After Delete", func(t *testing.T) {
+			rl := &rsmocks.Roles{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(rl)
+
+			rl.On("Get", mock.Anything, spaceID, roleID).Return(&roles.Role{ID: roleID, SpaceID: spaceID, Description: "Role"}, nil).Once()
+			rl.On("List", mock.Anything, spaceID).Return([]*roles.Role{{ID: roleID, SpaceID: spaceID, Description: "Role"}}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, roleID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, roleID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Expected the object to be served from the cache on a repeated request.")
+
+			vl1, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, vl1[0], vl2[0], "Expected the objects to be served from the cache on a repeated request.")
+
+			rl.On("Delete", mock.Anything, spaceID, roleID).Return(nil).Once()
+
+			err = svc.Delete(ctx, spaceID, roleID)
+			require.NoError(t, err)
+
+			rl.On("Get", mock.Anything, spaceID, roleID).Return(nil, errNotFound).Once()
+			rl.On("List", mock.Anything, spaceID).Return(nil, errNotFound).Once()
+
+			v3, err := svc.Get(ctx, spaceID, roleID)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Expected the cache entry to be removed after deletion and the service error to be returned.")
+			assert.Nil(t, v3)
+
+			vl3, err := svc.List(ctx, spaceID)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Expected the cache entry to be removed after deletion and the service error to be returned.")
+			assert.Nil(t, vl3)
+
+			rl.AssertExpectations(t)
+		})
+
+		t.Run("After Create", func(t *testing.T) {
+			rl := &rsmocks.Roles{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(rl)
+
+			rl.On("List", mock.Anything, spaceID).Return([]*roles.Role{{ID: roleID, SpaceID: spaceID, Description: "Role"}}, nil).Once()
+
+			vl1, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, vl1[0], vl2[0], "Expected the objects to be served from the cache on a repeated request.")
+
+			rl.On("Create", mock.Anything, mock.Anything).Return(&roles.Role{ID: "roleID2", SpaceID: spaceID, Description: "Role2"}, nil).Once()
+
+			_, err = svc.Create(ctx, &roles.Role{ID: "roleID2", SpaceID: spaceID, Description: "Role2"})
+			require.NoError(t, err)
+
+			rl.On("List", mock.Anything, spaceID).Return([]*roles.Role{{ID: roleID, SpaceID: spaceID, Description: "Role"}, {ID: "roleID2", SpaceID: spaceID, Description: "Role2"}}, nil).Once()
+
+			vl3, err := svc.List(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Len(t, vl3, 2, "Expected the list cache to be cleared after a new object was created and the objects to be fetched from the service again.")
+
+			rl.AssertExpectations(t)
+		})
+
+		t.Run("After TTL expired", func(t *testing.T) {
+			rl := &rsmocks.Roles{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(rl)
+
+			rl.On("Get", mock.Anything, spaceID, roleID).Return(&roles.Role{ID: roleID, SpaceID: spaceID, Description: "Role"}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID, roleID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID, roleID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Expected the object to be served from the cache.")
+
+			time.Sleep(2 * ttl)
+			rl.On("Get", mock.Anything, spaceID, roleID).Return(&roles.Role{ID: roleID, SpaceID: spaceID, Description: "Role"}, nil).Once()
+
+			v3, err := svc.Get(ctx, spaceID, roleID)
+			require.NoError(t, err)
+			assert.NotSame(t, v2, v3, "Expected the object to have expired from the cache and to be fetched from the service again.")
+
+			rl.AssertExpectations(t)
+		})
+	})
+}
diff --git a/pkg/roles/middleware/error_logging_middleware.go b/pkg/roles/middleware/error_logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..7afe8f1f1003031ddedb437d0422dfb4123d4526
--- /dev/null
+++ b/pkg/roles/middleware/error_logging_middleware.go
@@ -0,0 +1,80 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/error_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/roles -i Roles -t ../../../assets/templates/middleware/error_log -o error_logging_middleware.go -l ""
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/roles"
+	"go.uber.org/zap"
+)
+
+// errorLoggingMiddleware implements roles.Roles that is instrumented with logging
+type errorLoggingMiddleware struct {
+	logger *zap.Logger
+	next   roles.Roles
+}
+
+// ErrorLoggingMiddleware instruments an implementation of the roles.Roles with simple logging
+func ErrorLoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next roles.Roles) roles.Roles {
+		return &errorLoggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *errorLoggingMiddleware) Create(ctx context.Context, role *roles.Role) (created *roles.Role, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Create(ctx, role)
+}
+
+func (m *errorLoggingMiddleware) Delete(ctx context.Context, spaceId string, roleId string) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Delete(ctx, spaceId, roleId)
+}
+
+func (m *errorLoggingMiddleware) Get(ctx context.Context, spaceId string, roleId string) (role *roles.Role, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Get(ctx, spaceId, roleId)
+}
+
+func (m *errorLoggingMiddleware) List(ctx context.Context, spaceId string) (roles []*roles.Role, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.List(ctx, spaceId)
+}
+
+func (m *errorLoggingMiddleware) Update(ctx context.Context, role *roles.Role) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Update(ctx, role)
+}
diff --git a/pkg/roles/middleware/logging_middleware.go b/pkg/roles/middleware/logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..ab536b6ad9e168adc79691a3127be8a47f9e5ba7
--- /dev/null
+++ b/pkg/roles/middleware/logging_middleware.go
@@ -0,0 +1,214 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/access_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/roles -i Roles -t ../../../assets/templates/middleware/access_log -o logging_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/auth"
+	"git.perx.ru/perxis/perxis-go/pkg/roles"
+	"go.uber.org/zap"
+	"go.uber.org/zap/zapcore"
+)
+
+// loggingMiddleware implements roles.Roles that is instrumented with logging
+type loggingMiddleware struct {
+	logger *zap.Logger
+	next   roles.Roles
+}
+
+// LoggingMiddleware instruments an implementation of the roles.Roles with simple logging
+func LoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next roles.Roles) roles.Roles {
+		return &loggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *loggingMiddleware) Create(ctx context.Context, role *roles.Role) (created *roles.Role, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":  ctx,
+		"role": role} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Create.Request", fields...)
+
+	created, err = m.next.Create(ctx, role)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"created": created,
+		"err":     err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Create.Response", fields...)
+
+	return created, err
+}
+
+func (m *loggingMiddleware) Delete(ctx context.Context, spaceId string, roleId string) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId,
+		"roleId":  roleId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Delete.Request", fields...)
+
+	err = m.next.Delete(ctx, spaceId, roleId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Delete.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Get(ctx context.Context, spaceId string, roleId string) (role *roles.Role, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId,
+		"roleId":  roleId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Request", fields...)
+
+	role, err = m.next.Get(ctx, spaceId, roleId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"role": role,
+		"err":  err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Response", fields...)
+
+	return role, err
+}
+
+func (m *loggingMiddleware) List(ctx context.Context, spaceId string) (roles []*roles.Role, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("List.Request", fields...)
+
+	roles, err = m.next.List(ctx, spaceId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"roles": roles,
+		"err":   err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("List.Response", fields...)
+
+	return roles, err
+}
+
+func (m *loggingMiddleware) Update(ctx context.Context, role *roles.Role) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":  ctx,
+		"role": role} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Update.Request", fields...)
+
+	err = m.next.Update(ctx, role)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Update.Response", fields...)
+
+	return err
+}
diff --git a/pkg/roles/middleware/middleware.go b/pkg/roles/middleware/middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..0a5198110dfc463b80274230a7d1cbc65283debd
--- /dev/null
+++ b/pkg/roles/middleware/middleware.go
@@ -0,0 +1,28 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/middleware
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/roles -i Roles -t ../../../assets/templates/middleware/middleware -o middleware.go -l ""
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/roles"
+	"go.uber.org/zap"
+)
+
+type Middleware func(roles.Roles) roles.Roles
+
+func WithLog(s roles.Roles, logger *zap.Logger, log_access bool) roles.Roles {
+	if logger == nil {
+		logger = zap.NewNop()
+	}
+
+	logger = logger.Named("Roles")
+	s = ErrorLoggingMiddleware(logger)(s)
+	if log_access {
+		s = LoggingMiddleware(logger)(s)
+	}
+	s = RecoveringMiddleware(logger)(s)
+	return s
+}
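+
+// Usage sketch (hypothetical; assumes an existing roles.Roles implementation and a zap logger):
+//
+//	logger, _ := zap.NewDevelopment()
+//	svc = WithLog(svc, logger, true) // panics recovered, errors logged, access logging enabled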
diff --git a/pkg/roles/middleware/recovering_middleware.go b/pkg/roles/middleware/recovering_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..0c0f023b6fa4ccb6f8dc4dadbf59a58bf024a6cb
--- /dev/null
+++ b/pkg/roles/middleware/recovering_middleware.go
@@ -0,0 +1,91 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/recovery
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/roles -i Roles -t ../../../assets/templates/middleware/recovery -o recovering_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+
+	"git.perx.ru/perxis/perxis-go/pkg/roles"
+	"go.uber.org/zap"
+)
+
+// recoveringMiddleware implements roles.Roles that is instrumented with logging
+type recoveringMiddleware struct {
+	logger *zap.Logger
+	next   roles.Roles
+}
+
+// RecoveringMiddleware instruments an implementation of the roles.Roles with simple logging
+func RecoveringMiddleware(logger *zap.Logger) Middleware {
+	return func(next roles.Roles) roles.Roles {
+		return &recoveringMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *recoveringMiddleware) Create(ctx context.Context, role *roles.Role) (created *roles.Role, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Create(ctx, role)
+}
+
+func (m *recoveringMiddleware) Delete(ctx context.Context, spaceId string, roleId string) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Delete(ctx, spaceId, roleId)
+}
+
+func (m *recoveringMiddleware) Get(ctx context.Context, spaceId string, roleId string) (role *roles.Role, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Get(ctx, spaceId, roleId)
+}
+
+func (m *recoveringMiddleware) List(ctx context.Context, spaceId string) (roles []*roles.Role, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.List(ctx, spaceId)
+}
+
+func (m *recoveringMiddleware) Update(ctx context.Context, role *roles.Role) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Update(ctx, role)
+}
diff --git a/pkg/roles/mocks/Roles.go b/pkg/roles/mocks/Roles.go
new file mode 100644
index 0000000000000000000000000000000000000000..d7e61236e36d2a5f544950f0634c5204d4a30a3e
--- /dev/null
+++ b/pkg/roles/mocks/Roles.go
@@ -0,0 +1,112 @@
+// Code generated by mockery v2.7.4. DO NOT EDIT.
+
+package mocks
+
+import (
+	"context"
+
+	roles "git.perx.ru/perxis/perxis-go/pkg/roles"
+	"github.com/stretchr/testify/mock"
+)
+
+// Roles is an autogenerated mock type for the Roles type
+type Roles struct {
+	mock.Mock
+}
+
+// Create provides a mock function with given fields: ctx, role
+func (_m *Roles) Create(ctx context.Context, role *roles.Role) (*roles.Role, error) {
+	ret := _m.Called(ctx, role)
+
+	var r0 *roles.Role
+	if rf, ok := ret.Get(0).(func(context.Context, *roles.Role) *roles.Role); ok {
+		r0 = rf(ctx, role)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*roles.Role)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, *roles.Role) error); ok {
+		r1 = rf(ctx, role)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Delete provides a mock function with given fields: ctx, spaceId, roleId
+func (_m *Roles) Delete(ctx context.Context, spaceId string, roleId string) error {
+	ret := _m.Called(ctx, spaceId, roleId)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok {
+		r0 = rf(ctx, spaceId, roleId)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Get provides a mock function with given fields: ctx, spaceId, roleId
+func (_m *Roles) Get(ctx context.Context, spaceId string, roleId string) (*roles.Role, error) {
+	ret := _m.Called(ctx, spaceId, roleId)
+
+	var r0 *roles.Role
+	if rf, ok := ret.Get(0).(func(context.Context, string, string) *roles.Role); ok {
+		r0 = rf(ctx, spaceId, roleId)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*roles.Role)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok {
+		r1 = rf(ctx, spaceId, roleId)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// List provides a mock function with given fields: ctx, spaceId
+func (_m *Roles) List(ctx context.Context, spaceId string) ([]*roles.Role, error) {
+	ret := _m.Called(ctx, spaceId)
+
+	var r0 []*roles.Role
+	if rf, ok := ret.Get(0).(func(context.Context, string) []*roles.Role); ok {
+		r0 = rf(ctx, spaceId)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*roles.Role)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string) error); ok {
+		r1 = rf(ctx, spaceId)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Update provides a mock function with given fields: ctx, role
+func (_m *Roles) Update(ctx context.Context, role *roles.Role) error {
+	ret := _m.Called(ctx, role)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *roles.Role) error); ok {
+		r0 = rf(ctx, role)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
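+
+// Usage sketch (hypothetical test wiring; mirrors the middleware tests in this change):
+//
+//	m := &mocks.Roles{}
+//	m.On("Get", mock.Anything, "spaceID", "roleID").Return(&roles.Role{ID: "roleID"}, nil).Once()
+//	role, err := m.Get(ctx, "spaceID", "roleID")
+//	m.AssertExpectations(t)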
diff --git a/pkg/roles/role.go b/pkg/roles/role.go
new file mode 100644
index 0000000000000000000000000000000000000000..eecafb64135a8b4545fe7ef894529641ddbab69a
--- /dev/null
+++ b/pkg/roles/role.go
@@ -0,0 +1,73 @@
+package roles
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/data"
+	"git.perx.ru/perxis/perxis-go/pkg/environments"
+	"git.perx.ru/perxis/perxis-go/pkg/permission"
+)
+
+const (
+	AnonymousRole  = "anonymous"
+	AuthorizedRole = "authorized"
+	ViewRole       = "view"
+)
+
+type Role struct {
+	// Internal role identifier
+	ID string `json:"id" bson:"_id"`
+
+	// Space identifier
+	SpaceID string `json:"spaceId" bson:"-"`
+
+	// Role description and purpose
+	Description string `json:"description" bson:"description"`
+
+	// Environments the role may access (by ID or alias)
+	Environments []string `json:"environments" bson:"environments"`
+
+	// Collection access rules
+	Rules permission.Rules `json:"rules" bson:"rules"`
+
+	// Allow access to the management API
+	AllowManagement bool `json:"allow_management" bson:"allow_management"`
+}
+
+func (r Role) CanAccessEnvironment(ctx context.Context, service environments.Environments, spaceID, envID string) bool {
+	if spaceID == "" || envID == "" {
+		return false
+	}
+
+	if r.AllowManagement {
+		return true
+	}
+
+	// If no environments are listed explicitly, the role defaults to the master environment
+	if len(r.Environments) == 0 {
+		r.Environments = []string{environments.DefaultEnvironment}
+	}
+
+	for _, e := range r.Environments {
+		if envID == e || data.GlobMatch(envID, e) {
+			return true
+		}
+	}
+
+	env, err := service.Get(ctx, spaceID, envID)
+	if err != nil || env == nil {
+		return false
+	}
+
+	aliases := append(env.Aliases, env.ID)
+
+	for _, e := range r.Environments {
+		for _, a := range aliases {
+			if a == e || data.GlobMatch(a, e) {
+				return true
+			}
+		}
+	}
+
+	return false
+}
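+
+// Usage sketch (hypothetical; envService is an environments.Environments client):
+//
+//	role := Role{ID: "editor", SpaceID: "space", Environments: []string{"master", "staging-*"}}
+//	if role.CanAccessEnvironment(ctx, envService, "space", "staging-1") {
+//		// the role is allowed to work with this environment
+//	}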
diff --git a/pkg/roles/service.go b/pkg/roles/service.go
new file mode 100644
index 0000000000000000000000000000000000000000..b003008b10c7c0a0de6549c60262741fdf441f65
--- /dev/null
+++ b/pkg/roles/service.go
@@ -0,0 +1,16 @@
+package roles
+
+import (
+	"context"
+)
+
+// @microgen grpc
+// @protobuf git.perx.ru/perxis/perxis-go/proto/roles
+// @grpc-addr content.roles.Roles
+type Roles interface {
+	Create(ctx context.Context, role *Role) (created *Role, err error)
+	Get(ctx context.Context, spaceId, roleId string) (role *Role, err error)
+	List(ctx context.Context, spaceId string) (roles []*Role, err error)
+	Update(ctx context.Context, role *Role) (err error)
+	Delete(ctx context.Context, spaceId, roleId string) (err error)
+}
diff --git a/pkg/roles/transport/client.microgen.go b/pkg/roles/transport/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..71b1de9e2b1b746962effe61dc63c37cf7977e69
--- /dev/null
+++ b/pkg/roles/transport/client.microgen.go
@@ -0,0 +1,78 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+	"errors"
+
+	roles "git.perx.ru/perxis/perxis-go/pkg/roles"
+	codes "google.golang.org/grpc/codes"
+	status "google.golang.org/grpc/status"
+)
+
+func (set EndpointsSet) Create(arg0 context.Context, arg1 *roles.Role) (res0 *roles.Role, res1 error) {
+	request := CreateRequest{Role: arg1}
+	response, res1 := set.CreateEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*CreateResponse).Created, res1
+}
+
+func (set EndpointsSet) Get(arg0 context.Context, arg1 string, arg2 string) (res0 *roles.Role, res1 error) {
+	request := GetRequest{
+		RoleId:  arg2,
+		SpaceId: arg1,
+	}
+	response, res1 := set.GetEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*GetResponse).Role, res1
+}
+
+func (set EndpointsSet) List(arg0 context.Context, arg1 string) (res0 []*roles.Role, res1 error) {
+	request := ListRequest{SpaceId: arg1}
+	response, res1 := set.ListEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*ListResponse).Roles, res1
+}
+
+func (set EndpointsSet) Update(arg0 context.Context, arg1 *roles.Role) (res0 error) {
+	request := UpdateRequest{Role: arg1}
+	_, res0 = set.UpdateEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Delete(arg0 context.Context, arg1 string, arg2 string) (res0 error) {
+	request := DeleteRequest{
+		RoleId:  arg2,
+		SpaceId: arg1,
+	}
+	_, res0 = set.DeleteEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
diff --git a/pkg/roles/transport/endpoints.microgen.go b/pkg/roles/transport/endpoints.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..06fe337c3c48656eff8074237e5714c29eac8bb2
--- /dev/null
+++ b/pkg/roles/transport/endpoints.microgen.go
@@ -0,0 +1,14 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import endpoint "github.com/go-kit/kit/endpoint"
+
+// EndpointsSet implements Roles API and used for transport purposes.
+type EndpointsSet struct {
+	CreateEndpoint endpoint.Endpoint
+	GetEndpoint    endpoint.Endpoint
+	ListEndpoint   endpoint.Endpoint
+	UpdateEndpoint endpoint.Endpoint
+	DeleteEndpoint endpoint.Endpoint
+}
diff --git a/pkg/roles/transport/exchanges.microgen.go b/pkg/roles/transport/exchanges.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..bb7d8e5010877efb628ce34b4f89ae66857476eb
--- /dev/null
+++ b/pkg/roles/transport/exchanges.microgen.go
@@ -0,0 +1,42 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import roles "git.perx.ru/perxis/perxis-go/pkg/roles"
+
+type (
+	CreateRequest struct {
+		Role *roles.Role `json:"role"`
+	}
+	CreateResponse struct {
+		Created *roles.Role `json:"created"`
+	}
+
+	GetRequest struct {
+		SpaceId string `json:"space_id"`
+		RoleId  string `json:"role_id"`
+	}
+	GetResponse struct {
+		Role *roles.Role `json:"role"`
+	}
+
+	ListRequest struct {
+		SpaceId string `json:"space_id"`
+	}
+	ListResponse struct {
+		Roles []*roles.Role `json:"roles"`
+	}
+
+	UpdateRequest struct {
+		Role *roles.Role `json:"role"`
+	}
+	// Formal exchange type, please do not delete.
+	UpdateResponse struct{}
+
+	DeleteRequest struct {
+		SpaceId string `json:"space_id"`
+		RoleId  string `json:"role_id"`
+	}
+	// Formal exchange type, please do not delete.
+	DeleteResponse struct{}
+)
diff --git a/pkg/roles/transport/grpc/client.microgen.go b/pkg/roles/transport/grpc/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..747b1a8cada8b5e189e5e4c7994c79cb36df8b02
--- /dev/null
+++ b/pkg/roles/transport/grpc/client.microgen.go
@@ -0,0 +1,54 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/roles/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/roles"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	grpc "google.golang.org/grpc"
+)
+
+func NewGRPCClient(conn *grpc.ClientConn, addr string, opts ...grpckit.ClientOption) transport.EndpointsSet {
+	if addr == "" {
+		addr = "content.roles.Roles"
+	}
+	return transport.EndpointsSet{
+		CreateEndpoint: grpckit.NewClient(
+			conn, addr, "Create",
+			_Encode_Create_Request,
+			_Decode_Create_Response,
+			pb.CreateResponse{},
+			opts...,
+		).Endpoint(),
+		DeleteEndpoint: grpckit.NewClient(
+			conn, addr, "Delete",
+			_Encode_Delete_Request,
+			_Decode_Delete_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		GetEndpoint: grpckit.NewClient(
+			conn, addr, "Get",
+			_Encode_Get_Request,
+			_Decode_Get_Response,
+			pb.GetResponse{},
+			opts...,
+		).Endpoint(),
+		ListEndpoint: grpckit.NewClient(
+			conn, addr, "List",
+			_Encode_List_Request,
+			_Decode_List_Response,
+			pb.ListResponse{},
+			opts...,
+		).Endpoint(),
+		UpdateEndpoint: grpckit.NewClient(
+			conn, addr, "Update",
+			_Encode_Update_Request,
+			_Decode_Update_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+	}
+}
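+
+// Usage sketch (hypothetical; assumes a dialed *grpc.ClientConn):
+//
+//	conn, err := grpc.Dial("localhost:9000", grpc.WithInsecure())
+//	// handle err ...
+//	client := NewGRPCClient(conn, "") // "" falls back to the service name "content.roles.Roles"
+//	list, err := client.List(ctx, "spaceID")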
diff --git a/pkg/roles/transport/grpc/protobuf_endpoint_converters.microgen.go b/pkg/roles/transport/grpc/protobuf_endpoint_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..aa66bf57105f290aae04a763c5a8e931a410fc53
--- /dev/null
+++ b/pkg/roles/transport/grpc/protobuf_endpoint_converters.microgen.go
@@ -0,0 +1,209 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// Please, do not change functions names!
+package transportgrpc
+
+import (
+	"context"
+	"errors"
+
+	transport "git.perx.ru/perxis/perxis-go/pkg/roles/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/roles"
+	empty "github.com/golang/protobuf/ptypes/empty"
+)
+
+func _Encode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*transport.CreateRequest)
+	reqRole, err := PtrRoleToProto(req.Role)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateRequest{Role: reqRole}, nil
+}
+
+func _Encode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*transport.GetRequest)
+	return &pb.GetRequest{
+		RoleId:  req.RoleId,
+		SpaceId: req.SpaceId,
+	}, nil
+}
+
+func _Encode_List_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListRequest")
+	}
+	req := request.(*transport.ListRequest)
+	return &pb.ListRequest{SpaceId: req.SpaceId}, nil
+}
+
+func _Encode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UpdateRequest")
+	}
+	req := request.(*transport.UpdateRequest)
+	reqRole, err := PtrRoleToProto(req.Role)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.UpdateRequest{Role: reqRole}, nil
+}
+
+func _Encode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*transport.DeleteRequest)
+	return &pb.DeleteRequest{
+		RoleId:  req.RoleId,
+		SpaceId: req.SpaceId,
+	}, nil
+}
+
+func _Encode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*transport.GetResponse)
+	respRole, err := PtrRoleToProto(resp.Role)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetResponse{Role: respRole}, nil
+}
+
+func _Encode_List_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListResponse")
+	}
+	resp := response.(*transport.ListResponse)
+	respRoles, err := ListPtrRoleToProto(resp.Roles)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.ListResponse{Roles: respRoles}, nil
+}
+
+func _Encode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*pb.CreateRequest)
+	reqRole, err := ProtoToPtrRole(req.Role)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateRequest{Role: reqRole}, nil
+}
+
+func _Decode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*pb.GetRequest)
+	return &transport.GetRequest{
+		RoleId:  string(req.RoleId),
+		SpaceId: string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_List_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil ListRequest")
+	}
+	req := request.(*pb.ListRequest)
+	return &transport.ListRequest{SpaceId: string(req.SpaceId)}, nil
+}
+
+func _Decode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UpdateRequest")
+	}
+	req := request.(*pb.UpdateRequest)
+	reqRole, err := ProtoToPtrRole(req.Role)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.UpdateRequest{Role: reqRole}, nil
+}
+
+func _Decode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*pb.DeleteRequest)
+	return &transport.DeleteRequest{
+		RoleId:  string(req.RoleId),
+		SpaceId: string(req.SpaceId),
+	}, nil
+}
+
+func _Decode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*pb.GetResponse)
+	respRole, err := ProtoToPtrRole(resp.Role)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetResponse{Role: respRole}, nil
+}
+
+func _Decode_List_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil ListResponse")
+	}
+	resp := response.(*pb.ListResponse)
+	respRoles, err := ProtoToListPtrRole(resp.Roles)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.ListResponse{Roles: respRoles}, nil
+}
+
+func _Decode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*transport.CreateResponse)
+	respCreated, err := PtrRoleToProto(resp.Created)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateResponse{Created: respCreated}, nil
+}
+
+func _Decode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*pb.CreateResponse)
+	respCreated, err := ProtoToPtrRole(resp.Created)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateResponse{Created: respCreated}, nil
+}
diff --git a/pkg/roles/transport/grpc/protobuf_type_converters.microgen.go b/pkg/roles/transport/grpc/protobuf_type_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..10f207e5ebf3e07804009bf0bbb62401e0bdbf9a
--- /dev/null
+++ b/pkg/roles/transport/grpc/protobuf_type_converters.microgen.go
@@ -0,0 +1,110 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// It is better for you if you do not change functions names!
+// This file will never be overwritten.
+package transportgrpc
+
+import (
+	permission "git.perx.ru/perxis/perxis-go/pkg/permission"
+	service "git.perx.ru/perxis/perxis-go/pkg/roles"
+	commonpb "git.perx.ru/perxis/perxis-go/proto/common"
+	pb "git.perx.ru/perxis/perxis-go/proto/roles"
+)
+
+func PtrRoleToProto(role *service.Role) (*pb.Role, error) {
+	if role == nil {
+		return nil, nil
+	}
+	rules := make([]*commonpb.Rule, 0, len(role.Rules))
+	for _, r := range role.Rules {
+		pr, _ := PtrPermissionRuleToProto(r)
+		rules = append(rules, pr)
+	}
+	return &pb.Role{
+		Id:              role.ID,
+		SpaceId:         role.SpaceID,
+		Description:     role.Description,
+		Rules:           rules,
+		Environments:    role.Environments,
+		AllowManagement: role.AllowManagement,
+	}, nil
+}
+
+func ProtoToPtrRole(protoRole *pb.Role) (*service.Role, error) {
+	if protoRole == nil {
+		return nil, nil
+	}
+
+	rules := make([]*permission.Rule, 0, len(protoRole.Rules))
+	for _, pr := range protoRole.Rules {
+		r, _ := ProtoToPtrPermissionRule(pr)
+		rules = append(rules, r)
+	}
+
+	return &service.Role{
+		ID:              protoRole.Id,
+		SpaceID:         protoRole.SpaceId,
+		Description:     protoRole.Description,
+		Rules:           rules,
+		Environments:    protoRole.Environments,
+		AllowManagement: protoRole.AllowManagement,
+	}, nil
+}
+
+func ListPtrRoleToProto(roles []*service.Role) ([]*pb.Role, error) {
+	protoRoles := make([]*pb.Role, 0, len(roles))
+	for _, r := range roles {
+		protoRole, _ := PtrRoleToProto(r)
+		protoRoles = append(protoRoles, protoRole)
+	}
+	return protoRoles, nil
+}
+
+func ProtoToListPtrRole(protoRoles []*pb.Role) ([]*service.Role, error) {
+	roles := make([]*service.Role, 0, len(protoRoles))
+	for _, r := range protoRoles {
+		role, _ := ProtoToPtrRole(r)
+		roles = append(roles, role)
+	}
+	return roles, nil
+}
+
+func PtrPermissionRuleToProto(rule *permission.Rule) (*commonpb.Rule, error) {
+	if rule == nil {
+		return nil, nil
+	}
+	actions := make([]commonpb.Action, 0, len(rule.Actions))
+	for _, a := range rule.Actions {
+		actions = append(actions, commonpb.Action(a))
+	}
+	return &commonpb.Rule{
+		CollectionId:    rule.CollectionID,
+		Actions:         actions,
+		Access:          commonpb.Access(rule.Access),
+		HiddenFields:    rule.HiddenFields,
+		ReadonlyFields:  rule.ReadonlyFields,
+		WriteonlyFields: rule.WriteonlyFields,
+		ReadFilter:      rule.ReadFilter,
+		WriteFilter:     rule.WriteFilter,
+	}, nil
+}
+
+func ProtoToPtrPermissionRule(protoRule *commonpb.Rule) (*permission.Rule, error) {
+	if protoRule == nil {
+		return nil, nil
+	}
+	actions := make([]permission.Action, 0, len(protoRule.Actions))
+	for _, a := range protoRule.Actions {
+		actions = append(actions, permission.Action(a))
+	}
+	return &permission.Rule{
+		CollectionID:    protoRule.CollectionId,
+		Actions:         actions,
+		Access:          permission.Access(protoRule.Access),
+		HiddenFields:    protoRule.HiddenFields,
+		ReadonlyFields:  protoRule.ReadonlyFields,
+		WriteonlyFields: protoRule.WriteonlyFields,
+		ReadFilter:      protoRule.ReadFilter,
+		WriteFilter:     protoRule.WriteFilter,
+	}, nil
+}
diff --git a/pkg/roles/transport/grpc/server.microgen.go b/pkg/roles/transport/grpc/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..dc012c75f1ff3a6965d9152f204c7d7f6b61d285
--- /dev/null
+++ b/pkg/roles/transport/grpc/server.microgen.go
@@ -0,0 +1,97 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// DO NOT EDIT.
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/roles/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/roles"
+	grpc "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	context "golang.org/x/net/context"
+)
+
+type rolesServer struct {
+	create grpc.Handler
+	get    grpc.Handler
+	list   grpc.Handler
+	update grpc.Handler
+	delete grpc.Handler
+
+	pb.UnimplementedRolesServer
+}
+
+func NewGRPCServer(endpoints *transport.EndpointsSet, opts ...grpc.ServerOption) pb.RolesServer {
+	return &rolesServer{
+		create: grpc.NewServer(
+			endpoints.CreateEndpoint,
+			_Decode_Create_Request,
+			_Encode_Create_Response,
+			opts...,
+		),
+		delete: grpc.NewServer(
+			endpoints.DeleteEndpoint,
+			_Decode_Delete_Request,
+			_Encode_Delete_Response,
+			opts...,
+		),
+		get: grpc.NewServer(
+			endpoints.GetEndpoint,
+			_Decode_Get_Request,
+			_Encode_Get_Response,
+			opts...,
+		),
+		list: grpc.NewServer(
+			endpoints.ListEndpoint,
+			_Decode_List_Request,
+			_Encode_List_Response,
+			opts...,
+		),
+		update: grpc.NewServer(
+			endpoints.UpdateEndpoint,
+			_Decode_Update_Request,
+			_Encode_Update_Response,
+			opts...,
+		),
+	}
+}
+
+func (S *rolesServer) Create(ctx context.Context, req *pb.CreateRequest) (*pb.CreateResponse, error) {
+	_, resp, err := S.create.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.CreateResponse), nil
+}
+
+func (S *rolesServer) Get(ctx context.Context, req *pb.GetRequest) (*pb.GetResponse, error) {
+	_, resp, err := S.get.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetResponse), nil
+}
+
+func (S *rolesServer) List(ctx context.Context, req *pb.ListRequest) (*pb.ListResponse, error) {
+	_, resp, err := S.list.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.ListResponse), nil
+}
+
+func (S *rolesServer) Update(ctx context.Context, req *pb.UpdateRequest) (*empty.Empty, error) {
+	_, resp, err := S.update.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *rolesServer) Delete(ctx context.Context, req *pb.DeleteRequest) (*empty.Empty, error) {
+	_, resp, err := S.delete.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
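+
+// Usage sketch (hypothetical wiring; pb.RegisterRolesServer is assumed to come from the
+// generated protobuf code):
+//
+//	eps := transport.Endpoints(svc) // svc implements roles.Roles
+//	pb.RegisterRolesServer(grpcServer, NewGRPCServer(&eps)) // grpcServer is a *grpc.Server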
diff --git a/pkg/roles/transport/server.microgen.go b/pkg/roles/transport/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..119aae999a13bab884d4f98d09346b1ef2d13071
--- /dev/null
+++ b/pkg/roles/transport/server.microgen.go
@@ -0,0 +1,60 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+
+	roles "git.perx.ru/perxis/perxis-go/pkg/roles"
+	endpoint "github.com/go-kit/kit/endpoint"
+)
+
+func Endpoints(svc roles.Roles) EndpointsSet {
+	return EndpointsSet{
+		CreateEndpoint: CreateEndpoint(svc),
+		DeleteEndpoint: DeleteEndpoint(svc),
+		GetEndpoint:    GetEndpoint(svc),
+		ListEndpoint:   ListEndpoint(svc),
+		UpdateEndpoint: UpdateEndpoint(svc),
+	}
+}
+
+func CreateEndpoint(svc roles.Roles) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*CreateRequest)
+		res0, res1 := svc.Create(arg0, req.Role)
+		return &CreateResponse{Created: res0}, res1
+	}
+}
+
+func GetEndpoint(svc roles.Roles) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetRequest)
+		res0, res1 := svc.Get(arg0, req.SpaceId, req.RoleId)
+		return &GetResponse{Role: res0}, res1
+	}
+}
+
+func ListEndpoint(svc roles.Roles) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*ListRequest)
+		res0, res1 := svc.List(arg0, req.SpaceId)
+		return &ListResponse{Roles: res0}, res1
+	}
+}
+
+func UpdateEndpoint(svc roles.Roles) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*UpdateRequest)
+		res0 := svc.Update(arg0, req.Role)
+		return &UpdateResponse{}, res0
+	}
+}
+
+func DeleteEndpoint(svc roles.Roles) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*DeleteRequest)
+		res0 := svc.Delete(arg0, req.SpaceId, req.RoleId)
+		return &DeleteResponse{}, res0
+	}
+}
diff --git a/pkg/schema/field/array.go b/pkg/schema/field/array.go
new file mode 100644
index 0000000000000000000000000000000000000000..6700eb9b09af7bb714566dca8c82e90d026c7d57
--- /dev/null
+++ b/pkg/schema/field/array.go
@@ -0,0 +1,161 @@
+package field
+
+import (
+	"context"
+	"fmt"
+	"reflect"
+	"strconv"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"github.com/hashicorp/go-multierror"
+)
+
+var arrayType = &ArrayType{}
+
+type ArrayParameters struct {
+	Item *Field `json:"item"`
+}
+
+func (ArrayParameters) Type() Type { return arrayType }
+
+func (p ArrayParameters) Clone(reset bool) Parameters {
+	return &ArrayParameters{Item: p.Item.Clone(reset)}
+}
+
+type ArrayType struct{}
+
+func (ArrayType) Name() string {
+	return "array"
+}
+
+func (ArrayType) NewParameters() Parameters {
+	return &ArrayParameters{}
+}
+
+func (ArrayType) IsEmpty(v interface{}) bool {
+	arr, _ := v.([]interface{}) // TODO: should this return an error instead?
+	return len(arr) == 0
+}
+
+//func (ArrayType) Decode(ctx *Context.Context, field *Field, v interface{}) (interface{}, error) {
+//	params, ok := field.Params.(*ArrayParameters)
+//	if !ok {
+//		return nil, errors.New("field parameters required")
+//	}
+//
+//	arr, ok := v.([]interface{})
+//	if !ok {
+//		return nil, fmt.Errorf("[]interface{} required")
+//	}
+//
+//	m := make([]interface{}, 0, len(arr))
+//
+//	for _, i := range arr {
+//		item, err := Decode(ctx, params.Item, i)
+//		if err != nil {
+//			return nil, err
+//		}
+//		m = append(m, item)
+//	}
+//
+//	return m, nil
+//}
+//
+//func (ArrayType) Encode(ctx *Context.Context, field *Field, v interface{}) (interface{}, error) {
+//	params, ok := field.Params.(*ArrayParameters)
+//	if !ok {
+//		return nil, errors.New("field parameters required")
+//	}
+//
+//	arr, ok := v.([]interface{})
+//	if !ok {
+//		return nil, fmt.Errorf("[]interface{} required")
+//	}
+//
+//	m := make([]interface{}, 0, len(arr))
+//
+//	for _, i := range arr {
+//		item, err := params.Item.Encode(ctx, i)
+//		if err != nil {
+//			return nil, err
+//		}
+//		m = append(m, item)
+//	}
+//
+//	return m, nil
+//}
+
+//func (ArrayType) Validate(ctx *Context.Context, field *Field, v interface{}) error {
+//	params, ok := field.Params.(*ArrayParameters)
+//	if !ok {
+//		return errors.New("field parameters required")
+//	}
+//
+//	m, ok := v.([]interface{})
+//	if !ok {
+//		return errors.New("[]interface{} is required")
+//	}
+//	for _, i := range m {
+//		err := params.Item.Validate(ctx, i)
+//		if err != nil {
+//			return err
+//		}
+//	}
+//	return nil
+//}
+
+func (ArrayType) Walk(ctx context.Context, field *Field, v interface{}, fn WalkFunc, opts *WalkOptions) (interface{}, bool, error) {
+	var changed bool
+	params, ok := field.Params.(*ArrayParameters)
+	if !ok {
+		return nil, false, errors.New("field parameters required")
+	}
+
+	// No data for the array and schema traversal is not requested: nothing to walk
+	if !opts.WalkSchema && v == nil {
+		return nil, false, nil
+	}
+
+	// Schema-only traversal: visit the item field even though there is no data
+	if opts.WalkSchema && v == nil {
+		if _, _, err := params.Item.Walk(ctx, v, fn, WalkOpts(opts)); err != nil {
+			return nil, false, err
+		}
+		return nil, false, nil
+	}
+
+	arr, ok := v.([]interface{})
+	if !ok {
+		return nil, false, fmt.Errorf("incorrect type: \"%s\", expected \"[]interface{}\"", reflect.ValueOf(v).Kind())
+	}
+
+	m := make([]interface{}, 0, len(arr))
+
+	var merr *multierror.Error
+	for i, value := range arr {
+
+		valueNew, valueChanged, err := params.Item.Walk(ctx, value, fn, WalkOpts(opts))
+
+		if err != nil {
+			merr = multierror.Append(merr, errors.WithField(err, strconv.Itoa(i)))
+		}
+
+		if valueChanged {
+			m = append(m, valueNew)
+			changed = true
+		} else {
+			m = append(m, value)
+		}
+	}
+
+	if merr != nil {
+		merr.ErrorFormat = func(i []error) string {
+			return fmt.Sprintf("%d error(s)", len(i))
+		}
+		return nil, false, merr
+	}
+
+	return m, changed, nil
+}
+
+func Array(item *Field, o ...interface{}) *Field {
+	return NewField(&ArrayParameters{Item: item}, o...)
+}
diff --git a/pkg/schema/field/array_test.go b/pkg/schema/field/array_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..94e60258661932dc72c2c34c27ea7cecb5c6b33d
--- /dev/null
+++ b/pkg/schema/field/array_test.go
@@ -0,0 +1,85 @@
+package field
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestArrayField_Decode(t *testing.T) {
+	tests := []struct {
+		name    string
+		field   *Field
+		data    interface{}
+		want    interface{}
+		wantErr bool
+	}{
+		{
+			"Correct",
+			Array(Number("float")),
+			[]interface{}{1.0, 2.0},
+			[]interface{}{1.0, 2.0},
+			false,
+		},
+		{
+			"Incorrect type",
+			Array(Number("int")),
+			"1 2 3",
+			"decode error: incorrect type: \"string\", expected \"[]interface{}\"",
+			true,
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := Decode(nil, tt.field, tt.data)
+			if tt.wantErr {
+				require.Error(t, err)
+				assert.EqualError(t, err, tt.want.(string), fmt.Sprintf("Decode() error = %v, want %v", err, tt.want.(string)))
+			}
+			if !tt.wantErr {
+				require.NoError(t, err)
+				assert.ElementsMatch(t, got, tt.want, fmt.Sprintf("Decode() got = %v, want %v", got, tt.want))
+			}
+		})
+	}
+}
+
+func TestArrayField_Encode(t *testing.T) {
+	tests := []struct {
+		name    string
+		field   *Field
+		data    interface{}
+		want    interface{}
+		wantErr bool
+	}{
+		{
+			"Correct",
+			Array(Number("float")),
+			[]interface{}{1.0, 2.0},
+			[]interface{}{1.0, 2.0},
+			false,
+		},
+		{
+			"Incorrect type",
+			Array(Number("int")),
+			"1 2 3",
+			"encode error: incorrect type: \"string\", expected \"[]interface{}\"",
+			true,
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := Encode(nil, tt.field, tt.data)
+			if tt.wantErr {
+				require.Error(t, err)
+				assert.EqualError(t, err, tt.want.(string), fmt.Sprintf("Decode() error = %v, want %v", err, tt.want.(string)))
+			}
+			if !tt.wantErr {
+				require.NoError(t, err)
+				assert.ElementsMatch(t, got, tt.want, fmt.Sprintf("Decode() got = %v, want %v", got, tt.want))
+			}
+		})
+	}
+}
diff --git a/pkg/schema/field/boolean.go b/pkg/schema/field/boolean.go
new file mode 100644
index 0000000000000000000000000000000000000000..94580541f8e33f9930adad94bff6a5c18915d05b
--- /dev/null
+++ b/pkg/schema/field/boolean.go
@@ -0,0 +1,51 @@
+package field
+
+import (
+	"context"
+	"fmt"
+	"reflect"
+)
+
+var boolType = &BoolType{}
+
+type BoolParameters struct{}
+
+func (b BoolParameters) Type() Type                   { return boolType }
+func (b *BoolParameters) Clone(reset bool) Parameters { return b }
+
+type BoolType struct{}
+
+func (b BoolType) Name() string {
+	return "bool"
+}
+
+func (b BoolType) NewParameters() Parameters {
+	return &BoolParameters{}
+}
+func (BoolType) IsEmpty(v interface{}) bool {
+	return v == nil
+}
+
+func (b BoolType) Decode(_ context.Context, field *Field, v interface{}) (interface{}, error) {
+	if v == nil {
+		return v, nil
+	}
+	if _, ok := v.(bool); ok {
+		return v, nil
+	}
+	return nil, fmt.Errorf("incorrect type: \"%s\", expected \"boolean\"", reflect.ValueOf(v).Kind())
+}
+
+func (b BoolType) Encode(_ context.Context, field *Field, v interface{}) (interface{}, error) {
+	if v == nil {
+		return v, nil
+	}
+	if _, ok := v.(bool); ok {
+		return v, nil
+	}
+	return nil, fmt.Errorf("incorrect type: \"%s\", expected \"boolean\"", reflect.ValueOf(v).Kind())
+}
+
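+// Bool creates a boolean field; any options are forwarded to NewField.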
+func Bool(o ...interface{}) *Field {
+	return NewField(&BoolParameters{}, o...)
+}
diff --git a/pkg/schema/field/boolean_test.go b/pkg/schema/field/boolean_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..aaa09dcec353d7648a976759bb4fcc98d0fe0d7a
--- /dev/null
+++ b/pkg/schema/field/boolean_test.go
@@ -0,0 +1,73 @@
+package field
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestBooleanField_Decode(t *testing.T) {
+	tests := []struct {
+		name    string
+		field   *Field
+		data    interface{}
+		want    interface{}
+		wantErr bool
+	}{
+		{"Correct data bool", Bool(), true, true, false},
+		{"Correct data nil", Bool(), nil, nil, false},
+		{"Wrong data int", Bool(), 1, "decode error: incorrect type: \"int\", expected \"boolean\"", true},
+		{"Wrong data float", Bool(), 0.0, "decode error: incorrect type: \"float64\", expected \"boolean\"", true},
+		{"Wrong data array", Bool(), [2]bool{true}, "decode error: incorrect type: \"array\", expected \"boolean\"", true},
+		{"Wrong data slice", Bool(), []bool{true}, "decode error: incorrect type: \"slice\", expected \"boolean\"", true},
+		{"Wrong data map", Bool(), map[bool]int{true: 1}, "decode error: incorrect type: \"map\", expected \"boolean\"", true},
+		{"Wrong data string", Bool(), "2", "decode error: incorrect type: \"string\", expected \"boolean\"", true},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := Decode(nil, tt.field, tt.data)
+			if tt.wantErr {
+				require.Error(t, err)
+				assert.EqualError(t, err, tt.want.(string), fmt.Sprintf("Decode() error = %v, want %v", err, tt.want.(string)))
+			}
+			if !tt.wantErr {
+				require.NoError(t, err)
+				assert.Equal(t, got, tt.want, fmt.Sprintf("Decode() got = %v, want %v", got, tt.want))
+			}
+		})
+	}
+}
+
+func TestBooleanField_Encode(t *testing.T) {
+	tests := []struct {
+		name    string
+		field   *Field
+		data    interface{}
+		want    interface{}
+		wantErr bool
+	}{
+		{"Correct", Bool(), false, false, false},
+		{"Correct data nil", Bool(), nil, nil, false},
+		{"Wrong data int", Bool(), 1, "encode error: incorrect type: \"int\", expected \"boolean\"", true},
+		{"Wrong data float", Bool(), 0.0, "encode error: incorrect type: \"float64\", expected \"boolean\"", true},
+		{"Wrong data array", Bool(), [2]bool{true}, "encode error: incorrect type: \"array\", expected \"boolean\"", true},
+		{"Wrong data slice", Bool(), []bool{true}, "encode error: incorrect type: \"slice\", expected \"boolean\"", true},
+		{"Wrong data map", Bool(), map[bool]int{true: 1}, "encode error: incorrect type: \"map\", expected \"boolean\"", true},
+		{"Wrong data string", Bool(), "2", "encode error: incorrect type: \"string\", expected \"boolean\"", true},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := Encode(nil, tt.field, tt.data)
+			if tt.wantErr {
+				require.Error(t, err)
+				assert.EqualError(t, err, tt.want.(string), fmt.Sprintf("Encode() error = %v, want %v", err, tt.want.(string)))
+			}
+			if !tt.wantErr {
+				require.NoError(t, err)
+				assert.Equal(t, got, tt.want, fmt.Sprintf("Encode() got = %v, want %v", got, tt.want))
+			}
+		})
+	}
+}
diff --git a/pkg/schema/field/encode.go b/pkg/schema/field/encode.go
new file mode 100644
index 0000000000000000000000000000000000000000..d891b41228e4ff70142cff9bdf3bca8cc99506dd
--- /dev/null
+++ b/pkg/schema/field/encode.go
@@ -0,0 +1,66 @@
+package field
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+)
+
+type Decoder interface {
+	Decode(ctx context.Context, field *Field, v interface{}) (interface{}, error)
+}
+
+type Encoder interface {
+	Encode(ctx context.Context, field *Field, v interface{}) (interface{}, error)
+}
+
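+// Decode walks v according to the schema described by w and applies the
+// type-specific Decoder of every field, returning the decoded value; errors
+// are wrapped with the "decode error" prefix.
+//
+// Illustrative usage (field constructors as defined in this package):
+//
+//	f := Object("age", Number("int"))
+//	v, err := Decode(nil, f, map[string]interface{}{"age": 2.0})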
+func Decode(ctx context.Context, w Walker, v interface{}) (interface{}, error) {
+	var err error
+	//if ctx == nil {
+	//	ctx = NewContext()
+	//}
+	//
+	//if m, ok := v.(map[string]interface{}); ok {
+	//	ctx = ctx.ExtendEnv(m)
+	//	ctx.DisableConditions = true
+	//}
+
+	val, _, err := w.Walk(ctx, v, func(ctx context.Context, f *Field, v interface{}) (res WalkFuncResult, err error) {
+		if decoder, ok := f.GetType().(Decoder); ok {
+			if v, err = decoder.Decode(ctx, f, v); err != nil {
+				return
+			}
+			res.Value = v
+			res.Changed = true
+			return
+		}
+		res.Value = v
+		return
+	})
+
+	if err != nil {
+		return nil, errors.Wrap(err, "decode error")
+	}
+
+	return val, nil
+}
+
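+// Encode is the inverse of Decode: it walks v according to the schema and
+// applies the type-specific Encoder of every field, wrapping errors with the
+// "encode error" prefix.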
+func Encode(ctx context.Context, w Walker, v interface{}) (interface{}, error) {
+	val, _, err := w.Walk(ctx, v, func(ctx context.Context, f *Field, v interface{}) (res WalkFuncResult, err error) {
+		if encode, ok := f.GetType().(Encoder); ok {
+			if v, err = encode.Encode(ctx, f, v); err != nil {
+				return
+			}
+			res.Value = v
+			res.Changed = true
+			return
+		}
+		res.Value = v
+		return
+	})
+	if err != nil {
+		return nil, errors.Wrap(err, "encode error")
+	}
+	return val, nil
+}
diff --git a/pkg/schema/field/errors.go b/pkg/schema/field/errors.go
new file mode 100644
index 0000000000000000000000000000000000000000..8ee095285a7493cf177cd9185d7235a344c49e67
--- /dev/null
+++ b/pkg/schema/field/errors.go
@@ -0,0 +1,7 @@
+package field
+
+import "errors"
+
+var (
+	ErrSkipOption = errors.New("option invalid, skipped")
+)
diff --git a/pkg/schema/field/evaluate.go b/pkg/schema/field/evaluate.go
new file mode 100644
index 0000000000000000000000000000000000000000..ad273819651a434a93e810564c0de5444579035f
--- /dev/null
+++ b/pkg/schema/field/evaluate.go
@@ -0,0 +1,49 @@
+package field
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/expr"
+)
+
+const EvaluatePassesLimit = 10
+
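+// Evaluate repeatedly walks v, dropping the values of fields whose Condition
+// evaluates to false, until a pass produces no further changes. The number of
+// passes is capped by EvaluatePassesLimit to guard against condition cycles.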
+func Evaluate(ctx context.Context, w Walker, v interface{}) (interface{}, error) {
+	var err error
+
+	chg := true
+	val := v
+	i := 0
+
+	for chg {
+		m, _ := val.(map[string]interface{})
+
+		val, chg, err = w.Walk(expr.WithEnv(ctx, m), val, func(ctx context.Context, f *Field, v interface{}) (res WalkFuncResult, err error) {
+			enabled, _ := f.IsEnabled(ctx)
+
+			if !enabled {
+				res.Stop = true
+				if v != nil {
+					res.Changed = true // The value has been replaced with an empty one
+				}
+				return
+			}
+
+			res.Value = v
+			return
+		})
+
+		if err != nil {
+			return nil, errors.Wrap(err, "evaluation error")
+		}
+
+		i += 1
+
+		if i > EvaluatePassesLimit {
+			return nil, errors.New("fail to evaluate data conditions")
+		}
+	}
+
+	return val, nil
+}
diff --git a/pkg/schema/field/field.go b/pkg/schema/field/field.go
new file mode 100644
index 0000000000000000000000000000000000000000..45ddb40d128ab3f87ff7c41e3cedf75009ba3f98
--- /dev/null
+++ b/pkg/schema/field/field.go
@@ -0,0 +1,528 @@
+package field
+
+import (
+	"context"
+	"strings"
+
+	"git.perx.ru/perxis/perxis-go/pkg/data"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/expr"
+)
+
+const (
+	FieldSeparator = "."
+	IncludeLimit   = 10
+)
+
+type (
+	Preparer interface {
+		Prepare(f *Field) error
+	}
+
+	Fielder interface {
+		GetField(path string) *Field
+	}
+)
+
+type Translation struct {
+	Locale      string `json:"locale,omitempty"`
+	Title       string `json:"title,omitempty"`
+	Description string `json:"description,omitempty"`
+}
+
+type View struct {
+	Widget  string                 `json:"widget,omitempty"`  // Widget used to render the field in a list
+	Options map[string]interface{} `json:"options,omitempty"` // Widget options, left to the widget's discretion
+}
+
+type UI struct {
+	Widget      string                 `json:"widget,omitempty"`      // Name of the widget used to render the field in the user interface
+	Placeholder string                 `json:"placeholder,omitempty"` // Hint shown while filling in the value
+	Options     map[string]interface{} `json:"options,omitempty"`     // Widget rendering options
+	ReadView    *View                  `json:"read_view,omitempty"`   // Screen settings for the item view mode
+	EditView    *View                  `json:"edit_view,omitempty"`   // Screen settings for the item edit mode
+	ListView    *View                  `json:"list_view,omitempty"`   // Screen settings for the item list mode
+}
+
+type Include struct {
+	Ref      string `json:"ref,omitempty"`
+	Optional bool   `json:"optional,omitempty"`
+}
+
+type Field struct {
+	Title            string        `json:"title,omitempty"`             // Field title (e.g. name)
+	Description      string        `json:"description,omitempty"`       // Field description (e.g. User name)
+	Translations     []Translation `json:"translations,omitempty"`      // Translations into different locales
+	UI               *UI           `json:"ui,omitempty"`                // User interface options
+	Includes         []Include     `json:"includes,omitempty"`          // Schema includes (imports)
+	SingleLocale     bool          `json:"singleLocale,omitempty"`      // Do not translate (single locale)
+	Indexed          bool          `json:"indexed,omitempty"`           // Build an index for the field
+	Unique           bool          `json:"unique,omitempty"`            // Field values must be unique
+	TextSearch       bool          `json:"text_search,omitempty"`       // Field values are available for full-text search
+	Params           Parameters    `json:"-"`                           // Field parameters; they also determine the field type
+	Options          Options       `json:"options,omitempty"`           // Additional options
+	Condition        string        `json:"condition,omitempty"`         // Condition controlling whether the field is shown
+	AdditionalValues bool          `json:"additional_values,omitempty"` // Allow additional values outside the rule constraints
+
+	prepared bool
+}
+
+// TODO: Replace with Named field???
+type PathField struct {
+	Field
+	Name string
+	Path string
+}
+
+type NamedField struct {
+	*Field
+	Name string
+}
+
+func NewField(params Parameters, opts ...interface{}) *Field {
+	f := &Field{}
+	f.Params = params
+	f.Options.Add(opts...)
+	return f
+}
+
+func (f Field) GetType() Type {
+	return f.Params.Type()
+}
+
+func (f *Field) AddOptions(t ...interface{}) *Field {
+	f.Options.Add(t...)
+	return f
+}
+
+func (f Field) WithUI(ui *UI) *Field {
+	f.UI = ui
+	return &f
+}
+
+func (f *Field) SetIncludes(includes ...interface{}) {
+	f.Includes = make([]Include, 0, len(includes))
+	for _, i := range includes {
+		switch v := i.(type) {
+		case string:
+			f.Includes = append(f.Includes, Include{Ref: v})
+		case Include:
+			f.Includes = append(f.Includes, v)
+		default:
+			panic("incorrect import type")
+		}
+	}
+}
+
+func (f Field) WithIncludes(includes ...interface{}) *Field {
+	f.SetIncludes(includes...)
+	return &f
+}
+
+func (f Field) GetIncludes() []string {
+	return f.getIncludes()
+}
+
+func (f Field) getIncludes() []string {
+	res := make([]string, len(f.Includes))
+	for i, inc := range f.Includes {
+		res[i] = inc.Ref
+	}
+	nested := f.GetNestedFields()
+	for _, fld := range nested {
+		res = append(res, fld.getIncludes()...)
+	}
+	return res
+}
+
+func (f Field) IsIncluded(name string) bool {
+	return data.GlobMatch(name, f.GetIncludes()...)
+}
+
+func (f Field) SetTitle(title string) *Field {
+	f.Title = title
+	return &f
+}
+
+func (f Field) SetDescription(desc string) *Field {
+	f.Description = desc
+	return &f
+}
+
+func (f Field) AddTranslation(locale, title, desc string) *Field {
+	for i, t := range f.Translations {
+		if t.Locale == locale {
+			f.Translations[i] = Translation{Locale: locale, Title: title, Description: desc}
+			return &f
+		}
+	}
+
+	f.Translations = append(f.Translations, Translation{Locale: locale, Title: title, Description: desc})
+	return &f
+}
+
+func (f Field) SetSingleLocale(r bool) *Field {
+	f.SingleLocale = r
+	return &f
+}
+
+func (f Field) SetIndexed(r bool) *Field {
+	f.Indexed = r
+	return &f
+}
+
+func (f Field) SetAdditionalValues() *Field {
+	f.AdditionalValues = true
+	return &f
+}
+
+func (f Field) SetUnique(r bool) *Field {
+	f.Unique = r
+	return &f
+}
+
+func (f Field) SetTextSearch(r bool) *Field {
+	f.TextSearch = r
+	return &f
+}
+
+func (f Field) SetCondition(c string) *Field {
+	f.Condition = c
+	return &f
+}
+
+func (f *Field) MustEnabled(ctx context.Context) bool {
+	if enabled, err := f.IsEnabled(ctx); !enabled || err != nil {
+		return false
+	}
+	return true
+}
+
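+// IsEnabled evaluates the field's Condition expression against ctx and
+// reports whether the field is currently enabled. An empty Condition always
+// enables the field; a non-boolean result is reported as an error.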
+func (f *Field) IsEnabled(ctx context.Context) (bool, error) {
+	if f.Condition != "" {
+		out, err := expr.Eval(ctx, f.Condition, nil)
+		if err != nil {
+			return false, err
+		}
+
+		if enabled, ok := out.(bool); ok {
+			return enabled, nil
+		}
+
+		return false, errors.New("condition returns non-boolean value")
+	}
+
+	return true, nil
+}
+
+// Walk traverses the data according to the schema and applies fn, which may modify the data when needed
+func (f *Field) Walk(ctx context.Context, v interface{}, fn WalkFunc, opt ...WalkOption) (interface{}, bool, error) {
+	res, err := fn(ctx, f, v)
+
+	if err != nil {
+		return nil, false, err
+	}
+
+	if res.Changed || res.Stop {
+		return res.Value, res.Changed, err
+	}
+
+	if res.Context != nil {
+		ctx = res.Context
+	}
+
+	if walker, ok := f.GetType().(FieldWalker); ok {
+		val, changed, err := walker.Walk(ctx, f, v, fn, NewWalkOptions(opt...))
+		if err != nil {
+			return nil, false, err
+		}
+		return val, changed, err
+	}
+
+	return v, false, nil
+}
+
+// DEPRECATED
+func (f *Field) Prepare() error {
+	if preparer, ok := f.GetType().(Preparer); ok {
+		if err := preparer.Prepare(f); err != nil {
+			return err
+		}
+	}
+	for _, o := range f.Options {
+		if preparer, ok := o.(Preparer); ok {
+			if err := preparer.Prepare(f); err != nil {
+				return err
+			}
+		}
+	}
+	return nil
+}
+
+// GetField returns a field addressed by a string path
+func (f *Field) GetField(path string) *Field {
+	if path == "" {
+		switch params := f.Params.(type) {
+		case *ArrayParameters:
+			// Return the Item field when the path is given as "arr."
+			return params.Item
+		}
+		return nil
+	}
+
+	switch params := f.Params.(type) {
+	case *ObjectParameters:
+		pp := strings.SplitN(path, FieldSeparator, 2)
+
+		for k, v := range params.Fields {
+
+			p, ok := v.Params.(*ObjectParameters)
+			if ok && p.Inline {
+				f := v.GetField(path)
+				if f != nil {
+					return f
+				}
+			}
+
+			if k == pp[0] {
+				if len(pp) == 1 {
+					return v
+				}
+				return v.GetField(pp[1])
+			}
+		}
+	case Fielder:
+		return params.GetField(path)
+
+	case *ArrayParameters:
+		return params.Item.GetField(path)
+	}
+
+	return nil
+}
+
+// GetFieldsPath returns the full paths for a slice of fields
+func GetFieldsPath(flds []PathField) (res []string) {
+	for _, f := range flds {
+		res = append(res, f.Path)
+	}
+	return res
+}
+
+type FilterFunc func(*Field, string) bool
+
+func GetAll(field *Field, path string) bool { return true }
+
+func (f *Field) GetFields(filterFunc FilterFunc, pathPrefix ...string) (res []PathField) {
+	var path string
+
+	if len(pathPrefix) > 0 {
+		path = pathPrefix[0]
+	}
+
+	// is adding the root object here needed for anything?
+	if path != "" && filterFunc(f, path) {
+		res = append(res, PathField{
+			Field: *f,
+			Path:  path,
+		})
+	}
+
+	switch params := f.Params.(type) {
+	case *ObjectParameters:
+		res = append(res, getFieldsObject(path, params, filterFunc, false)...)
+	case *ArrayParameters:
+		res = append(res, getFieldsArray(path, params, filterFunc)...)
+	}
+
+	//if len(pathPrefix) > 0 {
+	//	for _, r := range res {
+	//		r.Path = strings.Join([]string{pathPrefix[0], r.Path}, FieldSeparator)
+	//	}
+	//}
+
+	return res
+}
+
+func getFieldsArray(path string, params *ArrayParameters, filterFunc FilterFunc) (res []PathField) {
+
+	switch params := params.Item.Params.(type) {
+	case *ObjectParameters:
+		res = append(res, getFieldsObject(path, params, filterFunc, params.Inline)...)
+
+	case *ArrayParameters:
+		res = append(res, getFieldsArray(path, params, filterFunc)...)
+	}
+
+	return res
+}
+
+func getFieldsObject(path string, params *ObjectParameters, filterFunc FilterFunc, ignoreInline bool) (res []PathField) {
+	for k, v := range params.Fields {
+		if v == nil {
+			continue
+		}
+
+		var newPath string
+		lastIdx := strings.LastIndex(path, ".")
+
+		if path == "" || !ignoreInline && params.Inline && lastIdx < 0 {
+			newPath = k
+		} else {
+			if !params.Inline || ignoreInline {
+				newPath = strings.Join([]string{path, k}, FieldSeparator)
+			} else {
+				newPath = strings.Join([]string{path[:lastIdx], k}, FieldSeparator)
+			}
+		}
+
+		if flds := v.GetFields(filterFunc, newPath); len(flds) > 0 {
+			res = append(res, flds...)
+		}
+	}
+
+	return res
+}
+
+func (f *Field) GetNestedFields() []*Field {
+	switch params := f.Params.(type) {
+	case *ObjectParameters:
+		flds := make([]*Field, 0, len(params.Fields))
+		for _, v := range params.Fields {
+			if v == nil {
+				continue
+			}
+			flds = append(flds, v)
+		}
+		return flds
+	case *ArrayParameters:
+		return []*Field{params.Item}
+	}
+
+	return nil
+}
+
+// Clone creates a copy of the field.
+// The reset parameter indicates whether the field parameters should be detached from nested fields.
+func (f Field) Clone(reset bool) *Field {
+	if f.UI != nil {
+		ui := *f.UI
+		f.UI = &ui
+	}
+
+	if len(f.Translations) > 0 {
+		f.Translations = append(make([]Translation, 0, len(f.Translations)), f.Translations...)
+	}
+
+	if f.Options != nil {
+		opts := make(Options)
+		for k, v := range f.Options {
+			opts[k] = v
+		}
+		f.Options = opts
+	}
+
+	if f.Params != nil {
+		f.Params = f.Params.Clone(reset)
+	}
+
+	return &f
+}
+
+func (f *Field) mergeField(fld *Field) error {
+	if f.Title == "" {
+		f.Title = fld.Title
+	}
+
+	if f.Description == "" {
+		f.Description = fld.Description
+	}
+
+	if len(f.Translations) == 0 {
+		f.Translations = fld.Translations
+	}
+
+	if f.UI == nil {
+		f.UI = fld.UI
+	}
+
+	if len(f.Includes) > 0 {
+		f.Includes = fld.Includes
+	}
+
+	if f.Params == nil {
+		f.Params = fld.Params
+	} else if fld.Params != nil {
+		type Merger interface {
+			Merge(parameters Parameters) error
+		}
+
+		if merger, ok := f.Params.(Merger); ok {
+			if err := merger.Merge(fld.Params); err != nil {
+				return err
+			}
+		}
+	}
+
+	if f.Options == nil {
+		f.Options = fld.Options
+	}
+
+	if f.Condition == "" {
+		f.Condition = fld.Condition
+	}
+
+	return nil
+}
+
+func (f *Field) Merge(fields ...*Field) error {
+	for _, fld := range fields {
+		if err := f.mergeField(fld); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+func (f *Field) loadIncludes(ctx context.Context, loader Loader, depth int) error {
+	if depth > IncludeLimit {
+		return errors.New("limit for included fields exceeded")
+	}
+
+	for _, i := range f.Includes {
+		if loader == nil {
+			panic("schema loader not set")
+		}
+		importedField, err := loader.Load(ctx, i.Ref)
+		if err != nil {
+			if i.Optional {
+				continue
+			}
+			return err
+		}
+
+		for _, fld := range importedField {
+			depth += 1
+			if err := fld.loadIncludes(ctx, loader, depth); err != nil {
+				return err
+			}
+		}
+
+		if err = f.Merge(importedField...); err != nil {
+			return err
+		}
+	}
+	for _, i := range f.GetNestedFields() {
+		if err := i.loadIncludes(ctx, loader, depth); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+func (f *Field) LoadIncludes(ctx context.Context, loader Loader) error {
+	return f.loadIncludes(ctx, loader, 0)
+}
+
+func (f *Field) LoadRef(ctx context.Context, ref string, loader Loader) error {
+	f.SetIncludes(ref)
+	return f.LoadIncludes(ctx, loader)
+}
diff --git a/pkg/schema/field/field_json.go b/pkg/schema/field/field_json.go
new file mode 100644
index 0000000000000000000000000000000000000000..dfb212044838d66da90436b127594427d3f842da
--- /dev/null
+++ b/pkg/schema/field/field_json.go
@@ -0,0 +1,86 @@
+package field
+
+import (
+	"encoding/json"
+
+	jsoniter "github.com/json-iterator/go"
+	"github.com/pkg/errors"
+)
+
+type FieldData Field
+
+type jsonField struct {
+	FieldData
+	Type   string          `json:"type"`
+	Params json.RawMessage `json:"params,omitempty"`
+}
+
+type ParametersConverter interface {
+	ConvertParameters(p Parameters) (Parameters, error)
+}
+
+type ParametersValidator interface {
+	ValidateParameters(p Parameters) error
+}
+
+func (f *Field) UnmarshalJSON(b []byte) error {
+	var j jsonField
+	if err := jsoniter.Unmarshal(b, &j); err != nil {
+		return errors.Wrapf(err, "error unmarshal json into field")
+	}
+
+	typ, ok := GetType(j.Type)
+
+	jsonParams := j.Params
+
+	// Unknown type: create an Unknown field that keeps the original parameters
+	if !ok {
+		*f = Field(j.FieldData)
+		f.Params = &UnknownParameters{Typ: j.Type, Params: jsonParams}
+		return nil
+	}
+
+	params := typ.NewParameters()
+	if len(j.Params) > 0 {
+		if err := jsoniter.Unmarshal(jsonParams, params); err != nil {
+			return errors.Wrapf(err, "error unmarshal json into field type %s", typ.Name())
+		}
+	}
+
+	if converter, ok := typ.(ParametersConverter); ok {
+		var err error
+		if params, err = converter.ConvertParameters(params); err != nil {
+			return errors.Wrap(err, "error unmarshal json")
+		}
+	}
+
+	if validator, ok := typ.(ParametersValidator); ok {
+		var err error
+		if err = validator.ValidateParameters(params); err != nil {
+			return errors.Wrap(err, "error validate json")
+		}
+	}
+
+	*f = Field(j.FieldData)
+	f.Params = params
+	f.Prepare()
+	return nil
+}
+
+func (f *Field) MarshalJSON() ([]byte, error) {
+	j := jsonField{
+		FieldData: FieldData(*f),
+	}
+
+	j.Type = f.GetType().Name()
+
+	if f.Params != nil {
+		b, err := jsoniter.Marshal(f.Params)
+		if err != nil {
+			return nil, err
+		}
+		j.Params = b
+	}
+
+	return jsoniter.Marshal(&j)
+}
diff --git a/pkg/schema/field/init.go b/pkg/schema/field/init.go
new file mode 100644
index 0000000000000000000000000000000000000000..45c4d11c1ecdc51927109cb3202704e093c2d53d
--- /dev/null
+++ b/pkg/schema/field/init.go
@@ -0,0 +1,13 @@
+package field
+
+func init() {
+	Register(boolType)
+	Register(stringType)
+	Register(numberType)
+	Register(arrayType)
+	Register(timeType)
+	Register(objectType)
+	Register(unknownType)
+	Register(locationType)
+	Register(primaryKeyType)
+}
diff --git a/pkg/schema/field/loader.go b/pkg/schema/field/loader.go
new file mode 100644
index 0000000000000000000000000000000000000000..583ecc2c7c6bdc0d278819d2327280ec87858117
--- /dev/null
+++ b/pkg/schema/field/loader.go
@@ -0,0 +1,34 @@
+package field
+
+import (
+	"context"
+	"fmt"
+)
+
+// Loader is the schema loader interface
+type Loader interface {
+	Load(ctx context.Context, ref string) (fs []*Field, err error)
+}
+
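+// LoaderFunc adapts an ordinary function to the Loader interface; the context
+// argument is ignored by the adapter.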
+type LoaderFunc func(ref string) (fs []*Field, err error)
+
+func (f LoaderFunc) Load(ctx context.Context, ref string) (fs []*Field, err error) {
+	return f(ref)
+}
+
+type multiLoader struct {
+	loaders []Loader
+}
+
+func (c *multiLoader) Load(ctx context.Context, ref string) (fs []*Field, err error) {
+	for _, l := range c.loaders {
+		if f, err := l.Load(ctx, ref); err == nil {
+			return f, nil
+		}
+	}
+	return nil, fmt.Errorf("invalid schema reference: %s", ref)
+}
+
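+// MultiLoader combines several loaders: Load tries each of them in order and
+// returns the first successful result, or an "invalid schema reference" error
+// if none succeeds.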
+func MultiLoader(loaders ...Loader) Loader {
+	return &multiLoader{loaders: loaders}
+}
diff --git a/pkg/schema/field/location.go b/pkg/schema/field/location.go
new file mode 100644
index 0000000000000000000000000000000000000000..eb6e175552a2504b667658c8b227bc3b7ff661b8
--- /dev/null
+++ b/pkg/schema/field/location.go
@@ -0,0 +1,137 @@
+package field
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"github.com/mitchellh/mapstructure"
+	"go.mongodb.org/mongo-driver/bson"
+	"go.mongodb.org/mongo-driver/mongo"
+	"go.mongodb.org/mongo-driver/mongo/options"
+	"go.mongodb.org/mongo-driver/x/bsonx"
+)
+
+var locationType = &LocationType{}
+
+type LocationParameters struct{}
+
+func (p LocationParameters) Type() Type                  { return locationType }
+func (p LocationParameters) Clone(reset bool) Parameters { return &LocationParameters{} }
+
+func (p LocationParameters) GetMongoIndexes(path string, f *Field) []mongo.IndexModel {
+	var add, geo mongo.IndexModel
+	a := path + ".address"
+	g := path + ".geometry"
+	add.Options = options.Index().SetName(a)
+	//add.Options.SetSparse(true)
+	add.Options.SetPartialFilterExpression(bson.M{a: bson.M{"$exists": true}})
+	geo.Options = options.Index().SetName(g)
+	if f.Unique {
+		add.Options.SetUnique(true)
+		geo.Options.SetUnique(true)
+	}
+
+	if f.Indexed {
+		add.Keys = bsonx.Doc{{Key: a, Value: bsonx.Int32(1)}}
+		geo.Keys = bsonx.Doc{{Key: g, Value: bsonx.String("2dsphere")}}
+	}
+	return []mongo.IndexModel{add, geo}
+}
+
+type LocationType struct{}
+
+type GeoJSON struct {
+	Type        string    `json:"type" bson:"type" mapstructure:"type,omitempty"`
+	Coordinates []float64 `json:"coordinates" bson:"coordinates" mapstructure:"coordinates"`
+}
+
+type GeoObject struct {
+	Address  string   `json:"address,omitempty" bson:"address" mapstructure:"address,omitempty"`
+	Geometry *GeoJSON `json:"geometry,omitempty" bson:"geometry" mapstructure:"geometry,omitempty"`
+}
+
+func (LocationType) Name() string {
+	return "location"
+}
+
+func (LocationType) NewParameters() Parameters {
+	return &LocationParameters{}
+}
+
+func (LocationType) IsEmpty(v interface{}) bool {
+	loc, _ := v.(*GeoObject)
+	return loc == nil || loc.Address == "" && loc.Geometry == nil
+}
+
+func (LocationType) Decode(_ context.Context, _ *Field, v interface{}) (interface{}, error) {
+
+	if v == nil {
+		return nil, nil
+	}
+
+	var g GeoObject
+	if err := mapstructure.Decode(v, &g); err != nil {
+		return nil, err
+	}
+
+	if g.Address == "" && g.Geometry == nil {
+		return nil, errors.New("address or coordinates required")
+	}
+
+	if g.Geometry != nil {
+		if len(g.Geometry.Coordinates) != 2 {
+			return nil, errors.New("latitude and longitude required")
+		}
+
+		lon := g.Geometry.Coordinates[0]
+		lat := g.Geometry.Coordinates[1]
+
+		if lon < -180 || lon > 180 {
+			return nil, errors.New("invalid longitude values, valid are between -180 and 180")
+		}
+
+		if lat < -90 || lat > 90 {
+			return nil, errors.New("invalid latitude values, valid are between -90 and 90")
+		}
+		}
+
+		if g.Geometry.Type != "Point" {
+			g.Geometry.Type = "Point"
+		}
+	}
+
+	return &g, nil
+}
+
+func (LocationType) Encode(_ context.Context, _ *Field, v interface{}) (interface{}, error) {
+
+	if v == nil {
+		return nil, nil
+	}
+
+	g, ok := v.(*GeoObject)
+	if !ok {
+		return nil, errors.New("couldn't encode GeoObject")
+	}
+
+	res := make(map[string]interface{})
+	if g.Address != "" {
+		res["address"] = g.Address
+	}
+
+	if g.Geometry != nil {
+		if len(g.Geometry.Coordinates) != 2 {
+			return nil, errors.New("latitude and longitude required")
+		}
+
+		lon := g.Geometry.Coordinates[0]
+		lat := g.Geometry.Coordinates[1]
+
+		res["geometry"] = map[string]interface{}{"type": g.Geometry.Type, "coordinates": []interface{}{lon, lat}}
+	}
+
+	return res, nil
+}
+
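+// Location creates a field that stores a GeoObject: a free-form address
+// and/or a GeoJSON Point geometry with [longitude, latitude] coordinates.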
+func Location(o ...interface{}) *Field {
+	return NewField(&LocationParameters{}, o...)
+}
diff --git a/pkg/schema/field/location_test.go b/pkg/schema/field/location_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..5b8206b6128ae7f4bf8f4042cedc0540bf357992
--- /dev/null
+++ b/pkg/schema/field/location_test.go
@@ -0,0 +1,209 @@
+package field
+
+import (
+	"reflect"
+	"testing"
+)
+
+func TestLocationField_Decode(t *testing.T) {
+	tests := []struct {
+		name    string
+		field   *Field
+		data    interface{}
+		want    interface{}
+		wantErr bool
+	}{
+		{"Correct",
+			Location(),
+			map[string]interface{}{
+				"address":  "msk",
+				"geometry": map[string]interface{}{"type": "Point", "coordinates": []interface{}{55.7042351, 37.6152822}},
+			},
+			&GeoObject{"msk", &GeoJSON{"Point", []float64{55.7042351, 37.6152822}}},
+			false},
+		{"Correct",
+			Location(),
+			map[string]interface{}{
+				"geometry": map[string]interface{}{"type": "Point", "coordinates": []interface{}{55.7042351, 37.6152822}},
+			},
+			&GeoObject{Geometry: &GeoJSON{"Point", []float64{55.7042351, 37.6152822}}},
+			false},
+		{"Correct",
+			Location(),
+			map[string]interface{}{
+				"geometry": map[string]interface{}{"coordinates": []interface{}{55.7042351, 37.6152822}},
+			},
+			&GeoObject{Geometry: &GeoJSON{"Point", []float64{55.7042351, 37.6152822}}},
+			false},
+		{"Correct",
+			Location(),
+			map[string]interface{}{
+				"geometry": map[string]interface{}{"coordinates": []float64{55.7042351, 37.6152822}},
+			},
+			&GeoObject{Geometry: &GeoJSON{"Point", []float64{55.7042351, 37.6152822}}},
+			false},
+		{"Correct",
+			Location(),
+			map[string]interface{}{
+				"geometry": map[string]interface{}{"coordinates": []interface{}{55, 37}},
+			},
+			&GeoObject{Geometry: &GeoJSON{"Point", []float64{55, 37}}},
+			false},
+		{"Correct",
+			Location(),
+			map[string]interface{}{
+				"geometry": map[string]interface{}{"coordinates": []interface{}{180, 90}},
+			},
+			&GeoObject{Geometry: &GeoJSON{"Point", []float64{180, 90}}},
+			false},
+		{"Correct",
+			Location(),
+			map[string]interface{}{
+				"geometry": map[string]interface{}{"coordinates": []interface{}{-180, -90}},
+			},
+			&GeoObject{Geometry: &GeoJSON{"Point", []float64{-180, -90}}},
+			false},
+		{"Correct",
+			Location(),
+			map[string]interface{}{
+				"geometry": map[string]interface{}{"coordinates": []int{55, 37}},
+			},
+			&GeoObject{Geometry: &GeoJSON{"Point", []float64{55, 37}}},
+			false},
+		{"Correct",
+			Location(),
+			map[string]interface{}{
+				"address": "msk",
+			},
+			&GeoObject{Address: "msk"},
+			false},
+		{"Correct", Location(), nil, nil, false},
+
+		{"Wrong data", Location(), "", nil, true},
+		{"Wrong data", Location(), []interface{}{"55.7042351", "37.6152822"}, nil, true},
+		{"Wrong data", Location(), map[string]interface{}{"type": "Point", "coordinates": [][]interface{}{{55.7042351, 37.6152822}}}, nil, true},
+		{"Wrong data", Location(), []interface{}{55.7042351, 37.6152822, 1.0}, nil, true},
+		{"Wrong data",
+			Location(),
+			map[string]interface{}{
+				"geometry": map[string]interface{}{"coordinates": []int{55, 37, 67}},
+			},
+			nil,
+			true},
+		{"Wrong data",
+			Location(),
+			map[string]interface{}{
+				"geometry": map[string]interface{}{"coordinates": []interface{}{180}},
+			},
+			nil,
+			true},
+		{"Wrong data",
+			Location(),
+			map[string]interface{}{
+				"geometry": map[string]interface{}{"coordinates": []interface{}{-180, -90.1}},
+			},
+			nil,
+			true},
+		{"Wrong data",
+			Location(),
+			map[string]interface{}{
+				"geometry": map[string]interface{}{"coordinates": []interface{}{180.1, 90.1}},
+			},
+			nil,
+			true},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := Decode(nil, tt.field, tt.data)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("Decode() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+			if !reflect.DeepEqual(got, tt.want) {
+				t.Errorf("Decode() got = %v, want %v", got, tt.want)
+			}
+		})
+	}
+}
+
+func TestLocationField_Encode(t *testing.T) {
+	tests := []struct {
+		name    string
+		field   *Field
+		data    interface{}
+		want    interface{}
+		wantErr bool
+	}{
+		{"Correct", Location(),
+			&GeoObject{Address: "msk", Geometry: &GeoJSON{"Point", []float64{55.7042351, 37.6152822}}},
+			map[string]interface{}{
+				"address":  "msk",
+				"geometry": map[string]interface{}{"type": "Point", "coordinates": []interface{}{55.7042351, 37.6152822}}},
+			false},
+		{"Correct", Location(),
+			&GeoObject{Geometry: &GeoJSON{"Point", []float64{55.7042351, 37.6152822}}},
+			map[string]interface{}{
+				"geometry": map[string]interface{}{"type": "Point", "coordinates": []interface{}{55.7042351, 37.6152822}}},
+			false},
+		{"Correct", Location(),
+			&GeoObject{Address: "msk"},
+			map[string]interface{}{
+				"address": "msk"},
+			false},
+		{"Correct", Location(),
+			&GeoObject{Geometry: &GeoJSON{"Point", []float64{55, 37}}},
+			map[string]interface{}{
+				"geometry": map[string]interface{}{"type": "Point", "coordinates": []interface{}{55.0, 37.0}}},
+			false},
+		{"Correct", Location(),
+			&GeoObject{Geometry: &GeoJSON{"Point", []float64{180, 90}}},
+			map[string]interface{}{
+				"geometry": map[string]interface{}{"type": "Point", "coordinates": []interface{}{180.0, 90.0}}},
+			false},
+		{"Correct", Location(),
+			&GeoObject{Geometry: &GeoJSON{"Point", []float64{-180, -90}}},
+			map[string]interface{}{
+				"geometry": map[string]interface{}{"type": "Point", "coordinates": []interface{}{-180.0, -90.0}}},
+			false},
+		{"Correct", Location(), nil, nil, false},
+		{"Correct", Location(),
+			&GeoObject{},
+			map[string]interface{}{},
+			false},
+
+		{"Wrong data", Location(), "", nil, true},
+		{"Wrong data", Location(), []interface{}{55.7042351, 37.6152822}, nil, true},
+		{"Wrong data", Location(),
+			map[string]interface{}{
+				"address":  "msk",
+				"geometry": map[string]interface{}{"type": "Point", "coordinates": []interface{}{55.7042351, 37.6152822}}},
+			nil,
+			true},
+		{"Wrong data", Location(),
+			map[string]interface{}{
+				"geometry": map[string]interface{}{"type": "Point", "coordinates": []interface{}{55.7042351, 37.6152822}}},
+			nil,
+			true},
+		{"Wrong data", Location(),
+			map[string]interface{}{
+				"address": "msk"},
+			nil,
+			true},
+		{"Wrong data", Location(), &GeoJSON{}, nil, true},
+		{"Wrong data", Location(), &GeoJSON{Coordinates: []float64{55.7042351, 37.6152822}}, nil, true},
+		{"Wrong data", Location(), &GeoObject{Geometry: &GeoJSON{"Point", []float64{-180, -90, 50}}}, nil, true},
+		{"Wrong data", Location(), &GeoObject{Geometry: &GeoJSON{"Point", []float64{-180}}}, nil, true},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := Encode(nil, tt.field, tt.data)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("Encode() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+			if !reflect.DeepEqual(got, tt.want) {
+				t.Errorf("Encode() got = %v, want %v", got, tt.want)
+			}
+		})
+	}
+}
diff --git a/pkg/schema/field/number.go b/pkg/schema/field/number.go
new file mode 100644
index 0000000000000000000000000000000000000000..502a2fef7c406b4b4dc96aad34efce2e518a1e28
--- /dev/null
+++ b/pkg/schema/field/number.go
@@ -0,0 +1,112 @@
+package field
+
+import (
+	"context"
+	"math"
+
+	"github.com/pkg/errors"
+)
+
+const (
+	NumberFormatInt   = "int"
+	NumberFormatFloat = "float"
+)
+
+var numberType = &NumberType{}
+
+type NumberParameters struct {
+	Format string `json:"format,omitempty"`
+}
+
+func (NumberParameters) Type() Type                    { return numberType }
+func (p NumberParameters) Clone(reset bool) Parameters { return &p }
+
+type NumberType struct{}
+
+func (NumberType) Name() string {
+	return "number"
+}
+
+func (NumberType) NewParameters() Parameters {
+	return &NumberParameters{}
+}
+
+func (NumberType) IsEmpty(v interface{}) bool {
+	return v == nil
+}
+
+func ToNumber(i interface{}) (interface{}, error) {
+	switch v := i.(type) {
+	case int64:
+		return v, nil
+	case int:
+		return int64(v), nil
+	case int8:
+		return int64(v), nil
+	case int32:
+		return int64(v), nil
+	case uint64:
+		return v, nil
+	case uint:
+		return uint64(v), nil
+	case uint8:
+		return uint64(v), nil
+	case uint32:
+		return uint64(v), nil
+	case float32:
+		return float64(v), nil
+	case float64:
+		return v, nil
+	}
+	return 0, errors.Errorf("error converting %v to number", i)
+}
+
+func (n NumberType) Decode(ctx context.Context, field *Field, v interface{}) (interface{}, error) {
+	return n.decode(ctx, field, v)
+}
+
+func (NumberType) decode(_ context.Context, field *Field, v interface{}) (interface{}, error) {
+	params, ok := field.Params.(*NumberParameters)
+	if !ok {
+		return nil, errors.New("field parameters required")
+	}
+
+	if v == nil {
+		return v, nil
+	}
+
+	n, err := ToNumber(v)
+	if err != nil {
+		return nil, err
+	}
+
+	switch params.Format {
+	case NumberFormatInt:
+		switch i := n.(type) {
+		case int64:
+			return i, nil
+		case uint64:
+			return i, nil
+		case float64:
+			return int64(math.Round(i)), nil
+		}
+	case NumberFormatFloat:
+		switch i := n.(type) {
+		case float64:
+			return i, nil
+		case int64:
+			return float64(i), nil
+		case uint64:
+			return float64(i), nil
+		}
+	}
+	return n, nil
+}
+
+func (n NumberType) Encode(ctx context.Context, field *Field, v interface{}) (interface{}, error) {
+	return n.decode(ctx, field, v)
+}
+
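+// Number creates a numeric field; format should be NumberFormatInt or
+// NumberFormatFloat and controls how decoded values are normalised.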
+func Number(format string, o ...interface{}) *Field {
+	return NewField(&NumberParameters{Format: format}, o...)
+}
diff --git a/pkg/schema/field/number_test.go b/pkg/schema/field/number_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..e731793d12135127dbfa67d949572f52708075a6
--- /dev/null
+++ b/pkg/schema/field/number_test.go
@@ -0,0 +1,88 @@
+package field
+
+import (
+	"reflect"
+	"testing"
+)
+
+func TestNumberField_Decode(t *testing.T) {
+	tests := []struct {
+		name    string
+		field   *Field
+		data    interface{}
+		want    interface{}
+		wantErr bool
+	}{
+		{"Correct", Number("int"), int64(2), int64(2), false},     // #0
+		{"Correct", Number("int"), 2.2, int64(2), false},          // #1
+		{"Correct", Number("int"), 2, int64(2), false},            // #2
+		{"Correct", Number("int"), float32(2.2), int64(2), false}, // #3
+		{"Correct", Number("int"), float64(2.6), int64(3), false}, // #4
+		{"Correct", Number("int"), 2.6, int64(3), false},          // #5
+
+		{"Correct", Number("float"), int8(2), 2.0, false},                    // #6
+		{"Correct", Number("float"), 2.2, 2.2, false},                        // #7
+		{"Correct", Number("float"), 2, 2.0, false},                          // #8
+		{"Correct", Number("float"), float32(2.2), 2.200000047683716, false}, // #9
+		{"Correct", Number("float"), int64(2), 2.0, false},                   // #10
+
+		{"Wrong data", Number("int"), "", nil, true},         // #0
+		{"Wrong data", Number("int"), []byte(""), nil, true}, // #1
+
+		{"Wrong data", Number("float"), "", nil, true},         // #2
+		{"Wrong data", Number("float"), []byte(""), nil, true}, // #3
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := Decode(nil, tt.field, tt.data)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("Decode() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+			if !reflect.DeepEqual(got, tt.want) {
+				t.Errorf("Decode() got = %v, want %v", got, tt.want)
+			}
+		})
+	}
+}
+
+func TestNumberField_Encode(t *testing.T) {
+	tests := []struct {
+		name    string
+		field   *Field
+		data    interface{}
+		want    interface{}
+		wantErr bool
+	}{
+		{"Correct", Number("int"), int64(2), int64(2), false},     // #0
+		{"Correct", Number("int"), 2.2, int64(2), false},          // #1
+		{"Correct", Number("int"), 2, int64(2), false},            // #2
+		{"Correct", Number("int"), float32(2.2), int64(2), false}, // #3
+		{"Correct", Number("int"), float32(2.6), int64(3), false}, // #4
+		{"Correct", Number("int"), 2.6, int64(3), false},          // #5
+
+		{"Correct", Number("float"), int8(2), 2.0, false},                    // #6
+		{"Correct", Number("float"), 2.2, 2.2, false},                        // #7
+		{"Correct", Number("float"), 2, 2.0, false},                          // #8
+		{"Correct", Number("float"), float32(2.2), 2.200000047683716, false}, // #9
+		{"Correct", Number("float"), int64(2), 2.0, false},                   // #10
+
+		{"Wrong data", Number("int"), "", nil, true},         // #0
+		{"Wrong data", Number("int"), []byte(""), nil, true}, // #1
+
+		{"Wrong data", Number("float"), "", nil, true},         // #2
+		{"Wrong data", Number("float"), []byte(""), nil, true}, // #3
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := Encode(nil, tt.field, tt.data)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("Encode() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+			if !reflect.DeepEqual(got, tt.want) {
+				t.Errorf("Encode() got = %v, want %v", got, tt.want)
+			}
+		})
+	}
+}
diff --git a/pkg/schema/field/object.go b/pkg/schema/field/object.go
new file mode 100644
index 0000000000000000000000000000000000000000..d86aa2352544f68ea837152ec6799677f4624d1a
--- /dev/null
+++ b/pkg/schema/field/object.go
@@ -0,0 +1,297 @@
+package field
+
+import (
+	"context"
+	"fmt"
+	"reflect"
+	"regexp"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/expr"
+	"github.com/hashicorp/go-multierror"
+)
+
+var objectType = &ObjectType{}
+var isValidName = regexp.MustCompile(`^[a-zA-Z][\w]*$`).MatchString
+
+type ObjectParameters struct {
+	Inline bool              `json:"inline"`
+	Fields map[string]*Field `json:"fields"`
+}
+
+func (ObjectParameters) Type() Type { return objectType }
+
+func (p ObjectParameters) Clone(reset bool) Parameters {
+	if reset {
+		p.Fields = nil
+		return &p
+	}
+
+	flds := make(map[string]*Field)
+	for k, v := range p.Fields {
+		flds[k] = v.Clone(reset)
+	}
+
+	p.Fields = flds
+	return &p
+}
+
+// IsInlineObject reports whether the field name is an inline object
+func (p ObjectParameters) IsInlineObject(name string) bool {
+	fld, ok := p.Fields[name]
+	if !ok {
+		return false
+	}
+
+	if fldParams, ok := fld.Params.(*ObjectParameters); ok && fldParams.Inline {
+		return true
+	}
+
+	return false
+}
+
+// GetFields returns the object's fields.
+// Passing withInline also includes the fields declared in nested inline objects, yielding the fields for
+// all data that belongs to the current object.
+func (p ObjectParameters) GetFields(withInline bool) map[string]*Field {
+	fields := make(map[string]*Field)
+	p.getFields(withInline, fields)
+	return fields
+}
+
+func (p ObjectParameters) getFields(withInline bool, fields map[string]*Field) {
+	for k, f := range p.Fields {
+		if obj, ok := f.Params.(*ObjectParameters); ok && obj.Inline {
+			obj.getFields(withInline, fields)
+			continue
+		}
+		fields[k] = f
+	}
+}
+
+func (p *ObjectParameters) Merge(parameters Parameters) error {
+	op, ok := parameters.(*ObjectParameters)
+	if !ok {
+		return errors.New("invalid object parameters")
+	}
+	for k, fld := range op.Fields {
+		if f, ok := p.Fields[k]; ok {
+			if err := f.Merge(fld); err != nil {
+				return err
+			}
+		} else {
+			p.Fields[k] = fld
+		}
+	}
+	return nil
+}
+
+type ObjectType struct{}
+
+func (ObjectType) Name() string {
+	return "object"
+}
+
+func (ObjectType) NewParameters() Parameters {
+	return &ObjectParameters{}
+}
+
+func (ObjectType) IsEmpty(v interface{}) bool {
+	if v == nil {
+		return true
+	}
+	m := reflect.ValueOf(v)
+	return m.IsNil() || m.Len() == 0
+}
+
+type fieldNameCtx struct{}
+
+var FieldName = fieldNameCtx{}
+
+func (ObjectType) Walk(ctx context.Context, field *Field, v interface{}, fn WalkFunc, opts *WalkOptions) (interface{}, bool, error) {
+	params, ok := field.Params.(*ObjectParameters)
+	if !ok {
+		return nil, false, errors.New("field parameters required")
+	}
+
+	// The object is absent from the data, so we do not descend into its fields.
+	// To make Walk visit the fields anyway, pass an empty object.
+	// To guarantee the object is always present, use Default.
+	if !opts.WalkSchema && v == nil {
+		return nil, false, nil
+	}
+
+	m := reflect.ValueOf(v)
+
+	if m.IsValid() {
+		if m.Kind() != reflect.Map {
+			return nil, false, errors.Errorf("incorrect type: \"%s\", expected \"map\"", m.Kind())
+		}
+	}
+
+	if !opts.WalkSchema && m.IsNil() {
+		return nil, false, nil
+	}
+
+	// Add the object level to the expression variables
+	ctx = expr.WithEnvKV(ctx, "_", v)
+
+	mapNew := make(map[string]interface{})
+
+	var merr *multierror.Error
+	var changed bool
+	for name, fld := range params.Fields {
+		ctxField := context.WithValue(ctx, FieldName, name)
+
+		// If the field is an inline object, the current object's data is passed down
+		if p, ok := fld.Params.(*ObjectParameters); ok && p.Inline {
+			valueNew, valueChanged, err := fld.Walk(ctxField, v, fn, WalkOpts(opts))
+
+			if err != nil {
+				merr = multierror.Append(merr, errors.WithField(err, name))
+			}
+
+			// The value has changed and is not empty (a nil value would mean the inline object is not active)
+			if valueChanged && valueNew != nil {
+				changed = true
+			}
+
+			if valueNew != nil {
+				for n, v := range valueNew.(map[string]interface{}) {
+					mapNew[n] = v
+				}
+			}
+		} else {
+			// If there is no value, use nil
+			var value interface{}
+			if m.IsValid() && !m.IsZero() && !m.IsNil() {
+				fieldValue := m.MapIndex(reflect.ValueOf(name))
+				if fieldValue.IsValid() {
+					value = fieldValue.Interface()
+				}
+			}
+
+			valueNew, valueChanged, err := fld.Walk(ctxField, value, fn, WalkOpts(opts))
+
+			if err != nil {
+				merr = multierror.Append(merr, errors.WithField(err, name))
+			}
+
+			// If the value was changed, replace it with the new one
+			if valueChanged {
+				changed = true
+				value = valueNew
+			}
+
+			// If the value is not empty, write the field into the result
+			if value != nil {
+				mapNew[name] = value
+			}
+
+		}
+	}
+
+	if merr != nil {
+		//merr.ErrorFormat = func(i []error) string {
+		//	return fmt.Sprintf("%d error(s)", len(i))
+		//}
+		return nil, false, merr
+	}
+
+	if v == nil || !m.IsValid() || m.IsZero() || m.IsNil() {
+		return nil, false, nil
+	}
+
+	// Check whether the number of object fields has changed.
+	// An inline object ignores field count changes because it also receives the parent object's fields.
+	if !changed && !params.Inline {
+		changed = m.Len() != len(mapNew)
+	}
+
+	// An object always returns a new, modified result
+	return mapNew, changed, nil
+}
+
+func (ObjectType) ValidateParameters(p Parameters) error {
+	params, ok := p.(*ObjectParameters)
+	if !ok {
+		return nil
+	}
+
+	if len(params.Fields) > 0 {
+		for k := range params.Fields {
+			if !isValidName(k) {
+				return fmt.Errorf("field name '%s' must be in Latin, must not start with a number, "+
+					"must not contain spaces - only characters '_' can be used", k)
+			}
+		}
+	}
+	return nil
+}
+
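+// Object creates an object field from alternating name/field pairs; an
+// optional leading bool marks the object as inline, and any remaining
+// arguments are treated as field options.
+//
+// Illustrative usage:
+//
+//	f := Object("name", String(), "age", Number("int"))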
+func Object(kv ...interface{}) *Field {
+	params := &ObjectParameters{Fields: make(map[string]*Field)}
+
+	if len(kv) > 0 {
+		inline, ok := kv[0].(bool)
+		if ok {
+			params.Inline = inline
+			kv = kv[1:]
+		}
+	}
+
+	var order []interface{}
+	i := 0
+	for {
+		if i+2 > len(kv) {
+			break
+		}
+		k, v := kv[i], kv[i+1]
+		name, kOk := k.(string)
+		field, vOk := v.(*Field)
+		if !kOk || !vOk {
+			break
+		}
+
+		params.Fields[name] = field
+		order = append(order, name)
+
+		err := objectType.ValidateParameters(params)
+		if err != nil {
+			panic(err.Error())
+		}
+
+		i += 2
+	}
+
+	fld := NewField(params, kv[i:]...)
+	if len(order) > 0 {
+		fld = fld.WithUI(&UI{
+			Options: map[string]interface{}{"fields": order},
+		})
+	}
+
+	return fld
+}
+
+func AddField(field *Field, name string, fld *Field) error {
+	switch params := field.Params.(type) {
+	case *ObjectParameters:
+		if params.Fields == nil {
+			params.Fields = make(map[string]*Field)
+		}
+		params.Fields[name] = fld
+	case *ArrayParameters:
+		params.Item = fld
+	default:
+		return errors.New("AddField not supported")
+	}
+
+	return nil
+}
+
+func RemoveAllFields(obj *Field) error {
+	params, ok := obj.Params.(*ObjectParameters)
+	if !ok {
+		return errors.New("obj is not an object")
+	}
+	params.Fields = make(map[string]*Field)
+	return nil
+}
diff --git a/pkg/schema/field/object_test.go b/pkg/schema/field/object_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..2ada594bcdeba09fab5dfcc95eb947e2028766d9
--- /dev/null
+++ b/pkg/schema/field/object_test.go
@@ -0,0 +1,232 @@
+package field
+
+import (
+	"fmt"
+	"testing"
+	"time"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestObjectField_Decode(t *testing.T) {
+	w, _ := time.Parse(time.RFC3339, "2012-11-01T22:08:41Z")
+	tests := []struct {
+		name    string
+		field   *Field
+		data    interface{}
+		want    interface{}
+		wantErr bool
+	}{
+		{
+			"Correct",
+			Object("name", String(), "date", Time(), "bool", Bool()),
+			map[string]interface{}{"name": "string", "date": "2012-11-01T22:08:41Z", "bool": true},
+			map[string]interface{}{"name": "string", "date": w, "bool": true},
+			false,
+		},
+		{
+			"Remove undefined fields",
+			Object("name", String(), "date", Time(), "bool", Bool()),
+			map[string]interface{}{"name": "string", "date": "2012-11-01T22:08:41Z", "bool": true, "extra": "string"},
+			map[string]interface{}{"name": "string", "date": w, "bool": true},
+			false,
+		},
+		{
+			"Empty data",
+			Object("name", String(), "date", Time(), "bool", Bool()),
+			map[string]interface{}{},
+			map[string]interface{}{},
+			false,
+		},
+		{
+			"Nil data",
+			Object("name", String(), "date", Time(), "bool", Bool()),
+			nil,
+			nil,
+			false,
+		},
+		{
+			"Incorrect field",
+			Object("name", String(), "date", Time(), "bool", Bool()),
+			map[string]interface{}{"name": "string", "date": "2012-11-01"},
+			"decode error: 1 error occurred:\n\t* field 'date': TimeType: decode error parsing time \"2012-11-01\" as \"2006-01-02T15:04:05Z07:00\": cannot parse \"\" as \"T\"\n\n",
+			true,
+		},
+		{
+			"Incorrect type#1",
+			Object("name", String(), "date", Time(), "bool", Bool()),
+			[]interface{}{"name", "string", "date", "2012-11-01"},
+			"decode error: incorrect type: \"slice\", expected \"map\"",
+			true,
+		},
+		{
+			"Incorrect type#2",
+			Object("name", String(), "date", Time(), "bool", Bool()),
+			"",
+			"decode error: incorrect type: \"string\", expected \"map\"",
+			true,
+		},
+		{
+			"Incorrect type#3",
+			Object("name", String(), "date", Time(), "bool", Bool()),
+			"some",
+			"decode error: incorrect type: \"string\", expected \"map\"",
+			true,
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := Decode(nil, tt.field, tt.data)
+			if tt.wantErr {
+				require.Error(t, err)
+				assert.EqualError(t, err, tt.want.(string), fmt.Sprintf("Decode() error = %v, want %v", err, tt.want.(string)))
+			}
+			if !tt.wantErr {
+				require.NoError(t, err)
+				assert.Equal(t, got, tt.want, fmt.Sprintf("Decode() got = %v, want %v", got, tt.want))
+			}
+		})
+	}
+}
+
+func TestObjectField_Encode(t *testing.T) {
+	w, _ := time.Parse(time.RFC3339, "2012-11-01T22:08:41Z")
+	tests := []struct {
+		name    string
+		field   *Field
+		data    interface{}
+		want    interface{}
+		wantErr bool
+	}{
+		{
+			"Correct",
+			Object("name", String(), "date", Time(), "bool", Bool()),
+
+			map[string]interface{}{"name": "string", "date": w, "bool": true},
+			map[string]interface{}{"bool": true, "name": "string", "date": "2012-11-01T22:08:41Z"},
+			false,
+		},
+		{
+			"Additional properties",
+			Object("name", String(), "date", Time(), "bool", Bool()),
+			map[string]interface{}{"name": "string", "date": w, "extra": "string", "bool": true},
+			map[string]interface{}{"bool": true, "name": "string", "date": "2012-11-01T22:08:41Z"},
+			false,
+		},
+		{
+			"Empty data",
+			Object("name", String(), "date", Time(), "bool", Bool()),
+			map[string]interface{}{},
+			map[string]interface{}{},
+			false,
+		},
+		{
+			"Nil data",
+			Object("name", String(), "date", Time(), "bool", Bool()),
+			nil,
+			nil,
+			false,
+		},
+		{
+			"Incorrect type#1",
+			Object("name", String(), "date", Time(), "bool", Bool()),
+			[]interface{}{},
+			"encode error: incorrect type: \"slice\", expected \"map\"",
+			true,
+		},
+		{
+			"Incorrect type#2",
+			Object("name", String(), "date", Time(), "bool", Bool()),
+			"",
+			"encode error: incorrect type: \"string\", expected \"map\"",
+			true,
+		},
+		{
+			"Incorrect type#3",
+			Object("name", String(), "date", Time(), "bool", Bool()),
+			"some",
+			"encode error: incorrect type: \"string\", expected \"map\"",
+			true,
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := Encode(nil, tt.field, tt.data)
+			if tt.wantErr {
+				require.Error(t, err)
+				assert.EqualError(t, err, tt.want.(string), fmt.Sprintf("Encode() error = %v, want %v", err, tt.want.(string)))
+			}
+			if !tt.wantErr {
+				require.NoError(t, err)
+				assert.Equal(t, got, tt.want, fmt.Sprintf("Encode() got = %v, want %v", got, tt.want))
+			}
+		})
+	}
+}
+
+func TestFieldNameValidate(t *testing.T) {
+	tests := []struct {
+		testName  string
+		fieldName string
+		wantErr   bool
+	}{
+		{
+			"Correct field name",
+			"name",
+			false,
+		},
+		{
+			"Not Latin",
+			"название",
+			true,
+		},
+		{
+			"Start with a number",
+			"1name",
+			true,
+		},
+		{
+			"Contains space",
+			"field name",
+			true,
+		},
+		{
+			"Contains symbols",
+			"name!",
+			true,
+		},
+		{
+			"Contains hyphen",
+			"field-name",
+			true,
+		},
+		{
+			"Contains underscore (success)",
+			"field_name",
+			false,
+		},
+		{
+			"Start with a capital letter (success)",
+			"Name",
+			false,
+		},
+		{
+			"Contain a capital letter (success)",
+			"fieldName",
+			false,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.testName, func(t *testing.T) {
+			params := &ObjectParameters{Fields: map[string]*Field{tt.fieldName: String()}}
+			err := objectType.ValidateParameters(params)
+			if tt.wantErr {
+				require.Error(t, err)
+			} else {
+				require.NoError(t, err)
+			}
+		})
+	}
+}
diff --git a/pkg/schema/field/options.go b/pkg/schema/field/options.go
new file mode 100644
index 0000000000000000000000000000000000000000..c0c6e4e14e4a0a9073dffe91031dbe3d3ceb259f
--- /dev/null
+++ b/pkg/schema/field/options.go
@@ -0,0 +1,175 @@
+package field
+
+import (
+	"encoding/json"
+	"errors"
+	"fmt"
+	"reflect"
+	"sync"
+)
+
+type Option interface {
+	Transform(f *Field, v interface{}) (interface{}, error)
+}
+
+type PriorityOption interface {
+	GetPriority() int
+}
+
+type NamedOption interface {
+	GetName() string
+}
+
+type OptionValidator interface {
+	ValidateOption() error
+}
+
+//type jsonTransform struct {
+//	Name    string          `json:"name"`
+//	Options json.RawMessage `json:"options,omitempty"`
+//}
+//
+//func (t *Option) MarshalJSON() ([]byte, error) {
+//	b, err := json.Marshal(t.Transformation)
+//	if err != nil {
+//		return nil, err
+//	}
+//
+//	j := jsonTransform{Name: GetOptionName(t.Transformation), Options: b}
+//
+//	return json.Marshal(&j)
+//}
+//
+//func (t *Option) UnmarshalJSON(b []byte) error {
+//	var j jsonTransform
+//	if err := json.Unmarshal(b, &j); err != nil {
+//		return err
+//	}
+//
+//	i, ok := nameToOption.Load(j.Name)
+//	if !ok {
+//		return fmt.Errorf("unknown transformer name \"%s\"", j.Name)
+//	}
+//	typ := i.(reflect.Type)
+//	val := reflect.New(typ)
+//	v := val.Interface()
+//
+//	if len(j.Options) > 0 {
+//		if err := json.Unmarshal(j.Options, v); err != nil {
+//			return err
+//		}
+//	}
+//
+//	tr, _ := v.(Transformation)
+//	*t = Option{Transformation: tr}
+//	return nil
+//}
+
+var (
+	nameToOption sync.Map
+	optionToName sync.Map
+)
+
+func GetOptionName(o interface{}) string {
+	typ := reflect.TypeOf(o)
+	if typ.Kind() == reflect.Ptr {
+		typ = typ.Elem()
+	}
+	if val, ok := optionToName.Load(typ); ok {
+		v := val.(string)
+		return v
+	}
+	return ""
+}
+
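+// RegisterOption registers an option type so that it can be restored from
+// JSON by name. The name is taken from NamedOption.GetName when implemented,
+// otherwise the struct type name is used.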
+func RegisterOption(o interface{}) {
+	var name string
+	typ := reflect.TypeOf(o)
+	if typ.Kind() == reflect.Ptr {
+		typ = typ.Elem()
+	}
+
+	if namer, ok := o.(NamedOption); ok {
+		name = namer.GetName()
+	} else {
+		name = typ.Name()
+	}
+
+	nameToOption.Store(name, typ)
+	optionToName.Store(typ, name)
+}
+
+type Options map[string]interface{}
+
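+// Add stores the given options on the field, keyed by their registered
+// option name (see RegisterOption).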
+func (options *Options) Add(opts ...interface{}) {
+	if len(opts) == 0 {
+		return
+	}
+	if *options == nil {
+		*options = make(Options)
+	}
+	for _, o := range opts {
+		name := GetOptionName(o)
+		(*options)[name] = o
+	}
+}
+
+//func (options Options) MarshalJSON() ([]byte, error) {
+//	m := make(map[string]json.RawMessage)
+//
+//	for k,v := range options {
+//		name := GetOptionName(t)
+//		b, err := json.Marshal(t)
+//		if err != nil {
+//			return nil, err
+//		}
+//		m[name] = b
+//	}
+//	return json.Marshal(&m)
+//}
+
+func (options *Options) UnmarshalJSON(b []byte) error {
+	m := make(map[string]json.RawMessage)
+	*options = make(Options)
+	if err := json.Unmarshal(b, &m); err != nil {
+		return err
+	}
+
+	for name, opts := range m {
+		i, ok := nameToOption.Load(name)
+		if !ok {
+			return fmt.Errorf("unknown option name \"%s\"", name)
+		}
+		typ := i.(reflect.Type)
+		val := reflect.New(typ)
+		v := val.Interface()
+		if len(opts) > 0 {
+			if err := json.Unmarshal(opts, v); err != nil {
+				return err
+			}
+		}
+		if validator, ok := v.(OptionValidator); ok {
+			err := validator.ValidateOption()
+			if errors.Is(err, ErrSkipOption) {
+				continue
+			}
+			if err != nil {
+				return err
+			}
+		}
+		options.Add(v)
+	}
+	return nil
+}
+
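+// Transform runs the value through every stored option in turn; each entry is
+// expected to implement Option.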
+func (options Options) Transform(field *Field, v interface{}) (interface{}, error) {
+	var err error
+	for _, t := range options {
+		o := t.(Option)
+		v, err = o.Transform(field, v)
+		if err != nil {
+			return nil, err
+		}
+	}
+	return v, nil
+}
diff --git a/pkg/schema/field/primary_key.go b/pkg/schema/field/primary_key.go
new file mode 100644
index 0000000000000000000000000000000000000000..b0b26e16a91cd506231ba46307485290a138bfb8
--- /dev/null
+++ b/pkg/schema/field/primary_key.go
@@ -0,0 +1,55 @@
+package field
+
+import (
+	"context"
+	"fmt"
+	"reflect"
+)
+
+//TODO readonly
+
+var primaryKeyType = &PrimaryKeyType{}
+
+type PrimaryKeyParameters struct{}
+
+func (p PrimaryKeyParameters) Type() Type                   { return primaryKeyType }
+func (p *PrimaryKeyParameters) Clone(reset bool) Parameters { return p }
+
+type PrimaryKeyType struct{}
+
+func (p PrimaryKeyType) Name() string {
+	return "PrimaryKey"
+}
+
+func (PrimaryKeyType) NewParameters() Parameters {
+	return &PrimaryKeyParameters{}
+}
+
+func (PrimaryKeyType) IsEmpty(v interface{}) bool {
+	s, _ := v.(string)
+	return s == ""
+}
+
+func (PrimaryKeyType) Decode(_ context.Context, _ *Field, v interface{}) (interface{}, error) {
+	if v == nil {
+		return nil, nil
+	}
+	if _, ok := v.(string); ok {
+		return v, nil
+	}
+	return nil, fmt.Errorf("PrimaryKeyField decode error: unsupported value type : \"%s\"", reflect.ValueOf(v).Kind())
+}
+
+func (PrimaryKeyType) Encode(_ context.Context, _ *Field, v interface{}) (interface{}, error) {
+	if v == nil {
+		return nil, nil
+	}
+	if _, ok := v.(string); ok {
+		return v, nil
+	}
+	return nil, fmt.Errorf("PrimaryKeyField encode error: unsupported value type : \"%s\"", reflect.ValueOf(v).Kind())
+}
+
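+// PrimaryKey returns a new field of the primary-key type; values are expected to be strings.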
+func PrimaryKey(o ...interface{}) *Field {
+	return NewField(&PrimaryKeyParameters{}, o...)
+}
diff --git a/pkg/schema/field/primary_key_test.go b/pkg/schema/field/primary_key_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..f74f32cafc72612d9b1575561718e6c625334d53
--- /dev/null
+++ b/pkg/schema/field/primary_key_test.go
@@ -0,0 +1,56 @@
+package field
+
+import (
+	"reflect"
+	"testing"
+)
+
+func TestPrimaryKeyField_Decode(t *testing.T) {
+	tests := []struct {
+		name    string
+		field   *Field
+		data    interface{}
+		want    interface{}
+		wantErr bool
+	}{
+		{"Correct", PrimaryKey(), "custom_id", "custom_id", false},
+		{"Wrong data", PrimaryKey(), 2, nil, true},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := Decode(nil, tt.field, tt.data)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("Decode() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+			if !reflect.DeepEqual(got, tt.want) {
+				t.Errorf("Decode() got = %v, want %v", got, tt.want)
+			}
+		})
+	}
+}
+
+func TestPrimaryKeyField_Encode(t *testing.T) {
+	tests := []struct {
+		name    string
+		field   *Field
+		data    interface{}
+		want    interface{}
+		wantErr bool
+	}{
+		{"Correct", PrimaryKey(), "primary key", "primary key", false},
+		{"Wrong data", PrimaryKey(), 2, nil, true},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := Encode(nil, tt.field, tt.data)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("Encode() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+			if !reflect.DeepEqual(got, tt.want) {
+				t.Errorf("Encode() got = %v, want %v", got, tt.want)
+			}
+		})
+	}
+}
diff --git a/pkg/schema/field/string.go b/pkg/schema/field/string.go
new file mode 100644
index 0000000000000000000000000000000000000000..b7e548b65f5c1572cc515cad19192899843ab62f
--- /dev/null
+++ b/pkg/schema/field/string.go
@@ -0,0 +1,53 @@
+package field
+
+import (
+	"context"
+	"fmt"
+	"reflect"
+)
+
+var stringType = &StringType{}
+
+type StringParameters struct{}
+
+func (s StringParameters) Type() Type                   { return stringType }
+func (s *StringParameters) Clone(reset bool) Parameters { return s }
+
+type StringType struct{}
+
+func (s StringType) Name() string {
+	return "string"
+}
+
+func (StringType) NewParameters() Parameters {
+	return &StringParameters{}
+}
+
+func (StringType) IsEmpty(v interface{}) bool {
+	s, _ := v.(string)
+	return s == ""
+}
+
+func (StringType) Decode(_ context.Context, _ *Field, v interface{}) (interface{}, error) {
+	if v == nil {
+		return nil, nil
+	}
+	if _, ok := v.(string); ok {
+		return v, nil
+	}
+	return nil, fmt.Errorf("StringField decode error: unsupported value type : \"%s\"", reflect.ValueOf(v).Kind())
+}
+
+func (StringType) Encode(_ context.Context, _ *Field, v interface{}) (interface{}, error) {
+	if v == nil {
+		return nil, nil
+	}
+	if _, ok := v.(string); ok {
+		return v, nil
+	}
+	return nil, fmt.Errorf("StringField encode error: unsupported value type : \"%s\"", reflect.ValueOf(v).Kind())
+}
+
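+// String returns a new field of the string type with the given options,
+// e.g. field.String(validate.Required()) as used in the schema tests.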
+func String(o ...interface{}) *Field {
+	return NewField(&StringParameters{}, o...)
+}
diff --git a/pkg/schema/field/string_test.go b/pkg/schema/field/string_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..d0fca29b8ed3f0c4ec987bf6705b7ea435116e40
--- /dev/null
+++ b/pkg/schema/field/string_test.go
@@ -0,0 +1,56 @@
+package field
+
+import (
+	"reflect"
+	"testing"
+)
+
+func TestStringField_Decode(t *testing.T) {
+	tests := []struct {
+		name    string
+		field   *Field
+		data    interface{}
+		want    interface{}
+		wantErr bool
+	}{
+		{"Correct", String(), "string", "string", false},
+		{"Wrong data", String(), 2, nil, true},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := Decode(nil, tt.field, tt.data)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("Decode() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+			if !reflect.DeepEqual(got, tt.want) {
+				t.Errorf("Decode() got = %v, want %v", got, tt.want)
+			}
+		})
+	}
+}
+
+func TestStringField_Encode(t *testing.T) {
+	tests := []struct {
+		name    string
+		field   *Field
+		data    interface{}
+		want    interface{}
+		wantErr bool
+	}{
+		{"Correct", String(), "string", "string", false},
+		{"Wrong data", String(), 2, nil, true},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := Encode(nil, tt.field, tt.data)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("Encode() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+			if !reflect.DeepEqual(got, tt.want) {
+				t.Errorf("Encode() got = %v, want %v", got, tt.want)
+			}
+		})
+	}
+}
diff --git a/pkg/schema/field/time.go b/pkg/schema/field/time.go
new file mode 100644
index 0000000000000000000000000000000000000000..064906f236371d2914a0544c3f6abef83fd77f65
--- /dev/null
+++ b/pkg/schema/field/time.go
@@ -0,0 +1,85 @@
+package field
+
+import (
+	"context"
+	"fmt"
+	"reflect"
+	"time"
+
+	"github.com/pkg/errors"
+)
+
+const DefaultTimeLayout = time.RFC3339
+
+var timeType = &TimeType{}
+
+type TimeParameters struct {
+	Layout string `json:"layout,omitempty"`
+}
+
+func (p TimeParameters) Type() Type                  { return timeType }
+func (p TimeParameters) Clone(reset bool) Parameters { return &p }
+
+func (p TimeParameters) GetLayout() string {
+	if p.Layout != "" {
+		return p.Layout
+	}
+	return DefaultTimeLayout
+}
+
+type TimeType struct{}
+
+func (TimeType) Name() string {
+	return "time"
+}
+
+func (TimeType) NewParameters() Parameters {
+	return &TimeParameters{}
+}
+
+func (TimeType) IsEmpty(v interface{}) bool {
+	t, _ := v.(time.Time)
+	return t.IsZero()
+}
+
+func (TimeType) Decode(_ context.Context, field *Field, v interface{}) (interface{}, error) {
+	params, ok := field.Params.(*TimeParameters)
+	if !ok {
+		return nil, errors.New("TimeType: field type parameters required")
+	}
+
+	if v == nil {
+		return v, nil
+	}
+	switch val := v.(type) {
+
+	case string:
+		if t, err := time.Parse(params.GetLayout(), val); err != nil {
+			return nil, fmt.Errorf("TimeType: decode error: %w", err)
+		} else {
+			return t, nil
+		}
+	case time.Time:
+		return v, nil
+	}
+	return nil, fmt.Errorf("TimeType: decode: unsupported value type : \"%s\"", reflect.ValueOf(v).Kind())
+}
+
+func (TimeType) Encode(_ context.Context, field *Field, v interface{}) (interface{}, error) {
+	params, ok := field.Params.(*TimeParameters)
+	if !ok {
+		return nil, errors.New("TimeType: field type parameters required")
+	}
+
+	if v == nil {
+		return v, nil
+	}
+	if t, ok := v.(time.Time); ok {
+		return t.Format(params.GetLayout()), nil
+	}
+	return nil, fmt.Errorf("TimeType: encode: unsupported value type : \"%s\"", reflect.ValueOf(v).Kind())
+}
+
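+// Time returns a new field of the time type; unless TimeParameters.Layout is set,
+// values are parsed and formatted using DefaultTimeLayout (RFC 3339).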
+func Time(o ...interface{}) *Field {
+	return NewField(&TimeParameters{}, o...)
+}
diff --git a/pkg/schema/field/time_test.go b/pkg/schema/field/time_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..f9a8fb44552a0e0a704f7baae7ab10019c889bec
--- /dev/null
+++ b/pkg/schema/field/time_test.go
@@ -0,0 +1,61 @@
+package field
+
+import (
+	"reflect"
+	"testing"
+	"time"
+)
+
+func TestTimeField_Decode(t *testing.T) {
+	w, _ := time.Parse(time.RFC3339, "2012-11-01T22:08:41Z")
+	tests := []struct {
+		name    string
+		field   *Field
+		data    interface{}
+		want    interface{}
+		wantErr bool
+	}{
+		{"Correct", Time(), "2012-11-01T22:08:41Z", w, false},
+		{"Incorrect format", Time(), "2012-11-01", nil, true},
+		{"Incorrect type", Time(), 2, nil, true},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := Decode(nil, tt.field, tt.data)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("Decode() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+			if !reflect.DeepEqual(got, tt.want) {
+				t.Errorf("Decode() got = %v, want %v", got, tt.want)
+			}
+		})
+	}
+}
+
+func TestTimeField_Encode(t *testing.T) {
+	w, _ := time.Parse(time.RFC3339, "2012-11-01T22:08:41Z")
+	tests := []struct {
+		name    string
+		field   *Field
+		data    interface{}
+		want    interface{}
+		wantErr bool
+	}{
+		{"Correct", Time(), w, "2012-11-01T22:08:41Z", false},
+		{"Incorrect type string", Time(), "2012-11-01T22:08:41Z", nil, true},
+		{"Incorrect type int", Time(), 2, nil, true},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := Encode(nil, tt.field, tt.data)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("Encode() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+			if !reflect.DeepEqual(got, tt.want) {
+				t.Errorf("Encode() got = %v, want %v", got, tt.want)
+			}
+		})
+	}
+}
diff --git a/pkg/schema/field/type.go b/pkg/schema/field/type.go
new file mode 100644
index 0000000000000000000000000000000000000000..edae76734d562d2546d3b68e89e125c7a87744a6
--- /dev/null
+++ b/pkg/schema/field/type.go
@@ -0,0 +1,47 @@
+package field
+
+import (
+	"fmt"
+	"reflect"
+	"sync"
+)
+
+var (
+	registry sync.Map
+)
+
+// Parameters is the interface that must be implemented by the parameters of a concrete field type
+type Parameters interface {
+	Type() Type
+	Clone(reset bool) Parameters
+}
+
+// Type is a field type; it is responsible for providing, encoding and decoding parameters for this type
+type Type interface {
+	Name() string
+	NewParameters() Parameters
+	//Encode(ctx context.Context, field *Field, v interface{}) (interface{}, error)
+	//Decode(ctx context.Context, field *Field, v interface{}) (interface{}, error)
+}
+
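+// Register adds the field type to the global type registry under its name.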
+func Register(typ Type) {
+	registry.Store(typ.Name(), typ)
+}
+
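+// Unregister removes a field type from the registry; it accepts either a Type
+// or a type name and panics for any other argument.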
+func Unregister(typ interface{}) {
+	switch t := typ.(type) {
+	case Type:
+		registry.Delete(t.Name())
+	case string:
+		registry.Delete(t)
+	default:
+		panic(fmt.Sprintf("unknown type: \"%s\"", reflect.ValueOf(t).Kind()))
+	}
+}
+
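+// GetType returns the registered field type with the given name.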
+func GetType(name string) (Type, bool) {
+	if i, ok := registry.Load(name); ok {
+		return i.(Type), true
+	}
+	return nil, false
+}
diff --git a/pkg/schema/field/unknown.go b/pkg/schema/field/unknown.go
new file mode 100644
index 0000000000000000000000000000000000000000..f28a51cd1fbdc20a6b026d4504872149c939e727
--- /dev/null
+++ b/pkg/schema/field/unknown.go
@@ -0,0 +1,57 @@
+package field
+
+import (
+	"encoding/json"
+
+	jsoniter "github.com/json-iterator/go"
+	"github.com/pkg/errors"
+)
+
+var unknownType = &UnknownType{}
+
+type UnknownParameters struct {
+	Typ    string          `json:"type,omitempty"`
+	Params json.RawMessage `json:"params,omitempty"`
+}
+
+func (UnknownParameters) Type() Type                    { return unknownType }
+func (p UnknownParameters) Clone(reset bool) Parameters { return &p }
+
+type UnknownType struct{}
+
+func (UnknownType) Name() string {
+	return "unknown"
+}
+
+func (UnknownType) NewParameters() Parameters {
+	return &UnknownParameters{}
+}
+
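+// ConvertParameters attempts to restore the original field parameters from
+// UnknownParameters; if the original type is not registered, the unknown
+// parameters are returned unchanged.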
+func (UnknownType) ConvertParameters(p Parameters) (Parameters, error) {
+	unknownParams, ok := p.(*UnknownParameters)
+	if !ok {
+		return p, nil
+	}
+
+	// Check whether the original field type can be restored and return it if possible
+	typ, ok := GetType(unknownParams.Typ)
+	if !ok {
+		return p, nil
+	}
+
+	params := typ.NewParameters()
+	if len(unknownParams.Params) > 0 {
+		if err := jsoniter.Unmarshal(unknownParams.Params, params); err != nil {
+			return p, errors.Wrapf(err, "error recover field type %s", typ.Name())
+		}
+	}
+
+	return params, nil
+}
+
+func Unknown(typ string, params json.RawMessage, o ...interface{}) *Field {
+	// compile-time assertion that UnknownType implements ParametersConverter
+	var _ ParametersConverter = unknownType
+	return NewField(&UnknownParameters{Typ: typ, Params: params}, o...)
+}
diff --git a/pkg/schema/field/walk.go b/pkg/schema/field/walk.go
new file mode 100644
index 0000000000000000000000000000000000000000..d7b24a10098944b6c742421027075a0e2febe54e
--- /dev/null
+++ b/pkg/schema/field/walk.go
@@ -0,0 +1,57 @@
+package field
+
+import "context"
+
+type WalkFuncResult struct {
+	Context context.Context
+	Value   interface{}
+	Changed bool
+	Stop    bool
+}
+
+// WalkFunc is the function applied to each field's value when the data is traversed by `Walk`.
+// It returns the new value (if any), a flag indicating whether the data was changed, and an error if one occurred.
+type WalkFunc func(ctx context.Context, fld *Field, v interface{}) (result WalkFuncResult, err error)
+
+// FieldWalker is the interface that allows traversing the provided field.
+// It is implemented by field types.
+type FieldWalker interface {
+	// Walk traverses the data according to the field. It returns
+	// the traversal result, a flag indicating whether the data was changed, and an error.
+	Walk(ctx context.Context, fld *Field, v interface{}, fn WalkFunc, opts *WalkOptions) (interface{}, bool, error)
+}
+
+type WalkOptions struct {
+	WalkSchema bool
+}
+
+type WalkOption func(opts *WalkOptions)
+
+// WalkSchema indicates that the traversal should follow the schema rather than the data.
+// When walking the data, a field with no data is not processed any further.
+// When walking the schema, every field present in the schema is processed regardless of whether data is present.
+func WalkSchema() WalkOption {
+	return func(opts *WalkOptions) {
+		opts.WalkSchema = true
+	}
+}
+
+func WalkOpts(o *WalkOptions) WalkOption {
+	return func(opts *WalkOptions) {
+		*opts = *o
+	}
+}
+
+func NewWalkOptions(opt ...WalkOption) *WalkOptions {
+	opts := &WalkOptions{}
+	for _, o := range opt {
+		o(opts)
+	}
+	return opts
+}
+
+// Walker is the interface for traversing data; implementations carry the schema internally.
+// It is implemented by Field and Schema.
+type Walker interface {
+	Walk(ctx context.Context, v interface{}, fn WalkFunc, opt ...WalkOption) (interface{}, bool, error)
+}
diff --git a/pkg/schema/loader.go b/pkg/schema/loader.go
new file mode 100644
index 0000000000000000000000000000000000000000..5a646e2398eec49bdd1b8be135f9b9a54952a113
--- /dev/null
+++ b/pkg/schema/loader.go
@@ -0,0 +1,25 @@
+package schema
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+)
+
+var defaultLoader field.Loader
+
+func SetDefaultLoader(l field.Loader) {
+	defaultLoader = l
+}
+
+func GetLoader() field.Loader {
+	return defaultLoader
+}
+
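+// Load creates a new schema and loads it by reference using the default loader.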
+func Load(ctx context.Context, ref string) (*Schema, error) {
+	s := New()
+	if err := s.Field.LoadRef(ctx, ref, GetLoader()); err != nil {
+		return nil, err
+	}
+	return s, nil
+}
diff --git a/pkg/schema/modify/default.go b/pkg/schema/modify/default.go
new file mode 100644
index 0000000000000000000000000000000000000000..66ce6b89c0619ceb232050fa5ab057e4330fdfcb
--- /dev/null
+++ b/pkg/schema/modify/default.go
@@ -0,0 +1,47 @@
+package modify
+
+import (
+	"context"
+	"encoding/json"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+)
+
+type defaultValue struct {
+	Value interface{}
+	ready bool
+}
+
+func (d *defaultValue) Prepare(f *field.Field) error {
+	var err error
+	d.Value, err = field.Decode(nil, f, d.Value)
+	return err
+}
+
+func (d *defaultValue) UnmarshalJSON(bytes []byte) error {
+	return json.Unmarshal(bytes, &d.Value)
+}
+
+func (d defaultValue) MarshalJSON() ([]byte, error) {
+	return json.Marshal(d.Value)
+}
+
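+// Default returns a modifier that substitutes the given value when the field
+// value is nil, e.g. field.String(Default("some")) in the tests.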
+func Default(v interface{}) Modifier {
+	return &defaultValue{Value: v}
+}
+
+func (defaultValue) GetName() string { return "default" }
+
+func (d defaultValue) Modify(ctx context.Context, f *field.Field, v interface{}) (interface{}, bool, error) {
+	if !d.ready {
+		var err error
+		if d.Value, err = field.Decode(ctx, f, d.Value); err != nil {
+			return nil, false, err
+		}
+	}
+
+	if v == nil && d.Value != nil {
+		return d.Value, true, nil
+	}
+	return v, false, nil
+}
diff --git a/pkg/schema/modify/default_test.go b/pkg/schema/modify/default_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..b99630dc65a9249d04bc1b29ac9e9fc902c745cf
--- /dev/null
+++ b/pkg/schema/modify/default_test.go
@@ -0,0 +1,58 @@
+package modify
+
+import (
+	"testing"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestDefault(t *testing.T) {
+	now := time.Now()
+	_ = now
+	w, _ := time.Parse(time.RFC3339, "2012-11-01T22:08:41Z")
+
+	tests := []struct {
+		name    string
+		field   *field.Field
+		data    interface{}
+		want    interface{}
+		wantErr bool
+	}{
+		{"String value", field.String(Default("some")), "same", "same", false},
+		{"String", field.String(Default("some")), nil, "some", false},
+		{"Int", field.Number(field.NumberFormatInt, Default(2)), nil, int64(2), false},
+		{"Time", field.Time(Default(now)), nil, now, false},
+		{"Bool", field.Bool(Default(true)), nil, true, false},
+		{"Object: nil with field default", field.Object("name", field.String(Default("test"))), nil, nil, false},
+		{"Object: nil with object default", field.Object("name", field.String(Default("test"))).AddOptions(Default(map[string]interface{}{"name": "a"})), nil, map[string]interface{}{"name": "a"}, false},
+		{"Object: empty", field.Object(
+			"a", field.String(Default("a")),
+			"b", field.Number(field.NumberFormatInt, Default(1)),
+			"c", field.String(),
+		),
+			map[string]interface{}{},
+			map[string]interface{}{"a": "a", "b": int64(1)},
+			false},
+		{"Array of time",
+			field.Object("array", field.Array(field.Time())),
+			map[string]interface{}{"array": []interface{}{"2012-11-01T22:08:41Z", "2012-11-01T22:08:41Z"}},
+			map[string]interface{}{"array": []interface{}{w, w}},
+			false},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := field.Decode(nil, tt.field, tt.data)
+			require.NoError(t, err)
+			got, _, err = Modify(nil, tt.field, got)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("Modify() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+			assert.Equal(t, tt.want, got)
+		})
+	}
+}
diff --git a/pkg/schema/modify/modify.go b/pkg/schema/modify/modify.go
new file mode 100644
index 0000000000000000000000000000000000000000..191a428c4efac916511dd37481d7e1bac00a87bc
--- /dev/null
+++ b/pkg/schema/modify/modify.go
@@ -0,0 +1,106 @@
+package modify
+
+import (
+	"context"
+	"sort"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/expr"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+)
+
+const ModifierPriority = 1000
+
+type Modifier interface {
+	Modify(ctx context.Context, f *field.Field, v interface{}) (interface{}, bool, error)
+}
+
+type Modifiers []Modifier
+
+func (l Modifiers) Len() int { return len(l) }
+func (l Modifiers) Less(i, j int) bool {
+	pi, pj := ModifierPriority, ModifierPriority
+	if o, ok := l[i].(field.PriorityOption); ok {
+		pi = o.GetPriority()
+	}
+	if o, ok := l[j].(field.PriorityOption); ok {
+		pj = o.GetPriority()
+	}
+	if pi == pj {
+		return field.GetOptionName(l[i]) < field.GetOptionName(l[j])
+	}
+	return pi < pj
+}
+func (l Modifiers) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
+
+func getModifiers(f *field.Field) Modifiers {
+	var ms Modifiers
+	for _, o := range f.Options {
+		if m, ok := o.(Modifier); ok {
+			ms = append(ms, m)
+		}
+	}
+	sort.Sort(ms)
+	return ms
+}
+
+func applyModifiers(ctx context.Context, f *field.Field, v interface{}) (interface{}, bool, error) {
+	var err error
+	var ok, modified bool
+	modifiers := getModifiers(f)
+	for _, i := range modifiers {
+		v, ok, err = i.Modify(ctx, f, v)
+		if err != nil {
+			return nil, false, err
+		}
+		modified = modified || ok
+	}
+	return v, modified, nil
+}
+
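+// Modify walks the data with the given Walker and, for every field, applies
+// first the option modifiers (ordered by priority, then by option name) and
+// then the field type's own Modifier, if it implements one.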
+func Modify(ctx context.Context, w field.Walker, v interface{}) (interface{}, bool, error) {
+	if m, ok := v.(map[string]interface{}); ok {
+		ctx = expr.WithEnv(ctx, m)
+	}
+
+	v, c, err := w.Walk(ctx, v, func(ctx context.Context, fld *field.Field, v interface{}) (res field.WalkFuncResult, err error) {
+		var vv interface{}
+		var changed bool
+
+		if vv, changed, err = applyModifiers(ctx, fld, v); err != nil {
+			return
+		}
+
+		if changed {
+			v = vv
+		}
+
+		if modifier, ok := fld.GetType().(Modifier); ok {
+			vv, ch, err := modifier.Modify(ctx, fld, v)
+
+			if err != nil {
+				return res, err
+			}
+
+			if ch {
+				v = vv
+			}
+		}
+
+		res.Value = v
+		res.Changed = changed
+		return
+	})
+
+	if err != nil {
+		return nil, false, errors.Wrap(err, "modification error")
+	}
+
+	return v, c, nil
+}
+
+func init() {
+	field.RegisterOption(trimSpace(true))
+	field.RegisterOption(defaultValue{})
+	field.RegisterOption(value{})
+}
diff --git a/pkg/schema/modify/string.go b/pkg/schema/modify/string.go
new file mode 100644
index 0000000000000000000000000000000000000000..2f0ecc6f24e114f89d97b5e0d54251da68c4aebf
--- /dev/null
+++ b/pkg/schema/modify/string.go
@@ -0,0 +1,33 @@
+package modify
+
+import (
+	"context"
+	"fmt"
+	"reflect"
+	"strings"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+)
+
+type trimSpace bool
+
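+// TrimSpace returns a modifier that strips leading and trailing whitespace from
+// string values; non-nil values of any other type result in an error.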
+func TrimSpace() Modifier {
+	t := trimSpace(true)
+	return &t
+}
+
+func (c trimSpace) Modify(ctx context.Context, field *field.Field, v interface{}) (interface{}, bool, error) {
+	if !c {
+		return v, false, nil
+	}
+
+	if v == nil {
+		return nil, false, nil
+	}
+
+	if s, ok := v.(string); ok {
+		s = strings.TrimSpace(s)
+		return s, true, nil
+	}
+	return nil, false, fmt.Errorf("incorrect type: \"%s\", expected \"string\"", reflect.ValueOf(v).Kind())
+}
diff --git a/pkg/schema/modify/string_test.go b/pkg/schema/modify/string_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..ef90f7f6f4d6a43ff13b3ac10bda68602f09c5f4
--- /dev/null
+++ b/pkg/schema/modify/string_test.go
@@ -0,0 +1,78 @@
+package modify
+
+import (
+	"reflect"
+	"testing"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestTrimSpace(t *testing.T) {
+	tests := []struct {
+		name    string
+		field   *field.Field
+		data    interface{}
+		want    interface{}
+		wantErr bool
+	}{
+		{"String with spaces", field.String().AddOptions(TrimSpace()), "    string string   ", "string string", false},
+		{"Nil", field.String().AddOptions(TrimSpace()), nil, nil, false},
+		{"Spaces", field.String().AddOptions(TrimSpace()), "      ", "", false},
+		{"Empty string", field.String().AddOptions(TrimSpace()), "", "", false},
+		{"Not a string", field.String().AddOptions(TrimSpace()), 2, nil, true},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, _, err := Modify(nil, tt.field, tt.data)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("Modify() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+			if !reflect.DeepEqual(got, tt.want) {
+				t.Errorf("Modify() got = %v, want %v", got, tt.want)
+			}
+		})
+	}
+}
+
+func TestModify(t *testing.T) {
+	tests := []struct {
+		name    string
+		field   *field.Field
+		data    interface{}
+		wantErr bool
+		error   string
+	}{
+		{"String Length Max", field.String().AddOptions(TrimSpace()), " ", false, ""},
+		{"String Length Min", field.String().AddOptions(TrimSpace()), " ", false, ""},
+		{"Nil Length Max", field.String().AddOptions(TrimSpace()), nil, false, ""},
+		{"Nil Length Min", field.String().AddOptions(TrimSpace()), nil, false, ""},
+		{"Int Length Max", field.String().AddOptions(TrimSpace()), 1, true, "modification error: incorrect type: \"int\", expected \"string\""},
+		{"Int Length Min", field.String().AddOptions(TrimSpace()), 1, true, "modification error: incorrect type: \"int\", expected \"string\""},
+		{"Float Length Max", field.String().AddOptions(TrimSpace()), 1.0, true, "modification error: incorrect type: \"float64\", expected \"string\""},
+		{"Float Length Min", field.String().AddOptions(TrimSpace()), 1.0, true, "modification error: incorrect type: \"float64\", expected \"string\""},
+		{"Bool Length Max", field.String().AddOptions(TrimSpace()), true, true, "modification error: incorrect type: \"bool\", expected \"string\""},
+		{"Bool Length Min", field.String().AddOptions(TrimSpace()), true, true, "modification error: incorrect type: \"bool\", expected \"string\""},
+		{"Array Length Max", field.String().AddOptions(TrimSpace()), [1]string{""}, true, "modification error: incorrect type: \"array\", expected \"string\""},
+		{"Array Length Min", field.String().AddOptions(TrimSpace()), [1]string{""}, true, "modification error: incorrect type: \"array\", expected \"string\""},
+		{"Slice Length Max", field.String().AddOptions(TrimSpace()), []string{""}, true, "modification error: incorrect type: \"slice\", expected \"string\""},
+		{"Slice Length Min", field.String().AddOptions(TrimSpace()), []string{""}, true, "modification error: incorrect type: \"slice\", expected \"string\""},
+		{"Map Length Max", field.String().AddOptions(TrimSpace()), map[string]string{"": ""}, true, "modification error: incorrect type: \"map\", expected \"string\""},
+		{"Map Length Min", field.String().AddOptions(TrimSpace()), map[string]string{"": ""}, true, "modification error: incorrect type: \"map\", expected \"string\""},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			_, _, err := Modify(nil, tt.field, tt.data)
+			if tt.wantErr {
+				require.Error(t, err)
+				assert.EqualError(t, err, tt.error)
+			}
+			if !tt.wantErr {
+				require.NoError(t, err)
+			}
+		})
+	}
+}
diff --git a/pkg/schema/modify/value.go b/pkg/schema/modify/value.go
new file mode 100644
index 0000000000000000000000000000000000000000..1f87c80387c802eaf7fb7e3fec221ea263092050
--- /dev/null
+++ b/pkg/schema/modify/value.go
@@ -0,0 +1,39 @@
+package modify
+
+import (
+	"context"
+	"encoding/json"
+	"reflect"
+
+	"git.perx.ru/perxis/perxis-go/pkg/expr"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"github.com/pkg/errors"
+)
+
+type value struct {
+	Expression string
+}
+
+func (d *value) UnmarshalJSON(bytes []byte) error {
+	return json.Unmarshal(bytes, &d.Expression)
+}
+
+func (d value) MarshalJSON() ([]byte, error) {
+	return json.Marshal(d.Expression)
+}
+
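+// Value returns a modifier that evaluates the expression with the current field
+// value bound to _value and replaces the value when the result differs,
+// e.g. field.String().AddOptions(Value("_value == nil ? 'abc' : _value")) in the tests.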
+func Value(exp string) Modifier {
+	return &value{Expression: exp}
+}
+
+func (d value) Modify(ctx context.Context, f *field.Field, v interface{}) (interface{}, bool, error) {
+	val, err := expr.EvalKV(ctx, d.Expression, "_value", v)
+	if err != nil {
+		return nil, false, errors.Wrap(err, "value evaluation error")
+	}
+	// reflect.DeepEqual is used to cover the cases where val is an array or an object
+	if !reflect.DeepEqual(val, v) {
+		return val, true, nil
+	}
+	return v, false, nil
+}
diff --git a/pkg/schema/modify/value_test.go b/pkg/schema/modify/value_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..9878fc70663005f7650ac567899634a94446e8b9
--- /dev/null
+++ b/pkg/schema/modify/value_test.go
@@ -0,0 +1,63 @@
+package modify
+
+import (
+	"reflect"
+	"testing"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+)
+
+func TestValue(t *testing.T) {
+	tests := []struct {
+		name    string
+		field   *field.Field
+		data    interface{}
+		want    interface{}
+		wantErr bool
+	}{
+		{"Condition with _value", field.String().AddOptions(Value("_value == 'ab' ? _value : _value + 'b'")), "a", "ab", false},
+		{"Condition with _value", field.String().AddOptions(Value("_value == 'ab' ? _value : _value + 'b'")), "ab", "ab", false},
+		{"Default with _value", field.String().AddOptions(Value("_value == nil ? 'abc' : _value")), nil, "abc", false},
+		{"Field global", field.Object(
+			"fld1", field.String(),
+			"fld2", field.String().AddOptions(Value("fld1 + 'b'")),
+		),
+			map[string]interface{}{"fld1": "a"},
+			map[string]interface{}{"fld1": "a", "fld2": "ab"}, false},
+		{"Field local", field.Object(
+			"fld1", field.String(),
+			"fld2", field.String().AddOptions(Value("_.fld1 + 'b'")),
+		),
+			map[string]interface{}{"fld1": "a"},
+			map[string]interface{}{"fld1": "a", "fld2": "ab"}, false},
+		{"Multiple fields ", field.Object(
+			"fld1", field.String(),
+			"fld2", field.String(),
+			"fld3", field.String(),
+			"fld4", field.String(),
+			"fld5", field.String().AddOptions(Value("_.fld1 + fld2 + _.fld3+fld4")),
+		),
+			map[string]interface{}{"fld1": "a", "fld2": "b", "fld3": "c", "fld4": "d"},
+			map[string]interface{}{"fld1": "a", "fld2": "b", "fld3": "c", "fld4": "d", "fld5": "abcd"},
+			false},
+		{"Nil field error", field.Object(
+			"fld1", field.String(),
+			"fld2", field.String().AddOptions(Value("_.fld1 + 'b'")),
+		),
+			map[string]interface{}{},
+			map[string]interface{}{},
+			true},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, _, err := Modify(nil, tt.field, tt.data)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("Modify() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+			if !tt.wantErr && !reflect.DeepEqual(got, tt.want) {
+				t.Errorf("Modify() got = %v, want %v", got, tt.want)
+			}
+		})
+	}
+}
diff --git a/pkg/schema/schema.go b/pkg/schema/schema.go
new file mode 100644
index 0000000000000000000000000000000000000000..e0eca6b6ffc02511acbe3712b2f2bf83fc5bc05e
--- /dev/null
+++ b/pkg/schema/schema.go
@@ -0,0 +1,215 @@
+package schema
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/expr"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/modify"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/validate"
+)
+
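+// Schema is a content schema built on top of an object field; Loaded reports
+// whether the schema includes have already been loaded.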
+type Schema struct {
+	field.Field
+	Loaded bool `json:"loaded"`
+}
+
+func New(kv ...interface{}) *Schema {
+	return &Schema{Field: *field.Object(kv...)}
+}
+
+func NewFromField(f *field.Field) *Schema {
+	return &Schema{Field: *f}
+}
+
+var (
+	Encode   = field.Encode
+	Decode   = field.Decode
+	Modify   = modify.Modify
+	Validate = validate.Validate
+	Evaluate = field.Evaluate
+)
+
+func (s *Schema) Clone(reset bool) *Schema {
+	return &Schema{
+		Field:  *s.Field.Clone(reset),
+		Loaded: s.Loaded,
+	}
+}
+
+func (s Schema) WithIncludes(includes ...interface{}) *Schema {
+	s.Field.SetIncludes(includes...)
+	return &s
+}
+
+func (s *Schema) Load(ctx context.Context) error {
+	if s.Loaded {
+		return nil
+	}
+	return s.LoadIncludes(ctx, nil)
+}
+
+func (s *Schema) LoadIncludes(ctx context.Context, loader field.Loader) (err error) {
+	if loader == nil {
+		loader = GetLoader()
+	}
+	err = s.Field.LoadIncludes(ctx, loader)
+	if err == nil {
+		s.Loaded = true
+	}
+	return
+}
+
+func (s *Schema) Modify(ctx context.Context, data map[string]interface{}) (res map[string]interface{}, err error) {
+	if err = s.Load(ctx); err != nil {
+		return nil, err
+	}
+
+	v, _, err := Modify(ctx, s, data)
+	if err != nil || v == nil {
+		return
+	}
+
+	res, _ = v.(map[string]interface{})
+	return
+}
+
+func (s *Schema) Validate(ctx context.Context, data map[string]interface{}) (err error) {
+	if err = s.Load(ctx); err != nil {
+		return err
+	}
+
+	return Validate(ctx, s, data)
+}
+
+func (s *Schema) Evaluate(ctx context.Context, data map[string]interface{}) (res map[string]interface{}, err error) {
+	if err = s.Load(ctx); err != nil {
+		return nil, err
+	}
+
+	v, err := Evaluate(ctx, s, data)
+	if err != nil || v == nil {
+		return
+	}
+	res, _ = v.(map[string]interface{})
+	return
+}
+
+func (s *Schema) Decode(ctx context.Context, v interface{}) (res map[string]interface{}, err error) {
+	if err = s.Load(ctx); err != nil {
+		return nil, err
+	}
+
+	if v, err = Decode(ctx, s, v); err != nil {
+		return nil, err
+	}
+	res, _ = v.(map[string]interface{})
+	return
+}
+
+func (s *Schema) Encode(ctx context.Context, v interface{}) (interface{}, error) {
+	if err := s.Load(ctx); err != nil {
+		return nil, err
+	}
+
+	var res interface{}
+	var err error
+
+	if res, err = Encode(ctx, s, v); err != nil {
+		return nil, err
+	}
+
+	return res, nil
+}
+
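+// ToValue runs the full processing pipeline over the data: Decode, Modify,
+// Evaluate and Validate.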
+func (s *Schema) ToValue(ctx context.Context, data map[string]interface{}) (res map[string]interface{}, err error) {
+	if err = s.Load(ctx); err != nil {
+		return nil, err
+	}
+
+	if data, err = s.Decode(ctx, data); err != nil {
+		return nil, err
+	}
+	if data, err = s.Modify(ctx, data); err != nil {
+		return nil, err
+	}
+	if data, err = s.Evaluate(ctx, data); err != nil {
+		return nil, err
+	}
+	if err = s.Validate(ctx, data); err != nil {
+		return nil, err
+	}
+	return data, err
+}
+
+type parentFieldCtxKey struct{}
+
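+// Introspect repeatedly walks the schema against the data until no more changes
+// occur: values of fields disabled by their conditions are dropped, and a schema
+// copy containing only the enabled fields is built. The number of passes is
+// limited by field.EvaluatePassesLimit.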
+func (s *Schema) Introspect(ctx context.Context, data map[string]interface{}) (map[string]interface{}, *Schema, error) {
+	if err := s.Load(ctx); err != nil {
+		return nil, nil, err
+	}
+
+	var err error
+
+	chg := true
+	val := data
+	i := 0
+
+	var mutatedSchema *Schema
+
+	for chg {
+		mutatedSchema = nil
+
+		var res interface{}
+		res, chg, err = s.Walk(expr.WithEnv(ctx, val), val, func(ctx context.Context, f *field.Field, v interface{}) (res field.WalkFuncResult, err error) {
+			parent, _ := ctx.Value(parentFieldCtxKey{}).(*field.Field)
+			name, _ := ctx.Value(field.FieldName).(string)
+			enabled, err := f.IsEnabled(ctx)
+			if err != nil {
+				return
+			}
+
+			if !enabled {
+				res.Stop = true
+				if v != nil {
+					res.Changed = true
+				}
+				return
+			}
+
+			fld := f.Clone(true)
+			if mutatedSchema == nil {
+				mutatedSchema = &Schema{Field: *fld}
+				fld = &mutatedSchema.Field
+			}
+
+			if parent != nil && name != "" {
+				field.AddField(parent, name, fld)
+			}
+
+			ctx = context.WithValue(ctx, parentFieldCtxKey{}, fld)
+			res.Context = ctx
+
+			return
+		}, field.WalkSchema())
+
+		if err != nil {
+			return nil, nil, errors.Wrap(err, "evaluation error")
+		}
+
+		if res != nil {
+			val = res.(map[string]interface{})
+		} else {
+			val = nil
+		}
+
+		i++
+
+		if i > field.EvaluatePassesLimit {
+			return nil, nil, errors.New("fail to evaluate data conditions")
+		}
+	}
+
+	return val, mutatedSchema, nil
+}
diff --git a/pkg/schema/schema_json.go b/pkg/schema/schema_json.go
new file mode 100644
index 0000000000000000000000000000000000000000..906acb5d0d361611d01f43d10bd0f80377ccb4a7
--- /dev/null
+++ b/pkg/schema/schema_json.go
@@ -0,0 +1,54 @@
+package schema
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	jsoniter "github.com/json-iterator/go"
+)
+
+type jsonSchema struct {
+	//Field  json.RawMessage `json:"field,inline"`
+	Loaded bool `json:"loaded"`
+}
+
+func (s *Schema) UnmarshalJSON(b []byte) error {
+
+	var j *jsonSchema
+	if err := jsoniter.Unmarshal(b, &j); err != nil {
+		return errors.Wrapf(err, "error unmarshal json into field")
+	}
+	s.Loaded = j.Loaded
+
+	if err := s.Field.UnmarshalJSON(b); err != nil {
+		return err
+	}
+
+	//if len(j.Field) > 0 {
+	//	if err := s.Field.UnmarshalJSON(j.Field); err != nil {
+	//		return err
+	//	}
+	//	//if err := jsoniter.Unmarshal(j.Field, &s.Field); err != nil {
+	//	//	return err
+	//	//}
+	//}
+
+	return nil
+}
+
+func (s *Schema) MarshalJSON() ([]byte, error) {
+
+	jsonField, err := s.Field.MarshalJSON()
+	if err != nil {
+		return nil, err
+	}
+
+	jsonSch, err := jsoniter.Marshal(jsonSchema{
+		//Field: b,
+		Loaded: s.Loaded,
+	})
+	if err != nil {
+		return nil, err
+	}
+	jsonSch[0] = ',' // alternatively, jsonField could first be unmarshalled into a map[string]interface{}, the extra field added, and the result marshalled again
+
+	return append(jsonField[:len(jsonField)-1], jsonSch...), nil
+}
diff --git a/pkg/schema/test/object_test.go b/pkg/schema/test/object_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..e5af975cdb9ed8e86044e5d357530dd2420afee9
--- /dev/null
+++ b/pkg/schema/test/object_test.go
@@ -0,0 +1,1490 @@
+package test
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	"testing"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/data"
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/modify"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/validate"
+	"github.com/hashicorp/go-multierror"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestDefaultTimeField_JSON(t *testing.T) {
+	w, _ := time.Parse(time.RFC3339, "2012-11-01T22:08:41Z")
+	fld := field.Object(
+		"timeField", field.Time(modify.Default(w)),
+	)
+
+	b, err := json.MarshalIndent(fld, "", "  ")
+	require.NoError(t, err)
+
+	res := &field.Field{}
+	err = json.Unmarshal(b, res)
+	require.NoError(t, err)
+
+	assert.Equal(t, fld, res)
+}
+
+func TestStringField_JSON(t *testing.T) {
+	fld := field.String().AddOptions(validate.MaxLength(200), modify.TrimSpace())
+
+	b, err := json.MarshalIndent(fld, "", "  ")
+	require.NoError(t, err)
+
+	res := field.NewField(nil)
+	err = json.Unmarshal(b, res)
+	require.NoError(t, err)
+
+	assert.Equal(t, fld, res)
+}
+
+func TestNumberField_JSON(t *testing.T) {
+	fld := field.Number(field.NumberFormatInt).AddOptions(
+		validate.Min(0),
+		validate.Max(10),
+		validate.MultipleOf(2),
+		validate.Enum(
+			validate.EnumOpt{
+				Name:  "N 1",
+				Value: 1.0,
+			},
+			validate.EnumOpt{
+				Name:  "N 2",
+				Value: 2.0,
+			},
+		),
+	)
+
+	b, err := json.MarshalIndent(fld, "", "  ")
+	require.NoError(t, err)
+	//fmt.Println(string(b))
+
+	res := field.NewField(nil)
+	err = json.Unmarshal(b, res)
+	require.NoError(t, err)
+
+	assert.Equal(t, fld, res)
+}
+
+func TestSchema_JSON(t *testing.T) {
+	enumStr := field.String().AddOptions(
+		validate.Enum(
+			validate.EnumOpt{
+				Name:  "N 1",
+				Value: "n1",
+			}, validate.EnumOpt{
+				Name:  "N 2",
+				Value: "n2",
+			},
+		),
+	).SetAdditionalValues()
+	enumInt := field.Number(field.NumberFormatFloat).AddOptions(
+		validate.Enum(
+			validate.EnumOpt{
+				Name:  "N 1",
+				Value: 1.1,
+			}, validate.EnumOpt{
+				Name:  "N 2",
+				Value: 2.5,
+			},
+		),
+	)
+	sch := schema.New(
+		"stringField", field.String().WithUI(&field.UI{Placeholder: "Test name"}).AddOptions(modify.TrimSpace()).AddOptions(validate.MinLength(2), validate.MaxLength(10)),
+		"stringField2", field.String(modify.Default("default")),
+		"intField", field.Number("int", validate.Required()),
+		"floatField", field.Number("float").SetIndexed(true),
+		"enumStringField", enumStr,
+		"enumIntField", enumInt,
+		"timeField", field.Time().SetSingleLocale(true),
+		"arrayField", field.Array(field.String(modify.Default("default"))),
+		"objectField", field.Object("innerStringField", field.String()).WithIncludes("ref1", field.Include{Ref: "ref2", Optional: true}),
+		"evaluatedField", field.String(modify.Value("stringField2 + '_' 	")),
+	)
+	sch.Loaded = true
+
+	b, err := json.MarshalIndent(sch, "", "  ")
+	require.NoError(t, err)
+	//fmt.Println(string(b))
+
+	res := schema.New()
+	err = json.Unmarshal(b, res)
+	require.NoError(t, err)
+
+	assert.Equal(t, sch, res)
+}
+
+func TestSchemaUI_UnmarshalJSON(t *testing.T) {
+	vw := &field.View{
+		Widget:  "Widget",
+		Options: map[string]interface{}{"title": "name", "key": "name"},
+	}
+	ui := &field.UI{
+		Widget:      "Widget",
+		Placeholder: "Placeholder",
+		Options:     map[string]interface{}{"title": "name", "key": "name"},
+		ListView:    vw,
+		ReadView:    vw,
+		EditView:    vw,
+	}
+	schm := schema.New(
+		"name", field.String().WithUI(ui),
+	)
+	schm.UI = ui
+
+	j := `{
+  "ui": {
+    "widget": "Widget",
+    "placeholder": "Placeholder",
+    "options": {
+      "title": "name",
+      "key": "name"
+    },
+    "read_view": {
+      "widget": "Widget",
+      "options": {
+        "title": "name",
+        "key": "name"
+      }
+    },
+    "edit_view": {
+      "widget": "Widget",
+      "options": {
+        "title": "name",
+        "key": "name"
+      }
+    },
+    "list_view": {
+      "widget": "Widget",
+      "options": {
+        "title": "name",
+        "key": "name"
+      }
+    }
+  },
+  "type": "object",
+  "params": {
+    "inline": false,
+    "fields": {
+      "name": {
+        "ui": {
+          "widget": "Widget",
+          "placeholder": "Placeholder",
+          "options": {
+            "title": "name",
+            "key": "name"
+          },
+          "read_view": {
+            "widget": "Widget",
+            "options": {
+              "title": "name",
+              "key": "name"
+            }
+          },
+          "edit_view": {
+            "widget": "Widget",
+            "options": {
+              "title": "name",
+              "key": "name"
+            }
+          },
+          "list_view": {
+            "widget": "Widget",
+            "options": {
+              "title": "name",
+              "key": "name"
+            }
+          }
+        },
+        "type": "string",
+        "params": {}
+      }
+    }
+  },
+  "loaded": false
+}`
+
+	sch := schema.New()
+	err := sch.UnmarshalJSON([]byte(j))
+	require.NoError(t, err)
+	assert.Equal(t, sch, schm)
+}
+
+func TestSchema_GetField(t *testing.T) {
+
+	sch := schema.New(
+		"str", field.String(),
+		"num", field.Number(field.NumberFormatInt),
+		"obj", field.Object(
+			"bool", field.Bool(),
+			"arr", field.Array(field.Time()),
+			"list", field.Array(
+				field.Object(
+					"num1", field.Number(field.NumberFormatFloat),
+					"str1", field.String(),
+					"obj1", field.Object(
+						"str2", field.String(),
+					),
+				),
+			),
+			"geo", field.Location(),
+		),
+	)
+
+	data := []struct {
+		fld    string
+		exists bool
+		typ    string
+	}{
+		{"str", true, "string"},
+		{"obj.bool", true, "bool"},
+		{"obj.list.num1", true, "number"},
+		{"obj.list.obj1.str2", true, "string"},
+		{"obj_list", false, ""},
+		{"zzz", false, ""},
+		{"obj.geo", true, "location"},
+	}
+
+	for _, d := range data {
+		t.Run(d.fld, func(t *testing.T) {
+			f := sch.GetField(d.fld)
+			if d.exists {
+				require.NotNil(t, f, fmt.Sprintf("not found '%s'", d.fld))
+				assert.Equal(t, d.typ, f.GetType().Name(), fmt.Sprintf("field '%s'", d.fld))
+			} else {
+				require.Nil(t, f)
+			}
+		})
+	}
+}
+
+func TestSchema_GetField_WithInline(t *testing.T) {
+
+	sch := schema.New(
+		"str", field.String(),
+		"obj1", field.Object(
+			true,
+			"obj11", field.Object(
+				true,
+				"obj111", field.Object(
+					true,
+					"str1", field.String(),
+					"str2", field.String(),
+				),
+				"arr1", field.Array(field.Object(
+					"str3", field.String(),
+				)),
+				"arr2", field.Array(field.String()),
+			),
+		),
+		"obj2", field.Object(
+			true,
+			"a", field.String(),
+			"b", field.String(),
+		),
+		"zz", field.Object(
+			true,
+			"zz", field.Array(field.Object(
+				"str3", field.String(),
+			)),
+		),
+	)
+
+	data := []struct {
+		fld    string
+		exists bool
+		typ    string
+	}{
+		{"str", true, "string"},
+		{"a", true, "string"},
+		{"b", true, "string"},
+		{"str1", true, "string"},
+		{"str2", true, "string"},
+		{"arr1", true, "array"},
+		{"arr2", true, "array"},
+		{"arr1.str3", true, "string"},
+		{"zz.str3", true, "string"},
+	}
+
+	for _, d := range data {
+		t.Run(d.fld, func(t *testing.T) {
+			f := sch.GetField(d.fld)
+			if d.exists {
+				require.NotNil(t, f, fmt.Sprintf("not found '%s'", d.fld))
+				assert.Equal(t, d.typ, f.GetType().Name(), fmt.Sprintf("field '%s'", d.fld))
+			} else {
+				require.Nil(t, f)
+			}
+		})
+	}
+}
+
+func TestSchema_GetFields(t *testing.T) {
+	sch := schema.New(
+		"str", field.String().SetTitle("Str"),
+		"num", field.Number(field.NumberFormatInt).SetIndexed(true).SetTitle("Num"),
+		"obj", field.Object(
+			"arr", field.Array(
+				field.Array(field.Time()).SetIndexed(true).SetTitle("NestedArr"),
+			).SetTitle("Arr"),
+			"list", field.Array(
+				field.Object(
+					"obj3", field.Object(
+						"str", field.String().SetIndexed(true).SetTitle("Obj2.List.Str"),
+					).SetTitle("Obj3"),
+				).SetTitle("Obj2"),
+			).SetTitle("List"),
+			"geo", field.Location().SetTitle("Geo"),
+		).SetTitle("Obj"),
+	)
+
+	flds := sch.GetFields(func(f *field.Field, path string) bool { return true })
+	assert.Len(t, flds, 8)
+
+	paths := make([]string, 0, len(flds))
+
+	for _, fld := range flds {
+		switch fld.Path {
+		case "str":
+			assert.IsType(t, &field.StringParameters{}, fld.Params)
+		case "num":
+			assert.IsType(t, &field.NumberParameters{}, fld.Params)
+		case "obj":
+			assert.IsType(t, &field.ObjectParameters{}, fld.Params)
+		case "obj.arr":
+			assert.IsType(t, &field.ArrayParameters{}, fld.Params)
+		case "obj.list":
+			assert.IsType(t, &field.ArrayParameters{}, fld.Params)
+		case "obj.list.obj3":
+			assert.IsType(t, &field.ObjectParameters{}, fld.Params)
+		case "obj.list.obj3.str":
+			assert.IsType(t, &field.StringParameters{}, fld.Params)
+		case "obj.geo":
+			assert.IsType(t, &field.LocationParameters{}, fld.Params)
+
+		}
+
+		paths = append(paths, fld.Path)
+	}
+
+	assert.ElementsMatch(
+		t,
+		[]string{"str", "num", "obj", "obj.arr", "obj.list", "obj.list.obj3", "obj.list.obj3.str", "obj.geo"},
+		paths,
+	)
+
+}
+
+func TestSchema_GetFieldByPath(t *testing.T) {
+	sch := schema.New(
+		"str", field.String().SetTitle("Str"),
+		"num", field.Number(field.NumberFormatInt).SetIndexed(true).SetTitle("Num"),
+		"obj", field.Object(
+			"arr", field.Array(
+				field.Array(field.Time()).SetIndexed(true).SetTitle("NestedArr"),
+			).SetTitle("Arr"),
+			"list", field.Array(
+				field.Object(
+					"obj3", field.Object(
+						"str", field.String().SetIndexed(true).SetTitle("Obj2.List.Str"),
+					).SetTitle("Obj3"),
+				).SetTitle("Obj2"),
+			).SetTitle("List"),
+			"geo", field.Location().SetTitle("Geo"),
+		).SetTitle("Obj"),
+	)
+
+	dt := []struct {
+		name  string
+		paths []string
+		want  []string
+	}{
+		{
+			"all",
+			[]string{"*"},
+			[]string{"str", "num", "obj", "obj.arr", "obj.list", "obj.list.obj3", "obj.list.obj3.str", "obj.geo"},
+		},
+		{
+			"full match",
+			[]string{"str", "obj.list.obj3", "some"},
+			[]string{"str", "obj.list.obj3"},
+		},
+		{
+			"glob",
+			[]string{"str*", "obj.list*", "*geo"},
+			[]string{"str", "obj.list", "obj.list.obj3", "obj.list.obj3.str", "obj.geo"},
+		},
+	}
+
+	for _, d := range dt {
+		t.Run(d.name, func(t *testing.T) {
+			got := field.GetFieldsPath(sch.GetFields(func(f *field.Field, path string) bool {
+				return data.GlobMatch(path, d.paths...)
+			}))
+			assert.ElementsMatch(t, d.want, got)
+		})
+	}
+}
+
+func TestSchema_GetFieldsInline(t *testing.T) {
+	t.Run("Basic", func(t *testing.T) {
+		sch := schema.New(
+			"str_1", field.String(),
+			"num", field.Number(field.NumberFormatInt).SetIndexed(true),
+			"obj_1", field.Object(
+				"arr", field.Array(field.Time()).SetIndexed(true),
+				"list", field.Array(
+					field.Object(
+						"obj_2", field.Object(
+							"str_2", field.String().SetIndexed(true),
+						),
+					),
+				),
+				"geo", field.Location(),
+			),
+		)
+
+		flds := field.GetFieldsPath(sch.GetFields(func(f *field.Field, p string) bool {
+			return true
+		}, "data"))
+		assert.ElementsMatch(
+			t,
+			[]string{
+				"data",
+				"data.str_1",
+				"data.num",
+				"data.obj_1",
+				"data.obj_1.arr",
+				"data.obj_1.list",
+				"data.obj_1.list.obj_2",
+				"data.obj_1.list.obj_2.str_2",
+				"data.obj_1.geo",
+			},
+			flds,
+		)
+	})
+	t.Run("Inline fields in schema in a row", func(t *testing.T) {
+		sch := schema.New(
+			"obj_inline_1", field.Object(
+				true,
+				"inline_field1", field.String().SetUnique(true),
+				"obj_inline_2", field.Object(true,
+					"inline_field2", field.String(),
+					"arr", field.Array(field.Object(true,
+						"inline_field3", field.String(),
+					)),
+				),
+			),
+		)
+
+		flds := field.GetFieldsPath(sch.GetFields(func(f *field.Field, p string) bool {
+			return true
+		}))
+		assert.ElementsMatch(
+			t,
+			[]string{
+				"obj_inline_1",
+				"inline_field1",
+				"obj_inline_2",
+				"inline_field2",
+				"arr",
+				"arr.inline_field3",
+			},
+			flds,
+		)
+	})
+	t.Run("Inline fields in schema in a row with prefix", func(t *testing.T) {
+		sch := schema.New(
+			"obj_inline_1", field.Object(true,
+				"inline_field1", field.String().SetUnique(true),
+				"obj_inline_2", field.Object(true,
+					"inline_field2", field.String(),
+					"obj_inline_3", field.Object(true,
+						"inline_field3", field.String(),
+					),
+				),
+			),
+		)
+
+		flds := field.GetFieldsPath(sch.GetFields(func(f *field.Field, p string) bool {
+			return true
+		}, "data"))
+		assert.ElementsMatch(
+			t,
+			[]string{
+				"data",
+				"data.obj_inline_1",
+				"data.inline_field1",
+				"data.obj_inline_2",
+				"data.inline_field2",
+				"data.obj_inline_3",
+				"data.inline_field3",
+			},
+			flds,
+		)
+	})
+	t.Run("Mixed fields in schema in a row", func(t *testing.T) {
+		sch := schema.New(
+			"obj_not_inline_1", field.Object(
+				"not_inline_field_1", field.String().SetUnique(true),
+				"obj_inline_1", field.Object(true,
+					"inline_field1", field.String(),
+					"obj_not_inline_2", field.Object(
+						"not_inline_field_2", field.String(),
+						"obj_inline_2", field.Object(true,
+							"inline_field2", field.String(),
+						),
+					),
+				),
+			),
+		)
+
+		flds := field.GetFieldsPath(sch.GetFields(func(f *field.Field, p string) bool {
+			return true
+		}, "data"))
+		assert.ElementsMatch(
+			t,
+			[]string{
+				"data",
+				"data.obj_not_inline_1",
+				"data.obj_not_inline_1.not_inline_field_1",
+				"data.obj_not_inline_1.obj_inline_1",
+				"data.obj_not_inline_1.inline_field1",
+				"data.obj_not_inline_1.obj_not_inline_2",
+				"data.obj_not_inline_1.obj_not_inline_2.not_inline_field_2",
+				"data.obj_not_inline_1.obj_not_inline_2.obj_inline_2",
+				"data.obj_not_inline_1.obj_not_inline_2.inline_field2",
+			},
+			flds,
+		)
+	})
+}
+
+func TestSchema_Clone(t *testing.T) {
+	sch := schema.New(
+		"f", field.String().WithUI(&field.UI{Placeholder: "Test name"}).AddOptions(modify.TrimSpace()).AddTranslation("ru", "ф", "Поле Ф"),
+		"obj", field.Object(
+			"list", field.Array(
+				field.Object(
+					"obj", field.Object(
+						"field", field.String(),
+					),
+				),
+			),
+		),
+	)
+
+	t.Run("Simple", func(t *testing.T) {
+		f := sch.GetField("f")
+		fld := f.Clone(false)
+
+		assert.Equal(t, f.UI, fld.UI)
+		assert.Equal(t, f.Options, fld.Options)
+		assert.Equal(t, f.Translations, fld.Translations)
+		assert.Equal(t, f.Params, fld.Params)
+	})
+
+	t.Run("Reset", func(t *testing.T) {
+		f := sch.GetField("obj")
+		fld := f.Clone(true)
+
+		assert.Equal(t, f.UI, fld.UI)
+		assert.Equal(t, f.Options, fld.Options)
+		assert.Equal(t, f.Translations, fld.Translations)
+		assert.NotEqual(t, f.Params, fld.Params)
+
+		f = sch.GetField("obj.list")
+		fld = f.Clone(true)
+
+		assert.Equal(t, f.UI, fld.UI)
+		assert.Equal(t, f.Options, fld.Options)
+		assert.Equal(t, f.Translations, fld.Translations)
+		assert.NotEqual(t, f.Params, fld.Params)
+
+		f = sch.GetField("obj.list.obj")
+		fld = f.Clone(true)
+
+		assert.Equal(t, f.UI, fld.UI)
+		assert.Equal(t, f.Options, fld.Options)
+		assert.Equal(t, f.Translations, fld.Translations)
+		assert.NotEqual(t, f.Params, fld.Params)
+	})
+}
+
+func TestSchema_Modify(t *testing.T) {
+	sch := schema.New(
+		"name", field.String(validate.Required()),
+		"last_name", field.String(validate.Required()),
+		"got_nobel", field.Bool(),
+		"times", field.Number("int"),
+		"dates", field.Array(field.Time()),
+	)
+
+	in := map[string]interface{}{"last_name": "Curie", "name": "Marie"}
+	_, _, err := modify.Modify(nil, sch, in)
+	require.NoError(t, err)
+}
+
+func TestSchema_Validate(t *testing.T) {
+	sch := schema.New(
+		"name", field.String(validate.Required()),
+		"last_name", field.String(),
+		"info", field.Object(
+			"time", field.Time(),
+			"numbers", field.Number(
+				field.NumberFormatInt,
+				validate.Enum(
+					validate.EnumOpt{Name: "first", Value: 1},
+					validate.EnumOpt{Name: "second", Value: 2},
+				),
+			),
+		),
+	)
+
+	in := map[string]interface{}{"info": map[string]interface{}{"time": time.Now()}, "name": "Name"}
+	err := validate.Validate(nil, sch, in)
+	require.NoError(t, err)
+}
+
+func TestSchema_DecodeErrors(t *testing.T) {
+	sch := schema.New(
+		"name", field.String(validate.Required()),
+		"last_name", field.String(),
+		"a", field.Object(
+			"time", field.Time(),
+			"num1", field.Number(field.NumberFormatInt),
+			"num2", field.Number(field.NumberFormatInt),
+			"num3", field.Number(field.NumberFormatInt),
+			"b", field.Object(
+				"num1", field.Number(field.NumberFormatInt),
+				"num2", field.Number(field.NumberFormatInt),
+				"num3", field.Number(field.NumberFormatInt),
+			),
+			"c", field.Array(field.Number(field.NumberFormatInt)),
+			"d", field.Number(field.NumberFormatInt, validate.Max(10)),
+		),
+	)
+
+	in := map[string]interface{}{"a": map[string]interface{}{"time": time.Now(), "num1": "a", "num2": "b", "num3": "c", "d": 20,
+		"b": map[string]interface{}{"time": time.Now(), "num1": "a", "num2": "b", "num3": "c", "str": "s"}, "c": []interface{}{"a", "b", "c"}},
+		"name": "Name"}
+	_, err := schema.Decode(nil, sch, in)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "decode error")
+}
+
+func TestSchema_ValidateErrors(t *testing.T) {
+	sch := schema.New(
+		"a", field.Object(
+			"num1", field.Number(field.NumberFormatInt, validate.Required()),
+			"num2", field.Number(field.NumberFormatInt, validate.Max(10)),
+			"num3", field.Number(field.NumberFormatInt, validate.Min(10)),
+			"str1", field.String(validate.MaxLength(5)),
+			"str2", field.String(validate.MinLength(5)),
+			"str3", field.String(validate.MinLength(5), validate.Enum(validate.EnumOpt{Value: "somesome"}, validate.EnumOpt{Value: "romoromo"})),
+		),
+	)
+
+	in := map[string]interface{}{"a": map[string]interface{}{"num2": 20, "num3": 5, "str1": "123456", "str2": "123", "str3": "some"}}
+	decoded, err := schema.Decode(nil, sch, in)
+	require.NoError(t, err)
+	err = validate.Validate(nil, sch, decoded)
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "validation error")
+	var merr *multierror.Error
+	require.ErrorAs(t, err, &merr)
+	assert.Len(t, merr.Errors, 6)
+}
+
+func TestSchema_ValidateEmptyObject(t *testing.T) {
+	{
+		sch := schema.New(
+			"num1", field.Number(field.NumberFormatInt, validate.Required()),
+		)
+
+		res, err := schema.Decode(nil, sch, nil)
+		require.NoError(t, err)
+		res, _, err = modify.Modify(nil, sch, res)
+		require.NoError(t, err)
+		err = validate.Validate(nil, sch, res)
+		require.NoError(t, err, "fields of a nil object are not validated")
+	}
+	{
+		sch := schema.New(
+			"num1", field.Number(field.NumberFormatInt, validate.Required()),
+		)
+
+		res, err := schema.Decode(nil, sch, map[string]interface{}{})
+		require.NoError(t, err)
+		res, _, err = modify.Modify(nil, sch, res)
+		require.NoError(t, err)
+		err = validate.Validate(nil, sch, res)
+		require.Error(t, err, "fields of an empty object are validated")
+	}
+	{
+		sch := schema.New(
+			"num1", field.Number(field.NumberFormatInt, validate.Required()),
+		)
+
+		res, err := schema.Decode(nil, sch, map[string]interface{}{"a": "sss"})
+		require.NoError(t, err)
+		res, _, err = modify.Modify(nil, sch, res)
+		require.NoError(t, err)
+		err = validate.Validate(nil, sch, res)
+		require.Error(t, err, "поля объекта с некорректными данными проверяются")
+	}
+
+	{
+		sch := schema.New(
+			"num1", field.Number(field.NumberFormatInt, validate.Required()),
+		).AddOptions(modify.Default(map[string]interface{}{}))
+
+		res, err := schema.Decode(nil, sch, nil)
+		require.NoError(t, err)
+		res, _, err = modify.Modify(nil, sch, res)
+		require.NoError(t, err)
+		err = validate.Validate(nil, sch, res)
+		require.Error(t, err, "поля nil объекта Default данными проверяются")
+	}
+}
+
+func TestSchema_ModificationErrors(t *testing.T) {
+	sch := schema.New(
+		"a", field.Object(
+			"num1", field.Number(field.NumberFormatInt, modify.TrimSpace()),
+			"str1", field.String(modify.TrimSpace()),
+		),
+	)
+
+	in := map[string]interface{}{"a": map[string]interface{}{"num1": 20, "num3": 5, "str1": "123456", "str2": "123", "str3": "some"}}
+	decoded, err := schema.Decode(nil, sch, in)
+	require.NoError(t, err)
+	_, _, err = modify.Modify(nil, sch, decoded)
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "modification error")
+	var merr *multierror.Error
+	require.ErrorAs(t, err, &merr)
+	assert.Len(t, merr.Errors, 1)
+}
+
+func TestSchema_UnknownJSON(t *testing.T) {
+	sch := schema.New(
+		"name", field.String(validate.Required()),
+		"last_name", field.String(validate.Required()),
+		"got_nobel", field.Bool(),
+		"times", field.Number("int"),
+		"dates", field.Array(field.Time()),
+	)
+
+	b, err := json.Marshal(sch)
+	require.NoError(t, err)
+	field.Unregister("object")
+
+	s1 := schema.New()
+	err = json.Unmarshal(b, s1)
+	require.NoError(t, err)
+	assert.Equal(t, "unknown", s1.GetType().Name(), "Схема неизвестного типа должна определяться как unknown")
+
+	in := map[string]interface{}{"info": map[string]interface{}{"time": time.Now()}, "name": "Name"}
+	out, err := field.Decode(nil, s1, in)
+	require.NoError(t, err)
+	assert.Equal(t, in, out, "data of an unknown type must not change during decoding")
+	err = validate.Validate(nil, s1, in)
+	require.NoError(t, err, "data of an unknown type is not validated in depth")
+
+	b, err = json.Marshal(s1)
+	require.NoError(t, err)
+	s2 := schema.New()
+	err = json.Unmarshal(b, s2)
+	require.NoError(t, err)
+	b, err = json.Marshal(s2)
+	require.NoError(t, err)
+	assert.Equal(t, "unknown", s2.GetType().Name(), "Схема неизвестного типа должна определяться как unknown")
+	assert.Equal(t, s1, s2, "Схема не должна меняться при повторном маршалинге")
+
+	field.Register(&field.ObjectType{})
+	s3 := schema.New()
+	err = json.Unmarshal(b, s3)
+	require.NoError(t, err)
+	assert.Equal(t, "object", s3.GetType().Name(), "Схема должна восстановить тип object при восстановление регистрации типа")
+	assert.Equal(t, sch, s3, "Схема должна восстановиться при восстановление регистрации типа")
+}
+
+func TestSchema_ValidOptions(t *testing.T) {
+	t.Run("Valid Options", func(t *testing.T) {
+		schm := `{
+	"type": "object",
+	"params": {
+		"fields": {
+			"required": {
+				"options": {
+					"required": true
+				},
+				"type": "string"
+			},
+			"readonly": {
+				"options": {
+					"readonly": true
+				},
+				"type": "string"
+			},
+			"enum": {
+				"options": {
+					"enum": [{
+							"name": "One",
+							"value": "one"
+						},
+						{
+							"name": "Two",
+							"value": "two"
+						}
+					]
+				},
+				"type": "string"
+			}
+		}
+	}
+}`
+
+		s := schema.New()
+		err := json.Unmarshal([]byte(schm), s)
+		require.NoError(t, err)
+
+		required := s.GetField("required")
+		readonly := s.GetField("readonly")
+		enum := s.GetField("enum")
+
+		require.NotEmpty(t, required.Options)
+		require.NotEmpty(t, readonly.Options)
+		require.NotEmpty(t, enum.Options)
+	})
+
+	t.Run("Invalid Options", func(t *testing.T) {
+		schm := `{
+	"type": "object",
+	"params": {
+		"fields": {
+			"required": {
+				"options": {
+					"required": false
+				},
+				"type": "string"
+			},
+			"readonly": {
+				"options": {
+					"readonly": false
+				},
+				"type": "string"
+			}
+		}
+	}
+}`
+
+		s := schema.New()
+		err := json.Unmarshal([]byte(schm), s)
+		require.NoError(t, err)
+
+		required := s.GetField("required")
+		readonly := s.GetField("readonly")
+
+		require.Empty(t, required.Options)
+		require.Empty(t, readonly.Options)
+	})
+
+	t.Run("Required Enum Name", func(t *testing.T) {
+		schm := `{
+	"type": "object",
+	"params": {
+		"fields": {
+			"enum": {
+				"options": {
+					"enum": [{
+							"value": "one"
+						},
+						{
+							"value": "two"
+						}
+					]
+				},
+				"type": "string"
+			}
+		}
+	}
+}`
+		s := schema.New()
+		err := json.Unmarshal([]byte(schm), s)
+		require.Error(t, err)
+		assert.Contains(t, err.Error(), "enum name is required")
+	})
+}
+
+func TestSchema_Condition(t *testing.T) {
+	sch := schema.New(
+		"type", field.String(modify.TrimSpace()),
+		"a", field.Number(field.NumberFormatInt).SetCondition("type contains 'a'"),
+		"b", field.Number(field.NumberFormatInt, validate.Required()).SetCondition("type contains 'b'"),
+		"c", field.Number(field.NumberFormatInt).SetCondition("a==10"),
+		"obj", field.Object(
+			"a", field.Number(field.NumberFormatInt).SetCondition("type contains 'a'"),
+			"b", field.Number(field.NumberFormatInt).SetCondition("type contains 'b'"),
+			"c", field.Number(field.NumberFormatInt).SetCondition("_.a < 10"),
+			"d", field.Number(field.NumberFormatInt, modify.Default(11)).SetCondition("_.a < 10"),
+		),
+		"obj3", field.Object(
+			"fld1", field.Number(field.NumberFormatInt),
+		).SetCondition("obj.d > 10"),
+	)
+
+	tests := []struct {
+		name    string
+		data    map[string]interface{}
+		want    map[string]interface{}
+		wantErr bool
+	}{
+		{"type a",
+			map[string]interface{}{
+				"type": "a",
+				"a":    int64(10),
+				"b":    int64(10),
+				"c":    int64(1),
+				"obj":  map[string]interface{}{"a": int64(1), "b": int64(20), "c": int64(11), "d": int64(11)},
+				"obj3": map[string]interface{}{"fld1": int64(6)},
+			},
+			map[string]interface{}{
+				"type": "a",
+				"a":    int64(10),
+				"c":    int64(1),
+				"obj":  map[string]interface{}{"a": int64(1), "c": int64(11), "d": int64(11)},
+				"obj3": map[string]interface{}{"fld1": int64(6)},
+			},
+			false},
+		{"type b",
+			map[string]interface{}{
+				"type": "b",
+				"a":    int64(10),
+				"b":    int64(10),
+				"c":    int64(1),
+				"obj":  map[string]interface{}{"a": int64(1), "b": int64(20), "c": int64(11), "d": int64(11)},
+				"obj3": map[string]interface{}{"fld1": int64(6)},
+			},
+			map[string]interface{}{
+				"type": "b",
+				"b":    int64(10),
+				"obj":  map[string]interface{}{"b": int64(20)},
+			},
+			false},
+		{"type ab + default",
+			map[string]interface{}{
+				"type": " ab  ",
+				"a":    int64(1),
+				"b":    int64(10),
+				"c":    int64(1),
+				"obj":  map[string]interface{}{"a": int64(1), "b": int64(20), "c": int64(11)},
+				"obj3": map[string]interface{}{"fld1": int64(6)},
+			},
+			map[string]interface{}{
+				"type": "ab",
+				"a":    int64(1),
+				"b":    int64(10),
+				"obj":  map[string]interface{}{"a": int64(1), "b": int64(20), "c": int64(11), "d": int64(11)},
+				"obj3": map[string]interface{}{"fld1": int64(6)},
+			},
+			false},
+	}
+
+	ctx := context.Background()
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := sch.ToValue(ctx, tt.data)
+			if tt.wantErr {
+				require.Error(t, err)
+			} else {
+				require.NoError(t, err)
+			}
+			assert.Equal(t, tt.want, got)
+		})
+	}
+
+}
+
+func TestSchema_Inline(t *testing.T) {
+	sch := schema.New(
+		"a", field.String(),
+		"b", field.String(),
+		"obj", field.Object(
+			true,
+			"c", field.String(),
+			"d", field.Number(field.NumberFormatInt),
+			"inner_obj", field.Object(
+				true,
+				"f", field.String(),
+			),
+		).SetCondition("a == 'universe'"),
+		"overlap", field.Object(
+			"obj1", field.Object(
+				true,
+				"f1", field.Number(field.NumberFormatInt),
+				"f2", field.String(),
+			),
+			"obj2", field.Object(
+				true,
+				"f1", field.Number(field.NumberFormatInt),
+				"f2", field.Number(field.NumberFormatInt),
+			),
+		),
+		"arr", field.Array(
+			field.Object(
+				true,
+				"x", field.String(),
+			),
+		),
+	)
+
+	tests := []struct {
+		name    string
+		data    map[string]interface{}
+		want    map[string]interface{}
+		wantErr bool
+	}{
+		{"Condition success",
+			map[string]interface{}{"a": "universe", "b": "life", "c": "everything", "d": int64(42)},
+			map[string]interface{}{"a": "universe", "b": "life", "c": "everything", "d": int64(42)},
+			false,
+		},
+		{"Condition fail",
+			map[string]interface{}{"a": "life", "b": "universe", "c": "everything", "d": int64(42)},
+			map[string]interface{}{"a": "life", "b": "universe"},
+			false,
+		},
+		{"Condition success, level 2 inline",
+			map[string]interface{}{"a": "universe", "b": "life", "c": "everything", "d": int64(42), "f": "some"},
+			map[string]interface{}{"a": "universe", "b": "life", "c": "everything", "d": int64(42), "f": "some"},
+			false,
+		},
+		{"Condition fail, level 2 inline",
+			map[string]interface{}{"a": "life", "b": "universe", "c": "everything", "d": int64(42), "f": "some"},
+			map[string]interface{}{"a": "life", "b": "universe"},
+			false,
+		},
+		{"Overlapped",
+			map[string]interface{}{"overlap": map[string]interface{}{"f1": 42}},
+			map[string]interface{}{"overlap": map[string]interface{}{"f1": int64(42)}},
+			false,
+		},
+		{"Overlapped, type conflict",
+			map[string]interface{}{"overlap": map[string]interface{}{"f1": 42, "f2": "everything"}},
+			nil,
+			true,
+		},
+		{"Array, ignore inline",
+			map[string]interface{}{"a": "life", "b": "universe", "c": "everything", "d": int64(42), "x": "some", "arr": []interface{}{map[string]interface{}{"x": "some"}}},
+			map[string]interface{}{"a": "life", "b": "universe", "arr": []interface{}{map[string]interface{}{"x": "some"}}},
+			false,
+		},
+	}
+
+	ctx := context.Background()
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := sch.ToValue(ctx, tt.data)
+			if tt.wantErr {
+				require.Error(t, err)
+			} else {
+				require.NoError(t, err)
+			}
+			assert.Equal(t, tt.want, got)
+		})
+	}
+}
+
+func TestSchema_Introspect(t *testing.T) {
+	tests := []struct {
+		name           string
+		data           map[string]interface{}
+		schema         *schema.Schema
+		want           map[string]interface{}
+		wantFields     []string
+		dontWantFields []string
+		wantErr        bool
+	}{
+		{"single true condition",
+			map[string]interface{}{
+				"a": "b",
+				"b": "b",
+			},
+			schema.New(
+				"a", field.String(),
+				"b", field.String().SetCondition("a == 'b'"),
+			),
+			map[string]interface{}{
+				"a": "b",
+				"b": "b",
+			},
+			[]string{"a", "b"},
+			[]string{},
+			false},
+		{"single false condition",
+			map[string]interface{}{
+				"a": "a",
+				"b": "b",
+			},
+			schema.New(
+				"a", field.String(),
+				"b", field.String().SetCondition("a == 'b'"),
+			),
+			map[string]interface{}{
+				"a": "a",
+			},
+			[]string{"a"},
+			[]string{"b"},
+			false},
+		{"multiple true conditions",
+			map[string]interface{}{
+				"a": "a",
+				"b": "b",
+				"c": "c",
+				"d": "d",
+			},
+			schema.New(
+				"a", field.String(),
+				"b", field.String().SetCondition("a == 'a'"),
+				"c", field.String().SetCondition("b == 'b'"),
+				"d", field.String().SetCondition("c == 'c'"),
+			),
+			map[string]interface{}{
+				"a": "a",
+				"b": "b",
+				"c": "c",
+				"d": "d",
+			},
+			[]string{"a", "b", "c", "d"},
+			[]string{},
+			false},
+		{"multiple conditions some true",
+			map[string]interface{}{
+				"a": "a",
+				"b": "bb",
+				"c": "c",
+				"d": "d",
+			},
+			schema.New(
+				"a", field.String(),
+				"b", field.String().SetCondition("a == 'a'"),
+				"c", field.String().SetCondition("b == 'b'"),
+				"d", field.String().SetCondition("c == 'c'"),
+			),
+			map[string]interface{}{
+				"a": "a",
+				"b": "bb",
+			},
+			[]string{"a", "b"},
+			[]string{"c", "d"},
+			false},
+		{"nil data",
+			nil,
+			schema.New(
+				"a", field.String(),
+				"b", field.String(),
+			),
+			nil,
+			[]string{"a", "b"},
+			nil,
+			false},
+		{"empty data",
+			map[string]interface{}{},
+			schema.New(
+				"a", field.String(),
+				"b", field.String(),
+			),
+			map[string]interface{}{},
+			[]string{"a", "b"},
+			nil,
+			false},
+		{"data with other fields",
+			map[string]interface{}{"some": "some"},
+			schema.New(
+				"a", field.String(),
+				"b", field.String(),
+			),
+			map[string]interface{}{},
+			[]string{"a", "b"},
+			nil,
+			false},
+		{"nil object",
+			map[string]interface{}{"a": "aa"},
+			schema.New(
+				"a", field.String(),
+				"j", field.Object(
+					"aa", field.String(),
+					"bb", field.Number(field.NumberFormatInt),
+				),
+			),
+			map[string]interface{}{"a": "aa"},
+			[]string{"a", "j", "j.aa", "j.bb"},
+			nil,
+			false},
+		{
+			"object condition",
+			map[string]interface{}{"key": "a", "object_b": map[string]interface{}{"field1": "a", "field2": "a"}},
+			schema.New(
+				"key", field.String(modify.Default("default")),
+				"object_b", field.Object(
+					"field1", field.String(),
+					"field2", field.String(),
+				),
+				"object_a", field.Object(
+					"field1", field.String(),
+					"field2", field.String(),
+				).SetCondition("key=='a'"),
+			),
+			map[string]interface{}{"key": "a", "object_b": map[string]interface{}{"field1": "a", "field2": "a"}},
+			[]string{"key", "object_b", "object_a", "object_b.field1", "object_b.field2"},
+			[]string{"field1", "field2"},
+			false,
+		},
+		{
+			"object condition with nil data",
+			nil,
+			schema.New(
+				"key", field.String(modify.Default("default")),
+				"object_b", field.Object(
+					"field1", field.String(),
+					"field2", field.String(),
+				),
+				"object_a", field.Object(
+					"field1", field.String(),
+					"field2", field.String(),
+				).SetCondition("key=='a'"),
+			),
+			nil,
+			[]string{"key", "object_b", "object_b.field1", "object_b.field2"},
+			[]string{"object_a", "field1", "field2"},
+			false,
+		},
+	}
+
+	ctx := context.Background()
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			gotValue, gotSchema, err := tt.schema.Introspect(ctx, tt.data)
+			if tt.wantErr {
+				require.Error(t, err)
+				return
+			}
+			require.NoError(t, err)
+
+			for _, f := range tt.wantFields {
+				fld := gotSchema.GetField(f)
+				assert.NotNil(t, fld, fmt.Sprintf("поле '%s' должно присутствовать в схеме", f))
+			}
+
+			for _, f := range tt.dontWantFields {
+				fld := gotSchema.GetField(f)
+				assert.Nil(t, fld, fmt.Sprintf("поле '%s' должно отсутствовать в схеме", f))
+			}
+
+			//b, err := json.MarshalIndent(got.Schema, "", "  ")
+			//require.NoError(t, err)
+			//fmt.Printf("---\n%s\n---\n", b)
+			assert.Equal(t, tt.want, gotValue)
+		})
+	}
+
+}
+
+func TestSchema_IntrospectObjectArray(t *testing.T) {
+	tests := []struct {
+		name       string
+		data       map[string]interface{}
+		schema     *schema.Schema
+		want       map[string]interface{}
+		wantParams []string
+		wantErr    bool
+	}{
+		{
+			"simple",
+			map[string]interface{}{"array": []interface{}{map[string]interface{}{"field1": "a", "field2": "a"}}},
+			schema.New(
+				"array", field.Array(
+					field.Object(
+						"field1", field.String(),
+						"field2", field.String(),
+					),
+				)),
+			map[string]interface{}{"array": []interface{}{map[string]interface{}{"field1": "a", "field2": "a"}}},
+			[]string{"field1", "field2"},
+			false,
+		},
+		{
+			"empty data",
+			nil,
+			schema.New(
+				"array", field.Array(
+					field.Object(
+						"field1", field.String(),
+						"field2", field.String(),
+					),
+				)),
+			nil,
+			[]string{"field1", "field2"},
+			false,
+		},
+	}
+
+	ctx := context.Background()
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			gotValue, gotSchema, err := tt.schema.Introspect(ctx, tt.data)
+			if tt.wantErr {
+				require.Error(t, err)
+				return
+			}
+			require.NoError(t, err)
+
+			for _, f := range tt.wantParams {
+				fld := gotSchema.GetField("array")
+				p, ok := fld.Params.(*field.ArrayParameters).Item.Params.(*field.ObjectParameters)
+				assert.True(t, ok)
+				assert.Contains(t, p.Fields, f, fmt.Sprintf("поле '%s' должно присутствовать в параметрах Item", f))
+			}
+			assert.Equal(t, tt.want, gotValue)
+		})
+	}
+
+}
+
+func TestSchema_Load(t *testing.T) {
+	f1 := schema.New(
+		"f", field.String(),
+		"s3", field.Object("a1", field.Number(field.NumberFormatInt), "f", field.String()).WithIncludes("f2"),
+	)
+	f2 := schema.New("a", field.String())
+
+	sch := schema.NewFromField(field.Object(
+		"s1", field.String(),
+		"s2", field.String(),
+		"s3", field.Object("a1", field.String(), "a2", field.String()),
+		"s4", field.Array(field.Object().WithIncludes("f2")),
+	).WithIncludes("f1", "f2"),
+	)
+
+	loader := field.MultiLoader(
+		field.LoaderFunc(func(ref string) (fs []*field.Field, err error) {
+			if ref == "f1" {
+				f := f1.Field // copy
+				return []*field.Field{&f}, nil
+			}
+			return nil, fmt.Errorf("invalid schema reference: %s", ref)
+		}),
+		field.LoaderFunc(func(ref string) (fs []*field.Field, err error) {
+			if ref == "f2" {
+				f := f2.Field // copy
+				return []*field.Field{&f}, nil
+			}
+			return nil, fmt.Errorf("invalid schema reference: %s", ref)
+		}),
+	)
+	schema.SetDefaultLoader(loader)
+
+	err := sch.Load(nil)
+	require.NoError(t, err)
+
+	//b, _ := json.MarshalIndent(sch, "", "  ")
+	//fmt.Println(string(b))
+
+	assert.NotNil(t, sch.GetField("s1"))
+	assert.NotNil(t, sch.GetField("s2"))
+	assert.NotNil(t, sch.GetField("f"))
+	assert.NotNil(t, sch.GetField("a"))
+	assert.NotNil(t, sch.GetField("s3"))
+	assert.NotNil(t, sch.GetField("s3.f"))
+	assert.NotNil(t, sch.GetField("s3.a"))
+	{
+		f := sch.GetField("s3.a1")
+		require.NotNil(t, f)
+		assert.Equal(t, f.GetType(), &field.StringType{})
+	}
+	assert.NotNil(t, sch.GetField("s4.a"))
+
+}
+
+func TestSchema_WithIncludesCircle(t *testing.T) {
+	f1 := schema.New("f2", field.Object().WithIncludes("f2"))
+	f2 := schema.New("f3", field.Object().WithIncludes("f3"))
+	f3 := schema.New("f1", field.Object().WithIncludes("f1"))
+
+	loader := field.MultiLoader(
+		field.LoaderFunc(func(ref string) (fs []*field.Field, err error) {
+			if ref == "f1" {
+				f := f1.Field // copy
+				return []*field.Field{&f}, nil
+			}
+			return nil, fmt.Errorf("invalid schema reference: %s", ref)
+		}),
+		field.LoaderFunc(func(ref string) (fs []*field.Field, err error) {
+			if ref == "f2" {
+				f := f2.Field // copy
+				return []*field.Field{&f}, nil
+			}
+			return nil, fmt.Errorf("invalid schema reference: %s", ref)
+		}),
+		field.LoaderFunc(func(ref string) (fs []*field.Field, err error) {
+			if ref == "f3" {
+				f := f3.Field // copy
+				return []*field.Field{&f}, nil
+			}
+			return nil, fmt.Errorf("invalid schema reference: %s", ref)
+		}),
+	)
+	schema.SetDefaultLoader(loader)
+	sch := schema.NewFromField(field.Object().WithIncludes("f1"))
+
+	err := sch.Load(nil)
+	require.Error(t, err)
+	assert.EqualError(t, err, "limit for included fields exceeded")
+}
+
+func TestSchema_EnumUIOptions(t *testing.T) {
+	schm := `{
+	"type": "object",
+	"params": {
+		"fields": {
+			"enum": {
+				"options": {
+					"enum": [{
+							"name": "1",
+							"value": "one",
+							"ui" : {
+							  "color": "color",
+							  "icon": "icon",
+							  "spin": true,
+							  "blink": false		
+							}
+							},
+							{
+								"name": "2",
+								"value": "two"
+							}
+					]
+				},
+				"type": "string"
+			}
+		}
+	}
+}`
+	s := schema.New()
+	err := json.Unmarshal([]byte(schm), s)
+	require.NoError(t, err)
+}
diff --git a/pkg/schema/validate/array.go b/pkg/schema/validate/array.go
new file mode 100644
index 0000000000000000000000000000000000000000..09e17e289cdfeca4cbe743271b77300a4cb7a0d8
--- /dev/null
+++ b/pkg/schema/validate/array.go
@@ -0,0 +1,51 @@
+package validate
+
+import (
+	"context"
+	"fmt"
+	"reflect"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+)
+
+type maxItems int
+
+func MaxItems(max int) Validator {
+	v := maxItems(max)
+	return &v
+}
+
+func (t maxItems) Validate(_ context.Context, _ *field.Field, value interface{}) error {
+	if value == nil {
+		return nil
+	}
+	v := reflect.ValueOf(value)
+	if !v.IsValid() || v.Kind() != reflect.Array && v.Kind() != reflect.Slice {
+		return fmt.Errorf("incorrect type: \"%s\", expected \"array\"", v.Kind())
+	}
+	if t > 0 && v.Len() > int(t) {
+		return fmt.Errorf("maximum elements number is %d", t)
+	}
+	return nil
+}
+
+type minItems int
+
+func MinItems(min int) Validator {
+	v := minItems(min)
+	return &v
+}
+
+func (t minItems) Validate(_ context.Context, _ *field.Field, value interface{}) error {
+	if value == nil {
+		return nil
+	}
+	v := reflect.ValueOf(value)
+	if !v.IsValid() || v.Kind() != reflect.Array && v.Kind() != reflect.Slice {
+		return fmt.Errorf("incorrect type: \"%s\", expected \"array\"", v.Kind())
+	}
+	if t > 0 && v.Len() < int(t) {
+		return fmt.Errorf("minimum elements number is %d", t)
+	}
+	return nil
+}
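+
+// Usage sketch (mirrors the calls in array_test.go): MinItems/MaxItems are attached
+// to an array field via AddOptions and evaluated by validate.Validate on the decoded value.
+//
+//	f := field.Array(field.String()).AddOptions(MinItems(1), MaxItems(3))
+//	v, _ := field.Decode(nil, f, []interface{}{"a", "b"})
+//	err := Validate(nil, f, v) // nil: 2 items is within [1, 3]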
diff --git a/pkg/schema/validate/array_test.go b/pkg/schema/validate/array_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..57b99445d4681b3994cb9cc5e3c35822a38a4900
--- /dev/null
+++ b/pkg/schema/validate/array_test.go
@@ -0,0 +1,80 @@
+package validate
+
+import (
+	"testing"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestArray(t *testing.T) {
+	tests := []struct {
+		name    string
+		field   *field.Field
+		data    interface{}
+		wantErr bool
+	}{
+		{"Max Array", field.Array(field.String()).AddOptions(MaxItems(2)), []interface{}{"a", "b"}, false},
+		{"Max Array (invalid)", field.Array(field.String()).AddOptions(MaxItems(2)), []interface{}{"a", "b", "c"}, true},
+		{"Max Array (negative max number)", field.Array(field.String()).AddOptions(MaxItems(-2)), []interface{}{"a", "b", "c"}, false},
+		{"Max Array (zero max number)", field.Array(field.String()).AddOptions(MaxItems(0)), []interface{}{"a", "b", "c"}, false},
+
+		{"Min Array", field.Array(field.String()).AddOptions(MinItems(2)), []interface{}{"a", "b"}, false},
+		{"Min Array (invalid)", field.Array(field.String()).AddOptions(MinItems(2)), []interface{}{"a"}, true},
+		{"Min Array (negative max number)", field.Array(field.String()).AddOptions(MinItems(-2)), []interface{}{"a", "b", "c"}, false},
+		{"Min Array (zero max number)", field.Array(field.String()).AddOptions(MinItems(0)), []interface{}{"a", "b", "c"}, false},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := field.Decode(nil, tt.field, tt.data)
+			require.NoError(t, err)
+			err = Validate(nil, tt.field, got)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("Validate() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+		})
+	}
+}
+
+func TestArrayValidate(t *testing.T) {
+	tests := []struct {
+		name    string
+		field   *field.Field
+		data    interface{}
+		wantErr bool
+		error   string
+	}{
+		{"Nil Max Items", field.Array(field.String()).AddOptions(MaxItems(1)), nil, false, ""},
+		{"Nil Min Items", field.Array(field.String()).AddOptions(MinItems(1)), nil, false, ""},
+		{"Array Max Items", field.Array(field.String()).AddOptions(MaxItems(1)), [1]interface{}{1}, true, "validation error: incorrect type: \"array\", expected \"[]interface{}\""},
+		{"Array Min Items", field.Array(field.String()).AddOptions(MinItems(1)), [1]interface{}{1}, true, "validation error: incorrect type: \"array\", expected \"[]interface{}\""},
+		{"Slice Max Items", field.Array(field.String()).AddOptions(MaxItems(0)), []interface{}{}, false, ""},
+		{"Slice Min Items", field.Array(field.String()).AddOptions(MinItems(0)), []interface{}{}, false, ""},
+		{"Bool Max Items", field.Array(field.String()).AddOptions(MaxItems(1)), true, true, "validation error: incorrect type: \"bool\", expected \"array\""},
+		{"Bool Min Items", field.Array(field.String()).AddOptions(MinItems(1)), true, true, "validation error: incorrect type: \"bool\", expected \"array\""},
+		{"Int Max Items", field.Array(field.String()).AddOptions(MaxItems(1)), 1, true, "validation error: incorrect type: \"int\", expected \"array\""},
+		{"Int Min Items", field.Array(field.String()).AddOptions(MinItems(1)), 1, true, "validation error: incorrect type: \"int\", expected \"array\""},
+		{"Float Max Items", field.Array(field.String()).AddOptions(MaxItems(1)), 1.0, true, "validation error: incorrect type: \"float64\", expected \"array\""},
+		{"Float Min Items", field.Array(field.String()).AddOptions(MinItems(1)), 1.0, true, "validation error: incorrect type: \"float64\", expected \"array\""},
+		{"String Max Items", field.Array(field.String()).AddOptions(MaxItems(1)), "1", true, "validation error: incorrect type: \"string\", expected \"array\""},
+		{"String Min Items", field.Array(field.String()).AddOptions(MinItems(1)), "1", true, "validation error: incorrect type: \"string\", expected \"array\""},
+		{"Map Max Items", field.Array(field.String()).AddOptions(MaxItems(1)), map[int]string{1: "1"}, true, "validation error: incorrect type: \"map\", expected \"array\""},
+		{"Map Min Items", field.Array(field.String()).AddOptions(MinItems(1)), map[int]string{1: "1"}, true, "validation error: incorrect type: \"map\", expected \"array\""},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			err := Validate(nil, tt.field, tt.data)
+			if tt.wantErr {
+				require.Error(t, err)
+				assert.EqualError(t, err, tt.error)
+			}
+			if !tt.wantErr {
+				require.NoError(t, err)
+			}
+		})
+	}
+}
diff --git a/pkg/schema/validate/enum.go b/pkg/schema/validate/enum.go
new file mode 100644
index 0000000000000000000000000000000000000000..76d2ecc698792346e1f9dd60940e47d050593b96
--- /dev/null
+++ b/pkg/schema/validate/enum.go
@@ -0,0 +1,68 @@
+package validate
+
+import (
+	"context"
+	"fmt"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"github.com/pkg/errors"
+)
+
+type EnumUI struct {
+	Color string `json:"color,omitempty"`
+	Icon  string `json:"icon,omitempty"`
+	Spin  bool   `json:"spin,omitempty"`
+	Blink bool   `json:"blink,omitempty"`
+}
+
+type EnumOpt struct {
+	Name  string      `json:"name"`
+	Value interface{} `json:"value"`
+	UI    *EnumUI     `json:"ui,omitempty"`
+}
+
+// String formats the option value for use in error messages.
+func (o EnumOpt) String() string {
+	return fmt.Sprintf("%v", o.Value)
+}
+
+type enum []EnumOpt
+
+func Enum(opts ...EnumOpt) Validator {
+	var e enum
+	for _, i := range opts {
+		e = append(e, i)
+	}
+	return &e
+}
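+
+// Usage sketch (based on enum_test.go): the value must match one of the declared
+// options unless the field allows additional values.
+//
+//	f := field.String(Enum(EnumOpt{Name: "One", Value: "one"}, EnumOpt{Name: "Two", Value: "two"}))
+//	err := Validate(nil, f, "three") // error: the value is not among the enum options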
+
+func (t enum) Validate(ctx context.Context, f *field.Field, value interface{}) error {
+	// A missing value is not an error
+	if value == nil {
+		return nil
+	}
+
+	if f.AdditionalValues {
+		return nil
+	}
+
+	for _, i := range t {
+		enumValue, err := field.Decode(ctx, f, i.Value)
+		if err != nil {
+			return fmt.Errorf("error decode option value: %w", err)
+		}
+		if value == enumValue {
+			return nil
+		}
+	}
+
+	return errors.Errorf("value required to be one of %s", t)
+}
+
+func (t enum) ValidateOption() error {
+	for _, i := range t {
+		if i.Name == "" {
+			return errors.Errorf("enum name is required")
+		}
+	}
+	return nil
+}
diff --git a/pkg/schema/validate/enum_test.go b/pkg/schema/validate/enum_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..a473dc015f80e92a2b0845228b02023f3f92b63a
--- /dev/null
+++ b/pkg/schema/validate/enum_test.go
@@ -0,0 +1,69 @@
+package validate
+
+import (
+	"testing"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"github.com/stretchr/testify/require"
+)
+
+func TestEnum(t *testing.T) {
+	stringEnum := Enum(
+		EnumOpt{
+			Name:  "N 1",
+			Value: "n1",
+		},
+		EnumOpt{
+			Name:  "N 2",
+			Value: "n2",
+		},
+	)
+	intEnum := Enum(
+		EnumOpt{
+			Name:  "N 1",
+			Value: 1,
+		},
+		EnumOpt{
+			Name:  "N 2",
+			Value: 2,
+		},
+	)
+	floatEnum := Enum(
+		EnumOpt{
+			Name:  "N 1",
+			Value: 1.1,
+		},
+		EnumOpt{
+			Name:  "N 2",
+			Value: int(2),
+		},
+	)
+	tests := []struct {
+		name    string
+		field   *field.Field
+		data    interface{}
+		wantErr bool
+	}{
+		{"Correct string", field.String(stringEnum), "n1", false},
+		{"Correct int", field.Number(field.NumberFormatInt, intEnum), 1, false},
+		{"Correct float", field.Number(field.NumberFormatFloat, floatEnum), 1.1, false},
+		{"Correct float", field.Number(field.NumberFormatFloat, floatEnum), int(2), false},
+		{"Incorrect string", field.String(stringEnum), "n3", true},
+		{"Incorrect int", field.Number(field.NumberFormatInt, intEnum), 3, true},
+		{"Incorrect float", field.Number(field.NumberFormatFloat, floatEnum), 1.0, true},
+		{"Incorrect enum value", field.String(intEnum), "n1", true},
+		{"Incorrect enum value", field.String(stringEnum).SetAdditionalValues(), "non enum value", false},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := field.Decode(nil, tt.field, tt.data)
+			require.NoError(t, err)
+			err = Validate(nil, tt.field, got)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("Validate() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+		})
+	}
+}
diff --git a/pkg/schema/validate/number.go b/pkg/schema/validate/number.go
new file mode 100644
index 0000000000000000000000000000000000000000..81ab150b21d0af3a5e6cc91dac664c3a479a9f11
--- /dev/null
+++ b/pkg/schema/validate/number.go
@@ -0,0 +1,136 @@
+package validate
+
+import (
+	"context"
+	"fmt"
+	"math"
+	"reflect"
+	"strconv"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+)
+
+type max float64
+
+func Max(m float64) Validator {
+	v := max(m)
+	return &v
+}
+
+func (t max) Validate(_ context.Context, field *field.Field, value interface{}) error {
+	var num float64
+	switch v := value.(type) {
+	case int:
+		num = float64(v)
+	case int8:
+		num = float64(v)
+	case int32:
+		num = float64(v)
+	case int64:
+		num = float64(v)
+	case uint:
+		num = float64(v)
+	case uint8:
+		num = float64(v)
+	case uint32:
+		num = float64(v)
+	case uint64:
+		num = float64(v)
+	case float32:
+		num = float64(v)
+	case float64:
+		num = v
+	case nil:
+		return nil
+	default:
+		return fmt.Errorf("incorrect type: \"%s\", expected \"number\"", reflect.ValueOf(v).Kind())
+	}
+	if num > float64(t) {
+		return fmt.Errorf("maximum number is %s, got %s", strconv.FormatFloat(float64(t), 'f', -1, 64), strconv.FormatFloat(float64(num), 'f', -1, 64))
+	}
+	return nil
+
+}
+
+type min float64
+
+func Min(m float64) Validator {
+	v := min(m)
+	return &v
+}
+
+func (t min) Validate(_ context.Context, fld *field.Field, value interface{}) error {
+	var num float64
+	switch v := value.(type) {
+	case int:
+		num = float64(v)
+	case int8:
+		num = float64(v)
+	case int32:
+		num = float64(v)
+	case int64:
+		num = float64(v)
+	case uint:
+		num = float64(v)
+	case uint8:
+		num = float64(v)
+	case uint32:
+		num = float64(v)
+	case uint64:
+		num = float64(v)
+	case float32:
+		num = float64(v)
+	case float64:
+		num = v
+	case nil:
+		return nil
+	default:
+		return fmt.Errorf("incorrect type: \"%s\", expected \"number\"", reflect.ValueOf(v).Kind())
+	}
+	if num < float64(t) {
+		return fmt.Errorf("minimum number is %s, got %s", strconv.FormatFloat(float64(t), 'f', -1, 64), strconv.FormatFloat(float64(num), 'f', -1, 64))
+	}
+	return nil
+}
+
+type multipleOf float64
+
+func MultipleOf(d float64) Validator {
+	v := multipleOf(d)
+	return &v
+}
+
+func (t multipleOf) Validate(_ context.Context, field *field.Field, value interface{}) error {
+	var num float64
+	switch v := value.(type) {
+	case int:
+		num = float64(v)
+	case int8:
+		num = float64(v)
+	case int32:
+		num = float64(v)
+	case int64:
+		num = float64(v)
+	case uint:
+		num = float64(v)
+	case uint8:
+		num = float64(v)
+	case uint32:
+		num = float64(v)
+	case uint64:
+		num = float64(v)
+	case float32:
+		num = float64(v)
+	case float64:
+		num = v
+	case nil:
+		return nil
+	default:
+		return fmt.Errorf("incorrect type: \"%s\", expected \"number\"", reflect.ValueOf(v).Kind())
+	}
+	if math.Mod(num, float64(t)) != 0 {
+		return fmt.Errorf("number must be a multiple of %f", t)
+
+	}
+	return nil
+}
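+
+// Usage sketch (mirrors number_test.go): Max, Min and MultipleOf can be combined on a
+// single number field; each option validates the decoded numeric value independently.
+//
+//	f := field.Number(field.NumberFormatInt, Max(10), Min(2), MultipleOf(2))
+//	err := Validate(nil, f, 4) // nil: 4 is within [2, 10] and a multiple of 2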
diff --git a/pkg/schema/validate/number_test.go b/pkg/schema/validate/number_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..edbd218cf3b19ecd5b412e5a7e74015a8dfaecb5
--- /dev/null
+++ b/pkg/schema/validate/number_test.go
@@ -0,0 +1,99 @@
+package validate
+
+import (
+	"testing"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestNumber(t *testing.T) {
+	tests := []struct {
+		name    string
+		field   *field.Field
+		data    interface{}
+		wantErr bool
+	}{
+		{"Max Int", field.Number("int").AddOptions(Max(5.0)), 6, true},
+		{"Min Int", field.Number("float").AddOptions(Min(10)), 9.0, true},
+
+		{"Max Float", field.Number("float").AddOptions(Max(5.5)), 5.6, true},
+		{"Min Float", field.Number("float").AddOptions(Min(10.1)), 9.9, true},
+
+		{"MultipleOf Int", field.Number("float").AddOptions(MultipleOf(5)), 1.0, true},
+		{"MultipleOf Int", field.Number("float").AddOptions(MultipleOf(5)), 10.0, false},
+
+		{"MultipleOf Float", field.Number("float").AddOptions(MultipleOf(0.01)), 0.2, true},
+		{"MultipleOf Float", field.Number("float").AddOptions(MultipleOf(0.05)), 0.1, false},
+
+		{"MinMaxMultipleOf Int", field.Number("int").AddOptions(Max(5), Min(2), MultipleOf(5)), 6, true},
+		{"MinMaxMultipleOf Int", field.Number("int").AddOptions(Max(5), Min(2), MultipleOf(2)), 4, false},
+		{"MinMaxMultipleOf Int", field.Number("int").AddOptions(Max(5), Min(1), MultipleOf(2)), -2, true},
+		{"MinMaxMultipleOf Int", field.Number("int").AddOptions(Max(5), Min(-1), MultipleOf(2)), 0, false},
+		{"MinMaxMultipleOf Int", field.Number("int").AddOptions(Max(10), Min(7), MultipleOf(5)), 6, true},
+		{"MinMaxMultipleOf Float", field.Number("int").AddOptions(Max(5.5), Min(2.3), MultipleOf(5)), 6, true},
+		{"MinMaxMultipleOf Float", field.Number("float").AddOptions(Max(10.1), Min(7.7), MultipleOf(0.1)), 6.1, true},
+
+		{"Enum miss", field.Number("int").AddOptions(Enum(EnumOpt{Name: "N 1", Value: 1}, EnumOpt{Name: "N 2", Value: 2})), 3, true},
+		{"Enum match", field.Number("int").AddOptions(Enum(EnumOpt{Name: "N 1", Value: 1}, EnumOpt{Name: "N 2", Value: 2})), 2, false},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := field.Decode(nil, tt.field, tt.data)
+			require.NoError(t, err)
+			err = Validate(nil, tt.field, got)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("Validate() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+		})
+	}
+}
+
+func TestNumberValidate(t *testing.T) {
+	tests := []struct {
+		name    string
+		field   *field.Field
+		data    interface{}
+		wantErr bool
+		error   string
+	}{
+		{"Nil Max", field.Number("int").AddOptions(Max(1.0)), nil, false, ""},
+		{"Nil Min", field.Number("int").AddOptions(Min(1.0)), nil, false, ""},
+		{"Nil MultipleOf", field.Number("int").AddOptions(MultipleOf(1)), nil, false, ""},
+		{"Int Max", field.Number("int").AddOptions(Max(1.0)), 1, false, ""},
+		{"Int Min", field.Number("int").AddOptions(Min(1.0)), 1, false, ""},
+		{"Int MultipleOf", field.Number("int").AddOptions(Min(1.0)), 1, false, ""},
+		{"FlРѕat Max", field.Number("float").AddOptions(Max(1.0)), 1.0, false, ""},
+		{"FlРѕat Min", field.Number("float").AddOptions(Min(1.0)), 1.0, false, ""},
+		{"FlРѕat MultipleOf", field.Number("float").AddOptions(Min(1.0)), 1.0, false, ""},
+		{"Bool Max Num", field.Number("int").AddOptions(Max(1.0)), true, true, "validation error: incorrect type: \"bool\", expected \"number\""},
+		{"Bool Min Num", field.Number("int").AddOptions(Min(1.0)), true, true, "validation error: incorrect type: \"bool\", expected \"number\""},
+		{"Bool MultipleOf Num", field.Number("int").AddOptions(MultipleOf(1)), true, true, "validation error: incorrect type: \"bool\", expected \"number\""},
+		{"String Max Num", field.Number("int").AddOptions(Max(1.0)), "1", true, "validation error: incorrect type: \"string\", expected \"number\""},
+		{"String Min Num", field.Number("int").AddOptions(Min(1.0)), "1", true, "validation error: incorrect type: \"string\", expected \"number\""},
+		{"String MultipleOf Num", field.Number("int").AddOptions(MultipleOf(1)), "1", true, "validation error: incorrect type: \"string\", expected \"number\""},
+		{"Array Max Num", field.Number("int").AddOptions(Max(1.0)), [1]int{1}, true, "validation error: incorrect type: \"array\", expected \"number\""},
+		{"Array Min Num", field.Number("int").AddOptions(Min(1.0)), [1]int{1}, true, "validation error: incorrect type: \"array\", expected \"number\""},
+		{"Slice Max Num", field.Number("int").AddOptions(Max(1.0)), []int{1}, true, "validation error: incorrect type: \"slice\", expected \"number\""},
+		{"Slice Min Num", field.Number("int").AddOptions(Min(1.0)), []int{1}, true, "validation error: incorrect type: \"slice\", expected \"number\""},
+		{"Slice MultipleOf Num", field.Number("int").AddOptions(MultipleOf(1)), []int{}, true, "validation error: incorrect type: \"slice\", expected \"number\""},
+		{"Map Max Num", field.Number("int").AddOptions(Max(1.0)), map[int]int{}, true, "validation error: incorrect type: \"map\", expected \"number\""},
+		{"Map Min Num", field.Number("int").AddOptions(Min(1.0)), map[int]int{}, true, "validation error: incorrect type: \"map\", expected \"number\""},
+		{"Map MultipleOf Num", field.Number("int").AddOptions(MultipleOf(1)), map[int]int{}, true, "validation error: incorrect type: \"map\", expected \"number\""},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			err := Validate(nil, tt.field, tt.data)
+			if tt.wantErr {
+				require.Error(t, err)
+				assert.EqualError(t, err, tt.error)
+			}
+			if !tt.wantErr {
+				require.NoError(t, err)
+			}
+		})
+	}
+}
diff --git a/pkg/schema/validate/readonly.go b/pkg/schema/validate/readonly.go
new file mode 100644
index 0000000000000000000000000000000000000000..8738d747dd1d86fad7203f20b9df83cb5d2334a4
--- /dev/null
+++ b/pkg/schema/validate/readonly.go
@@ -0,0 +1,30 @@
+package validate
+
+import (
+	"context"
+	"errors"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+)
+
+type readonly bool
+
+func ReadOnly() Validator {
+	r := readonly(true)
+	return &r
+}
+
+func (r readonly) Validate(_ context.Context, f *field.Field, value interface{}) error {
+	if value != nil {
+		return errors.New("value is readonly")
+	}
+	return nil
+
+}
+
+func (r readonly) ValidateOption() error {
+	if !r {
+		return field.ErrSkipOption
+	}
+	return nil
+}
diff --git a/pkg/schema/validate/readonly_test.go b/pkg/schema/validate/readonly_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..ee8ca9ba89a16a02375556d365884bb35d378f83
--- /dev/null
+++ b/pkg/schema/validate/readonly_test.go
@@ -0,0 +1,32 @@
+package validate
+
+import (
+	"testing"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"github.com/stretchr/testify/require"
+)
+
+func TestReadonly(t *testing.T) {
+	tests := []struct {
+		name    string
+		field   *field.Field
+		data    interface{}
+		wantErr bool
+	}{
+		{"ReadOnly", field.String(ReadOnly()), nil, false},
+		{"ReadOnly error", field.String(ReadOnly()), "1234567", true},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := field.Decode(nil, tt.field, tt.data)
+			require.NoError(t, err)
+			err = Validate(nil, tt.field, got)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("Validate() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+		})
+	}
+}
diff --git a/pkg/schema/validate/required.go b/pkg/schema/validate/required.go
new file mode 100644
index 0000000000000000000000000000000000000000..96591e6eaf4134f07523ab87f29e932b8fcee60e
--- /dev/null
+++ b/pkg/schema/validate/required.go
@@ -0,0 +1,48 @@
+package validate
+
+import (
+	"context"
+	"errors"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+)
+
+type required bool
+
+func Required() Validator {
+	r := required(true)
+	return &r
+}
+
+func (r required) Validate(_ context.Context, f *field.Field, value interface{}) error {
+
+	if value != nil {
+		t, ok := f.GetType().(interface{ IsEmpty(interface{}) bool })
+		if ok && !t.IsEmpty(value) {
+			return nil
+		}
+
+		/*
+			tt, ok := f.GetType().(interface {
+					IsEmpty(interface{}) (bool, error)
+				})
+				empty, err := tt.IsEmpty(value)
+				if err != nil {
+					return err
+				}
+				if ok && !empty {
+					return nil
+				}
+		*/
+	}
+
+	return errors.New("value is required")
+
+}
+
+func (r required) ValidateOption() error {
+	if !r {
+		return field.ErrSkipOption
+	}
+	return nil
+}
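+
+// Usage sketch (mirrors required_test.go): Required treats nil and type-specific
+// empty values (empty string, empty array, empty object) as missing.
+//
+//	f := field.String(Required())
+//	err := Validate(nil, f, "") // error: value is required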
diff --git a/pkg/schema/validate/required_test.go b/pkg/schema/validate/required_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..41a03ab50d6fab906e7f78229b560e4298475dde
--- /dev/null
+++ b/pkg/schema/validate/required_test.go
@@ -0,0 +1,53 @@
+package validate
+
+import (
+	"testing"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"github.com/stretchr/testify/require"
+)
+
+func TestRequired(t *testing.T) {
+	tests := []struct {
+		name    string
+		field   *field.Field
+		data    interface{}
+		wantErr bool
+	}{
+		{"Array no value", field.Array(field.String(), Required()), nil, true},
+		{"Array empty", field.Array(field.String(), Required()), []interface{}{}, true},
+		{"Array not empty", field.Array(field.String(), Required()), []interface{}{"a", "b"}, false},
+		{"Boolean no value", field.Bool(Required()), nil, true},
+		{"Boolean", field.Bool(Required()), false, false},
+		{"Location: no value", field.Location(Required()), nil, true},
+		//{"Location: empty", field.Location(Required()), map[string]interface{}{}, true}, // не имеет смысла, при Decode вернется ошибка если объект пустой
+		{"Location: not empty", field.Location(Required()), &field.GeoObject{Address: "addr"}, false},
+		{"Number (int) no value", field.Number(field.NumberFormatInt, Required()), nil, true},
+		{"Number (int) empty", field.Number(field.NumberFormatInt, Required()), 0, false},
+		{"Number (int) not empty", field.Number(field.NumberFormatInt, Required()), 42, false},
+		{"Number (float) no value", field.Number(field.NumberFormatFloat, Required()), nil, true},
+		{"Number (float) empty", field.Number(field.NumberFormatFloat, Required()), 0.0, false},
+		{"Number (float) not empty", field.Number(field.NumberFormatFloat, Required()), 42.2, false},
+		{"Object no value", field.Object("a", field.String(), "b", field.Bool(), Required()), nil, true},
+		{"Object no value", field.Object("a", field.String(), "b", field.Bool(), Required()), map[string]interface{}{}, true},
+		{"Object not empty", field.Object("a", field.String(), "b", field.Bool(), Required()), map[string]interface{}{"b": true}, false},
+		{"String no value", field.String(Required()), nil, true},
+		{"String empty", field.String(Required()), "", true},
+		{"String not empty", field.String(Required()), "1234567", false},
+		{"Time no value", field.Time(Required()), nil, true},
+		{"Time not empty", field.Time(Required()), time.Now().Format(time.RFC3339), false},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := field.Decode(nil, tt.field, tt.data)
+			require.NoError(t, err)
+			err = Validate(nil, tt.field, got)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("Validate() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+		})
+	}
+}
diff --git a/pkg/schema/validate/string.go b/pkg/schema/validate/string.go
new file mode 100644
index 0000000000000000000000000000000000000000..f05ca540034e3cfab7e57c1aa324bcb343b97431
--- /dev/null
+++ b/pkg/schema/validate/string.go
@@ -0,0 +1,71 @@
+package validate
+
+import (
+	"context"
+	"fmt"
+	"reflect"
+	"unicode/utf8"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"github.com/pkg/errors"
+)
+
+type maxLength int
+
+func MaxLength(max int) Validator {
+	v := maxLength(max)
+	return &v
+}
+
+func (t maxLength) Validate(_ context.Context, field *field.Field, value interface{}) error {
+	if s, ok := value.(string); ok {
+		n := utf8.RuneCountInString(s)
+		if t > 0 && n > int(t) {
+			return fmt.Errorf("maximum string length is %d, got \"%s\" (length=%d)", t, s, len(s))
+		}
+		return nil
+	}
+	return fmt.Errorf("incorrect type: \"%s\", expected \"string\"", reflect.ValueOf(value).Kind())
+}
+
+type minLength int
+
+func MinLength(min int) Validator {
+	v := minLength(min)
+	return &v
+}
+
+func (t minLength) Validate(_ context.Context, field *field.Field, value interface{}) error {
+	if s, ok := value.(string); ok {
+		n := utf8.RuneCountInString(s)
+		if n < int(t) {
+			return fmt.Errorf("minimum string length is %d, got \"%s\" (length=%d)", t, s, len(s))
+		}
+		return nil
+	}
+	return fmt.Errorf("incorrect type: \"%s\", expected \"string\"", reflect.ValueOf(value).Kind().String())
+}
+
+type schema bool
+
+func Schema() Validator {
+	v := schema(true)
+	return &v
+}
+
+func (t schema) Validate(_ context.Context, _ *field.Field, value interface{}) error {
+	if value == nil {
+		return nil
+	}
+	if s, ok := value.(string); ok {
+		if s == "" {
+			return nil
+		}
+		sch := field.Object()
+		if err := sch.UnmarshalJSON([]byte(s)); err != nil {
+			return errors.New("value is not valid schema")
+		}
+		return nil
+	}
+	return errors.Errorf("incorrect type: \"%s\", expected string", reflect.ValueOf(value).Kind())
+}
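+
+// Usage sketch (mirrors string_test.go): length limits are counted in runes, and
+// Schema() checks that a string contains a valid serialized field schema.
+//
+//	f := field.String(MinLength(2), MaxLength(5))
+//	err := Validate(nil, f, "abc") // nil: 3 runes is within [2, 5]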
diff --git a/pkg/schema/validate/string_test.go b/pkg/schema/validate/string_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..b67e8c8d4f5056c61d5483a536701777d716bead
--- /dev/null
+++ b/pkg/schema/validate/string_test.go
@@ -0,0 +1,160 @@
+package validate
+
+import (
+	"testing"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestString(t *testing.T) {
+	invalidOptionsSchema := `{
+	"type": "object",
+	"params": {
+		"fields": {
+			"required": {
+				"options": {
+					"err": true
+				},
+				"type": "string"
+			}
+		}
+	}
+}`
+	requiredOptionsSchema := `{
+	"type": "object",
+	"params": {
+		"fields": {
+			"enum": {
+				"options": {
+					"enum": [{
+							"value": "one"
+						},
+						{
+							"value": "two"
+						}
+					]
+				},
+				"type": "string"
+			}
+		}
+	}
+}`
+
+	validSchema := `{
+	"type": "object",
+	"params": {
+		"fields": {
+			"required": {
+				"options": {
+					"required": true
+				},
+				"type": "string"
+			},
+			"readonly": {
+				"options": {
+					"readonly": true
+				},
+				"type": "string"
+			},
+			"enum": {
+				"options": {
+					"enum": [{
+							"name": "One",
+							"value": "one"
+						},
+						{
+							"name": "Two",
+							"value": "two"
+						}
+					]
+				},
+				"type": "string"
+			}
+		}
+	}
+}`
+
+	unknownFieldSchema := `{
+	"type": "object",
+	"params": {
+		"fields": {
+			"string": {}
+		}
+	}
+}`
+
+	tests := []struct {
+		name    string
+		field   *field.Field
+		data    interface{}
+		wantErr bool
+	}{
+		{"Length Max", field.String().AddOptions(MaxLength(5)), "1234567", true},
+		{"Length Min", field.String().AddOptions(MinLength(10)), "1234", true},
+		{"Length MinMax", field.String().AddOptions(MaxLength(6), MinLength(2)), "1234567", true},
+		{"Length MinMax", field.String().AddOptions(MaxLength(10), MinLength(7)), "123456", true},
+		{"Enum miss", field.String().AddOptions(Enum(EnumOpt{Name: "N 1", Value: "n1"}, EnumOpt{Name: "N 2", Value: "n2"})), "n3", true},
+		{"Enum match", field.String().AddOptions(Enum(EnumOpt{Name: "N 1", Value: "n1"}, EnumOpt{Name: "N 2", Value: "n2"})), "n2", false},
+		{"Invalid Schema Options", field.String().AddOptions(Schema()), invalidOptionsSchema, true},
+		{"Required Schema Options", field.String().AddOptions(Schema()), requiredOptionsSchema, true},
+		{"Valid Schema", field.String().AddOptions(Schema()), validSchema, false},
+		{"Invalid Schema#1", field.String().AddOptions(Schema()), "test", true},
+		{"Unknown Field", field.String().AddOptions(Schema()), unknownFieldSchema, false},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := field.Decode(nil, tt.field, tt.data)
+			require.NoError(t, err)
+			err = Validate(nil, tt.field, got)
+			if tt.wantErr {
+				require.Error(t, err)
+			}
+			if !tt.wantErr {
+				require.NoError(t, err)
+			}
+		})
+	}
+}
+
+func TestStringValidate(t *testing.T) {
+	tests := []struct {
+		name    string
+		field   *field.Field
+		data    interface{}
+		wantErr bool
+		error   string
+	}{
+		{"String Length Max", field.String().AddOptions(MaxLength(1)), "1", false, ""},
+		{"String Length Min", field.String().AddOptions(MinLength(1)), "1", false, ""},
+		{"Nil Length Max", field.String().AddOptions(MaxLength(1)), nil, true, "validation error: incorrect type: \"invalid\", expected \"string\""},
+		{"Nil Length Min", field.String().AddOptions(MinLength(1)), nil, true, "validation error: incorrect type: \"invalid\", expected \"string\""},
+		{"Int Length Max", field.String().AddOptions(MaxLength(1)), 1, true, "validation error: incorrect type: \"int\", expected \"string\""},
+		{"Int Length Min", field.String().AddOptions(MinLength(1)), 1, true, "validation error: incorrect type: \"int\", expected \"string\""},
+		{"Float Length Max", field.String().AddOptions(MaxLength(1)), 1.0, true, "validation error: incorrect type: \"float64\", expected \"string\""},
+		{"Float Length Min", field.String().AddOptions(MinLength(1)), 1.0, true, "validation error: incorrect type: \"float64\", expected \"string\""},
+		{"Bool Length Max", field.String().AddOptions(MaxLength(1)), true, true, "validation error: incorrect type: \"bool\", expected \"string\""},
+		{"Bool Length Min", field.String().AddOptions(MinLength(1)), true, true, "validation error: incorrect type: \"bool\", expected \"string\""},
+		{"Array Length Max", field.String().AddOptions(MaxLength(1)), [1]string{""}, true, "validation error: incorrect type: \"array\", expected \"string\""},
+		{"Array Length Min", field.String().AddOptions(MinLength(1)), [1]string{""}, true, "validation error: incorrect type: \"array\", expected \"string\""},
+		{"Slice Length Max", field.String().AddOptions(MaxLength(1)), []string{""}, true, "validation error: incorrect type: \"slice\", expected \"string\""},
+		{"Slice Length Min", field.String().AddOptions(MinLength(1)), []string{""}, true, "validation error: incorrect type: \"slice\", expected \"string\""},
+		{"Map Length Max", field.String().AddOptions(MaxLength(1)), map[string]string{"": ""}, true, "validation error: incorrect type: \"map\", expected \"string\""},
+		{"Map Length Min", field.String().AddOptions(MinLength(1)), map[string]string{"": ""}, true, "validation error: incorrect type: \"map\", expected \"string\""},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			err := Validate(nil, tt.field, tt.data)
+			if tt.wantErr {
+				require.Error(t, err)
+				assert.EqualError(t, err, tt.error)
+			}
+			if !tt.wantErr {
+				require.NoError(t, err)
+			}
+		})
+	}
+}
diff --git a/pkg/schema/validate/validate.go b/pkg/schema/validate/validate.go
new file mode 100644
index 0000000000000000000000000000000000000000..65ac4c400d19055deae67ddfcc5678775d0e3376
--- /dev/null
+++ b/pkg/schema/validate/validate.go
@@ -0,0 +1,99 @@
+package validate
+
+import (
+	"context"
+	"sort"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/expr"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+)
+
+const ValidatorPriority = 2000
+
+type Validator interface {
+	Validate(ctx context.Context, f *field.Field, v interface{}) error
+}
+
+type Validators []Validator
+
+func (l Validators) Len() int { return len(l) }
+func (l Validators) Less(i, j int) bool {
+	pi, pj := ValidatorPriority, ValidatorPriority
+	if o, ok := l[i].(field.PriorityOption); ok {
+		pi = o.GetPriority()
+	}
+	if o, ok := l[j].(field.PriorityOption); ok {
+		pj = o.GetPriority()
+	}
+	if pi == pj {
+		return field.GetOptionName(l[i]) < field.GetOptionName(l[j])
+	}
+	return pi < pj
+}
+func (l Validators) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
+
+func getValidators(f *field.Field) Validators {
+	var vs Validators
+	for _, o := range f.Options {
+		if v, ok := o.(Validator); ok {
+			vs = append(vs, v)
+		}
+	}
+	sort.Sort(vs)
+	return vs
+}
+
+func validateOptions(ctx context.Context, f *field.Field, v interface{}) error {
+	var err error
+	validators := getValidators(f)
+	for _, i := range validators {
+		err = i.Validate(ctx, f, v)
+		if err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+func Validate(ctx context.Context, w field.Walker, v interface{}) error {
+	if m, ok := v.(map[string]interface{}); ok {
+		ctx = expr.WithEnv(ctx, m)
+	}
+
+	_, _, err := w.Walk(ctx, v, func(ctx context.Context, fld *field.Field, v interface{}) (res field.WalkFuncResult, err error) {
+		enabled, _ := fld.IsEnabled(ctx)
+
+		res.Value = v // the value itself is not modified
+
+		if !enabled {
+			res.Stop = true
+			return
+		}
+
+		if err = validateOptions(ctx, fld, v); err != nil {
+			return
+		}
+
+		if validator, ok := fld.GetType().(Validator); ok {
+			err = validator.Validate(ctx, fld, v)
+		}
+
+		return
+	})
+
+	return errors.Wrap(err, "validation error")
+}
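+
+// Usage sketch (mirrors the calls in the package tests): the walker visits every
+// enabled field, runs the option validators in priority order and then the field
+// type's own Validate; the resulting error is wrapped with the "validation error" prefix.
+//
+//	sch := field.Object("name", field.String(Required()))
+//	err := Validate(context.Background(), sch, map[string]interface{}{})
+//	// err is expected to be non-nil: "name" is required but missing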
+
+func init() {
+	field.RegisterOption(minLength(0))
+	field.RegisterOption(maxLength(0))
+	field.RegisterOption(min(0))
+	field.RegisterOption(max(0))
+	field.RegisterOption(multipleOf(1))
+	field.RegisterOption(enum{})
+	field.RegisterOption(readonly(true))
+	field.RegisterOption(required(true))
+	field.RegisterOption(maxItems(0))
+	field.RegisterOption(minItems(0))
+	field.RegisterOption(schema(true))
+}
diff --git a/pkg/schema/walk/fn.go b/pkg/schema/walk/fn.go
new file mode 100644
index 0000000000000000000000000000000000000000..c3eaf35c569c18ba8ead7c7ff48c08d03f2a9d90
--- /dev/null
+++ b/pkg/schema/walk/fn.go
@@ -0,0 +1,11 @@
+package walk
+
+// GenericMerge is the default data function: it leaves the destination value unchanged.
+func GenericMerge(c *WalkContext) (err error) {
+	return
+}
+
+// KeepSrc replaces the destination value with the source value and marks the context as changed.
+func KeepSrc(c *WalkContext) (err error) {
+	c.Dst = c.Src
+	c.Changed = true
+	return
+}
diff --git a/pkg/schema/walk/walk.go b/pkg/schema/walk/walk.go
new file mode 100644
index 0000000000000000000000000000000000000000..4757742f82efcaffd5faba47eb076ed727783c75
--- /dev/null
+++ b/pkg/schema/walk/walk.go
@@ -0,0 +1,182 @@
+package walk
+
+import (
+	"context"
+	"fmt"
+	"strings"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+)
+
+// DataFunc is the type of a data-processing function invoked for each visited field
+type DataFunc func(c *WalkContext) error
+
+// FieldConfig describes what to do with a field while walking the data
+type FieldConfig struct {
+	Fn DataFunc // Custom data-processing function for the field
+}
+
+// WalkConfig holds the settings for a data walk
+type WalkConfig struct {
+	// Per-field settings, keyed by the field's absolute path
+	// For example: "a.b.c.1" (numbers are used for slice indices)
+	Fields map[string]FieldConfig
+}
+
+// Walker walks data according to the corresponding schema
+type Walker struct {
+	schema    *schema.Schema
+	config    *WalkConfig
+	DefaultFn DataFunc // Data-processing function applied by default
+}
+
+// NewWalker creates a Walker for the given schema and walk configuration
+func NewWalker(schema *schema.Schema, config *WalkConfig) *Walker {
+	return &Walker{
+		schema:    schema,
+		config:    config,
+		DefaultFn: GenericMerge,
+	}
+}
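+
+// Usage sketch (based on walk_test.go; s, dst and src are placeholders): per-field
+// behaviour is selected by the field's absolute path, everything else falls back to
+// DefaultFn (GenericMerge).
+//
+//	w := NewWalker(s, &WalkConfig{Fields: map[string]FieldConfig{
+//		"obj1.a": {Fn: KeepSrc}, // take the source value for obj1.a
+//	}})
+//	res, changed, err := w.DataWalk(context.Background(), dst, src)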
+
+// WalkContext is the context of a data walk
+type WalkContext struct {
+	Ctx     context.Context
+	Path    string       // Path to the parent data
+	Key     interface{}  // Key or index of the current data
+	Field   *field.Field // Schema field corresponding to the current data
+	Dst     interface{}  // Destination data
+	Src     interface{}  // Source data
+	Changed bool         // Flag indicating that the destination data has changed
+}
+
+// GetPath returns the path corresponding to the current context, extended with the given keys
+func (w WalkContext) GetPath(keys ...interface{}) string {
+	p := make([]string, 0, 10)
+
+	if w.Path != "" {
+		p = append(p, w.Path)
+	}
+
+	if w.Key != nil {
+		p = append(p, fmt.Sprintf("%v", w.Key))
+	}
+
+	for _, k := range keys {
+		p = append(p, fmt.Sprintf("%v", k))
+	}
+
+	return strings.Join(p, ".")
+}
+
+// Clone returns a copy of the context.
+func (w WalkContext) Clone() *WalkContext {
+	return &w
+}
+
+// DataWalk walks the data and returns the (possibly modified) destination data.
+func (m *Walker) DataWalk(ctx context.Context, dst, src interface{}) (res interface{}, changed bool, err error) {
+	wc := &WalkContext{
+		Ctx:   ctx,
+		Field: &m.schema.Field,
+		Dst:   dst,
+		Src:   src,
+	}
+
+	err = m.datawalk(wc)
+
+	return wc.Dst, wc.Changed, err
+}
+
+func (m *Walker) datawalk(w *WalkContext) (err error) {
+	path := w.GetPath()
+	fn := m.DefaultFn
+
+	fieldCfg := m.config.Fields[path]
+
+	if fieldCfg.Fn != nil {
+		fn = fieldCfg.Fn
+	}
+
+	if err = fn(w); err != nil {
+		return
+	}
+
+	switch p := w.Field.Params.(type) {
+
+	case *field.ObjectParameters:
+		d, _ := w.Dst.(map[string]interface{})
+		s, _ := w.Src.(map[string]interface{})
+		res := make(map[string]interface{})
+
+		keys := make(map[string]struct{})
+		for k := range d {
+			keys[k] = struct{}{}
+		}
+		for k := range s {
+			keys[k] = struct{}{}
+		}
+
+		fields := p.GetFields(true)
+
+		for k := range keys {
+			f, ok := fields[k]
+			if !ok {
+				continue
+			}
+
+			wc := WalkContext{
+				Ctx:   w.Ctx,
+				Path:  w.GetPath(),
+				Key:   k,
+				Field: f,
+				Dst:   d[k],
+				Src:   s[k],
+			}
+
+			if err = m.datawalk(&wc); err != nil {
+				return
+			}
+
+			if wc.Dst != nil {
+				res[k] = wc.Dst
+			}
+
+			if wc.Changed {
+				w.Changed = true
+			}
+		}
+		if len(res) > 0 {
+			w.Dst = res
+		}
+
+	case *field.ArrayParameters:
+		d, _ := w.Dst.([]interface{})
+		s, _ := w.Src.([]interface{})
+		for i, v := range d {
+			var srcV interface{}
+			if i < len(s) {
+				srcV = s[i]
+			}
+			wc := WalkContext{
+				Ctx:   w.Ctx,
+				Path:  w.GetPath(),
+				Key:   i,
+				Field: p.Item,
+				Dst:   v,
+				Src:   srcV,
+			}
+			if err = m.datawalk(&wc); err != nil {
+				return
+			}
+			if wc.Changed {
+				d[i] = wc.Dst
+				w.Changed = true
+			}
+		}
+
+	}
+
+	return
+}
diff --git a/pkg/schema/walk/walk_test.go b/pkg/schema/walk/walk_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..e20adb462735e83c92c8b04f5c159e336e49f2b0
--- /dev/null
+++ b/pkg/schema/walk/walk_test.go
@@ -0,0 +1,115 @@
+package walk
+
+import (
+	"context"
+	"testing"
+
+	"git.perx.ru/perxis/perxis-go/pkg/schema"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestWalker_DataWalk(t *testing.T) {
+	s := schema.New(
+		"a", field.String(),
+		"b", field.String(),
+		"obj1", field.Object(
+			"a", field.String(),
+			"b", field.String(),
+			"obj2", field.Object(
+				"a", field.String(),
+				"b", field.String(),
+			),
+			"obj3", field.Object(
+				"e", field.String(),
+			),
+		),
+		"slice", field.Array(field.String()),
+		"inline_outer_str", field.Object(
+			true,
+			"inline_str_1", field.String(),
+			"inline_obj", field.Object(
+				true,
+				"inline_str_2", field.String(),
+			),
+		),
+	)
+
+	tests := []struct {
+		name        string
+		config      *WalkConfig
+		src         map[string]interface{}
+		dst         map[string]interface{}
+		res         map[string]interface{}
+		wantChanged bool
+		wantErr     bool
+	}{
+		{"generic",
+			&WalkConfig{
+				Fields: map[string]FieldConfig{
+					"obj1.a":       {Fn: KeepSrc},
+					"slice.1":      {Fn: KeepSrc},
+					"inline_str_1": {Fn: KeepSrc},
+					"inline_str_2": {Fn: KeepSrc},
+				},
+			},
+			map[string]interface{}{
+				"a": "src_a",
+				"b": "src_b",
+				"obj1": map[string]interface{}{
+					"a": "src_obj1_a",
+					"b": "src_obj1_b",
+					"obj2": map[string]interface{}{
+						"a": "dst_obj1_obj2_a",
+					},
+					"obj3": map[string]interface{}{
+						"e": "dst_obj1_obj3_e",
+					},
+				},
+				"inline_str_1": "src_inline_1",
+				"inline_str_2": "src_inline_2",
+				"slice":        []interface{}{"src_s1", "src_s2"},
+			},
+			map[string]interface{}{
+				"a":                 "dst_a",
+				"field_not_extists": "remove",
+				"obj1": map[string]interface{}{
+					"a": "dst_obj1_a",
+					"obj2": map[string]interface{}{
+						"a": "dst_obj1_obj2_a",
+					},
+				},
+				"inline_str_1": "dst_inline_1",
+				"inline_str_2": "dst_inline_2",
+				"slice":        []interface{}{"dst_s1", "dst_s2", "dst_s3"},
+			},
+			map[string]interface{}{
+				"a": "dst_a",
+				"obj1": map[string]interface{}{
+					"a": "src_obj1_a",
+					"obj2": map[string]interface{}{
+						"a": "dst_obj1_obj2_a",
+					},
+				},
+				"inline_str_1": "src_inline_1",
+				"inline_str_2": "src_inline_2",
+				"slice":        []interface{}{"dst_s1", "src_s2", "dst_s3"},
+			},
+			false, false,
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			m := NewWalker(s, tt.config)
+			dst := tt.dst
+			res, _, err := m.DataWalk(context.Background(), dst, tt.src)
+			assert.Equal(t, tt.res, res)
+			if tt.wantErr {
+				require.Error(t, err)
+			} else {
+				require.NoError(t, err)
+			}
+		})
+	}
+}
diff --git a/pkg/schemaloader/context.go b/pkg/schemaloader/context.go
new file mode 100644
index 0000000000000000000000000000000000000000..7407b6b30939981f917c50d734ac770ec69b9e4e
--- /dev/null
+++ b/pkg/schemaloader/context.go
@@ -0,0 +1,30 @@
+package schemaloader
+
+import "context"
+
+type LoaderContext struct {
+	SpaceID string
+	EnvID   string
+}
+
+type loaderCtxKey struct{}
+
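+// WithContext stores loaderContext in ctx. If a LoaderContext is already present
+// it is updated in place, so contexts derived earlier observe the new values.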
+func WithContext(ctx context.Context, loaderContext *LoaderContext) context.Context {
+	if ctx == nil {
+		ctx = context.Background()
+	}
+	p, _ := ctx.Value(loaderCtxKey{}).(*LoaderContext)
+	if p != nil {
+		*p = *loaderContext
+		return ctx
+	}
+	return context.WithValue(ctx, loaderCtxKey{}, loaderContext)
+}
+
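+// GetContext returns the LoaderContext stored in ctx, or an empty LoaderContext if none is set.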
+func GetContext(ctx context.Context) *LoaderContext {
+	p, _ := ctx.Value(loaderCtxKey{}).(*LoaderContext)
+	if p == nil {
+		return new(LoaderContext)
+	}
+	return p
+}
diff --git a/pkg/schemaloader/loader.go b/pkg/schemaloader/loader.go
new file mode 100644
index 0000000000000000000000000000000000000000..e27baf4ea8b509adff88f7b9c5e99a48f15960b7
--- /dev/null
+++ b/pkg/schemaloader/loader.go
@@ -0,0 +1,92 @@
+package schemaloader
+
+import (
+	"context"
+	"net/url"
+	"strings"
+
+	"git.perx.ru/perxis/perxis-go/pkg/collections"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/schema/field"
+)
+
+// NewLoader returns a new schema loader backed by the collections service.
+// It is intended for server-side use only; clients should use the methods that
+// return fully loaded schemas, for which resolution happens on the server.
+func NewLoader(svc collections.Collections) field.Loader {
+	return &loader{svc: svc}
+}
+
+type loader struct {
+	svc collections.Collections
+}
+
+// Load returns the (not yet resolved) schema fields referenced by ref, looked up in the collections.
+func (l *loader) Load(ctx context.Context, ref string) ([]*field.Field, error) {
+	spaceID, envID, colID, err := parseRef(ctx, ref)
+	if err != nil {
+		return nil, err
+	}
+
+	filter := &collections.Filter{ID: []string{colID}}
+
+	colls, err := l.svc.List(ctx, spaceID, envID, filter)
+	if err != nil {
+		return nil, errors.Wrapf(err, "schemaloader: failed to get collections for \"%s\"", ref)
+	}
+
+	var schemas []*field.Field
+	for _, s := range colls {
+		if s.Schema != nil {
+			schemas = append(schemas, &s.Schema.Field)
+		}
+	}
+
+	if len(schemas) == 0 {
+		return nil, errors.Errorf("schema not found \"%s\"", ref)
+	}
+
+	return schemas, nil
+}
+
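+// parseRef parses a schema reference of the form "[space/[env/]]collection[#field]".
+// For the two-segment form the environment defaults to "master"; a missing space
+// or environment is filled in from the LoaderContext carried in ctx.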
+func parseRef(ctx context.Context, ref string) (spaceID, envID, colID string, err error) {
+	var u *url.URL
+	if u, err = url.Parse(ref); err != nil {
+		return
+	}
+
+	parts := strings.SplitN(u.Path, "/", 3)
+
+	switch len(parts) {
+	case 1:
+		colID = parts[0]
+	case 2:
+		spaceID = parts[0]
+		envID = "master"
+		colID = parts[1]
+	case 3:
+		spaceID = parts[0]
+		envID = parts[1]
+		colID = parts[2]
+	}
+
+	if colID == "" {
+		err = errors.Errorf("invalid schema reference \"%s\"", ref)
+	}
+
+	if loaderCtx := GetContext(ctx); loaderCtx != nil {
+		if spaceID == "" {
+			spaceID = loaderCtx.SpaceID
+		}
+
+		if envID == "" {
+			envID = loaderCtx.EnvID
+		}
+	}
+
+	if spaceID == "" {
+		err = errors.Errorf("can't identify space for reference \"%s\"", ref)
+	}
+
+	return
+}
diff --git a/pkg/schemaloader/loader_test.go b/pkg/schemaloader/loader_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..f796fd8f669e31140e4171955c4d7db8723cec26
--- /dev/null
+++ b/pkg/schemaloader/loader_test.go
@@ -0,0 +1,141 @@
+package schemaloader
+
+import (
+	"context"
+	"fmt"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+//func Test_Load(t *testing.T) {
+//
+//	const (
+//		spaceID = "SpaceID"
+//		envID   = "envID"
+//		colID   = "colID"
+//		uri     = "/colID#fieldID"
+//	)
+//
+//	t.Run("Load schema (success)", func(t *testing.T) {
+//		collSvs := &mocks.Collections{}
+//
+//		sch := schema.New(
+//			"first_name", field.String(),
+//			"last_name", field.String(),
+//		)
+//
+//		cl := &collections.Collection{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "Collection", Schema: sch}
+//		collSvs.On("List", mock.Anything, spaceID, envID, mock.AnythingOfType("*collections.Filter")).Run(func(args mock.Arguments) {
+//			filter := args[3].(*collections.Filter)
+//
+//			assert.Equal(t, &collections.Filter{ID: []string{"colID"}}, filter, "Фильтр должен содержать идентификатор коллекции")
+//		}).Return([]*collections.Collection{cl}, nil).Once()
+//
+//		loader := NewLoader(collSvs, spaceID, envID)
+//		schemas, err := loader.Load(nil, uri)
+//
+//		require.NoError(t, err, "Ожидается успешное завершение")
+//		require.Equal(t, []*field.Field{&sch.Field}, schemas, "Метод должен возвращать срез схем")
+//		collSvs.AssertExpectations(t)
+//	})
+//
+//	t.Run("Collection doesn't have schema", func(t *testing.T) {
+//		collSvs := &mocks.Collections{}
+//
+//		cl := &collections.Collection{ID: colID, SpaceID: spaceID, EnvID: envID, Name: "Collection"}
+//		collSvs.On("List", mock.Anything, spaceID, envID, mock.AnythingOfType("*collections.Filter")).Run(func(args mock.Arguments) {
+//			filter := args[3].(*collections.Filter)
+//
+//			assert.Equal(t, &collections.Filter{ID: []string{"colID"}}, filter, "Фильтр должен содержать идентификатор коллекции")
+//		}).Return([]*collections.Collection{cl}, nil).Once()
+//
+//		loader := NewLoader(collSvs, spaceID, envID)
+//		schemas, err := loader.Load(nil, uri)
+//
+//		require.Error(t, err, "Ожидается ошибка")
+//		require.Contains(t, err.Error(), "schema not found")
+//		require.Nil(t, schemas, "Метод должен вернуть nil")
+//		//assert.Nil(t, schemas, "Метод должен вернуть nil")
+//		collSvs.AssertExpectations(t)
+//	})
+//
+//	t.Run("Loader not found collection", func(t *testing.T) {
+//		collSvs := &mocks.Collections{}
+//
+//		collSvs.On("List", mock.Anything, spaceID, envID, mock.AnythingOfType("*collections.Filter")).Run(func(args mock.Arguments) {
+//			filter := args[3].(*collections.Filter)
+//
+//			assert.Equal(t, &collections.Filter{ID: []string{"colID"}}, filter, "Фильтр должен содержать идентификатор коллекции")
+//		}).Return([]*collections.Collection{}, nil).Once()
+//
+//		loader := NewLoader(collSvs, spaceID, envID)
+//		schemas, err := loader.Load(nil, uri)
+//
+//		require.Error(t, err, "Ожидается ошибка")
+//		require.Contains(t, err.Error(), "schema not found")
+//		require.Nil(t, schemas, "Метод должен вернуть nil")
+//		collSvs.AssertExpectations(t)
+//	})
+//
+//	t.Run("Collection service return error", func(t *testing.T) {
+//		collSvs := &mocks.Collections{}
+//
+//		collSvs.On("List", mock.Anything, spaceID, envID, mock.AnythingOfType("*collections.Filter")).Run(func(args mock.Arguments) {
+//			filter := args[3].(*collections.Filter)
+//
+//			assert.Equal(t, &collections.Filter{ID: []string{"colID"}}, filter, "Фильтр должен содержать идентификатор коллекции")
+//		}).Return(nil, errors.New("storage error")).Once()
+//
+//		loader := NewLoader(collSvs, spaceID, envID)
+//		schemas, err := loader.Load(nil, uri)
+//
+//		require.Error(t, err, "Ожидается ошибка")
+//		require.Contains(t, err.Error(), "failed to get schema")
+//		require.Nil(t, schemas, "Метод должен вернуть nil")
+//		collSvs.AssertExpectations(t)
+//	})
+//
+//	t.Run("ParseMask return error", func(t *testing.T) {
+//		collSvs := &mocks.Collections{}
+//
+//		loader := NewLoader(collSvs, spaceID, envID)
+//		schemas, err := loader.Load(nil, "")
+//
+//		require.Error(t, err, "Ожидается ошибка")
+//		require.Contains(t, err.Error(), "invalid schema reference")
+//		require.Nil(t, schemas, "Метод должен вернуть nil")
+//		collSvs.AssertExpectations(t)
+//	})
+//}
+
+func Test_parseRef(t *testing.T) {
+	ctx := WithContext(nil, &LoaderContext{SpaceID: "spc", EnvID: "env"})
+	tests := []struct {
+		ref            string
+		ctx            context.Context
+		wantSpaceID    string
+		wantEnvId      string
+		wantCollection string
+		wantErr        assert.ErrorAssertionFunc
+	}{
+		{"col", ctx, "spc", "env", "col", assert.NoError},
+		{"/col", ctx, "spc", "master", "col", assert.NoError},
+		{"spc1/env1/col", ctx, "spc1", "env1", "col", assert.NoError},
+		{"spc1/env1/col#fld", ctx, "spc1", "env1", "col", assert.NoError},
+		{"col%3f*", ctx, "spc", "env", "col?*", assert.NoError},
+		{"#fld", ctx, "spc", "env", "", assert.Error},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.ref, func(t *testing.T) {
+			gotSpaceID, gotEnvId, gotCollection, err := parseRef(tt.ctx, tt.ref)
+			if !tt.wantErr(t, err, fmt.Sprintf("parseRef(%v)", tt.ref)) {
+				return
+			}
+			assert.Equalf(t, tt.wantSpaceID, gotSpaceID, "parseRef(%v)", tt.ref)
+			assert.Equalf(t, tt.wantEnvId, gotEnvId, "parseRef(%v)", tt.ref)
+			assert.Equalf(t, tt.wantCollection, gotCollection, "parseRef(%v)", tt.ref)
+		})
+	}
+}
diff --git a/pkg/service/errors.go b/pkg/service/errors.go
new file mode 100644
index 0000000000000000000000000000000000000000..055b97921ff78450bb229a14efeca265903e42c4
--- /dev/null
+++ b/pkg/service/errors.go
@@ -0,0 +1,9 @@
+package service
+
+import "git.perx.ru/perxis/perxis-go/pkg/errors"
+
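+// Common sentinel errors shared by the services in this repository.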
+var (
+	ErrAccessDenied  = errors.PermissionDenied(errors.New("access denied"))
+	ErrNotFound      = errors.NotFound(errors.New("not found"))
+	ErrAlreadyExists = errors.AlreadyExists(errors.New("already exists"))
+)
diff --git a/pkg/spaces/middleware/caching_middleware.go b/pkg/spaces/middleware/caching_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..62396fc8f1e9101f92885330a50797e26dbdebe2
--- /dev/null
+++ b/pkg/spaces/middleware/caching_middleware.go
@@ -0,0 +1,106 @@
+package service
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	service "git.perx.ru/perxis/perxis-go/pkg/spaces"
+)
+
+func orgKey(orgID string) string { return "org-" + orgID }
+
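+// CachingMiddleware returns a middleware that caches Get and List results and
+// invalidates the affected space and organisation entries on Create, Update,
+// UpdateConfig and Delete.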
+func CachingMiddleware(cache *cache.Cache) Middleware {
+	return func(next service.Spaces) service.Spaces {
+		m := &cachingMiddleware{
+			cache: cache,
+			next:  next,
+		}
+
+		return m
+	}
+}
+
+type cachingMiddleware struct {
+	cache *cache.Cache
+	next  service.Spaces
+}
+
+func (m cachingMiddleware) Create(ctx context.Context, space *service.Space) (sp *service.Space, err error) {
+
+	sp, err = m.next.Create(ctx, space)
+	if err == nil {
+		m.cache.Remove(orgKey(sp.OrgID))
+	}
+	return sp, err
+}
+
+func (m cachingMiddleware) Get(ctx context.Context, spaceId string) (sp *service.Space, err error) {
+
+	value, e := m.cache.Get(spaceId)
+	if e == nil {
+		return value.(*service.Space), err
+	}
+	sp, err = m.next.Get(ctx, spaceId)
+	if err == nil {
+		m.cache.Set(spaceId, sp)
+	}
+	return sp, err
+}
+
+func (m cachingMiddleware) List(ctx context.Context, orgId string) (spaces []*service.Space, err error) {
+
+	value, e := m.cache.Get(orgKey(orgId))
+	if e == nil {
+		return value.([]*service.Space), err
+	}
+	spaces, err = m.next.List(ctx, orgId)
+	if err == nil {
+		m.cache.Set(orgKey(orgId), spaces)
+		for _, s := range spaces {
+			m.cache.Set(s.ID, s)
+		}
+	}
+	return spaces, err
+}
+
+func (m cachingMiddleware) Update(ctx context.Context, space *service.Space) (err error) {
+
+	err = m.next.Update(ctx, space)
+	if err == nil {
+		value, e := m.cache.Get(space.ID)
+		if e == nil {
+			space := value.(*service.Space)
+			m.cache.Remove(orgKey(space.OrgID))
+		}
+		m.cache.Remove(space.ID)
+	}
+	return err
+}
+
+func (m cachingMiddleware) UpdateConfig(ctx context.Context, spaceId string, config *service.Config) (err error) {
+
+	err = m.next.UpdateConfig(ctx, spaceId, config)
+	if err == nil {
+		value, e := m.cache.Get(spaceId)
+		if e == nil {
+			space := value.(*service.Space)
+			m.cache.Remove(orgKey(space.OrgID))
+		}
+		m.cache.Remove(spaceId)
+	}
+	return err
+}
+
+func (m cachingMiddleware) Delete(ctx context.Context, spaceId string) (err error) {
+
+	err = m.next.Delete(ctx, spaceId)
+	if err == nil {
+		value, e := m.cache.Get(spaceId)
+		if e == nil {
+			space := value.(*service.Space)
+			m.cache.Remove(orgKey(space.OrgID))
+		}
+		m.cache.Remove(spaceId)
+	}
+	return err
+}
diff --git a/pkg/spaces/middleware/caching_middleware_test.go b/pkg/spaces/middleware/caching_middleware_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..2cfaf98db8713081225b3f01f39782f487292161
--- /dev/null
+++ b/pkg/spaces/middleware/caching_middleware_test.go
@@ -0,0 +1,241 @@
+package service
+
+import (
+	"context"
+	"testing"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+
+	"git.perx.ru/perxis/perxis-go/pkg/spaces"
+	spmocks "git.perx.ru/perxis/perxis-go/pkg/spaces/mocks"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/mock"
+	"github.com/stretchr/testify/require"
+)
+
+func TestSpacesCache(t *testing.T) {
+
+	const (
+		spaceID = "spaceID"
+		orgID   = "orgID"
+		size    = 5
+		ttl     = 20 * time.Millisecond
+	)
+
+	errNotFound := errors.NotFound(errors.New("not found"))
+
+	ctx := context.Background()
+
+	t.Run("Get from cache", func(t *testing.T) {
+		sp := &spmocks.Spaces{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl))(sp)
+
+		sp.On("Get", mock.Anything, spaceID).Return(&spaces.Space{ID: spaceID, OrgID: orgID, Name: "Space"}, nil).Once()
+
+		v1, err := svc.Get(ctx, spaceID)
+		require.NoError(t, err)
+
+		v2, err := svc.Get(ctx, spaceID)
+		require.NoError(t, err)
+		assert.Same(t, v1, v2, "Ожидается при повторном запросе получение объекта из кэша.")
+
+		sp.AssertExpectations(t)
+	})
+
+	t.Run("List from cache", func(t *testing.T) {
+		sp := &spmocks.Spaces{}
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl))(sp)
+
+		sp.On("List", mock.Anything, orgID).Return([]*spaces.Space{{ID: spaceID, OrgID: orgID, Name: "Space"}}, nil).Once()
+
+		vl1, err := svc.List(ctx, orgID)
+		require.NoError(t, err)
+
+		vl2, err := svc.List(ctx, orgID)
+		require.NoError(t, err)
+		assert.Same(t, vl1[0], vl2[0], "Ожидается при повторном запросе получение объектов из кэша.")
+
+		sp.AssertExpectations(t)
+	})
+
+	t.Run("Invalidate cache", func(t *testing.T) {
+		t.Run("After Update", func(t *testing.T) {
+			sp := &spmocks.Spaces{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(sp)
+
+			sp.On("Get", mock.Anything, spaceID).Return(&spaces.Space{ID: spaceID, OrgID: orgID, Name: "Space"}, nil).Once()
+			sp.On("List", mock.Anything, orgID).Return([]*spaces.Space{{ID: spaceID, OrgID: orgID, Name: "Space"}}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается при повторном запросе получение объекта из кэша.")
+
+			vl1, err := svc.List(ctx, orgID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, orgID)
+			require.NoError(t, err)
+			assert.Same(t, vl1[0], vl2[0], "Ожидается при повторном запросе получение объектов из кэша.")
+
+			sp.On("Update", mock.Anything, mock.Anything).Return(nil).Once()
+
+			err = svc.Update(ctx, &spaces.Space{ID: spaceID, OrgID: orgID, Name: "SpaceUPD"})
+			require.NoError(t, err)
+
+			sp.On("Get", mock.Anything, spaceID).Return(&spaces.Space{ID: spaceID, OrgID: orgID, Name: "SpaceUPD"}, nil).Once()
+			sp.On("List", mock.Anything, orgID).Return([]*spaces.Space{{ID: spaceID, OrgID: orgID, Name: "SpaceUPD"}}, nil).Once()
+
+			v3, err := svc.Get(ctx, spaceID)
+			require.NoError(t, err)
+			assert.NotSame(t, v2, v3, "Ожидается что кеш объекта был удален после обновления объекта.")
+
+			vl3, err := svc.List(ctx, orgID)
+			require.NoError(t, err)
+			assert.NotSame(t, vl2[0], vl3[0], "Ожидается что кеш объектов был удален после обновления объекта.")
+
+			sp.AssertExpectations(t)
+		})
+
+		t.Run("After UpdateConfig", func(t *testing.T) {
+			sp := &spmocks.Spaces{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(sp)
+
+			sp.On("Get", mock.Anything, spaceID).Return(&spaces.Space{ID: spaceID, OrgID: orgID, Name: "Space"}, nil).Once()
+			sp.On("List", mock.Anything, orgID).Return([]*spaces.Space{{ID: spaceID, OrgID: orgID, Name: "Space"}}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается при повторном запросе получение объекта из кэша.")
+
+			vl1, err := svc.List(ctx, orgID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, orgID)
+			require.NoError(t, err)
+			assert.Same(t, vl1[0], vl2[0], "Ожидается при повторном запросе получение объектов из кэша.")
+
+			sp.On("UpdateConfig", mock.Anything, spaceID, mock.Anything).Return(nil).Once()
+
+			err = svc.UpdateConfig(ctx, spaceID, &spaces.Config{Features: []string{"feature"}})
+			require.NoError(t, err)
+
+			sp.On("Get", mock.Anything, spaceID).Return(&spaces.Space{ID: spaceID, OrgID: orgID, Name: "SpaceUPD", Config: &spaces.Config{Features: []string{"feature"}}}, nil).Once()
+			sp.On("List", mock.Anything, orgID).Return([]*spaces.Space{{ID: spaceID, OrgID: orgID, Name: "SpaceUPD", Config: &spaces.Config{Features: []string{"feature"}}}}, nil).Once()
+
+			v3, err := svc.Get(ctx, spaceID)
+			require.NoError(t, err)
+			assert.NotSame(t, v2, v3, "Ожидается что кеш объекта был удален после обновления объекта.")
+
+			vl3, err := svc.List(ctx, orgID)
+			require.NoError(t, err)
+			assert.NotSame(t, vl2[0], vl3[0], "Ожидается что кеш объектов был удален после обновления объекта.")
+
+			sp.AssertExpectations(t)
+		})
+
+		t.Run("After Delete", func(t *testing.T) {
+			sp := &spmocks.Spaces{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(sp)
+
+			sp.On("Get", mock.Anything, spaceID).Return(&spaces.Space{ID: spaceID, OrgID: orgID, Name: "Space"}, nil).Once()
+			sp.On("List", mock.Anything, orgID).Return([]*spaces.Space{{ID: spaceID, OrgID: orgID, Name: "Space"}}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается при повторном запросе получение объекта из кэша.")
+
+			vl1, err := svc.List(ctx, orgID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, orgID)
+			require.NoError(t, err)
+			assert.Same(t, vl1[0], vl2[0], "Ожидается при повторном запросе получение объектов из кэша.")
+
+			sp.On("Delete", mock.Anything, spaceID).Return(nil).Once()
+
+			err = svc.Delete(ctx, spaceID)
+			require.NoError(t, err)
+
+			sp.On("Get", mock.Anything, spaceID).Return(nil, errNotFound).Once()
+			sp.On("List", mock.Anything, orgID).Return([]*spaces.Space{}, nil).Once()
+
+			_, err = svc.Get(ctx, spaceID)
+			require.Error(t, err)
+			assert.EqualError(t, err, "not found", "Ожидается что после удаления объекта кеш  был удален и получена ошибка от сервиса.")
+
+			vl3, err := svc.List(ctx, orgID)
+			require.NoError(t, err)
+			assert.Len(t, vl3, 0, "Ожидается что после удаления кеш объектов был удален.")
+
+			sp.AssertExpectations(t)
+		})
+
+		t.Run("After Create", func(t *testing.T) {
+			sp := &spmocks.Spaces{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(sp)
+
+			sp.On("List", mock.Anything, orgID).Return([]*spaces.Space{{ID: spaceID, OrgID: orgID, Name: "Space"}}, nil).Once()
+
+			vl1, err := svc.List(ctx, orgID)
+			require.NoError(t, err)
+
+			vl2, err := svc.List(ctx, orgID)
+			require.NoError(t, err)
+			assert.Same(t, vl1[0], vl2[0], "Ожидается при повторном запросе получение объектов из кэша.")
+
+			sp.On("Create", mock.Anything, mock.Anything).Return(&spaces.Space{ID: "spaceID2", OrgID: orgID, Name: "Space2"}, nil).Once()
+
+			_, err = svc.Create(ctx, &spaces.Space{ID: "spaceID2", OrgID: orgID, Name: "Space2"})
+			require.NoError(t, err)
+
+			sp.On("List", mock.Anything, orgID).Return([]*spaces.Space{{ID: spaceID, OrgID: orgID, Name: "Space"}, {ID: "spaceID2", OrgID: orgID, Name: "Space2"}}, nil).Once()
+
+			vl3, err := svc.List(ctx, orgID)
+			require.NoError(t, err)
+			assert.NotSame(t, vl2[0], vl3[0], "Ожидается что кеш объектов был удален после создания нового объекта.")
+
+			sp.AssertExpectations(t)
+		})
+
+		t.Run("After TTL expired", func(t *testing.T) {
+			sp := &spmocks.Spaces{}
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(sp)
+
+			sp.On("Get", mock.Anything, spaceID).Return(&spaces.Space{ID: spaceID, OrgID: orgID, Name: "Space"}, nil).Once()
+
+			v1, err := svc.Get(ctx, spaceID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, spaceID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Ожидается при повторном запросе получение объекта из кэша.")
+
+			time.Sleep(2 * ttl)
+			sp.On("Get", mock.Anything, spaceID).Return(&spaces.Space{ID: spaceID, OrgID: orgID, Name: "Space"}, nil).Once()
+
+			v3, err := svc.Get(ctx, spaceID)
+			require.NoError(t, err)
+			assert.NotSame(t, v2, v3, "Ожидается удаление объекта из кэша по истечению ttl.")
+
+			sp.AssertExpectations(t)
+		})
+	})
+}
diff --git a/pkg/spaces/middleware/error_logging_middleware.go b/pkg/spaces/middleware/error_logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..85b629ba32abe38e5030077e8fe81b04206650b0
--- /dev/null
+++ b/pkg/spaces/middleware/error_logging_middleware.go
@@ -0,0 +1,90 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/error_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/spaces -i Spaces -t ../../../assets/templates/middleware/error_log -o error_logging_middleware.go -l ""
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/spaces"
+	"go.uber.org/zap"
+)
+
+// errorLoggingMiddleware implements spaces.Spaces that is instrumented with logging
+type errorLoggingMiddleware struct {
+	logger *zap.Logger
+	next   spaces.Spaces
+}
+
+// ErrorLoggingMiddleware instruments an implementation of the spaces.Spaces with simple logging
+func ErrorLoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next spaces.Spaces) spaces.Spaces {
+		return &errorLoggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *errorLoggingMiddleware) Create(ctx context.Context, space *spaces.Space) (created *spaces.Space, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Create(ctx, space)
+}
+
+func (m *errorLoggingMiddleware) Delete(ctx context.Context, spaceId string) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Delete(ctx, spaceId)
+}
+
+func (m *errorLoggingMiddleware) Get(ctx context.Context, spaceId string) (space *spaces.Space, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Get(ctx, spaceId)
+}
+
+func (m *errorLoggingMiddleware) List(ctx context.Context, orgId string) (spaces []*spaces.Space, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.List(ctx, orgId)
+}
+
+func (m *errorLoggingMiddleware) Update(ctx context.Context, space *spaces.Space) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Update(ctx, space)
+}
+
+func (m *errorLoggingMiddleware) UpdateConfig(ctx context.Context, spaceId string, config *spaces.Config) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.UpdateConfig(ctx, spaceId, config)
+}
diff --git a/pkg/spaces/middleware/logging_middleware.go b/pkg/spaces/middleware/logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..1471ea19535139210e79968e5a4522001a0b2ae3
--- /dev/null
+++ b/pkg/spaces/middleware/logging_middleware.go
@@ -0,0 +1,248 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/access_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/spaces -i Spaces -t ../../../assets/templates/middleware/access_log -o logging_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/auth"
+	"git.perx.ru/perxis/perxis-go/pkg/spaces"
+	"go.uber.org/zap"
+	"go.uber.org/zap/zapcore"
+)
+
+// loggingMiddleware implements spaces.Spaces that is instrumented with logging
+type loggingMiddleware struct {
+	logger *zap.Logger
+	next   spaces.Spaces
+}
+
+// LoggingMiddleware instruments an implementation of the spaces.Spaces with simple logging
+func LoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next spaces.Spaces) spaces.Spaces {
+		return &loggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *loggingMiddleware) Create(ctx context.Context, space *spaces.Space) (created *spaces.Space, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":   ctx,
+		"space": space} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Create.Request", fields...)
+
+	created, err = m.next.Create(ctx, space)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"created": created,
+		"err":     err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Create.Response", fields...)
+
+	return created, err
+}
+
+func (m *loggingMiddleware) Delete(ctx context.Context, spaceId string) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Delete.Request", fields...)
+
+	err = m.next.Delete(ctx, spaceId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Delete.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Get(ctx context.Context, spaceId string) (space *spaces.Space, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Request", fields...)
+
+	space, err = m.next.Get(ctx, spaceId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"space": space,
+		"err":   err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Response", fields...)
+
+	return space, err
+}
+
+func (m *loggingMiddleware) List(ctx context.Context, orgId string) (spaces []*spaces.Space, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":   ctx,
+		"orgId": orgId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("List.Request", fields...)
+
+	spaces, err = m.next.List(ctx, orgId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"spaces": spaces,
+		"err":    err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("List.Response", fields...)
+
+	return spaces, err
+}
+
+func (m *loggingMiddleware) Update(ctx context.Context, space *spaces.Space) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":   ctx,
+		"space": space} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Update.Request", fields...)
+
+	err = m.next.Update(ctx, space)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Update.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) UpdateConfig(ctx context.Context, spaceId string, config *spaces.Config) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"spaceId": spaceId,
+		"config":  config} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("UpdateConfig.Request", fields...)
+
+	err = m.next.UpdateConfig(ctx, spaceId, config)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("UpdateConfig.Response", fields...)
+
+	return err
+}
diff --git a/pkg/spaces/middleware/middleware.go b/pkg/spaces/middleware/middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..9d9d2243026f6c1152f7625e4c15bd26b5b7b5fc
--- /dev/null
+++ b/pkg/spaces/middleware/middleware.go
@@ -0,0 +1,28 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/middleware
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/spaces -i Spaces -t ../../../assets/templates/middleware/middleware -o middleware.go -l ""
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/spaces"
+	"go.uber.org/zap"
+)
+
+type Middleware func(spaces.Spaces) spaces.Spaces
+
+func WithLog(s spaces.Spaces, logger *zap.Logger, log_access bool) spaces.Spaces {
+	if logger == nil {
+		logger = zap.NewNop()
+	}
+
+	logger = logger.Named("Spaces")
+	s = ErrorLoggingMiddleware(logger)(s)
+	if log_access {
+		s = LoggingMiddleware(logger)(s)
+	}
+	s = RecoveringMiddleware(logger)(s)
+	return s
+}
diff --git a/pkg/spaces/middleware/recovering_middleware.go b/pkg/spaces/middleware/recovering_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..5ca795bf5c188603794603ec11fc0c8a79479524
--- /dev/null
+++ b/pkg/spaces/middleware/recovering_middleware.go
@@ -0,0 +1,103 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/recovery
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/spaces -i Spaces -t ../../../assets/templates/middleware/recovery -o recovering_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+
+	"git.perx.ru/perxis/perxis-go/pkg/spaces"
+	"go.uber.org/zap"
+)
+
+// recoveringMiddleware implements spaces.Spaces that is instrumented with logging
+type recoveringMiddleware struct {
+	logger *zap.Logger
+	next   spaces.Spaces
+}
+
+// RecoveringMiddleware instruments an implementation of the spaces.Spaces with simple logging
+func RecoveringMiddleware(logger *zap.Logger) Middleware {
+	return func(next spaces.Spaces) spaces.Spaces {
+		return &recoveringMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *recoveringMiddleware) Create(ctx context.Context, space *spaces.Space) (created *spaces.Space, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Create(ctx, space)
+}
+
+func (m *recoveringMiddleware) Delete(ctx context.Context, spaceId string) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Delete(ctx, spaceId)
+}
+
+func (m *recoveringMiddleware) Get(ctx context.Context, spaceId string) (space *spaces.Space, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Get(ctx, spaceId)
+}
+
+func (m *recoveringMiddleware) List(ctx context.Context, orgId string) (spaces []*spaces.Space, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.List(ctx, orgId)
+}
+
+func (m *recoveringMiddleware) Update(ctx context.Context, space *spaces.Space) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Update(ctx, space)
+}
+
+func (m *recoveringMiddleware) UpdateConfig(ctx context.Context, spaceId string, config *spaces.Config) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.UpdateConfig(ctx, spaceId, config)
+}
diff --git a/pkg/template/builder.go b/pkg/template/builder.go
new file mode 100644
index 0000000000000000000000000000000000000000..9a4ddcdb73e6b5c585d77088630096351d19b2bd
--- /dev/null
+++ b/pkg/template/builder.go
@@ -0,0 +1,171 @@
+package template
+
+import (
+	"bytes"
+	"context"
+	"text/template"
+
+	"git.perx.ru/perxis/perxis-go/pkg/content"
+)
+
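+// Builder renders text/template strings with access to space content through
+// the "lookup" and "system" template functions.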
+type Builder struct {
+	ctx     context.Context
+	cnt     *content.Content
+	SpaceID string
+	EnvID   string
+	funcs   template.FuncMap
+	data    map[string]interface{}
+}
+
+func NewBuilder(cnt *content.Content, space, env string) *Builder {
+	return &Builder{
+		ctx:     context.Background(),
+		cnt:     cnt,
+		SpaceID: space,
+		EnvID:   env,
+		funcs:   make(template.FuncMap),
+	}
+}
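+
+// A minimal usage sketch (illustrative): render a template with data supplied via WithKV:
+//
+//	b := NewBuilder(cnt, "space", "env")
+//	out, err := b.WithKV("name", "world").Execute("hello {{ .name }}")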
+
+func (b *Builder) getFuncs() template.FuncMap {
+	return template.FuncMap{
+		"lookup": getLookup(b),
+		"system": getSystem(b),
+	}
+}
+
+func (b *Builder) WithData(data map[string]interface{}) *Builder {
+	bld := *b
+	bld.data = data
+	return &bld
+}
+
+func (b *Builder) WithKV(kv ...any) *Builder {
+	bld := *b
+	if bld.data == nil {
+		bld.data = make(map[string]interface{}, 10)
+	}
+	for i := 0; i < len(kv)-1; i += 2 {
+		k, _ := kv[i].(string)
+		v := kv[i+1]
+		if k != "" && v != nil {
+			bld.data[k] = v
+		}
+	}
+	return &bld
+}
+
+func (b *Builder) GetData() map[string]interface{} {
+	return b.data
+}
+
+func (b *Builder) WithSpace(space, env string) *Builder {
+	bld := *b
+	bld.SpaceID = space
+	bld.EnvID = env
+	return &bld
+}
+
+func (b *Builder) WithContext(ctx context.Context) *Builder {
+	bld := *b
+	bld.ctx = ctx
+	return &bld
+}
+
+func (b *Builder) Context() context.Context {
+	return b.ctx
+}
+
+func (b *Builder) Template() *template.Template {
+	return template.New("main").Funcs(b.getFuncs())
+}
+
+func (b *Builder) Execute(str string, data ...any) (string, error) {
+	t := b.Template()
+	buf := new(bytes.Buffer)
+	t, err := t.Parse(str)
+	if err != nil {
+		return "", err
+	}
+	if err = t.Execute(buf, b.getData(data...)); err != nil {
+		return "", err
+	}
+	return buf.String(), nil
+}
+
+func (b *Builder) ExecuteList(str []string, data ...any) ([]string, error) {
+	t := b.Template()
+	result := make([]string, len(str))
+	buffer := new(bytes.Buffer)
+	for i, tmpl := range str {
+		if tmpl == "" {
+			continue
+		}
+		t, err := t.Parse(tmpl)
+		if err != nil {
+			return []string{}, err
+		}
+		if err = t.Execute(buffer, b.getData(data...)); err != nil {
+			return []string{}, err
+		}
+		result[i] = buffer.String()
+		buffer.Reset()
+	}
+	return result, nil
+}
+
+func (b *Builder) ExecuteMap(str map[string]interface{}, data ...any) (map[string]interface{}, error) {
+	result := make(map[string]interface{}, len(str))
+	for k, v := range str {
+		switch t := v.(type) {
+		case string:
+			value, err := b.Execute(t, data...)
+			if err != nil {
+				return nil, err
+			}
+			v = value
+		case []string:
+			values, err := b.ExecuteList(append([]string{k}, t...), data...)
+			if err != nil {
+				return nil, err
+			}
+			k = values[0]
+			vv := make([]interface{}, 0, len(t))
+			for _, val := range values[1:] {
+				vv = append(vv, val)
+			}
+			v = vv
+		}
+
+		result[k] = v
+	}
+	return result, nil
+}
+
+func (b *Builder) getData(data ...any) any {
+	if len(data) == 0 {
+		return b.data
+	}
+
+	var res map[string]interface{}
+	for _, v := range data {
+		if m, ok := v.(map[string]interface{}); ok && b.data != nil {
+			res = mergeMaps(b.data, m)
+		}
+	}
+	if res != nil {
+		return res
+	}
+
+	return data[0]
+}
+
+func mergeMaps(in ...map[string]interface{}) map[string]interface{} {
+	out := make(map[string]interface{})
+	for _, i := range in {
+		for k, v := range i {
+			out[k] = v
+		}
+	}
+	return out
+}
diff --git a/pkg/template/builder_test.go b/pkg/template/builder_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..f8e2b34440dd73259a6ccd3ce9202a556caa5298
--- /dev/null
+++ b/pkg/template/builder_test.go
@@ -0,0 +1,272 @@
+package template
+
+import (
+	"context"
+	"errors"
+	"testing"
+	"text/template"
+
+	"git.perx.ru/perxis/perxis-go/pkg/content"
+	"git.perx.ru/perxis/perxis-go/pkg/items"
+	mocksitems "git.perx.ru/perxis/perxis-go/pkg/items/mocks"
+	"github.com/stretchr/testify/assert"
+)
+
+func TestBuilder_Execute(t *testing.T) {
+	tests := []struct {
+		name    string
+		ctx     context.Context
+		cnt     *content.Content
+		SpaceID string
+		EnvID   string
+		funcs   template.FuncMap
+		str     string
+		data    any
+		want    any
+		wantErr bool
+
+		itemsCall func(itemsSvc *mocksitems.Items)
+	}{
+		{name: "error", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: "hello {{ .a }}", data: "world", want: "", wantErr: true},
+		{name: "empty", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: "", data: "", want: "", wantErr: false},
+		{name: "#1", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: "hello {{ . }}", data: "world", want: "hello world", wantErr: false},
+		{name: "#2", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: "{{ . }}", data: "world", want: "world", wantErr: false},
+		{name: "#3 ", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: "", data: "world", want: "", wantErr: false},
+		{name: "#4 ", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: "hello", data: "world", want: "hello", wantErr: false},
+		{name: "lookup", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: "hello, {{ lookup \"secrets.dev.key\" }}", data: "", want: "hello, Luk", wantErr: false, itemsCall: func(itemsSvc *mocksitems.Items) {
+			itemsSvc.On("Get", context.Background(), "space", "env", "secrets", "dev").Return(&items.Item{
+				ID:           "dev",
+				SpaceID:      "space",
+				EnvID:        "env",
+				CollectionID: "secrets",
+				Data: map[string]interface{}{
+					"id":  "dev",
+					"key": "Luk",
+				},
+			}, nil).Once()
+		}},
+		{name: "lookup with slice", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: "numbers {{ lookup \"secrets.dev.slice\" }}", data: "", want: "numbers [1 2 3]", wantErr: false, itemsCall: func(itemsSvc *mocksitems.Items) {
+			itemsSvc.On("Get", context.Background(), "space", "env", "secrets", "dev").Return(&items.Item{
+				ID:           "dev",
+				SpaceID:      "space",
+				EnvID:        "env",
+				CollectionID: "secrets",
+				Data: map[string]interface{}{
+					"id":    "dev",
+					"slice": []int{1, 2, 3},
+				},
+			}, nil).Once()
+		}},
+		{name: "lookup with empty Data", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: "numbers {{ lookup \"secrets.dev.slice\" }}", data: "", want: "numbers <no value>", wantErr: false, itemsCall: func(itemsSvc *mocksitems.Items) {
+			itemsSvc.On("Get", context.Background(), "space", "env", "secrets", "dev").Return(&items.Item{
+				ID:           "dev",
+				SpaceID:      "space",
+				EnvID:        "env",
+				CollectionID: "secrets",
+				Data:         map[string]interface{}{},
+			}, nil).Once()
+		}},
+		{name: "lookup with incorrect field", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: "hello {{ lookup \"secrets.dev.incorrect\" }}", data: "", want: "hello <no value>", wantErr: false, itemsCall: func(itemsSvc *mocksitems.Items) {
+			itemsSvc.On("Get", context.Background(), "space", "env", "secrets", "dev").Return(&items.Item{
+				ID:           "dev",
+				SpaceID:      "space",
+				EnvID:        "env",
+				CollectionID: "secrets",
+				Data: map[string]interface{}{
+					"id":  "dev",
+					"key": "1234",
+				},
+			}, nil).Once()
+		}},
+		{name: "lookup not found", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: "hello {{ lookup \"secrets.prod.pass\" }}", data: "", want: "", wantErr: true, itemsCall: func(itemsSvc *mocksitems.Items) {
+			itemsSvc.On("Get", context.Background(), "space", "env", "secrets", "prod").Return(nil, errors.New("not found")).Once()
+		}},
+		{name: "lookup without itemID", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: "hello {{ lookup \"secrets.pass\" }}", data: "", want: "", wantErr: true},
+		{name: "system ", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: "hello {{ system.SpaceID }}", data: "", want: "hello space", wantErr: false},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			itemsSvc := &mocksitems.Items{}
+			if tt.itemsCall != nil {
+				tt.itemsCall(itemsSvc)
+			}
+			tt.cnt = &content.Content{
+				Items: itemsSvc,
+			}
+			b := &Builder{
+				ctx:     tt.ctx,
+				cnt:     tt.cnt,
+				SpaceID: tt.SpaceID,
+				EnvID:   tt.EnvID,
+				funcs:   tt.funcs,
+			}
+
+			got, err := b.Execute(tt.str, tt.data)
+			if tt.wantErr == true {
+				assert.Error(t, err)
+			} else {
+				assert.NoError(t, err)
+			}
+			assert.Equal(t, tt.want, got)
+			if tt.itemsCall != nil {
+				itemsSvc.AssertExpectations(t)
+			}
+		})
+	}
+}
+
+func TestBuilder_ExecuteList(t *testing.T) {
+	tests := []struct {
+		name    string
+		ctx     context.Context
+		cnt     *content.Content
+		SpaceID string
+		EnvID   string
+		funcs   template.FuncMap
+		str     []string
+		data    any
+		want    []string
+		wantErr bool
+
+		itemsCall func(itemsSvc *mocksitems.Items)
+	}{
+		{name: "error", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: []string{"hello { . }}", "go {{ . }"}, data: "world", want: []string{}, wantErr: true},
+		{name: "empty", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: []string{""}, data: "world", want: []string{""}, wantErr: false},
+		{name: "#1", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: []string{"hello {{ . }}", "go {{ . }}"}, data: "world", want: []string{"hello world", "go world"}, wantErr: false},
+		{name: "#2", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: []string{"{{ . }}"}, data: "world", want: []string{"world"}, wantErr: false},
+		{name: "#3 ", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: []string{""}, data: "world", want: []string{""}, wantErr: false},
+		{name: "#4 ", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: []string{"hello"}, data: "world", want: []string{"hello"}, wantErr: false},
+		{name: "lookup", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: []string{"hello {{ lookup \"secrets.dev.key\" }}"}, data: "", want: []string{"hello 1234"}, wantErr: false, itemsCall: func(itemsSvc *mocksitems.Items) {
+			itemsSvc.On("Get", context.Background(), "space", "env", "secrets", "dev").Return(&items.Item{
+				ID:           "dev",
+				SpaceID:      "space",
+				EnvID:        "env",
+				CollectionID: "secrets",
+				Data: map[string]interface{}{
+					"id":  "dev",
+					"key": "1234",
+				},
+			}, nil).Once()
+		}},
+		{name: "lookup with incorrect field", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: []string{"hello {{ lookup \"secrets.dev.incorrect\" }}"}, data: "", want: []string{"hello <no value>"}, wantErr: false, itemsCall: func(itemsSvc *mocksitems.Items) {
+			itemsSvc.On("Get", context.Background(), "space", "env", "secrets", "dev").Return(&items.Item{
+				ID:           "dev",
+				SpaceID:      "space",
+				EnvID:        "env",
+				CollectionID: "secrets",
+				Data: map[string]interface{}{
+					"id":  "dev",
+					"key": "1234",
+				},
+			}, nil).Once()
+		}},
+		{name: "system ", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: []string{"hello {{ system.SpaceID }}"}, data: "", want: []string{"hello space"}, wantErr: false},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			itemsSvc := &mocksitems.Items{}
+			if tt.itemsCall != nil {
+				tt.itemsCall(itemsSvc)
+			}
+			tt.cnt = &content.Content{
+				Items: itemsSvc,
+			}
+			b := &Builder{
+				ctx:     tt.ctx,
+				cnt:     tt.cnt,
+				SpaceID: tt.SpaceID,
+				EnvID:   tt.EnvID,
+				funcs:   tt.funcs,
+			}
+
+			got, err := b.ExecuteList(tt.str, tt.data)
+			if tt.wantErr == true {
+				assert.Error(t, err)
+			} else {
+				assert.NoError(t, err)
+			}
+			assert.Equal(t, tt.want, got)
+			if tt.itemsCall != nil {
+				itemsSvc.AssertExpectations(t)
+			}
+		})
+	}
+}
+
+func TestBuilder_ExecuteMap(t *testing.T) {
+	tests := []struct {
+		name    string
+		ctx     context.Context
+		cnt     *content.Content
+		SpaceID string
+		EnvID   string
+		funcs   template.FuncMap
+		str     map[string]interface{}
+		data    any
+		want    map[string]interface{}
+		wantErr bool
+
+		itemsCall func(itemsSvc *mocksitems.Items)
+	}{
+		{name: "error", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: map[string]interface{}{"hello": "{{ . }"}, data: "world", want: nil, wantErr: true},
+		{name: "empty", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: map[string]interface{}{}, data: "", want: map[string]interface{}{}, wantErr: false},
+		{name: "#1", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: map[string]interface{}{"hello": "{{ . }}"}, data: "world", want: map[string]interface{}{"hello": "world"}, wantErr: false},
+		{name: "#2", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: map[string]interface{}{"hello": "{{ . }}", "go": "{{ . }}"}, data: "world", want: map[string]interface{}{"hello": "world", "go": "world"}, wantErr: false},
+		{name: "#3 ", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: map[string]interface{}{}, data: "world", want: map[string]interface{}{}, wantErr: false},
+		{name: "#4 ", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: map[string]interface{}{"a": "b"}, data: "world", want: map[string]interface{}{"a": "b"}, wantErr: false},
+		{name: "lookup ", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: map[string]interface{}{"hello": "{{ lookup \"secrets.dev.key\" }}"}, data: "", want: map[string]interface{}{"hello": "1234"}, wantErr: false, itemsCall: func(itemsSvc *mocksitems.Items) {
+			itemsSvc.On("Get", context.Background(), "space", "env", "secrets", "dev").Return(&items.Item{
+				ID:           "dev",
+				SpaceID:      "space",
+				EnvID:        "env",
+				CollectionID: "secrets",
+				Data: map[string]interface{}{
+					"id":  "dev",
+					"key": "1234",
+				},
+			}, nil).Once()
+		}},
+		{name: "lookup with incorrect field", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: map[string]interface{}{"hello": "{{ lookup \"secrets.dev.incorrect\" }}"}, data: "", want: map[string]interface{}{"hello": "<no value>"}, wantErr: false, itemsCall: func(itemsSvc *mocksitems.Items) {
+			itemsSvc.On("Get", context.Background(), "space", "env", "secrets", "dev").Return(&items.Item{
+				ID:           "dev",
+				SpaceID:      "space",
+				EnvID:        "env",
+				CollectionID: "secrets",
+				Data: map[string]interface{}{
+					"id":  "dev",
+					"key": "1234",
+				},
+			}, nil).Once()
+		}},
+		{name: "system ", ctx: context.Background(), cnt: &content.Content{}, SpaceID: "space", EnvID: "env", funcs: template.FuncMap{}, str: map[string]interface{}{"hello": "{{ system.SpaceID }}"}, data: "", want: map[string]interface{}{"hello": "space"}, wantErr: false},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			itemsSvc := &mocksitems.Items{}
+			if tt.itemsCall != nil {
+				tt.itemsCall(itemsSvc)
+			}
+			tt.cnt = &content.Content{
+				Items: itemsSvc,
+			}
+			b := &Builder{
+				ctx:     tt.ctx,
+				cnt:     tt.cnt,
+				SpaceID: tt.SpaceID,
+				EnvID:   tt.EnvID,
+				funcs:   tt.funcs,
+			}
+
+			got, err := b.ExecuteMap(tt.str, tt.data)
+			if tt.wantErr == true {
+				assert.Error(t, err)
+			} else {
+				assert.NoError(t, err)
+			}
+			assert.Equal(t, tt.want, got)
+			if tt.itemsCall != nil {
+				itemsSvc.AssertExpectations(t)
+			}
+		})
+	}
+}
diff --git a/pkg/template/funcs.go b/pkg/template/funcs.go
new file mode 100644
index 0000000000000000000000000000000000000000..0c320ad139e964f002b691ce097b24e70e6cfaf3
--- /dev/null
+++ b/pkg/template/funcs.go
@@ -0,0 +1,43 @@
+package template
+
+import (
+	"strings"
+
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+)
+
+// getLookup returns a template function that fetches values from a collection record.
+// name is specified in the form "<collection id>.<item id>.<field>"
+// Usage in templates: {{ lookup "secrets.key.value" }}
+func getLookup(b *Builder) any {
+	return func(name string) (any, error) {
+		parsedName := strings.Split(name, ".")
+		if len(parsedName) < 3 {
+			return "", errors.Errorf("incorrect parameter \"%s\"", name)
+		}
+
+		collectionID := parsedName[0]
+		itemID := parsedName[1]
+		field := parsedName[2]
+		item, err := b.cnt.Items.Get(b.Context(), b.SpaceID, b.EnvID, collectionID, itemID)
+		if err != nil {
+		return "", errors.Wrapf(err, "failed to get \"%s\"", name)
+		}
+
+		if len(item.Data) > 0 {
+			if v, ok := item.Data[field]; ok {
+				return v, nil
+			}
+		}
+
+		return nil, nil
+	}
+}
+
+// getSystem returns a template function that provides access to System
+// Usage in templates: {{ system.SpaceID }}
+func getSystem(b *Builder) any {
+	return func() *System {
+		return &System{builder: b}
+	}
+}
diff --git a/pkg/template/system.go b/pkg/template/system.go
new file mode 100644
index 0000000000000000000000000000000000000000..8f8548eb11444e72ad4f003b9225f4d4a38e4e2a
--- /dev/null
+++ b/pkg/template/system.go
@@ -0,0 +1,13 @@
+package template
+
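+// System is exposed to templates through the "system" function and provides
+// access to the SpaceID and EnvID of the current Builder.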
+type System struct {
+	builder *Builder
+}
+
+func (s *System) SpaceID() string {
+	return s.builder.SpaceID
+}
+
+func (s *System) EnvID() string {
+	return s.builder.EnvID
+}
diff --git a/pkg/users/middleware/caching_middleware.go b/pkg/users/middleware/caching_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..6dc04868c75edcd77ba9bb2b3adda3f1ff3022f0
--- /dev/null
+++ b/pkg/users/middleware/caching_middleware.go
@@ -0,0 +1,91 @@
+package service
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	services "git.perx.ru/perxis/perxis-go/pkg/options"
+	service "git.perx.ru/perxis/perxis-go/pkg/users"
+)
+
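+// CachingMiddleware returns a Users middleware that caches users both by ID and
+// by every identity, so repeated reads are served from the cache.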
+func CachingMiddleware(cache *cache.Cache) Middleware {
+	return func(next service.Users) service.Users {
+		return &cachingMiddleware{
+			cache: cache,
+			next:  next,
+		}
+	}
+}
+
+type cachingMiddleware struct {
+	cache *cache.Cache
+	next  service.Users
+}
+
+func (m cachingMiddleware) Create(ctx context.Context, create *service.User) (user *service.User, err error) {
+	return m.next.Create(ctx, create)
+}
+
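+// Get returns the user from the cache when present; otherwise it calls the
+// underlying service and caches the result under the user ID and every identity.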
+func (m cachingMiddleware) Get(ctx context.Context, userId string) (user *service.User, err error) {
+
+	value, e := m.cache.Get(userId)
+	if e == nil {
+		return value.(*service.User), nil
+	}
+	user, err = m.next.Get(ctx, userId)
+	if err == nil {
+		m.cache.Set(user.ID, user)
+		for _, i := range user.Identities {
+			m.cache.Set(i, user)
+		}
+	}
+	return user, err
+}
+
+func (m cachingMiddleware) Find(ctx context.Context, filter *service.Filter, options *services.FindOptions) (users []*service.User, total int, err error) {
+	return m.next.Find(ctx, filter, options)
+}
+
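+// Update passes the call through and, on success, evicts the cached user
+// (by ID and all identities) so the next read fetches the fresh value.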
+func (m cachingMiddleware) Update(ctx context.Context, update *service.User) (err error) {
+
+	err = m.next.Update(ctx, update)
+	value, e := m.cache.Get(update.ID)
+	if err == nil && e == nil {
+		usr := value.(*service.User)
+		m.cache.Remove(usr.ID)
+		for _, i := range usr.Identities {
+			m.cache.Remove(i)
+		}
+	}
+	return err
+}
+
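+// Delete passes the call through and, on success, evicts the cached user
+// (by ID and all identities).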
+func (m cachingMiddleware) Delete(ctx context.Context, userId string) (err error) {
+
+	err = m.next.Delete(ctx, userId)
+	value, e := m.cache.Get(userId)
+	if err == nil && e == nil {
+		usr := value.(*service.User)
+		m.cache.Remove(usr.ID)
+		for _, i := range usr.Identities {
+			m.cache.Remove(i)
+		}
+	}
+	return err
+}
+
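+// GetByIdentity returns the user from the cache when present; otherwise it calls
+// the underlying service and caches the result under the user ID and every identity.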
+func (m cachingMiddleware) GetByIdentity(ctx context.Context, identity string) (user *service.User, err error) {
+
+	value, e := m.cache.Get(identity)
+	if e == nil {
+		return value.(*service.User), nil
+	}
+	user, err = m.next.GetByIdentity(ctx, identity)
+	if err == nil {
+		m.cache.Set(user.ID, user)
+		for _, i := range user.Identities {
+			m.cache.Set(i, user)
+		}
+	}
+	return user, err
+}
diff --git a/pkg/users/middleware/caching_middleware_test.go b/pkg/users/middleware/caching_middleware_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..7ad09b52fdc003eac069005a8cdb956dd58949a7
--- /dev/null
+++ b/pkg/users/middleware/caching_middleware_test.go
@@ -0,0 +1,165 @@
+package service
+
+import (
+	"context"
+	"testing"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/cache"
+	"git.perx.ru/perxis/perxis-go/pkg/errors"
+	"git.perx.ru/perxis/perxis-go/pkg/users"
+	"git.perx.ru/perxis/perxis-go/pkg/users/mocks"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/mock"
+	"github.com/stretchr/testify/require"
+)
+
+func TestUsersCache(t *testing.T) {
+
+	const (
+		userID   = "user_id"
+		identity = "user identity"
+		size     = 5
+		ttl      = 20 * time.Millisecond
+	)
+
+	errNotFound := errors.NotFound(errors.New("not found"))
+
+	t.Run("Get from cache", func(t *testing.T) {
+		usrs := &mocks.Users{}
+		ctx := context.Background()
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl))(usrs)
+
+		usrs.On("Get", mock.Anything, userID).Return(&users.User{ID: userID, Name: "User", Identities: []string{identity}}, nil).Once()
+
+		v1, err := svc.Get(ctx, userID)
+		require.NoError(t, err)
+
+		v2, err := svc.Get(ctx, userID)
+		require.NoError(t, err)
+		assert.Same(t, v1, v2, "Expected to get the object from the cache.")
+
+		v3, err := svc.GetByIdentity(ctx, identity)
+		require.NoError(t, err)
+		assert.Same(t, v2, v3, "Expected to get the object from the cache when requesting by identity.")
+
+		usrs.AssertExpectations(t)
+	})
+
+	t.Run("GetByIdentity from cache", func(t *testing.T) {
+		usrs := &mocks.Users{}
+		ctx := context.Background()
+
+		svc := CachingMiddleware(cache.NewCache(size, ttl))(usrs)
+
+		usrs.On("GetByIdentity", mock.Anything, identity).Return(&users.User{ID: userID, Name: "User", Identities: []string{identity}}, nil).Once()
+
+		v1, err := svc.GetByIdentity(ctx, identity)
+		require.NoError(t, err)
+
+		v2, err := svc.GetByIdentity(ctx, identity)
+		require.NoError(t, err)
+		assert.Same(t, v1, v2, "Expected to get the object from the cache.")
+
+		v3, err := svc.Get(ctx, userID)
+		require.NoError(t, err)
+		assert.Same(t, v2, v3, "Expected to get the object from the cache when requesting by userID.")
+
+		usrs.AssertExpectations(t)
+	})
+
+	t.Run("Invalidate Cache", func(t *testing.T) {
+		t.Run("After Update", func(t *testing.T) {
+			usrs := &mocks.Users{}
+			ctx := context.Background()
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(usrs)
+
+			usrs.On("Get", mock.Anything, userID).Return(&users.User{ID: userID, Name: "User", Identities: []string{identity}}, nil).Once()
+			usrs.On("Update", mock.Anything, mock.Anything).Return(nil).Once()
+
+			v1, err := svc.Get(ctx, userID)
+			require.NoError(t, err)
+
+			v2, err := svc.GetByIdentity(ctx, identity)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Expected to get the object from the cache.")
+
+			err = svc.Update(ctx, &users.User{ID: userID, Name: "New User", Identities: []string{identity}})
+			require.NoError(t, err)
+
+			usrs.On("GetByIdentity", mock.Anything, identity).Return(&users.User{ID: userID, Name: "New User", Identities: []string{identity}}, nil).Once()
+
+			v3, err := svc.GetByIdentity(ctx, identity)
+			require.NoError(t, err)
+			assert.NotSame(t, v3, v2, "Expected the object to be evicted from the cache after the update and fetched again from the service.")
+
+			v4, err := svc.Get(ctx, userID)
+			require.NoError(t, err)
+			assert.NotSame(t, v4, v2)
+			assert.Same(t, v4, v3, "Expected to get the new object from the cache.")
+
+			usrs.AssertExpectations(t)
+		})
+
+		t.Run("After Delete", func(t *testing.T) {
+			usrs := &mocks.Users{}
+			ctx := context.Background()
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(usrs)
+
+			usrs.On("Get", mock.Anything, userID).Return(&users.User{ID: userID, Name: "User", Identities: []string{identity}}, nil).Once()
+			usrs.On("Delete", mock.Anything, mock.Anything).Return(nil).Once()
+
+			v1, err := svc.Get(ctx, userID)
+			require.NoError(t, err)
+
+			v2, err := svc.GetByIdentity(ctx, identity)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Expected to get the object from the cache.")
+
+			err = svc.Delete(ctx, userID)
+			require.NoError(t, err)
+
+			usrs.On("GetByIdentity", mock.Anything, identity).Return(nil, errNotFound).Once()
+			usrs.On("Get", mock.Anything, userID).Return(nil, errNotFound).Once()
+
+			_, err = svc.GetByIdentity(ctx, identity)
+			require.Error(t, err)
+			assert.EqualErrorf(t, err, "not found", "Expected the object to be evicted from the cache after deletion from the storage and an error to be returned by the service.")
+
+			_, err = svc.Get(ctx, userID)
+			require.Error(t, err)
+			assert.EqualErrorf(t, err, "not found", "Expected the object to be evicted from the cache after deletion from the storage and an error to be returned by the service.")
+
+			usrs.AssertExpectations(t)
+		})
+
+		t.Run("After TTL expired", func(t *testing.T) {
+			usrs := &mocks.Users{}
+			ctx := context.Background()
+
+			svc := CachingMiddleware(cache.NewCache(size, ttl))(usrs)
+
+			usrs.On("Get", mock.Anything, userID).Return(&users.User{ID: userID, Name: "User", Identities: []string{identity}}, nil).Once()
+
+			v1, err := svc.Get(ctx, userID)
+			require.NoError(t, err)
+
+			v2, err := svc.Get(ctx, userID)
+			require.NoError(t, err)
+			assert.Same(t, v1, v2, "Expected to get the object from the cache.")
+
+			time.Sleep(2 * ttl)
+
+			usrs.On("Get", mock.Anything, userID).Return(&users.User{ID: userID, Name: "User", Identities: []string{identity}}, nil).Once()
+
+			v3, err := svc.Get(ctx, userID)
+			require.NoError(t, err)
+			assert.NotSame(t, v2, v3, "Expected the object to be fetched from the service again after the cache TTL expired.")
+
+			usrs.AssertExpectations(t)
+		})
+	})
+}
diff --git a/pkg/users/middleware/error_logging_middleware.go b/pkg/users/middleware/error_logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..a9084fa7a05608e45f7c5436934b5738c685a0e9
--- /dev/null
+++ b/pkg/users/middleware/error_logging_middleware.go
@@ -0,0 +1,91 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/error_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/users -i Users -t ../../../assets/templates/middleware/error_log -o error_logging_middleware.go -l ""
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+	"git.perx.ru/perxis/perxis-go/pkg/users"
+	"go.uber.org/zap"
+)
+
+// errorLoggingMiddleware implements users.Users that is instrumented with logging
+type errorLoggingMiddleware struct {
+	logger *zap.Logger
+	next   users.Users
+}
+
+// ErrorLoggingMiddleware instruments an implementation of the users.Users with simple logging
+func ErrorLoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next users.Users) users.Users {
+		return &errorLoggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *errorLoggingMiddleware) Create(ctx context.Context, create *users.User) (user *users.User, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Create(ctx, create)
+}
+
+func (m *errorLoggingMiddleware) Delete(ctx context.Context, userId string) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Delete(ctx, userId)
+}
+
+func (m *errorLoggingMiddleware) Find(ctx context.Context, filter *users.Filter, options *options.FindOptions) (users []*users.User, total int, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Find(ctx, filter, options)
+}
+
+func (m *errorLoggingMiddleware) Get(ctx context.Context, userId string) (user *users.User, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Get(ctx, userId)
+}
+
+func (m *errorLoggingMiddleware) GetByIdentity(ctx context.Context, identity string) (user *users.User, err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.GetByIdentity(ctx, identity)
+}
+
+func (m *errorLoggingMiddleware) Update(ctx context.Context, update *users.User) (err error) {
+	logger := m.logger
+	defer func() {
+		if err != nil {
+			logger.Warn("response error", zap.Error(err))
+		}
+	}()
+	return m.next.Update(ctx, update)
+}
diff --git a/pkg/users/middleware/logging_middleware.go b/pkg/users/middleware/logging_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..1fcae0626b75992ae563ff87adfd4e33edd49af6
--- /dev/null
+++ b/pkg/users/middleware/logging_middleware.go
@@ -0,0 +1,251 @@
+package service
+
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/access_log
+// gowrap: http://github.com/hexdigest/gowrap
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/users -i Users -t ../../../assets/templates/middleware/access_log -o logging_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+	"time"
+
+	"git.perx.ru/perxis/perxis-go/pkg/auth"
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+	"git.perx.ru/perxis/perxis-go/pkg/users"
+	"go.uber.org/zap"
+	"go.uber.org/zap/zapcore"
+)
+
+// loggingMiddleware implements users.Users that is instrumented with logging
+type loggingMiddleware struct {
+	logger *zap.Logger
+	next   users.Users
+}
+
+// LoggingMiddleware instruments an implementation of the users.Users with simple logging
+func LoggingMiddleware(logger *zap.Logger) Middleware {
+	return func(next users.Users) users.Users {
+		return &loggingMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *loggingMiddleware) Create(ctx context.Context, create *users.User) (user *users.User, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":    ctx,
+		"create": create} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Create.Request", fields...)
+
+	user, err = m.next.Create(ctx, create)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"user": user,
+		"err":  err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Create.Response", fields...)
+
+	return user, err
+}
+
+func (m *loggingMiddleware) Delete(ctx context.Context, userId string) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":    ctx,
+		"userId": userId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Delete.Request", fields...)
+
+	err = m.next.Delete(ctx, userId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Delete.Response", fields...)
+
+	return err
+}
+
+func (m *loggingMiddleware) Find(ctx context.Context, filter *users.Filter, options *options.FindOptions) (users []*users.User, total int, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":     ctx,
+		"filter":  filter,
+		"options": options} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Find.Request", fields...)
+
+	users, total, err = m.next.Find(ctx, filter, options)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"users": users,
+		"total": total,
+		"err":   err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Find.Response", fields...)
+
+	return users, total, err
+}
+
+func (m *loggingMiddleware) Get(ctx context.Context, userId string) (user *users.User, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":    ctx,
+		"userId": userId} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Request", fields...)
+
+	user, err = m.next.Get(ctx, userId)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"user": user,
+		"err":  err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Get.Response", fields...)
+
+	return user, err
+}
+
+func (m *loggingMiddleware) GetByIdentity(ctx context.Context, identity string) (user *users.User, err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":      ctx,
+		"identity": identity} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("GetByIdentity.Request", fields...)
+
+	user, err = m.next.GetByIdentity(ctx, identity)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"user": user,
+		"err":  err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("GetByIdentity.Response", fields...)
+
+	return user, err
+}
+
+func (m *loggingMiddleware) Update(ctx context.Context, update *users.User) (err error) {
+	begin := time.Now()
+	var fields []zapcore.Field
+	for k, v := range map[string]interface{}{
+		"ctx":    ctx,
+		"update": update} {
+		if k == "ctx" {
+			fields = append(fields, zap.String("principal", fmt.Sprint(auth.GetPrincipal(ctx))))
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Update.Request", fields...)
+
+	err = m.next.Update(ctx, update)
+
+	fields = []zapcore.Field{
+		zap.Duration("time", time.Since(begin)),
+		zap.Error(err),
+	}
+
+	for k, v := range map[string]interface{}{
+		"err": err} {
+		if k == "err" {
+			continue
+		}
+		fields = append(fields, zap.Reflect(k, v))
+	}
+
+	m.logger.Debug("Update.Response", fields...)
+
+	return err
+}
diff --git a/pkg/users/middleware/middleware.go b/pkg/users/middleware/middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..2888f263ca2083bdea91e3c3e62cde40f85e974f
--- /dev/null
+++ b/pkg/users/middleware/middleware.go
@@ -0,0 +1,28 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/middleware
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/users -i Users -t ../../../assets/templates/middleware/middleware -o middleware.go -l ""
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/users"
+	"go.uber.org/zap"
+)
+
+type Middleware func(users.Users) users.Users
+
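+// WithLog wraps the Users service with error logging, optional access logging
+// and panic recovery (recovery is the outermost layer).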
+func WithLog(s users.Users, logger *zap.Logger, log_access bool) users.Users {
+	if logger == nil {
+		logger = zap.NewNop()
+	}
+
+	logger = logger.Named("Users")
+	s = ErrorLoggingMiddleware(logger)(s)
+	if log_access {
+		s = LoggingMiddleware(logger)(s)
+	}
+	s = RecoveringMiddleware(logger)(s)
+	return s
+}
diff --git a/pkg/users/middleware/recovering_middleware.go b/pkg/users/middleware/recovering_middleware.go
new file mode 100644
index 0000000000000000000000000000000000000000..57c401b12c0b56e9f40109570c6efe59bac8d902
--- /dev/null
+++ b/pkg/users/middleware/recovering_middleware.go
@@ -0,0 +1,104 @@
+// Code generated by gowrap. DO NOT EDIT.
+// template: ../../../assets/templates/middleware/recovery
+// gowrap: http://github.com/hexdigest/gowrap
+
+package service
+
+//go:generate gowrap gen -p git.perx.ru/perxis/perxis-go/pkg/users -i Users -t ../../../assets/templates/middleware/recovery -o recovering_middleware.go -l ""
+
+import (
+	"context"
+	"fmt"
+
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+	"git.perx.ru/perxis/perxis-go/pkg/users"
+	"go.uber.org/zap"
+)
+
+// recoveringMiddleware implements users.Users that is instrumented with logging
+type recoveringMiddleware struct {
+	logger *zap.Logger
+	next   users.Users
+}
+
+// RecoveringMiddleware instruments an implementation of the users.Users with simple logging
+func RecoveringMiddleware(logger *zap.Logger) Middleware {
+	return func(next users.Users) users.Users {
+		return &recoveringMiddleware{
+			next:   next,
+			logger: logger,
+		}
+	}
+}
+
+func (m *recoveringMiddleware) Create(ctx context.Context, create *users.User) (user *users.User, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Create(ctx, create)
+}
+
+func (m *recoveringMiddleware) Delete(ctx context.Context, userId string) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Delete(ctx, userId)
+}
+
+func (m *recoveringMiddleware) Find(ctx context.Context, filter *users.Filter, options *options.FindOptions) (users []*users.User, total int, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Find(ctx, filter, options)
+}
+
+func (m *recoveringMiddleware) Get(ctx context.Context, userId string) (user *users.User, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Get(ctx, userId)
+}
+
+func (m *recoveringMiddleware) GetByIdentity(ctx context.Context, identity string) (user *users.User, err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.GetByIdentity(ctx, identity)
+}
+
+func (m *recoveringMiddleware) Update(ctx context.Context, update *users.User) (err error) {
+	logger := m.logger
+	defer func() {
+		if r := recover(); r != nil {
+			logger.Error("panic", zap.Error(fmt.Errorf("%v", r)))
+			err = fmt.Errorf("%v", r)
+		}
+	}()
+
+	return m.next.Update(ctx, update)
+}
diff --git a/pkg/users/mocks/Users.go b/pkg/users/mocks/Users.go
new file mode 100644
index 0000000000000000000000000000000000000000..6e54f18c4dfc78d8473e5499262eb7d65c783e4d
--- /dev/null
+++ b/pkg/users/mocks/Users.go
@@ -0,0 +1,143 @@
+// Code generated by mockery v2.7.4. DO NOT EDIT.
+
+package mocks
+
+import (
+	context "context"
+
+	options "git.perx.ru/perxis/perxis-go/pkg/options"
+	users "git.perx.ru/perxis/perxis-go/pkg/users"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// Users is an autogenerated mock type for the Users type
+type Users struct {
+	mock.Mock
+}
+
+// Create provides a mock function with given fields: ctx, create
+func (_m *Users) Create(ctx context.Context, create *users.User) (*users.User, error) {
+	ret := _m.Called(ctx, create)
+
+	var r0 *users.User
+	if rf, ok := ret.Get(0).(func(context.Context, *users.User) *users.User); ok {
+		r0 = rf(ctx, create)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*users.User)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, *users.User) error); ok {
+		r1 = rf(ctx, create)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Delete provides a mock function with given fields: ctx, userId
+func (_m *Users) Delete(ctx context.Context, userId string) error {
+	ret := _m.Called(ctx, userId)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, string) error); ok {
+		r0 = rf(ctx, userId)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// Find provides a mock function with given fields: ctx, filter, options
+func (_m *Users) Find(ctx context.Context, filter *users.Filter, opts *options.FindOptions) ([]*users.User, int, error) {
+	ret := _m.Called(ctx, filter, opts)
+
+	var r0 []*users.User
+	if rf, ok := ret.Get(0).(func(context.Context, *users.Filter, *options.FindOptions) []*users.User); ok {
+		r0 = rf(ctx, filter, opts)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*users.User)
+		}
+	}
+
+	var r1 int
+	if rf, ok := ret.Get(1).(func(context.Context, *users.Filter, *options.FindOptions) int); ok {
+		r1 = rf(ctx, filter, opts)
+	} else {
+		r1 = ret.Get(1).(int)
+	}
+
+	var r2 error
+	if rf, ok := ret.Get(2).(func(context.Context, *users.Filter, *options.FindOptions) error); ok {
+		r2 = rf(ctx, filter, opts)
+	} else {
+		r2 = ret.Error(2)
+	}
+
+	return r0, r1, r2
+}
+
+// Get provides a mock function with given fields: ctx, userId
+func (_m *Users) Get(ctx context.Context, userId string) (*users.User, error) {
+	ret := _m.Called(ctx, userId)
+
+	var r0 *users.User
+	if rf, ok := ret.Get(0).(func(context.Context, string) *users.User); ok {
+		r0 = rf(ctx, userId)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*users.User)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string) error); ok {
+		r1 = rf(ctx, userId)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// GetByIdentity provides a mock function with given fields: ctx, identity
+func (_m *Users) GetByIdentity(ctx context.Context, identity string) (*users.User, error) {
+	ret := _m.Called(ctx, identity)
+
+	var r0 *users.User
+	if rf, ok := ret.Get(0).(func(context.Context, string) *users.User); ok {
+		r0 = rf(ctx, identity)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*users.User)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, string) error); ok {
+		r1 = rf(ctx, identity)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// Update provides a mock function with given fields: ctx, update
+func (_m *Users) Update(ctx context.Context, update *users.User) error {
+	ret := _m.Called(ctx, update)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(context.Context, *users.User) error); ok {
+		r0 = rf(ctx, update)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
diff --git a/pkg/users/service.go b/pkg/users/service.go
new file mode 100644
index 0000000000000000000000000000000000000000..d64a5ceceed51d3731eaa6641baf64409cc7389d
--- /dev/null
+++ b/pkg/users/service.go
@@ -0,0 +1,30 @@
+package users
+
+import (
+	"context"
+
+	"git.perx.ru/perxis/perxis-go/pkg/options"
+)
+
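+// Users is the service interface for managing user accounts.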
+// @microgen grpc
+// @protobuf git.perx.ru/perxis/perxis-go/proto/users
+// @grpc-addr account.users.Users
+type Users interface {
+	Create(ctx context.Context, create *User) (user *User, err error)
+	Get(ctx context.Context, userId string) (user *User, err error)
+	Find(ctx context.Context, filter *Filter, options *options.FindOptions) (users []*User, total int, err error)
+	Update(ctx context.Context, update *User) (err error)
+	Delete(ctx context.Context, userId string) (err error)
+	GetByIdentity(ctx context.Context, identity string) (user *User, err error)
+}
+
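+// Filter describes the criteria used by Find to select users.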
+type Filter struct {
+	ID            []string
+	Name          []string
+	Identities    []string
+	DisplayName   []string
+	Email         []string
+	AvatarUri     []string
+	EmailVerified *bool
+	System        *bool
+}
diff --git a/pkg/users/transport/client.microgen.go b/pkg/users/transport/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..74ca261a3bc5ec1cf99c655c82ec5b0345489ce3
--- /dev/null
+++ b/pkg/users/transport/client.microgen.go
@@ -0,0 +1,88 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+	"errors"
+
+	options "git.perx.ru/perxis/perxis-go/pkg/options"
+	users "git.perx.ru/perxis/perxis-go/pkg/users"
+	codes "google.golang.org/grpc/codes"
+	status "google.golang.org/grpc/status"
+)
+
+func (set EndpointsSet) Create(arg0 context.Context, arg1 *users.User) (res0 *users.User, res1 error) {
+	request := CreateRequest{Create: arg1}
+	response, res1 := set.CreateEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*CreateResponse).User, res1
+}
+
+func (set EndpointsSet) Get(arg0 context.Context, arg1 string) (res0 *users.User, res1 error) {
+	request := GetRequest{UserId: arg1}
+	response, res1 := set.GetEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*GetResponse).User, res1
+}
+
+func (set EndpointsSet) Find(arg0 context.Context, arg1 *users.Filter, arg2 *options.FindOptions) (res0 []*users.User, res1 int, res2 error) {
+	request := FindRequest{
+		Filter:  arg1,
+		Options: arg2,
+	}
+	response, res2 := set.FindEndpoint(arg0, &request)
+	if res2 != nil {
+		if e, ok := status.FromError(res2); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res2 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*FindResponse).Users, response.(*FindResponse).Total, res2
+}
+
+func (set EndpointsSet) Update(arg0 context.Context, arg1 *users.User) (res0 error) {
+	request := UpdateRequest{Update: arg1}
+	_, res0 = set.UpdateEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) Delete(arg0 context.Context, arg1 string) (res0 error) {
+	request := DeleteRequest{UserId: arg1}
+	_, res0 = set.DeleteEndpoint(arg0, &request)
+	if res0 != nil {
+		if e, ok := status.FromError(res0); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res0 = errors.New(e.Message())
+		}
+		return
+	}
+	return res0
+}
+
+func (set EndpointsSet) GetByIdentity(arg0 context.Context, arg1 string) (res0 *users.User, res1 error) {
+	request := GetByIdentityRequest{Identity: arg1}
+	response, res1 := set.GetByIdentityEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*GetByIdentityResponse).User, res1
+}
diff --git a/pkg/users/transport/endpoints.microgen.go b/pkg/users/transport/endpoints.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..264025bfc25edd39b423f5ca983188bc6e3a9e60
--- /dev/null
+++ b/pkg/users/transport/endpoints.microgen.go
@@ -0,0 +1,15 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import endpoint "github.com/go-kit/kit/endpoint"
+
+// EndpointsSet implements Users API and used for transport purposes.
+type EndpointsSet struct {
+	CreateEndpoint        endpoint.Endpoint
+	GetEndpoint           endpoint.Endpoint
+	FindEndpoint          endpoint.Endpoint
+	UpdateEndpoint        endpoint.Endpoint
+	DeleteEndpoint        endpoint.Endpoint
+	GetByIdentityEndpoint endpoint.Endpoint
+}
diff --git a/pkg/users/transport/exchanges.microgen.go b/pkg/users/transport/exchanges.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..f70b8cdfe93eb8b843ec66136e8fa63bccc735ee
--- /dev/null
+++ b/pkg/users/transport/exchanges.microgen.go
@@ -0,0 +1,52 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	options "git.perx.ru/perxis/perxis-go/pkg/options"
+	users "git.perx.ru/perxis/perxis-go/pkg/users"
+)
+
+type (
+	CreateRequest struct {
+		Create *users.User `json:"create"`
+	}
+	CreateResponse struct {
+		User *users.User `json:"user"`
+	}
+
+	GetRequest struct {
+		UserId string `json:"user_id"`
+	}
+	GetResponse struct {
+		User *users.User `json:"user"`
+	}
+
+	FindRequest struct {
+		Filter  *users.Filter        `json:"filter"`
+		Options *options.FindOptions `json:"options"`
+	}
+	FindResponse struct {
+		Users []*users.User `json:"users"`
+		Total int           `json:"total"`
+	}
+
+	UpdateRequest struct {
+		Update *users.User `json:"update"`
+	}
+	// Formal exchange type, please do not delete.
+	UpdateResponse struct{}
+
+	DeleteRequest struct {
+		UserId string `json:"user_id"`
+	}
+	// Formal exchange type, please do not delete.
+	DeleteResponse struct{}
+
+	GetByIdentityRequest struct {
+		Identity string `json:"identity"`
+	}
+	GetByIdentityResponse struct {
+		User *users.User `json:"user"`
+	}
+)
diff --git a/pkg/users/transport/grpc/client.microgen.go b/pkg/users/transport/grpc/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..98f4b74c1d23e667d8c1d2c7290cd136f5e20a5d
--- /dev/null
+++ b/pkg/users/transport/grpc/client.microgen.go
@@ -0,0 +1,61 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/users/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/users"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	grpc "google.golang.org/grpc"
+)
+
+func NewGRPCClient(conn *grpc.ClientConn, addr string, opts ...grpckit.ClientOption) transport.EndpointsSet {
+	if addr == "" {
+		addr = "account.users.Users"
+	}
+	return transport.EndpointsSet{
+		CreateEndpoint: grpckit.NewClient(
+			conn, addr, "Create",
+			_Encode_Create_Request,
+			_Decode_Create_Response,
+			pb.CreateResponse{},
+			opts...,
+		).Endpoint(),
+		DeleteEndpoint: grpckit.NewClient(
+			conn, addr, "Delete",
+			_Encode_Delete_Request,
+			_Decode_Delete_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+		FindEndpoint: grpckit.NewClient(
+			conn, addr, "Find",
+			_Encode_Find_Request,
+			_Decode_Find_Response,
+			pb.FindResponse{},
+			opts...,
+		).Endpoint(),
+		GetByIdentityEndpoint: grpckit.NewClient(
+			conn, addr, "GetByIdentity",
+			_Encode_GetByIdentity_Request,
+			_Decode_GetByIdentity_Response,
+			pb.GetByIdentityResponse{},
+			opts...,
+		).Endpoint(),
+		GetEndpoint: grpckit.NewClient(
+			conn, addr, "Get",
+			_Encode_Get_Request,
+			_Decode_Get_Response,
+			pb.GetResponse{},
+			opts...,
+		).Endpoint(),
+		UpdateEndpoint: grpckit.NewClient(
+			conn, addr, "Update",
+			_Encode_Update_Request,
+			_Decode_Update_Response,
+			empty.Empty{},
+			opts...,
+		).Endpoint(),
+	}
+}
diff --git a/pkg/users/transport/grpc/protobuf_endpoint_converters.microgen.go b/pkg/users/transport/grpc/protobuf_endpoint_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..1837d41081572ab74ebfd7c9594a8292b4c9de29
--- /dev/null
+++ b/pkg/users/transport/grpc/protobuf_endpoint_converters.microgen.go
@@ -0,0 +1,265 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// Please, do not change functions names!
+package transportgrpc
+
+import (
+	"context"
+	"errors"
+
+	transport "git.perx.ru/perxis/perxis-go/pkg/users/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/users"
+	empty "github.com/golang/protobuf/ptypes/empty"
+)
+
+func _Encode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*transport.CreateRequest)
+	reqCreate, err := PtrUserToProto(req.Create)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateRequest{Create: reqCreate}, nil
+}
+
+func _Encode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*transport.GetRequest)
+	return &pb.GetRequest{UserId: req.UserId}, nil
+}
+
+func _Encode_Find_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindRequest")
+	}
+	req := request.(*transport.FindRequest)
+	reqFilter, err := PtrFilterToProto(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := PtrServicesFindOptionsToProto(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindRequest{
+		Filter:  reqFilter,
+		Options: reqOptions,
+	}, nil
+}
+
+func _Encode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UpdateRequest")
+	}
+	req := request.(*transport.UpdateRequest)
+	reqUpdate, err := PtrUserToProto(req.Update)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.UpdateRequest{Update: reqUpdate}, nil
+}
+
+func _Encode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*transport.DeleteRequest)
+	return &pb.DeleteRequest{UserId: req.UserId}, nil
+}
+
+func _Encode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*transport.CreateResponse)
+	respUser, err := PtrUserToProto(resp.User)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.CreateResponse{User: respUser}, nil
+}
+
+func _Encode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*transport.GetResponse)
+	respUser, err := PtrUserToProto(resp.User)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetResponse{User: respUser}, nil
+}
+
+func _Encode_Find_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindResponse")
+	}
+	resp := response.(*transport.FindResponse)
+	respUsers, err := ListPtrUserToProto(resp.Users)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.FindResponse{
+		Total: int64(resp.Total),
+		Users: respUsers,
+	}, nil
+}
+
+func _Encode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Create_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil CreateRequest")
+	}
+	req := request.(*pb.CreateRequest)
+	reqCreate, err := ProtoToPtrUser(req.Create)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateRequest{Create: reqCreate}, nil
+}
+
+func _Decode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetRequest")
+	}
+	req := request.(*pb.GetRequest)
+	return &transport.GetRequest{UserId: string(req.UserId)}, nil
+}
+
+func _Decode_Find_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil FindRequest")
+	}
+	req := request.(*pb.FindRequest)
+	reqFilter, err := ProtoToPtrFilter(req.Filter)
+	if err != nil {
+		return nil, err
+	}
+	reqOptions, err := ProtoToPtrServicesFindOptions(req.Options)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindRequest{
+		Filter:  reqFilter,
+		Options: reqOptions,
+	}, nil
+}
+
+func _Decode_Update_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil UpdateRequest")
+	}
+	req := request.(*pb.UpdateRequest)
+	reqUpdate, err := ProtoToPtrUser(req.Update)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.UpdateRequest{Update: reqUpdate}, nil
+}
+
+func _Decode_Delete_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil DeleteRequest")
+	}
+	req := request.(*pb.DeleteRequest)
+	return &transport.DeleteRequest{UserId: string(req.UserId)}, nil
+}
+
+func _Decode_Create_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil CreateResponse")
+	}
+	resp := response.(*pb.CreateResponse)
+	respUser, err := ProtoToPtrUser(resp.User)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.CreateResponse{User: respUser}, nil
+}
+
+func _Decode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*pb.GetResponse)
+	respUser, err := ProtoToPtrUser(resp.User)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetResponse{User: respUser}, nil
+}
+
+func _Decode_Find_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil FindResponse")
+	}
+	resp := response.(*pb.FindResponse)
+	respUsers, err := ProtoToListPtrUser(resp.Users)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.FindResponse{
+		Total: int(resp.Total),
+		Users: respUsers,
+	}, nil
+}
+
+func _Decode_Delete_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_GetByIdentity_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetByIdentityRequest")
+	}
+	req := request.(*transport.GetByIdentityRequest)
+	return &pb.GetByIdentityRequest{Identity: req.Identity}, nil
+}
+
+func _Encode_GetByIdentity_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetByIdentityResponse")
+	}
+	resp := response.(*transport.GetByIdentityResponse)
+	respUser, err := PtrUserToProto(resp.User)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetByIdentityResponse{User: respUser}, nil
+}
+
+func _Decode_GetByIdentity_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	if request == nil {
+		return nil, errors.New("nil GetByIdentityRequest")
+	}
+	req := request.(*pb.GetByIdentityRequest)
+	return &transport.GetByIdentityRequest{Identity: string(req.Identity)}, nil
+}
+
+func _Decode_GetByIdentity_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetByIdentityResponse")
+	}
+	resp := response.(*pb.GetByIdentityResponse)
+	respUser, err := ProtoToPtrUser(resp.User)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetByIdentityResponse{User: respUser}, nil
+}
+
+func _Encode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Update_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
diff --git a/pkg/users/transport/grpc/protobuf_type_converters.microgen.go b/pkg/users/transport/grpc/protobuf_type_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..d0789e52420b5383530e10f89e06100197a32bbb
--- /dev/null
+++ b/pkg/users/transport/grpc/protobuf_type_converters.microgen.go
@@ -0,0 +1,153 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// It is better for you if you do not change functions names!
+// This file will never be overwritten.
+package transportgrpc
+
+import (
+	options "git.perx.ru/perxis/perxis-go/pkg/options"
+	service "git.perx.ru/perxis/perxis-go/pkg/users"
+	common "git.perx.ru/perxis/perxis-go/proto/common"
+	pb "git.perx.ru/perxis/perxis-go/proto/users"
+	"github.com/golang/protobuf/ptypes/wrappers"
+)
+
+func PtrUserToProto(create *service.User) (*pb.User, error) {
+	if create == nil {
+		return nil, nil
+	}
+	u := &pb.User{
+		Id:          create.ID,
+		Name:        create.Name,
+		Identities:  create.Identities,
+		DisplayName: create.DisplayName,
+		Email:       create.Email,
+		AvatarUrl:   create.AvatarURL,
+	}
+	if create.EmailVerified != nil {
+		u.EmailVerified = &wrappers.BoolValue{
+			Value: *create.EmailVerified,
+		}
+	}
+	if create.System != nil {
+		u.System = &wrappers.BoolValue{
+			Value: *create.System,
+		}
+	}
+	return u, nil
+}
+
+func ProtoToPtrUser(protoCreate *pb.User) (*service.User, error) {
+	if protoCreate == nil {
+		return nil, nil
+	}
+	user := &service.User{
+		ID:          protoCreate.Id,
+		Name:        protoCreate.Name,
+		DisplayName: protoCreate.DisplayName,
+		Identities:  protoCreate.Identities,
+		Email:       protoCreate.Email,
+		AvatarURL:   protoCreate.AvatarUrl,
+	}
+	if protoCreate.EmailVerified != nil {
+		user.EmailVerified = &protoCreate.EmailVerified.Value
+	}
+	if protoCreate.System != nil {
+		user.System = &protoCreate.System.Value
+	}
+	return user, nil
+}
+
+func PtrFilterToProto(filter *service.Filter) (*pb.Filter, error) {
+	if filter == nil {
+		return nil, nil
+	}
+	f := &pb.Filter{
+		Id:          filter.ID,
+		Name:        filter.Name,
+		Identities:  filter.Identities,
+		DisplayName: filter.DisplayName,
+		Email:       filter.Email,
+	}
+	if filter.EmailVerified != nil {
+		f.EmailVerified = &wrappers.BoolValue{
+			Value: *filter.EmailVerified,
+		}
+	}
+	if filter.System != nil {
+		f.System = &wrappers.BoolValue{
+			Value: *filter.System,
+		}
+	}
+	return f, nil
+}
+
+func ProtoToPtrFilter(protoFilter *pb.Filter) (*service.Filter, error) {
+	if protoFilter == nil {
+		return nil, nil
+	}
+	f := &service.Filter{
+		ID:          protoFilter.Id,
+		Name:        protoFilter.Name,
+		Identities:  protoFilter.Identities,
+		DisplayName: protoFilter.DisplayName,
+		Email:       protoFilter.Email,
+	}
+	if protoFilter.EmailVerified != nil {
+		f.EmailVerified = &protoFilter.EmailVerified.Value
+	}
+	if protoFilter.System != nil {
+		f.System = &protoFilter.System.Value
+	}
+	return f, nil
+}
+
+func ListPtrUserToProto(users []*service.User) ([]*pb.User, error) {
+	protoUsers := make([]*pb.User, 0, len(users))
+	for _, u := range users {
+		pu, err := PtrUserToProto(u)
+		if err != nil {
+			return nil, err
+		}
+		protoUsers = append(protoUsers, pu)
+	}
+	return protoUsers, nil
+}
+
+func ProtoToListPtrUser(protoCreates []*pb.User) ([]*service.User, error) {
+	users := make([]*service.User, 0, len(protoCreates))
+	for _, pu := range protoCreates {
+		u, err := ProtoToPtrUser(pu)
+		if err != nil {
+			return nil, err
+		}
+		users = append(users, u)
+	}
+	return users, nil
+}
+
+func PtrServicesFindOptionsToProto(opts *options.FindOptions) (*common.FindOptions, error) {
+	if opts == nil {
+		return nil, nil
+	}
+	return &common.FindOptions{
+		Sort:     opts.Sort,
+		PageNum:  int32(opts.PageNum),
+		PageSize: int32(opts.PageSize),
+	}, nil
+}
+
+func ProtoToPtrServicesFindOptions(protoOpts *common.FindOptions) (*options.FindOptions, error) {
+	if protoOpts == nil {
+		return nil, nil
+	}
+	return &options.FindOptions{
+		SortOptions: options.SortOptions{
+			Sort: protoOpts.Sort,
+		},
+		PaginationOptions: options.PaginationOptions{
+			PageNum:  int(protoOpts.PageNum),
+			PageSize: int(protoOpts.PageSize),
+		},
+	}, nil
+}
diff --git a/pkg/users/transport/grpc/server.microgen.go b/pkg/users/transport/grpc/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..2be01e97a5a98fcf5189b386fbb36d1a60f43db7
--- /dev/null
+++ b/pkg/users/transport/grpc/server.microgen.go
@@ -0,0 +1,112 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// DO NOT EDIT.
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/users/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/users"
+	grpc "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	context "golang.org/x/net/context"
+)
+
+type usersServer struct {
+	create        grpc.Handler
+	get           grpc.Handler
+	find          grpc.Handler
+	update        grpc.Handler
+	delete        grpc.Handler
+	getByIdentity grpc.Handler
+
+	pb.UnimplementedUsersServer
+}
+
+func NewGRPCServer(endpoints *transport.EndpointsSet, opts ...grpc.ServerOption) pb.UsersServer {
+	return &usersServer{
+		create: grpc.NewServer(
+			endpoints.CreateEndpoint,
+			_Decode_Create_Request,
+			_Encode_Create_Response,
+			opts...,
+		),
+		delete: grpc.NewServer(
+			endpoints.DeleteEndpoint,
+			_Decode_Delete_Request,
+			_Encode_Delete_Response,
+			opts...,
+		),
+		find: grpc.NewServer(
+			endpoints.FindEndpoint,
+			_Decode_Find_Request,
+			_Encode_Find_Response,
+			opts...,
+		),
+		get: grpc.NewServer(
+			endpoints.GetEndpoint,
+			_Decode_Get_Request,
+			_Encode_Get_Response,
+			opts...,
+		),
+		getByIdentity: grpc.NewServer(
+			endpoints.GetByIdentityEndpoint,
+			_Decode_GetByIdentity_Request,
+			_Encode_GetByIdentity_Response,
+			opts...,
+		),
+		update: grpc.NewServer(
+			endpoints.UpdateEndpoint,
+			_Decode_Update_Request,
+			_Encode_Update_Response,
+			opts...,
+		),
+	}
+}
+
+func (S *usersServer) Create(ctx context.Context, req *pb.CreateRequest) (*pb.CreateResponse, error) {
+	_, resp, err := S.create.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.CreateResponse), nil
+}
+
+func (S *usersServer) Get(ctx context.Context, req *pb.GetRequest) (*pb.GetResponse, error) {
+	_, resp, err := S.get.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetResponse), nil
+}
+
+func (S *usersServer) Find(ctx context.Context, req *pb.FindRequest) (*pb.FindResponse, error) {
+	_, resp, err := S.find.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.FindResponse), nil
+}
+
+func (S *usersServer) Update(ctx context.Context, req *pb.UpdateRequest) (*empty.Empty, error) {
+	_, resp, err := S.update.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *usersServer) Delete(ctx context.Context, req *pb.DeleteRequest) (*empty.Empty, error) {
+	_, resp, err := S.delete.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*empty.Empty), nil
+}
+
+func (S *usersServer) GetByIdentity(ctx context.Context, req *pb.GetByIdentityRequest) (*pb.GetByIdentityResponse, error) {
+	_, resp, err := S.getByIdentity.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetByIdentityResponse), nil
+}
diff --git a/pkg/users/transport/server.microgen.go b/pkg/users/transport/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..e12645efc79921da957e359848d9dca606f6364b
--- /dev/null
+++ b/pkg/users/transport/server.microgen.go
@@ -0,0 +1,72 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+
+	users "git.perx.ru/perxis/perxis-go/pkg/users"
+	endpoint "github.com/go-kit/kit/endpoint"
+)
+
+func Endpoints(svc users.Users) EndpointsSet {
+	return EndpointsSet{
+		CreateEndpoint:        CreateEndpoint(svc),
+		DeleteEndpoint:        DeleteEndpoint(svc),
+		FindEndpoint:          FindEndpoint(svc),
+		GetByIdentityEndpoint: GetByIdentityEndpoint(svc),
+		GetEndpoint:           GetEndpoint(svc),
+		UpdateEndpoint:        UpdateEndpoint(svc),
+	}
+}
+
+func CreateEndpoint(svc users.Users) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*CreateRequest)
+		res0, res1 := svc.Create(arg0, req.Create)
+		return &CreateResponse{User: res0}, res1
+	}
+}
+
+func GetEndpoint(svc users.Users) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetRequest)
+		res0, res1 := svc.Get(arg0, req.UserId)
+		return &GetResponse{User: res0}, res1
+	}
+}
+
+func FindEndpoint(svc users.Users) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*FindRequest)
+		res0, res1, res2 := svc.Find(arg0, req.Filter, req.Options)
+		return &FindResponse{
+			Total: res1,
+			Users: res0,
+		}, res2
+	}
+}
+
+func UpdateEndpoint(svc users.Users) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*UpdateRequest)
+		res0 := svc.Update(arg0, req.Update)
+		return &UpdateResponse{}, res0
+	}
+}
+
+func DeleteEndpoint(svc users.Users) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*DeleteRequest)
+		res0 := svc.Delete(arg0, req.UserId)
+		return &DeleteResponse{}, res0
+	}
+}
+
+func GetByIdentityEndpoint(svc users.Users) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		req := request.(*GetByIdentityRequest)
+		res0, res1 := svc.GetByIdentity(arg0, req.Identity)
+		return &GetByIdentityResponse{User: res0}, res1
+	}
+}
diff --git a/pkg/users/user.go b/pkg/users/user.go
new file mode 100644
index 0000000000000000000000000000000000000000..9eca6efc24569c2ef951d2bcb059440d54896ecc
--- /dev/null
+++ b/pkg/users/user.go
@@ -0,0 +1,31 @@
+package users
+
+// Current is an identifier that can be used to get/update/register the user
+// on whose behalf the request was made.
+const Current = "current"
+
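+// User describes a user account. EmailVerified and System are optional flags
+// (nil when the value is not set).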
+type User struct {
+	ID            string   `json:"id" bson:"_id"`
+	Name          string   `json:"name" bson:"name"`
+	DisplayName   string   `json:"displayName" bson:"displayName"`
+	Identities    []string `json:"identities" bson:"identities"`
+	Email         string   `json:"email" bson:"email"`
+	EmailVerified *bool    `json:"emailVerified" bson:"emailVerified"`
+	AvatarURL     string   `json:"avatarUrl" bson:"avatarUrl,omitempty"`
+	System        *bool    `json:"system" bson:"system"`
+}
+
+func (u User) GetID() string {
+	return u.ID
+}
+
+func (u User) IsSystem() bool {
+	if u.System != nil {
+		return *u.System
+	}
+	return false
+}
+
+func (u User) Clone() *User {
+	return &u
+}
diff --git a/pkg/version/mocks/Versions.go b/pkg/version/mocks/Versions.go
new file mode 100644
index 0000000000000000000000000000000000000000..e4572bac6ac23903d7ad0bdd94e2f7044f9fa7e3
--- /dev/null
+++ b/pkg/version/mocks/Versions.go
@@ -0,0 +1,38 @@
+// Code generated by mockery v2.7.4. DO NOT EDIT.
+
+package mocks
+
+import (
+	context "context"
+
+	version "git.perx.ru/perxis/perxis-go/pkg/version"
+	mock "github.com/stretchr/testify/mock"
+)
+
+// Versions is an autogenerated mock type for the Versions type
+type Versions struct {
+	mock.Mock
+}
+
+// Get provides a mock function with given fields: ctx
+func (_m *Versions) Get(ctx context.Context) (*version.Version, error) {
+	ret := _m.Called(ctx)
+
+	var r0 *version.Version
+	if rf, ok := ret.Get(0).(func(context.Context) *version.Version); ok {
+		r0 = rf(ctx)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*version.Version)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context) error); ok {
+		r1 = rf(ctx)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
diff --git a/pkg/version/transport/client.microgen.go b/pkg/version/transport/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..ec9a69655fab437a4146b6cc61973197f415ff2c
--- /dev/null
+++ b/pkg/version/transport/client.microgen.go
@@ -0,0 +1,24 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+	"errors"
+
+	version "git.perx.ru/perxis/perxis-go/pkg/version"
+	codes "google.golang.org/grpc/codes"
+	status "google.golang.org/grpc/status"
+)
+
+func (set EndpointsSet) Get(arg0 context.Context) (res0 *version.Version, res1 error) {
+	request := GetRequest{}
+	response, res1 := set.GetEndpoint(arg0, &request)
+	if res1 != nil {
+		if e, ok := status.FromError(res1); ok || e.Code() == codes.Internal || e.Code() == codes.Unknown {
+			res1 = errors.New(e.Message())
+		}
+		return
+	}
+	return response.(*GetResponse).Version, res1
+}
diff --git a/pkg/version/transport/endpoints.microgen.go b/pkg/version/transport/endpoints.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..b670fc2bb09ce1fe180f427ea600f703ebda02d5
--- /dev/null
+++ b/pkg/version/transport/endpoints.microgen.go
@@ -0,0 +1,10 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import endpoint "github.com/go-kit/kit/endpoint"
+
+// EndpointsSet implements the Versions API and is used for transport purposes.
+type EndpointsSet struct {
+	GetEndpoint endpoint.Endpoint
+}
diff --git a/pkg/version/transport/exchanges.microgen.go b/pkg/version/transport/exchanges.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..11c9bfbda2c9d21ecd76cc67961c59f8439936c1
--- /dev/null
+++ b/pkg/version/transport/exchanges.microgen.go
@@ -0,0 +1,13 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import version "git.perx.ru/perxis/perxis-go/pkg/version"
+
+type (
+	// Formal exchange type, please do not delete.
+	GetRequest  struct{}
+	GetResponse struct {
+		Version *version.Version `json:"version"`
+	}
+)
diff --git a/pkg/version/transport/grpc/client.microgen.go b/pkg/version/transport/grpc/client.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..237f175e8023bd8168b3ff43da7096efe58020e7
--- /dev/null
+++ b/pkg/version/transport/grpc/client.microgen.go
@@ -0,0 +1,20 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/version/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/versions/content"
+	grpckit "github.com/go-kit/kit/transport/grpc"
+	grpc "google.golang.org/grpc"
+)
+
+func NewGRPCClient(conn *grpc.ClientConn, addr string, opts ...grpckit.ClientOption) transport.EndpointsSet {
+	return transport.EndpointsSet{GetEndpoint: grpckit.NewClient(
+		conn, addr, "Get",
+		_Encode_Get_Request,
+		_Decode_Get_Response,
+		pb.GetResponse{},
+		opts...,
+	).Endpoint()}
+}
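
A sketch of wiring the generated go-kit client to an existing connection. The second argument of NewGRPCClient is the fully qualified gRPC service name passed through to go-kit's transport; "content.Versions" below is an assumption based on the proto import path and may not match the actual package declared in perxis-proto.

package example

import (
	"git.perx.ru/perxis/perxis-go/pkg/version/transport"
	transportgrpc "git.perx.ru/perxis/perxis-go/pkg/version/transport/grpc"
	"google.golang.org/grpc"
)

// newVersionsClient builds an EndpointsSet over an existing connection.
// The returned set satisfies version.Versions via its generated Get method.
func newVersionsClient(conn *grpc.ClientConn) transport.EndpointsSet {
	// "content.Versions" is an assumed service name; adjust it to the real
	// fully qualified name declared in the .proto file.
	return transportgrpc.NewGRPCClient(conn, "content.Versions")
}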
diff --git a/pkg/version/transport/grpc/protobuf_endpoint_converters.microgen.go b/pkg/version/transport/grpc/protobuf_endpoint_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..26396d633853d9eac9ef0a1d64590b265d16119f
--- /dev/null
+++ b/pkg/version/transport/grpc/protobuf_endpoint_converters.microgen.go
@@ -0,0 +1,45 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// Please do not change function names!
+package transportgrpc
+
+import (
+	"context"
+	"errors"
+
+	transport "git.perx.ru/perxis/perxis-go/pkg/version/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/versions/content"
+	empty "github.com/golang/protobuf/ptypes/empty"
+)
+
+func _Encode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Encode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*transport.GetResponse)
+	respVersion, err := PtrVersionsVersionToProto(resp.Version)
+	if err != nil {
+		return nil, err
+	}
+	return &pb.GetResponse{Version: respVersion}, nil
+}
+
+func _Decode_Get_Request(ctx context.Context, request interface{}) (interface{}, error) {
+	return &empty.Empty{}, nil
+}
+
+func _Decode_Get_Response(ctx context.Context, response interface{}) (interface{}, error) {
+	if response == nil {
+		return nil, errors.New("nil GetResponse")
+	}
+	resp := response.(*pb.GetResponse)
+	respVersion, err := ProtoToPtrVersionsVersion(resp.Version)
+	if err != nil {
+		return nil, err
+	}
+	return &transport.GetResponse{Version: respVersion}, nil
+}
diff --git a/pkg/version/transport/grpc/protobuf_type_converters.microgen.go b/pkg/version/transport/grpc/protobuf_type_converters.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..4868510051fc294697066f312467bbe244a7d62c
--- /dev/null
+++ b/pkg/version/transport/grpc/protobuf_type_converters.microgen.go
@@ -0,0 +1,38 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// It is better for you if you do not change function names!
+// This file will never be overwritten.
+package transportgrpc
+
+import (
+	"git.perx.ru/perxis/perxis-go/pkg/version"
+	"git.perx.ru/perxis/perxis-go/proto/common"
+)
+
+func PtrVersionsVersionToProto(version *version.Version) (*common.Version, error) {
+	pVersion := &common.Version{
+		ApiVersion:    version.APIVersion,
+		ServerVersion: version.ServerVersion,
+		Commit:        version.Commit,
+		BuildTime:     version.BuildTime,
+	}
+	return pVersion, nil
+}
+
+func ProtoToPtrVersionsVersion(protoVersion *common.Version) (*version.Version, error) {
+	version := &version.Version{
+		APIVersion:    protoVersion.ApiVersion,
+		ServerVersion: protoVersion.ServerVersion,
+		Commit:        protoVersion.Commit,
+		BuildTime:     protoVersion.BuildTime,
+	}
+	return version, nil
+}
+
+func PtrVersionVersionToProto(version *version.Version) (*common.Version, error) {
+	panic("function not provided") // TODO: provide converter
+}
+
+func ProtoToPtrVersionVersion(protoVersion *common.Version) (*version.Version, error) {
+	panic("function not provided") // TODO: provide converter
+}
diff --git a/pkg/version/transport/grpc/server.microgen.go b/pkg/version/transport/grpc/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..63586ae3f3a1386413e3ae703591d0ee410e3e35
--- /dev/null
+++ b/pkg/version/transport/grpc/server.microgen.go
@@ -0,0 +1,35 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+// DO NOT EDIT.
+package transportgrpc
+
+import (
+	transport "git.perx.ru/perxis/perxis-go/pkg/version/transport"
+	pb "git.perx.ru/perxis/perxis-go/proto/versions/content"
+	grpc "github.com/go-kit/kit/transport/grpc"
+	empty "github.com/golang/protobuf/ptypes/empty"
+	context "golang.org/x/net/context"
+)
+
+type versionsServer struct {
+	get grpc.Handler
+
+	pb.UnimplementedVersionsServer
+}
+
+func NewGRPCServer(endpoints *transport.EndpointsSet, opts ...grpc.ServerOption) pb.VersionsServer {
+	return &versionsServer{get: grpc.NewServer(
+		endpoints.GetEndpoint,
+		_Decode_Get_Request,
+		_Encode_Get_Response,
+		opts...,
+	)}
+}
+
+func (S *versionsServer) Get(ctx context.Context, req *empty.Empty) (*pb.GetResponse, error) {
+	_, resp, err := S.get.ServeGRPC(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+	return resp.(*pb.GetResponse), nil
+}
diff --git a/pkg/version/transport/server.microgen.go b/pkg/version/transport/server.microgen.go
new file mode 100644
index 0000000000000000000000000000000000000000..61f36f62a7d425e6fa2ce75a0e50cebd527853a0
--- /dev/null
+++ b/pkg/version/transport/server.microgen.go
@@ -0,0 +1,21 @@
+// Code generated by microgen 0.9.1. DO NOT EDIT.
+
+package transport
+
+import (
+	"context"
+
+	versions "git.perx.ru/perxis/perxis-go/pkg/version"
+	endpoint "github.com/go-kit/kit/endpoint"
+)
+
+func Endpoints(svc versions.Versions) EndpointsSet {
+	return EndpointsSet{GetEndpoint: GetEndpoint(svc)}
+}
+
+func GetEndpoint(svc versions.Versions) endpoint.Endpoint {
+	return func(arg0 context.Context, request interface{}) (interface{}, error) {
+		res0, res1 := svc.Get(arg0)
+		return &GetResponse{Version: res0}, res1
+	}
+}
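
Putting the generated pieces together on the server side might look roughly like the sketch below; the serve helper, the listener handling, and the pb.RegisterVersionsServer name (the registration helper protoc-gen-go-grpc normally emits alongside UnimplementedVersionsServer) are assumptions rather than code from this change.

package example

import (
	"net"

	"git.perx.ru/perxis/perxis-go/pkg/version"
	"git.perx.ru/perxis/perxis-go/pkg/version/transport"
	transportgrpc "git.perx.ru/perxis/perxis-go/pkg/version/transport/grpc"
	pb "git.perx.ru/perxis/perxis-go/proto/versions/content"
	"google.golang.org/grpc"
)

// serve exposes a version.Versions implementation over gRPC using the
// generated endpoint set and transport server.
func serve(svc version.Versions, lis net.Listener) error {
	endpoints := transport.Endpoints(svc)
	srv := grpc.NewServer()
	// pb.RegisterVersionsServer is assumed to exist in the generated
	// protobuf package (it normally accompanies UnimplementedVersionsServer).
	pb.RegisterVersionsServer(srv, transportgrpc.NewGRPCServer(&endpoints))
	return srv.Serve(lis)
}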
diff --git a/pkg/version/version.go b/pkg/version/version.go
new file mode 100644
index 0000000000000000000000000000000000000000..3ab2eb5d82a8ca1fe258c003e54f6d6324e82a5f
--- /dev/null
+++ b/pkg/version/version.go
@@ -0,0 +1,22 @@
+package version
+
+import (
+	"context"
+	"fmt"
+)
+
+type Versions interface {
+	Get(ctx context.Context) (version *Version, err error)
+}
+
+type Version struct {
+	ServerVersion string
+	APIVersion    string
+	Commit        string
+	BuildTime     string
+	BuildNumber   int
+}
+
+func (v *Version) String() string {
+	return fmt.Sprintf("%s-api%s-%s build: %d (%s)", v.ServerVersion, v.APIVersion, v.Commit, v.BuildNumber, v.BuildTime)
+}
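
For illustration, the String format above produces output like the following; every field value in this sketch is made up.

package main

import (
	"fmt"

	"git.perx.ru/perxis/perxis-go/pkg/version"
)

func main() {
	v := &version.Version{
		ServerVersion: "1.4.0",
		APIVersion:    "2",
		Commit:        "a1b2c3d",
		BuildTime:     "2021-06-01T12:00:00Z",
		BuildNumber:   17,
	}
	// Output: 1.4.0-api2-a1b2c3d build: 17 (2021-06-01T12:00:00Z)
	fmt.Println(v)
}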