diff --git a/perxis/auth.py b/perxis/auth.py
index 398e9f7f602309a9a86301c6d47db68c627bfdeb..d2989bab852ce2b4c84d9a062e80c031223f66e2 100644
--- a/perxis/auth.py
+++ b/perxis/auth.py
@@ -9,9 +9,15 @@ from requests_oauthlib import OAuth2Session
 class OAuth2Plugin(grpc.AuthMetadataPlugin):
     _token = None
 
-    def __init__(self, client: Client, client_secret: str, token_url: str, audience: str,
-                 signature_header_key: str = 'authorization', token_type: str = 'Bearer') -> None:
-
+    def __init__(
+        self,
+        client: Client,
+        client_secret: str,
+        token_url: str,
+        audience: str,
+        signature_header_key: str = "authorization",
+        token_type: str = "Bearer",
+    ) -> None:
         self._client_secret = client_secret
         self._token_url = token_url
         self._audience = audience
@@ -20,9 +26,20 @@ class OAuth2Plugin(grpc.AuthMetadataPlugin):
 
         self.oauth2 = OAuth2Session(client=client)
 
-    def __call__(self, context: grpc.AuthMetadataContext,
-                 callback: grpc.AuthMetadataPluginCallback) -> None:
-        callback(((self._signature_header_key, f'{self._token_type} {self.token["access_token"]}'),), None)
+    def __call__(
+        self,
+        context: grpc.AuthMetadataContext,
+        callback: grpc.AuthMetadataPluginCallback,
+    ) -> None:
+        callback(
+            metadata=(
+                (
+                    self._signature_header_key,
+                    f'{self._token_type} {self.token["access_token"]}',
+                ),
+            ),
+            error=None,
+        )
 
     @property
     def token(self) -> OAuth2Token:
@@ -30,21 +47,21 @@ class OAuth2Plugin(grpc.AuthMetadataPlugin):
             return self.oauth2.fetch_token(
                 token_url=self._token_url,
                 client_secret=self._client_secret,
-                audience=self._audience
+                audience=self._audience,
             )
 
         def refresh_token() -> OAuth2Token:
             return self.oauth2.refresh_token(
                 token_url=self._token_url,
                 client_secret=self._client_secret,
-                audience=self._audience
+                audience=self._audience,
             )
 
         if self._token is None:
             self._token = fetch_token()
 
-        if self._token['expires_at'] and self._token['expires_at'] < time.time():
-            if 'refresh_token' in self._token:
+        if self._token["expires_at"] and self._token["expires_at"] < time.time():
+            if "refresh_token" in self._token:
                 self._token = refresh_token()
             else:
                 self._token = fetch_token()
@@ -55,11 +72,24 @@ class OAuth2Plugin(grpc.AuthMetadataPlugin):
 class APIKeyPlugin(grpc.AuthMetadataPlugin):
     _token = None
 
-    def __init__(self, token: str, signature_header_key: str = 'authorization', token_type: str = 'API-Key') -> None:
+    def __init__(
+        self,
+        token: str,
+        signature_header_key: str = "authorization",
+        token_type: str = "API-Key",
+    ) -> None:
         self._token = token
         self._signature_header_key = signature_header_key
         self._token_type = token_type
 
-    def __call__(self, context: grpc.AuthMetadataContext,
-                 callback: grpc.AuthMetadataPluginCallback) -> None:
-        callback(((self._signature_header_key, f'{self._token_type} {self._token}'),), None)
+    def __call__(
+        self,
+        context: grpc.AuthMetadataContext,
+        callback: grpc.AuthMetadataPluginCallback,
+    ) -> None:
+        callback(
+            metadata=(
+                (self._signature_header_key, f"{self._token_type} {self._token}"),
+            ),
+            error=None,
+        )
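
Both plugins are grpc.AuthMetadataPlugin implementations, so they slot into gRPC's standard call-credentials machinery (GrpcChannel in channel.py appears to do this wiring internally). A minimal standalone sketch with a placeholder key and endpoint; OAuth2Plugin plugs in the same way:

```python
import grpc

from perxis.auth import APIKeyPlugin

# Placeholder token and target, for illustration only.
call_credentials = grpc.metadata_call_credentials(
    APIKeyPlugin(token="my-api-key"),  # emits "authorization: API-Key my-api-key"
    name="perxis-api-key",
)
composite_credentials = grpc.composite_channel_credentials(
    grpc.ssl_channel_credentials(),
    call_credentials,
)
channel = grpc.aio.secure_channel("perxis.example.com:443", composite_credentials)
```
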
diff --git a/perxis/channel.py b/perxis/channel.py
index 39b9660daf4c901787170415382621d1f8f1dc43..329b78a7da0a5511f43da682a5372f7a38d654a7 100644
--- a/perxis/channel.py
+++ b/perxis/channel.py
@@ -1,16 +1,13 @@
-from typing import Optional
-
 import grpc
 
 from perxis.auth import APIKeyPlugin
-from perxis.models import PerxisEnviron
 
 
 class GrpcChannel:
     call_credentials: grpc.CallCredentials
     channel_credentials: grpc.ChannelCredentials
     composite_credentials: grpc.ChannelCredentials
-    channel: Optional[grpc.Channel] = None
+    channel: grpc.Channel | None = None
 
     def __init__(
         self,
@@ -38,20 +35,23 @@ class GrpcChannel:
     def connect(self, project_name: str, project_version: str) -> None:
         if not self.channel:
             self.channel = grpc.aio.secure_channel(
-                self.target, self.composite_credentials,
+                self.target,
+                self.composite_credentials,
                 options=(
                     ("grpc.primary_user_agent", f"{project_name}/{project_version}"),
-                )
+                ),
             )
 
     async def close(self) -> None:
         await self.channel.close()
 
 
-def get_grpc_channel(perxis_environ: PerxisEnviron, project_name: str, project_version: str) -> GrpcChannel:
+def get_grpc_channel(
+    target: str, api_key: str, project_name: str, project_version: str
+) -> GrpcChannel:
     return GrpcChannel(
-        target=perxis_environ.target,
-        metadata_plugin=APIKeyPlugin(token=perxis_environ.api_key),
+        target=target,
+        metadata_plugin=APIKeyPlugin(token=api_key),
         project_name=project_name,
-        project_version=project_version
+        project_version=project_version,
     )
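
With PerxisEnviron removed, get_grpc_channel takes the target and API key directly; a sketch of the updated call site (endpoint, key and project metadata are placeholders):

```python
from perxis.channel import get_grpc_channel

# Placeholder values; these previously came from a PerxisEnviron instance.
grpc_channel = get_grpc_channel(
    target="perxis.example.com:443",
    api_key="my-api-key",
    project_name="my-service",
    project_version="1.0.0",
)
grpc_channel.connect(project_name="my-service", project_version="1.0.0")
# ... later, from async code: await grpc_channel.close()
```
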
diff --git a/perxis/interceptors.py b/perxis/interceptors.py
index 87eb273818fccc901092ac4b3cfafb65a27f650b..cca3b8e817bb49781dcb9b76a1f1fcea5fe258e2 100644
--- a/perxis/interceptors.py
+++ b/perxis/interceptors.py
@@ -2,38 +2,44 @@ import grpc
 import collections
 
 
-class _GenericClientInterceptor(grpc.UnaryUnaryClientInterceptor,
-                                grpc.UnaryStreamClientInterceptor,
-                                grpc.StreamUnaryClientInterceptor,
-                                grpc.StreamStreamClientInterceptor):
-
+class _GenericClientInterceptor(
+    grpc.UnaryUnaryClientInterceptor,
+    grpc.UnaryStreamClientInterceptor,
+    grpc.StreamUnaryClientInterceptor,
+    grpc.StreamStreamClientInterceptor,
+):
     def __init__(self, interceptor_function):
         self._fn = interceptor_function
 
     def intercept_unary_unary(self, continuation, client_call_details, request):
         new_details, new_request_iterator, postprocess = self._fn(
-            client_call_details, iter((request,)), False, False)
+            client_call_details, iter((request,)), False, False
+        )
         response = continuation(new_details, next(new_request_iterator))
         return postprocess(response) if postprocess else response
 
-    def intercept_unary_stream(self, continuation, client_call_details,
-                               request):
+    def intercept_unary_stream(self, continuation, client_call_details, request):
         new_details, new_request_iterator, postprocess = self._fn(
-            client_call_details, iter((request,)), False, True)
+            client_call_details, iter((request,)), False, True
+        )
         response_it = continuation(new_details, next(new_request_iterator))
         return postprocess(response_it) if postprocess else response_it
 
-    def intercept_stream_unary(self, continuation, client_call_details,
-                               request_iterator):
+    def intercept_stream_unary(
+        self, continuation, client_call_details, request_iterator
+    ):
         new_details, new_request_iterator, postprocess = self._fn(
-            client_call_details, request_iterator, True, False)
+            client_call_details, request_iterator, True, False
+        )
         response = continuation(new_details, new_request_iterator)
         return postprocess(response) if postprocess else response
 
-    def intercept_stream_stream(self, continuation, client_call_details,
-                                request_iterator):
+    def intercept_stream_stream(
+        self, continuation, client_call_details, request_iterator
+    ):
         new_details, new_request_iterator, postprocess = self._fn(
-            client_call_details, request_iterator, True, True)
+            client_call_details, request_iterator, True, True
+        )
         response_it = continuation(new_details, new_request_iterator)
         return postprocess(response_it) if postprocess else response_it
 
@@ -43,27 +49,35 @@ def create(intercept_call):
 
 
 class _ClientCallDetails(
-        collections.namedtuple(
-            '_ClientCallDetails',
-            ('method', 'timeout', 'metadata', 'credentials')),
-        grpc.ClientCallDetails):
+    collections.namedtuple(
+        typename="_ClientCallDetails",
+        field_names=("method", "timeout", "metadata", "credentials"),
+    ),
+    grpc.ClientCallDetails,
+):
     pass
 
 
 def header_adder_interceptor(header, value):
-    def intercept_call(client_call_details, request_iterator, request_streaming,
-                       response_streaming):
+    def intercept_call(
+        client_call_details, request_iterator, request_streaming, response_streaming
+    ):
         metadata = []
         if client_call_details.metadata is not None:
             metadata = list(client_call_details.metadata)
-        metadata.append((
-            header,
-            value,
-        ))
+        metadata.append(
+            (
+                header,
+                value,
+            )
+        )
 
         client_call_details = _ClientCallDetails(
-            client_call_details.method, client_call_details.timeout, metadata,
-            client_call_details.credentials)
+            client_call_details.method,
+            client_call_details.timeout,
+            metadata,
+            client_call_details.credentials,
+        )
         return client_call_details, request_iterator, None
 
     return create(intercept_call)
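
header_adder_interceptor builds a synchronous client interceptor that appends a single metadata pair to every outgoing call; a sketch of attaching it to a non-asyncio channel, with a placeholder header and target:

```python
import grpc

from perxis.interceptors import header_adder_interceptor

# Placeholder header name/value and target, for illustration only.
interceptor = header_adder_interceptor("x-request-source", "my-service")
channel = grpc.intercept_channel(grpc.insecure_channel("localhost:50051"), interceptor)
```
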
diff --git a/perxis/models.py b/perxis/models.py
index 340c735c3a0d50c6afb6ff3404e126bdae70d8a5..e3a1d57f173b09930900cc86b94aea14f28581a7 100644
--- a/perxis/models.py
+++ b/perxis/models.py
@@ -1,15 +1,6 @@
 from dataclasses import dataclass
 
 
-@dataclass
-class PerxisEnviron:
-    target: str
-    space_id: str
-    env_id: str
-    api_key: str
-    collection_id: str
-
-
 @dataclass
 class Reference:
     id: str
diff --git a/perxis/provider.py b/perxis/provider.py
index bd3bf25c2e8de14c378167dd03142a2fc491dfa1..eeadadfacfbef14df56a7f0a645426e190093ff0 100644
--- a/perxis/provider.py
+++ b/perxis/provider.py
@@ -1,5 +1,5 @@
-from typing import Optional
 from google.protobuf.struct_pb2 import Struct
+from google.protobuf.empty_pb2 import Empty
 from perxis.common import common_pb2
 from perxis.items import items_pb2, items_pb2_grpc
 from perxis.references import references_pb2
@@ -8,25 +8,24 @@ from perxis.channel import GrpcChannel
 from perxis.models import Reference
 
 
+DEFAULT_PAGE_SIZE: int = 100
+
+
 class PerxisDataProvider:
     def __init__(
         self,
         channel: GrpcChannel,
         space_id: str,
         env_id: str,
-        collection_id: Optional[str] = None,
-        default_page_size: Optional[int] = 100,
     ) -> None:
         self.channel = channel
         self.space_id = space_id
         self.env_id = env_id
-        self.collection_id = collection_id
         self.items_stub = items_pb2_grpc.ItemsStub(self.channel.channel)
         self.references_stub = references_pb2_grpc.ReferencesStub(self.channel.channel)
-        self.default_page_size = default_page_size
 
     async def create(
-        self, data: Struct, collection_id: Optional[str] = None
+        self, data: Struct, collection_id: str
     ) -> items_pb2.CreateResponse:
         """Сохранение данных формы в системе Perxis."""
         result = await self.items_stub.Create(
@@ -34,16 +33,14 @@ class PerxisDataProvider:
                 item=items_pb2.Item(
                     space_id=self.space_id,
                     env_id=self.env_id,
-                    collection_id=collection_id or self.collection_id,
+                    collection_id=collection_id,
                     data=data,
                 )
             )
         )
         return result
 
-    async def update(
-        self, item_id: str, data: Struct, collection_id: Optional[str] = None
-    ):
+    async def update(self, item_id: str, data: Struct, collection_id: str) -> Empty:
         """Метод обновления записи в коллекции."""
         result = await self.items_stub.Update(
             items_pb2.UpdateRequest(
@@ -51,7 +48,7 @@ class PerxisDataProvider:
                     id=item_id,
                     space_id=self.space_id,
                     env_id=self.env_id,
-                    collection_id=collection_id or self.collection_id,
+                    collection_id=collection_id,
                     data=data,
                 )
             )
@@ -60,18 +57,18 @@ class PerxisDataProvider:
 
     async def find(
         self,
-        collection_id: Optional[str] = None,
-        filters: Optional[list[str]] = None,
-        sort_by: Optional[list[str]] = None,
-        page_num: Optional[int] = None,
-        page_size: Optional[int] = None,
+        collection_id: str,
+        filters: list[str] | None = None,
+        sort_by: list[str] | None = None,
+        page_num: int | None = None,
+        page_size: int | None = DEFAULT_PAGE_SIZE,
     ) -> items_pb2.FindResponse:
         """Метод поиска записей в коллекции."""
         result = await self.items_stub.Find(
             items_pb2.FindRequest(
                 space_id=self.space_id,
                 env_id=self.env_id,
-                collection_id=collection_id or self.collection_id,
+                collection_id=collection_id,
                 filter=items_pb2.Filter(q=filters or []),
                 options=items_pb2.FindOptions(
                     options=common_pb2.FindOptions(
@@ -84,18 +81,19 @@ class PerxisDataProvider:
 
     async def find_published(
         self,
-        collection_id: Optional[str] = None,
-        filters: Optional[list[str]] = None,
-        fields: Optional[list[str]] = None,
-        sort_by: Optional[list[str]] = None,
-        page_num: Optional[int] = None,
-        page_size: Optional[int] = None,
+        collection_id: str,
+        filters: list[str] | None = None,
+        fields: list[str] | None = None,
+        sort_by: list[str] | None = None,
+        page_num: int | None = None,
+        page_size: int | None = DEFAULT_PAGE_SIZE,
     ) -> items_pb2.FindResponse:
+        """Метод поиска опубликованных записей в коллекции."""
         result = await self.items_stub.FindPublished(
             items_pb2.FindPublishedRequest(
                 space_id=self.space_id,
                 env_id=self.env_id,
-                collection_id=collection_id or self.collection_id,
+                collection_id=collection_id,
                 filter=items_pb2.Filter(q=filters or []),
                 options=items_pb2.FindPublishedOptions(
                     options=common_pb2.FindOptions(
@@ -111,10 +109,11 @@ class PerxisDataProvider:
 
     async def fetch_all_published(
         self,
-        collection_id: Optional[str] = None,
-        filters: Optional[list[str]] = None,
-        fields: Optional[list[str]] = None,
-        sort_by: Optional[list[str]] = None,
+        collection_id: str,
+        filters: list[str] | None = None,
+        fields: list[str] | None = None,
+        sort_by: list[str] | None = None,
+        page_size: int = DEFAULT_PAGE_SIZE,
     ) -> items_pb2.FindResponse:
         """Метод поиска всех опубликованных записей в коллекции."""
         kwargs = {
@@ -122,13 +121,13 @@ class PerxisDataProvider:
             "filters": filters,
             "sort_by": sort_by,
             "fields": fields,
-            "page_size": self.default_page_size,
+            "page_size": page_size,
         }
         message = await self.find_published(**kwargs)
         yield message
 
-        if pages := message.total // self.default_page_size:
-            if message.total % self.default_page_size:
+        if pages := message.total // page_size:
+            if message.total % page_size:
                 pages += 1
         else:
             pages = 1
@@ -136,7 +135,7 @@ class PerxisDataProvider:
         for page_num in range(1, pages + 1):
             yield await self.find_published(page_num=page_num, **kwargs)
 
-    async def unpublish(self, item_id: str, collection_id: Optional[str] = None):
+    async def unpublish(self, item_id: str, collection_id: str) -> Empty:
         """Метод снятия с публикации записи в коллекции."""
         result = await self.items_stub.Unpublish(
             items_pb2.UnpublishRequest(
@@ -144,13 +143,13 @@ class PerxisDataProvider:
                     id=item_id,
                     space_id=self.space_id,
                     env_id=self.env_id,
-                    collection_id=collection_id or self.collection_id,
+                    collection_id=collection_id,
                 )
             )
         )
         return result
 
-    async def publish(self, item_id: str, collection_id: Optional[str] = None):
+    async def publish(self, item_id: str, collection_id: str) -> Empty:
         """Метод публикации записи в коллекции."""
         result = await self.items_stub.Publish(
             items_pb2.PublishRequest(
@@ -158,7 +157,7 @@ class PerxisDataProvider:
                     id=item_id,
                     space_id=self.space_id,
                     env_id=self.env_id,
-                    collection_id=collection_id or self.collection_id,
+                    collection_id=collection_id,
                 )
             )
         )
@@ -166,37 +165,40 @@ class PerxisDataProvider:
 
     async def fetch_all(
         self,
-        collection_id: Optional[str] = None,
-        filters: Optional[list[str]] = None,
-        sort_by: Optional[list[str]] = None,
+        collection_id: str,
+        filters: list[str] | None = None,
+        sort_by: list[str] | None = None,
+        page_size: int = DEFAULT_PAGE_SIZE,
     ) -> items_pb2.FindResponse:
         """Метод получения всех записей коллекции."""
         items = []
         storage_data = await self.find(
-            collection_id=collection_id or self.collection_id,
-            page_size=self.default_page_size,
+            collection_id=collection_id,
+            page_size=page_size,
             filters=filters,
         )
         items.extend(storage_data.items)
 
-        if pages := storage_data.total // self.default_page_size:
-            if storage_data.total % self.default_page_size:
+        if pages := storage_data.total // page_size:
+            if storage_data.total % page_size:
                 pages += 1
         else:
             pages = 1
 
         for page_num in range(1, pages + 1):
             storage_data = await self.find(
-                collection_id=collection_id or self.collection_id,
+                collection_id=collection_id,
                 filters=filters,
                 sort_by=sort_by,
                 page_num=page_num,
-                page_size=self.default_page_size,
+                page_size=page_size,
             )
             items.extend(storage_data.items)
-        return items_pb2.FindResponse(items=items)
+        return items_pb2.FindResponse(items=items, total=len(items))
 
-    async def get_references(self, references: list[Reference]):
+    async def get_references(
+        self, references: list[Reference]
+    ) -> items_pb2.GetResponse:
         """Метод получения связей."""
         result = await self.references_stub.Get(
             references_pb2.GetRequest(
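
A usage sketch tying the pieces together under the new API: collection_id is passed explicitly on every call, paging defaults to DEFAULT_PAGE_SIZE, and fetch_all_published is consumed as an async generator. Endpoint, key and IDs are placeholders:

```python
import asyncio

from perxis.channel import get_grpc_channel
from perxis.provider import PerxisDataProvider


async def main() -> None:
    # Placeholder connection details, as in the channel.py sketch above.
    grpc_channel = get_grpc_channel(
        target="perxis.example.com:443",
        api_key="my-api-key",
        project_name="my-service",
        project_version="1.0.0",
    )
    grpc_channel.connect(project_name="my-service", project_version="1.0.0")

    provider = PerxisDataProvider(
        channel=grpc_channel,
        space_id="my-space",
        env_id="master",
    )

    # collection_id is now required on every call.
    found = await provider.find(collection_id="articles")
    print(found.total)

    # fetch_all_published yields one FindResponse per page.
    async for page in provider.fetch_all_published(collection_id="articles"):
        for item in page.items:
            print(item.id)

    await grpc_channel.close()


asyncio.run(main())
```
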
diff --git a/setup.py b/setup.py
index edc0cd988b1d8f305f334b5aa1bd8a8e5b983f47..7a6513a3cedf3f481d8db2ad8ce3ad7a66bad044 100644
--- a/setup.py
+++ b/setup.py
@@ -14,7 +14,7 @@ def load_requirements():
 
 setup(
     name='perxis',
-    version='0.0.23',
+    version='1.0.0',
     description='Perxis python client',
     long_description=long_description,
     long_description_content_type='text/markdown',