Item and reference logic extracted from PerxisDataProvider

Merged: Podosochnyy Maxim requested to merge feature/AUTO-2275 into master
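This MR extracts the gRPC plumbing from PerxisDataProvider into three focused wrappers: PerxisItemsWrapper, PerxisReferencesWrapper, and PerxisFilesWrapper. The provider classes keep their public signatures; they bind space_id and env_id once in __init__ and delegate every call. A minimal usage sketch of the resulting call path (the import paths, channel endpoint, and collection name are hypothetical, not part of this diff):

import asyncio

# Hypothetical import paths; the diff does not show the module layout.
from providers import GrpcChannel, PerxisDataProvider


async def main() -> None:
    channel = GrpcChannel("perxis.example.com:443")  # hypothetical endpoint
    provider = PerxisDataProvider(channel, space_id="my-space", env_id="master")

    # The public API is unchanged: space_id/env_id are bound here once
    # and forwarded to PerxisItemsWrapper on every call.
    found = await provider.find(collection_id="articles")
    print(found.total)


asyncio.run(main())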
3 files  +241  −67
@@ -13,28 +13,94 @@ DEFAULT_PAGE_SIZE: int = 100
 DEFAULT_PART_SIZE: int = 1024 * 5
 
-class PerxisDataProvider:
-    def __init__(
-        self,
-        channel: GrpcChannel,
-        space_id: str,
-        env_id: str,
-    ) -> None:
-        self.channel = channel
-        self.space_id = space_id
-        self.env_id = env_id
-        self.items_stub = items_pb2_grpc.ItemsStub(self.channel.channel)
-        self.references_stub = references_pb2_grpc.ReferencesStub(self.channel.channel)
+class PerxisFilesWrapper:
+    __stub: files_pb2_grpc.FilesStub
+
+    def __init__(self, stub: files_pb2_grpc.FilesStub) -> None:
+        self.__stub = stub
+
+    async def start_upload(
+        self, file_name: str, file_size: int
+    ) -> files_pb2.StartUploadResponse:
+        message = await self.__stub.StartUpload(
+            files_pb2.StartUploadRequest(
+                upload=files_pb2.MultipartUpload(
+                    file=files_pb2.File(
+                        name=file_name, size=file_size
+                    )
+                )
+            )
+        )
+        return message
+
+    async def complete_upload(
+        self,
+        file_id: str,
+        upload_id: str,
+        parts: list[str],
+        part_size: int = DEFAULT_PART_SIZE
+    ) -> files_pb2.CompleteUploadResponse:
+        message = await self.__stub.CompleteUpload(
+            files_pb2.CompleteUploadRequest(
+                upload=files_pb2.MultipartUpload(
+                    file=files_pb2.File(id=file_id),
+                    upload_id=upload_id,
+                    part_size=part_size,
+                    parts=[
+                        files_pb2.CompletedPart(id=value, number=index)
+                        for index, value in enumerate(parts, 1)
+                    ],
+                )
+            )
+        )
+        return message
+
+
+class PerxisReferencesWrapper:
+    __references: references_pb2_grpc.ReferencesStub
+
+    def __init__(
+        self,
+        references: references_pb2_grpc.ReferencesStub,
+    ):
+        self.__references = references
+
+    async def get_references(
+        self, references: list[Reference], space_id: str, env_id: str
+    ) -> items_pb2.GetResponse:
+        """Fetch references."""
+        result = await self.__references.Get(
+            references_pb2.GetRequest(
+                space_id=space_id,
+                env_id=env_id,
+                references=[
+                    references_pb2.Reference(id=ref.id, collection_id=ref.collection_id)
+                    for ref in references
+                ],
+            )
+        )
+        return result
+
+
+class PerxisItemsWrapper:
+    __items: items_pb2_grpc.ItemsStub
+
+    def __init__(
+        self,
+        items: items_pb2_grpc.ItemsStub,
+    ):
+        self.__items = items
+
     async def create(
-        self, data: Struct, collection_id: str
+        self, data: Struct, collection_id: str, space_id: str, env_id: str
     ) -> items_pb2.CreateResponse:
         """Save form data in the Perxis system."""
-        result = await self.items_stub.Create(
+        result = await self.__items.Create(
             items_pb2.CreateRequest(
                 item=items_pb2.Item(
-                    space_id=self.space_id,
-                    env_id=self.env_id,
+                    space_id=space_id,
+                    env_id=env_id,
                     collection_id=collection_id,
                     data=data,
                 )
@@ -42,14 +108,14 @@ class PerxisDataProvider:
         )
         return result
 
-    async def update(self, item_id: str, data: Struct, collection_id: str) -> Empty:
+    async def update(self, item_id: str, data: Struct, collection_id: str, space_id: str, env_id: str) -> Empty:
         """Update a record in the collection."""
-        result = await self.items_stub.Update(
+        result = await self.__items.Update(
             items_pb2.UpdateRequest(
                 item=items_pb2.Item(
                     id=item_id,
-                    space_id=self.space_id,
-                    env_id=self.env_id,
+                    space_id=space_id,
+                    env_id=env_id,
                     collection_id=collection_id,
                     data=data,
                 )
@@ -60,21 +126,24 @@ class PerxisDataProvider:
     async def find(
         self,
         collection_id: str,
+        space_id: str,
+        env_id: str,
         filters: list[str] | None = None,
         sort_by: list[str] | None = None,
+        fields: list[str] | None = None,
         page_num: int | None = None,
         page_size: int | None = DEFAULT_PAGE_SIZE,
     ) -> items_pb2.FindResponse:
         """Search for records in the collection."""
-        result = await self.items_stub.Find(
+        result = await self.__items.Find(
             items_pb2.FindRequest(
-                space_id=self.space_id,
-                env_id=self.env_id,
+                space_id=space_id,
+                env_id=env_id,
                 collection_id=collection_id,
                 filter=items_pb2.Filter(q=filters or []),
-                options=items_pb2.FindOptions(
-                    sort=sort_by, page_num=page_num, page_size=page_size
+                options=common_pb2.FindOptions(
+                    sort=sort_by, page_num=page_num, page_size=page_size, fields=fields or []
                 )
             ),
         )
@@ -84,6 +153,8 @@ class PerxisDataProvider:
     async def find_published(
         self,
         collection_id: str,
+        space_id: str,
+        env_id: str,
         filters: list[str] | None = None,
         fields: list[str] | None = None,
         sort_by: list[str] | None = None,
@@ -91,10 +162,10 @@ class PerxisDataProvider:
         page_size: int | None = DEFAULT_PAGE_SIZE,
     ) -> items_pb2.FindResponse:
         """Search for published records in the collection."""
-        result = await self.items_stub.FindPublished(
+        result = await self.__items.FindPublished(
             items_pb2.FindPublishedRequest(
-                space_id=self.space_id,
-                env_id=self.env_id,
+                space_id=space_id,
+                env_id=env_id,
                 collection_id=collection_id,
                 filter=items_pb2.Filter(q=filters or []),
                 options=items_pb2.FindPublishedOptions(
@@ -112,6 +183,8 @@ class PerxisDataProvider:
     async def fetch_all_published(
         self,
         collection_id: str,
+        space_id: str,
+        env_id: str,
         filters: list[str] | None = None,
         fields: list[str] | None = None,
         sort_by: list[str] | None = None,
@@ -124,6 +197,8 @@ class PerxisDataProvider:
             "sort_by": sort_by,
             "fields": fields,
             "page_size": page_size,
+            "space_id": space_id,
+            "env_id": env_id
         }
         message = await self.find_published(**kwargs)
         yield message
@@ -137,28 +212,28 @@ class PerxisDataProvider:
         for page_num in range(1, pages + 1):
             yield await self.find_published(page_num=page_num, **kwargs)
 
-    async def unpublish(self, item_id: str, collection_id: str) -> Empty:
+    async def unpublish(self, item_id: str, collection_id: str, space_id: str, env_id: str) -> Empty:
         """Unpublish a record in the collection."""
-        result = await self.items_stub.Unpublish(
+        result = await self.__items.Unpublish(
             items_pb2.UnpublishRequest(
                 item=items_pb2.Item(
                     id=item_id,
-                    space_id=self.space_id,
-                    env_id=self.env_id,
+                    space_id=space_id,
+                    env_id=env_id,
                     collection_id=collection_id,
                 )
             )
         )
         return result
 
-    async def publish(self, item_id: str, collection_id: str) -> Empty:
+    async def publish(self, item_id: str, collection_id: str, space_id: str, env_id: str) -> Empty:
         """Publish a record in the collection."""
-        result = await self.items_stub.Publish(
+        result = await self.__items.Publish(
             items_pb2.PublishRequest(
                 item=items_pb2.Item(
                     id=item_id,
-                    space_id=self.space_id,
-                    env_id=self.env_id,
+                    space_id=space_id,
+                    env_id=env_id,
                     collection_id=collection_id,
                 )
             )
@@ -168,7 +243,10 @@ class PerxisDataProvider:
     async def fetch_all(
         self,
         collection_id: str,
+        space_id: str,
+        env_id: str,
         filters: list[str] | str = None,
+        fields: list[str] | None = None,
         sort_by: list[str] | str = None,
         page_size: int | None = DEFAULT_PAGE_SIZE,
     ) -> items_pb2.FindResponse:
@@ -176,8 +254,11 @@ class PerxisDataProvider:
         items = []
         storage_data = await self.find(
             collection_id=collection_id,
+            space_id=space_id,
+            env_id=env_id,
             page_size=page_size,
             filters=filters,
+            fields=fields,
         )
         items.extend(storage_data.items)
@@ -190,47 +271,150 @@ class PerxisDataProvider:
         for page_num in range(1, pages + 1):
             storage_data = await self.find(
                 collection_id=collection_id,
+                space_id=space_id,
+                env_id=env_id,
                 filters=filters,
                 sort_by=sort_by,
                 page_num=page_num,
                 page_size=page_size,
+                fields=fields,
             )
             items.extend(storage_data.items)
         return items_pb2.FindResponse(items=items, total=len(items))
+
+
+class PerxisDataProvider:
+    def __init__(
+        self,
+        channel: GrpcChannel,
+        space_id: str,
+        env_id: str,
+    ) -> None:
+        self.channel = channel
+        self.space_id = space_id
+        self.env_id = env_id
+        self.references_wrapper = PerxisReferencesWrapper(
+            references=references_pb2_grpc.ReferencesStub(self.channel.channel)
+        )
+        self.items_wrapper = PerxisItemsWrapper(
+            items=items_pb2_grpc.ItemsStub(self.channel.channel),
+        )
+
+    async def create(
+        self, data: Struct, collection_id: str
+    ) -> items_pb2.CreateResponse:
+        result = await self.items_wrapper.create(data, collection_id, self.space_id, self.env_id)
+        return result
+
+    async def update(self, item_id: str, data: Struct, collection_id: str) -> Empty:
+        result = await self.items_wrapper.update(item_id, data, collection_id, self.space_id, self.env_id)
+        return result
+
+    async def find(
+        self,
+        collection_id: str,
+        filters: list[str] | None = None,
+        sort_by: list[str] | None = None,
+        page_num: int | None = None,
+        page_size: int | None = DEFAULT_PAGE_SIZE,
+    ) -> items_pb2.FindResponse:
+        result = await self.items_wrapper.find(
+            collection_id, self.space_id, self.env_id, filters, sort_by, page_num, page_size
+        )
+        return result
+
+    async def find_published(
+        self,
+        collection_id: str,
+        filters: list[str] | None = None,
+        fields: list[str] | None = None,
+        sort_by: list[str] | None = None,
+        page_num: int | None = None,
+        page_size: int | None = DEFAULT_PAGE_SIZE,
+    ) -> items_pb2.FindResponse:
+        result = await self.items_wrapper.find_published(
+            collection_id, self.space_id, self.env_id, filters, fields, sort_by, page_num, page_size
+        )
+        return result
+
+    async def fetch_all_published(
+        self,
+        collection_id: str,
+        filters: list[str] | None = None,
+        fields: list[str] | None = None,
+        sort_by: list[str] | None = None,
+        page_size: int | None = DEFAULT_PAGE_SIZE,
+    ) -> items_pb2.FindResponse:
+        """Search all published records in the collection."""
+        kwargs = {
+            "collection_id": collection_id,
+            "filters": filters,
+            "sort_by": sort_by,
+            "fields": fields,
+            "page_size": page_size,
+        }
+        message = await self.find_published(**kwargs)
+        yield message
+        if pages := message.total // page_size:
+            if message.total % page_size:
+                pages += 1
+        else:
+            pages = 1
+        for page_num in range(1, pages + 1):
+            yield await self.find_published(page_num=page_num, **kwargs)
+
+    async def unpublish(self, item_id: str, collection_id: str) -> Empty:
+        result = await self.items_wrapper.unpublish(item_id, collection_id, self.space_id, self.env_id)
+        return result
+
+    async def publish(self, item_id: str, collection_id: str) -> Empty:
+        result = await self.items_wrapper.publish(item_id, collection_id, self.space_id, self.env_id)
+        return result
+
+    async def fetch_all(
+        self,
+        collection_id: str,
+        filters: list[str] | str = None,
+        sort_by: list[str] | str = None,
+        page_size: int | None = DEFAULT_PAGE_SIZE,
+    ) -> items_pb2.FindResponse:
+        result = await self.items_wrapper.fetch_all(
+            collection_id, self.space_id, self.env_id, filters, sort_by, page_size
+        )
+        return result
 
     async def get_references(
         self, references: list[Reference]
     ) -> items_pb2.GetResponse:
         """Fetch references."""
-        result = await self.references_stub.Get(
-            references_pb2.GetRequest(
-                space_id=self.space_id,
-                env_id=self.env_id,
-                references=[
-                    references_pb2.Reference(id=ref.id, collection_id=ref.collection_id)
-                    for ref in references
-                ],
-            )
+        result = await self.references_wrapper.get_references(
+            references, self.space_id, self.env_id
         )
         return result
 
 
 class PerxisFileProvider:
+    __files_wrapper: PerxisFilesWrapper
+
     def __init__(self, channel: GrpcChannel) -> None:
-        self._stub = files_pb2_grpc.FilesStub(channel.channel)
+        self.__files_wrapper = PerxisFilesWrapper(
+            stub=files_pb2_grpc.FilesStub(channel.channel)
+        )
 
     async def start_upload(
         self, file_name: str, file_size: int
     ) -> files_pb2.StartUploadResponse:
-        message = await self._stub.StartUpload(
-            files_pb2.StartUploadRequest(
-                upload=files_pb2.MultipartUpload(
-                    file=files_pb2.File(
-                        name=file_name, size=file_size
-                    )
-                )
-            )
-        )
+        message = await self.__files_wrapper.start_upload(file_name, file_size)
         return message
 
     async def complete_upload(
@@ -240,18 +424,8 @@ class PerxisFileProvider:
         parts: list[str],
         part_size: int = DEFAULT_PART_SIZE
     ) -> files_pb2.CompleteUploadResponse:
-        message = await self._stub.CompleteUpload(
-            files_pb2.CompleteUploadRequest(
-                upload=files_pb2.MultipartUpload(
-                    file=files_pb2.File(id=file_id),
-                    upload_id=upload_id,
-                    part_size=part_size,
-                    parts=[
-                        files_pb2.CompletedPart(id=value, number=index)
-                        for index, value in enumerate(parts, 1)
-                    ],
-                )
-            )
+        message = await self.__files_wrapper.complete_upload(
+            file_id, upload_id, parts, part_size
         )
         return message
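For reviewers, a sketch of the multipart-upload flow through the refactored PerxisFileProvider. The part-upload step between start_upload and complete_upload is untouched by this MR, and the response field access below is an assumption about the files_pb2 message shapes:

async def upload_file(provider: PerxisFileProvider, name: str, payload: bytes) -> None:
    # Register the upload; the server returns file and upload identifiers.
    started = await provider.start_upload(file_name=name, file_size=len(payload))

    # Uploading the parts themselves is a separate RPC that this MR does not
    # touch, so the resulting part ids are left as a placeholder here.
    part_ids: list[str] = ...

    # Finalize: parts are numbered from 1, matching enumerate(parts, 1)
    # in PerxisFilesWrapper.complete_upload.
    await provider.complete_upload(
        file_id=started.upload.file.id,      # assumed response shape
        upload_id=started.upload.upload_id,  # assumed response shape
        parts=part_ids,
    )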