diff --git a/.github/workflows/package.yml b/.github/workflows/package.yml index 42c8eba..fbb71ee 100644 --- a/.github/workflows/package.yml +++ b/.github/workflows/package.yml @@ -10,6 +10,7 @@ jobs: runs-on: ubuntu-latest permissions: contents: read + id-token: write steps: - uses: actions/checkout@v2 @@ -32,9 +33,6 @@ jobs: run: | poetry build - - name: Publish python package + - name: Publish package distributions to PyPI if: github.event_name == 'release' - run: | - poetry publish - env: - POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }} + uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/cirro_api_client/v1/api/notebooks/get_notebook_instance_status.py b/cirro_api_client/v1/api/projects/get_quotas.py similarity index 67% rename from cirro_api_client/v1/api/notebooks/get_notebook_instance_status.py rename to cirro_api_client/v1/api/projects/get_quotas.py index a1300b7..3700a97 100644 --- a/cirro_api_client/v1/api/notebooks/get_notebook_instance_status.py +++ b/cirro_api_client/v1/api/projects/get_quotas.py @@ -6,35 +6,38 @@ from ... 
import errors from ...client import Client -from ...models.notebook_instance_status_response import NotebookInstanceStatusResponse +from ...models.cloud_quota import CloudQuota from ...types import Response def _get_kwargs( project_id: str, - notebook_instance_id: str, ) -> dict[str, Any]: _kwargs: dict[str, Any] = { "method": "get", - "url": "/projects/{project_id}/notebook-instances/{notebook_instance_id}:status".format( + "url": "/projects/{project_id}/cloud-quotas".format( project_id=quote(str(project_id), safe=""), - notebook_instance_id=quote(str(notebook_instance_id), safe=""), ), } return _kwargs -def _parse_response(*, client: Client, response: httpx.Response) -> NotebookInstanceStatusResponse | None: +def _parse_response(*, client: Client, response: httpx.Response) -> list[CloudQuota] | None: if response.status_code == 200: - response_200 = NotebookInstanceStatusResponse.from_dict(response.json()) + response_200 = [] + _response_200 = response.json() + for response_200_item_data in _response_200: + response_200_item = CloudQuota.from_dict(response_200_item_data) + + response_200.append(response_200_item) return response_200 errors.handle_error_response(response, client.raise_on_unexpected_status) -def _build_response(*, client: Client, response: httpx.Response) -> Response[NotebookInstanceStatusResponse]: +def _build_response(*, client: Client, response: httpx.Response) -> Response[list[CloudQuota]]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -45,17 +48,15 @@ def _build_response(*, client: Client, response: httpx.Response) -> Response[Not def sync_detailed( project_id: str, - notebook_instance_id: str, *, client: Client, -) -> Response[NotebookInstanceStatusResponse]: - """Get notebook instance status +) -> Response[list[CloudQuota]]: + """Get cloud quotas - Retrieves the status of the instance + Retrieves a list of relevant cloud service quotas for project Args: project_id (str): - notebook_instance_id 
(str): client (Client): instance of the API client Raises: @@ -63,12 +64,11 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[NotebookInstanceStatusResponse] + Response[list[CloudQuota]] """ kwargs = _get_kwargs( project_id=project_id, - notebook_instance_id=notebook_instance_id, ) response = client.get_httpx_client().request( @@ -81,17 +81,15 @@ def sync_detailed( def sync( project_id: str, - notebook_instance_id: str, *, client: Client, -) -> NotebookInstanceStatusResponse | None: - """Get notebook instance status +) -> list[CloudQuota] | None: + """Get cloud quotas - Retrieves the status of the instance + Retrieves a list of relevant cloud service quotas for project Args: project_id (str): - notebook_instance_id (str): client (Client): instance of the API client Raises: @@ -99,13 +97,12 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - NotebookInstanceStatusResponse + list[CloudQuota] """ try: return sync_detailed( project_id=project_id, - notebook_instance_id=notebook_instance_id, client=client, ).parsed except errors.NotFoundException: @@ -114,17 +111,15 @@ def sync( async def asyncio_detailed( project_id: str, - notebook_instance_id: str, *, client: Client, -) -> Response[NotebookInstanceStatusResponse]: - """Get notebook instance status +) -> Response[list[CloudQuota]]: + """Get cloud quotas - Retrieves the status of the instance + Retrieves a list of relevant cloud service quotas for project Args: project_id (str): - notebook_instance_id (str): client (Client): instance of the API client Raises: @@ -132,12 +127,11 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[NotebookInstanceStatusResponse] + Response[list[CloudQuota]] """ kwargs = _get_kwargs( project_id=project_id, - notebook_instance_id=notebook_instance_id, ) response = await client.get_async_httpx_client().request(auth=client.get_auth(), **kwargs) @@ -147,17 +141,15 @@ async def asyncio_detailed( async def asyncio( project_id: str, - notebook_instance_id: str, *, client: Client, -) -> NotebookInstanceStatusResponse | None: - """Get notebook instance status +) -> list[CloudQuota] | None: + """Get cloud quotas - Retrieves the status of the instance + Retrieves a list of relevant cloud service quotas for project Args: project_id (str): - notebook_instance_id (str): client (Client): instance of the API client Raises: @@ -165,14 +157,13 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - NotebookInstanceStatusResponse + list[CloudQuota] """ try: return ( await asyncio_detailed( project_id=project_id, - notebook_instance_id=notebook_instance_id, client=client, ) ).parsed diff --git a/cirro_api_client/v1/api/projects/request_quota_increase.py b/cirro_api_client/v1/api/projects/request_quota_increase.py new file mode 100644 index 0000000..540c5ae --- /dev/null +++ b/cirro_api_client/v1/api/projects/request_quota_increase.py @@ -0,0 +1,188 @@ +from http import HTTPStatus +from typing import Any +from urllib.parse import quote + +import httpx + +from ... 
import errors +from ...client import Client +from ...models.request_quota_increase_command import RequestQuotaIncreaseCommand +from ...models.request_quota_increase_response import RequestQuotaIncreaseResponse +from ...types import Response + + +def _get_kwargs( + project_id: str, + *, + body: RequestQuotaIncreaseCommand, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + + _kwargs: dict[str, Any] = { + "method": "put", + "url": "/projects/{project_id}/cloud-quotas".format( + project_id=quote(str(project_id), safe=""), + ), + } + + _kwargs["json"] = body.to_dict() + + headers["Content-Type"] = "application/json" + + _kwargs["headers"] = headers + return _kwargs + + +def _parse_response(*, client: Client, response: httpx.Response) -> RequestQuotaIncreaseResponse | None: + if response.status_code == 200: + response_200 = RequestQuotaIncreaseResponse.from_dict(response.json()) + + return response_200 + + errors.handle_error_response(response, client.raise_on_unexpected_status) + + +def _build_response(*, client: Client, response: httpx.Response) -> Response[RequestQuotaIncreaseResponse]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + project_id: str, + *, + client: Client, + body: RequestQuotaIncreaseCommand, +) -> Response[RequestQuotaIncreaseResponse]: + """Request quota increase + + Request a service quota increase for a project's cloud account + + Args: + project_id (str): + body (RequestQuotaIncreaseCommand): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[RequestQuotaIncreaseResponse] + """ + + kwargs = _get_kwargs( + project_id=project_id, + body=body, + ) + + response = client.get_httpx_client().request( + auth=client.get_auth(), + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + project_id: str, + *, + client: Client, + body: RequestQuotaIncreaseCommand, +) -> RequestQuotaIncreaseResponse | None: + """Request quota increase + + Request a service quota increase for a project's cloud account + + Args: + project_id (str): + body (RequestQuotaIncreaseCommand): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + RequestQuotaIncreaseResponse + """ + + try: + return sync_detailed( + project_id=project_id, + client=client, + body=body, + ).parsed + except errors.NotFoundException: + return None + + +async def asyncio_detailed( + project_id: str, + *, + client: Client, + body: RequestQuotaIncreaseCommand, +) -> Response[RequestQuotaIncreaseResponse]: + """Request quota increase + + Request a service quota increase for a project's cloud account + + Args: + project_id (str): + body (RequestQuotaIncreaseCommand): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[RequestQuotaIncreaseResponse] + """ + + kwargs = _get_kwargs( + project_id=project_id, + body=body, + ) + + response = await client.get_async_httpx_client().request(auth=client.get_auth(), **kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + project_id: str, + *, + client: Client, + body: RequestQuotaIncreaseCommand, +) -> RequestQuotaIncreaseResponse | None: + """Request quota increase + + Request a service quota increase for a project's cloud account + + Args: + project_id (str): + body (RequestQuotaIncreaseCommand): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + RequestQuotaIncreaseResponse + """ + + try: + return ( + await asyncio_detailed( + project_id=project_id, + client=client, + body=body, + ) + ).parsed + except errors.NotFoundException: + return None diff --git a/cirro_api_client/v1/api/notebooks/__init__.py b/cirro_api_client/v1/api/shared_filesystems/__init__.py similarity index 100% rename from cirro_api_client/v1/api/notebooks/__init__.py rename to cirro_api_client/v1/api/shared_filesystems/__init__.py diff --git a/cirro_api_client/v1/api/shared_filesystems/create_shared_filesystem.py b/cirro_api_client/v1/api/shared_filesystems/create_shared_filesystem.py new file mode 100644 index 0000000..ee83b76 --- /dev/null +++ b/cirro_api_client/v1/api/shared_filesystems/create_shared_filesystem.py @@ -0,0 +1,180 @@ +from http import HTTPStatus +from typing import Any +from urllib.parse import quote + +import httpx + +from ... 
import errors +from ...client import Client +from ...models.create_response import CreateResponse +from ...models.shared_filesystem_input import SharedFilesystemInput +from ...types import Response + + +def _get_kwargs( + project_id: str, + *, + body: SharedFilesystemInput, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + + _kwargs: dict[str, Any] = { + "method": "post", + "url": "/projects/{project_id}/shared-filesystems".format( + project_id=quote(str(project_id), safe=""), + ), + } + + _kwargs["json"] = body.to_dict() + + headers["Content-Type"] = "application/json" + + _kwargs["headers"] = headers + return _kwargs + + +def _parse_response(*, client: Client, response: httpx.Response) -> CreateResponse | None: + if response.status_code == 201: + response_201 = CreateResponse.from_dict(response.json()) + + return response_201 + + errors.handle_error_response(response, client.raise_on_unexpected_status) + + +def _build_response(*, client: Client, response: httpx.Response) -> Response[CreateResponse]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + project_id: str, + *, + client: Client, + body: SharedFilesystemInput, +) -> Response[CreateResponse]: + """Create a shared filesystem + + Args: + project_id (str): + body (SharedFilesystemInput): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[CreateResponse] + """ + + kwargs = _get_kwargs( + project_id=project_id, + body=body, + ) + + response = client.get_httpx_client().request( + auth=client.get_auth(), + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + project_id: str, + *, + client: Client, + body: SharedFilesystemInput, +) -> CreateResponse | None: + """Create a shared filesystem + + Args: + project_id (str): + body (SharedFilesystemInput): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + CreateResponse + """ + + try: + return sync_detailed( + project_id=project_id, + client=client, + body=body, + ).parsed + except errors.NotFoundException: + return None + + +async def asyncio_detailed( + project_id: str, + *, + client: Client, + body: SharedFilesystemInput, +) -> Response[CreateResponse]: + """Create a shared filesystem + + Args: + project_id (str): + body (SharedFilesystemInput): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[CreateResponse] + """ + + kwargs = _get_kwargs( + project_id=project_id, + body=body, + ) + + response = await client.get_async_httpx_client().request(auth=client.get_auth(), **kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + project_id: str, + *, + client: Client, + body: SharedFilesystemInput, +) -> CreateResponse | None: + """Create a shared filesystem + + Args: + project_id (str): + body (SharedFilesystemInput): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + CreateResponse + """ + + try: + return ( + await asyncio_detailed( + project_id=project_id, + client=client, + body=body, + ) + ).parsed + except errors.NotFoundException: + return None diff --git a/cirro_api_client/v1/api/notebooks/delete_notebook_instance.py b/cirro_api_client/v1/api/shared_filesystems/delete_shared_filesystem.py similarity index 79% rename from cirro_api_client/v1/api/notebooks/delete_notebook_instance.py rename to cirro_api_client/v1/api/shared_filesystems/delete_shared_filesystem.py index 3744161..0863b98 100644 --- a/cirro_api_client/v1/api/notebooks/delete_notebook_instance.py +++ b/cirro_api_client/v1/api/shared_filesystems/delete_shared_filesystem.py @@ -11,13 +11,13 @@ def _get_kwargs( project_id: str, - notebook_instance_id: str, + filesystem_id: str, ) -> dict[str, Any]: _kwargs: dict[str, Any] = { "method": "delete", - "url": "/projects/{project_id}/notebook-instances/{notebook_instance_id}".format( + "url": "/projects/{project_id}/shared-filesystems/{filesystem_id}".format( project_id=quote(str(project_id), safe=""), - notebook_instance_id=quote(str(notebook_instance_id), safe=""), + filesystem_id=quote(str(filesystem_id), safe=""), ), } @@ -42,17 +42,15 @@ def _build_response(*, client: 
Client, response: httpx.Response) -> Response[Any def sync_detailed( project_id: str, - notebook_instance_id: str, + filesystem_id: str, *, client: Client, ) -> Response[Any]: - """Delete notebook instance - - Triggers a deletion of the notebook instance + """Delete a shared filesystem Args: project_id (str): - notebook_instance_id (str): + filesystem_id (str): client (Client): instance of the API client Raises: @@ -65,7 +63,7 @@ def sync_detailed( kwargs = _get_kwargs( project_id=project_id, - notebook_instance_id=notebook_instance_id, + filesystem_id=filesystem_id, ) response = client.get_httpx_client().request( @@ -78,17 +76,15 @@ def sync_detailed( async def asyncio_detailed( project_id: str, - notebook_instance_id: str, + filesystem_id: str, *, client: Client, ) -> Response[Any]: - """Delete notebook instance - - Triggers a deletion of the notebook instance + """Delete a shared filesystem Args: project_id (str): - notebook_instance_id (str): + filesystem_id (str): client (Client): instance of the API client Raises: @@ -101,7 +97,7 @@ async def asyncio_detailed( kwargs = _get_kwargs( project_id=project_id, - notebook_instance_id=notebook_instance_id, + filesystem_id=filesystem_id, ) response = await client.get_async_httpx_client().request(auth=client.get_auth(), **kwargs) diff --git a/cirro_api_client/v1/api/shared_filesystems/get_shared_filesystem.py b/cirro_api_client/v1/api/shared_filesystems/get_shared_filesystem.py new file mode 100644 index 0000000..be1c5b0 --- /dev/null +++ b/cirro_api_client/v1/api/shared_filesystems/get_shared_filesystem.py @@ -0,0 +1,172 @@ +from http import HTTPStatus +from typing import Any +from urllib.parse import quote + +import httpx + +from ... 
import errors +from ...client import Client +from ...models.shared_filesystem import SharedFilesystem +from ...types import Response + + +def _get_kwargs( + project_id: str, + filesystem_id: str, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/projects/{project_id}/shared-filesystems/{filesystem_id}".format( + project_id=quote(str(project_id), safe=""), + filesystem_id=quote(str(filesystem_id), safe=""), + ), + } + + return _kwargs + + +def _parse_response(*, client: Client, response: httpx.Response) -> SharedFilesystem | None: + if response.status_code == 200: + response_200 = SharedFilesystem.from_dict(response.json()) + + return response_200 + + errors.handle_error_response(response, client.raise_on_unexpected_status) + + +def _build_response(*, client: Client, response: httpx.Response) -> Response[SharedFilesystem]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + project_id: str, + filesystem_id: str, + *, + client: Client, +) -> Response[SharedFilesystem]: + """Get shared filesystem details + + Args: + project_id (str): + filesystem_id (str): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[SharedFilesystem] + """ + + kwargs = _get_kwargs( + project_id=project_id, + filesystem_id=filesystem_id, + ) + + response = client.get_httpx_client().request( + auth=client.get_auth(), + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + project_id: str, + filesystem_id: str, + *, + client: Client, +) -> SharedFilesystem | None: + """Get shared filesystem details + + Args: + project_id (str): + filesystem_id (str): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + SharedFilesystem + """ + + try: + return sync_detailed( + project_id=project_id, + filesystem_id=filesystem_id, + client=client, + ).parsed + except errors.NotFoundException: + return None + + +async def asyncio_detailed( + project_id: str, + filesystem_id: str, + *, + client: Client, +) -> Response[SharedFilesystem]: + """Get shared filesystem details + + Args: + project_id (str): + filesystem_id (str): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[SharedFilesystem] + """ + + kwargs = _get_kwargs( + project_id=project_id, + filesystem_id=filesystem_id, + ) + + response = await client.get_async_httpx_client().request(auth=client.get_auth(), **kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + project_id: str, + filesystem_id: str, + *, + client: Client, +) -> SharedFilesystem | None: + """Get shared filesystem details + + Args: + project_id (str): + filesystem_id (str): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + SharedFilesystem + """ + + try: + return ( + await asyncio_detailed( + project_id=project_id, + filesystem_id=filesystem_id, + client=client, + ) + ).parsed + except errors.NotFoundException: + return None diff --git a/cirro_api_client/v1/api/notebooks/get_notebook_instances.py b/cirro_api_client/v1/api/shared_filesystems/list_shared_filesystems.py similarity index 78% rename from cirro_api_client/v1/api/notebooks/get_notebook_instances.py rename to cirro_api_client/v1/api/shared_filesystems/list_shared_filesystems.py index 39b7e9d..0fa90a2 100644 --- a/cirro_api_client/v1/api/notebooks/get_notebook_instances.py +++ b/cirro_api_client/v1/api/shared_filesystems/list_shared_filesystems.py @@ -6,7 +6,7 @@ from ... 
import errors from ...client import Client -from ...models.notebook_instance import NotebookInstance +from ...models.shared_filesystem import SharedFilesystem from ...types import Response @@ -15,7 +15,7 @@ def _get_kwargs( ) -> dict[str, Any]: _kwargs: dict[str, Any] = { "method": "get", - "url": "/projects/{project_id}/notebook-instances".format( + "url": "/projects/{project_id}/shared-filesystems".format( project_id=quote(str(project_id), safe=""), ), } @@ -23,12 +23,12 @@ def _get_kwargs( return _kwargs -def _parse_response(*, client: Client, response: httpx.Response) -> list[NotebookInstance] | None: +def _parse_response(*, client: Client, response: httpx.Response) -> list[SharedFilesystem] | None: if response.status_code == 200: response_200 = [] _response_200 = response.json() for response_200_item_data in _response_200: - response_200_item = NotebookInstance.from_dict(response_200_item_data) + response_200_item = SharedFilesystem.from_dict(response_200_item_data) response_200.append(response_200_item) @@ -37,7 +37,7 @@ def _parse_response(*, client: Client, response: httpx.Response) -> list[Noteboo errors.handle_error_response(response, client.raise_on_unexpected_status) -def _build_response(*, client: Client, response: httpx.Response) -> Response[list[NotebookInstance]]: +def _build_response(*, client: Client, response: httpx.Response) -> Response[list[SharedFilesystem]]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -50,10 +50,8 @@ def sync_detailed( project_id: str, *, client: Client, -) -> Response[list[NotebookInstance]]: - """Get notebook instances - - Retrieves a list of notebook instances that the user has access to +) -> Response[list[SharedFilesystem]]: + """List shared filesystems in a project Args: project_id (str): @@ -64,7 +62,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[list[NotebookInstance]] + Response[list[SharedFilesystem]] """ kwargs = _get_kwargs( @@ -83,10 +81,8 @@ def sync( project_id: str, *, client: Client, -) -> list[NotebookInstance] | None: - """Get notebook instances - - Retrieves a list of notebook instances that the user has access to +) -> list[SharedFilesystem] | None: + """List shared filesystems in a project Args: project_id (str): @@ -97,7 +93,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - list[NotebookInstance] + list[SharedFilesystem] """ try: @@ -113,10 +109,8 @@ async def asyncio_detailed( project_id: str, *, client: Client, -) -> Response[list[NotebookInstance]]: - """Get notebook instances - - Retrieves a list of notebook instances that the user has access to +) -> Response[list[SharedFilesystem]]: + """List shared filesystems in a project Args: project_id (str): @@ -127,7 +121,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[list[NotebookInstance]] + Response[list[SharedFilesystem]] """ kwargs = _get_kwargs( @@ -143,10 +137,8 @@ async def asyncio( project_id: str, *, client: Client, -) -> list[NotebookInstance] | None: - """Get notebook instances - - Retrieves a list of notebook instances that the user has access to +) -> list[SharedFilesystem] | None: + """List shared filesystems in a project Args: project_id (str): @@ -157,7 +149,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - list[NotebookInstance] + list[SharedFilesystem] """ try: diff --git a/cirro_api_client/v1/api/shared_filesystems/update_shared_filesystem.py b/cirro_api_client/v1/api/shared_filesystems/update_shared_filesystem.py new file mode 100644 index 0000000..7defd82 --- /dev/null +++ b/cirro_api_client/v1/api/shared_filesystems/update_shared_filesystem.py @@ -0,0 +1,194 @@ +from http import HTTPStatus +from typing import Any +from urllib.parse import quote + +import httpx + +from ... import errors +from ...client import Client +from ...models.shared_filesystem import SharedFilesystem +from ...models.shared_filesystem_input import SharedFilesystemInput +from ...types import Response + + +def _get_kwargs( + project_id: str, + filesystem_id: str, + *, + body: SharedFilesystemInput, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + + _kwargs: dict[str, Any] = { + "method": "put", + "url": "/projects/{project_id}/shared-filesystems/{filesystem_id}".format( + project_id=quote(str(project_id), safe=""), + filesystem_id=quote(str(filesystem_id), safe=""), + ), + } + + _kwargs["json"] = body.to_dict() + + headers["Content-Type"] = "application/json" + + _kwargs["headers"] = headers + return _kwargs + + +def _parse_response(*, client: Client, response: httpx.Response) -> SharedFilesystem | None: + if response.status_code == 200: + response_200 = SharedFilesystem.from_dict(response.json()) + + return response_200 + + errors.handle_error_response(response, client.raise_on_unexpected_status) + + +def _build_response(*, client: Client, response: httpx.Response) -> Response[SharedFilesystem]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + project_id: str, + filesystem_id: str, + *, + client: Client, + body: SharedFilesystemInput, +) -> Response[SharedFilesystem]: + """Update a shared filesystem + + Args: + 
project_id (str): + filesystem_id (str): + body (SharedFilesystemInput): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[SharedFilesystem] + """ + + kwargs = _get_kwargs( + project_id=project_id, + filesystem_id=filesystem_id, + body=body, + ) + + response = client.get_httpx_client().request( + auth=client.get_auth(), + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + project_id: str, + filesystem_id: str, + *, + client: Client, + body: SharedFilesystemInput, +) -> SharedFilesystem | None: + """Update a shared filesystem + + Args: + project_id (str): + filesystem_id (str): + body (SharedFilesystemInput): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + SharedFilesystem + """ + + try: + return sync_detailed( + project_id=project_id, + filesystem_id=filesystem_id, + client=client, + body=body, + ).parsed + except errors.NotFoundException: + return None + + +async def asyncio_detailed( + project_id: str, + filesystem_id: str, + *, + client: Client, + body: SharedFilesystemInput, +) -> Response[SharedFilesystem]: + """Update a shared filesystem + + Args: + project_id (str): + filesystem_id (str): + body (SharedFilesystemInput): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[SharedFilesystem] + """ + + kwargs = _get_kwargs( + project_id=project_id, + filesystem_id=filesystem_id, + body=body, + ) + + response = await client.get_async_httpx_client().request(auth=client.get_auth(), **kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + project_id: str, + filesystem_id: str, + *, + client: Client, + body: SharedFilesystemInput, +) -> SharedFilesystem | None: + """Update a shared filesystem + + Args: + project_id (str): + filesystem_id (str): + body (SharedFilesystemInput): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + SharedFilesystem + """ + + try: + return ( + await asyncio_detailed( + project_id=project_id, + filesystem_id=filesystem_id, + client=client, + body=body, + ) + ).parsed + except errors.NotFoundException: + return None diff --git a/cirro_api_client/v1/api/sheets/__init__.py b/cirro_api_client/v1/api/sheets/__init__.py new file mode 100644 index 0000000..2d7c0b2 --- /dev/null +++ b/cirro_api_client/v1/api/sheets/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/cirro_api_client/v1/api/notebooks/create_notebook_instance.py b/cirro_api_client/v1/api/sheets/create_sheet.py similarity index 82% rename from cirro_api_client/v1/api/notebooks/create_notebook_instance.py rename to cirro_api_client/v1/api/sheets/create_sheet.py index f34d3a0..527dcd3 100644 --- a/cirro_api_client/v1/api/notebooks/create_notebook_instance.py +++ b/cirro_api_client/v1/api/sheets/create_sheet.py @@ -6,21 +6,21 @@ from ... 
import errors from ...client import Client -from ...models.create_notebook_instance_request import CreateNotebookInstanceRequest from ...models.create_response import CreateResponse +from ...models.create_sheet_request import CreateSheetRequest from ...types import Response def _get_kwargs( project_id: str, *, - body: CreateNotebookInstanceRequest, + body: CreateSheetRequest, ) -> dict[str, Any]: headers: dict[str, Any] = {} _kwargs: dict[str, Any] = { "method": "post", - "url": "/projects/{project_id}/notebook-instances".format( + "url": "/projects/{project_id}/sheets".format( project_id=quote(str(project_id), safe=""), ), } @@ -55,15 +55,15 @@ def sync_detailed( project_id: str, *, client: Client, - body: CreateNotebookInstanceRequest, + body: CreateSheetRequest, ) -> Response[CreateResponse]: - """Create notebook instance + """Create sheet - Creates a notebook instance within the project + Creates a sheet (table or view) Args: project_id (str): - body (CreateNotebookInstanceRequest): + body (CreateSheetRequest): client (Client): instance of the API client Raises: @@ -91,15 +91,15 @@ def sync( project_id: str, *, client: Client, - body: CreateNotebookInstanceRequest, + body: CreateSheetRequest, ) -> CreateResponse | None: - """Create notebook instance + """Create sheet - Creates a notebook instance within the project + Creates a sheet (table or view) Args: project_id (str): - body (CreateNotebookInstanceRequest): + body (CreateSheetRequest): client (Client): instance of the API client Raises: @@ -124,15 +124,15 @@ async def asyncio_detailed( project_id: str, *, client: Client, - body: CreateNotebookInstanceRequest, + body: CreateSheetRequest, ) -> Response[CreateResponse]: - """Create notebook instance + """Create sheet - Creates a notebook instance within the project + Creates a sheet (table or view) Args: project_id (str): - body (CreateNotebookInstanceRequest): + body (CreateSheetRequest): client (Client): instance of the API client Raises: @@ -157,15 +157,15 
@@ async def asyncio( project_id: str, *, client: Client, - body: CreateNotebookInstanceRequest, + body: CreateSheetRequest, ) -> CreateResponse | None: - """Create notebook instance + """Create sheet - Creates a notebook instance within the project + Creates a sheet (table or view) Args: project_id (str): - body (CreateNotebookInstanceRequest): + body (CreateSheetRequest): client (Client): instance of the API client Raises: diff --git a/cirro_api_client/v1/api/sheets/delete_sheet.py b/cirro_api_client/v1/api/sheets/delete_sheet.py new file mode 100644 index 0000000..33a7b9e --- /dev/null +++ b/cirro_api_client/v1/api/sheets/delete_sheet.py @@ -0,0 +1,109 @@ +from http import HTTPStatus +from typing import Any +from urllib.parse import quote + +import httpx + +from ... import errors +from ...client import Client +from ...types import Response + + +def _get_kwargs( + project_id: str, + sheet_id: str, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "delete", + "url": "/projects/{project_id}/sheets/{sheet_id}".format( + project_id=quote(str(project_id), safe=""), + sheet_id=quote(str(sheet_id), safe=""), + ), + } + + return _kwargs + + +def _parse_response(*, client: Client, response: httpx.Response) -> Any | None: + if response.status_code == 204: + return None + + errors.handle_error_response(response, client.raise_on_unexpected_status) + + +def _build_response(*, client: Client, response: httpx.Response) -> Response[Any]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + project_id: str, + sheet_id: str, + *, + client: Client, +) -> Response[Any]: + """Delete sheet + + Deletes a sheet + + Args: + project_id (str): + sheet_id (str): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and 
Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Any] + """ + + kwargs = _get_kwargs( + project_id=project_id, + sheet_id=sheet_id, + ) + + response = client.get_httpx_client().request( + auth=client.get_auth(), + **kwargs, + ) + + return _build_response(client=client, response=response) + + +async def asyncio_detailed( + project_id: str, + sheet_id: str, + *, + client: Client, +) -> Response[Any]: + """Delete sheet + + Deletes a sheet + + Args: + project_id (str): + sheet_id (str): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Any] + """ + + kwargs = _get_kwargs( + project_id=project_id, + sheet_id=sheet_id, + ) + + response = await client.get_async_httpx_client().request(auth=client.get_auth(), **kwargs) + + return _build_response(client=client, response=response) diff --git a/cirro_api_client/v1/api/notebooks/generate_notebook_instance_url.py b/cirro_api_client/v1/api/sheets/get_jobs.py similarity index 65% rename from cirro_api_client/v1/api/notebooks/generate_notebook_instance_url.py rename to cirro_api_client/v1/api/sheets/get_jobs.py index 8d7633e..10d47c7 100644 --- a/cirro_api_client/v1/api/notebooks/generate_notebook_instance_url.py +++ b/cirro_api_client/v1/api/sheets/get_jobs.py @@ -6,35 +6,40 @@ from ... 
import errors from ...client import Client -from ...models.open_notebook_instance_response import OpenNotebookInstanceResponse +from ...models.sheet_job import SheetJob from ...types import Response def _get_kwargs( project_id: str, - notebook_instance_id: str, + sheet_id: str, ) -> dict[str, Any]: _kwargs: dict[str, Any] = { "method": "get", - "url": "/projects/{project_id}/notebook-instances/{notebook_instance_id}:generate-url".format( + "url": "/projects/{project_id}/sheets/{sheet_id}/jobs".format( project_id=quote(str(project_id), safe=""), - notebook_instance_id=quote(str(notebook_instance_id), safe=""), + sheet_id=quote(str(sheet_id), safe=""), ), } return _kwargs -def _parse_response(*, client: Client, response: httpx.Response) -> OpenNotebookInstanceResponse | None: +def _parse_response(*, client: Client, response: httpx.Response) -> list[SheetJob] | None: if response.status_code == 200: - response_200 = OpenNotebookInstanceResponse.from_dict(response.json()) + response_200 = [] + _response_200 = response.json() + for response_200_item_data in _response_200: + response_200_item = SheetJob.from_dict(response_200_item_data) + + response_200.append(response_200_item) return response_200 errors.handle_error_response(response, client.raise_on_unexpected_status) -def _build_response(*, client: Client, response: httpx.Response) -> Response[OpenNotebookInstanceResponse]: +def _build_response(*, client: Client, response: httpx.Response) -> Response[list[SheetJob]]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -45,17 +50,17 @@ def _build_response(*, client: Client, response: httpx.Response) -> Response[Ope def sync_detailed( project_id: str, - notebook_instance_id: str, + sheet_id: str, *, client: Client, -) -> Response[OpenNotebookInstanceResponse]: - """Generate notebook instance URL +) -> Response[list[SheetJob]]: + """List jobs - Creates an authenticated URL to open up the notebook instance in your browser + 
Retrieves ingest jobs for a sheet Args: project_id (str): - notebook_instance_id (str): + sheet_id (str): client (Client): instance of the API client Raises: @@ -63,12 +68,12 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[OpenNotebookInstanceResponse] + Response[list[SheetJob]] """ kwargs = _get_kwargs( project_id=project_id, - notebook_instance_id=notebook_instance_id, + sheet_id=sheet_id, ) response = client.get_httpx_client().request( @@ -81,17 +86,17 @@ def sync_detailed( def sync( project_id: str, - notebook_instance_id: str, + sheet_id: str, *, client: Client, -) -> OpenNotebookInstanceResponse | None: - """Generate notebook instance URL +) -> list[SheetJob] | None: + """List jobs - Creates an authenticated URL to open up the notebook instance in your browser + Retrieves ingest jobs for a sheet Args: project_id (str): - notebook_instance_id (str): + sheet_id (str): client (Client): instance of the API client Raises: @@ -99,13 +104,13 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - OpenNotebookInstanceResponse + list[SheetJob] """ try: return sync_detailed( project_id=project_id, - notebook_instance_id=notebook_instance_id, + sheet_id=sheet_id, client=client, ).parsed except errors.NotFoundException: @@ -114,17 +119,17 @@ def sync( async def asyncio_detailed( project_id: str, - notebook_instance_id: str, + sheet_id: str, *, client: Client, -) -> Response[OpenNotebookInstanceResponse]: - """Generate notebook instance URL +) -> Response[list[SheetJob]]: + """List jobs - Creates an authenticated URL to open up the notebook instance in your browser + Retrieves ingest jobs for a sheet Args: project_id (str): - notebook_instance_id (str): + sheet_id (str): client (Client): instance of the API client Raises: @@ -132,12 +137,12 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[OpenNotebookInstanceResponse] + Response[list[SheetJob]] """ kwargs = _get_kwargs( project_id=project_id, - notebook_instance_id=notebook_instance_id, + sheet_id=sheet_id, ) response = await client.get_async_httpx_client().request(auth=client.get_auth(), **kwargs) @@ -147,17 +152,17 @@ async def asyncio_detailed( async def asyncio( project_id: str, - notebook_instance_id: str, + sheet_id: str, *, client: Client, -) -> OpenNotebookInstanceResponse | None: - """Generate notebook instance URL +) -> list[SheetJob] | None: + """List jobs - Creates an authenticated URL to open up the notebook instance in your browser + Retrieves ingest jobs for a sheet Args: project_id (str): - notebook_instance_id (str): + sheet_id (str): client (Client): instance of the API client Raises: @@ -165,14 +170,14 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - OpenNotebookInstanceResponse + list[SheetJob] """ try: return ( await asyncio_detailed( project_id=project_id, - notebook_instance_id=notebook_instance_id, + sheet_id=sheet_id, client=client, ) ).parsed diff --git a/cirro_api_client/v1/api/sheets/get_sheet.py b/cirro_api_client/v1/api/sheets/get_sheet.py new file mode 100644 index 0000000..a1c31d8 --- /dev/null +++ b/cirro_api_client/v1/api/sheets/get_sheet.py @@ -0,0 +1,180 @@ +from http import HTTPStatus +from typing import Any +from urllib.parse import quote + +import httpx + +from ... 
import errors +from ...client import Client +from ...models.sheet_detail import SheetDetail +from ...types import Response + + +def _get_kwargs( + project_id: str, + sheet_id: str, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/projects/{project_id}/sheets/{sheet_id}".format( + project_id=quote(str(project_id), safe=""), + sheet_id=quote(str(sheet_id), safe=""), + ), + } + + return _kwargs + + +def _parse_response(*, client: Client, response: httpx.Response) -> SheetDetail | None: + if response.status_code == 200: + response_200 = SheetDetail.from_dict(response.json()) + + return response_200 + + errors.handle_error_response(response, client.raise_on_unexpected_status) + + +def _build_response(*, client: Client, response: httpx.Response) -> Response[SheetDetail]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + project_id: str, + sheet_id: str, + *, + client: Client, +) -> Response[SheetDetail]: + """Get sheet + + Retrieves a sheet + + Args: + project_id (str): + sheet_id (str): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[SheetDetail] + """ + + kwargs = _get_kwargs( + project_id=project_id, + sheet_id=sheet_id, + ) + + response = client.get_httpx_client().request( + auth=client.get_auth(), + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + project_id: str, + sheet_id: str, + *, + client: Client, +) -> SheetDetail | None: + """Get sheet + + Retrieves a sheet + + Args: + project_id (str): + sheet_id (str): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + SheetDetail + """ + + try: + return sync_detailed( + project_id=project_id, + sheet_id=sheet_id, + client=client, + ).parsed + except errors.NotFoundException: + return None + + +async def asyncio_detailed( + project_id: str, + sheet_id: str, + *, + client: Client, +) -> Response[SheetDetail]: + """Get sheet + + Retrieves a sheet + + Args: + project_id (str): + sheet_id (str): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[SheetDetail] + """ + + kwargs = _get_kwargs( + project_id=project_id, + sheet_id=sheet_id, + ) + + response = await client.get_async_httpx_client().request(auth=client.get_auth(), **kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + project_id: str, + sheet_id: str, + *, + client: Client, +) -> SheetDetail | None: + """Get sheet + + Retrieves a sheet + + Args: + project_id (str): + sheet_id (str): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + SheetDetail + """ + + try: + return ( + await asyncio_detailed( + project_id=project_id, + sheet_id=sheet_id, + client=client, + ) + ).parsed + except errors.NotFoundException: + return None diff --git a/cirro_api_client/v1/api/sheets/get_sheet_data.py b/cirro_api_client/v1/api/sheets/get_sheet_data.py new file mode 100644 index 0000000..281e9e2 --- /dev/null +++ b/cirro_api_client/v1/api/sheets/get_sheet_data.py @@ -0,0 +1,260 @@ +from http import HTTPStatus +from typing import Any +from urllib.parse import quote + +import httpx + +from ... 
import errors +from ...client import Client +from ...models.sheet_query_response import SheetQueryResponse +from ...models.sql_sort_order import SqlSortOrder +from ...types import UNSET, Response, Unset + + +def _get_kwargs( + project_id: str, + sheet_id: str, + *, + limit: int | Unset = 1000, + page: int | Unset = 1, + order_by: None | str | Unset = UNSET, + order: SqlSortOrder | Unset = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} + + params["limit"] = limit + + params["page"] = page + + json_order_by: None | str | Unset + if isinstance(order_by, Unset): + json_order_by = UNSET + else: + json_order_by = order_by + params["orderBy"] = json_order_by + + json_order: str | Unset = UNSET + if not isinstance(order, Unset): + json_order = order.value + + params["order"] = json_order + + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} + + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/projects/{project_id}/sheets/{sheet_id}/data".format( + project_id=quote(str(project_id), safe=""), + sheet_id=quote(str(sheet_id), safe=""), + ), + "params": params, + } + + return _kwargs + + +def _parse_response(*, client: Client, response: httpx.Response) -> SheetQueryResponse | None: + if response.status_code == 200: + response_200 = SheetQueryResponse.from_dict(response.json()) + + return response_200 + + errors.handle_error_response(response, client.raise_on_unexpected_status) + + +def _build_response(*, client: Client, response: httpx.Response) -> Response[SheetQueryResponse]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + project_id: str, + sheet_id: str, + *, + client: Client, + limit: int | Unset = 1000, + page: int | Unset = 1, + order_by: None | str | Unset = UNSET, + order: SqlSortOrder | Unset = UNSET, +) -> Response[SheetQueryResponse]: + """Get sheet data 
+ + Returns paginated rows from a sheet. The first column is always _row_id, which uniquely identifies + each row and is required for row updates via PUT. Defaults page=1, limit=1000, order=ASCENDING. + + Args: + project_id (str): + sheet_id (str): + limit (int | Unset): Default: 1000. + page (int | Unset): Default: 1. + order_by (None | str | Unset): + order (SqlSortOrder | Unset): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[SheetQueryResponse] + """ + + kwargs = _get_kwargs( + project_id=project_id, + sheet_id=sheet_id, + limit=limit, + page=page, + order_by=order_by, + order=order, + ) + + response = client.get_httpx_client().request( + auth=client.get_auth(), + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + project_id: str, + sheet_id: str, + *, + client: Client, + limit: int | Unset = 1000, + page: int | Unset = 1, + order_by: None | str | Unset = UNSET, + order: SqlSortOrder | Unset = UNSET, +) -> SheetQueryResponse | None: + """Get sheet data + + Returns paginated rows from a sheet. The first column is always _row_id, which uniquely identifies + each row and is required for row updates via PUT. Defaults page=1, limit=1000, order=ASCENDING. + + Args: + project_id (str): + sheet_id (str): + limit (int | Unset): Default: 1000. + page (int | Unset): Default: 1. + order_by (None | str | Unset): + order (SqlSortOrder | Unset): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + SheetQueryResponse + """ + + try: + return sync_detailed( + project_id=project_id, + sheet_id=sheet_id, + client=client, + limit=limit, + page=page, + order_by=order_by, + order=order, + ).parsed + except errors.NotFoundException: + return None + + +async def asyncio_detailed( + project_id: str, + sheet_id: str, + *, + client: Client, + limit: int | Unset = 1000, + page: int | Unset = 1, + order_by: None | str | Unset = UNSET, + order: SqlSortOrder | Unset = UNSET, +) -> Response[SheetQueryResponse]: + """Get sheet data + + Returns paginated rows from a sheet. The first column is always _row_id, which uniquely identifies + each row and is required for row updates via PUT. Defaults page=1, limit=1000, order=ASCENDING. + + Args: + project_id (str): + sheet_id (str): + limit (int | Unset): Default: 1000. + page (int | Unset): Default: 1. + order_by (None | str | Unset): + order (SqlSortOrder | Unset): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[SheetQueryResponse] + """ + + kwargs = _get_kwargs( + project_id=project_id, + sheet_id=sheet_id, + limit=limit, + page=page, + order_by=order_by, + order=order, + ) + + response = await client.get_async_httpx_client().request(auth=client.get_auth(), **kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + project_id: str, + sheet_id: str, + *, + client: Client, + limit: int | Unset = 1000, + page: int | Unset = 1, + order_by: None | str | Unset = UNSET, + order: SqlSortOrder | Unset = UNSET, +) -> SheetQueryResponse | None: + """Get sheet data + + Returns paginated rows from a sheet. The first column is always _row_id, which uniquely identifies + each row and is required for row updates via PUT. Defaults page=1, limit=1000, order=ASCENDING. 
+ + Args: + project_id (str): + sheet_id (str): + limit (int | Unset): Default: 1000. + page (int | Unset): Default: 1. + order_by (None | str | Unset): + order (SqlSortOrder | Unset): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + SheetQueryResponse + """ + + try: + return ( + await asyncio_detailed( + project_id=project_id, + sheet_id=sheet_id, + client=client, + limit=limit, + page=page, + order_by=order_by, + order=order, + ) + ).parsed + except errors.NotFoundException: + return None diff --git a/cirro_api_client/v1/api/sheets/get_sheets.py b/cirro_api_client/v1/api/sheets/get_sheets.py new file mode 100644 index 0000000..bcbf89c --- /dev/null +++ b/cirro_api_client/v1/api/sheets/get_sheets.py @@ -0,0 +1,171 @@ +from http import HTTPStatus +from typing import Any +from urllib.parse import quote + +import httpx + +from ... 
import errors +from ...client import Client +from ...models.sheet import Sheet +from ...types import Response + + +def _get_kwargs( + project_id: str, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/projects/{project_id}/sheets".format( + project_id=quote(str(project_id), safe=""), + ), + } + + return _kwargs + + +def _parse_response(*, client: Client, response: httpx.Response) -> list[Sheet] | None: + if response.status_code == 200: + response_200 = [] + _response_200 = response.json() + for response_200_item_data in _response_200: + response_200_item = Sheet.from_dict(response_200_item_data) + + response_200.append(response_200_item) + + return response_200 + + errors.handle_error_response(response, client.raise_on_unexpected_status) + + +def _build_response(*, client: Client, response: httpx.Response) -> Response[list[Sheet]]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + project_id: str, + *, + client: Client, +) -> Response[list[Sheet]]: + """List sheets + + Retrieves sheets for a project + + Args: + project_id (str): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[list[Sheet]] + """ + + kwargs = _get_kwargs( + project_id=project_id, + ) + + response = client.get_httpx_client().request( + auth=client.get_auth(), + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + project_id: str, + *, + client: Client, +) -> list[Sheet] | None: + """List sheets + + Retrieves sheets for a project + + Args: + project_id (str): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + list[Sheet] + """ + + try: + return sync_detailed( + project_id=project_id, + client=client, + ).parsed + except errors.NotFoundException: + return None + + +async def asyncio_detailed( + project_id: str, + *, + client: Client, +) -> Response[list[Sheet]]: + """List sheets + + Retrieves sheets for a project + + Args: + project_id (str): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[list[Sheet]] + """ + + kwargs = _get_kwargs( + project_id=project_id, + ) + + response = await client.get_async_httpx_client().request(auth=client.get_auth(), **kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + project_id: str, + *, + client: Client, +) -> list[Sheet] | None: + """List sheets + + Retrieves sheets for a project + + Args: + project_id (str): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + list[Sheet] + """ + + try: + return ( + await asyncio_detailed( + project_id=project_id, + client=client, + ) + ).parsed + except errors.NotFoundException: + return None diff --git a/cirro_api_client/v1/api/notebooks/stop_notebook_instance.py b/cirro_api_client/v1/api/sheets/refresh_view.py similarity index 79% rename from cirro_api_client/v1/api/notebooks/stop_notebook_instance.py rename to cirro_api_client/v1/api/sheets/refresh_view.py index 04412fb..6bd4934 100644 --- a/cirro_api_client/v1/api/notebooks/stop_notebook_instance.py +++ b/cirro_api_client/v1/api/sheets/refresh_view.py @@ -11,13 +11,13 @@ def _get_kwargs( project_id: str, - notebook_instance_id: str, + sheet_id: str, ) -> dict[str, Any]: _kwargs: dict[str, Any] = { "method": "post", - "url": "/projects/{project_id}/notebook-instances/{notebook_instance_id}:stop".format( + "url": "/projects/{project_id}/sheets/{sheet_id}:refresh".format( project_id=quote(str(project_id), safe=""), - notebook_instance_id=quote(str(notebook_instance_id), safe=""), + sheet_id=quote(str(sheet_id), safe=""), ), } @@ -42,17 +42,17 @@ def _build_response(*, client: Client, response: httpx.Response) -> Response[Any def sync_detailed( project_id: str, - notebook_instance_id: str, + sheet_id: str, *, client: Client, ) -> Response[Any]: - """Stop notebook instance + """Refresh view - Shuts down a running notebook instance + Re-materializes a view from its source tables Args: project_id (str): - notebook_instance_id (str): + sheet_id (str): client (Client): instance of the API client Raises: @@ -65,7 +65,7 @@ def sync_detailed( kwargs = _get_kwargs( project_id=project_id, - notebook_instance_id=notebook_instance_id, + sheet_id=sheet_id, ) response = client.get_httpx_client().request( @@ -78,17 +78,17 @@ def sync_detailed( async def asyncio_detailed( project_id: str, - notebook_instance_id: str, + sheet_id: str, *, client: Client, ) -> Response[Any]: - """Stop notebook instance + """Refresh view - Shuts down a 
running notebook instance + Re-materializes a view from its source tables Args: project_id (str): - notebook_instance_id (str): + sheet_id (str): client (Client): instance of the API client Raises: @@ -101,7 +101,7 @@ async def asyncio_detailed( kwargs = _get_kwargs( project_id=project_id, - notebook_instance_id=notebook_instance_id, + sheet_id=sheet_id, ) response = await client.get_async_httpx_client().request(auth=client.get_auth(), **kwargs) diff --git a/cirro_api_client/v1/api/sheets/trigger_ingest.py b/cirro_api_client/v1/api/sheets/trigger_ingest.py new file mode 100644 index 0000000..2490b66 --- /dev/null +++ b/cirro_api_client/v1/api/sheets/trigger_ingest.py @@ -0,0 +1,202 @@ +from http import HTTPStatus +from typing import Any +from urllib.parse import quote + +import httpx + +from ... import errors +from ...client import Client +from ...models.create_response import CreateResponse +from ...models.trigger_ingest_request import TriggerIngestRequest +from ...types import Response + + +def _get_kwargs( + project_id: str, + sheet_id: str, + *, + body: TriggerIngestRequest, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + + _kwargs: dict[str, Any] = { + "method": "post", + "url": "/projects/{project_id}/sheets/{sheet_id}/ingest".format( + project_id=quote(str(project_id), safe=""), + sheet_id=quote(str(sheet_id), safe=""), + ), + } + + _kwargs["json"] = body.to_dict() + + headers["Content-Type"] = "application/json" + + _kwargs["headers"] = headers + return _kwargs + + +def _parse_response(*, client: Client, response: httpx.Response) -> CreateResponse | None: + if response.status_code == 201: + response_201 = CreateResponse.from_dict(response.json()) + + return response_201 + + errors.handle_error_response(response, client.raise_on_unexpected_status) + + +def _build_response(*, client: Client, response: httpx.Response) -> Response[CreateResponse]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + 
headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + project_id: str, + sheet_id: str, + *, + client: Client, + body: TriggerIngestRequest, +) -> Response[CreateResponse]: + """Trigger ingest + + Triggers an async file ingest into the sheet + + Args: + project_id (str): + sheet_id (str): + body (TriggerIngestRequest): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[CreateResponse] + """ + + kwargs = _get_kwargs( + project_id=project_id, + sheet_id=sheet_id, + body=body, + ) + + response = client.get_httpx_client().request( + auth=client.get_auth(), + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + project_id: str, + sheet_id: str, + *, + client: Client, + body: TriggerIngestRequest, +) -> CreateResponse | None: + """Trigger ingest + + Triggers an async file ingest into the sheet + + Args: + project_id (str): + sheet_id (str): + body (TriggerIngestRequest): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + CreateResponse + """ + + try: + return sync_detailed( + project_id=project_id, + sheet_id=sheet_id, + client=client, + body=body, + ).parsed + except errors.NotFoundException: + return None + + +async def asyncio_detailed( + project_id: str, + sheet_id: str, + *, + client: Client, + body: TriggerIngestRequest, +) -> Response[CreateResponse]: + """Trigger ingest + + Triggers an async file ingest into the sheet + + Args: + project_id (str): + sheet_id (str): + body (TriggerIngestRequest): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[CreateResponse] + """ + + kwargs = _get_kwargs( + project_id=project_id, + sheet_id=sheet_id, + body=body, + ) + + response = await client.get_async_httpx_client().request(auth=client.get_auth(), **kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + project_id: str, + sheet_id: str, + *, + client: Client, + body: TriggerIngestRequest, +) -> CreateResponse | None: + """Trigger ingest + + Triggers an async file ingest into the sheet + + Args: + project_id (str): + sheet_id (str): + body (TriggerIngestRequest): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + CreateResponse + """ + + try: + return ( + await asyncio_detailed( + project_id=project_id, + sheet_id=sheet_id, + client=client, + body=body, + ) + ).parsed + except errors.NotFoundException: + return None diff --git a/cirro_api_client/v1/api/sheets/update_sheet.py b/cirro_api_client/v1/api/sheets/update_sheet.py new file mode 100644 index 0000000..7652461 --- /dev/null +++ b/cirro_api_client/v1/api/sheets/update_sheet.py @@ -0,0 +1,125 @@ +from http import HTTPStatus +from typing import Any +from urllib.parse import quote + +import httpx + +from ... import errors +from ...client import Client +from ...models.update_sheet_request import UpdateSheetRequest +from ...types import Response + + +def _get_kwargs( + project_id: str, + sheet_id: str, + *, + body: UpdateSheetRequest, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + + _kwargs: dict[str, Any] = { + "method": "put", + "url": "/projects/{project_id}/sheets/{sheet_id}".format( + project_id=quote(str(project_id), safe=""), + sheet_id=quote(str(sheet_id), safe=""), + ), + } + + _kwargs["json"] = body.to_dict() + + headers["Content-Type"] = "application/json" + + _kwargs["headers"] = headers + return _kwargs + + +def _parse_response(*, client: Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: + return None + + errors.handle_error_response(response, client.raise_on_unexpected_status) + + +def _build_response(*, client: Client, response: httpx.Response) -> Response[Any]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + project_id: str, + sheet_id: str, + *, + client: Client, + body: UpdateSheetRequest, +) -> Response[Any]: + """Update sheet + + Updates a sheet (table or view) + + Args: + project_id (str): + sheet_id (str): + body (UpdateSheetRequest): + client (Client): instance of the API client + + 
Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Any] + """ + + kwargs = _get_kwargs( + project_id=project_id, + sheet_id=sheet_id, + body=body, + ) + + response = client.get_httpx_client().request( + auth=client.get_auth(), + **kwargs, + ) + + return _build_response(client=client, response=response) + + +async def asyncio_detailed( + project_id: str, + sheet_id: str, + *, + client: Client, + body: UpdateSheetRequest, +) -> Response[Any]: + """Update sheet + + Updates a sheet (table or view) + + Args: + project_id (str): + sheet_id (str): + body (UpdateSheetRequest): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Any] + """ + + kwargs = _get_kwargs( + project_id=project_id, + sheet_id=sheet_id, + body=body, + ) + + response = await client.get_async_httpx_client().request(auth=client.get_auth(), **kwargs) + + return _build_response(client=client, response=response) diff --git a/cirro_api_client/v1/api/sheets/update_sheet_data.py b/cirro_api_client/v1/api/sheets/update_sheet_data.py new file mode 100644 index 0000000..1b400b4 --- /dev/null +++ b/cirro_api_client/v1/api/sheets/update_sheet_data.py @@ -0,0 +1,127 @@ +from http import HTTPStatus +from typing import Any +from urllib.parse import quote + +import httpx + +from ... 
import errors +from ...client import Client +from ...models.update_rows_request import UpdateRowsRequest +from ...types import Response + + +def _get_kwargs( + project_id: str, + sheet_id: str, + *, + body: UpdateRowsRequest, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + + _kwargs: dict[str, Any] = { + "method": "put", + "url": "/projects/{project_id}/sheets/{sheet_id}/data".format( + project_id=quote(str(project_id), safe=""), + sheet_id=quote(str(sheet_id), safe=""), + ), + } + + _kwargs["json"] = body.to_dict() + + headers["Content-Type"] = "application/json" + + _kwargs["headers"] = headers + return _kwargs + + +def _parse_response(*, client: Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: + return None + + errors.handle_error_response(response, client.raise_on_unexpected_status) + + +def _build_response(*, client: Client, response: httpx.Response) -> Response[Any]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + project_id: str, + sheet_id: str, + *, + client: Client, + body: UpdateRowsRequest, +) -> Response[Any]: + """Update sheet rows + + Updates specific rows in a TABLE sheet by _row_id. This is a partial update: only the columns + included in each entry are modified, all other columns are left unchanged. + + Args: + project_id (str): + sheet_id (str): + body (UpdateRowsRequest): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[Any] + """ + + kwargs = _get_kwargs( + project_id=project_id, + sheet_id=sheet_id, + body=body, + ) + + response = client.get_httpx_client().request( + auth=client.get_auth(), + **kwargs, + ) + + return _build_response(client=client, response=response) + + +async def asyncio_detailed( + project_id: str, + sheet_id: str, + *, + client: Client, + body: UpdateRowsRequest, +) -> Response[Any]: + """Update sheet rows + + Updates specific rows in a TABLE sheet by _row_id. This is a partial update: only the columns + included in each entry are modified, all other columns are left unchanged. + + Args: + project_id (str): + sheet_id (str): + body (UpdateRowsRequest): + client (Client): instance of the API client + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Any] + """ + + kwargs = _get_kwargs( + project_id=project_id, + sheet_id=sheet_id, + body=body, + ) + + response = await client.get_async_httpx_client().request(auth=client.get_auth(), **kwargs) + + return _build_response(client=client, response=response) diff --git a/cirro_api_client/v1/models/__init__.py b/cirro_api_client/v1/models/__init__.py index 9ea8ac9..8e010a6 100644 --- a/cirro_api_client/v1/models/__init__.py +++ b/cirro_api_client/v1/models/__init__.py @@ -34,6 +34,9 @@ from .classification_input import ClassificationInput from .cloud_account import CloudAccount from .cloud_account_type import CloudAccountType +from .cloud_quota import CloudQuota +from .column_data_type import ColumnDataType +from .column_def import ColumnDef from .column_definition import ColumnDefinition from .compute_environment_configuration import ComputeEnvironmentConfiguration from .compute_environment_configuration_input import ComputeEnvironmentConfigurationInput @@ -42,10 +45,10 @@ from .contact import Contact from 
.contact_input import ContactInput from .cost_response import CostResponse -from .create_notebook_instance_request import CreateNotebookInstanceRequest from .create_project_access_request import CreateProjectAccessRequest from .create_reference_request import CreateReferenceRequest from .create_response import CreateResponse +from .create_sheet_request import CreateSheetRequest from .custom_pipeline_settings import CustomPipelineSettings from .custom_process_input import CustomProcessInput from .customer_type import CustomerType @@ -66,7 +69,6 @@ from .dataset_detail_params import DatasetDetailParams from .dataset_detail_source_sample_files_map import DatasetDetailSourceSampleFilesMap from .dataset_viz import DatasetViz -from .dataset_viz_config import DatasetVizConfig from .discussion import Discussion from .discussion_input import DiscussionInput from .discussion_type import DiscussionType @@ -76,14 +78,19 @@ from .error_message import ErrorMessage from .executor import Executor from .feature_flags import FeatureFlags +from .file_def import FileDef from .file_entry import FileEntry from .file_entry_metadata import FileEntryMetadata from .file_mapping_rule import FileMappingRule from .file_name_match import FileNameMatch from .file_name_pattern import FileNamePattern from .file_requirements import FileRequirements +from .file_type import FileType +from .filter_operator import FilterOperator +from .foreign_key_ref import ForeignKeyRef from .form_schema import FormSchema from .form_schema_form import FormSchemaForm +from .form_schema_metadata_requirements import FormSchemaMetadataRequirements from .form_schema_ui import FormSchemaUi from .fulfillment_response import FulfillmentResponse from .generate_sftp_credentials_request import GenerateSftpCredentialsRequest @@ -108,8 +115,11 @@ from .import_data_request_download_method import ImportDataRequestDownloadMethod from .invite_user_request import InviteUserRequest from .invite_user_response import InviteUserResponse 
+from .join_condition import JoinCondition +from .join_type import JoinType from .list_events_entity_type import ListEventsEntityType from .log_entry import LogEntry +from .logical_operator import LogicalOperator from .login_provider import LoginProvider from .message import Message from .message_input import MessageInput @@ -120,9 +130,6 @@ from .move_dataset_input import MoveDatasetInput from .move_dataset_response import MoveDatasetResponse from .named_item import NamedItem -from .notebook_instance import NotebookInstance -from .notebook_instance_status_response import NotebookInstanceStatusResponse -from .open_notebook_instance_response import OpenNotebookInstanceResponse from .paginated_response_app_registration_dto import PaginatedResponseAppRegistrationDto from .paginated_response_dataset_list_dto import PaginatedResponseDatasetListDto from .paginated_response_discussion import PaginatedResponseDiscussion @@ -152,14 +159,20 @@ from .project_role import ProjectRole from .project_settings import ProjectSettings from .project_user import ProjectUser +from .query_column import QueryColumn from .reference import Reference from .reference_type import ReferenceType from .reference_type_validation_item import ReferenceTypeValidationItem from .repository_type import RepositoryType +from .request_quota_increase_command import RequestQuotaIncreaseCommand +from .request_quota_increase_response import RequestQuotaIncreaseResponse from .request_status import RequestStatus from .requirement_fulfillment_input import RequirementFulfillmentInput from .requirement_input import RequirementInput from .resources_info import ResourcesInfo +from .row_update import RowUpdate +from .row_update_values import RowUpdateValues +from .row_update_values_additional_property import RowUpdateValuesAdditionalProperty from .run_analysis_request import RunAnalysisRequest from .run_analysis_request_params import RunAnalysisRequestParams from .run_analysis_request_source_sample_files_map import 
RunAnalysisRequestSourceSampleFilesMap @@ -175,8 +188,19 @@ from .share_detail import ShareDetail from .share_input import ShareInput from .share_type import ShareType +from .shared_filesystem import SharedFilesystem +from .shared_filesystem_input import SharedFilesystemInput from .sharing_type import SharingType +from .sheet import Sheet +from .sheet_creation_mode import SheetCreationMode +from .sheet_detail import SheetDetail +from .sheet_job import SheetJob +from .sheet_job_type import SheetJobType +from .sheet_query_response import SheetQueryResponse +from .sheet_query_response_rows_item import SheetQueryResponseRowsItem +from .sheet_type import SheetType from .sort_order import SortOrder +from .sql_sort_order import SqlSortOrder from .status import Status from .stop_execution_response import StopExecutionResponse from .sync_status import SyncStatus @@ -186,7 +210,10 @@ from .task import Task from .task_cost import TaskCost from .tenant_info import TenantInfo +from .trigger_ingest_request import TriggerIngestRequest from .update_dataset_request import UpdateDatasetRequest +from .update_rows_request import UpdateRowsRequest +from .update_sheet_request import UpdateSheetRequest from .update_user_request import UpdateUserRequest from .upload_dataset_create_response import UploadDatasetCreateResponse from .upload_dataset_request import UploadDatasetRequest @@ -197,6 +224,11 @@ from .validate_file_name_patterns_request import ValidateFileNamePatternsRequest from .validate_file_requirements_request import ValidateFileRequirementsRequest from .version_specification import VersionSpecification +from .view_filter import ViewFilter +from .view_filter_values import ViewFilterValues +from .view_join import ViewJoin +from .view_query_request import ViewQueryRequest +from .view_sheet_ref import ViewSheetRef from .workspace import Workspace from .workspace_compute_config import WorkspaceComputeConfig from .workspace_compute_config_environment_variables import 
WorkspaceComputeConfigEnvironmentVariables @@ -240,6 +272,9 @@ "ClassificationInput", "CloudAccount", "CloudAccountType", + "CloudQuota", + "ColumnDataType", + "ColumnDef", "ColumnDefinition", "ComputeEnvironmentConfiguration", "ComputeEnvironmentConfigurationInput", @@ -248,10 +283,10 @@ "Contact", "ContactInput", "CostResponse", - "CreateNotebookInstanceRequest", "CreateProjectAccessRequest", "CreateReferenceRequest", "CreateResponse", + "CreateSheetRequest", "CustomerType", "CustomPipelineSettings", "CustomProcessInput", @@ -272,7 +307,6 @@ "DatasetDetailParams", "DatasetDetailSourceSampleFilesMap", "DatasetViz", - "DatasetVizConfig", "Discussion", "DiscussionInput", "DiscussionType", @@ -282,14 +316,19 @@ "ErrorMessage", "Executor", "FeatureFlags", + "FileDef", "FileEntry", "FileEntryMetadata", "FileMappingRule", "FileNameMatch", "FileNamePattern", "FileRequirements", + "FileType", + "FilterOperator", + "ForeignKeyRef", "FormSchema", "FormSchemaForm", + "FormSchemaMetadataRequirements", "FormSchemaUi", "FulfillmentResponse", "GenerateSftpCredentialsRequest", @@ -314,8 +353,11 @@ "ImportDataRequestDownloadMethod", "InviteUserRequest", "InviteUserResponse", + "JoinCondition", + "JoinType", "ListEventsEntityType", "LogEntry", + "LogicalOperator", "LoginProvider", "Message", "MessageInput", @@ -326,9 +368,6 @@ "MoveDatasetInput", "MoveDatasetResponse", "NamedItem", - "NotebookInstance", - "NotebookInstanceStatusResponse", - "OpenNotebookInstanceResponse", "PaginatedResponseAppRegistrationDto", "PaginatedResponseDatasetListDto", "PaginatedResponseDiscussion", @@ -358,14 +397,20 @@ "ProjectRole", "ProjectSettings", "ProjectUser", + "QueryColumn", "Reference", "ReferenceType", "ReferenceTypeValidationItem", "RepositoryType", + "RequestQuotaIncreaseCommand", + "RequestQuotaIncreaseResponse", "RequestStatus", "RequirementFulfillmentInput", "RequirementInput", "ResourcesInfo", + "RowUpdate", + "RowUpdateValues", + "RowUpdateValuesAdditionalProperty", 
"RunAnalysisRequest", "RunAnalysisRequestParams", "RunAnalysisRequestSourceSampleFilesMap", @@ -379,10 +424,21 @@ "SftpCredentials", "Share", "ShareDetail", + "SharedFilesystem", + "SharedFilesystemInput", "ShareInput", "ShareType", "SharingType", + "Sheet", + "SheetCreationMode", + "SheetDetail", + "SheetJob", + "SheetJobType", + "SheetQueryResponse", + "SheetQueryResponseRowsItem", + "SheetType", "SortOrder", + "SqlSortOrder", "Status", "StopExecutionResponse", "SyncStatus", @@ -392,7 +448,10 @@ "Task", "TaskCost", "TenantInfo", + "TriggerIngestRequest", "UpdateDatasetRequest", + "UpdateRowsRequest", + "UpdateSheetRequest", "UpdateUserRequest", "UploadDatasetCreateResponse", "UploadDatasetRequest", @@ -403,6 +462,11 @@ "ValidateFileNamePatternsRequest", "ValidateFileRequirementsRequest", "VersionSpecification", + "ViewFilter", + "ViewFilterValues", + "ViewJoin", + "ViewQueryRequest", + "ViewSheetRef", "Workspace", "WorkspaceComputeConfig", "WorkspaceComputeConfigEnvironmentVariables", diff --git a/cirro_api_client/v1/models/artifact.py b/cirro_api_client/v1/models/artifact.py index e3123b7..dd43acf 100644 --- a/cirro_api_client/v1/models/artifact.py +++ b/cirro_api_client/v1/models/artifact.py @@ -17,7 +17,7 @@ class Artifact: Attributes: type_ (ArtifactType): - path (str): + path (str): A secondary file or resource associated with a dataset """ type_: ArtifactType diff --git a/cirro_api_client/v1/models/cloud_quota.py b/cirro_api_client/v1/models/cloud_quota.py new file mode 100644 index 0000000..852de13 --- /dev/null +++ b/cirro_api_client/v1/models/cloud_quota.py @@ -0,0 +1,123 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="CloudQuota") + + +@_attrs_define +class CloudQuota: + """ + Attributes: + name (str): + description (str): + service (str): 
+ code (str): + applied_quota (float): + has_open_request (bool): + requested_quota (float | None | Unset): + """ + + name: str + description: str + service: str + code: str + applied_quota: float + has_open_request: bool + requested_quota: float | None | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + name = self.name + + description = self.description + + service = self.service + + code = self.code + + applied_quota = self.applied_quota + + has_open_request = self.has_open_request + + requested_quota: float | None | Unset + if isinstance(self.requested_quota, Unset): + requested_quota = UNSET + else: + requested_quota = self.requested_quota + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "name": name, + "description": description, + "service": service, + "code": code, + "appliedQuota": applied_quota, + "hasOpenRequest": has_open_request, + } + ) + if requested_quota is not UNSET: + field_dict["requestedQuota"] = requested_quota + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + name = d.pop("name") + + description = d.pop("description") + + service = d.pop("service") + + code = d.pop("code") + + applied_quota = d.pop("appliedQuota") + + has_open_request = d.pop("hasOpenRequest") + + def _parse_requested_quota(data: object) -> float | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(float | None | Unset, data) + + requested_quota = _parse_requested_quota(d.pop("requestedQuota", UNSET)) + + cloud_quota = cls( + name=name, + description=description, + service=service, + code=code, + applied_quota=applied_quota, + has_open_request=has_open_request, + requested_quota=requested_quota, + ) + + cloud_quota.additional_properties = d + return cloud_quota + + @property + def 
additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/column_data_type.py b/cirro_api_client/v1/models/column_data_type.py new file mode 100644 index 0000000..e57531d --- /dev/null +++ b/cirro_api_client/v1/models/column_data_type.py @@ -0,0 +1,21 @@ +from enum import Enum + + +class ColumnDataType(str, Enum): + BIGINT = "BIGINT" + BOOLEAN = "BOOLEAN" + DATE = "DATE" + DOUBLE = "DOUBLE" + FLOAT = "FLOAT" + INTEGER = "INTEGER" + STRING = "STRING" + TIMESTAMP = "TIMESTAMP" + UNKNOWN = "UNKNOWN" + """ This is a fallback value for when the value is not known, do not use this value when making requests """ + + def __str__(self) -> str: + return str(self.value) + + @classmethod + def _missing_(cls, number): + return cls(cls.UNKNOWN) diff --git a/cirro_api_client/v1/models/column_def.py b/cirro_api_client/v1/models/column_def.py new file mode 100644 index 0000000..3fa698b --- /dev/null +++ b/cirro_api_client/v1/models/column_def.py @@ -0,0 +1,118 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..models.column_data_type import ColumnDataType +from ..types import UNSET, Unset + +if TYPE_CHECKING: + from ..models.foreign_key_ref import ForeignKeyRef + + +T = TypeVar("T", bound="ColumnDef") + + +@_attrs_define +class ColumnDef: + """ + Attributes: + name (str): + data_type (ColumnDataType): + description (str): + foreign_key (ForeignKeyRef | None | Unset): + """ + + name: str + 
data_type: ColumnDataType + description: str + foreign_key: ForeignKeyRef | None | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + from ..models.foreign_key_ref import ForeignKeyRef + + name = self.name + + data_type = self.data_type.value + + description = self.description + + foreign_key: dict[str, Any] | None | Unset + if isinstance(self.foreign_key, Unset): + foreign_key = UNSET + elif isinstance(self.foreign_key, ForeignKeyRef): + foreign_key = self.foreign_key.to_dict() + else: + foreign_key = self.foreign_key + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "name": name, + "dataType": data_type, + "description": description, + } + ) + if foreign_key is not UNSET: + field_dict["foreignKey"] = foreign_key + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.foreign_key_ref import ForeignKeyRef + + d = dict(src_dict) + name = d.pop("name") + + data_type = ColumnDataType(d.pop("dataType")) + + description = d.pop("description") + + def _parse_foreign_key(data: object) -> ForeignKeyRef | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, dict): + raise TypeError() + foreign_key_type_1 = ForeignKeyRef.from_dict(data) + + return foreign_key_type_1 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(ForeignKeyRef | None | Unset, data) + + foreign_key = _parse_foreign_key(d.pop("foreignKey", UNSET)) + + column_def = cls( + name=name, + data_type=data_type, + description=description, + foreign_key=foreign_key, + ) + + column_def.additional_properties = d + return column_def + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return 
self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/create_notebook_instance_request.py b/cirro_api_client/v1/models/create_notebook_instance_request.py deleted file mode 100644 index 6c130f4..0000000 --- a/cirro_api_client/v1/models/create_notebook_instance_request.py +++ /dev/null @@ -1,128 +0,0 @@ -from __future__ import annotations - -from collections.abc import Mapping -from typing import Any, TypeVar, cast - -from attrs import define as _attrs_define -from attrs import field as _attrs_field - -from ..types import UNSET, Unset - -T = TypeVar("T", bound="CreateNotebookInstanceRequest") - - -@_attrs_define -class CreateNotebookInstanceRequest: - """ - Attributes: - name (str): - instance_type (str): AWS EC2 Instance Type (see list of available options) Example: ml.t3.medium. - accelerator_types (list[str]): - volume_size_gb (int): - git_repositories (list[str] | None | Unset): List of public git repositories to clone into the notebook - instance. - is_shared_with_project (bool | Unset): Whether the notebook is shared with the project Default: False. 
- """ - - name: str - instance_type: str - accelerator_types: list[str] - volume_size_gb: int - git_repositories: list[str] | None | Unset = UNSET - is_shared_with_project: bool | Unset = False - additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - - def to_dict(self) -> dict[str, Any]: - name = self.name - - instance_type = self.instance_type - - accelerator_types = self.accelerator_types - - volume_size_gb = self.volume_size_gb - - git_repositories: list[str] | None | Unset - if isinstance(self.git_repositories, Unset): - git_repositories = UNSET - elif isinstance(self.git_repositories, list): - git_repositories = self.git_repositories - - else: - git_repositories = self.git_repositories - - is_shared_with_project = self.is_shared_with_project - - field_dict: dict[str, Any] = {} - field_dict.update(self.additional_properties) - field_dict.update( - { - "name": name, - "instanceType": instance_type, - "acceleratorTypes": accelerator_types, - "volumeSizeGB": volume_size_gb, - } - ) - if git_repositories is not UNSET: - field_dict["gitRepositories"] = git_repositories - if is_shared_with_project is not UNSET: - field_dict["isSharedWithProject"] = is_shared_with_project - - return field_dict - - @classmethod - def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: - d = dict(src_dict) - name = d.pop("name") - - instance_type = d.pop("instanceType") - - accelerator_types = cast(list[str], d.pop("acceleratorTypes")) - - volume_size_gb = d.pop("volumeSizeGB") - - def _parse_git_repositories(data: object) -> list[str] | None | Unset: - if data is None: - return data - if isinstance(data, Unset): - return data - try: - if not isinstance(data, list): - raise TypeError() - git_repositories_type_0 = cast(list[str], data) - - return git_repositories_type_0 - except (TypeError, ValueError, AttributeError, KeyError): - pass - return cast(list[str] | None | Unset, data) - - git_repositories = _parse_git_repositories(d.pop("gitRepositories", 
UNSET)) - - is_shared_with_project = d.pop("isSharedWithProject", UNSET) - - create_notebook_instance_request = cls( - name=name, - instance_type=instance_type, - accelerator_types=accelerator_types, - volume_size_gb=volume_size_gb, - git_repositories=git_repositories, - is_shared_with_project=is_shared_with_project, - ) - - create_notebook_instance_request.additional_properties = d - return create_notebook_instance_request - - @property - def additional_keys(self) -> list[str]: - return list(self.additional_properties.keys()) - - def __getitem__(self, key: str) -> Any: - return self.additional_properties[key] - - def __setitem__(self, key: str, value: Any) -> None: - self.additional_properties[key] = value - - def __delitem__(self, key: str) -> None: - del self.additional_properties[key] - - def __contains__(self, key: str) -> bool: - return key in self.additional_properties diff --git a/cirro_api_client/v1/models/create_sheet_request.py b/cirro_api_client/v1/models/create_sheet_request.py new file mode 100644 index 0000000..4c2cc6e --- /dev/null +++ b/cirro_api_client/v1/models/create_sheet_request.py @@ -0,0 +1,252 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..models.sheet_creation_mode import SheetCreationMode +from ..models.sheet_type import SheetType +from ..types import UNSET, Unset + +if TYPE_CHECKING: + from ..models.column_def import ColumnDef + from ..models.file_def import FileDef + from ..models.view_query_request import ViewQueryRequest + + +T = TypeVar("T", bound="CreateSheetRequest") + + +@_attrs_define +class CreateSheetRequest: + """ + Attributes: + name (str): Display name for the sheet + namespace_name (str): Namespace for the sheet. This serves as a container to group and manage sheets. + Permissions can be broadly managed at this level too. Example: alz_cohort. 
+ table_name (str): Name of the sheet's underlying table Example: my_table. + sheet_type (SheetType): + description (str | Unset): Optional description of the sheet's purpose or contents + audit_read_access (bool | Unset): Enable audit logging for read access to this sheet Default: False. + sheet_creation_mode (None | SheetCreationMode | Unset): How the table should be initialized (required for TABLE) + columns (list[ColumnDef] | None | Unset): Column definitions for the table schema (required for TABLE) + file_def (FileDef | None | Unset): If provided, an ingest job is triggered immediately after table creation + (TABLE only) + view_definition (None | Unset | ViewQueryRequest): View definition specifying sheets, joins, columns, and + filters (required for VIEW) + """ + + name: str + namespace_name: str + table_name: str + sheet_type: SheetType + description: str | Unset = UNSET + audit_read_access: bool | Unset = False + sheet_creation_mode: None | SheetCreationMode | Unset = UNSET + columns: list[ColumnDef] | None | Unset = UNSET + file_def: FileDef | None | Unset = UNSET + view_definition: None | Unset | ViewQueryRequest = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + from ..models.file_def import FileDef + from ..models.view_query_request import ViewQueryRequest + + name = self.name + + namespace_name = self.namespace_name + + table_name = self.table_name + + sheet_type = self.sheet_type.value + + description = self.description + + audit_read_access = self.audit_read_access + + sheet_creation_mode: None | str | Unset + if isinstance(self.sheet_creation_mode, Unset): + sheet_creation_mode = UNSET + elif isinstance(self.sheet_creation_mode, SheetCreationMode): + sheet_creation_mode = self.sheet_creation_mode.value + else: + sheet_creation_mode = self.sheet_creation_mode + + columns: list[dict[str, Any]] | None | Unset + if isinstance(self.columns, Unset): + columns = UNSET + elif 
isinstance(self.columns, list): + columns = [] + for columns_type_0_item_data in self.columns: + columns_type_0_item = columns_type_0_item_data.to_dict() + columns.append(columns_type_0_item) + + else: + columns = self.columns + + file_def: dict[str, Any] | None | Unset + if isinstance(self.file_def, Unset): + file_def = UNSET + elif isinstance(self.file_def, FileDef): + file_def = self.file_def.to_dict() + else: + file_def = self.file_def + + view_definition: dict[str, Any] | None | Unset + if isinstance(self.view_definition, Unset): + view_definition = UNSET + elif isinstance(self.view_definition, ViewQueryRequest): + view_definition = self.view_definition.to_dict() + else: + view_definition = self.view_definition + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "name": name, + "namespaceName": namespace_name, + "tableName": table_name, + "sheetType": sheet_type, + } + ) + if description is not UNSET: + field_dict["description"] = description + if audit_read_access is not UNSET: + field_dict["auditReadAccess"] = audit_read_access + if sheet_creation_mode is not UNSET: + field_dict["sheetCreationMode"] = sheet_creation_mode + if columns is not UNSET: + field_dict["columns"] = columns + if file_def is not UNSET: + field_dict["fileDef"] = file_def + if view_definition is not UNSET: + field_dict["viewDefinition"] = view_definition + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.column_def import ColumnDef + from ..models.file_def import FileDef + from ..models.view_query_request import ViewQueryRequest + + d = dict(src_dict) + name = d.pop("name") + + namespace_name = d.pop("namespaceName") + + table_name = d.pop("tableName") + + sheet_type = SheetType(d.pop("sheetType")) + + description = d.pop("description", UNSET) + + audit_read_access = d.pop("auditReadAccess", UNSET) + + def _parse_sheet_creation_mode(data: object) -> None | 
SheetCreationMode | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + sheet_creation_mode_type_1 = SheetCreationMode(data) + + return sheet_creation_mode_type_1 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(None | SheetCreationMode | Unset, data) + + sheet_creation_mode = _parse_sheet_creation_mode(d.pop("sheetCreationMode", UNSET)) + + def _parse_columns(data: object) -> list[ColumnDef] | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, list): + raise TypeError() + columns_type_0 = [] + _columns_type_0 = data + for columns_type_0_item_data in _columns_type_0: + columns_type_0_item = ColumnDef.from_dict(columns_type_0_item_data) + + columns_type_0.append(columns_type_0_item) + + return columns_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(list[ColumnDef] | None | Unset, data) + + columns = _parse_columns(d.pop("columns", UNSET)) + + def _parse_file_def(data: object) -> FileDef | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, dict): + raise TypeError() + file_def_type_1 = FileDef.from_dict(data) + + return file_def_type_1 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(FileDef | None | Unset, data) + + file_def = _parse_file_def(d.pop("fileDef", UNSET)) + + def _parse_view_definition(data: object) -> None | Unset | ViewQueryRequest: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, dict): + raise TypeError() + view_definition_type_1 = ViewQueryRequest.from_dict(data) + + return view_definition_type_1 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(None | Unset | ViewQueryRequest, data) + + view_definition = 
_parse_view_definition(d.pop("viewDefinition", UNSET)) + + create_sheet_request = cls( + name=name, + namespace_name=namespace_name, + table_name=table_name, + sheet_type=sheet_type, + description=description, + audit_read_access=audit_read_access, + sheet_creation_mode=sheet_creation_mode, + columns=columns, + file_def=file_def, + view_definition=view_definition, + ) + + create_sheet_request.additional_properties = d + return create_sheet_request + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/custom_process_input.py b/cirro_api_client/v1/models/custom_process_input.py index c385ccb..099ba62 100644 --- a/cirro_api_client/v1/models/custom_process_input.py +++ b/cirro_api_client/v1/models/custom_process_input.py @@ -13,6 +13,7 @@ from ..models.custom_pipeline_settings import CustomPipelineSettings from ..models.file_mapping_rule import FileMappingRule from ..models.pipeline_code import PipelineCode + from ..models.tag import Tag T = TypeVar("T", bound="CustomProcessInput") @@ -42,6 +43,7 @@ class CustomProcessInput: uses_sample_sheet (bool | Unset): Whether the pipeline uses the Cirro-provided sample sheet custom_settings (CustomPipelineSettings | None | Unset): file_mapping_rules (list[FileMappingRule] | None | Unset): + tags (list[Tag] | None | Unset): """ id: str @@ -61,6 +63,7 @@ class CustomProcessInput: uses_sample_sheet: bool | Unset = UNSET custom_settings: CustomPipelineSettings | None | Unset = UNSET file_mapping_rules: list[FileMappingRule] | None | Unset = UNSET + tags: list[Tag] | None | Unset = UNSET 
additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -135,6 +138,18 @@ def to_dict(self) -> dict[str, Any]: else: file_mapping_rules = self.file_mapping_rules + tags: list[dict[str, Any]] | None | Unset + if isinstance(self.tags, Unset): + tags = UNSET + elif isinstance(self.tags, list): + tags = [] + for tags_type_0_item_data in self.tags: + tags_type_0_item = tags_type_0_item_data.to_dict() + tags.append(tags_type_0_item) + + else: + tags = self.tags + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -168,6 +183,8 @@ def to_dict(self) -> dict[str, Any]: field_dict["customSettings"] = custom_settings if file_mapping_rules is not UNSET: field_dict["fileMappingRules"] = file_mapping_rules + if tags is not UNSET: + field_dict["tags"] = tags return field_dict @@ -176,6 +193,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.custom_pipeline_settings import CustomPipelineSettings from ..models.file_mapping_rule import FileMappingRule from ..models.pipeline_code import PipelineCode + from ..models.tag import Tag d = dict(src_dict) id = d.pop("id") @@ -283,6 +301,28 @@ def _parse_file_mapping_rules(data: object) -> list[FileMappingRule] | None | Un file_mapping_rules = _parse_file_mapping_rules(d.pop("fileMappingRules", UNSET)) + def _parse_tags(data: object) -> list[Tag] | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, list): + raise TypeError() + tags_type_0 = [] + _tags_type_0 = data + for tags_type_0_item_data in _tags_type_0: + tags_type_0_item = Tag.from_dict(tags_type_0_item_data) + + tags_type_0.append(tags_type_0_item) + + return tags_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(list[Tag] | None | Unset, data) + + tags = _parse_tags(d.pop("tags", UNSET)) + custom_process_input = cls( id=id, 
name=name, @@ -301,6 +341,7 @@ def _parse_file_mapping_rules(data: object) -> list[FileMappingRule] | None | Un uses_sample_sheet=uses_sample_sheet, custom_settings=custom_settings, file_mapping_rules=file_mapping_rules, + tags=tags, ) custom_process_input.additional_properties = d diff --git a/cirro_api_client/v1/models/dataset_detail.py b/cirro_api_client/v1/models/dataset_detail.py index 5e482de..55d3c4f 100644 --- a/cirro_api_client/v1/models/dataset_detail.py +++ b/cirro_api_client/v1/models/dataset_detail.py @@ -50,6 +50,7 @@ class DatasetDetail: from another project. share (NamedItem | None | Unset): total_size_bytes (int | None | Unset): Total size of dataset files (in bytes) + file_count (int | None | Unset): Total number of dataset files """ id: str @@ -74,6 +75,7 @@ class DatasetDetail: originating_project_id: str | Unset = UNSET share: NamedItem | None | Unset = UNSET total_size_bytes: int | None | Unset = UNSET + file_count: int | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -139,6 +141,12 @@ def to_dict(self) -> dict[str, Any]: else: total_size_bytes = self.total_size_bytes + file_count: int | None | Unset + if isinstance(self.file_count, Unset): + file_count = UNSET + else: + file_count = self.file_count + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -170,6 +178,8 @@ def to_dict(self) -> dict[str, Any]: field_dict["share"] = share if total_size_bytes is not UNSET: field_dict["totalSizeBytes"] = total_size_bytes + if file_count is not UNSET: + field_dict["fileCount"] = file_count return field_dict @@ -258,6 +268,15 @@ def _parse_total_size_bytes(data: object) -> int | None | Unset: total_size_bytes = _parse_total_size_bytes(d.pop("totalSizeBytes", UNSET)) + def _parse_file_count(data: object) -> int | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return 
cast(int | None | Unset, data) + + file_count = _parse_file_count(d.pop("fileCount", UNSET)) + dataset_detail = cls( id=id, name=name, @@ -281,6 +300,7 @@ def _parse_total_size_bytes(data: object) -> int | None | Unset: originating_project_id=originating_project_id, share=share, total_size_bytes=total_size_bytes, + file_count=file_count, ) dataset_detail.additional_properties = d diff --git a/cirro_api_client/v1/models/dataset_viz.py b/cirro_api_client/v1/models/dataset_viz.py index 84b3e52..6f8f30c 100644 --- a/cirro_api_client/v1/models/dataset_viz.py +++ b/cirro_api_client/v1/models/dataset_viz.py @@ -1,17 +1,13 @@ from __future__ import annotations from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field from ..types import UNSET, Unset -if TYPE_CHECKING: - from ..models.dataset_viz_config import DatasetVizConfig - - T = TypeVar("T", bound="DatasetViz") @@ -23,14 +19,12 @@ class DatasetViz: name (str | Unset): Name of viz desc (str | Unset): Description of viz type_ (str | Unset): Type of viz Example: vitescce. 
- config (DatasetVizConfig | Unset): Config or path to config used to render viz """ path: str | Unset = UNSET name: str | Unset = UNSET desc: str | Unset = UNSET type_: str | Unset = UNSET - config: DatasetVizConfig | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -42,10 +36,6 @@ def to_dict(self) -> dict[str, Any]: type_ = self.type_ - config: dict[str, Any] | Unset = UNSET - if not isinstance(self.config, Unset): - config = self.config.to_dict() - field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) @@ -57,15 +47,11 @@ def to_dict(self) -> dict[str, Any]: field_dict["desc"] = desc if type_ is not UNSET: field_dict["type"] = type_ - if config is not UNSET: - field_dict["config"] = config return field_dict @classmethod def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: - from ..models.dataset_viz_config import DatasetVizConfig - d = dict(src_dict) path = d.pop("path", UNSET) @@ -75,19 +61,11 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: type_ = d.pop("type", UNSET) - _config = d.pop("config", UNSET) - config: DatasetVizConfig | Unset - if isinstance(_config, Unset): - config = UNSET - else: - config = DatasetVizConfig.from_dict(_config) - dataset_viz = cls( path=path, name=name, desc=desc, type_=type_, - config=config, ) dataset_viz.additional_properties = d diff --git a/cirro_api_client/v1/models/entity_type.py b/cirro_api_client/v1/models/entity_type.py index 3542bec..7b81d3a 100644 --- a/cirro_api_client/v1/models/entity_type.py +++ b/cirro_api_client/v1/models/entity_type.py @@ -4,7 +4,6 @@ class EntityType(str, Enum): DATASET = "DATASET" DISCUSSION = "DISCUSSION" - NOTEBOOK = "NOTEBOOK" PROCESS = "PROCESS" PROJECT = "PROJECT" REFERENCE = "REFERENCE" diff --git a/cirro_api_client/v1/models/feature_flags.py b/cirro_api_client/v1/models/feature_flags.py index 2f0bf3a..736a486 100644 --- 
a/cirro_api_client/v1/models/feature_flags.py +++ b/cirro_api_client/v1/models/feature_flags.py @@ -19,6 +19,10 @@ class FeatureFlags: workspaces_enabled (bool): drive_enabled (bool): app_registrations_enabled (bool): + sheets_enabled (bool): + ai_enabled (bool): + shared_filesystems_enabled (bool): + custom_workspace_roles_enabled (bool): """ sftp_enabled: bool @@ -27,6 +31,10 @@ class FeatureFlags: workspaces_enabled: bool drive_enabled: bool app_registrations_enabled: bool + sheets_enabled: bool + ai_enabled: bool + shared_filesystems_enabled: bool + custom_workspace_roles_enabled: bool additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -42,6 +50,14 @@ def to_dict(self) -> dict[str, Any]: app_registrations_enabled = self.app_registrations_enabled + sheets_enabled = self.sheets_enabled + + ai_enabled = self.ai_enabled + + shared_filesystems_enabled = self.shared_filesystems_enabled + + custom_workspace_roles_enabled = self.custom_workspace_roles_enabled + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -52,6 +68,10 @@ def to_dict(self) -> dict[str, Any]: "workspacesEnabled": workspaces_enabled, "driveEnabled": drive_enabled, "appRegistrationsEnabled": app_registrations_enabled, + "sheetsEnabled": sheets_enabled, + "aiEnabled": ai_enabled, + "sharedFilesystemsEnabled": shared_filesystems_enabled, + "customWorkspaceRolesEnabled": custom_workspace_roles_enabled, } ) @@ -72,6 +92,14 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: app_registrations_enabled = d.pop("appRegistrationsEnabled") + sheets_enabled = d.pop("sheetsEnabled") + + ai_enabled = d.pop("aiEnabled") + + shared_filesystems_enabled = d.pop("sharedFilesystemsEnabled") + + custom_workspace_roles_enabled = d.pop("customWorkspaceRolesEnabled") + feature_flags = cls( sftp_enabled=sftp_enabled, governance_enabled=governance_enabled, @@ -79,6 +107,10 @@ def from_dict(cls: 
type[T], src_dict: Mapping[str, Any]) -> T: workspaces_enabled=workspaces_enabled, drive_enabled=drive_enabled, app_registrations_enabled=app_registrations_enabled, + sheets_enabled=sheets_enabled, + ai_enabled=ai_enabled, + shared_filesystems_enabled=shared_filesystems_enabled, + custom_workspace_roles_enabled=custom_workspace_roles_enabled, ) feature_flags.additional_properties = d diff --git a/cirro_api_client/v1/models/file_def.py b/cirro_api_client/v1/models/file_def.py new file mode 100644 index 0000000..1dc3c8e --- /dev/null +++ b/cirro_api_client/v1/models/file_def.py @@ -0,0 +1,74 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..models.file_type import FileType +from ..types import UNSET, Unset + +T = TypeVar("T", bound="FileDef") + + +@_attrs_define +class FileDef: + """If provided, an ingest job is triggered immediately after table creation (TABLE only) + + Attributes: + file_type (FileType): + storage_uri (str | Unset): Full S3 URI to the source file. 
+ """ + + file_type: FileType + storage_uri: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + file_type = self.file_type.value + + storage_uri = self.storage_uri + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "fileType": file_type, + } + ) + if storage_uri is not UNSET: + field_dict["storageUri"] = storage_uri + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + file_type = FileType(d.pop("fileType")) + + storage_uri = d.pop("storageUri", UNSET) + + file_def = cls( + file_type=file_type, + storage_uri=storage_uri, + ) + + file_def.additional_properties = d + return file_def + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/file_requirements.py b/cirro_api_client/v1/models/file_requirements.py index 5758da2..b56658e 100644 --- a/cirro_api_client/v1/models/file_requirements.py +++ b/cirro_api_client/v1/models/file_requirements.py @@ -6,6 +6,8 @@ from attrs import define as _attrs_define from attrs import field as _attrs_field +from ..types import UNSET, Unset + if TYPE_CHECKING: from ..models.allowed_data_type import AllowedDataType @@ -20,11 +22,13 @@ class FileRequirements: files (list[str]): error_msg (str): allowed_data_types (list[AllowedDataType]): + has_error (bool | Unset): """ files: list[str] error_msg: str allowed_data_types: list[AllowedDataType] + has_error: bool | Unset = UNSET 
additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -37,6 +41,8 @@ def to_dict(self) -> dict[str, Any]: allowed_data_types_item = allowed_data_types_item_data.to_dict() allowed_data_types.append(allowed_data_types_item) + has_error = self.has_error + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -46,6 +52,8 @@ def to_dict(self) -> dict[str, Any]: "allowedDataTypes": allowed_data_types, } ) + if has_error is not UNSET: + field_dict["hasError"] = has_error return field_dict @@ -65,10 +73,13 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: allowed_data_types.append(allowed_data_types_item) + has_error = d.pop("hasError", UNSET) + file_requirements = cls( files=files, error_msg=error_msg, allowed_data_types=allowed_data_types, + has_error=has_error, ) file_requirements.additional_properties = d diff --git a/cirro_api_client/v1/models/file_type.py b/cirro_api_client/v1/models/file_type.py new file mode 100644 index 0000000..36a292c --- /dev/null +++ b/cirro_api_client/v1/models/file_type.py @@ -0,0 +1,16 @@ +from enum import Enum + + +class FileType(str, Enum): + CSV = "CSV" + JSON = "JSON" + PARQUET = "PARQUET" + UNKNOWN = "UNKNOWN" + """ This is a fallback value for when the value is not known, do not use this value when making requests """ + + def __str__(self) -> str: + return str(self.value) + + @classmethod + def _missing_(cls, number): + return cls(cls.UNKNOWN) diff --git a/cirro_api_client/v1/models/filter_operator.py b/cirro_api_client/v1/models/filter_operator.py new file mode 100644 index 0000000..a2e3675 --- /dev/null +++ b/cirro_api_client/v1/models/filter_operator.py @@ -0,0 +1,24 @@ +from enum import Enum + + +class FilterOperator(str, Enum): + EQUALS = "EQUALS" + GREATER_THAN = "GREATER_THAN" + GREATER_THAN_OR_EQUALS = "GREATER_THAN_OR_EQUALS" + IN = "IN" + IS_NOT_NULL = "IS_NOT_NULL" + IS_NULL = "IS_NULL" + LESS_THAN 
= "LESS_THAN" + LESS_THAN_OR_EQUALS = "LESS_THAN_OR_EQUALS" + LIKE = "LIKE" + NOT_EQUALS = "NOT_EQUALS" + NOT_IN = "NOT_IN" + UNKNOWN = "UNKNOWN" + """ This is a fallback value for when the value is not known, do not use this value when making requests """ + + def __str__(self) -> str: + return str(self.value) + + @classmethod + def _missing_(cls, number): + return cls(cls.UNKNOWN) diff --git a/cirro_api_client/v1/models/notebook_instance_status_response.py b/cirro_api_client/v1/models/foreign_key_ref.py similarity index 63% rename from cirro_api_client/v1/models/notebook_instance_status_response.py rename to cirro_api_client/v1/models/foreign_key_ref.py index df0ef2d..a006f66 100644 --- a/cirro_api_client/v1/models/notebook_instance_status_response.py +++ b/cirro_api_client/v1/models/foreign_key_ref.py @@ -6,32 +6,32 @@ from attrs import define as _attrs_define from attrs import field as _attrs_field -T = TypeVar("T", bound="NotebookInstanceStatusResponse") +T = TypeVar("T", bound="ForeignKeyRef") @_attrs_define -class NotebookInstanceStatusResponse: +class ForeignKeyRef: """ Attributes: - status (str): - status_message (str): + sheet_id (str): + column_name (str): """ - status: str - status_message: str + sheet_id: str + column_name: str additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: - status = self.status + sheet_id = self.sheet_id - status_message = self.status_message + column_name = self.column_name field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( { - "status": status, - "statusMessage": status_message, + "sheetId": sheet_id, + "columnName": column_name, } ) @@ -40,17 +40,17 @@ def to_dict(self) -> dict[str, Any]: @classmethod def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) - status = d.pop("status") + sheet_id = d.pop("sheetId") - status_message = d.pop("statusMessage") + column_name = d.pop("columnName") - 
notebook_instance_status_response = cls( - status=status, - status_message=status_message, + foreign_key_ref = cls( + sheet_id=sheet_id, + column_name=column_name, ) - notebook_instance_status_response.additional_properties = d - return notebook_instance_status_response + foreign_key_ref.additional_properties = d + return foreign_key_ref @property def additional_keys(self) -> list[str]: diff --git a/cirro_api_client/v1/models/form_schema.py b/cirro_api_client/v1/models/form_schema.py index b05d9ad..caeaf63 100644 --- a/cirro_api_client/v1/models/form_schema.py +++ b/cirro_api_client/v1/models/form_schema.py @@ -10,6 +10,7 @@ if TYPE_CHECKING: from ..models.form_schema_form import FormSchemaForm + from ..models.form_schema_metadata_requirements import FormSchemaMetadataRequirements from ..models.form_schema_ui import FormSchemaUi @@ -22,10 +23,12 @@ class FormSchema: Attributes: form (FormSchemaForm | Unset): JSONSchema representation of the parameters ui (FormSchemaUi | Unset): Describes how the form should be rendered, see rjsf + metadata_requirements (FormSchemaMetadataRequirements | Unset): JSONSchema for validating sample metadata """ form: FormSchemaForm | Unset = UNSET ui: FormSchemaUi | Unset = UNSET + metadata_requirements: FormSchemaMetadataRequirements | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -37,6 +40,10 @@ def to_dict(self) -> dict[str, Any]: if not isinstance(self.ui, Unset): ui = self.ui.to_dict() + metadata_requirements: dict[str, Any] | Unset = UNSET + if not isinstance(self.metadata_requirements, Unset): + metadata_requirements = self.metadata_requirements.to_dict() + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) @@ -44,12 +51,15 @@ def to_dict(self) -> dict[str, Any]: field_dict["form"] = form if ui is not UNSET: field_dict["ui"] = ui + if metadata_requirements is not UNSET: + 
field_dict["metadataRequirements"] = metadata_requirements return field_dict @classmethod def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.form_schema_form import FormSchemaForm + from ..models.form_schema_metadata_requirements import FormSchemaMetadataRequirements from ..models.form_schema_ui import FormSchemaUi d = dict(src_dict) @@ -67,9 +77,17 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: else: ui = FormSchemaUi.from_dict(_ui) + _metadata_requirements = d.pop("metadataRequirements", UNSET) + metadata_requirements: FormSchemaMetadataRequirements | Unset + if isinstance(_metadata_requirements, Unset): + metadata_requirements = UNSET + else: + metadata_requirements = FormSchemaMetadataRequirements.from_dict(_metadata_requirements) + form_schema = cls( form=form, ui=ui, + metadata_requirements=metadata_requirements, ) form_schema.additional_properties = d diff --git a/cirro_api_client/v1/models/form_schema_metadata_requirements.py b/cirro_api_client/v1/models/form_schema_metadata_requirements.py new file mode 100644 index 0000000..322bc83 --- /dev/null +++ b/cirro_api_client/v1/models/form_schema_metadata_requirements.py @@ -0,0 +1,46 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="FormSchemaMetadataRequirements") + + +@_attrs_define +class FormSchemaMetadataRequirements: + """JSONSchema for validating sample metadata""" + + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + form_schema_metadata_requirements = cls() + + 
form_schema_metadata_requirements.additional_properties = d + return form_schema_metadata_requirements + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/join_condition.py b/cirro_api_client/v1/models/join_condition.py new file mode 100644 index 0000000..158efe4 --- /dev/null +++ b/cirro_api_client/v1/models/join_condition.py @@ -0,0 +1,69 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="JoinCondition") + + +@_attrs_define +class JoinCondition: + """ + Attributes: + left_column (str): Qualified column reference in alias.column format Example: s1.patient_id. + right_column (str): Qualified column reference in alias.column format Example: s2.patient_id. 
+ """ + + left_column: str + right_column: str + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + left_column = self.left_column + + right_column = self.right_column + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "leftColumn": left_column, + "rightColumn": right_column, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + left_column = d.pop("leftColumn") + + right_column = d.pop("rightColumn") + + join_condition = cls( + left_column=left_column, + right_column=right_column, + ) + + join_condition.additional_properties = d + return join_condition + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/join_type.py b/cirro_api_client/v1/models/join_type.py new file mode 100644 index 0000000..8da026e --- /dev/null +++ b/cirro_api_client/v1/models/join_type.py @@ -0,0 +1,17 @@ +from enum import Enum + + +class JoinType(str, Enum): + FULL = "FULL" + INNER = "INNER" + LEFT = "LEFT" + RIGHT = "RIGHT" + UNKNOWN = "UNKNOWN" + """ This is a fallback value for when the value is not known, do not use this value when making requests """ + + def __str__(self) -> str: + return str(self.value) + + @classmethod + def _missing_(cls, number): + return cls(cls.UNKNOWN) diff --git a/cirro_api_client/v1/models/list_events_entity_type.py b/cirro_api_client/v1/models/list_events_entity_type.py index 91e5d4a..d2b59af 100644 --- 
a/cirro_api_client/v1/models/list_events_entity_type.py +++ b/cirro_api_client/v1/models/list_events_entity_type.py @@ -4,12 +4,12 @@ class ListEventsEntityType(str, Enum): BILLINGACCOUNT = "BillingAccount" DATASET = "Dataset" - NOTEBOOKINSTANCE = "NotebookInstance" PROCESS = "Process" PROJECT = "Project" SAMPLE = "Sample" USER = "User" USERPROJECTASSIGNMENT = "UserProjectAssignment" + WORKSPACE = "Workspace" UNKNOWN = "UNKNOWN" """ This is a fallback value for when the value is not known, do not use this value when making requests """ diff --git a/cirro_api_client/v1/models/logical_operator.py b/cirro_api_client/v1/models/logical_operator.py new file mode 100644 index 0000000..dd01a96 --- /dev/null +++ b/cirro_api_client/v1/models/logical_operator.py @@ -0,0 +1,15 @@ +from enum import Enum + + +class LogicalOperator(str, Enum): + AND = "AND" + OR = "OR" + UNKNOWN = "UNKNOWN" + """ This is a fallback value for when the value is not known, do not use this value when making requests """ + + def __str__(self) -> str: + return str(self.value) + + @classmethod + def _missing_(cls, number): + return cls(cls.UNKNOWN) diff --git a/cirro_api_client/v1/models/mounted_dataset.py b/cirro_api_client/v1/models/mounted_dataset.py index f7e88bf..1557210 100644 --- a/cirro_api_client/v1/models/mounted_dataset.py +++ b/cirro_api_client/v1/models/mounted_dataset.py @@ -19,11 +19,13 @@ class MountedDataset: name (str): Folder name that appears in the workspace dataset_id (None | str | Unset): ID of the dataset to mount custom_uri (None | str | Unset): Full S3 URI to mounted data (if mounting custom path) + shared_filesystem_id (None | str | Unset): ID of a shared filesystem to mount (read-write) """ name: str dataset_id: None | str | Unset = UNSET custom_uri: None | str | Unset = UNSET + shared_filesystem_id: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -41,6 +43,12 @@ def 
to_dict(self) -> dict[str, Any]: else: custom_uri = self.custom_uri + shared_filesystem_id: None | str | Unset + if isinstance(self.shared_filesystem_id, Unset): + shared_filesystem_id = UNSET + else: + shared_filesystem_id = self.shared_filesystem_id + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -52,6 +60,8 @@ def to_dict(self) -> dict[str, Any]: field_dict["datasetId"] = dataset_id if custom_uri is not UNSET: field_dict["customUri"] = custom_uri + if shared_filesystem_id is not UNSET: + field_dict["sharedFilesystemId"] = shared_filesystem_id return field_dict @@ -78,10 +88,20 @@ def _parse_custom_uri(data: object) -> None | str | Unset: custom_uri = _parse_custom_uri(d.pop("customUri", UNSET)) + def _parse_shared_filesystem_id(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + shared_filesystem_id = _parse_shared_filesystem_id(d.pop("sharedFilesystemId", UNSET)) + mounted_dataset = cls( name=name, dataset_id=dataset_id, custom_uri=custom_uri, + shared_filesystem_id=shared_filesystem_id, ) mounted_dataset.additional_properties = d diff --git a/cirro_api_client/v1/models/permission.py b/cirro_api_client/v1/models/permission.py index c825505..bc2b9e4 100644 --- a/cirro_api_client/v1/models/permission.py +++ b/cirro_api_client/v1/models/permission.py @@ -2,7 +2,6 @@ class Permission(str, Enum): - CONTROL_NOTEBOOK_INSTANCE = "CONTROL_NOTEBOOK_INSTANCE" CONTROL_WORKSPACE = "CONTROL_WORKSPACE" CREATE_BILLING_ACCOUNT = "CREATE_BILLING_ACCOUNT" CREATE_BYOA_PROJECT = "CREATE_BYOA_PROJECT" @@ -13,11 +12,12 @@ class Permission(str, Enum): CREATE_DATASET = "CREATE_DATASET" CREATE_DISCUSSION = "CREATE_DISCUSSION" CREATE_HOSTED_PROJECT = "CREATE_HOSTED_PROJECT" - CREATE_NOTEBOOK_INSTANCE = "CREATE_NOTEBOOK_INSTANCE" + CREATE_SHARED_FILESYSTEM = "CREATE_SHARED_FILESYSTEM" + CREATE_SHEET = "CREATE_SHEET" 
CREATE_WORKSPACE = "CREATE_WORKSPACE" DELETE_DATASET = "DELETE_DATASET" - DELETE_NOTEBOOK_INSTANCE = "DELETE_NOTEBOOK_INSTANCE" DELETE_PROJECT = "DELETE_PROJECT" + DELETE_SHEET = "DELETE_SHEET" DELETE_WORKSPACE = "DELETE_WORKSPACE" EDIT_DASHBOARD = "EDIT_DASHBOARD" EDIT_DATASET = "EDIT_DATASET" @@ -25,6 +25,7 @@ class Permission(str, Enum): EDIT_PROJECT_MEMBERS = "EDIT_PROJECT_MEMBERS" EDIT_PROJECT_METADATA = "EDIT_PROJECT_METADATA" EDIT_PROJECT_REFERENCES = "EDIT_PROJECT_REFERENCES" + EDIT_SHEET = "EDIT_SHEET" GENERATE_DOWNLOAD_TOKEN = "GENERATE_DOWNLOAD_TOKEN" INVITE_MEMBER = "INVITE_MEMBER" MANAGE_AGENTS = "MANAGE_AGENTS" @@ -35,8 +36,9 @@ class Permission(str, Enum): MANAGE_MEMBERS = "MANAGE_MEMBERS" MANAGE_OWN_APP_REGISTRATIONS = "MANAGE_OWN_APP_REGISTRATIONS" MANAGE_PROJECT_SHARES = "MANAGE_PROJECT_SHARES" - OPEN_NOTEBOOK_INSTANCE = "OPEN_NOTEBOOK_INSTANCE" + MANAGE_SHARED_FILESYSTEMS = "MANAGE_SHARED_FILESYSTEMS" OPEN_WORKSPACE = "OPEN_WORKSPACE" + READ_SCRATCH_BUCKET = "READ_SCRATCH_BUCKET" REQUEST_PROJECT = "REQUEST_PROJECT" RUN_ANALYSIS = "RUN_ANALYSIS" SEARCH_MEMBERS = "SEARCH_MEMBERS" @@ -59,6 +61,7 @@ class Permission(str, Enum): VIEW_PROJECT_REFERENCES = "VIEW_PROJECT_REFERENCES" VIEW_PROJECT_SHARES = "VIEW_PROJECT_SHARES" VIEW_SERVICE_CONNECTIONS = "VIEW_SERVICE_CONNECTIONS" + VIEW_SHEET = "VIEW_SHEET" VIEW_WORKSPACE_ENVIRONMENTS = "VIEW_WORKSPACE_ENVIRONMENTS" UNKNOWN = "UNKNOWN" """ This is a fallback value for when the value is not known, do not use this value when making requests """ diff --git a/cirro_api_client/v1/models/pipeline_code.py b/cirro_api_client/v1/models/pipeline_code.py index 2b49a0b..7400ab8 100644 --- a/cirro_api_client/v1/models/pipeline_code.py +++ b/cirro_api_client/v1/models/pipeline_code.py @@ -1,12 +1,13 @@ from __future__ import annotations from collections.abc import Mapping -from typing import Any, TypeVar +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as 
_attrs_field from ..models.repository_type import RepositoryType +from ..types import UNSET, Unset T = TypeVar("T", bound="PipelineCode") @@ -20,12 +21,14 @@ class PipelineCode: version (str): Branch, tag, or commit hash of the pipeline code Example: main. repository_type (RepositoryType): Type of repository entry_point (str): Main script for running the pipeline Example: main.nf. + executor_version (None | str | Unset): Version of the executor Example: 24.10.5. """ repository_path: str version: str repository_type: RepositoryType entry_point: str + executor_version: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -37,6 +40,12 @@ def to_dict(self) -> dict[str, Any]: entry_point = self.entry_point + executor_version: None | str | Unset + if isinstance(self.executor_version, Unset): + executor_version = UNSET + else: + executor_version = self.executor_version + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -47,6 +56,8 @@ def to_dict(self) -> dict[str, Any]: "entryPoint": entry_point, } ) + if executor_version is not UNSET: + field_dict["executorVersion"] = executor_version return field_dict @@ -61,11 +72,21 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: entry_point = d.pop("entryPoint") + def _parse_executor_version(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + executor_version = _parse_executor_version(d.pop("executorVersion", UNSET)) + pipeline_code = cls( repository_path=repository_path, version=version, repository_type=repository_type, entry_point=entry_point, + executor_version=executor_version, ) pipeline_code.additional_properties = d diff --git a/cirro_api_client/v1/models/process.py b/cirro_api_client/v1/models/process.py index e75cc0a..f4cd5ea 100644 --- 
a/cirro_api_client/v1/models/process.py +++ b/cirro_api_client/v1/models/process.py @@ -2,7 +2,7 @@ import datetime from collections.abc import Mapping -from typing import Any, TypeVar, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -11,6 +11,10 @@ from ..models.executor import Executor from ..types import UNSET, Unset +if TYPE_CHECKING: + from ..models.tag import Tag + + T = TypeVar("T", bound="Process") @@ -32,6 +36,7 @@ class Process: allow_multiple_sources (bool): Whether the pipeline is allowed to have multiple dataset sources uses_sample_sheet (bool): Whether the pipeline uses the Cirro-provided sample sheet is_archived (bool): Whether the process is marked as archived + tags (list[Tag]): category (str | Unset): Category of the process Example: Microbial Analysis. pipeline_type (str | Unset): Type of pipeline Example: nf-core. documentation_url (str | Unset): Link to process documentation Example: @@ -54,6 +59,7 @@ class Process: allow_multiple_sources: bool uses_sample_sheet: bool is_archived: bool + tags: list[Tag] category: str | Unset = UNSET pipeline_type: str | Unset = UNSET documentation_url: str | Unset = UNSET @@ -88,6 +94,11 @@ def to_dict(self) -> dict[str, Any]: is_archived = self.is_archived + tags = [] + for tags_item_data in self.tags: + tags_item = tags_item_data.to_dict() + tags.append(tags_item) + category = self.category pipeline_type = self.pipeline_type @@ -126,6 +137,7 @@ def to_dict(self) -> dict[str, Any]: "allowMultipleSources": allow_multiple_sources, "usesSampleSheet": uses_sample_sheet, "isArchived": is_archived, + "tags": tags, } ) if category is not UNSET: @@ -147,6 +159,8 @@ def to_dict(self) -> dict[str, Any]: @classmethod def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.tag import Tag + d = dict(src_dict) id = d.pop("id") @@ -172,6 +186,13 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> 
T: is_archived = d.pop("isArchived") + tags = [] + _tags = d.pop("tags") + for tags_item_data in _tags: + tags_item = Tag.from_dict(tags_item_data) + + tags.append(tags_item) + category = d.pop("category", UNSET) pipeline_type = d.pop("pipelineType", UNSET) @@ -216,6 +237,7 @@ def _parse_owner(data: object) -> None | str | Unset: allow_multiple_sources=allow_multiple_sources, uses_sample_sheet=uses_sample_sheet, is_archived=is_archived, + tags=tags, category=category, pipeline_type=pipeline_type, documentation_url=documentation_url, diff --git a/cirro_api_client/v1/models/process_detail.py b/cirro_api_client/v1/models/process_detail.py index 5349b37..0f75eee 100644 --- a/cirro_api_client/v1/models/process_detail.py +++ b/cirro_api_client/v1/models/process_detail.py @@ -15,6 +15,7 @@ from ..models.custom_pipeline_settings import CustomPipelineSettings from ..models.file_mapping_rule import FileMappingRule from ..models.pipeline_code import PipelineCode + from ..models.tag import Tag T = TypeVar("T", bound="ProcessDetail") @@ -38,6 +39,7 @@ class ProcessDetail: allow_multiple_sources (bool): Whether the pipeline is allowed to have multiple dataset sources uses_sample_sheet (bool): Whether the pipeline uses the Cirro-provided sample sheet is_archived (bool): Whether the process is marked as archived + tags (list[Tag]): category (str | Unset): Category of the process Example: Microbial Analysis. pipeline_type (str | Unset): Type of pipeline Example: nf-core. 
documentation_url (str | Unset): Link to process documentation Example: @@ -63,6 +65,7 @@ class ProcessDetail: allow_multiple_sources: bool uses_sample_sheet: bool is_archived: bool + tags: list[Tag] category: str | Unset = UNSET pipeline_type: str | Unset = UNSET documentation_url: str | Unset = UNSET @@ -103,6 +106,11 @@ def to_dict(self) -> dict[str, Any]: is_archived = self.is_archived + tags = [] + for tags_item_data in self.tags: + tags_item = tags_item_data.to_dict() + tags.append(tags_item) + category = self.category pipeline_type = self.pipeline_type @@ -169,6 +177,7 @@ def to_dict(self) -> dict[str, Any]: "allowMultipleSources": allow_multiple_sources, "usesSampleSheet": uses_sample_sheet, "isArchived": is_archived, + "tags": tags, } ) if category is not UNSET: @@ -199,6 +208,7 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.custom_pipeline_settings import CustomPipelineSettings from ..models.file_mapping_rule import FileMappingRule from ..models.pipeline_code import PipelineCode + from ..models.tag import Tag d = dict(src_dict) id = d.pop("id") @@ -225,6 +235,13 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: is_archived = d.pop("isArchived") + tags = [] + _tags = d.pop("tags") + for tags_item_data in _tags: + tags_item = Tag.from_dict(tags_item_data) + + tags.append(tags_item) + category = d.pop("category", UNSET) pipeline_type = d.pop("pipelineType", UNSET) @@ -325,6 +342,7 @@ def _parse_file_mapping_rules(data: object) -> list[FileMappingRule] | None | Un allow_multiple_sources=allow_multiple_sources, uses_sample_sheet=uses_sample_sheet, is_archived=is_archived, + tags=tags, category=category, pipeline_type=pipeline_type, documentation_url=documentation_url, diff --git a/cirro_api_client/v1/models/project_access_type.py b/cirro_api_client/v1/models/project_access_type.py index 1af9048..eee5ae3 100644 --- a/cirro_api_client/v1/models/project_access_type.py +++ 
b/cirro_api_client/v1/models/project_access_type.py @@ -4,9 +4,11 @@ class ProjectAccessType(str, Enum): DATASET_UPLOAD = "DATASET_UPLOAD" PROJECT_DOWNLOAD = "PROJECT_DOWNLOAD" + READ_SCRATCH = "READ_SCRATCH" REFERENCE_UPLOAD = "REFERENCE_UPLOAD" SAMPLESHEET_UPLOAD = "SAMPLESHEET_UPLOAD" SHARED_DATASET_DOWNLOAD = "SHARED_DATASET_DOWNLOAD" + SHEET_UPLOAD = "SHEET_UPLOAD" UNKNOWN = "UNKNOWN" """ This is a fallback value for when the value is not known, do not use this value when making requests """ diff --git a/cirro_api_client/v1/models/project_file_access_request.py b/cirro_api_client/v1/models/project_file_access_request.py index 4cd7108..58a40cb 100644 --- a/cirro_api_client/v1/models/project_file_access_request.py +++ b/cirro_api_client/v1/models/project_file_access_request.py @@ -18,11 +18,13 @@ class ProjectFileAccessRequest: Attributes: access_type (ProjectAccessType): dataset_id (None | str | Unset): + sheet_id (None | str | Unset): token_lifetime_hours (int | None | Unset): """ access_type: ProjectAccessType dataset_id: None | str | Unset = UNSET + sheet_id: None | str | Unset = UNSET token_lifetime_hours: int | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) @@ -35,6 +37,12 @@ def to_dict(self) -> dict[str, Any]: else: dataset_id = self.dataset_id + sheet_id: None | str | Unset + if isinstance(self.sheet_id, Unset): + sheet_id = UNSET + else: + sheet_id = self.sheet_id + token_lifetime_hours: int | None | Unset if isinstance(self.token_lifetime_hours, Unset): token_lifetime_hours = UNSET @@ -50,6 +58,8 @@ def to_dict(self) -> dict[str, Any]: ) if dataset_id is not UNSET: field_dict["datasetId"] = dataset_id + if sheet_id is not UNSET: + field_dict["sheetId"] = sheet_id if token_lifetime_hours is not UNSET: field_dict["tokenLifetimeHours"] = token_lifetime_hours @@ -69,6 +79,15 @@ def _parse_dataset_id(data: object) -> None | str | Unset: dataset_id = _parse_dataset_id(d.pop("datasetId", UNSET)) + def 
_parse_sheet_id(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + sheet_id = _parse_sheet_id(d.pop("sheetId", UNSET)) + def _parse_token_lifetime_hours(data: object) -> int | None | Unset: if data is None: return data @@ -81,6 +100,7 @@ def _parse_token_lifetime_hours(data: object) -> int | None | Unset: project_file_access_request = cls( access_type=access_type, dataset_id=dataset_id, + sheet_id=sheet_id, token_lifetime_hours=token_lifetime_hours, ) diff --git a/cirro_api_client/v1/models/project_settings.py b/cirro_api_client/v1/models/project_settings.py index 463da85..6147a14 100644 --- a/cirro_api_client/v1/models/project_settings.py +++ b/cirro_api_client/v1/models/project_settings.py @@ -29,8 +29,6 @@ class ProjectSettings: vpc_id (None | str | Unset): VPC that the compute environment will use Example: vpc-00000000000000000. batch_subnets (list[str] | None | Unset): List of subnets that the pipeline compute environment will use Example: ['subnet-00000000000000000']. - sagemaker_subnets (list[str] | None | Unset): List of subnets that the sagemaker instances will use Example: - ['subnet-00000000000000000']. workspace_subnets (list[str] | None | Unset): List of subnets that workspace instances will use Example: ['subnet-00000000000000000']. max_spot_vcpu (int | Unset): vCPU service quota limit for standard spot instances (pipelines) Default: 0. @@ -42,12 +40,20 @@ class ProjectSettings: max_workspaces_gpuvcpu (int | Unset): vCPU service quota limit for GPU-enabled instances (workspaces) Default: 0. max_workspaces_per_user (int | Unset): Maximum number of workspaces per user (workspaces) Default: 0. + enable_advanced_gpu_config (bool | None | Unset): Enables advanced GPU configuration (multi-GPU and GPU model + selection) for workspaces Default: False. 
+ enable_custom_workspace_roles (bool | None | Unset): Enables custom IAM task roles for workspaces (BYOA projects + only) Default: False. + max_shared_filesystems (int | Unset): Maximum number of shared filesystems for this project Default: 0. is_discoverable (bool | None | Unset): Enables the project to be discoverable by other users Default: False. is_shareable (bool | None | Unset): Enables the project to be shared with other projects Default: False. + is_ai_enabled (bool | None | Unset): Allows users of this project to interact with AI services Default: False. has_pipelines_enabled (bool | None | Unset): (Read-only) Whether this project has pipelines enabled Default: False. has_workspaces_enabled (bool | None | Unset): (Read-only) Whether this project has workspaces enabled Default: False. + has_shared_filesystems_enabled (bool | None | Unset): (Read-only) Whether this project has shared filesystems + enabled Default: False. """ budget_amount: int @@ -60,7 +66,6 @@ class ProjectSettings: temporary_storage_lifetime_days: int | Unset = 14 vpc_id: None | str | Unset = UNSET batch_subnets: list[str] | None | Unset = UNSET - sagemaker_subnets: list[str] | None | Unset = UNSET workspace_subnets: list[str] | None | Unset = UNSET max_spot_vcpu: int | Unset = 0 max_fpgavcpu: int | Unset = 0 @@ -70,10 +75,15 @@ class ProjectSettings: max_workspaces_vcpu: int | Unset = 0 max_workspaces_gpuvcpu: int | Unset = 0 max_workspaces_per_user: int | Unset = 0 + enable_advanced_gpu_config: bool | None | Unset = False + enable_custom_workspace_roles: bool | None | Unset = False + max_shared_filesystems: int | Unset = 0 is_discoverable: bool | None | Unset = False is_shareable: bool | None | Unset = False + is_ai_enabled: bool | None | Unset = False has_pipelines_enabled: bool | None | Unset = False has_workspaces_enabled: bool | None | Unset = False + has_shared_filesystems_enabled: bool | None | Unset = False additional_properties: dict[str, Any] = _attrs_field(init=False, 
factory=dict) def to_dict(self) -> dict[str, Any]: @@ -114,15 +124,6 @@ def to_dict(self) -> dict[str, Any]: else: batch_subnets = self.batch_subnets - sagemaker_subnets: list[str] | None | Unset - if isinstance(self.sagemaker_subnets, Unset): - sagemaker_subnets = UNSET - elif isinstance(self.sagemaker_subnets, list): - sagemaker_subnets = self.sagemaker_subnets - - else: - sagemaker_subnets = self.sagemaker_subnets - workspace_subnets: list[str] | None | Unset if isinstance(self.workspace_subnets, Unset): workspace_subnets = UNSET @@ -152,6 +153,20 @@ def to_dict(self) -> dict[str, Any]: max_workspaces_per_user = self.max_workspaces_per_user + enable_advanced_gpu_config: bool | None | Unset + if isinstance(self.enable_advanced_gpu_config, Unset): + enable_advanced_gpu_config = UNSET + else: + enable_advanced_gpu_config = self.enable_advanced_gpu_config + + enable_custom_workspace_roles: bool | None | Unset + if isinstance(self.enable_custom_workspace_roles, Unset): + enable_custom_workspace_roles = UNSET + else: + enable_custom_workspace_roles = self.enable_custom_workspace_roles + + max_shared_filesystems = self.max_shared_filesystems + is_discoverable: bool | None | Unset if isinstance(self.is_discoverable, Unset): is_discoverable = UNSET @@ -164,6 +179,12 @@ def to_dict(self) -> dict[str, Any]: else: is_shareable = self.is_shareable + is_ai_enabled: bool | None | Unset + if isinstance(self.is_ai_enabled, Unset): + is_ai_enabled = UNSET + else: + is_ai_enabled = self.is_ai_enabled + has_pipelines_enabled: bool | None | Unset if isinstance(self.has_pipelines_enabled, Unset): has_pipelines_enabled = UNSET @@ -176,6 +197,12 @@ def to_dict(self) -> dict[str, Any]: else: has_workspaces_enabled = self.has_workspaces_enabled + has_shared_filesystems_enabled: bool | None | Unset + if isinstance(self.has_shared_filesystems_enabled, Unset): + has_shared_filesystems_enabled = UNSET + else: + has_shared_filesystems_enabled = self.has_shared_filesystems_enabled + 
field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -200,8 +227,6 @@ def to_dict(self) -> dict[str, Any]: field_dict["vpcId"] = vpc_id if batch_subnets is not UNSET: field_dict["batchSubnets"] = batch_subnets - if sagemaker_subnets is not UNSET: - field_dict["sagemakerSubnets"] = sagemaker_subnets if workspace_subnets is not UNSET: field_dict["workspaceSubnets"] = workspace_subnets if max_spot_vcpu is not UNSET: @@ -220,14 +245,24 @@ def to_dict(self) -> dict[str, Any]: field_dict["maxWorkspacesGPUVCPU"] = max_workspaces_gpuvcpu if max_workspaces_per_user is not UNSET: field_dict["maxWorkspacesPerUser"] = max_workspaces_per_user + if enable_advanced_gpu_config is not UNSET: + field_dict["enableAdvancedGpuConfig"] = enable_advanced_gpu_config + if enable_custom_workspace_roles is not UNSET: + field_dict["enableCustomWorkspaceRoles"] = enable_custom_workspace_roles + if max_shared_filesystems is not UNSET: + field_dict["maxSharedFilesystems"] = max_shared_filesystems if is_discoverable is not UNSET: field_dict["isDiscoverable"] = is_discoverable if is_shareable is not UNSET: field_dict["isShareable"] = is_shareable + if is_ai_enabled is not UNSET: + field_dict["isAiEnabled"] = is_ai_enabled if has_pipelines_enabled is not UNSET: field_dict["hasPipelinesEnabled"] = has_pipelines_enabled if has_workspaces_enabled is not UNSET: field_dict["hasWorkspacesEnabled"] = has_workspaces_enabled + if has_shared_filesystems_enabled is not UNSET: + field_dict["hasSharedFilesystemsEnabled"] = has_shared_filesystems_enabled return field_dict @@ -283,23 +318,6 @@ def _parse_batch_subnets(data: object) -> list[str] | None | Unset: batch_subnets = _parse_batch_subnets(d.pop("batchSubnets", UNSET)) - def _parse_sagemaker_subnets(data: object) -> list[str] | None | Unset: - if data is None: - return data - if isinstance(data, Unset): - return data - try: - if not isinstance(data, list): - raise TypeError() - sagemaker_subnets_type_0 = 
cast(list[str], data) - - return sagemaker_subnets_type_0 - except (TypeError, ValueError, AttributeError, KeyError): - pass - return cast(list[str] | None | Unset, data) - - sagemaker_subnets = _parse_sagemaker_subnets(d.pop("sagemakerSubnets", UNSET)) - def _parse_workspace_subnets(data: object) -> list[str] | None | Unset: if data is None: return data @@ -340,6 +358,26 @@ def _parse_dragen_ami(data: object) -> None | str | Unset: max_workspaces_per_user = d.pop("maxWorkspacesPerUser", UNSET) + def _parse_enable_advanced_gpu_config(data: object) -> bool | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(bool | None | Unset, data) + + enable_advanced_gpu_config = _parse_enable_advanced_gpu_config(d.pop("enableAdvancedGpuConfig", UNSET)) + + def _parse_enable_custom_workspace_roles(data: object) -> bool | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(bool | None | Unset, data) + + enable_custom_workspace_roles = _parse_enable_custom_workspace_roles(d.pop("enableCustomWorkspaceRoles", UNSET)) + + max_shared_filesystems = d.pop("maxSharedFilesystems", UNSET) + def _parse_is_discoverable(data: object) -> bool | None | Unset: if data is None: return data @@ -358,6 +396,15 @@ def _parse_is_shareable(data: object) -> bool | None | Unset: is_shareable = _parse_is_shareable(d.pop("isShareable", UNSET)) + def _parse_is_ai_enabled(data: object) -> bool | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(bool | None | Unset, data) + + is_ai_enabled = _parse_is_ai_enabled(d.pop("isAiEnabled", UNSET)) + def _parse_has_pipelines_enabled(data: object) -> bool | None | Unset: if data is None: return data @@ -376,6 +423,17 @@ def _parse_has_workspaces_enabled(data: object) -> bool | None | Unset: has_workspaces_enabled = _parse_has_workspaces_enabled(d.pop("hasWorkspacesEnabled", UNSET)) + def 
_parse_has_shared_filesystems_enabled(data: object) -> bool | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(bool | None | Unset, data) + + has_shared_filesystems_enabled = _parse_has_shared_filesystems_enabled( + d.pop("hasSharedFilesystemsEnabled", UNSET) + ) + project_settings = cls( budget_amount=budget_amount, budget_period=budget_period, @@ -387,7 +445,6 @@ def _parse_has_workspaces_enabled(data: object) -> bool | None | Unset: temporary_storage_lifetime_days=temporary_storage_lifetime_days, vpc_id=vpc_id, batch_subnets=batch_subnets, - sagemaker_subnets=sagemaker_subnets, workspace_subnets=workspace_subnets, max_spot_vcpu=max_spot_vcpu, max_fpgavcpu=max_fpgavcpu, @@ -397,10 +454,15 @@ def _parse_has_workspaces_enabled(data: object) -> bool | None | Unset: max_workspaces_vcpu=max_workspaces_vcpu, max_workspaces_gpuvcpu=max_workspaces_gpuvcpu, max_workspaces_per_user=max_workspaces_per_user, + enable_advanced_gpu_config=enable_advanced_gpu_config, + enable_custom_workspace_roles=enable_custom_workspace_roles, + max_shared_filesystems=max_shared_filesystems, is_discoverable=is_discoverable, is_shareable=is_shareable, + is_ai_enabled=is_ai_enabled, has_pipelines_enabled=has_pipelines_enabled, has_workspaces_enabled=has_workspaces_enabled, + has_shared_filesystems_enabled=has_shared_filesystems_enabled, ) project_settings.additional_properties = d diff --git a/cirro_api_client/v1/models/query_column.py b/cirro_api_client/v1/models/query_column.py new file mode 100644 index 0000000..ebf79db --- /dev/null +++ b/cirro_api_client/v1/models/query_column.py @@ -0,0 +1,72 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..models.column_data_type import ColumnDataType + +T = TypeVar("T", bound="QueryColumn") + + +@_attrs_define +class QueryColumn: + """A column 
in a sheet query result. + + Attributes: + name (str): Column name. + data_type (ColumnDataType): + """ + + name: str + data_type: ColumnDataType + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + name = self.name + + data_type = self.data_type.value + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "name": name, + "dataType": data_type, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + name = d.pop("name") + + data_type = ColumnDataType(d.pop("dataType")) + + query_column = cls( + name=name, + data_type=data_type, + ) + + query_column.additional_properties = d + return query_column + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/request_quota_increase_command.py b/cirro_api_client/v1/models/request_quota_increase_command.py new file mode 100644 index 0000000..4cc4ba2 --- /dev/null +++ b/cirro_api_client/v1/models/request_quota_increase_command.py @@ -0,0 +1,77 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="RequestQuotaIncreaseCommand") + + +@_attrs_define +class RequestQuotaIncreaseCommand: + """ + Attributes: + service_code (str): + quota_code (str): + value (float): + """ + + service_code: str + quota_code: str + value: float + 
additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + service_code = self.service_code + + quota_code = self.quota_code + + value = self.value + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "serviceCode": service_code, + "quotaCode": quota_code, + "value": value, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + service_code = d.pop("serviceCode") + + quota_code = d.pop("quotaCode") + + value = d.pop("value") + + request_quota_increase_command = cls( + service_code=service_code, + quota_code=quota_code, + value=value, + ) + + request_quota_increase_command.additional_properties = d + return request_quota_increase_command + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/request_quota_increase_response.py b/cirro_api_client/v1/models/request_quota_increase_response.py new file mode 100644 index 0000000..f3ffbfd --- /dev/null +++ b/cirro_api_client/v1/models/request_quota_increase_response.py @@ -0,0 +1,67 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +if TYPE_CHECKING: + from ..models.cloud_quota import CloudQuota + + +T = TypeVar("T", bound="RequestQuotaIncreaseResponse") + + +@_attrs_define +class RequestQuotaIncreaseResponse: + """ + 
Attributes: + quota (CloudQuota): + """ + + quota: CloudQuota + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + quota = self.quota.to_dict() + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "quota": quota, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.cloud_quota import CloudQuota + + d = dict(src_dict) + quota = CloudQuota.from_dict(d.pop("quota")) + + request_quota_increase_response = cls( + quota=quota, + ) + + request_quota_increase_response.additional_properties = d + return request_quota_increase_response + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/row_update.py b/cirro_api_client/v1/models/row_update.py new file mode 100644 index 0000000..a0024d1 --- /dev/null +++ b/cirro_api_client/v1/models/row_update.py @@ -0,0 +1,76 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +if TYPE_CHECKING: + from ..models.row_update_values import RowUpdateValues + + +T = TypeVar("T", bound="RowUpdate") + + +@_attrs_define +class RowUpdate: + """ + Attributes: + row_id (int): _row_id, which serves as the primary key to identify the row. Example: 42. + values (RowUpdateValues): Column name and new value. 
Only the columns included here are updated; all other + columns on the row are left unchanged. At least one entry is required. Example: {'icd_code': 'G65'}. + """ + + row_id: int + values: RowUpdateValues + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + row_id = self.row_id + + values = self.values.to_dict() + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "rowId": row_id, + "values": values, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.row_update_values import RowUpdateValues + + d = dict(src_dict) + row_id = d.pop("rowId") + + values = RowUpdateValues.from_dict(d.pop("values")) + + row_update = cls( + row_id=row_id, + values=values, + ) + + row_update.additional_properties = d + return row_update + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/row_update_values.py b/cirro_api_client/v1/models/row_update_values.py new file mode 100644 index 0000000..27faff2 --- /dev/null +++ b/cirro_api_client/v1/models/row_update_values.py @@ -0,0 +1,65 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +if TYPE_CHECKING: + from ..models.row_update_values_additional_property import RowUpdateValuesAdditionalProperty + + +T = TypeVar("T", 
bound="RowUpdateValues") + + +@_attrs_define +class RowUpdateValues: + """Column name and new value. Only the columns included here are updated; all other columns on the row are left + unchanged. At least one entry is required. + + Example: + {'icd_code': 'G65'} + + """ + + additional_properties: dict[str, RowUpdateValuesAdditionalProperty] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = prop.to_dict() + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.row_update_values_additional_property import RowUpdateValuesAdditionalProperty + + d = dict(src_dict) + row_update_values = cls() + + additional_properties = {} + for prop_name, prop_dict in d.items(): + additional_property = RowUpdateValuesAdditionalProperty.from_dict(prop_dict) + + additional_properties[prop_name] = additional_property + + row_update_values.additional_properties = additional_properties + return row_update_values + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> RowUpdateValuesAdditionalProperty: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: RowUpdateValuesAdditionalProperty) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/row_update_values_additional_property.py b/cirro_api_client/v1/models/row_update_values_additional_property.py new file mode 100644 index 0000000..4490a59 --- /dev/null +++ b/cirro_api_client/v1/models/row_update_values_additional_property.py @@ -0,0 +1,46 @@ +from __future__ import annotations + +from collections.abc 
import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="RowUpdateValuesAdditionalProperty") + + +@_attrs_define +class RowUpdateValuesAdditionalProperty: + """ """ + + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + row_update_values_additional_property = cls() + + row_update_values_additional_property.additional_properties = d + return row_update_values_additional_property + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/shared_filesystem.py b/cirro_api_client/v1/models/shared_filesystem.py new file mode 100644 index 0000000..d2c3acc --- /dev/null +++ b/cirro_api_client/v1/models/shared_filesystem.py @@ -0,0 +1,218 @@ +from __future__ import annotations + +import datetime +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field +from dateutil.parser import isoparse + +from ..models.status import Status +from ..types import UNSET, Unset + +if TYPE_CHECKING: + from ..models.named_item import NamedItem + + +T = TypeVar("T", bound="SharedFilesystem") + + +@_attrs_define +class SharedFilesystem: + """ + Attributes: + id (str): + 
name (str): + description (str): + project_id (str): + status (Status): + created_by (str): + created_at (datetime.datetime): + updated_at (datetime.datetime): + status_message (None | str | Unset): + size_in_bytes (int | None | Unset): Size of file system (refreshed daily) + warning_threshold_bytes (int | None | Unset): + used_by_workspaces (list[NamedItem] | Unset): Workspaces currently referencing this filesystem + used_by_count (int | Unset): Number of workspaces currently referencing this filesystem + """ + + id: str + name: str + description: str + project_id: str + status: Status + created_by: str + created_at: datetime.datetime + updated_at: datetime.datetime + status_message: None | str | Unset = UNSET + size_in_bytes: int | None | Unset = UNSET + warning_threshold_bytes: int | None | Unset = UNSET + used_by_workspaces: list[NamedItem] | Unset = UNSET + used_by_count: int | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + id = self.id + + name = self.name + + description = self.description + + project_id = self.project_id + + status = self.status.value + + created_by = self.created_by + + created_at = self.created_at.isoformat() + + updated_at = self.updated_at.isoformat() + + status_message: None | str | Unset + if isinstance(self.status_message, Unset): + status_message = UNSET + else: + status_message = self.status_message + + size_in_bytes: int | None | Unset + if isinstance(self.size_in_bytes, Unset): + size_in_bytes = UNSET + else: + size_in_bytes = self.size_in_bytes + + warning_threshold_bytes: int | None | Unset + if isinstance(self.warning_threshold_bytes, Unset): + warning_threshold_bytes = UNSET + else: + warning_threshold_bytes = self.warning_threshold_bytes + + used_by_workspaces: list[dict[str, Any]] | Unset = UNSET + if not isinstance(self.used_by_workspaces, Unset): + used_by_workspaces = [] + for used_by_workspaces_item_data in 
self.used_by_workspaces: + used_by_workspaces_item = used_by_workspaces_item_data.to_dict() + used_by_workspaces.append(used_by_workspaces_item) + + used_by_count = self.used_by_count + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "id": id, + "name": name, + "description": description, + "projectId": project_id, + "status": status, + "createdBy": created_by, + "createdAt": created_at, + "updatedAt": updated_at, + } + ) + if status_message is not UNSET: + field_dict["statusMessage"] = status_message + if size_in_bytes is not UNSET: + field_dict["sizeInBytes"] = size_in_bytes + if warning_threshold_bytes is not UNSET: + field_dict["warningThresholdBytes"] = warning_threshold_bytes + if used_by_workspaces is not UNSET: + field_dict["usedByWorkspaces"] = used_by_workspaces + if used_by_count is not UNSET: + field_dict["usedByCount"] = used_by_count + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.named_item import NamedItem + + d = dict(src_dict) + id = d.pop("id") + + name = d.pop("name") + + description = d.pop("description") + + project_id = d.pop("projectId") + + status = Status(d.pop("status")) + + created_by = d.pop("createdBy") + + created_at = isoparse(d.pop("createdAt")) + + updated_at = isoparse(d.pop("updatedAt")) + + def _parse_status_message(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + status_message = _parse_status_message(d.pop("statusMessage", UNSET)) + + def _parse_size_in_bytes(data: object) -> int | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(int | None | Unset, data) + + size_in_bytes = _parse_size_in_bytes(d.pop("sizeInBytes", UNSET)) + + def _parse_warning_threshold_bytes(data: object) -> int | None | Unset: + if data is None: + return data + 
if isinstance(data, Unset): + return data + return cast(int | None | Unset, data) + + warning_threshold_bytes = _parse_warning_threshold_bytes(d.pop("warningThresholdBytes", UNSET)) + + _used_by_workspaces = d.pop("usedByWorkspaces", UNSET) + used_by_workspaces: list[NamedItem] | Unset = UNSET + if _used_by_workspaces is not UNSET: + used_by_workspaces = [] + for used_by_workspaces_item_data in _used_by_workspaces: + used_by_workspaces_item = NamedItem.from_dict(used_by_workspaces_item_data) + + used_by_workspaces.append(used_by_workspaces_item) + + used_by_count = d.pop("usedByCount", UNSET) + + shared_filesystem = cls( + id=id, + name=name, + description=description, + project_id=project_id, + status=status, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + status_message=status_message, + size_in_bytes=size_in_bytes, + warning_threshold_bytes=warning_threshold_bytes, + used_by_workspaces=used_by_workspaces, + used_by_count=used_by_count, + ) + + shared_filesystem.additional_properties = d + return shared_filesystem + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/shared_filesystem_input.py b/cirro_api_client/v1/models/shared_filesystem_input.py new file mode 100644 index 0000000..db630a2 --- /dev/null +++ b/cirro_api_client/v1/models/shared_filesystem_input.py @@ -0,0 +1,103 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types 
import UNSET, Unset + +T = TypeVar("T", bound="SharedFilesystemInput") + + +@_attrs_define +class SharedFilesystemInput: + """ + Attributes: + name (str): + description (None | str | Unset): + warning_threshold_bytes (int | None | Unset): Size in bytes at which to send a warning notification + """ + + name: str + description: None | str | Unset = UNSET + warning_threshold_bytes: int | None | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + name = self.name + + description: None | str | Unset + if isinstance(self.description, Unset): + description = UNSET + else: + description = self.description + + warning_threshold_bytes: int | None | Unset + if isinstance(self.warning_threshold_bytes, Unset): + warning_threshold_bytes = UNSET + else: + warning_threshold_bytes = self.warning_threshold_bytes + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "name": name, + } + ) + if description is not UNSET: + field_dict["description"] = description + if warning_threshold_bytes is not UNSET: + field_dict["warningThresholdBytes"] = warning_threshold_bytes + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + name = d.pop("name") + + def _parse_description(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + description = _parse_description(d.pop("description", UNSET)) + + def _parse_warning_threshold_bytes(data: object) -> int | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(int | None | Unset, data) + + warning_threshold_bytes = _parse_warning_threshold_bytes(d.pop("warningThresholdBytes", UNSET)) + + shared_filesystem_input = cls( + name=name, + description=description, + 
warning_threshold_bytes=warning_threshold_bytes, + ) + + shared_filesystem_input.additional_properties = d + return shared_filesystem_input + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/notebook_instance.py b/cirro_api_client/v1/models/sheet.py similarity index 56% rename from cirro_api_client/v1/models/notebook_instance.py rename to cirro_api_client/v1/models/sheet.py index f2aed23..55352ea 100644 --- a/cirro_api_client/v1/models/notebook_instance.py +++ b/cirro_api_client/v1/models/sheet.py @@ -2,47 +2,47 @@ import datetime from collections.abc import Mapping -from typing import Any, TypeVar, cast +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field from dateutil.parser import isoparse +from ..models.sheet_creation_mode import SheetCreationMode +from ..models.sheet_type import SheetType from ..models.status import Status -T = TypeVar("T", bound="NotebookInstance") +T = TypeVar("T", bound="Sheet") @_attrs_define -class NotebookInstance: +class Sheet: """ Attributes: id (str): name (str): + description (str): + project_id (str): + sheet_type (SheetType): + sheet_creation_mode (SheetCreationMode): status (Status): - status_message (str): - instance_type (str): - accelerator_types (list[str]): - git_repositories (list[str]): - volume_size_gb (int): - is_shared_with_project (bool): created_by (str): created_at (datetime.datetime): updated_at (datetime.datetime): + total_row_count (int): """ id: str name: str + description: str + project_id: str + sheet_type: 
SheetType + sheet_creation_mode: SheetCreationMode status: Status - status_message: str - instance_type: str - accelerator_types: list[str] - git_repositories: list[str] - volume_size_gb: int - is_shared_with_project: bool created_by: str created_at: datetime.datetime updated_at: datetime.datetime + total_row_count: int additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -50,19 +50,15 @@ def to_dict(self) -> dict[str, Any]: name = self.name - status = self.status.value - - status_message = self.status_message - - instance_type = self.instance_type + description = self.description - accelerator_types = self.accelerator_types + project_id = self.project_id - git_repositories = self.git_repositories + sheet_type = self.sheet_type.value - volume_size_gb = self.volume_size_gb + sheet_creation_mode = self.sheet_creation_mode.value - is_shared_with_project = self.is_shared_with_project + status = self.status.value created_by = self.created_by @@ -70,22 +66,23 @@ def to_dict(self) -> dict[str, Any]: updated_at = self.updated_at.isoformat() + total_row_count = self.total_row_count + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( { "id": id, "name": name, + "description": description, + "projectId": project_id, + "sheetType": sheet_type, + "sheetCreationMode": sheet_creation_mode, "status": status, - "statusMessage": status_message, - "instanceType": instance_type, - "acceleratorTypes": accelerator_types, - "gitRepositories": git_repositories, - "volumeSizeGB": volume_size_gb, - "isSharedWithProject": is_shared_with_project, "createdBy": created_by, "createdAt": created_at, "updatedAt": updated_at, + "totalRowCount": total_row_count, } ) @@ -98,19 +95,15 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: name = d.pop("name") - status = Status(d.pop("status")) - - status_message = d.pop("statusMessage") - - instance_type = d.pop("instanceType") + 
description = d.pop("description") - accelerator_types = cast(list[str], d.pop("acceleratorTypes")) + project_id = d.pop("projectId") - git_repositories = cast(list[str], d.pop("gitRepositories")) + sheet_type = SheetType(d.pop("sheetType")) - volume_size_gb = d.pop("volumeSizeGB") + sheet_creation_mode = SheetCreationMode(d.pop("sheetCreationMode")) - is_shared_with_project = d.pop("isSharedWithProject") + status = Status(d.pop("status")) created_by = d.pop("createdBy") @@ -118,23 +111,24 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: updated_at = isoparse(d.pop("updatedAt")) - notebook_instance = cls( + total_row_count = d.pop("totalRowCount") + + sheet = cls( id=id, name=name, + description=description, + project_id=project_id, + sheet_type=sheet_type, + sheet_creation_mode=sheet_creation_mode, status=status, - status_message=status_message, - instance_type=instance_type, - accelerator_types=accelerator_types, - git_repositories=git_repositories, - volume_size_gb=volume_size_gb, - is_shared_with_project=is_shared_with_project, created_by=created_by, created_at=created_at, updated_at=updated_at, + total_row_count=total_row_count, ) - notebook_instance.additional_properties = d - return notebook_instance + sheet.additional_properties = d + return sheet @property def additional_keys(self) -> list[str]: diff --git a/cirro_api_client/v1/models/sheet_creation_mode.py b/cirro_api_client/v1/models/sheet_creation_mode.py new file mode 100644 index 0000000..65a3694 --- /dev/null +++ b/cirro_api_client/v1/models/sheet_creation_mode.py @@ -0,0 +1,17 @@ +from enum import Enum + + +class SheetCreationMode(str, Enum): + FILE = "FILE" + SCRATCH = "SCRATCH" + STANDARD = "STANDARD" + TEMPLATE = "TEMPLATE" + UNKNOWN = "UNKNOWN" + """ This is a fallback value for when the value is not known, do not use this value when making requests """ + + def __str__(self) -> str: + return str(self.value) + + @classmethod + def _missing_(cls, number): + return cls(cls.UNKNOWN) 
diff --git a/cirro_api_client/v1/models/sheet_detail.py b/cirro_api_client/v1/models/sheet_detail.py new file mode 100644 index 0000000..10b031f --- /dev/null +++ b/cirro_api_client/v1/models/sheet_detail.py @@ -0,0 +1,304 @@ +from __future__ import annotations + +import datetime +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field +from dateutil.parser import isoparse + +from ..models.sheet_creation_mode import SheetCreationMode +from ..models.sheet_type import SheetType +from ..models.status import Status +from ..types import UNSET, Unset + +if TYPE_CHECKING: + from ..models.column_def import ColumnDef + from ..models.view_query_request import ViewQueryRequest + + +T = TypeVar("T", bound="SheetDetail") + + +@_attrs_define +class SheetDetail: + """ + Attributes: + id (str): + name (str): + description (str): + project_id (str): + namespace_name (str): + table_name (str): + sheet_type (SheetType): + status (Status): + audit_read_access (bool): + created_by (str): + created_at (datetime.datetime): + updated_at (datetime.datetime): + total_row_count (int): + sheet_creation_mode (None | SheetCreationMode | Unset): How the table was initialized. Null for VIEW sheets. + columns (list[ColumnDef] | None | Unset): Column definitions for the table schema. Null for VIEW sheets. + view_definition (None | Unset | ViewQueryRequest): View definition for VIEW sheets. Null for TABLE sheets. + last_refreshed_at (datetime.datetime | None | Unset): When the view was last materialized. Null for TABLE + sheets. 
+ """ + + id: str + name: str + description: str + project_id: str + namespace_name: str + table_name: str + sheet_type: SheetType + status: Status + audit_read_access: bool + created_by: str + created_at: datetime.datetime + updated_at: datetime.datetime + total_row_count: int + sheet_creation_mode: None | SheetCreationMode | Unset = UNSET + columns: list[ColumnDef] | None | Unset = UNSET + view_definition: None | Unset | ViewQueryRequest = UNSET + last_refreshed_at: datetime.datetime | None | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + from ..models.view_query_request import ViewQueryRequest + + id = self.id + + name = self.name + + description = self.description + + project_id = self.project_id + + namespace_name = self.namespace_name + + table_name = self.table_name + + sheet_type = self.sheet_type.value + + status = self.status.value + + audit_read_access = self.audit_read_access + + created_by = self.created_by + + created_at = self.created_at.isoformat() + + updated_at = self.updated_at.isoformat() + + total_row_count = self.total_row_count + + sheet_creation_mode: None | str | Unset + if isinstance(self.sheet_creation_mode, Unset): + sheet_creation_mode = UNSET + elif isinstance(self.sheet_creation_mode, SheetCreationMode): + sheet_creation_mode = self.sheet_creation_mode.value + else: + sheet_creation_mode = self.sheet_creation_mode + + columns: list[dict[str, Any]] | None | Unset + if isinstance(self.columns, Unset): + columns = UNSET + elif isinstance(self.columns, list): + columns = [] + for columns_type_0_item_data in self.columns: + columns_type_0_item = columns_type_0_item_data.to_dict() + columns.append(columns_type_0_item) + + else: + columns = self.columns + + view_definition: dict[str, Any] | None | Unset + if isinstance(self.view_definition, Unset): + view_definition = UNSET + elif isinstance(self.view_definition, ViewQueryRequest): + view_definition = 
self.view_definition.to_dict() + else: + view_definition = self.view_definition + + last_refreshed_at: None | str | Unset + if isinstance(self.last_refreshed_at, Unset): + last_refreshed_at = UNSET + elif isinstance(self.last_refreshed_at, datetime.datetime): + last_refreshed_at = self.last_refreshed_at.isoformat() + else: + last_refreshed_at = self.last_refreshed_at + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "id": id, + "name": name, + "description": description, + "projectId": project_id, + "namespaceName": namespace_name, + "tableName": table_name, + "sheetType": sheet_type, + "status": status, + "auditReadAccess": audit_read_access, + "createdBy": created_by, + "createdAt": created_at, + "updatedAt": updated_at, + "totalRowCount": total_row_count, + } + ) + if sheet_creation_mode is not UNSET: + field_dict["sheetCreationMode"] = sheet_creation_mode + if columns is not UNSET: + field_dict["columns"] = columns + if view_definition is not UNSET: + field_dict["viewDefinition"] = view_definition + if last_refreshed_at is not UNSET: + field_dict["lastRefreshedAt"] = last_refreshed_at + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.column_def import ColumnDef + from ..models.view_query_request import ViewQueryRequest + + d = dict(src_dict) + id = d.pop("id") + + name = d.pop("name") + + description = d.pop("description") + + project_id = d.pop("projectId") + + namespace_name = d.pop("namespaceName") + + table_name = d.pop("tableName") + + sheet_type = SheetType(d.pop("sheetType")) + + status = Status(d.pop("status")) + + audit_read_access = d.pop("auditReadAccess") + + created_by = d.pop("createdBy") + + created_at = isoparse(d.pop("createdAt")) + + updated_at = isoparse(d.pop("updatedAt")) + + total_row_count = d.pop("totalRowCount") + + def _parse_sheet_creation_mode(data: object) -> None | SheetCreationMode | Unset: + if data 
is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + sheet_creation_mode_type_1 = SheetCreationMode(data) + + return sheet_creation_mode_type_1 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(None | SheetCreationMode | Unset, data) + + sheet_creation_mode = _parse_sheet_creation_mode(d.pop("sheetCreationMode", UNSET)) + + def _parse_columns(data: object) -> list[ColumnDef] | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, list): + raise TypeError() + columns_type_0 = [] + _columns_type_0 = data + for columns_type_0_item_data in _columns_type_0: + columns_type_0_item = ColumnDef.from_dict(columns_type_0_item_data) + + columns_type_0.append(columns_type_0_item) + + return columns_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(list[ColumnDef] | None | Unset, data) + + columns = _parse_columns(d.pop("columns", UNSET)) + + def _parse_view_definition(data: object) -> None | Unset | ViewQueryRequest: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, dict): + raise TypeError() + view_definition_type_1 = ViewQueryRequest.from_dict(data) + + return view_definition_type_1 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(None | Unset | ViewQueryRequest, data) + + view_definition = _parse_view_definition(d.pop("viewDefinition", UNSET)) + + def _parse_last_refreshed_at(data: object) -> datetime.datetime | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + last_refreshed_at_type_0 = isoparse(data) + + return last_refreshed_at_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(datetime.datetime | None | Unset, data) + + 
last_refreshed_at = _parse_last_refreshed_at(d.pop("lastRefreshedAt", UNSET)) + + sheet_detail = cls( + id=id, + name=name, + description=description, + project_id=project_id, + namespace_name=namespace_name, + table_name=table_name, + sheet_type=sheet_type, + status=status, + audit_read_access=audit_read_access, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + total_row_count=total_row_count, + sheet_creation_mode=sheet_creation_mode, + columns=columns, + view_definition=view_definition, + last_refreshed_at=last_refreshed_at, + ) + + sheet_detail.additional_properties = d + return sheet_detail + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/sheet_job.py b/cirro_api_client/v1/models/sheet_job.py new file mode 100644 index 0000000..e4f5097 --- /dev/null +++ b/cirro_api_client/v1/models/sheet_job.py @@ -0,0 +1,227 @@ +from __future__ import annotations + +import datetime +from collections.abc import Mapping +from typing import Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field +from dateutil.parser import isoparse + +from ..models.sheet_job_type import SheetJobType +from ..models.status import Status +from ..types import UNSET, Unset + +T = TypeVar("T", bound="SheetJob") + + +@_attrs_define +class SheetJob: + """ + Attributes: + id (str): + sheet_id (str): + job_type (SheetJobType): + status (Status): + created_at (datetime.datetime): + updated_at (datetime.datetime): + started_at (datetime.datetime | None | Unset): + completed_at 
(datetime.datetime | None | Unset): + failed_at_step (None | str | Unset): + error_message (None | str | Unset): + snapshot_id (None | str | Unset): + """ + + id: str + sheet_id: str + job_type: SheetJobType + status: Status + created_at: datetime.datetime + updated_at: datetime.datetime + started_at: datetime.datetime | None | Unset = UNSET + completed_at: datetime.datetime | None | Unset = UNSET + failed_at_step: None | str | Unset = UNSET + error_message: None | str | Unset = UNSET + snapshot_id: None | str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + id = self.id + + sheet_id = self.sheet_id + + job_type = self.job_type.value + + status = self.status.value + + created_at = self.created_at.isoformat() + + updated_at = self.updated_at.isoformat() + + started_at: None | str | Unset + if isinstance(self.started_at, Unset): + started_at = UNSET + elif isinstance(self.started_at, datetime.datetime): + started_at = self.started_at.isoformat() + else: + started_at = self.started_at + + completed_at: None | str | Unset + if isinstance(self.completed_at, Unset): + completed_at = UNSET + elif isinstance(self.completed_at, datetime.datetime): + completed_at = self.completed_at.isoformat() + else: + completed_at = self.completed_at + + failed_at_step: None | str | Unset + if isinstance(self.failed_at_step, Unset): + failed_at_step = UNSET + else: + failed_at_step = self.failed_at_step + + error_message: None | str | Unset + if isinstance(self.error_message, Unset): + error_message = UNSET + else: + error_message = self.error_message + + snapshot_id: None | str | Unset + if isinstance(self.snapshot_id, Unset): + snapshot_id = UNSET + else: + snapshot_id = self.snapshot_id + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "id": id, + "sheetId": sheet_id, + "jobType": job_type, + "status": status, + "createdAt": created_at, 
+ "updatedAt": updated_at, + } + ) + if started_at is not UNSET: + field_dict["startedAt"] = started_at + if completed_at is not UNSET: + field_dict["completedAt"] = completed_at + if failed_at_step is not UNSET: + field_dict["failedAtStep"] = failed_at_step + if error_message is not UNSET: + field_dict["errorMessage"] = error_message + if snapshot_id is not UNSET: + field_dict["snapshotId"] = snapshot_id + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + id = d.pop("id") + + sheet_id = d.pop("sheetId") + + job_type = SheetJobType(d.pop("jobType")) + + status = Status(d.pop("status")) + + created_at = isoparse(d.pop("createdAt")) + + updated_at = isoparse(d.pop("updatedAt")) + + def _parse_started_at(data: object) -> datetime.datetime | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + started_at_type_0 = isoparse(data) + + return started_at_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(datetime.datetime | None | Unset, data) + + started_at = _parse_started_at(d.pop("startedAt", UNSET)) + + def _parse_completed_at(data: object) -> datetime.datetime | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + completed_at_type_0 = isoparse(data) + + return completed_at_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(datetime.datetime | None | Unset, data) + + completed_at = _parse_completed_at(d.pop("completedAt", UNSET)) + + def _parse_failed_at_step(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + failed_at_step = _parse_failed_at_step(d.pop("failedAtStep", UNSET)) + + def _parse_error_message(data: object) -> 
None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + error_message = _parse_error_message(d.pop("errorMessage", UNSET)) + + def _parse_snapshot_id(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + snapshot_id = _parse_snapshot_id(d.pop("snapshotId", UNSET)) + + sheet_job = cls( + id=id, + sheet_id=sheet_id, + job_type=job_type, + status=status, + created_at=created_at, + updated_at=updated_at, + started_at=started_at, + completed_at=completed_at, + failed_at_step=failed_at_step, + error_message=error_message, + snapshot_id=snapshot_id, + ) + + sheet_job.additional_properties = d + return sheet_job + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/sheet_job_type.py b/cirro_api_client/v1/models/sheet_job_type.py new file mode 100644 index 0000000..2cb4a14 --- /dev/null +++ b/cirro_api_client/v1/models/sheet_job_type.py @@ -0,0 +1,17 @@ +from enum import Enum + + +class SheetJobType(str, Enum): + CREATE_TABLE = "CREATE_TABLE" + DROP_TABLE = "DROP_TABLE" + INGEST = "INGEST" + MATERIALIZE_VIEW = "MATERIALIZE_VIEW" + UNKNOWN = "UNKNOWN" + """ This is a fallback value for when the value is not known, do not use this value when making requests """ + + def __str__(self) -> str: + return str(self.value) + + @classmethod + def _missing_(cls, number): + return cls(cls.UNKNOWN) diff --git a/cirro_api_client/v1/models/sheet_query_response.py 
b/cirro_api_client/v1/models/sheet_query_response.py new file mode 100644 index 0000000..c874f65 --- /dev/null +++ b/cirro_api_client/v1/models/sheet_query_response.py @@ -0,0 +1,110 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +if TYPE_CHECKING: + from ..models.query_column import QueryColumn + from ..models.sheet_query_response_rows_item import SheetQueryResponseRowsItem + + +T = TypeVar("T", bound="SheetQueryResponse") + + +@_attrs_define +class SheetQueryResponse: + """ + Attributes: + columns (list[QueryColumn]): + rows (list[list[SheetQueryResponseRowsItem]]): + total_row_count (int): + """ + + columns: list[QueryColumn] + rows: list[list[SheetQueryResponseRowsItem]] + total_row_count: int + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + columns = [] + for columns_item_data in self.columns: + columns_item = columns_item_data.to_dict() + columns.append(columns_item) + + rows = [] + for rows_item_data in self.rows: + rows_item = [] + for rows_item_item_data in rows_item_data: + rows_item_item = rows_item_item_data.to_dict() + rows_item.append(rows_item_item) + + rows.append(rows_item) + + total_row_count = self.total_row_count + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "columns": columns, + "rows": rows, + "totalRowCount": total_row_count, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.query_column import QueryColumn + from ..models.sheet_query_response_rows_item import SheetQueryResponseRowsItem + + d = dict(src_dict) + columns = [] + _columns = d.pop("columns") + for columns_item_data in _columns: + columns_item = QueryColumn.from_dict(columns_item_data) + + 
columns.append(columns_item) + + rows = [] + _rows = d.pop("rows") + for rows_item_data in _rows: + rows_item = [] + _rows_item = rows_item_data + for rows_item_item_data in _rows_item: + rows_item_item = SheetQueryResponseRowsItem.from_dict(rows_item_item_data) + + rows_item.append(rows_item_item) + + rows.append(rows_item) + + total_row_count = d.pop("totalRowCount") + + sheet_query_response = cls( + columns=columns, + rows=rows, + total_row_count=total_row_count, + ) + + sheet_query_response.additional_properties = d + return sheet_query_response + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/sheet_query_response_rows_item.py b/cirro_api_client/v1/models/sheet_query_response_rows_item.py new file mode 100644 index 0000000..01d8615 --- /dev/null +++ b/cirro_api_client/v1/models/sheet_query_response_rows_item.py @@ -0,0 +1,46 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="SheetQueryResponseRowsItem") + + +@_attrs_define +class SheetQueryResponseRowsItem: + """ """ + + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + sheet_query_response_rows_item = cls() + + 
sheet_query_response_rows_item.additional_properties = d + return sheet_query_response_rows_item + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/sheet_type.py b/cirro_api_client/v1/models/sheet_type.py new file mode 100644 index 0000000..8da42a6 --- /dev/null +++ b/cirro_api_client/v1/models/sheet_type.py @@ -0,0 +1,15 @@ +from enum import Enum + + +class SheetType(str, Enum): + TABLE = "TABLE" + VIEW = "VIEW" + UNKNOWN = "UNKNOWN" + """ This is a fallback value for when the value is not known, do not use this value when making requests """ + + def __str__(self) -> str: + return str(self.value) + + @classmethod + def _missing_(cls, number): + return cls(cls.UNKNOWN) diff --git a/cirro_api_client/v1/models/sql_sort_order.py b/cirro_api_client/v1/models/sql_sort_order.py new file mode 100644 index 0000000..e01ea75 --- /dev/null +++ b/cirro_api_client/v1/models/sql_sort_order.py @@ -0,0 +1,15 @@ +from enum import Enum + + +class SqlSortOrder(str, Enum): + ASC = "ASC" + DESC = "DESC" + UNKNOWN = "UNKNOWN" + """ This is a fallback value for when the value is not known, do not use this value when making requests """ + + def __str__(self) -> str: + return str(self.value) + + @classmethod + def _missing_(cls, number): + return cls(cls.UNKNOWN) diff --git a/cirro_api_client/v1/models/trigger_ingest_request.py b/cirro_api_client/v1/models/trigger_ingest_request.py new file mode 100644 index 0000000..0e9fac2 --- /dev/null +++ b/cirro_api_client/v1/models/trigger_ingest_request.py @@ -0,0 +1,67 @@ +from __future__ import annotations 
+ +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +if TYPE_CHECKING: + from ..models.file_def import FileDef + + +T = TypeVar("T", bound="TriggerIngestRequest") + + +@_attrs_define +class TriggerIngestRequest: + """ + Attributes: + file_def (FileDef): If provided, an ingest job is triggered immediately after table creation (TABLE only) + """ + + file_def: FileDef + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + file_def = self.file_def.to_dict() + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "fileDef": file_def, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.file_def import FileDef + + d = dict(src_dict) + file_def = FileDef.from_dict(d.pop("fileDef")) + + trigger_ingest_request = cls( + file_def=file_def, + ) + + trigger_ingest_request.additional_properties = d + return trigger_ingest_request + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/update_rows_request.py b/cirro_api_client/v1/models/update_rows_request.py new file mode 100644 index 0000000..25870bd --- /dev/null +++ b/cirro_api_client/v1/models/update_rows_request.py @@ -0,0 +1,75 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar + +from attrs 
import define as _attrs_define +from attrs import field as _attrs_field + +if TYPE_CHECKING: + from ..models.row_update import RowUpdate + + +T = TypeVar("T", bound="UpdateRowsRequest") + + +@_attrs_define +class UpdateRowsRequest: + """ + Attributes: + updates (list[RowUpdate]): List of rows to update. + """ + + updates: list[RowUpdate] + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + updates = [] + for updates_item_data in self.updates: + updates_item = updates_item_data.to_dict() + updates.append(updates_item) + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "updates": updates, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.row_update import RowUpdate + + d = dict(src_dict) + updates = [] + _updates = d.pop("updates") + for updates_item_data in _updates: + updates_item = RowUpdate.from_dict(updates_item_data) + + updates.append(updates_item) + + update_rows_request = cls( + updates=updates, + ) + + update_rows_request.additional_properties = d + return update_rows_request + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/update_sheet_request.py b/cirro_api_client/v1/models/update_sheet_request.py new file mode 100644 index 0000000..ed10921 --- /dev/null +++ b/cirro_api_client/v1/models/update_sheet_request.py @@ -0,0 +1,109 @@ +from __future__ import annotations + +from collections.abc import Mapping +from 
typing import TYPE_CHECKING, Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +if TYPE_CHECKING: + from ..models.view_query_request import ViewQueryRequest + + +T = TypeVar("T", bound="UpdateSheetRequest") + + +@_attrs_define +class UpdateSheetRequest: + """ + Attributes: + name (str): Display name + description (str): + view_definition (None | Unset | ViewQueryRequest): Updated view definition (VIEW sheets only) + """ + + name: str + description: str + view_definition: None | Unset | ViewQueryRequest = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + from ..models.view_query_request import ViewQueryRequest + + name = self.name + + description = self.description + + view_definition: dict[str, Any] | None | Unset + if isinstance(self.view_definition, Unset): + view_definition = UNSET + elif isinstance(self.view_definition, ViewQueryRequest): + view_definition = self.view_definition.to_dict() + else: + view_definition = self.view_definition + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "name": name, + "description": description, + } + ) + if view_definition is not UNSET: + field_dict["viewDefinition"] = view_definition + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.view_query_request import ViewQueryRequest + + d = dict(src_dict) + name = d.pop("name") + + description = d.pop("description") + + def _parse_view_definition(data: object) -> None | Unset | ViewQueryRequest: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, dict): + raise TypeError() + view_definition_type_1 = ViewQueryRequest.from_dict(data) + + return view_definition_type_1 + except (TypeError, ValueError, AttributeError, KeyError): + 
pass + return cast(None | Unset | ViewQueryRequest, data) + + view_definition = _parse_view_definition(d.pop("viewDefinition", UNSET)) + + update_sheet_request = cls( + name=name, + description=description, + view_definition=view_definition, + ) + + update_sheet_request.additional_properties = d + return update_sheet_request + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/view_filter.py b/cirro_api_client/v1/models/view_filter.py new file mode 100644 index 0000000..6712084 --- /dev/null +++ b/cirro_api_client/v1/models/view_filter.py @@ -0,0 +1,222 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..models.filter_operator import FilterOperator +from ..models.logical_operator import LogicalOperator +from ..types import UNSET, Unset + +if TYPE_CHECKING: + from ..models.view_filter_values import ViewFilterValues + + +T = TypeVar("T", bound="ViewFilter") + + +@_attrs_define +class ViewFilter: + """A filter node: either a group (with logicalOperator and conditions) or a leaf condition (with column, operator, and + values) + + Attributes: + logical_operator (LogicalOperator | None | Unset): Set for group nodes to combine child conditions + conditions (list[ViewFilter] | None | Unset): Child filter nodes (for group nodes) + column (None | str | Unset): Qualified column reference in alias.column format (for leaf nodes) Example: s1.age. 
+ operator (FilterOperator | None | Unset): Comparison operator (for leaf nodes) + values (list[ViewFilterValues] | None | Unset): Values for the filter. Single-element list for comparison + operators (EQUALS, GREATER_THAN, etc.), multi-element for IN/NOT_IN. Null or empty for IS_NULL/IS_NOT_NULL. + """ + + logical_operator: LogicalOperator | None | Unset = UNSET + conditions: list[ViewFilter] | None | Unset = UNSET + column: None | str | Unset = UNSET + operator: FilterOperator | None | Unset = UNSET + values: list[ViewFilterValues] | None | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + logical_operator: None | str | Unset + if isinstance(self.logical_operator, Unset): + logical_operator = UNSET + elif isinstance(self.logical_operator, LogicalOperator): + logical_operator = self.logical_operator.value + else: + logical_operator = self.logical_operator + + conditions: list[dict[str, Any]] | None | Unset + if isinstance(self.conditions, Unset): + conditions = UNSET + elif isinstance(self.conditions, list): + conditions = [] + for conditions_type_0_item_data in self.conditions: + conditions_type_0_item = conditions_type_0_item_data.to_dict() + conditions.append(conditions_type_0_item) + + else: + conditions = self.conditions + + column: None | str | Unset + if isinstance(self.column, Unset): + column = UNSET + else: + column = self.column + + operator: None | str | Unset + if isinstance(self.operator, Unset): + operator = UNSET + elif isinstance(self.operator, FilterOperator): + operator = self.operator.value + else: + operator = self.operator + + values: list[dict[str, Any]] | None | Unset + if isinstance(self.values, Unset): + values = UNSET + elif isinstance(self.values, list): + values = [] + for values_type_0_item_data in self.values: + values_type_0_item = values_type_0_item_data.to_dict() + values.append(values_type_0_item) + + else: + values = self.values + + 
field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + if logical_operator is not UNSET: + field_dict["logicalOperator"] = logical_operator + if conditions is not UNSET: + field_dict["conditions"] = conditions + if column is not UNSET: + field_dict["column"] = column + if operator is not UNSET: + field_dict["operator"] = operator + if values is not UNSET: + field_dict["values"] = values + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.view_filter_values import ViewFilterValues + + d = dict(src_dict) + + def _parse_logical_operator(data: object) -> LogicalOperator | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + logical_operator_type_1 = LogicalOperator(data) + + return logical_operator_type_1 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(LogicalOperator | None | Unset, data) + + logical_operator = _parse_logical_operator(d.pop("logicalOperator", UNSET)) + + def _parse_conditions(data: object) -> list[ViewFilter] | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, list): + raise TypeError() + conditions_type_0 = [] + _conditions_type_0 = data + for conditions_type_0_item_data in _conditions_type_0: + conditions_type_0_item = ViewFilter.from_dict(conditions_type_0_item_data) + + conditions_type_0.append(conditions_type_0_item) + + return conditions_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(list[ViewFilter] | None | Unset, data) + + conditions = _parse_conditions(d.pop("conditions", UNSET)) + + def _parse_column(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + column = 
_parse_column(d.pop("column", UNSET)) + + def _parse_operator(data: object) -> FilterOperator | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + operator_type_1 = FilterOperator(data) + + return operator_type_1 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(FilterOperator | None | Unset, data) + + operator = _parse_operator(d.pop("operator", UNSET)) + + def _parse_values(data: object) -> list[ViewFilterValues] | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, list): + raise TypeError() + values_type_0 = [] + _values_type_0 = data + for values_type_0_item_data in _values_type_0: + values_type_0_item = ViewFilterValues.from_dict(values_type_0_item_data) + + values_type_0.append(values_type_0_item) + + return values_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(list[ViewFilterValues] | None | Unset, data) + + values = _parse_values(d.pop("values", UNSET)) + + view_filter = cls( + logical_operator=logical_operator, + conditions=conditions, + column=column, + operator=operator, + values=values, + ) + + view_filter.additional_properties = d + return view_filter + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/dataset_viz_config.py b/cirro_api_client/v1/models/view_filter_values.py similarity index 81% rename from cirro_api_client/v1/models/dataset_viz_config.py rename to 
cirro_api_client/v1/models/view_filter_values.py index 12ebb38..bc0c65e 100644 --- a/cirro_api_client/v1/models/dataset_viz_config.py +++ b/cirro_api_client/v1/models/view_filter_values.py @@ -6,12 +6,12 @@ from attrs import define as _attrs_define from attrs import field as _attrs_field -T = TypeVar("T", bound="DatasetVizConfig") +T = TypeVar("T", bound="ViewFilterValues") @_attrs_define -class DatasetVizConfig: - """Config or path to config used to render viz""" +class ViewFilterValues: + """ """ additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) @@ -24,10 +24,10 @@ def to_dict(self) -> dict[str, Any]: @classmethod def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) - dataset_viz_config = cls() + view_filter_values = cls() - dataset_viz_config.additional_properties = d - return dataset_viz_config + view_filter_values.additional_properties = d + return view_filter_values @property def additional_keys(self) -> list[str]: diff --git a/cirro_api_client/v1/models/view_join.py b/cirro_api_client/v1/models/view_join.py new file mode 100644 index 0000000..4d9f441 --- /dev/null +++ b/cirro_api_client/v1/models/view_join.py @@ -0,0 +1,93 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..models.join_type import JoinType + +if TYPE_CHECKING: + from ..models.join_condition import JoinCondition + + +T = TypeVar("T", bound="ViewJoin") + + +@_attrs_define +class ViewJoin: + """ + Attributes: + sheet_alias (str): Alias of the sheet to join + join_type (JoinType): + conditions (list[JoinCondition]): + """ + + sheet_alias: str + join_type: JoinType + conditions: list[JoinCondition] + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + sheet_alias = self.sheet_alias + + join_type = 
self.join_type.value + + conditions = [] + for conditions_item_data in self.conditions: + conditions_item = conditions_item_data.to_dict() + conditions.append(conditions_item) + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "sheetAlias": sheet_alias, + "joinType": join_type, + "conditions": conditions, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.join_condition import JoinCondition + + d = dict(src_dict) + sheet_alias = d.pop("sheetAlias") + + join_type = JoinType(d.pop("joinType")) + + conditions = [] + _conditions = d.pop("conditions") + for conditions_item_data in _conditions: + conditions_item = JoinCondition.from_dict(conditions_item_data) + + conditions.append(conditions_item) + + view_join = cls( + sheet_alias=sheet_alias, + join_type=join_type, + conditions=conditions, + ) + + view_join.additional_properties = d + return view_join + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/view_query_request.py b/cirro_api_client/v1/models/view_query_request.py new file mode 100644 index 0000000..52114e4 --- /dev/null +++ b/cirro_api_client/v1/models/view_query_request.py @@ -0,0 +1,184 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +if TYPE_CHECKING: + from 
..models.view_filter import ViewFilter + from ..models.view_join import ViewJoin + from ..models.view_sheet_ref import ViewSheetRef + + +T = TypeVar("T", bound="ViewQueryRequest") + + +@_attrs_define +class ViewQueryRequest: + """Request for a view joining one or more sheets with optional column selection and filtering + + Attributes: + sheets (list[ViewSheetRef]): Sheets to include in the view + joins (list[ViewJoin] | None | Unset): Join definitions between sheets + columns (list[str] | None | Unset): Columns to select in alias.column format. If null, selects all columns. + filter_ (None | Unset | ViewFilter): Filter conditions to apply + """ + + sheets: list[ViewSheetRef] + joins: list[ViewJoin] | None | Unset = UNSET + columns: list[str] | None | Unset = UNSET + filter_: None | Unset | ViewFilter = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + from ..models.view_filter import ViewFilter + + sheets = [] + for sheets_item_data in self.sheets: + sheets_item = sheets_item_data.to_dict() + sheets.append(sheets_item) + + joins: list[dict[str, Any]] | None | Unset + if isinstance(self.joins, Unset): + joins = UNSET + elif isinstance(self.joins, list): + joins = [] + for joins_type_0_item_data in self.joins: + joins_type_0_item = joins_type_0_item_data.to_dict() + joins.append(joins_type_0_item) + + else: + joins = self.joins + + columns: list[str] | None | Unset + if isinstance(self.columns, Unset): + columns = UNSET + elif isinstance(self.columns, list): + columns = self.columns + + else: + columns = self.columns + + filter_: dict[str, Any] | None | Unset + if isinstance(self.filter_, Unset): + filter_ = UNSET + elif isinstance(self.filter_, ViewFilter): + filter_ = self.filter_.to_dict() + else: + filter_ = self.filter_ + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "sheets": sheets, + } + ) + if joins is not UNSET: + 
field_dict["joins"] = joins + if columns is not UNSET: + field_dict["columns"] = columns + if filter_ is not UNSET: + field_dict["filter"] = filter_ + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.view_filter import ViewFilter + from ..models.view_join import ViewJoin + from ..models.view_sheet_ref import ViewSheetRef + + d = dict(src_dict) + sheets = [] + _sheets = d.pop("sheets") + for sheets_item_data in _sheets: + sheets_item = ViewSheetRef.from_dict(sheets_item_data) + + sheets.append(sheets_item) + + def _parse_joins(data: object) -> list[ViewJoin] | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, list): + raise TypeError() + joins_type_0 = [] + _joins_type_0 = data + for joins_type_0_item_data in _joins_type_0: + joins_type_0_item = ViewJoin.from_dict(joins_type_0_item_data) + + joins_type_0.append(joins_type_0_item) + + return joins_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(list[ViewJoin] | None | Unset, data) + + joins = _parse_joins(d.pop("joins", UNSET)) + + def _parse_columns(data: object) -> list[str] | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, list): + raise TypeError() + columns_type_0 = cast(list[str], data) + + return columns_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(list[str] | None | Unset, data) + + columns = _parse_columns(d.pop("columns", UNSET)) + + def _parse_filter_(data: object) -> None | Unset | ViewFilter: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, dict): + raise TypeError() + filter_type_1 = ViewFilter.from_dict(data) + + return filter_type_1 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(None | Unset | ViewFilter, data) 
+ + filter_ = _parse_filter_(d.pop("filter", UNSET)) + + view_query_request = cls( + sheets=sheets, + joins=joins, + columns=columns, + filter_=filter_, + ) + + view_query_request.additional_properties = d + return view_query_request + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/cirro_api_client/v1/models/open_notebook_instance_response.py b/cirro_api_client/v1/models/view_sheet_ref.py similarity index 67% rename from cirro_api_client/v1/models/open_notebook_instance_response.py rename to cirro_api_client/v1/models/view_sheet_ref.py index 3f7e01b..25b35b7 100644 --- a/cirro_api_client/v1/models/open_notebook_instance_response.py +++ b/cirro_api_client/v1/models/view_sheet_ref.py @@ -6,32 +6,32 @@ from attrs import define as _attrs_define from attrs import field as _attrs_field -T = TypeVar("T", bound="OpenNotebookInstanceResponse") +T = TypeVar("T", bound="ViewSheetRef") @_attrs_define -class OpenNotebookInstanceResponse: +class ViewSheetRef: """ Attributes: - url (str): - message (str): + sheet_id (str): + alias (str): """ - url: str - message: str + sheet_id: str + alias: str additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: - url = self.url + sheet_id = self.sheet_id - message = self.message + alias = self.alias field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( { - "url": url, - "message": message, + "sheetId": sheet_id, + "alias": alias, } ) @@ -40,17 +40,17 @@ def to_dict(self) -> dict[str, Any]: @classmethod def from_dict(cls: 
type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) - url = d.pop("url") + sheet_id = d.pop("sheetId") - message = d.pop("message") + alias = d.pop("alias") - open_notebook_instance_response = cls( - url=url, - message=message, + view_sheet_ref = cls( + sheet_id=sheet_id, + alias=alias, ) - open_notebook_instance_response.additional_properties = d - return open_notebook_instance_response + view_sheet_ref.additional_properties = d + return view_sheet_ref @property def additional_keys(self) -> list[str]: diff --git a/cirro_api_client/v1/models/workspace_compute_config.py b/cirro_api_client/v1/models/workspace_compute_config.py index 18340a1..92369a3 100644 --- a/cirro_api_client/v1/models/workspace_compute_config.py +++ b/cirro_api_client/v1/models/workspace_compute_config.py @@ -31,6 +31,10 @@ class WorkspaceComputeConfig: injected into the container at runtime. Keys must be non-blank. Example: {'ENV_MODE': 'production', 'LOG_LEVEL': 'debug'}. local_port (int | Unset): User-facing web server port (http). Example: 8080. + custom_task_role_arn (None | str | Unset): Custom IAM task role for the workspace ECS task. Provide either a + role name (e.g., 'Cirro-CustomWorkspaceTaskRole-{projectShortCode}-{name}') or a full ARN (e.g., + 'arn:aws:iam::{accountId}:role/Cirro-CustomWorkspaceTaskRole-{projectShortCode}-{name}'). Must belong to the + project's AWS account. 
""" container_image_uri: str @@ -41,6 +45,7 @@ class WorkspaceComputeConfig: gpu_model: None | str | Unset = UNSET environment_variables: None | Unset | WorkspaceComputeConfigEnvironmentVariables = UNSET local_port: int | Unset = UNSET + custom_task_role_arn: None | str | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -72,6 +77,12 @@ def to_dict(self) -> dict[str, Any]: local_port = self.local_port + custom_task_role_arn: None | str | Unset + if isinstance(self.custom_task_role_arn, Unset): + custom_task_role_arn = UNSET + else: + custom_task_role_arn = self.custom_task_role_arn + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -93,6 +104,8 @@ def to_dict(self) -> dict[str, Any]: field_dict["environmentVariables"] = environment_variables if local_port is not UNSET: field_dict["localPort"] = local_port + if custom_task_role_arn is not UNSET: + field_dict["customTaskRoleArn"] = custom_task_role_arn return field_dict @@ -139,6 +152,15 @@ def _parse_environment_variables(data: object) -> None | Unset | WorkspaceComput local_port = d.pop("localPort", UNSET) + def _parse_custom_task_role_arn(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + custom_task_role_arn = _parse_custom_task_role_arn(d.pop("customTaskRoleArn", UNSET)) + workspace_compute_config = cls( container_image_uri=container_image_uri, cpu=cpu, @@ -148,6 +170,7 @@ def _parse_environment_variables(data: object) -> None | Unset | WorkspaceComput gpu_model=gpu_model, environment_variables=environment_variables, local_port=local_port, + custom_task_role_arn=custom_task_role_arn, ) workspace_compute_config.additional_properties = d diff --git a/config.yml b/config.yml index 54fd01d..ebc3644 100644 --- a/config.yml +++ b/config.yml @@ -29,3 +29,7 @@ class_overrides: 
class_name: RunAnalysisRequestSourceSampleFilesMap WorkspaceComputeConfigEnvironmentVariablesType0: class_name: WorkspaceComputeConfigEnvironmentVariables + ViewFilterValuesType0Item: + class_name: ViewFilterValues + SheetQueryResponseRowsItemItem: + class_name: SheetQueryResponseRowsItem diff --git a/pyproject.toml b/pyproject.toml index bd7c683..70016d1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "cirro_api_client" -version = "1.3.2" +version = "1.4.0" description = "A client library for accessing Cirro" authors = ["Cirro "] license = "MIT"