baserow.client

This module handles the interaction with Baserow's REST API over HTTP.
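A minimal usage sketch (the host, database token, and table ID below are placeholders):

```python
import asyncio

from baserow import Client


async def main():
    # Placeholder host and database token; replace with your own values.
    client = Client("baserow.example.com", token="<API-TOKEN>")
    try:
        # Fetch the first ten rows of a (hypothetical) table.
        rows = await client.list_table_rows(23, True, size=10)
        for row in rows.results:
            print(row)
    finally:
        # A directly instantiated client has to be closed explicitly.
        await client.close()


asyncio.run(main())
```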

   1"""
   2This module handles the interaction with Baserow's REST API over HTTP.
   3"""
   4
   5from __future__ import annotations
   6import asyncio
   7from datetime import datetime, timedelta
   8import enum
   9from functools import wraps
  10from io import BufferedReader
  11import json
  12from typing import Any, Generic, Optional, Type, TypeVar, Union
  13
  14import aiohttp
  15from pydantic import BaseModel, RootModel
  16
  17from baserow.error import BaserowError, JWTAuthRequiredError, PackageClientAlreadyConfiguredError, PackageClientNotConfiguredError, UnspecifiedBaserowError
  18from baserow.file import File
  19from baserow.field_config import FieldConfig, FieldConfigType
  20from baserow.filter import Filter
  21
  22
  23API_PREFIX: str = "api"
  24"""URL prefix for all API call URLs."""
  25
  26CONTENT_TYPE_JSON: dict[str, str] = {"Content-Type": "application/json"}
  27"""HTTP Header when content type is JSON."""
  28
  29
  30def _url_join(*parts: str) -> str:
  31    """Joins given strings into a URL."""
  32    rsl = [part.strip("/") for part in parts]
  33    return "/".join(rsl) + "/"
  34
  35
  36def _list_to_str(items: list[str]) -> str:
  37    return ",".join(items)
  38
  39
  40T = TypeVar("T", bound=Union[BaseModel, RootModel])
  41
  42A = TypeVar("A")
  43
  44
  45class RowResponse(BaseModel, Generic[T]):
  46    """The return object of list row API calls."""
  47    count: int
  48    next: Optional[str]
  49    previous: Optional[str]
  50    results: list[T]
  51
  52
  53class MinimalRow(BaseModel):
  54    """The minimal result items of a `RowResponse`."""
  55    id: int
  56
  57
  58class TokenResponse(BaseModel):
  59    """Result of an authentication token call."""
  60    user: Any
  61    access_token: str
  62    refresh_token: str
  63
  64
  65class TokenRefresh(BaseModel):
  66    """Response from a token refresh call."""
  67    user: Any
  68    access_token: str
  69
  70
  71class DatabaseTableResponse(BaseModel):
  72    """Describes a table within a database in Baserow."""
  73    id: int
  74    name: str
  75    order: int
  76    database_id: int
  77
  78
  79class DatabaseTablesResponse(RootModel[list[DatabaseTableResponse]]):
  80    """Contains all tables for a database in Baserow."""
  81    root: list[DatabaseTableResponse]
  82
  83
  84class FieldResponse(RootModel[list[FieldConfig]]):
  85    """
  86    The response for the list field call. Contains all fields of a table.
  87    """
  88    root: list[FieldConfig]
  89
  90
  91class BatchResponse(BaseModel, Generic[A]):
  92    """
  93    Response for batch mode. The results of a batch call are encapsulated in
  94    this response.
  95    """
  96    items: list[A]
  97
  98
  99class ErrorResponse(BaseModel):
 100    """
 101    The return object from Baserow when the request was unsuccessful. Contains
 102    information about the reasons for the failure.
 103    """
 104    error: str
 105    """Short error enum."""
 106    detail: Any
 107    """Additional information on the error."""
 108
 109
 110class AuthMethod(int, enum.Enum):
 111    """
 112    Differentiates between the two authentication methods for the client.
 113    Internal use only. For more information on the two different authentication
 114    methods, refer to the documentation for the `Client` class.
 115    """
 116
 117    DATABASE_TOKEN = 0
 118    """Authentication with the database token."""
 119    JWT = 1
 120    """Authentication with user credentials."""
 121
 122
 123def jwt_only(func):
 124    """
 125    Decorator for operations that can only be executed with a JWT token
 126    (authenticated via login credentials). If a database token is used,
 127    `JWTAuthRequiredError` is raised.
 128    """
 129
 130    @wraps(func)
 131    def wrapper(self, *args, **kwargs):
 132        if self._auth_method is not AuthMethod.JWT:
 133            raise JWTAuthRequiredError(func.__name__)
 134        return func(self, *args, **kwargs)
 135    return wrapper
 136
 137
 138class Client:
 139    """
 140    This class manages interaction with the Baserow server via HTTP using REST
 141    API calls. Access to the Baserow API requires authentication, and there are
 142    two methods available: Database Tokens and JWT Tokens.
 143
 144    **Database Tokens:** These tokens are designed for delivering data to
 145    frontends and, as such, can only perform CRUD (Create, Read, Update, Delete)
 146    operations on a database. New tokens can be created in the User Settings,
 147    where their permissions can also be configured. For instance, it is possible
 148    to create a token that only allows reading. These tokens have unlimited
 149    validity.
 150
 151    ```python
 152    from baserow import Client
 153
 154    client = Client("baserow.example.com", token="<API-TOKEN>")
 155    ```
 156
 157    **JWT Tokens:** All other functionalities require a JWT token, which can be
 158    obtained by providing login credentials (email address and password) to the
 159    Baserow API. These tokens have a limited lifespan of 10 minutes and will be
 160    refreshed if needed.
 161
 162    ```python
 163    from baserow import Client
 164
 165    client = Client(
 166        "baserow.example.com",
 167        email="baserow.user@example.com",
 168        password="<PASSWORD>",
 169    )
 170    ```
 171
 172    **Singleton/Global Client.** In many applications, maintaining a consistent
 173    connection to a single Baserow instance throughout the runtime is crucial.
 174    To facilitate this, the package provides a Global Client, which acts as a
 175    singleton. This means the client needs to be configured just once using
 176    GlobalClient.configure(). After this initial setup, the Global Client can be
 177    universally accessed and used throughout the program.
 178
 179    When utilizing the ORM functionality of the table models, all methods within
 180    the `Table` models inherently use this Global Client. Please note that the
 181    Global Client can only be configured once. Attempting to call the
 182    GlobalClient.configure() method more than once will result in an exception.
 183
 184    ```python
 185    # Either configure the global client with a database token...
 186    GlobalClient.configure("baserow.example.com", token="<API-TOKEN>")
 187
 188    # ...or with the login credentials (email and password).
 189    GlobalClient.configure(
 190        "baserow.example.com",
 191        email="baserow.user@example.com",
 192        password="<PASSWORD>",
 193    )
 194    ```
 195
 196    This client can also be used directly, without utilizing the ORM
 197    functionality of the package.
 198
 199    Args:
 200        url (str): The base URL of the Baserow instance.
 201        token (str, optional): An access token (referred to as a database
 202            token in Baserow's documentation) can be generated in the user
 203            settings within Baserow.
 204        email (str, optional): Email address of a Baserow user for the JWT
 205            authentication.
 206        password (str, optional): Password of a Baserow user for the JWT
 207            authentication.
 208    """
 209
 210    def __init__(
 211        self,
 212        url: str,
 213        token: Optional[str] = None,
 214        email: Optional[str] = None,
 215        password: Optional[str] = None,
 216    ):
 217        if not token and not email and not password:
 218            raise ValueError(
 219                "you must either provide a database token or the login credentials (email, password) of a user"
 220            )
 221        if token and (email or password):
 222            raise ValueError(
 223                "passing parameters for both types of authentication (database token and login credentials) simultaneously is not allowed"
 224            )
 225        if not token and (not email or not password):
 226            missing = "email" if not email else "password"
 227            raise ValueError(
 228                f"incomplete authentication with login credentials, "
 229                f"{missing} is missing"
 230            )
 231        self._url = url
 232        self._token = token
 233        self._email = email
 234        self._password = password
 235        self._session: aiohttp.ClientSession = aiohttp.ClientSession()
 236        self._auth_method = AuthMethod.DATABASE_TOKEN if token else AuthMethod.JWT
 237        # Cache is only accessed by the __headers() method.
 238        self.__headers_cache: Optional[dict[str, str]] = None
 239        self.__jwt_access_token: Optional[str] = None
 240        self.__jwt_refresh_token: Optional[str] = None
 241        self.__jwt_token_age: Optional[datetime] = None
 242
 243    async def token_auth(self, email: str, password: str) -> TokenResponse:
 244        """
 245        Authenticates an existing user based on their email and their password.
 246        If successful, an access token will be returned.
 247
 248        This method is designed to function even with a partially initialized
 249        instance, as it's used for optional JWT token retrieval during class
 250        initialization.
 251
 252        Args:
 253            email (str): Email address of a Baserow user for the JWT
 254                authentication.
 255            password (str): Password of a Baserow user for the JWT
 256                authentication.
 259        """
 260        return await self._typed_request(
 261            "post",
 262            _url_join(self._url, API_PREFIX, "user/token-auth"),
 263            TokenResponse,
 264            headers=CONTENT_TYPE_JSON,
 265            json={"email": email, "password": password},
 266            use_default_headers=False,
 267        )
 268
 269    async def token_refresh(self, refresh_token: str) -> TokenRefresh:
 270        """
 271        Generate a new JWT access_token that can be used to continue operating
 272        on Baserow starting from a valid refresh token. The initial JWT access
 273        and refresh token can be generated using `Client.token_auth()`.
 274
 275        Args:
 276            refresh_token: The JWT refresh token obtained by
 277                `Client.token_auth()`.
 278        """
 279        return await self._typed_request(
 280            "post",
 281            _url_join(self._url, API_PREFIX, "user/token-refresh"),
 282            TokenRefresh,
 283            headers=CONTENT_TYPE_JSON,
 284            json={"refresh_token": refresh_token},
 285            use_default_headers=False,
 286        )
 287
 288    async def list_table_rows(
 289        self,
 290        table_id: int,
 291        user_field_names: bool,
 292        result_type: Optional[Type[T]] = None,
 293        filter: Optional[Filter] = None,
 294        order_by: Optional[list[str]] = None,
 295        page: Optional[int] = None,
 296        size: Optional[int] = None,
 297    ) -> RowResponse[T]:
 298        """
 299        Lists rows in the table with the given ID. Note that Baserow uses
 300        paging. If all rows of a table are needed, the
 301        Client.list_all_table_rows method can be used.
 302
 303        Args:
 304            table_id (int): The ID of the table to be queried.
 305            user_field_names (bool): When set to true, the returned fields will
 306                be named according to their field names. Otherwise, the unique
 307                IDs of the fields will be used.
 308            result_type (Optional[Type[T]]): Which type will appear as an item
 309                in the result list and should be serialized accordingly. If set
 310                to None, Pydantic will attempt to serialize it to the standard
 311                types.
 312            filter (Optional[Filter], optional): Allows the dataset to be
 313                filtered.
 314            order_by (Optional[list[str]], optional): A list of field names/IDs
 315                by which the result should be sorted. If the field name is
 316                prepended with a +, the sorting is ascending; if with a -, it is
 317                descending.
 318            page (Optional[int], optional): The page of the paging.
 319            size (Optional[int], optional): How many records should be returned
 320                at max. Defaults to 100 and cannot exceed 200.
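
        Example (a sketch; the table ID, the field names, and the model are
        hypothetical):

        ```python
        from pydantic import BaseModel

        class Person(BaseModel):
            name: str
            age: int

        rsp = await client.list_table_rows(
            23,
            True,
            result_type=Person,
            order_by=["-age"],
            size=50,
        )
        for person in rsp.results:
            print(person.name, person.age)
        ```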
 321        """
 322        params: dict[str, str] = {
 323            "user_field_names": "true" if user_field_names else "false",
 324        }
 325        if filter is not None:
 326            params["filters"] = filter.model_dump_json(by_alias=True)
 327        if order_by is not None:
 328            params["order_by"] = _list_to_str(order_by)
 329        if page is not None:
 330            params["page"] = str(page)
 331        if size is not None:
 332            params["size"] = str(size)
 333        url = _url_join(
 334            self._url, API_PREFIX,
 335            "database/rows/table",
 336            str(table_id),
 337        )
 338        if result_type:
 339            model = RowResponse[result_type]
 340        else:
 341            model = RowResponse[Any]
 342        return await self._typed_request("get", url, model, params=params)
 343
 344    async def list_all_table_rows(
 345        self,
 346        table_id: int,
 347        user_field_names: bool,
 348        result_type: Optional[Type[T]] = None,
 349        filter: Optional[Filter] = None,
 350        order_by: Optional[list[str]] = None,
 351    ) -> RowResponse[T]:
 352        """
 353        Since Baserow uses paging, this method sends as many requests to Baserow
 354        as needed until all rows are received. This function should only be used
 355        when all data is truly needed. This should be a rare occurrence, as
 356        filtering can occur on Baserow's side using the filter parameter.
 357
 358        Args:
 359            table_id (int): The ID of the table to be queried.
 360            user_field_names (bool): When set to true, the returned fields
 361                will be named according to their field names. Otherwise, the
 362                unique IDs of the fields will be used.
 363            result_type (Optional[Type[T]]): Which type will appear as an item
 364                in the result list and should be serialized accordingly. If set
 365                to None, Pydantic will attempt to serialize it to the standard
 366                types.
 367            filter (Optional[Filter], optional): Allows the dataset to be
 368                filtered.
 369            order_by (Optional[list[str]], optional): A list of field names/IDs
 370                by which the result should be sorted. If the field name is
 371                prepended with a +, the sorting is ascending; if with a -, it is
 372                descending.
 373        """
 374        count: int = await self.table_row_count(table_id)
 375        total_calls = (count + 200 - 1) // 200
 376
 377        requests = []
 378        for page in range(1, total_calls+1):
 379            rqs = asyncio.create_task(
 380                self.list_table_rows(
 381                    table_id,
 382                    user_field_names,
 383                    result_type=result_type,
 384                    filter=filter,
 385                    order_by=order_by,
 386                    page=page,
 387                    size=200,
 388                )
 389            )
 390            requests.append(rqs)
 391        responses = await asyncio.gather(*requests)
 392
 393        rsl: Optional[RowResponse[T]] = None
 394        for rsp in responses:
 395            if rsl is None:
 396                rsl = rsp
 397            else:
 398                rsl.results.extend(rsp.results)
 399        if rsl is None:
 400            return RowResponse(
 401                count=0,
 402                previous=None,
 403                next=None,
 404                results=[],
 405            )
 406        return rsl
 407
 408    async def table_row_count(self, table_id: int, filter: Optional[Filter] = None) -> int:
 409        """
 410        Determines how many rows or records are present in the table with the
 411        given ID. Filters can be optionally passed as parameters.
 412
 413        Args:
 414            table_id (int): The ID of the table to be queried.
 415            filter (Optional[Filter], optional): Allows the dataset to be
 416                filtered. Only rows matching the filter will be counted.
 417        """
 418        rsl = await self.list_table_rows(table_id, True, filter=filter, size=1)
 419        return rsl.count
 420
 421    async def list_fields(self, table_id: int) -> FieldResponse:
 422        """
 423        Lists all fields (»columns«) of a table.
 424
 425        Args:
 426            table_id (int): The ID of the table to be queried.
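
        Example (a sketch; the table ID is hypothetical):

        ```python
        fields = await client.list_fields(23)
        for field in fields.root:
            print(field)
        ```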
 427        """
 428        return await self._typed_request(
 429            "get",
 430            _url_join(
 431                self._url,
 432                API_PREFIX,
 433                "database/fields/table/",
 434                str(table_id),
 435            ),
 436            FieldResponse,
 437        )
 438
 439    async def get_row(
 440        self,
 441        table_id: int,
 442        row_id: int,
 443        user_field_names: bool,
 444        result_type: Optional[Type[T]] = None,
 445    ) -> T:
 446        """
 447        Fetch a single row/entry from the given table by the row ID.
 448
 449        Args:
 450            table_id (int): The ID of the table to be queried.
 451            row_id (int): The ID of the row to be returned.
 452            user_field_names (bool): When set to true, the fields in the
 453                provided data parameter are named according to their field
 454                names. Otherwise, the unique IDs of the fields will be used.
 455            result_type (Optional[Type[T]]): Which type will appear as an item
 456                in the result list and should be serialized accordingly. If set
 457                to None, Pydantic will attempt to serialize it to the standard
 458                types.
 459        """
 460        if result_type:
 461            model = result_type
 462        else:
 463            model = Any
 464        return await self._typed_request(
 465            "get",
 466            _url_join(
 467                self._url,
 468                API_PREFIX,
 469                "database/rows/table",
 470                str(table_id),
 471                str(row_id),
 472            ),
 473            model,
 474            params={"user_field_names": "true" if user_field_names else "false"}
 475        )
 476
 477    async def create_row(
 478        self,
 479        table_id: int,
 480        data: Union[T, dict[str, Any]],
 481        user_field_names: bool,
 482        before: Optional[int] = None,
 483    ) -> Union[T, MinimalRow]:
 484        """
 485        Creates a new row in the table with the given ID. The data can be
 486        provided either as a dictionary or as a Pydantic model. Please note that
 487        this method does not check whether the fields provided with `data`
 488        actually exist.
 489
 490        The return value depends on the `data` parameter: If a Pydantic model is
 491        passed, the return value is an instance of this model with the values as
 492        they are in the newly created row. If any arbitrary dictionary is
 493        passed, `MinimalRow` is returned, which contains only the ID field.
 494
 495        Args:
 496            table_id (int): The ID of the table where the new row should be
 497                created.
 498            data (Union[T, dict[str, Any]]): The data of the new row.
 499            user_field_names (bool): When set to true, the fields in the
 500                provided data parameter are named according to their field
 501                names. Otherwise, the unique IDs of the fields will be used.
 502            before (Optional[int], optional): If provided, the newly
 503                created row will be positioned before the row with the
 504                provided ID.
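
        Example (a sketch; the table ID and field name are hypothetical):

        ```python
        # A plain dict returns a MinimalRow containing only the new row's ID.
        new_row = await client.create_row(23, {"Name": "Ada"}, True)
        print(new_row.id)
        ```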
 505        """
 506        params: dict[str, str] = {
 507            "user_field_names": "true" if user_field_names else "false",
 508        }
 509        if before is not None:
 510            params["before"] = str(before)
 511
 512        if not isinstance(data, dict):
 513            json = data.model_dump(by_alias=True)
 514        else:
 515            json = data
 516
 517        return await self._typed_request(
 518            "post",
 519            _url_join(
 520                self._url,
 521                API_PREFIX,
 522                "database/rows/table",
 523                str(table_id),
 524            ),
 525            type(data) if not isinstance(data, dict) else MinimalRow,
 526            CONTENT_TYPE_JSON,
 527            params,
 528            json,
 529        )
 530
 531    async def create_rows(
 532        self,
 533        table_id: int,
 534        data: Union[list[T], list[dict[str, Any]]],
 535        user_field_names: bool,
 536        before: Optional[int] = None,
 537    ) -> Union[BatchResponse[T], BatchResponse[MinimalRow]]:
 538        """
 539        Creates one or multiple new rows in the table with the given ID using
 540        Baserow's batch functionality. The data can be provided either as a list
 541        of dictionaries or as Pydantic models. Please note that this method
 542        does not check whether the fields provided with `data` actually exist.
 543
 544        The return value depends on the `data` parameter: If a Pydantic model is
 545        passed, the return value is an instance of this model with the values as
 546        they are in the newly created row. If any arbitrary dictionary is
 547        passed, `MinimalRow` is returned, which contains only the ID field.
 548
 549        If the given list is empty, no call is executed; instead, an empty
 550        response is returned.
 551
 552        Args:
 553            table_id (int): The ID of the table where the new rows should be
 554                created.
 555            data (Union[list[T], list[dict[str, Any]]]): The data of the new
 556                rows.
 557            user_field_names (bool): When set to true, the fields in the
 558                provided data parameter are named according to their field
 559                names. Otherwise, the unique IDs of the fields will be used.
 560            before (Optional[int], optional): If provided, the newly
 561                created row will be positioned before the row with the
 562                provided ID.
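
        Example (a sketch; the table ID and field name are hypothetical):

        ```python
        rsp = await client.create_rows(
            23,
            [{"Name": "Ada"}, {"Name": "Grace"}],
            True,
        )
        # With plain dicts, the returned items are MinimalRow instances.
        print([item.id for item in rsp.items])
        ```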
 563        """
 564        if len(data) == 0:
 565            return BatchResponse(items=[])
 566        params: dict[str, str] = {
 567            "user_field_names": "true" if user_field_names else "false",
 568        }
 569        if before is not None:
 570            params["before"] = str(before)
 573        if not isinstance(data[0], dict):
 574            result_type = BatchResponse[type(data[0])]
 575        else:
 576            result_type = BatchResponse[MinimalRow]
 577        items: list[dict[str, Any]] = []
 578        for item in data:
 579            if not isinstance(item, dict):
 580                items.append(item.model_dump(by_alias=True))
 581            else:
 582                items.append(item)
 583        json = {"items": items}
 584        return await self._typed_request(
 585            "post",
 586            _url_join(
 587                self._url,
 588                API_PREFIX,
 589                "database/rows/table",
 590                str(table_id),
 591                "batch",
 592            ),
 593            result_type,
 594            CONTENT_TYPE_JSON,
 595            params,
 596            json,
 597        )
 598
 599    async def update_row(
 600        self,
 601        table_id: int,
 602        row_id: int,
 603        data: Union[T, dict[str, Any]],
 604        user_field_names: bool,
 605    ) -> Union[T, MinimalRow]:
 606        """
 607        Updates a row by its ID in the table with the given ID. The data can be
 608        provided either as a dictionary or as a Pydantic model. Please note that
 609        this method does not check whether the fields provided with `data`
 610        actually exist.
 611
 612        The return value depends on the `data` parameter: If a Pydantic model is
 613        passed, the return value is an instance of this model with the values as
 614        they are in the updated row. If any arbitrary dictionary is
 615        passed, `MinimalRow` is returned, which contains only the ID field.
 616
 617        Args:
 618            table_id (int): The ID of the table containing the row to be
 619                updated.
 620            row_id (int): The ID of the row which should be updated.
 621            data (Union[T, dict[str, Any]]): The updated data for the row.
 622            user_field_names (bool): When set to true, the fields in the
 623                provided data parameter are named according to their field
 624                names. Otherwise, the unique IDs of the fields will be used.
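
        Example (a sketch; the IDs and field name are hypothetical):

        ```python
        # Passing a dict returns a MinimalRow for the updated row.
        await client.update_row(23, 42, {"Name": "Ada Lovelace"}, True)
        ```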
 625        """
 626        params: dict[str, str] = {
 627            "user_field_names": "true" if user_field_names else "false",
 628        }
 629
 630        if not isinstance(data, dict):
 631            json = data.model_dump(by_alias=True)
 632        else:
 633            json = data
 634
 635        return await self._typed_request(
 636            "patch",
 637            _url_join(
 638                self._url,
 639                API_PREFIX,
 640                "database/rows/table",
 641                str(table_id),
 642                str(row_id),
 643            ),
 644            type(data) if not isinstance(data, dict) else MinimalRow,
 645            CONTENT_TYPE_JSON,
 646            params,
 647            json
 648        )
 649
 650    async def upload_file(self, file: BufferedReader) -> File:
 651        """
 652        Uploads a file to Baserow by uploading the file contents directly. The
 653        file is passed as a `BufferedReader`. So, a local file can be loaded
 654        using `open("my-file.ext", "rb")` and then passed to this method.
 655
 656        After the file is uploaded, it needs to be linked to a file field in
 657        the table row. To do this, use the Client.update_row() method and add
 658        either the complete field.File instance or simply its name
 659        (field.File.name, which is unique in any case) as a list item to the
 660        file field.
 661
 662        Example usage:
 663
 664        ```python
 665        with open("my-image.png", "rb") as file:
 666            rsl = await client.upload_file(file)
 667
 668        table_id = 23
 669        row_id = 42
 670        file_field_name = "Attachments"
 671        await client.update_row(
 672            table_id,
 673            row_id,
 674            {file_field_name: [{"name": rsl.name}]},
 675            True
 676        )
 677        ```
 678
 679        It's also possible to directly upload a file accessible via a public
 680        URL. For this purpose, use Client.upload_file_via_url().
 681
 682        Args:
 683            file (BufferedReader): A BufferedReader containing the file to be
 684                uploaded.
 685        """
 686        return await self._typed_request(
 687            "post",
 688            _url_join(
 689                self._url,
 690                API_PREFIX,
 691                "user-files/upload-file",
 692            ),
 693            File,
 694            data={"file": file},
 695        )
 696
 697    async def upload_file_via_url(self, url: str) -> File:
 698        """
 699        Uploads a file from a given URL to the storage of Baserow. The Baserow
 700        instance must have access to this URL.
 701
 702        After the file is uploaded, it needs to be linked to a file field in
 703        the table row. To do this, use the Client.update_row() method and add
 704        either the complete field.File instance or simply its name
 705        (field.File.name, which is unique in any case) as a list item to the
 706        file field.
 707
 708        Example usage:
 709
 710        ```python
 711        rsl = await client.upload_file_via_url("https://picsum.photos/500")
 712
 713        table_id = 23
 714        row_id = 42
 715        file_field_name = "Attachments"
 716        await client.update_row(
 717            table_id,
 718            row_id,
 719            {file_field_name: [{"name": rsl.name}]},
 720            True
 721        )
 722        ```
 723
 724        It's also possible to upload a locally available file. For this purpose,
 725        use `Client.upload_file()`.
 726
 727        Args:
 728            url (str): The URL of the file.
 729        """
 730        return await self._typed_request(
 731            "post",
 732            _url_join(
 733                self._url,
 734                API_PREFIX,
 735                "user-files/upload-via-url",
 736            ),
 737            File,
 738            CONTENT_TYPE_JSON,
 739            json={"url": url}
 740        )
 741
 742    async def delete_row(
 743        self,
 744        table_id: int,
 745        row_id: Union[int, list[int]],
 746    ):
 747        """
 748        Deletes a row with the given ID in the table with the given ID. It's
 749        also possible to delete more than one row simultaneously. For this, a
 750        list of IDs can be passed using the row_id parameter.
 751
 752        Args:
 753            table_id (int): The ID of the table where the row should be deleted.
 754            row_id (Union[int, list[int]]): The ID(s) of the row(s) which should
 755                be deleted.
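
        Example (a sketch; the IDs are hypothetical):

        ```python
        # Delete a single row ...
        await client.delete_row(23, 42)
        # ... or several rows at once via the batch-delete endpoint.
        await client.delete_row(23, [43, 44, 45])
        ```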
 756        """
 757        if isinstance(row_id, int):
 758            return await self._request(
 759                "delete",
 760                _url_join(
 761                    self._url,
 762                    API_PREFIX,
 763                    "database/rows/table",
 764                    str(table_id),
 765                    str(row_id),
 766                ),
 767                None,
 768            )
 769        return await self._request(
 770            "post",
 771            _url_join(
 772                self._url,
 773                API_PREFIX,
 774                "database/rows/table",
 775                str(table_id),
 776                "batch-delete",
 777            ),
 778            None,
 779            CONTENT_TYPE_JSON,
 780            None,
 781            {"items": row_id},
 782        )
 783
 784    @jwt_only
 785    async def list_database_tables(
 786        self,
 787        database_id: int,
 788    ) -> DatabaseTablesResponse:
 789        """
 790        Lists all tables in the database with the given ID. Please note
 791        that this method only works when access is
 792        through a JWT token, meaning login credentials are used for
 793        authentication. Additionally, the account being used must have access to
 794        the database/workspace.
 795
 796        Args:
 797            database_id (int): The ID of the database from which one wants to
 798                retrieve a listing of all tables. 
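
        Example (a sketch; the database ID is hypothetical, and the client
        must be authenticated with login credentials):

        ```python
        tables = await client.list_database_tables(17)
        for table in tables.root:
            print(table.id, table.name)
        ```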
 799        """
 800        return await self._typed_request(
 801            "get",
 802            _url_join(
 803                self._url,
 804                API_PREFIX,
 805                "database/tables/database",
 806                str(database_id),
 807            ),
 808            DatabaseTablesResponse,
 809        )
 810
 811    @jwt_only
 812    async def create_database_table(
 813        self,
 814        database_id: int,
 815        name: str,
 816        client_session_id: Optional[str] = None,
 817        client_undo_redo_action_group_id: Optional[str] = None,
 818    ) -> DatabaseTableResponse:
 819        """
 820        Synchronously creates a new table with the given name in the
 821        database specified by the database_id parameter.
 822
 823        Args:
 824            database_id (int): The ID of the database in which the new table
 825                should be created.
 826            name (str): Human readable name for the new table.
 827            client_session_id (str, optional): An optional UUID that marks
 828                the action performed by this request as having occurred in a
 829                particular client session. Then using the undo/redo endpoints
 830                with the same ClientSessionId header this action can be
 831                undone/redone.
 832            client_undo_redo_action_group_id (str, optional): An optional UUID
 833                that marks the action performed by this request as having
 834                occurred in a particular action group. Then calling the undo/redo
 835                endpoint with the same ClientSessionId header, all the actions
 836                belonging to the same action group can be undone/redone together
 837                in a single API call.
 838        """
 839        headers: dict[str, str] = CONTENT_TYPE_JSON.copy()
 840        if client_session_id:
 841            headers["ClientSessionId"] = client_session_id
 842        if client_undo_redo_action_group_id:
 843            headers["ClientUndoRedoActionGroupId"] = client_undo_redo_action_group_id
 844        return await self._typed_request(
 845            "post",
 846            _url_join(
 847                self._url,
 848                API_PREFIX,
 849                "database/tables/database",
 850                str(database_id),
 851            ),
 852            DatabaseTableResponse,
 853            headers=headers,
 854            json={"name": name},
 855        )
 856
 857    async def create_database_table_field(
 858        self,
 859        table_id: int,
 860        field: FieldConfigType,
 861        client_session_id: Optional[str] = None,
 862        client_undo_redo_action_group_id: Optional[str] = None,
 863    ) -> FieldConfig:
 864        """
 865        Adds a new field to a table specified by its ID.
 866
 867        Args:
 868            table_id (int): The ID of the table to be altered.
 869            field (FieldConfigType): The config of the field to be added.
 870            client_session_id (str, optional): An optional UUID that marks
 871                the action performed by this request as having occurred in a
 872                particular client session. Then using the undo/redo endpoints
 873                with the same ClientSessionId header this action can be
 874                undone/redone.
 875            client_undo_redo_action_group_id (str, optional): An optional UUID
 876                that marks the action performed by this request as having
 877                occurred in a particular action group. Then calling the undo/redo
 878                endpoint with the same ClientSessionId header, all the actions
 879                belonging to the same action group can be undone/redone together
 880                in a single API call.
 881        """
 882        headers: dict[str, str] = CONTENT_TYPE_JSON.copy()
 883        if client_session_id:
 884            headers["ClientSessionId"] = client_session_id
 885        if client_undo_redo_action_group_id:
 886            headers["ClientUndoRedoActionGroupId"] = client_undo_redo_action_group_id
 887        return await self._typed_request(
 888            "post",
 889            _url_join(
 890                self._url,
 891                API_PREFIX,
 892                "database/fields/table",
 893                str(table_id),
 894            ),
 895            FieldConfig,
 896            headers=headers,
 897            json=field.model_dump(),
 898        )
 899
 900    async def update_database_table_field(
 901        self,
 902        field_id: int,
 903        field: FieldConfigType | dict[str, Any],
 904        client_session_id: Optional[str] = None,
 905        client_undo_redo_action_group_id: Optional[str] = None,
 906    ) -> FieldConfig:
 907        """
 908        Updates a table field defined by its ID.
 909
 910        Args:
 911            field_id (int): The ID of the field to be updated.
 912            field (FieldConfigType | dict[str, Any]): The new config for the
 913                field.
 914            client_session_id (str, optional): An optional UUID that marks
 915                the action performed by this request as having occurred in a
 916                particular client session. Then using the undo/redo endpoints
 917                with the same ClientSessionId header this action can be
 918                undone/redone.
 919            client_undo_redo_action_group_id (str, optional): An optional UUID
 920                that marks the action performed by this request as having
 921                occurred in a particular action group. Then calling the undo/redo
 922                endpoint with the same ClientSessionId header, all the actions
 923                belonging to the same action group can be undone/redone together
 924                in a single API call.
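
        Example (a sketch; the field ID is hypothetical, and the dict form
        assumes the field-update endpoint accepts a partial payload such as a
        new name):

        ```python
        updated = await client.update_database_table_field(4711, {"name": "Amount"})
        ```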
 925        """
 926        headers: dict[str, str] = CONTENT_TYPE_JSON.copy()
 927        if client_session_id:
 928            headers["ClientSessionId"] = client_session_id
 929        if client_undo_redo_action_group_id:
 930            headers["ClientUndoRedoActionGroupId"] = client_undo_redo_action_group_id
 931
 932        if not isinstance(field, dict):
 933            json = field.model_dump(by_alias=True)
 934        else:
 935            json = field
 936
 937        return await self._typed_request(
 938            "patch",
 939            _url_join(
 940                self._url,
 941                API_PREFIX,
 942                "database/fields",
 943                str(field_id),
 944            ),
 945            FieldConfig,
 946            headers=headers,
 947            json=json,
 948        )
 949
 950    async def delete_database_table_field(
 951        self,
 952        field_id: int,
 953        client_session_id: Optional[str] = None,
 954        client_undo_redo_action_group_id: Optional[str] = None,
 955    ):
 956        """
 957        Deletes a table field defined by its ID.
 958
 959        Args:
 960            field_id (int): The ID of the field to be deleted.
 961            client_session_id (str, optional): An optional UUID that marks
 962                the action performed by this request as having occurred in a
 963                particular client session. Then using the undo/redo endpoints
 964                with the same ClientSessionId header this action can be
 965                undone/redone.
 966            client_undo_redo_action_group_id (str, optional): An optional UUID
 967                that marks the action performed by this request as having
 968                occurred in a particular action group. Then calling the undo/redo
 969                endpoint with the same ClientSessionId header, all the actions
 970                belonging to the same action group can be undone/redone together
 971                in a single API call.
 972        """
 973        headers: dict[str, str] = CONTENT_TYPE_JSON.copy()
 974        if client_session_id:
 975            headers["ClientSessionId"] = client_session_id
 976        if client_undo_redo_action_group_id:
 977            headers["ClientUndoRedoActionGroupId"] = client_undo_redo_action_group_id
 978        await self._request(
 979            "DELETE",
 980            _url_join(
 981                self._url,
 982                API_PREFIX,
 983                "database/fields",
 984                str(field_id),
 985            ),
 986            None,
 987            headers=headers,
 988        )
 989
 990    async def close(self):
 991        """
 992        Closes the client's HTTP session. Afterwards, the client object
 993        cannot be used anymore. Explicitly and manually closing the session
 994        is only necessary when the client object was instantiated
 995        directly.
 996        """
 997        await self._session.close()
 998
 999    async def __get_jwt_access_token(self) -> str:
1000        if self._email is None or self._password is None:
1001            raise ValueError("email and password have to be set")
1002        if self.__jwt_access_token is None or self.__jwt_refresh_token is None or self.__jwt_token_age is None:
1003            # Need to initialize the token.
1004            rsp = await self.token_auth(self._email, self._password)
1005            self.__jwt_access_token = rsp.access_token
1006            self.__jwt_refresh_token = rsp.refresh_token
1007            self.__jwt_token_age = datetime.now()
1008        elif self.__jwt_token_age < datetime.now() - timedelta(minutes=10):
1009            # Token has to be refreshed.
1010            rsp = await self.token_refresh(self.__jwt_refresh_token)
1011            self.__jwt_access_token = rsp.access_token
1012            self.__jwt_token_age = datetime.now()
1013        return self.__jwt_access_token
1014
1015    async def __headers(
1016        self,
1017        parts: Optional[dict[str, str]],
1018    ) -> dict[str, str]:
1019        # if self.__headers_cache is not None:
1020        #     if parts is not None:
1021        #         rsl = self.__headers_cache.copy()
1022        #         rsl.update(parts)
1023        #         return rsl
1024        #     return self.__headers_cache
1025        rsl: dict[str, str] = {}
1026        if parts is not None:
1027            rsl = parts
1028
1029        if self._token:
1030            token = f"Token {self._token}"
1031        elif self._email and self._password:
1032            access_token = await self.__get_jwt_access_token()
1033            token = f"JWT {access_token}"
1034        else:
1035            raise RuntimeError("logic error, shouldn't be possible")
1036
1037        rsl["Authorization"] = token
1038        return rsl
1039
1040    async def _typed_request(
1041        self,
1042        method: str,
1043        url: str,
1044        result_type: Optional[Type[T]],
1045        headers: Optional[dict[str, str]] = None,
1046        params: Optional[dict[str, str]] = None,
1047        json: Optional[dict[str, Any]] = None,
1048        data: Optional[dict[str, Any]] = None,
1049        use_default_headers: bool = True,
1050    ) -> T:
1051        """
1052        Wrap the _request method for all cases where the return value of the
1053        request must not be None under any circumstances. If it is, a ValueError
1054        will be raised.
1055        """
1056        rsl = await self._request(
1057            method,
1058            url,
1059            result_type,
1060            headers,
1061            params,
1062            json,
1063            data,
1064            use_default_headers,
1065        )
1066        if not rsl:
1067            raise ValueError("request result shouldn't be None")
1068        return rsl
1069
1070    async def _request(
1071        self,
1072        method: str,
1073        url: str,
1074        result_type: Optional[Type[T]],
1075        headers: Optional[dict[str, str]] = None,
1076        params: Optional[dict[str, str]] = None,
1077        json: Optional[dict[str, Any]] = None,
1078        data: Optional[dict[str, Any]] = None,
1079        use_default_headers: bool = True,
1080    ) -> Optional[T]:
1081        """
1082        Handles the actual HTTP request.
1083
1084        Args:
1085            result_type (Type[T]): The Pydantic model used to validate the
1086                JSON response body. If set to None, the response body is not
1087                parsed into a model and the method
1088                returns None.
1089        """
1090        if use_default_headers:
1091            headers = await self.__headers(headers)
1092        else:
1093            headers = {}
1094        async with self._session.request(
1095            method,
1096            url,
1097            headers=headers,
1098            params=params,
1099            json=json,
1100            data=data,
1101        ) as rsp:
1102            if rsp.status == 400:
1103                err = ErrorResponse.model_validate_json(await rsp.text())
1104                raise BaserowError(rsp.status, err.error, err.detail)
1105            if rsp.status == 204:
1106                return None
1107            if rsp.status != 200:
1108                raise UnspecifiedBaserowError(rsp.status, await rsp.text())
1109            body = await rsp.text()
1110            if result_type is not None:
1111                rsl = result_type.model_validate_json(body)
1112                return rsl
1113            return None
1114
1115
1116class GlobalClient(Client):
1117    """
1118    The singleton version of the client encapsulates the client in a singleton.
1119    The parameters (URL and access tokens) can be configured independently of
1120    the actual instantiation.
1121
1122    Unless specified otherwise, this singleton is used by all table instances
1123    for accessing Baserow.
1124
1125    This is helpful in systems where the client can be configured once at
1126    program start (e.g., in the `__main__.py`) based on the settings file and
1127    then used throughout the program without specifying additional parameters.
1128    The Singleton pattern ensures that only one instance of the client is used
1129    throughout the entire program, thereby maintaining full control over the
1130    `aiohttp.ClientSession`.
1131
1132    The configuration can be done either directly in the code using
1133    `GlobalClient.configure()` or from a JSON file using
1134    `GlobalClient.from_file()`.
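
    A short usage sketch (host, token, and table ID are placeholders):

    ```python
    from baserow.client import GlobalClient

    # Configure once at program start ...
    GlobalClient.configure("baserow.example.com", token="<API-TOKEN>")

    # ... then obtain the singleton instance anywhere in the program.
    client = GlobalClient()
    rows = await client.list_table_rows(23, True)
    ```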
1135    """
1136    _instance: Optional[Client] = None
1137    _is_initialized: bool = False
1138    is_configured: bool = False
1139    __url: str = ""
1140    __token: Optional[str] = None
1141    __email: Optional[str] = None
1142    __password: Optional[str] = None
1143
1144    def __new__(cls):
1145        if not cls.is_configured:
1146            raise PackageClientNotConfiguredError
1147        if cls._instance is None:
1148            cls._instance = super().__new__(cls)
1149        return cls._instance
1150
1151    def __init__(self):
1152        if not self._is_initialized:
1153            super().__init__(
1154                self.__url,
1155                token=self.__token,
1156                email=self.__email,
1157                password=self.__password,
1158            )
1159            self._is_initialized = True
1160
1161    @classmethod
1162    def configure(
1163        cls,
1164        url: str,
1165        token: Optional[str] = None,
1166        email: Optional[str] = None,
1167        password: Optional[str] = None,
1168    ):
1169        """
1170        Set the URL and token before the first use of the client.
1171
1172        Args:
1173            token (str, optional): An access token (referred to as a database token
1174                in Baserow's documentation) can be generated in the user settings
1175                within Baserow.
1176            email (str, optional): Email address of a Baserow user for the JWT
1177                authentication.
1178            password (str, optional): Password of a Baserow user for the JWT
1179                authentication.
1180        """
1181        if cls.is_configured:
1182            raise PackageClientAlreadyConfiguredError(cls.__url, url)
1183        cls.__url = url
1184        cls.__token = token
1185        cls.__email = email
1186        cls.__password = password
1187        cls.is_configured = True
1188
1189    @classmethod
1190    def from_file(cls, path: str):
1191        """
1192        Attempts to load the client configuration from the given JSON file. As
1193        with `GlobalClient.configure()`, it is only possible to use either the
1194        token or the login credentials. Structure of the config file for Token:
1195
1196        ```json
1197        {
1198            "url": "https://your.baserow.instance",
1199            "token": "your_token_here"
1200        }
1201        ```
1202
1203        For JWT authentication using login credentials:
1204
1205        ```json
1206        {
1207            "url": "https://your.baserow.instance",
1208            "email": "your-login-mail@example.com",
1209            "password": "your-secret-password"
1210        }
1211        ```
1212
1213        Args:
1214            path: Path to input JSON-file.
1215        """
1216        with open(path, "r") as f:
1217            cfg = json.load(f)
1218
1219        cls.configure(
1220            cfg["url"],
1221            token=cfg["token"] if "token" in cfg else None,
1222            email=cfg["email"] if "email" in cfg else None,
1223            password=cfg["password"] if "password" in cfg else None,
1224        )
API_PREFIX: str = 'api'

URL prefix for all API call URLs.

CONTENT_TYPE_JSON: dict[str, str] = {'Content-Type': 'application/json'}

HTTP Header when content type is JSON.

class RowResponse(pydantic.main.BaseModel, typing.Generic[~T]):
46class RowResponse(BaseModel, Generic[T]):
47    """The return object of list row API calls."""
48    count: int
49    next: Optional[str]
50    previous: Optional[str]
51    results: list[T]

The return object of list row API calls.

count: int
next: Optional[str]
previous: Optional[str]
results: list[~T]
Inherited Members
pydantic.main.BaseModel
BaseModel
model_config
model_extra
model_fields_set
model_construct
model_copy
model_dump
model_dump_json
model_json_schema
model_parametrized_name
model_post_init
model_rebuild
model_validate
model_validate_json
model_validate_strings
dict
json
parse_obj
parse_raw
parse_file
from_orm
construct
copy
schema
schema_json
validate
update_forward_refs
model_fields
model_computed_fields
class MinimalRow(pydantic.main.BaseModel):
54class MinimalRow(BaseModel):
55    """The minimal result items of a `RowResponse`."""
56    id: int

The minimal result items of a RowResponse.

id: int
model_config = {}
model_fields = {'id': FieldInfo(annotation=int, required=True)}
model_computed_fields = {}
Inherited Members
pydantic.main.BaseModel
BaseModel
model_extra
model_fields_set
model_construct
model_copy
model_dump
model_dump_json
model_json_schema
model_parametrized_name
model_post_init
model_rebuild
model_validate
model_validate_json
model_validate_strings
dict
json
parse_obj
parse_raw
parse_file
from_orm
construct
copy
schema
schema_json
validate
update_forward_refs
class TokenResponse(pydantic.main.BaseModel):
59class TokenResponse(BaseModel):
60    """Result of an authentication token call."""
61    user: Any
62    access_token: str
63    refresh_token: str

Result of an authentication token call.

user: Any
access_token: str
refresh_token: str
model_config = {}
model_fields = {'user': FieldInfo(annotation=Any, required=True), 'access_token': FieldInfo(annotation=str, required=True), 'refresh_token': FieldInfo(annotation=str, required=True)}
model_computed_fields = {}
Inherited Members
pydantic.main.BaseModel
BaseModel
model_extra
model_fields_set
model_construct
model_copy
model_dump
model_dump_json
model_json_schema
model_parametrized_name
model_post_init
model_rebuild
model_validate
model_validate_json
model_validate_strings
dict
json
parse_obj
parse_raw
parse_file
from_orm
construct
copy
schema
schema_json
validate
update_forward_refs
class TokenRefresh(pydantic.main.BaseModel):
66class TokenRefresh(BaseModel):
67    """Response from a token refresh call."""
68    user: Any
69    access_token: str

Response from a token refresh call.

user: Any
access_token: str
model_config = {}
model_fields = {'user': FieldInfo(annotation=Any, required=True), 'access_token': FieldInfo(annotation=str, required=True)}
model_computed_fields = {}
Inherited Members
pydantic.main.BaseModel
BaseModel
model_extra
model_fields_set
model_construct
model_copy
model_dump
model_dump_json
model_json_schema
model_parametrized_name
model_post_init
model_rebuild
model_validate
model_validate_json
model_validate_strings
dict
json
parse_obj
parse_raw
parse_file
from_orm
construct
copy
schema
schema_json
validate
update_forward_refs
class DatabaseTableResponse(pydantic.main.BaseModel):
72class DatabaseTableResponse(BaseModel):
73    """Describes a table within a database in Baserow."""
74    id: int
75    name: str
76    order: int
77    database_id: int

Describes a table within a database in Baserow.

id: int
name: str
order: int
database_id: int
model_config = {}
model_fields = {'id': FieldInfo(annotation=int, required=True), 'name': FieldInfo(annotation=str, required=True), 'order': FieldInfo(annotation=int, required=True), 'database_id': FieldInfo(annotation=int, required=True)}
model_computed_fields = {}
Inherited Members
pydantic.main.BaseModel
BaseModel
model_extra
model_fields_set
model_construct
model_copy
model_dump
model_dump_json
model_json_schema
model_parametrized_name
model_post_init
model_rebuild
model_validate
model_validate_json
model_validate_strings
dict
json
parse_obj
parse_raw
parse_file
from_orm
construct
copy
schema
schema_json
validate
update_forward_refs
class DatabaseTablesResponse(pydantic.main.BaseModel, typing.Generic[~RootModelRootType]):
80class DatabaseTablesResponse(RootModel[list[DatabaseTableResponse]]):
81    """Contains all tables for a database in Baserow."""
82    root: list[DatabaseTableResponse]

Contains all tables for a database in Baserow.

root: list[DatabaseTableResponse]
Inherited Members
pydantic.main.BaseModel
BaseModel
model_config
model_extra
model_fields_set
model_construct
model_copy
model_dump
model_dump_json
model_json_schema
model_parametrized_name
model_post_init
model_rebuild
model_validate
model_validate_json
model_validate_strings
dict
json
parse_obj
parse_raw
parse_file
from_orm
construct
copy
schema
schema_json
validate
update_forward_refs
model_fields
model_computed_fields
class FieldResponse(pydantic.main.BaseModel, typing.Generic[~RootModelRootType]):
85class FieldResponse(RootModel[list[FieldConfig]]):
86    """
87    The response for the list field call. Contains all fields of a table.
88    """
89    root: list[FieldConfig]

The response for the list field call. Contains all fields of a table.

Inherited Members
pydantic.main.BaseModel
BaseModel
model_config
model_extra
model_fields_set
model_construct
model_copy
model_dump
model_dump_json
model_json_schema
model_parametrized_name
model_post_init
model_rebuild
model_validate
model_validate_json
model_validate_strings
dict
json
parse_obj
parse_raw
parse_file
from_orm
construct
copy
schema
schema_json
validate
update_forward_refs
model_fields
model_computed_fields
class BatchResponse(pydantic.main.BaseModel, typing.Generic[~A]):
92class BatchResponse(BaseModel, Generic[A]):
93    """
94    Response for batch mode. The results of a batch call are encapsulated in
95    this response.
96    """
97    items: list[A]

Response for batch mode. The results of a batch call are encapsulated in this response.

items: list[~A]
Inherited Members
pydantic.main.BaseModel
BaseModel
model_config
model_extra
model_fields_set
model_construct
model_copy
model_dump
model_dump_json
model_json_schema
model_parametrized_name
model_post_init
model_rebuild
model_validate
model_validate_json
model_validate_strings
dict
json
parse_obj
parse_raw
parse_file
from_orm
construct
copy
schema
schema_json
validate
update_forward_refs
model_fields
model_computed_fields
class ErrorResponse(pydantic.main.BaseModel):
100class ErrorResponse(BaseModel):
101    """
102    The return object from Baserow when the request was unsuccessful. Contains
103    information about the reasons for the failure.
104    """
105    error: str
106    """Short error enum."""
107    detail: Any
108    """Additional information on the error."""

The return object from Baserow when the request was unsuccessful. Contains information about the reasons for the failure.

error: str

Short error enum.

detail: Any

Additional information on the error.

model_config = {}
model_fields = {'error': FieldInfo(annotation=str, required=True), 'detail': FieldInfo(annotation=Any, required=True)}
model_computed_fields = {}
class AuthMethod(builtins.int, enum.Enum):
111class AuthMethod(int, enum.Enum):
112    """
113    Differentiates between the two authentication methods for the client.
114    Internal use only. For more information on the two different authentication
115    methods, refer to the documentation for the `Client` class.
116    """
117
118    DATABASE_TOKEN = 0
119    """Authentication with the database token."""
120    JWT = 1
121    """Authentication with user credentials."""

Differentiates between the two authentication methods for the client. Internal use only. For more information on the two different authentication methods, refer to the documentation for the Client class.

DATABASE_TOKEN = <AuthMethod.DATABASE_TOKEN: 0>

Authentication with the database token.

JWT = <AuthMethod.JWT: 1>

Authentication with user credentials.

def jwt_only(func):
124def jwt_only(func):
125    """
126    Decorator for operations that can only be executed with a JWT token
127    (authenticated via login credentials). If a database token is used,
128    `JWTAuthRequiredError` is raised.
129    """
130
131    @wraps(func)
132    def wrapper(self, *args, **kwargs):
133        if self._auth_method is not AuthMethod.JWT:
134            raise JWTAuthRequiredError(func.__name__)
135        return func(self, *args, **kwargs)
136    return wrapper

Decorator for operations that can only be executed with a JWT token (authenticated via login credentials). If a database token is used, JWTAuthRequiredError is raised.
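
For instance (a minimal sketch, assuming an async context and a placeholder database ID), calling a JWT-only method such as Client.list_database_tables() on a client that was configured with a database token raises this error:

from baserow import Client
from baserow.error import JWTAuthRequiredError

client = Client("baserow.example.com", token="<API-TOKEN>")
try:
    await client.list_database_tables(42)  # 42 is a placeholder database ID
except JWTAuthRequiredError:
    # Raised because the client authenticates with a database token instead
    # of login credentials (JWT).
    print("this operation needs JWT authentication")
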

class Client:
 139class Client:
 140    """
 141    This class manages interaction with the Baserow server via HTTP using REST
 142    API calls. Access to the Baserow API requires authentication, and there are
 143    two methods available: Database Tokens and JWT Tokens.
 144
 145    **Database Tokens:** These tokens are designed for delivering data to
 146    frontends and, as such, can only perform CRUD (Create, Read, Update, Delete)
 147    operations on a database. New tokens can be created in the User Settings,
 148    where their permissions can also be configured. For instance, it is possible
 149    to create a token that only allows reading. These tokens have unlimited
 150    validity.
 151
 152    ```python
 153    from baserow import Client
 154
 155    client = Client("baserow.example.com", token="<API-TOKEN>")
 156    ```
 157
 158    **JWT Tokens:** All other functionalities require a JWT token, which can be
 159    obtained by providing login credentials (email address and password) to the
 160    Baserow API. These tokens have a limited lifespan of 10 minutes and will be
 161    refreshed if needed.
 162
 163    ```python
 164    from baserow import Client
 165
 166    client = Client(
 167        "baserow.example.com",
 168        email="baserow.user@example.com",
 169        password="<PASSWORD>",
 170    )
 171    ```
 172
 173    **Singleton/Global Client.** In many applications, maintaining a consistent
 174    connection to a single Baserow instance throughout the runtime is crucial.
 175    To facilitate this, the package provides a Global Client, which acts as a
 176    singleton. This means the client needs to be configured just once using
 177    GlobalClient.configure(). After this initial setup, the Global Client can be
 178    universally accessed and used throughout the program.
 179
 180    When utilizing the ORM functionality of the table models, all methods within
 181    the `Table` models inherently use this Global Client. Please note that the
 182    Global Client can only be configured once. Attempting to call the
 183    GlobalClient.configure() method more than once will result in an exception.
 184
 185    ```python
 186    # Either configure the global client with a database token...
 187    GlobalClient.configure("baserow.example.com", token="<API-TOKEN>")
 188
 189    # ...or with the login credentials (email and password).
 190    GlobalClient.configure(
 191        "baserow.example.com",
 192        email="baserow.user@example.com",
 193        password="<PASSWORD>",
 194    )
 195    ```
 196
 197    This client can also be used directly, without utilizing the ORM
 198    functionality of the package.
 199
 200    Args:
 201        url (str): The base URL of the Baserow instance.
 202        token (str, optional): An access token (referred to as a database
 203            token in Baserow's documentation) can be generated in the user
 204            settings within Baserow.
 205        email (str, optional): Email address of a Baserow user for the JWT
 206            authentication.
 207        password (str, optional): Password of a Baserow user for the JWT
 208            authentication.
 209    """
 210
 211    def __init__(
 212        self,
 213        url: str,
 214        token: Optional[str] = None,
 215        email: Optional[str] = None,
 216        password: Optional[str] = None,
 217    ):
 218        if not token and not email and not password:
 219            raise ValueError(
 220                "you must either provide a database token or the login credentials (email, password) of a user"
 221            )
 222        if token and (email or password):
 223            raise ValueError(
 224                "passing parameters for both types of authentication (database token and login credentials) simultaneously is not allowed"
 225            )
 226        if not token and (not email or not password):
 227            missing = "email" if not email else "password"
 228            raise ValueError(
 229                f"""incomplete authentication with login credentials, {
 230                    missing} is missing"""
 231            )
 232        self._url = url
 233        self._token = token
 234        self._email = email
 235        self._password = password
 236        self._session: aiohttp.ClientSession = aiohttp.ClientSession()
 237        self._auth_method = AuthMethod.DATABASE_TOKEN if token else AuthMethod.JWT
 238        # Cache is only accessed by __header() method.
 239        self.__headers_cache: Optional[dict[str, str]] = None
 240        self.__jwt_access_token: Optional[str] = None
 241        self.__jwt_refresh_token: Optional[str] = None
 242        self.__jwt_token_age: Optional[datetime] = None
 243
 244    async def token_auth(self, email: str, password: str) -> TokenResponse:
 245        """
 246        Authenticates an existing user based on their email and their password.
 247        If successful, an access token will be returned.
 248
 249        This method is designed to function even with a partially initialized
 250        instance, as it's used for optional JWT token retrieval during class
 251        initialization.
 252
 253        Args:
 254            email (str): Email address of a Baserow user for the JWT
 255                authentication.
 256            password (str): Password of a Baserow user for the JWT
 257                authentication.
 260        """
 261        return await self._typed_request(
 262            "post",
 263            _url_join(self._url, API_PREFIX, "user/token-auth"),
 264            TokenResponse,
 265            headers=CONTENT_TYPE_JSON,
 266            json={"email": email, "password": password},
 267            use_default_headers=False,
 268        )
 269
 270    async def token_refresh(self, refresh_token: str) -> TokenRefresh:
 271        """
 272        Generate a new JWT access_token that can be used to continue operating
 273        on Baserow starting from a valid refresh token. The initial JWT access
 274        and refresh token can be generated using `Client.token_auth()`.
 275
 276        Args:
 277            refresh_token: The JWT refresh token obtained by
 278                `Client.token_auth()`.
 279        """
 280        return await self._typed_request(
 281            "post",
 282            _url_join(self._url, API_PREFIX, "user/token-refresh"),
 283            TokenRefresh,
 284            headers=CONTENT_TYPE_JSON,
 285            json={"refresh_token": refresh_token},
 286            use_default_headers=False,
 287        )
 288
 289    async def list_table_rows(
 290        self,
 291        table_id: int,
 292        user_field_names: bool,
 293        result_type: Optional[Type[T]] = None,
 294        filter: Optional[Filter] = None,
 295        order_by: Optional[list[str]] = None,
 296        page: Optional[int] = None,
 297        size: Optional[int] = None,
 298    ) -> RowResponse[T]:
 299        """
 300        Lists rows in the table with the given ID. Note that Baserow uses
 301        paging. If all rows of a table are needed, the
 302        Client.list_all_table_rows method can be used.
 303
 304        Args:
 305            table_id (int): The ID of the table to be queried.
 306            user_field_names (bool): When set to true, the returned fields will
 307                be named according to their field names. Otherwise, the unique
 308                IDs of the fields will be used.
 309            result_type (Optional[Type[T]]): Which type will appear as an item
 310                in the result list and should be serialized accordingly. If set
 311                to None, Pydantic will attempt to serialize it to the standard
 312                types.
 313            filter (Optional[Filter], optional): Allows the dataset to be
 314                filtered.
 315            order_by (Optional[list[str]], optional): A list of field names/IDs
 316                by which the result should be sorted. If the field name is
 317                prepended with a +, the sorting is ascending; if with a -, it is
 318                descending.
 319            page (Optional[int], optional): The page of the paging.
 320            size (Optional[int], optional): How many records should be returned
 321                at max. Defaults to 100 and cannot exceed 200.
 322        """
 323        params: dict[str, str] = {
 324            "user_field_names": "true" if user_field_names else "false",
 325        }
 326        if filter is not None:
 327            params["filters"] = filter.model_dump_json(by_alias=True)
 328        if order_by is not None:
 329            params["order_by"] = _list_to_str(order_by)
 330        if page is not None:
 331            params["page"] = str(page)
 332        if size is not None:
 333            params["size"] = str(size)
 334        url = _url_join(
 335            self._url, API_PREFIX,
 336            "database/rows/table",
 337            str(table_id),
 338        )
 339        if result_type:
 340            model = RowResponse[result_type]
 341        else:
 342            model = RowResponse[Any]
 343        return await self._typed_request("get", url, model, params=params)
 344
 345    async def list_all_table_rows(
 346        self,
 347        table_id: int,
 348        user_field_names: bool,
 349        result_type: Optional[Type[T]] = None,
 350        filter: Optional[Filter] = None,
 351        order_by: Optional[list[str]] = None,
 352    ) -> RowResponse[T]:
 353        """
 354        Since Baserow uses paging, this method sends as many requests to Baserow
 355        as needed until all rows are received. This function should only be used
 356        when all data is truly needed. This should be a rare occurrence, as
 357        filtering can occur on Baserow's side using the filter parameter.
 358
 359        Args:
 360            table_id (int): The ID of the table to be queried.
 361            user_field_names (bool): When set to true, the returned fields will
 362                be named according to their field names. Otherwise, the unique
 363                IDs of the fields will be used.
 364            result_type (Optional[Type[T]]): Which type will appear as an item
 365                in the result list and should be serialized accordingly. If set
 366                to None, Pydantic will attempt to serialize it to the standard
 367                types.
 368            filter (Optional[Filter], optional): Allows the dataset to be
 369                filtered.
 370            order_by (Optional[list[str]], optional): A list of field names/IDs
 371                by which the result should be sorted. If the field name is
 372                prepended with a +, the sorting is ascending; if with a -, it is
 373                descending.
 374        """
 375        count: int = await self.table_row_count(table_id)
 376        total_calls = (count + 200 - 1) // 200
 377
 378        requests = []
 379        for page in range(1, total_calls+1):
 380            rqs = asyncio.create_task(
 381                self.list_table_rows(
 382                    table_id,
 383                    user_field_names,
 384                    result_type=result_type,
 385                    filter=filter,
 386                    order_by=order_by,
 387                    page=page,
 388                    size=200,
 389                )
 390            )
 391            requests.append(rqs)
 392        responses = await asyncio.gather(*requests)
 393
 394        rsl: Optional[RowResponse[T]] = None
 395        for rsp in responses:
 396            if rsl is None:
 397                rsl = rsp
 398            else:
 399                rsl.results.extend(rsp.results)
 400        if rsl is None:
 401            return RowResponse(
 402                count=0,
 403                previous=None,
 404                next=None,
 405                results=[],
 406            )
 407        return rsl
 408
 409    async def table_row_count(self, table_id: int, filter: Optional[Filter] = None) -> int:
 410        """
 411        Determines how many rows or records are present in the table with the
 412        given ID. Filters can be optionally passed as parameters.
 413
 414        Args:
 415            table_id (int): The ID of the table to be queried.
 416            filter (Optional[Filter], optional): Allows the dataset to be
 417                filtered. Only rows matching the filter will be counted.
 418        """
 419        rsl = await self.list_table_rows(table_id, True, filter=filter, size=1)
 420        return rsl.count
 421
 422    async def list_fields(self, table_id: int) -> FieldResponse:
 423        """
 424        Lists all fields (»columns«) of a table.
 425
 426        Args:
 427            table_id (int): The ID of the table to be queried.
 428        """
 429        return await self._typed_request(
 430            "get",
 431            _url_join(
 432                self._url,
 433                API_PREFIX,
 434                "database/fields/table/",
 435                str(table_id),
 436            ),
 437            FieldResponse,
 438        )
 439
 440    async def get_row(
 441        self,
 442        table_id: int,
 443        row_id: int,
 444        user_field_names: bool,
 445        result_type: Optional[Type[T]] = None,
 446    ) -> T:
 447        """
 448        Fetch a single row/entry from the given table by the row ID.
 449
 450        Args:
 451            table_id (int): The ID of the table to be queried.
 452            row_id (int): The ID of the row to be returned.
 453            user_field_names (bool): When set to true, the fields in the
 454                provided data parameter are named according to their field
 455                names. Otherwise, the unique IDs of the fields will be used.
 456            result_type (Optional[Type[T]]): Which type will appear as an item
 457                in the result list and should be serialized accordingly. If set
 458                to None, Pydantic will attempt to serialize it to the standard
 459                types.
 460        """
 461        if result_type:
 462            model = result_type
 463        else:
 464            model = Any
 465        return await self._typed_request(
 466            "get",
 467            _url_join(
 468                self._url,
 469                API_PREFIX,
 470                "database/rows/table",
 471                str(table_id),
 472                str(row_id),
 473            ),
 474            model,
 475            params={"user_field_names": "true" if user_field_names else "false"}
 476        )
 477
 478    async def create_row(
 479        self,
 480        table_id: int,
 481        data: Union[T, dict[str, Any]],
 482        user_field_names: bool,
 483        before: Optional[int] = None,
 484    ) -> Union[T, MinimalRow]:
 485        """
 486        Creates a new row in the table with the given ID. The data can be
 487        provided either as a dictionary or as a Pydantic model. Please note that
 488        this method does not check whether the fields provided with `data`
 489        actually exist.
 490
 491        The return value depends on the `data` parameter: If a Pydantic model is
 492        passed, the return value is an instance of this model with the values as
 493        they are in the newly created row. If any arbitrary dictionary is
 494        passed, `MinimalRow` is returned, which contains only the ID field.
 495
 496        Args:
 497            table_id (int): The ID of the table where the new row should be
 498                created.
 499            data (Union[T, dict[str, Any]]): The data of the new row.
 500            user_field_names (bool): When set to true, the fields in the
 501                provided data parameter are named according to their field
 502                names. Otherwise, the unique IDs of the fields will be used.
 503            before (Optional[int], optional):  If provided then the newly
 504                created row will be positioned before the row with the provided
 505                id. 
 506        """
 507        params: dict[str, str] = {
 508            "user_field_names": "true" if user_field_names else "false",
 509        }
 510        if before is not None:
 511            params["before"] = str(before)
 512
 513        if not isinstance(data, dict):
 514            json = data.model_dump(by_alias=True)
 515        else:
 516            json = data
 517
 518        return await self._typed_request(
 519            "post",
 520            _url_join(
 521                self._url,
 522                API_PREFIX,
 523                "database/rows/table",
 524                str(table_id),
 525            ),
 526            type(data) if not isinstance(data, dict) else MinimalRow,
 527            CONTENT_TYPE_JSON,
 528            params,
 529            json,
 530        )
 531
 532    async def create_rows(
 533        self,
 534        table_id: int,
 535        data: Union[list[T], list[dict[str, Any]]],
 536        user_field_names: bool,
 537        before: Optional[int] = None,
 538    ) -> Union[BatchResponse[T], BatchResponse[MinimalRow]]:
 539        """
 540        Creates one or multiple new row(s) in the table with the given ID using
 541        Baserow's batch functionality. The data can be provided either as a list
 542        of dictionaries or as Pydantic models. Please note that this method
 543        does not check whether the fields provided with `data` actually exist.
 544
 545        The return value depends on the `data` parameter: If a Pydantic model is
 546        passed, the return value is an instance of this model with the values as
 547        they are in the newly created row. If any arbitrary dictionary is
 548        passed, `MinimalRow` is returned, which contains only the ID field.
 549
 550        If the given list is empty, no call is executed; instead, an empty
 551        response is returned.
 552
 553        Args:
 554            table_id (int): The ID of the table where the new row should be
 555                created.
 556            data (Union[list[T], list[dict[str, Any]]]): The data of the new
 557                row.
 558            user_field_names (bool): When set to true, the fields in the
 559                provided data parameter are named according to their field
 560                names. Otherwise, the unique IDs of the fields will be used.
 561            before (Optional[int], optional):  If provided then the newly
 562                created row will be positioned before the row with the provided
 563                id. 
 564        """
 565        if len(data) == 0:
 566            return BatchResponse(items=[])
 567        params: dict[str, str] = {
 568            "user_field_names": "true" if user_field_names else "false",
 569        }
 570        if before is not None:
 571            params["before"] = str(before)
 572        if len(data) == 0:
 573            raise ValueError("data parameter cannot be empty list")
 574        if not isinstance(data[0], dict):
 575            result_type = BatchResponse[type(data[0])]
 576        else:
 577            result_type = BatchResponse[MinimalRow]
 578        items: list[dict[str, Any]] = []
 579        for item in data:
 580            if not isinstance(item, dict):
 581                items.append(item.model_dump(by_alias=True))
 582            else:
 583                items.append(item)
 584        json = {"items": items}
 585        return await self._typed_request(
 586            "post",
 587            _url_join(
 588                self._url,
 589                API_PREFIX,
 590                "database/rows/table",
 591                str(table_id),
 592                "batch",
 593            ),
 594            result_type,
 595            CONTENT_TYPE_JSON,
 596            params,
 597            json,
 598        )
 599
 600    async def update_row(
 601        self,
 602        table_id: int,
 603        row_id: int,
 604        data: Union[T, dict[str, Any]],
 605        user_field_names: bool,
 606    ) -> Union[T, MinimalRow]:
 607        """ 
 608        Updates a row by its ID in the table with the given ID. The data can be
 609        provided either as a dictionary or as a Pydantic model. Please note that
 610        this method does not check whether the fields provided with `data`
 611        actually exist.
 612
 613        The return value depends on the `data` parameter: If a Pydantic model is
 614        passed, the return value is an instance of this model with the values as
 615        they are in the updated row. If any arbitrary dictionary is
 616        passed, `MinimalRow` is returned, which contains only the ID field.
 617
 618        Args:
 619            table_id (int): The ID of the table containing the row to be
 620                updated.
 621            row_id (int): The ID of the row which should be updated.
 622            data (Union[T, dict[str, Any]]): The new values for the row.
 623            user_field_names (bool): When set to true, the fields in the
 624                provided data parameter are named according to their field
 625                names. Otherwise, the unique IDs of the fields will be used.
 626        """
 627        params: dict[str, str] = {
 628            "user_field_names": "true" if user_field_names else "false",
 629        }
 630
 631        if not isinstance(data, dict):
 632            json = data.model_dump(by_alias=True)
 633        else:
 634            json = data
 635
 636        return await self._typed_request(
 637            "patch",
 638            _url_join(
 639                self._url,
 640                API_PREFIX,
 641                "database/rows/table",
 642                str(table_id),
 643                str(row_id),
 644            ),
 645            type(data) if not isinstance(data, dict) else MinimalRow,
 646            CONTENT_TYPE_JSON,
 647            params,
 648            json
 649        )
 650
 651    async def upload_file(self, file: BufferedReader) -> File:
 652        """
 653        Uploads a file to Baserow by uploading the file contents directly. The
 654        file is passed as a `BufferedReader`. So, a local file can be loaded
 655        using `open("my-file.ext", "rb")` and then passed to this method.
 656
 657        After the file is uploaded, it needs to be linked to the field in the
 658        table row. For this, with the Client.update_row() method, either the
 659        complete field.File instance can be added as a list item to the File
 660        field or simply the name (field.File.name, the name is unique in any
 661        case).
 662
 663        Example usage:
 664
 665        ```python
 666        with open("my-image.png", "rb") as file:
 667            rsl = await client.upload_file(file)
 668
 669        table_id = 23
 670        row_id = 42
 671        file_field_name = "Attachments"
 672        await client.update_row(
 673            table_id,
 674            row_id,
 675            {file_field_name: [{"name": rsl.name}]},
 676            True
 677        )
 678        ```
 679
 680        It's also possible to directly upload a file accessible via a public
 681        URL. For this purpose, use Client.upload_file_via_url().
 682
 683        Args:
 684            file (BufferedReader): A BufferedReader containing the file to be
 685                uploaded.
 686        """
 687        return await self._typed_request(
 688            "post",
 689            _url_join(
 690                self._url,
 691                API_PREFIX,
 692                "user-files/upload-file",
 693            ),
 694            File,
 695            data={"file": file},
 696        )
 697
 698    async def upload_file_via_url(self, url: str) -> File:
 699        """
 700        Uploads a file from a given URL to the storage of Baserow. The Baserow
 701        instance must have access to this URL.
 702
 703        After the file is uploaded, it needs to be linked to the field in the
 704        table row. For this, with the Client.update_row() method, either the
 705        complete field.File instance can be added as a list item to the File
 706        field or simply the name (field.File.name, the name is unique in any
 707        case).
 708
 709        Example usage:
 710
 711        ```python
 712        rsl = await client.upload_file_via_url("https://picsum.photos/500")
 713
 714        table_id = 23
 715        row_id = 42
 716        file_field_name = "Attachments"
 717        await client.update_row(
 718            table_id,
 719            row_id,
 720            {file_field_name: [{"name": rsl.name}]},
 721            True
 722        )
 723        ```
 724
 725        It's also possible to upload a locally available file. For this purpose,
 726        use `Client.upload_file()`.
 727
 728        Args:
 729            url (str): The URL of the file.
 730        """
 731        return await self._typed_request(
 732            "post",
 733            _url_join(
 734                self._url,
 735                API_PREFIX,
 736                "user-files/upload-via-url",
 737            ),
 738            File,
 739            CONTENT_TYPE_JSON,
 740            json={"url": url}
 741        )
 742
 743    async def delete_row(
 744        self,
 745        table_id: int,
 746        row_id: Union[int, list[int]],
 747    ):
 748        """
 749        Deletes a row with the given ID in the table with the given ID. It's
 750        also possible to delete more than one row simultaneously. For this, a
 751        list of IDs can be passed using the row_id parameter.
 752
 753        Args:
 754            table_id (int): The ID of the table where the row should be deleted.
 755            row_id (Union[int, list[int]]): The ID(s) of the row(s) which should
 756                be deleted.
 757        """
 758        if isinstance(row_id, int):
 759            return await self._request(
 760                "delete",
 761                _url_join(
 762                    self._url,
 763                    API_PREFIX,
 764                    "database/rows/table",
 765                    str(table_id),
 766                    str(row_id),
 767                ),
 768                None,
 769            )
 770        return await self._request(
 771            "post",
 772            _url_join(
 773                self._url,
 774                API_PREFIX,
 775                "database/rows/table",
 776                str(table_id),
 777                "batch-delete",
 778            ),
 779            None,
 780            CONTENT_TYPE_JSON,
 781            None,
 782            {"items": row_id},
 783        )
 784
 785    @jwt_only
 786    async def list_database_tables(
 787        self,
 788        database_id: int,
 789    ) -> DatabaseTablesResponse:
 790        """
 791        Lists all the tables in the database given by its ID. Please note
 792        that this method only works when access is
 793        through a JWT token, meaning login credentials are used for
 794        authentication. Additionally, the account being used must have access to
 795        the database/workspace.
 796
 797        Args:
 798            database_id (int): The ID of the database from which one wants to
 799                retrieve a listing of all tables. 
 800        """
 801        return await self._typed_request(
 802            "get",
 803            _url_join(
 804                self._url,
 805                API_PREFIX,
 806                "database/tables/database",
 807                str(database_id),
 808            ),
 809            DatabaseTablesResponse,
 810        )
 811
 812    @jwt_only
 813    async def create_database_table(
 814        self,
 815        database_id: int,
 816        name: str,
 817        client_session_id: Optional[str] = None,
 818        client_undo_redo_action_group_id: Optional[str] = None,
 819    ) -> DatabaseTableResponse:
 820        """
 821        Synchronously creates a new table with the given name in the database
 822        specified by the database_id parameter.
 823
 824        Args:
 825            database_id (int): The ID of the database in which the new table
 826                should be created.
 827            name (str): Human readable name for the new table.
 828            client_session_id (str, optional): An optional UUID that marks
 829                the action performed by this request as having occurred in a
 830                particular client session. Then using the undo/redo endpoints
 831                with the same ClientSessionId header this action can be
 832                undone/redone.
 833            client_undo_redo_action_group_id (str, optional): An optional UUID
 834                that marks the action performed by this request as having
 835                occurred in a particular action group. Then calling the undo/redo
 836                endpoint with the same ClientSessionId header, all the actions
 837                belonging to the same action group can be undone/redone together
 838                in a single API call.
 839        """
 840        headers: dict[str, str] = CONTENT_TYPE_JSON.copy()
 841        if client_session_id:
 842            headers["ClientSessionId"] = client_session_id
 843        if client_undo_redo_action_group_id:
 844            headers["ClientUndoRedoActionGroupId"] = client_undo_redo_action_group_id
 845        return await self._typed_request(
 846            "post",
 847            _url_join(
 848                self._url,
 849                API_PREFIX,
 850                "database/tables/database",
 851                str(database_id),
 852            ),
 853            DatabaseTableResponse,
 854            headers=headers,
 855            json={"name": name},
 856        )
 857
 858    async def create_database_table_field(
 859        self,
 860        table_id: int,
 861        field: FieldConfigType,
 862        client_session_id: Optional[str] = None,
 863        client_undo_redo_action_group_id: Optional[str] = None,
 864    ) -> FieldConfig:
 865        """
 866        Adds a new field to a table specified by its ID.
 867
 868        Args:
 869            table_id (int): The ID of the table to be altered.
 870            field (FieldConfigType): The config of the field to be added.
 871            client_session_id (str, optional): An optional UUID that marks
 872                the action performed by this request as having occurred in a
 873                particular client session. Then using the undo/redo endpoints
 874                with the same ClientSessionId header this action can be
 875                undone/redone.
 876            client_undo_redo_action_group_id (str, optional): An optional UUID
 877                that marks the action performed by this request as having
 878                occurred in a particular action group. Then calling the undo/redo
 879                endpoint with the same ClientSessionId header, all the actions
 880                belonging to the same action group can be undone/redone together
 881                in a single API call.
 882        """
 883        headers: dict[str, str] = CONTENT_TYPE_JSON.copy()
 884        if client_session_id:
 885            headers["ClientSessionId"] = client_session_id
 886        if client_undo_redo_action_group_id:
 887            headers["ClientUndoRedoActionGroupId"] = client_undo_redo_action_group_id
 888        return await self._typed_request(
 889            "post",
 890            _url_join(
 891                self._url,
 892                API_PREFIX,
 893                "database/fields/table",
 894                str(table_id),
 895            ),
 896            FieldConfig,
 897            headers=headers,
 898            json=field.model_dump(),
 899        )
 900
 901    async def update_database_table_field(
 902        self,
 903        field_id: int,
 904        field: FieldConfigType | dict[str, Any],
 905        client_session_id: Optional[str] = None,
 906        client_undo_redo_action_group_id: Optional[str] = None,
 907    ) -> FieldConfig:
 908        """
 909        Updates a table field defined by its ID.
 910
 911        Args:
 912            field_id (int): The ID of the field to be updated.
 913            field (FieldConfigType | dict[str, Any]): The new config for the
 914                field.
 915            client_session_id (str, optional): An optional UUID that marks
 916                the action performed by this request as having occurred in a
 917                particular client session. Then using the undo/redo endpoints
 918                with the same ClientSessionId header this action can be
 919                undone/redone.
 920            client_undo_redo_action_group_id (str, optional): An optional UUID
 921                that marks the action performed by this request as having
 922                occurred in a particular action group. Then calling the undo/redo
 923                endpoint with the same ClientSessionId header, all the actions
 924                belonging to the same action group can be undone/redone together
 925                in a single API call.
 926        """
 927        headers: dict[str, str] = CONTENT_TYPE_JSON.copy()
 928        if client_session_id:
 929            headers["ClientSessionId"] = client_session_id
 930        if client_undo_redo_action_group_id:
 931            headers["ClientUndoRedoActionGroupId"] = client_undo_redo_action_group_id
 932
 933        if not isinstance(field, dict):
 934            json = field.model_dump(by_alias=True)
 935        else:
 936            json = field
 937
 938        return await self._typed_request(
 939            "patch",
 940            _url_join(
 941                self._url,
 942                API_PREFIX,
 943                "database/fields",
 944                str(field_id),
 945            ),
 946            FieldConfig,
 947            headers=headers,
 948            json=json,
 949        )
 950
 951    async def delete_database_table_field(
 952        self,
 953        field_id: int,
 954        client_session_id: Optional[str] = None,
 955        client_undo_redo_action_group_id: Optional[str] = None,
 956    ):
 957        """
 958        Deletes a table field defined by its ID.
 959
 960        Args:
 961            field_id (int): The ID of the field to be deleted.
 962            client_session_id (str, optional): An optional UUID that marks
 963                the action performed by this request as having occurred in a
 964                particular client session. Then using the undo/redo endpoints
 965                with the same ClientSessionId header this action can be
 966                undone/redone.
 967            client_undo_redo_action_group_id (str, optional): An optional UUID
 968                that marks the action performed by this request as having
 969                occurred in a particular action group. Then calling the undo/redo
 970                endpoint with the same ClientSessionId header, all the actions
 971                belonging to the same action group can be undone/redone together
 972                in a single API call.
 973        """
 974        headers: dict[str, str] = CONTENT_TYPE_JSON.copy()
 975        if client_session_id:
 976            headers["ClientSessionId"] = client_session_id
 977        if client_undo_redo_action_group_id:
 978            headers["ClientUndoRedoActionGroupId"] = client_undo_redo_action_group_id
 979        await self._request(
 980            "DELETE",
 981            _url_join(
 982                self._url,
 983                API_PREFIX,
 984                "database/fields",
 985                str(field_id),
 986            ),
 987            None,
 988            headers=headers,
 989        )
 990
 991    async def close(self):
 992        """
 993        The connection session with the client is terminated. Subsequently, the
 994        client object cannot be used anymore. It is necessary to explicitly and
 995        manually close the session only when the client object is directly
 996        instantiated.
 997        """
 998        await self._session.close()
 999
1000    async def __get_jwt_access_token(self) -> str:
1001        if self._email is None or self._password is None:
1002            raise ValueError("email and password have to be set")
1003        if self.__jwt_access_token is None or self.__jwt_refresh_token is None or self.__jwt_token_age is None:
1004            # Need to initialize the token.
1005            rsp = await self.token_auth(self._email, self._password)
1006            self.__jwt_access_token = rsp.access_token
1007            self.__jwt_refresh_token = rsp.refresh_token
1008            self.__jwt_token_age = datetime.now()
1009        elif self.__jwt_token_age < datetime.now() - timedelta(minutes=10):
1010            # Token has to be refreshed.
1011            rsp = await self.token_refresh(self.__jwt_refresh_token)
1012            self.__jwt_access_token = rsp.access_token
1013            self.__jwt_token_age = datetime.now()
1014        return self.__jwt_access_token
1015
1016    async def __headers(
1017        self,
1018        parts: Optional[dict[str, str]],
1019    ) -> dict[str, str]:
1020        # if self.__headers_cache is not None:
1021        #     if parts is not None:
1022        #         rsl = self.__headers_cache.copy()
1023        #         rsl.update(parts)
1024        #         return rsl
1025        #     return self.__headers_cache
1026        rsl: dict[str, str] = {}
1027        if parts is not None:
1028            rsl = parts
1029
1030        if self._token:
1031            token = f"Token {self._token}"
1032        elif self._email and self._password:
1033            access_token = await self.__get_jwt_access_token()
1034            token = f"JWT {access_token}"
1035        else:
1036            raise RuntimeError("logic error, shouldn't be possible")
1037
1038        rsl["Authorization"] = token
1039        return rsl
1040
1041    async def _typed_request(
1042        self,
1043        method: str,
1044        url: str,
1045        result_type: Optional[Type[T]],
1046        headers: Optional[dict[str, str]] = None,
1047        params: Optional[dict[str, str]] = None,
1048        json: Optional[dict[str, Any]] = None,
1049        data: Optional[dict[str, Any]] = None,
1050        use_default_headers: bool = True,
1051    ) -> T:
1052        """
1053        Wrap the _request method for all cases where the return value of the
1054        request must not be None under any circumstances. If it is, a ValueError
1055        will be raised.
1056        """
1057        rsl = await self._request(
1058            method,
1059            url,
1060            result_type,
1061            headers,
1062            params,
1063            json,
1064            data,
1065            use_default_headers,
1066        )
1067        if not rsl:
1068            raise ValueError("request result shouldn't be None")
1069        return rsl
1070
1071    async def _request(
1072        self,
1073        method: str,
1074        url: str,
1075        result_type: Optional[Type[T]],
1076        headers: Optional[dict[str, str]] = None,
1077        params: Optional[dict[str, str]] = None,
1078        json: Optional[dict[str, Any]] = None,
1079        data: Optional[dict[str, Any]] = None,
1080        use_default_headers: bool = True,
1081    ) -> Optional[T]:
1082        """
1083        Handles the actual HTTP request.
1084
1085        Args:
1086            result_type (Type[T]): The pydantic model which should be used to
1087                serialize the response field of the response. If set to None
1088                pydantic will try to serialize the response with built-in types.
1089                Aka `pydantic.JsonValue`.
1090        """
1091        if use_default_headers:
1092            headers = await self.__headers(headers)
1093        else:
1094            headers = {}
1095        async with self._session.request(
1096            method,
1097            url,
1098            headers=headers,
1099            params=params,
1100            json=json,
1101            data=data,
1102        ) as rsp:
1103            if rsp.status == 400:
1104                err = ErrorResponse.model_validate_json(await rsp.text())
1105                raise BaserowError(rsp.status, err.error, err.detail)
1106            if rsp.status == 204:
1107                return None
1108            if rsp.status != 200:
1109                raise UnspecifiedBaserowError(rsp.status, await rsp.text())
1110            body = await rsp.text()
1111            if result_type is not None:
1112                rsl = result_type.model_validate_json(body)
1113                return rsl
1114            return None

This class manages interaction with the Baserow server via HTTP using REST API calls. Access to the Baserow API requires authentication, and there are two methods available: Database Tokens and JWT Tokens.

Database Tokens: These tokens are designed for delivering data to frontends and, as such, can only perform CRUD (Create, Read, Update, Delete) operations on a database. New tokens can be created in the User Settings, where their permissions can also be configured. For instance, it is possible to create a token that only allows reading. These tokens have unlimited validity.

from baserow import Client

client = Client("baserow.example.com", token="<API-TOKEN>")

JWT Tokens: All other functionalities require a JWT token, which can be obtained by providing login credentials (email address and password) to the Baserow API. These tokens have a limited lifespan of 10 minutes and will be refreshed if needed.

from baserow import Client

client = Client(
    "baserow.example.com",
    email="baserow.user@example.com",
    password="<PASSWORD>",
)

Singleton/Global Client. In many applications, maintaining a consistent connection to a single Baserow instance throughout the runtime is crucial. To facilitate this, the package provides a Global Client, which acts as a singleton. This means the client needs to be configured just once using GlobalClient.configure(). After this initial setup, the Global Client can be universally accessed and used throughout the program.

When utilizing the ORM functionality of the table models, all methods within the Table models inherently use this Global Client. Please note that the Global Client can only be configured once. Attempting to call the GlobalClient.configure() method more than once will result in an exception.

# Either configure the global client with a database token...
GlobalClient.configure("baserow.example.com", token="<API-TOKEN>")

# ...or with the login credentials (email and password).
GlobalClient.configure(
    "baserow.example.com",
    email="baserow.user@example.com",
    password="<PASSWORD>",
)

This client can also be used directly, without utilizing the ORM functionality of the package.

Arguments:
  • url (str): The base URL of the Baserow instance.
  • token (str, optional): An access token (referred to as a database token in Baserow's documentation) can be generated in the user settings within Baserow.
  • email (str, optional): Email address of a Baserow user for the JWT authentication.
  • password (str, optional): Password of a Baserow user for the JWT authentication.
Client( url: str, token: Optional[str] = None, email: Optional[str] = None, password: Optional[str] = None)
211    def __init__(
212        self,
213        url: str,
214        token: Optional[str] = None,
215        email: Optional[str] = None,
216        password: Optional[str] = None,
217    ):
218        if not token and not email and not password:
219            raise ValueError(
220                "you must either provide a database token or the login credentials (email, password) of a user"
221            )
222        if token and (email or password):
223            raise ValueError(
224                "passing parameters for both types of authentication (database token and login credentials) simultaneously is not allowed"
225            )
226        if not token and (not email or not password):
227            missing = "email" if not email else "password"
228            raise ValueError(
229                f"""incomplete authentication with login credentials, {
230                    missing} is missing"""
231            )
232        self._url = url
233        self._token = token
234        self._email = email
235        self._password = password
236        self._session: aiohttp.ClientSession = aiohttp.ClientSession()
237        self._auth_method = AuthMethod.DATABASE_TOKEN if token else AuthMethod.JWT
238        # Cache is only accessed by __header() method.
239        self.__headers_cache: Optional[dict[str, str]] = None
240        self.__jwt_access_token: Optional[str] = None
241        self.__jwt_refresh_token: Optional[str] = None
242        self.__jwt_token_age: Optional[datetime] = None
async def token_auth(self, email: str, password: str) -> TokenResponse:
244    async def token_auth(self, email: str, password: str) -> TokenResponse:
245        """
246        Authenticates an existing user based on their email and their password.
247        If successful, an access token will be returned.
248
249        This method is designed to function even with a partially initialized
250        instance, as it's used for optional JWT token retrieval during class
251        initialization.
252
253        Args:
254            email (str): Email address of a Baserow user for the JWT
255                authentication.
256            password (str): Password of a Baserow user for the JWT
257                authentication.
260        """
261        return await self._typed_request(
262            "post",
263            _url_join(self._url, API_PREFIX, "user/token-auth"),
264            TokenResponse,
265            headers=CONTENT_TYPE_JSON,
266            json={"email": email, "password": password},
267            use_default_headers=False,
268        )

Authenticates an existing user based on their email and their password. If successful, an access token will be returned.

This method is designed to function even with a partially initialized instance, as it's used for optional JWT token retrieval during class initialization.

Arguments:
  • email (str): Email address of a Baserow user for the JWT authentication.
  • password (str): Password of a Baserow user for the JWT authentication.
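
A minimal sketch, assuming an async context, an already instantiated client, and placeholder credentials. Note that the client performs this call itself when it is configured with email and password, so doing it manually is rarely necessary:

rsp = await client.token_auth("baserow.user@example.com", "<PASSWORD>")
# rsp is a TokenResponse; the access token expires after roughly 10 minutes,
# the refresh token can be used with Client.token_refresh().
print(rsp.access_token, rsp.refresh_token)
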
async def token_refresh(self, refresh_token: str) -> TokenRefresh:
270    async def token_refresh(self, refresh_token: str) -> TokenRefresh:
271        """
272        Generate a new JWT access_token that can be used to continue operating
273        on Baserow starting from a valid refresh token. The initial JWT access
274        and refresh token can be generated using `Client.token_auth()`.
275
276        Args:
277            refresh_token: The JWT refresh token obtained by
278                `Client.token_auth()`.
279        """
280        return await self._typed_request(
281            "post",
282            _url_join(self._url, API_PREFIX, "user/token-refresh"),
283            TokenRefresh,
284            headers=CONTENT_TYPE_JSON,
285            json={"refresh_token": refresh_token},
286            use_default_headers=False,
287        )

Generate a new JWT access_token that can be used to continue operating on Baserow starting from a valid refresh token. The initial JWT access and refresh token can be generated using Client.token_auth().

Arguments:
  • refresh_token: The JWT refresh token obtained by Client.token_auth().
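
A minimal sketch, assuming an async context, an already instantiated client, and the token pair obtained by a previous Client.token_auth() call with placeholder credentials:

auth = await client.token_auth("baserow.user@example.com", "<PASSWORD>")
refreshed = await client.token_refresh(auth.refresh_token)
# refreshed is a TokenRefresh; it only contains a new access token, the
# refresh token itself stays the same.
print(refreshed.access_token)
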
async def list_table_rows( self, table_id: int, user_field_names: bool, result_type: Optional[Type[~T]] = None, filter: Optional[baserow.filter.Filter] = None, order_by: Optional[list[str]] = None, page: Optional[int] = None, size: Optional[int] = None) -> RowResponse:
289    async def list_table_rows(
290        self,
291        table_id: int,
292        user_field_names: bool,
293        result_type: Optional[Type[T]] = None,
294        filter: Optional[Filter] = None,
295        order_by: Optional[list[str]] = None,
296        page: Optional[int] = None,
297        size: Optional[int] = None,
298    ) -> RowResponse[T]:
299        """
300        Lists rows in the table with the given ID. Note that Baserow uses
301        paging. If all rows of a table are needed, the
302        Client.list_all_table_rows method can be used.
303
304        Args:
305            table_id (int): The ID of the table to be queried.
306            user_field_names (bool): When set to true, the returned fields will
307                be named according to their field names. Otherwise, the unique
308                IDs of the fields will be used.
309            result_type (Optional[Type[T]]): Which type will appear as an item
310                in the result list and should be serialized accordingly. If set
311                to None, Pydantic will attempt to serialize it to the standard
312                types.
313            filter (Optional[Filter], optional): Allows the dataset to be
314                filtered.
315            order_by (Optional[list[str]], optional): A list of field names/IDs
316                by which the result should be sorted. If the field name is
317                prepended with a +, the sorting is ascending; if with a -, it is
318                descending.
319            page (Optional[int], optional): The page of the paging.
320            size (Optional[int], optional): How many records should be returned
321                at max. Defaults to 100 and cannot exceed 200.
322        """
323        params: dict[str, str] = {
324            "user_field_names": "true" if user_field_names else "false",
325        }
326        if filter is not None:
327            params["filters"] = filter.model_dump_json(by_alias=True)
328        if order_by is not None:
329            params["order_by"] = _list_to_str(order_by)
330        if page is not None:
331            params["page"] = str(page)
332        if size is not None:
333            params["size"] = str(size)
334        url = _url_join(
335            self._url, API_PREFIX,
336            "database/rows/table",
337            str(table_id),
338        )
339        if result_type:
340            model = RowResponse[result_type]
341        else:
342            model = RowResponse[Any]
343        return await self._typed_request("get", url, model, params=params)

Lists rows in the table with the given ID. Note that Baserow uses paging. If all rows of a table are needed, the Client.list_all_table_rows method can be used.

Arguments:
  • table_id (int): The ID of the table to be queried.
  • user_field_names (bool): When set to true, the returned fields will be named according to their field names. Otherwise, the unique IDs of the fields will be used.
  • result_type (Optional[Type[T]]): Which type will appear as an item in the result list and should be serialized accordingly. If set to None, Pydantic will attempt to serialize it to the standard types.
  • filter (Optional[Filter], optional): Allows the dataset to be filtered.
  • order_by (Optional[list[str]], optional): A list of field names/IDs by which the result should be sorted. If the field name is prepended with a +, the sorting is ascending; if with a -, it is descending.
  • page (Optional[int], optional): The page of the paging.
  • size (Optional[int], optional): How many records should be returned at max. Defaults to 100 and cannot exceed 200.
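
A minimal sketch, assuming an async context, an already configured client, and a hypothetical table 23 whose user-facing field names match the model below:

from pydantic import BaseModel

class Person(BaseModel):
    name: str
    age: int

page = await client.list_table_rows(23, True, result_type=Person, size=50)
print(page.count)            # total number of rows in the table
for person in page.results:  # at most 50 Person instances (one page)
    print(person.name, person.age)
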
async def list_all_table_rows( self, table_id: int, user_field_names: bool, result_type: Optional[Type[~T]] = None, filter: Optional[baserow.filter.Filter] = None, order_by: Optional[list[str]] = None) -> RowResponse:
345    async def list_all_table_rows(
346        self,
347        table_id: int,
348        user_field_names: bool,
349        result_type: Optional[Type[T]] = None,
350        filter: Optional[Filter] = None,
351        order_by: Optional[list[str]] = None,
352    ) -> RowResponse[T]:
353        """
354        Since Baserow uses paging, this method sends as many requests to Baserow
355        as needed until all rows are received. This function should only be used
356        when all data is truly needed. This should be a rare occurrence, as
357        filtering can occur on Baserow's side using the filter parameter.
358
359        Args:
360            table_id (int): The ID of the table to be queried.
361            user_field_names (bool): When set to true, the returned fields will
362                be named according to their field names. Otherwise, the unique
363                IDs of the fields will be used.
364            result_type (Optional[Type[T]]): Which type will appear as an item
365                in the result list and should be serialized accordingly. If set
366                to None, Pydantic will attempt to serialize it to the standard
367                types.
368            filter (Optional[Filter], optional): Allows the dataset to be
369                filtered.
370            order_by (Optional[list[str]], optional): A list of field names/IDs
371                by which the result should be sorted. If the field name is
372                prepended with a +, the sorting is ascending; if with a -, it is
373                descending.
374        """
375        count: int = await self.table_row_count(table_id)
376        total_calls = (count + 200 - 1) // 200
377
378        requests = []
379        for page in range(1, total_calls+1):
380            rqs = asyncio.create_task(
381                self.list_table_rows(
382                    table_id,
383                    user_field_names,
384                    result_type=result_type,
385                    filter=filter,
386                    order_by=order_by,
387                    page=page,
388                    size=200,
389                )
390            )
391            requests.append(rqs)
392        responses = await asyncio.gather(*requests)
393
394        rsl: Optional[RowResponse[T]] = None
395        for rsp in responses:
396            if rsl is None:
397                rsl = rsp
398            else:
399                rsl.results.extend(rsp.results)
400        if rsl is None:
401            return RowResponse(
402                count=0,
403                previous=None,
404                next=None,
405                results=[],
406            )
407        return rsl

Since Baserow uses paging, this method sends as many requests to Baserow as needed until all rows are received. This function should only be used when all data is truly needed. This should be a rare occurrence, as filtering can occur on Baserow's side using the filter parameter.

Arguments:
  • table_id (int): The ID of the table to be queried.
  • user_field_names (bool): When set to true, the returned fields will be named according to their field names. Otherwise, the unique IDs of the fields will be used.
  • result_type (Optional[Type[T]]): Which type will appear as an item in the result list and should be serialized accordingly. If set to None, Pydantic will attempt to serialize it to the standard types.
  • filter (Optional[list[Filter]], optional): Allows the dataset to be filtered.
  • order_by (Optional[list[str]], optional): A list of field names/IDs by which the result should be sorted. If the field name is prepended with a +, the sorting is ascending; if with a -, it is descending.
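
A minimal sketch of fetching a complete table, again assuming `client` and the hypothetical `Person` model from the example above:

```python
# Issues as many paged requests as necessary (200 rows per call) and returns
# a single RowResponse containing every row of the table.
all_rows = await client.list_all_table_rows(23, True, result_type=Person)
print(f"fetched {len(all_rows.results)} of {all_rows.count} rows")
```
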
async def table_row_count( self, table_id: int, filter: Optional[baserow.filter.Filter] = None) -> int:
409    async def table_row_count(self, table_id: int, filter: Optional[Filter] = None) -> int:
410        """
411        Determines how many rows or records are present in the table with the
412        given ID. Filters can be optionally passed as parameters.
413
414        Args:
415            table_id (int): The ID of the table to be queried.
416            filter (Optional[list[Filter]], optional): Allows the dataset to be
417                filtered. Only rows matching the filter will be counted.
418        """
419        rsl = await self.list_table_rows(table_id, True, filter=filter, size=1)
420        return rsl.count

Determines how many rows or records are present in the table with the given ID. Filters can be optionally passed as parameters.

Arguments:
  • table_id (int): The ID of the table to be queried.
  • filter (Optional[list[Filter]], optional): Allows the dataset to be filtered. Only rows matching the filter will be counted.
async def list_fields(self, table_id: int) -> FieldResponse:
422    async def list_fields(self, table_id: int) -> FieldResponse:
423        """
424        Lists all fields (»columns«) of a table.
425
426        Args:
427            table_id (int): The ID of the table to be queried.
428        """
429        return await self._typed_request(
430            "get",
431            _url_join(
432                self._url,
433                API_PREFIX,
434                "database/fields/table/",
435                str(table_id),
436            ),
437            FieldResponse,
438        )

Lists all fields (»columns«) of a table.

Arguments:
  • table_id (int): The ID of the table to be queried.
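
For example (with `client` assumed as before), the returned `FieldResponse` is a root model wrapping one `FieldConfig` per column:

```python
fields = await client.list_fields(23)
for field in fields.root:
    # Each entry describes the configuration of one field ("column").
    print(field)
```
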
async def get_row( self, table_id: int, row_id: int, user_field_names: bool, result_type: Optional[Type[~T]] = None) -> ~T:
440    async def get_row(
441        self,
442        table_id: int,
443        row_id: int,
444        user_field_names: bool,
445        result_type: Optional[Type[T]] = None,
446    ) -> T:
447        """
448        Fetch a single row/entry from the given table by the row ID.
449
450        Args:
451            table_id (int): The ID of the table to be queried.
452            row_id (int): The ID of the row to be returned.
453            user_field_names (bool): When set to true, the fields in the
454                provided data parameter are named according to their field
455                names. Otherwise, the unique IDs of the fields will be used.
456            result_type (Optional[Type[T]]): Which type will appear as an item
457                in the result list and should be serialized accordingly. If set
458                to None, Pydantic will attempt to serialize it to the standard
459                types.
460        """
461        if result_type:
462            model = result_type
463        else:
464            model = Any
465        return await self._typed_request(
466            "get",
467            _url_join(
468                self._url,
469                API_PREFIX,
470                "database/rows/table",
471                str(table_id),
472                str(row_id),
473            ),
474            model,
475            params={"user_field_names": "true" if user_field_names else "false"}
476        )

Fetch a single row/entry from the given table by the row ID.

Arguments:
  • table_id (int): The ID of the table to be queried.
  • row_id (int): The ID of the row to be returned.
  • user_field_names (bool): When set to true, the fields in the provided data parameter are named according to their field names. Otherwise, the unique IDs of the fields will be used.
  • result_type (Optional[Type[T]]): Which type will appear as an item in the result list and should be serialized accordingly. If set to None, Pydantic will attempt to serialize it to the standard types.
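
A short sketch, reusing the hypothetical `Person` model; the table and row IDs are placeholders:

```python
# Deserialize the row into Person; without result_type the row is returned
# using standard types instead.
person = await client.get_row(23, 42, True, result_type=Person)
print(person.name)
```
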
async def create_row( self, table_id: int, data: Union[~T, dict[str, Any]], user_field_names: bool, before: Optional[int] = None) -> Union[~T, MinimalRow]:
478    async def create_row(
479        self,
480        table_id: int,
481        data: Union[T, dict[str, Any]],
482        user_field_names: bool,
483        before: Optional[int] = None,
484    ) -> Union[T, MinimalRow]:
485        """
486        Creates a new row in the table with the given ID. The data can be
487        provided either as a dictionary or as a Pydantic model. Please note that
488        this method does not check whether the fields provided with `data`
489        actually exist.
490
491        The return value depends on the `data` parameter: If a Pydantic model is
492        passed, the return value is an instance of this model with the values as
493        they are in the newly created row. If any arbitrary dictionary is
494        passed, `MinimalRow` is returned, which contains only the ID field.
495
496        Args:
497            table_id (int): The ID of the table where the new row should be
498                created.
499            data (Union[T, dict[str, Any]]): The data of the new row.
500            user_field_names (bool): When set to true, the fields in the
501                provided data parameter are named according to their field
502                names. Otherwise, the unique IDs of the fields will be used.
503            before (Optional[int], optional):  If provided then the newly
504                created row will be positioned before the row with the provided
505                id. 
506        """
507        params: dict[str, str] = {
508            "user_field_names": "true" if user_field_names else "false",
509        }
510        if before is not None:
511            params["before"] = str(before)
512
513        if not isinstance(data, dict):
514            json = data.model_dump(by_alias=True)
515        else:
516            json = data
517
518        return await self._typed_request(
519            "post",
520            _url_join(
521                self._url,
522                API_PREFIX,
523                "database/rows/table",
524                str(table_id),
525            ),
526            type(data) if not isinstance(data, dict) else MinimalRow,
527            CONTENT_TYPE_JSON,
528            params,
529            json,
530        )

Creates a new row in the table with the given ID. The data can be provided either as a dictionary or as a Pydantic model. Please note that this method does not check whether the fields provided with data actually exist.

The return value depends on the data parameter: If a Pydantic model is passed, the return value is an instance of this model with the values as they are in the newly created row. If any arbitrary dictionary is passed, MinimalRow is returned, which contains only the ID field.

Arguments:
  • table_id (int): The ID of the table where the new row should be created.
  • data (Union[T, dict[str, Any]]): The data of the new row.
  • user_field_names (bool): When set to true, the fields in the provided data parameter are named according to their field names. Otherwise, the unique IDs of the fields will be used.
  • before (Optional[int], optional): If provided then the newly created row will be positioned before the row with the provided id.
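
The two return behaviours can be sketched as follows; the table ID and field names are assumptions:

```python
# Passing a Pydantic model returns an instance of that model ...
created = await client.create_row(23, Person(name="Ada", age=36), True)

# ... while passing a plain dictionary returns a MinimalRow containing only
# the ID of the newly created row.
minimal = await client.create_row(23, {"name": "Grace", "age": 35}, True)
print(minimal.id)
```
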
async def create_rows( self, table_id: int, data: Union[list[~T], list[dict[str, Any]]], user_field_names: bool, before: Optional[int] = None) -> Union[BatchResponse[TypeVar], BatchResponse[MinimalRow]]:
532    async def create_rows(
533        self,
534        table_id: int,
535        data: Union[list[T], list[dict[str, Any]]],
536        user_field_names: bool,
537        before: Optional[int] = None,
538    ) -> Union[BatchResponse[T], BatchResponse[MinimalRow]]:
539        """
540        Creates one or multiple new rows in the table with the given ID using
541        Baserow's batch functionality. The data can be provided either as a list
542        of dictionaries or as a list of Pydantic models. Please note that this
543        method does not check whether the fields provided with `data` actually exist.
544
545        The return value depends on the `data` parameter: If a Pydantic model is
546        passed, the return value is an instance of this model with the values as
547        they are in the newly created row. If any arbitrary dictionary is
548        passed, `MinimalRow` is returned, which contains only the ID field.
549
550        If the given list is empty, no call is executed; instead, an empty
551        response is returned.
552
553        Args:
554            table_id (int): The ID of the table where the new row should be
555                created.
556            data (Union[list[T], list[dict[str, Any]]]): The data of the new
557                rows.
558            user_field_names (bool): When set to true, the fields in the
559                provided data parameter are named according to their field
560                names. Otherwise, the unique IDs of the fields will be used.
561            before (Optional[int], optional):  If provided then the newly
562                created row will be positioned before the row with the provided
563                id. 
564        """
565        if len(data) == 0:
566            return BatchResponse(items=[])
567        params: dict[str, str] = {
568            "user_field_names": "true" if user_field_names else "false",
569        }
570        if before is not None:
571            params["before"] = str(before)
572        if len(data) == 0:
573            raise ValueError("data parameter cannot be empty list")
574        if not isinstance(data[0], dict):
575            result_type = BatchResponse[type(data[0])]
576        else:
577            result_type = BatchResponse[MinimalRow]
578        items: list[dict[str, Any]] = []
579        for item in data:
580            if not isinstance(item, dict):
581                items.append(item.model_dump(by_alias=True))
582            else:
583                items.append(item)
584        json = {"items": items}
585        return await self._typed_request(
586            "post",
587            _url_join(
588                self._url,
589                API_PREFIX,
590                "database/rows/table",
591                str(table_id),
592                "batch",
593            ),
594            result_type,
595            CONTENT_TYPE_JSON,
596            params,
597            json,
598        )

Creates one or multiple new rows in the table with the given ID using Baserow's batch functionality. The data can be provided either as a list of dictionaries or as a list of Pydantic models. Please note that this method does not check whether the fields provided with data actually exist.

The return value depends on the data parameter: If a Pydantic model is passed, the return value is an instance of this model with the values as they are in the newly created row. If any arbitrary dictionary is passed, MinimalRow is returned, which contains only the ID field.

If the given list is empty, no call is executed; instead, an empty response is returned.

Arguments:
  • table_id (int): The ID of the table where the new row should be created.
  • data (Union[list[T], list[dict[str, Any]]]): The data of the new rows.
  • user_field_names (bool): When set to true, the fields in the provided data parameter are named according to their field names. Otherwise, the unique IDs of the fields will be used.
  • before (Optional[int], optional): If provided then the newly created row will be positioned before the row with the provided id.
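
A sketch of a batch create, assuming the hypothetical `Person` model and table ID from above:

```python
batch = await client.create_rows(
    23,
    [Person(name="Ada", age=36), Person(name="Grace", age=35)],
    True,
)
# BatchResponse.items contains one entry per created row.
for item in batch.items:
    print(item)
```
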
async def update_row( self, table_id: int, row_id: int, data: Union[~T, dict[str, Any]], user_field_names: bool) -> Union[~T, MinimalRow]:
600    async def update_row(
601        self,
602        table_id: int,
603        row_id: int,
604        data: Union[T, dict[str, Any]],
605        user_field_names: bool,
606    ) -> Union[T, MinimalRow]:
607        """
608        Updates a row by its ID in the table with the given ID. The data can be
609        provided either as a dictionary or as a Pydantic model. Please note that
610        this method does not check whether the fields provided with `data`
611        actually exist.
612
613        The return value depends on the `data` parameter: If a Pydantic model is
614        passed, the return value is an instance of this model with the values as
615        they are in the updated row. If any arbitrary dictionary is
616        passed, `MinimalRow` is returned, which contains only the ID field.
617
618        Args:
619            table_id (int): The ID of the table containing the row to be
620                updated.
621            row_id (int): The ID of the row which should be updated.
622            data (Union[T, dict[str, Any]]): The new data for the row.
623            user_field_names (bool): When set to true, the fields in the
624                provided data parameter are named according to their field
625                names. Otherwise, the unique IDs of the fields will be used.
626        """
627        params: dict[str, str] = {
628            "user_field_names": "true" if user_field_names else "false",
629        }
630
631        if not isinstance(data, dict):
632            json = data.model_dump(by_alias=True)
633        else:
634            json = data
635
636        return await self._typed_request(
637            "patch",
638            _url_join(
639                self._url,
640                API_PREFIX,
641                "database/rows/table",
642                str(table_id),
643                str(row_id),
644            ),
645            type(data) if not isinstance(data, dict) else MinimalRow,
646            CONTENT_TYPE_JSON,
647            params,
648            json
649        )

Updates a row by its ID in the table with the given ID. The data can be provided either as a dictionary or as a Pydantic model. Please note that this method does not check whether the fields provided with data actually exist.

The return value depends on the data parameter: If a Pydantic model is passed, the return value is an instance of this model with the values as they are in the updated row. If any arbitrary dictionary is passed, MinimalRow is returned, which contains only the ID field.

Arguments:
  • table_id (int): The ID of the table containing the row to be updated.
  • row_id (int): The ID of the row which should be updated.
  • data (Union[T, dict[str, Any]]): The new data for the row.
  • user_field_names (bool): When set to true, the fields in the provided data parameter are named according to their field names. Otherwise, the unique IDs of the fields will be used.
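
For instance (IDs and field name assumed), a partial update with a plain dictionary returns a `MinimalRow`:

```python
# Only the fields present in the dictionary are sent; "age" is a placeholder
# for a field name of the table.
updated = await client.update_row(23, 42, {"age": 37}, True)
print(updated.id)
```
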
async def upload_file(self, file: _io.BufferedReader) -> baserow.file.File:
651    async def upload_file(self, file: BufferedReader) -> File:
652        """
653        Uploads a file to Baserow by uploading the file contents directly. The
654        file is passed as a `BufferedReader`. So, a local file can be loaded
655        using `open("my-file.ext", "rb")` and then passed to this method.
656
657        After the file is uploaded, it needs to be linked to the field in the
658        table row. For this, with the Client.update_row() method, either the
659        complete field.File instance can be added as a list item to the File
660        field or simply the name (field.File.name, the name is unique in any
661        case).
662
663        Example usage:
664
665        ```python
666        with open("my-image.png", "rb") as file:
667            rsl = await client.upload_file(file)
668
669        table_id = 23
670        row_id = 42
671        file_field_name = "Attachments"
672        await client.update_row(
673            table_id,
674            row_id,
675            {file_field_name: [{"name": rsl.name}]},
676            True,
677        )
678        ```
679
680        It's also possible to directly upload a file accessible via a public
681        URL. For this purpose, use Client.upload_file_via_url().
682
683        Args:
684            file (BufferedReader): A BufferedReader containing the file to be
685                uploaded.
686        """
687        return await self._typed_request(
688            "post",
689            _url_join(
690                self._url,
691                API_PREFIX,
692                "user-files/upload-file",
693            ),
694            File,
695            data={"file": file},
696        )

Uploads a file to Baserow by uploading the file contents directly. The file is passed as a BufferedReader. So, a local file can be loaded using open("my-file.ext", "rb") and then passed to this method.

After the file is uploaded, it needs to be linked to the field in the table row. For this, with the Client.update_row() method, either the complete field.File instance can be added as a list item to the File field or simply the name (field.File.name, the name is unique in any case).

Example usage:

with open("my-image.png", "rb") as file:
    rsl = await client.upload_file(file)

table_id = 23
row_id = 42
file_field_name = "Attachments"
await client.update_row(
    table_id,
    row_id,
    {file_field_name: [{"name": rsl.name}]},
    True,
)

It's also possible to directly upload a file accessible via a public URL. For this purpose, use Client.upload_file_via_url().

Arguments:
  • file (BufferedReader): A BufferedReader containing the file to be uploaded.
async def upload_file_via_url(self, url: str) -> baserow.file.File:
698    async def upload_file_via_url(self, url: str) -> File:
699        """
700        Uploads a file from a given URL to the storage of Baserow. The Baserow
701        instance must have access to this URL.
702
703        After the file is uploaded, it needs to be linked to the field in the
704        table row. For this, with the Client.update_row() method, either the
705        complete field.File instance can be added as a list item to the File
706        field or simply the name (field.File.name, the name is unique in any
707        case).
708
709        Example usage:
710
711        ```python
712        rsl = await client.upload_file_via_url("https://picsum.photos/500")
713
714        table_id = 23
715        row_id = 42
716        file_field_name = "Attachments"
717        await client.update_row(
718            table_id,
719            row_id,
720            {file_field_name: [{"name": rsl.name}]},
721            True,
722        )
723        ```
724
725        It's also possible to upload a locally available file. For this purpose,
726        use `Client.upload_file()`.
727
728        Args:
729            url (str): The URL of the file.
730        """
731        return await self._typed_request(
732            "post",
733            _url_join(
734                self._url,
735                API_PREFIX,
736                "user-files/upload-via-url",
737            ),
738            File,
739            CONTENT_TYPE_JSON,
740            json={"url": url}
741        )

Uploads a file from a given URL to the storage of Baserow. The Baserow instance must have access to this URL.

After the file is uploaded, it needs to be linked to the field in the table row. For this, with the Client.update_row() method, either the complete field.File instance can be added as a list item to the File field or simply the name (field.File.name, the name is unique in any case).

Example usage:

rsl = await client.upload_file_via_url("https://picsum.photos/500")

table_id = 23
row_id = 42
file_field_name = "Attachments"
await client.update_row(
    table_id,
    row_id,
    {file_field_name: [{"name": rsl.name}]},
    True,
)

It's also possible to upload a locally available file. For this purpose, use Client.upload_file().

Arguments:
  • url (str): The URL of the file.
async def delete_row(self, table_id: int, row_id: Union[int, list[int]]):
743    async def delete_row(
744        self,
745        table_id: int,
746        row_id: Union[int, list[int]],
747    ):
748        """
749        Deletes a row with the given ID in the table with the given ID. It's
750        also possible to delete more than one row simultaneously. For this, a
751        list of IDs can be passed using the row_id parameter.
752
753        Args:
754            table_id (int): The ID of the table where the row should be deleted.
755            row_id (Union[int, list[int]]): The ID(s) of the row(s) which should
756                be deleted.
757        """
758        if isinstance(row_id, int):
759            return await self._request(
760                "delete",
761                _url_join(
762                    self._url,
763                    API_PREFIX,
764                    "database/rows/table",
765                    str(table_id),
766                    str(row_id),
767                ),
768                None,
769            )
770        return await self._request(
771            "post",
772            _url_join(
773                self._url,
774                API_PREFIX,
775                "database/rows/table",
776                str(table_id),
777                "batch-delete",
778            ),
779            None,
780            CONTENT_TYPE_JSON,
781            None,
782            {"items": row_id},
783        )

Deletes a row with the given ID in the table with the given ID. It's also possible to delete more than one row simultaneously. For this, a list of IDs can be passed using the row_id parameter.

Arguments:
  • table_id (int): The ID of the table where the row should be deleted.
  • row_id (Union[int, list[int]]): The ID(s) of the row(s) which should be deleted.
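
Both variants in one sketch; table and row IDs are placeholders:

```python
# Delete a single row ...
await client.delete_row(23, 42)

# ... or several rows in one request via Baserow's batch-delete endpoint.
await client.delete_row(23, [43, 44, 45])
```
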
@jwt_only
async def list_database_tables(self, database_id: int) -> DatabaseTablesResponse:
785    @jwt_only
786    async def list_database_tables(
787        self,
788        database_id: int,
789    ) -> DatabaseTablesResponse:
790        """
791        Lists all the tables in the database given by its ID. Please note
792        that this method only works when access is
793        through a JWT token, meaning login credentials are used for
794        authentication. Additionally, the account being used must have access to
795        the database/workspace.
796
797        Args:
798            database_id (int): The ID of the database from which one wants to
799                retrieve a listing of all tables. 
800        """
801        return await self._typed_request(
802            "get",
803            _url_join(
804                self._url,
805                API_PREFIX,
806                "database/tables/database",
807                str(database_id),
808            ),
809            DatabaseTablesResponse,
810        )

Lists all the tables in the database given by its ID. Please note that this method only works when access is through a JWT token, meaning login credentials are used for authentication. Additionally, the account being used must have access to the database/workspace.

Arguments:
  • database_id (int): The ID of the database from which one wants to retrieve a listing of all tables.
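
A sketch under the assumption that `client` was created with email/password (JWT authentication) and that database 17 is accessible to that account:

```python
tables = await client.list_database_tables(17)
for table in tables.root:
    print(table.id, table.name)
```
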
@jwt_only
async def create_database_table( self, database_id: int, name: str, client_session_id: Optional[str] = None, client_undo_redo_action_group_id: Optional[str] = None) -> DatabaseTableResponse:
812    @jwt_only
813    async def create_database_table(
814        self,
815        database_id: int,
816        name: str,
817        client_session_id: Optional[str] = None,
818        client_undo_redo_action_group_id: Optional[str] = None,
819    ) -> DatabaseTableResponse:
820        """
821        Synchronously creates a new table with the given name in the database
822        specified by the database_id parameter.
823
824        Args:
825            database_id (int): The ID of the database in which the new table
826                should be created.
827            name (str): Human readable name for the new table.
828            client_session_id (str, optional): An optional UUID that marks
829                the action performed by this request as having occurred in a
830                particular client session. Then using the undo/redo endpoints
831                with the same ClientSessionId header this action can be
832                undone/redone.
833            client_undo_redo_action_group_id (str, optional): An optional UUID
834                that marks the action performed by this request as having
835                occurred in a particular action group. Then calling the undo/redo
836                endpoint with the same ClientSessionId header, all the actions
837                belonging to the same action group can be undone/redone together
838                in a single API call.
839        """
840        headers: dict[str, str] = CONTENT_TYPE_JSON.copy()
841        if client_session_id:
842            headers["ClientSessionId"] = client_session_id
843        if client_undo_redo_action_group_id:
844            headers["ClientUndoRedoActionGroupId"] = client_undo_redo_action_group_id
845        return await self._typed_request(
846            "post",
847            _url_join(
848                self._url,
849                API_PREFIX,
850                "database/tables/database",
851                str(database_id),
852            ),
853            DatabaseTableResponse,
854            headers=headers,
855            json={"name": name},
856        )

Synchronously creates a new table with the given name in the database specified by the database_id parameter.

Arguments:
  • database_id (int): The ID of the database in which the new table should be created.
  • name (str): Human readable name for the new table.
  • client_session_id (str, optional): An optional UUID that marks the action performed by this request as having occurred in a particular client session. Then using the undo/redo endpoints with the same ClientSessionId header this action can be undone/redone.
  • client_undo_redo_action_group_id (str, optional): An optional UUID that marks the action performed by this request as having occurred in a particular action group. Then calling the undo/redo endpoint with the same ClientSessionId header, all the actions belonging to the same action group can be undone/redone together in a single API call.
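
Sketch (JWT-authenticated `client` and database ID assumed):

```python
# Optionally pass client_session_id to make the action undoable in that session.
table = await client.create_database_table(17, "Projects")
print(table.id, table.name)
```
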
async def create_database_table_field( self, table_id: int, field: FieldConfigType, client_session_id: Optional[str] = None, client_undo_redo_action_group_id: Optional[str] = None) -> FieldConfig:
858    async def create_database_table_field(
859        self,
860        table_id: int,
861        field: FieldConfigType,
862        client_session_id: Optional[str] = None,
863        client_undo_redo_action_group_id: Optional[str] = None,
864    ) -> FieldConfig:
865        """
866        Adds a new field to a table specified by its ID.
867
868        Args:
869            table_id (int): The ID of the table to be altered.
870            field (FieldConfigType): The config of the field to be added.
871            client_session_id (str, optional): An optional UUID that marks
872                the action performed by this request as having occurred in a
873                particular client session. Then using the undo/redo endpoints
874                with the same ClientSessionId header this action can be
875                undone/redone.
876            client_undo_redo_action_group_id (str, optional): An optional UUID
877                that marks the action performed by this request as having
878                occurred in a particular action group. Then calling the undo/redo
879                endpoint with the same ClientSessionId header, all the actions
880                belonging to the same action group can be undone/redone together
881                in a single API call.
882        """
883        headers: dict[str, str] = CONTENT_TYPE_JSON.copy()
884        if client_session_id:
885            headers["ClientSessionId"] = client_session_id
886        if client_undo_redo_action_group_id:
887            headers["ClientUndoRedoActionGroupId"] = client_undo_redo_action_group_id
888        return await self._typed_request(
889            "post",
890            _url_join(
891                self._url,
892                API_PREFIX,
893                "database/fields/table",
894                str(table_id),
895            ),
896            FieldConfig,
897            headers=headers,
898            json=field.model_dump(),
899        )

Adds a new field to a table specified by its ID.

Arguments:
  • table_id (int): The ID of the table to be altered.
  • field (FieldConfigType): The config of the field to be added.
  • client_session_id (str, optional): An optional UUID that marks the action performed by this request as having occurred in a particular client session. Then using the undo/redo endpoints with the same ClientSessionId header this action can be undone/redone.
  • client_undo_redo_action_group_id (str, optional): An optional UUID that marks the action performed by this request as having occurred in a particular action group. Then calling the undo/redo endpoint with the same ClientSessionId header, all the actions belonging to the same action group can be undone/redone together in a single API call.
async def update_database_table_field( self, field_id: int, field: FieldConfigType | dict[str, Any], client_session_id: Optional[str] = None, client_undo_redo_action_group_id: Optional[str] = None) -> FieldConfig:
901    async def update_database_table_field(
902        self,
903        field_id: int,
904        field: FieldConfigType | dict[str, Any],
905        client_session_id: Optional[str] = None,
906        client_undo_redo_action_group_id: Optional[str] = None,
907    ) -> FieldConfig:
908        """
909        Updates a table field defined by its ID.
910
911        Args:
912            field_id (int): The ID of the field to be updated.
913            field (FieldConfigType | dict[str, Any]): The new config for the
914                field.
915            client_session_id (str, optional): An optional UUID that marks
916                the action performed by this request as having occurred in a
917                particular client session. Then using the undo/redo endpoints
918                with the same ClientSessionId header this action can be
919                undone/redone.
920            client_undo_redo_action_group_id (str, optional): An optional UUID
921                that marks the action performed by this request as having
922                occurred in a particular action group. Then calling the undo/redo
923                endpoint with the same ClientSessionId header, all the actions
924                belonging to the same action group can be undone/redone together
925                in a single API call.
926        """
927        headers: dict[str, str] = CONTENT_TYPE_JSON.copy()
928        if client_session_id:
929            headers["ClientSessionId"] = client_session_id
930        if client_undo_redo_action_group_id:
931            headers["ClientUndoRedoActionGroupId"] = client_undo_redo_action_group_id
932
933        if not isinstance(field, dict):
934            json = field.model_dump(by_alias=True)
935        else:
936            json = field
937
938        return await self._typed_request(
939            "patch",
940            _url_join(
941                self._url,
942                API_PREFIX,
943                "database/fields",
944                str(field_id),
945            ),
946            FieldConfig,
947            headers=headers,
948            json=json,
949        )

Updates a table field defined by its ID.

Arguments:
  • field_id (int): The ID of the field to be updated.
  • field (FieldConfigType | dict[str, Any]): The new config for the field.
  • client_session_id (str, optional): An optional UUID that marks the action performed by this request as having occurred in a particular client session. Then using the undo/redo endpoints with the same ClientSessionId header this action can be undone/redone.
  • client_undo_redo_action_group_id (str, optional): An optional UUID that marks the action performed by this request as having occurred in a particular action group. Then calling the undo/redo endpoint with the same ClientSessionId header, all the actions belonging to the same action group can be undone/redone together in a single API call.
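
As a sketch, a field can be renamed by passing a plain dictionary; the assumption that `{"name": ...}` is an accepted payload follows Baserow's field update endpoint, and the field ID is a placeholder:

```python
updated = await client.update_database_table_field(4711, {"name": "Status"})
print(updated)
```
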
async def delete_database_table_field( self, field_id: int, client_session_id: Optional[str] = None, client_undo_redo_action_group_id: Optional[str] = None):
951    async def delete_database_table_field(
952        self,
953        field_id: int,
954        client_session_id: Optional[str] = None,
955        client_undo_redo_action_group_id: Optional[str] = None,
956    ):
957        """
958        Deletes a table field defined by its ID.
959
960        Args:
961            field_id (int): The ID of the field to be deleted.
962            client_session_id (str, optional): An optional UUID that marks
963                the action performed by this request as having occurred in a
964                particular client session. Then using the undo/redo endpoints
965                with the same ClientSessionId header this action can be
966                undone/redone.
967            client_undo_redo_action_group_id (str, optional): An optional UUID
968                that marks the action performed by this request as having
969                occurred in a particular action group. Then calling the undo/redo
970                endpoint with the same ClientSessionId header, all the actions
971                belonging to the same action group can be undone/redone together
972                in a single API call.
973        """
974        headers: dict[str, str] = CONTENT_TYPE_JSON.copy()
975        if client_session_id:
976            headers["ClientSessionId"] = client_session_id
977        if client_undo_redo_action_group_id:
978            headers["ClientUndoRedoActionGroupId"] = client_undo_redo_action_group_id
979        await self._request(
980            "DELETE",
981            _url_join(
982                self._url,
983                API_PREFIX,
984                "database/fields",
985                str(field_id),
986            ),
987            None,
988            headers=headers,
989        )

Deletes a table field defined by its ID.

Arguments:
  • field_id (int): The ID of the field to be deleted.
  • client_session_id (str, optional): An optional UUID that marks the action performed by this request as having occurred in a particular client session. Then using the undo/redo endpoints with the same ClientSessionId header this action can be undone/redone.
  • client_undo_redo_action_group_id (str, optional): An optional UUID that marks the action performed by this request as having occurred in a particular action group. Then calling the undo/redo endpoint with the same ClientSessionId header, all the actions belonging to the same action group can be undone/redone together in a single API call.
async def close(self):
991    async def close(self):
992        """
993        The connection session with the client is terminated. Subsequently, the
994        client object cannot be used anymore. It is necessary to explicitly and
995        manually close the session only when the client object is directly
996        instantiated.
997        """
998        await self._session.close()

The connection session with the client is terminated. Subsequently, the client object cannot be used anymore. It is necessary to explicitly and manually close the session only when the client object is directly instantiated.

class GlobalClient(Client):
1117class GlobalClient(Client):
1118    """
1119    The singleton version of the client wraps `Client` in a single, shared
1120    instance. The parameters (URL and access tokens) can be configured
1121    independently of the actual instantiation.
1122
1123    Unless specified otherwise, this singleton is used by all table instances
1124    for accessing Baserow.
1125
1126    This is helpful in systems where the client can be configured once at
1127    program start (e.g., in the `__main__.py`) based on the settings file and
1128    then used throughout the program without specifying additional parameters.
1129    The Singleton pattern ensures that only one instance of the client is used
1130    throughout the entire program, thereby maintaining full control over the
1131    `aiohttp.ClientSession`.
1132
1133    The configuration can be done either directly in the code using
1134    `GlobalClient.configure()` or from a JSON file using
1135    `GlobalClient.from_file()`.
1136    """
1137    _instance: Optional[Client] = None
1138    _is_initialized: bool = False
1139    is_configured: bool = False
1140    __url: str = ""
1141    __token: Optional[str] = None
1142    __email: Optional[str] = None
1143    __password: Optional[str] = None
1144
1145    def __new__(cls):
1146        if not cls.is_configured:
1147            raise PackageClientNotConfiguredError
1148        if cls._instance is None:
1149            cls._instance = super().__new__(cls)
1150        return cls._instance
1151
1152    def __init__(self):
1153        if not self._is_initialized:
1154            super().__init__(
1155                self.__url,
1156                token=self.__token,
1157                email=self.__email,
1158                password=self.__password,
1159            )
1160            self._is_initialized = True
1161
1162    @classmethod
1163    def configure(
1164        cls,
1165        url: str,
1166        token: Optional[str] = None,
1167        email: Optional[str] = None,
1168        password: Optional[str] = None,
1169    ):
1170        """
1171        Set the URL and token before the first use of the client.
1172
1173        Args:
1174            token (str, optional): An access token (referred to as a database token
1175                in Baserow's documentation) can be generated in the user settings
1176                within Baserow.
1177            email (str, optional): Email address of a Baserow user for the JWT
1178                authentication.
1179            password (str, optional): Password of a Baserow user for the JWT
1180                authentication.
1181        """
1182        if cls.is_configured:
1183            raise PackageClientAlreadyConfiguredError(cls.__url, url)
1184        cls.__url = url
1185        cls.__token = token
1186        cls.__email = email
1187        cls.__password = password
1188        cls.is_configured = True
1189
1190    @classmethod
1191    def from_file(cls, path: str):
1192        """
1193        Attempts to load the client configuration from the given JSON file. As
1194        with `GlobalClient.configure()`, it is only possible to use either the
1195        token or the login credentials. Structure of the config file for Token:
1196
1197        ```json
1198        {
1199            "url": "https://your.baserow.instance",
1200            "token": "your_token_here"
1201        }
1202        ```
1203
1204        For JWT authentication using login credentials:
1205
1206        ```json
1207        {
1208            "url": "https://your.baserow.instance",
1209            "email": "your-login-mail@example.com",
1210            "password": "your-secret-password"
1211        }
1212        ```
1213
1214        Args:
1215            path: Path to input JSON-file.
1216        """
1217        with open(path, "r") as f:
1218            cfg = json.load(f)
1219
1220        cls.configure(
1221            cfg["url"],
1222            token=cfg["token"] if "token" in cfg else None,
1223            email=cfg["email"] if "email" in cfg else None,
1224            password=cfg["password"] if "password" in cfg else None,
1225        )

The singleton version of the client wraps Client in a single, shared instance. The parameters (URL and access tokens) can be configured independently of the actual instantiation.

Unless specified otherwise, this singleton is used by all table instances for accessing Baserow.

This is helpful in systems where the client can be configured once at program start (e.g., in the __main__.py) based on the settings file and then used throughout the program without specifying additional parameters. The Singleton pattern ensures that only one instance of the client is used throughout the entire program, thereby maintaining full control over the aiohttp.ClientSession.

The configuration can be done either directly in the code using GlobalClient.configure() or from a JSON file using GlobalClient.from_file().
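
A minimal sketch of this pattern, with a placeholder URL and token:

```python
from baserow.client import GlobalClient

# Configure once at program start, e.g. in __main__.py ...
GlobalClient.configure(
    "https://your.baserow.instance",
    token="your_database_token",
)

# ... afterwards every instantiation anywhere in the program yields the same
# underlying client and aiohttp.ClientSession.
client = GlobalClient()
rows = await client.list_table_rows(23, True)
```
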

is_configured: bool = False
@classmethod
def configure( cls, url: str, token: Optional[str] = None, email: Optional[str] = None, password: Optional[str] = None):
1162    @classmethod
1163    def configure(
1164        cls,
1165        url: str,
1166        token: Optional[str] = None,
1167        email: Optional[str] = None,
1168        password: Optional[str] = None,
1169    ):
1170        """
1171        Set the URL and token before the first use of the client.
1172
1173        Args:
1174            token (str, optional): An access token (referred to as a database token
1175                in Baserow's documentation) can be generated in the user settings
1176                within Baserow.
1177            email (str, optional): Email address of a Baserow user for the JWT
1178                authentication.
1179            password (str, optional): Password of a Baserow user for the JWT
1180                authentication.
1181        """
1182        if cls.is_configured:
1183            raise PackageClientAlreadyConfiguredError(cls.__url, url)
1184        cls.__url = url
1185        cls.__token = token
1186        cls.__email = email
1187        cls.__password = password
1188        cls.is_configured = True

Set the URL and token before the first use of the client.

Arguments:
  • token (str, optional): An access token (referred to as a database token in Baserow's documentation) can be generated in the user settings within Baserow.
  • email (str, optional): Email address of a Baserow user for the JWT authentication.
  • password (str, optional): Password of a Baserow user for the JWT authentication.
@classmethod
def from_file(cls, path: str):
1190    @classmethod
1191    def from_file(cls, path: str):
1192        """
1193        Attempts to load the client configuration from the given JSON file. As
1194        with `GlobalClient.configure()`, it is only possible to use either the
1195        token or the login credentials. Structure of the config file for Token:
1196
1197        ```json
1198        {
1199            "url": "https://your.baserow.instance",
1200            "token": "your_token_here"
1201        }
1202        ```
1203
1204        For JWT authentication using login credentials:
1205
1206        ```json
1207        {
1208            "url": "https://your.baserow.instance",
1209            "email": "your-login-mail@example.com",
1210            "password": "your-secret-password"
1211        }
1212        ```
1213
1214        Args:
1215            path: Path to input JSON-file.
1216        """
1217        with open(path, "r") as f:
1218            cfg = json.load(f)
1219
1220        cls.configure(
1221            cfg["url"],
1222            token=cfg["token"] if "token" in cfg else None,
1223            email=cfg["email"] if "email" in cfg else None,
1224            password=cfg["password"] if "password" in cfg else None,
1225        )

Attempts to load the client configuration from the given JSON file. As with GlobalClient.configure(), it is only possible to use either the token or the login credentials. Structure of the config file for Token:

{
    "url": "https:/your.baserow.instance"
    "token": "your_token_here"
}

For JWT authentication using login credentials:

{
    "url": "https:/your.baserow.instance"
    "email": "your-login-mail@example.com",
    "password": "your-secret-password"
}

Arguments:
  • path: Path to input JSON-file.
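
Usage sketch, with a hypothetical path to the config file:

```python
GlobalClient.from_file("config/baserow.json")
client = GlobalClient()  # uses the URL and credentials loaded from the file
```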