
Bulk charges

BulkChargeClient

Bases: BaseAPIClient

Provides a wrapper for the paystack Bulk Charge API

The Bulk Charges API allows you to create and manage multiple recurring payments from your customers. https://paystack.com/docs/api/bulk-charge/

Source code in src/pypaystack2/sub_clients/sync_clients/bulk_charges.py
class BulkChargeClient(BaseAPIClient):
    """Provides a wrapper for paystack Bulk Charge API

    The Bulk Charges API allows you to create and manage multiple recurring payments from your customers.
    https://paystack.com/docs/api/bulk-charge/
    """

    def initiate(
        self,
        body: list[BulkChargeInstruction],
        alternate_model_class: type[PaystackDataModel] | None = None,
    ) -> Response[BulkCharge] | Response[PaystackDataModel]:
        """
        Send a list of dictionaries with authorization ``codes`` and ``amount``
        (in kobo if currency is NGN, pesewas, if currency is GHS, and cents,
        if currency is ZAR ) so paystack can process transactions as a batch.

        Args:
            body: A list of BulkChargeInstruction.
            alternate_model_class: A pydantic model class to use instead of the
                default pydantic model used by the library to present the data in
                the `Response.data`. The default behaviour of the library is to
                set  `Response.data` to `None` if it fails to serialize the data
                returned from paystack with the model provided in the library.
                Providing a pydantic model class via this parameter overrides
                the library default model with the model class you provide.
                This can come in handy when the models in the library do not
                accurately represent the data returned, and you prefer working with the
                data as a pydantic model instead of as a dict of the response returned
                by paystack before it is serialized with pydantic models, The original
                data can be accessed via `Response.raw`.

        Returns:
            A pydantic model containing the response gotten from paystack's server.
        """

        url = self._full_url("/bulkcharge")
        payload = [item.model_dump() for item in body]
        return self._handle_request(  # type: ignore
            HTTPMethod.POST,
            url,
            payload,
            response_data_model_class=alternate_model_class or BulkCharge,
        )

    def get_batches(
        self,
        page: int = 1,
        pagination: int = 50,
        start_date: str | None = None,
        end_date: str | None = None,
        alternate_model_class: type[PaystackDataModel] | None = None,
    ) -> Response[list[BulkCharge]] | Response[PaystackDataModel]:
        """This gets all bulk charge batches created by the integration.

        Args:
            page: Specify exactly what transfer you want to page. If not specified, we use a default value of 1.
            pagination: Specify how many records you want to retrieve per page.
                If not specified we use a default value of 50.
            start_date: A timestamp from which to start listing batches e.g. 2016-09-24T00:00:05.000Z, 2016-09-21
            end_date: A timestamp at which to stop listing batches e.g. 2016-09-24T00:00:05.000Z, 2016-09-21
            alternate_model_class: A pydantic model class to use instead of the
                default pydantic model used by the library to present the data in
                the `Response.data`. The default behaviour of the library is to
                set  `Response.data` to `None` if it fails to serialize the data
                returned from paystack with the model provided in the library.
                Providing a pydantic model class via this parameter overrides
                the library default model with the model class you provide.
                This can come in handy when the models in the library do not
                accurately represent the data returned, and you prefer working with the
                data as a pydantic model instead of as a dict of the response returned
                by  paystack before it is serialized with pydantic models, The original
                data can be accessed via `Response.raw`.

        Returns:
            A pydantic model containing the response gotten from paystack's server.
        """

        url = self._full_url(f"/bulkcharge?perPage={pagination}")
        query_params = [
            ("page", page),
            ("from", start_date),
            ("to", end_date),
        ]
        url = append_query_params(query_params, url)
        return self._handle_request(  # type: ignore
            HTTPMethod.GET,
            url,
            response_data_model_class=alternate_model_class or BulkCharge,
        )

    def get_batch(
        self,
        id_or_code: str | int,
        alternate_model_class: type[PaystackDataModel] | None = None,
    ) -> Response[BulkCharge] | Response[PaystackDataModel]:
        """
        This method retrieves a specific batch code. It also returns
        useful information on its progress by way of the total_charges
        and pending_charges attributes in the Response.

        Args:
            id_or_code: An ID or code for the charge whose batches you want to retrieve.
            alternate_model_class: A pydantic model class to use instead of the
                default pydantic model used by the library to present the data in
                the `Response.data`. The default behaviour of the library is to
                set  `Response.data` to `None` if it fails to serialize the data
                returned from paystack with the model provided in the library.
                Providing a pydantic model class via this parameter overrides
                the library default model with the model class you provide.
                This can come in handy when the models in the library do not
                accurately represent the data returned, and you prefer working with the
                data as a pydantic model instead of as a dict of the response returned
                by  paystack before it is serialized with pydantic models, The original
                data can be accessed via `Response.raw`.

        Returns:
            A pydantic model containing the response gotten from paystack's server.
        """

        url = self._full_url(f"/bulkcharge/{id_or_code}")
        return self._handle_request(  # type: ignore
            HTTPMethod.GET,
            url,
            response_data_model_class=alternate_model_class or BulkCharge,
        )

    def get_charges_in_batch(
        self,
        id_or_code: str | int,
        status: Status,
        pagination: int = 50,
        page: int = 1,
        start_date: str | None = None,
        end_date: str | None = None,
        alternate_model_class: type[PaystackDataModel] | None = None,
    ) -> Response[list[BulkChargeUnitCharge]] | Response[PaystackDataModel]:
        """
        This method retrieves the charges associated with a specified
        batch code. Pagination parameters are available. You can also
        filter by status. Charge statuses can be `Status.PENDING`,
        `Status.SUCCESS` or `Status.FAILED`.

        Args:
            id_or_code: An ID or code for the batch whose charges you want to retrieve.
            status: Any of the values from the Status enum.
            pagination: Specify how many records you want to retrieve per page.
                If not specified we use a default value of 50.
            page: Specify exactly what transfer you want to page. If not specified we use a default value of 1.
            start_date: A timestamp from which to start listing batches e.g. 2016-09-24T00:00:05.000Z, 2016-09-21
            end_date: A timestamp at which to stop listing batches e.g. 2016-09-24T00:00:05.000Z, 2016-09-21
            alternate_model_class: A pydantic model class to use instead of the
                default pydantic model used by the library to present the data in
                the `Response.data`. The default behaviour of the library is to
                set  `Response.data` to `None` if it fails to serialize the data
                returned from paystack with the model provided in the library.
                Providing a pydantic model class via this parameter overrides
                the library default model with the model class you provide.
                This can come in handy when the models in the library do not
                accurately represent the data returned, and you prefer working with the
                data as a pydantic model instead of as a dict of the response returned
                by  paystack before it is serialized with pydantic models, The original
                data can be accessed via `Response.raw`.

        Returns:
            A pydantic model containing the response gotten from paystack's server.
        """

        url = self._full_url(f"/bulkcharge/{id_or_code}/charges?perPage={pagination}")
        query_params = [
            ("status", status),
            ("page", page),
            ("from", start_date),
            ("to", end_date),
        ]
        url = append_query_params(query_params, url)
        return self._handle_request(  # type: ignore
            HTTPMethod.GET,
            url,
            response_data_model_class=alternate_model_class or BulkChargeUnitCharge,
        )

    def pause_batch(
        self,
        batch_code: str,
        alternate_model_class: type[PaystackDataModel] | None = None,
    ) -> Response[None] | Response[PaystackDataModel]:
        """Use this method to pause processing a batch.

        Args:
            batch_code: The batch code for the bulk charge you want to pause.
            alternate_model_class: A pydantic model class to use instead of the
                default pydantic model used by the library to present the data in
                the `Response.data`. The default behaviour of the library is to
                set  `Response.data` to `None` if it fails to serialize the data
                returned from paystack with the model provided in the library.
                Providing a pydantic model class via this parameter overrides
                the library default model with the model class you provide.
                This can come in handy when the models in the library do not
                accurately represent the data returned, and you prefer working with the
                data as a pydantic model instead of as a dict of the response returned
                by  paystack before it is serialized with pydantic models, The original
                data can be accessed via `Response.raw`.

        Returns:
            A pydantic model containing the response gotten from paystack's server.
        """

        url = self._full_url(f"/bulkcharge/pause/{batch_code}")
        return self._handle_request(  # type: ignore
            HTTPMethod.GET,
            url,
            response_data_model_class=alternate_model_class,
        )

    def resume_batch(
        self,
        batch_code: str,
        alternate_model_class: type[PaystackDataModel] | None = None,
    ) -> Response[None] | Response[PaystackDataModel]:
        """Use this method to resume processing a batch

        Args:
            batch_code: The batch code for the bulk charge you want to resume.
            alternate_model_class: A pydantic model class to use instead of the
                default pydantic model used by the library to present the data in
                the `Response.data`. The default behaviour of the library is to
                set  `Response.data` to `None` if it fails to serialize the data
                returned from paystack with the model provided in the library.
                Providing a pydantic model class via this parameter overrides
                the library default model with the model class you provide.
                This can come in handy when the models in the library do not
                accurately represent the data returned, and you prefer working with the
                data as a pydantic model instead of as a dict of the response returned
                by  paystack before it is serialized with pydantic models, The original
                data can be accessed via `Response.raw`.

        Returns:
            A pydantic model containing the response gotten from paystack's server.
        """

        url = self._full_url(f"/bulkcharge/resume/{batch_code}")
        return self._handle_request(  # type: ignore
            HTTPMethod.GET,
            url,
            response_data_model_class=alternate_model_class,
        )

get_batch(id_or_code, alternate_model_class=None)

This method retrieves a specific bulk charge batch by its ID or code. It also returns useful information on the batch's progress by way of the total_charges and pending_charges attributes in the Response.

Parameters:

Name Type Description Default
id_or_code str | int

An ID or code of the batch you want to retrieve.

required
alternate_model_class type[PaystackDataModel] | None

A pydantic model class to use instead of the default pydantic model used by the library to present the data in Response.data. By default, the library sets Response.data to None if it fails to serialize the data returned from paystack with the model bundled in the library. Providing a pydantic model class via this parameter overrides the library's default model with the model class you provide. This can come in handy when the models in the library do not accurately represent the data returned and you would rather work with the data as a pydantic model than as the raw dict returned by paystack. The original data can be accessed via Response.raw.

None

Returns:

Type Description
Response[BulkCharge] | Response[PaystackDataModel]

A pydantic model containing the response gotten from paystack's server.

Source code in src/pypaystack2/sub_clients/sync_clients/bulk_charges.py
def get_batch(
    self,
    id_or_code: str | int,
    alternate_model_class: type[PaystackDataModel] | None = None,
) -> Response[BulkCharge] | Response[PaystackDataModel]:
    """
    This method retrieves a specific batch code. It also returns
    useful information on its progress by way of the total_charges
    and pending_charges attributes in the Response.

    Args:
        id_or_code: An ID or code for the charge whose batches you want to retrieve.
        alternate_model_class: A pydantic model class to use instead of the
            default pydantic model used by the library to present the data in
            the `Response.data`. The default behaviour of the library is to
            set  `Response.data` to `None` if it fails to serialize the data
            returned from paystack with the model provided in the library.
            Providing a pydantic model class via this parameter overrides
            the library default model with the model class you provide.
            This can come in handy when the models in the library do not
            accurately represent the data returned, and you prefer working with the
            data as a pydantic model instead of as a dict of the response returned
            by  paystack before it is serialized with pydantic models, The original
            data can be accessed via `Response.raw`.

    Returns:
        A pydantic model containing the response gotten from paystack's server.
    """

    url = self._full_url(f"/bulkcharge/{id_or_code}")
    return self._handle_request(  # type: ignore
        HTTPMethod.GET,
        url,
        response_data_model_class=alternate_model_class or BulkCharge,
    )
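
A minimal usage sketch for get_batch. The import path is inferred from the source path above; constructing the client with no arguments assumes your paystack secret key is supplied via configuration or the environment, and the batch code is a placeholder.

from pypaystack2.sub_clients.sync_clients.bulk_charges import BulkChargeClient

client = BulkChargeClient()  # assumed: secret key supplied via configuration/environment
response = client.get_batch(id_or_code="BCH_180tl7oq7cazw8b")  # placeholder batch code
if response.data is not None:
    # total_charges and pending_charges are the progress fields mentioned above
    print(response.data.total_charges, response.data.pending_charges)
else:
    print(response.raw)  # unserialized payload returned by paystack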

get_batches(page=1, pagination=50, start_date=None, end_date=None, alternate_model_class=None)

This gets all bulk charge batches created by the integration.

Parameters:

Name Type Description Default
page int

Specify exactly what page you want to retrieve. If not specified, we use a default value of 1.

1
pagination int

Specify how many records you want to retrieve per page. If not specified, we use a default value of 50.

50
start_date str | None

A timestamp from which to start listing batches, e.g. 2016-09-24T00:00:05.000Z or 2016-09-21.

None
end_date str | None

A timestamp at which to stop listing batches, e.g. 2016-09-24T00:00:05.000Z or 2016-09-21.

None
alternate_model_class type[PaystackDataModel] | None

A pydantic model class to use instead of the default pydantic model used by the library to present the data in Response.data. By default, the library sets Response.data to None if it fails to serialize the data returned from paystack with the model bundled in the library. Providing a pydantic model class via this parameter overrides the library's default model with the model class you provide. This can come in handy when the models in the library do not accurately represent the data returned and you would rather work with the data as a pydantic model than as the raw dict returned by paystack. The original data can be accessed via Response.raw.

None

Returns:

Type Description
Response[list[BulkCharge]] | Response[PaystackDataModel]

A pydantic model containing the response gotten from paystack's server.

Source code in src/pypaystack2/sub_clients/sync_clients/bulk_charges.py
def get_batches(
    self,
    page: int = 1,
    pagination: int = 50,
    start_date: str | None = None,
    end_date: str | None = None,
    alternate_model_class: type[PaystackDataModel] | None = None,
) -> Response[list[BulkCharge]] | Response[PaystackDataModel]:
    """This gets all bulk charge batches created by the integration.

    Args:
        page: Specify exactly what transfer you want to page. If not specified, we use a default value of 1.
        pagination: Specify how many records you want to retrieve per page.
            If not specified we use a default value of 50.
        start_date: A timestamp from which to start listing batches e.g. 2016-09-24T00:00:05.000Z, 2016-09-21
        end_date: A timestamp at which to stop listing batches e.g. 2016-09-24T00:00:05.000Z, 2016-09-21
        alternate_model_class: A pydantic model class to use instead of the
            default pydantic model used by the library to present the data in
            the `Response.data`. The default behaviour of the library is to
            set  `Response.data` to `None` if it fails to serialize the data
            returned from paystack with the model provided in the library.
            Providing a pydantic model class via this parameter overrides
            the library default model with the model class you provide.
            This can come in handy when the models in the library do not
            accurately represent the data returned, and you prefer working with the
            data as a pydantic model instead of as a dict of the response returned
            by  paystack before it is serialized with pydantic models, The original
            data can be accessed via `Response.raw`.

    Returns:
        A pydantic model containing the response gotten from paystack's server.
    """

    url = self._full_url(f"/bulkcharge?perPage={pagination}")
    query_params = [
        ("page", page),
        ("from", start_date),
        ("to", end_date),
    ]
    url = append_query_params(query_params, url)
    return self._handle_request(  # type: ignore
        HTTPMethod.GET,
        url,
        response_data_model_class=alternate_model_class or BulkCharge,
    )
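
A sketch of listing batches with pagination and date filters, under the same assumptions about imports and client construction as above; all values are placeholders.

from pypaystack2.sub_clients.sync_clients.bulk_charges import BulkChargeClient

client = BulkChargeClient()  # assumed: secret key supplied via configuration/environment
response = client.get_batches(
    page=1,
    pagination=20,
    start_date="2016-09-21",
    end_date="2016-09-24T00:00:05.000Z",
)
for batch in response.data or []:  # data is None if serialization into BulkCharge failed
    print(batch)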

get_charges_in_batch(id_or_code, status, pagination=50, page=1, start_date=None, end_date=None, alternate_model_class=None)

This method retrieves the charges associated with a specified batch code. Pagination parameters are available. You can also filter by status. Charge statuses can be Status.PENDING, Status.SUCCESS or Status.FAILED.

Parameters:

Name Type Description Default
id_or_code str | int

An ID or code for the batch whose charges you want to retrieve.

required
status Status

Any of the values from the Status enum.

required
pagination int

Specify how many records you want to retrieve per page. If not specified, we use a default value of 50.

50
page int

Specify exactly what page you want to retrieve. If not specified, we use a default value of 1.

1
start_date str | None

A timestamp from which to start listing batches, e.g. 2016-09-24T00:00:05.000Z or 2016-09-21.

None
end_date str | None

A timestamp at which to stop listing batches, e.g. 2016-09-24T00:00:05.000Z or 2016-09-21.

None
alternate_model_class type[PaystackDataModel] | None

A pydantic model class to use instead of the default pydantic model used by the library to present the data in Response.data. By default, the library sets Response.data to None if it fails to serialize the data returned from paystack with the model bundled in the library. Providing a pydantic model class via this parameter overrides the library's default model with the model class you provide. This can come in handy when the models in the library do not accurately represent the data returned and you would rather work with the data as a pydantic model than as the raw dict returned by paystack. The original data can be accessed via Response.raw.

None

Returns:

Type Description
Response[list[BulkChargeUnitCharge]] | Response[PaystackDataModel]

A pydantic model containing the response gotten from paystack's server.

Source code in src/pypaystack2/sub_clients/sync_clients/bulk_charges.py
def get_charges_in_batch(
    self,
    id_or_code: str | int,
    status: Status,
    pagination: int = 50,
    page: int = 1,
    start_date: str | None = None,
    end_date: str | None = None,
    alternate_model_class: type[PaystackDataModel] | None = None,
) -> Response[list[BulkChargeUnitCharge]] | Response[PaystackDataModel]:
    """
    This method retrieves the charges associated with a specified
    batch code. Pagination parameters are available. You can also
    filter by status. Charge statuses can be `Status.PENDING`,
    `Status.SUCCESS` or `Status.FAILED`.

    Args:
        id_or_code: An ID or code for the batch whose charges you want to retrieve.
        status: Any of the values from the Status enum.
        pagination: Specify how many records you want to retrieve per page.
            If not specified we use a default value of 50.
        page: Specify exactly what transfer you want to page. If not specified we use a default value of 1.
        start_date: A timestamp from which to start listing batches e.g. 2016-09-24T00:00:05.000Z, 2016-09-21
        end_date: A timestamp at which to stop listing batches e.g. 2016-09-24T00:00:05.000Z, 2016-09-21
        alternate_model_class: A pydantic model class to use instead of the
            default pydantic model used by the library to present the data in
            the `Response.data`. The default behaviour of the library is to
            set  `Response.data` to `None` if it fails to serialize the data
            returned from paystack with the model provided in the library.
            Providing a pydantic model class via this parameter overrides
            the library default model with the model class you provide.
            This can come in handy when the models in the library do not
            accurately represent the data returned, and you prefer working with the
            data as a pydantic model instead of as a dict of the response returned
            by  paystack before it is serialized with pydantic models, The original
            data can be accessed via `Response.raw`.

    Returns:
        A pydantic model containing the response gotten from paystack's server.
    """

    url = self._full_url(f"/bulkcharge/{id_or_code}/charges?perPage={pagination}")
    query_params = [
        ("status", status),
        ("page", page),
        ("from", start_date),
        ("to", end_date),
    ]
    url = append_query_params(query_params, url)
    return self._handle_request(  # type: ignore
        HTTPMethod.GET,
        url,
        response_data_model_class=alternate_model_class or BulkChargeUnitCharge,
    )
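
A sketch of filtering the charges in a batch by status. The Status import path is an assumption (adjust it to your installed version); the batch code is a placeholder.

from pypaystack2.enums import Status  # assumed import path for the Status enum
from pypaystack2.sub_clients.sync_clients.bulk_charges import BulkChargeClient

client = BulkChargeClient()  # assumed: secret key supplied via configuration/environment
response = client.get_charges_in_batch(
    id_or_code="BCH_180tl7oq7cazw8b",  # placeholder batch code
    status=Status.FAILED,  # only the failed charges in the batch
    pagination=50,
    page=1,
)
failed_charges = response.data or []
print(len(failed_charges), "failed charges")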

initiate(body, alternate_model_class=None)

Send a list of dictionaries, each with an authorization code and an amount (in kobo if the currency is NGN, pesewas if the currency is GHS, and cents if the currency is ZAR), so paystack can process the transactions as a batch.

Parameters:

Name Type Description Default
body list[BulkChargeInstruction]

A list of BulkChargeInstruction.

required
alternate_model_class type[PaystackDataModel] | None

A pydantic model class to use instead of the default pydantic model used by the library to present the data in Response.data. By default, the library sets Response.data to None if it fails to serialize the data returned from paystack with the model bundled in the library. Providing a pydantic model class via this parameter overrides the library's default model with the model class you provide. This can come in handy when the models in the library do not accurately represent the data returned and you would rather work with the data as a pydantic model than as the raw dict returned by paystack. The original data can be accessed via Response.raw.

None

Returns:

Type Description
Response[BulkCharge] | Response[PaystackDataModel]

A pydantic model containing the response gotten from paystack's server.

Source code in src/pypaystack2/sub_clients/sync_clients/bulk_charges.py
def initiate(
    self,
    body: list[BulkChargeInstruction],
    alternate_model_class: type[PaystackDataModel] | None = None,
) -> Response[BulkCharge] | Response[PaystackDataModel]:
    """
    Send a list of dictionaries with authorization ``codes`` and ``amount``
    (in kobo if currency is NGN, pesewas, if currency is GHS, and cents,
    if currency is ZAR ) so paystack can process transactions as a batch.

    Args:
        body: A list of BulkChargeInstruction.
        alternate_model_class: A pydantic model class to use instead of the
            default pydantic model used by the library to present the data in
            the `Response.data`. The default behaviour of the library is to
            set  `Response.data` to `None` if it fails to serialize the data
            returned from paystack with the model provided in the library.
            Providing a pydantic model class via this parameter overrides
            the library default model with the model class you provide.
            This can come in handy when the models in the library do not
            accurately represent the data returned, and you prefer working with the
            data as a pydantic model instead of as a dict of the response returned
            by paystack before it is serialized with pydantic models, The original
            data can be accessed via `Response.raw`.

    Returns:
        A pydantic model containing the response gotten from paystack's server.
    """

    url = self._full_url("/bulkcharge")
    payload = [item.model_dump() for item in body]
    return self._handle_request(  # type: ignore
        HTTPMethod.POST,
        url,
        payload,
        response_data_model_class=alternate_model_class or BulkCharge,
    )
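
A sketch of initiating a bulk charge. The BulkChargeInstruction import path and its field names (an authorization code plus an amount in subunits) are assumptions based on the description above; the authorization codes and amounts are placeholders.

from pypaystack2.models import BulkChargeInstruction  # assumed import path
from pypaystack2.sub_clients.sync_clients.bulk_charges import BulkChargeClient

client = BulkChargeClient()  # assumed: secret key supplied via configuration/environment
instructions = [
    # field names assumed to mirror paystack's payload: authorization code + amount in subunits
    BulkChargeInstruction(authorization="AUTH_n95vpedf", amount=2500),
    BulkChargeInstruction(authorization="AUTH_ljdt4e4j", amount=1500),
]
response = client.initiate(body=instructions)
print(response.data or response.raw)  # the created batch, or the raw payload if serialization failed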

pause_batch(batch_code, alternate_model_class=None)

Use this method to pause processing a batch.

Parameters:

Name Type Description Default
batch_code str

The batch code for the bulk charge you want to pause.

required
alternate_model_class type[PaystackDataModel] | None

A pydantic model class to use instead of the default pydantic model used by the library to present the data in Response.data. By default, the library sets Response.data to None if it fails to serialize the data returned from paystack with the model bundled in the library. Providing a pydantic model class via this parameter overrides the library's default model with the model class you provide. This can come in handy when the models in the library do not accurately represent the data returned and you would rather work with the data as a pydantic model than as the raw dict returned by paystack. The original data can be accessed via Response.raw.

None

Returns:

Type Description
Response[None] | Response[PaystackDataModel]

A pydantic model containing the response gotten from paystack's server.

Source code in src/pypaystack2/sub_clients/sync_clients/bulk_charges.py
def pause_batch(
    self,
    batch_code: str,
    alternate_model_class: type[PaystackDataModel] | None = None,
) -> Response[None] | Response[PaystackDataModel]:
    """Use this method to pause processing a batch.

    Args:
        batch_code: The batch code for the bulk charge you want to pause.
        alternate_model_class: A pydantic model class to use instead of the
            default pydantic model used by the library to present the data in
            the `Response.data`. The default behaviour of the library is to
            set  `Response.data` to `None` if it fails to serialize the data
            returned from paystack with the model provided in the library.
            Providing a pydantic model class via this parameter overrides
            the library default model with the model class you provide.
            This can come in handy when the models in the library do not
            accurately represent the data returned, and you prefer working with the
            data as a pydantic model instead of as a dict of the response returned
            by  paystack before it is serialized with pydantic models, The original
            data can be accessed via `Response.raw`.

    Returns:
        A pydantic model containing the response gotten from paystack's server.
    """

    url = self._full_url(f"/bulkcharge/pause/{batch_code}")
    return self._handle_request(  # type: ignore
        HTTPMethod.GET,
        url,
        response_data_model_class=alternate_model_class,
    )
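
A sketch of pausing a batch, under the same assumptions as above. Note that pause_batch passes alternate_model_class straight through, so Response.data stays None unless you supply a model class; the raw payload is still available.

from pypaystack2.sub_clients.sync_clients.bulk_charges import BulkChargeClient

client = BulkChargeClient()  # assumed: secret key supplied via configuration/environment
response = client.pause_batch(batch_code="BCH_180tl7oq7cazw8b")  # placeholder batch code
print(response.raw)  # data is None by default for this call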

resume_batch(batch_code, alternate_model_class=None)

Use this method to resume processing a batch.

Parameters:

Name Type Description Default
batch_code str

The batch code for the bulk charge you want to resume.

required
alternate_model_class type[PaystackDataModel] | None

A pydantic model class to use instead of the default pydantic model used by the library to present the data in Response.data. By default, the library sets Response.data to None if it fails to serialize the data returned from paystack with the model bundled in the library. Providing a pydantic model class via this parameter overrides the library's default model with the model class you provide. This can come in handy when the models in the library do not accurately represent the data returned and you would rather work with the data as a pydantic model than as the raw dict returned by paystack. The original data can be accessed via Response.raw.

None

Returns:

Type Description
Response[None] | Response[PaystackDataModel]

A pydantic model containing the response gotten from paystack's server.

Source code in src/pypaystack2/sub_clients/sync_clients/bulk_charges.py
def resume_batch(
    self,
    batch_code: str,
    alternate_model_class: type[PaystackDataModel] | None = None,
) -> Response[None] | Response[PaystackDataModel]:
    """Use this method to resume processing a batch

    Args:
        batch_code: The batch code for the bulk charge you want to resume.
        alternate_model_class: A pydantic model class to use instead of the
            default pydantic model used by the library to present the data in
            the `Response.data`. The default behaviour of the library is to
            set  `Response.data` to `None` if it fails to serialize the data
            returned from paystack with the model provided in the library.
            Providing a pydantic model class via this parameter overrides
            the library default model with the model class you provide.
            This can come in handy when the models in the library do not
            accurately represent the data returned, and you prefer working with the
            data as a pydantic model instead of as a dict of the response returned
            by  paystack before it is serialized with pydantic models, The original
            data can be accessed via `Response.raw`.

    Returns:
        A pydantic model containing the response gotten from paystack's server.
    """

    url = self._full_url(f"/bulkcharge/resume/{batch_code}")
    return self._handle_request(  # type: ignore
        HTTPMethod.GET,
        url,
        response_data_model_class=alternate_model_class,
    )
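
Resuming a paused batch mirrors pause_batch; the same assumptions and the same note about Response.data apply.

from pypaystack2.sub_clients.sync_clients.bulk_charges import BulkChargeClient

client = BulkChargeClient()  # assumed: secret key supplied via configuration/environment
response = client.resume_batch(batch_code="BCH_180tl7oq7cazw8b")  # placeholder batch code
print(response.raw)  # data is None by default for this call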

AsyncBulkChargeClient

Bases: BaseAsyncAPIClient

Provides a wrapper for the paystack Bulk Charge API

The Bulk Charges API allows you to create and manage multiple recurring payments from your customers. https://paystack.com/docs/api/bulk-charge/

Source code in src/pypaystack2/sub_clients/async_clients/bulk_charges.py
class AsyncBulkChargeClient(BaseAsyncAPIClient):
    """Provides a wrapper for paystack Bulk Charge API

    The Bulk Charges API allows you to create and manage multiple recurring payments from your customers.
    https://paystack.com/docs/api/bulk-charge/
    """

    async def initiate(
        self,
        body: list[BulkChargeInstruction],
        alternate_model_class: type[PaystackDataModel] | None = None,
    ) -> Response[BulkCharge] | Response[PaystackDataModel]:
        """
        Send a list of dictionaries with authorization ``codes`` and ``amount``
        (in kobo if currency is NGN, pesewas, if currency is GHS, and cents,
        if currency is ZAR ) so paystack can process transactions as a batch.

        Args:
            body: A list of BulkChargeInstruction.
            alternate_model_class: A pydantic model class to use instead of the
                default pydantic model used by the library to present the data in
                the `Response.data`. The default behaviour of the library is to
                set  `Response.data` to `None` if it fails to serialize the data
                returned from paystack with the model provided in the library.
                Providing a pydantic model class via this parameter overrides
                the library default model with the model class you provide.
                This can come in handy when the models in the library do not
                accurately represent the data returned, and you prefer working with the
                data as a pydantic model instead of as a dict of the response returned
                by paystack before it is serialized with pydantic models, The original
                data can be accessed via `Response.raw`.

        Returns:
            A pydantic model containing the response gotten from paystack's server.
        """

        url = self._full_url("/bulkcharge")
        payload = [item.model_dump() for item in body]
        return await self._handle_request(  # type: ignore
            HTTPMethod.POST,
            url,
            payload,
            response_data_model_class=alternate_model_class or BulkCharge,
        )

    async def get_batches(
        self,
        page: int = 1,
        pagination: int = 50,
        start_date: str | None = None,
        end_date: str | None = None,
        alternate_model_class: type[PaystackDataModel] | None = None,
    ) -> Response[list[BulkCharge]] | Response[PaystackDataModel]:
        """This gets all bulk charge batches created by the integration.

        Args:
            page: Specify exactly what transfer you want to page. If not specified, we use a default value of 1.
            pagination: Specify how many records you want to retrieve per page.
                If not specified we use a default value of 50.
            start_date: A timestamp from which to start listing batches e.g. 2016-09-24T00:00:05.000Z, 2016-09-21
            end_date: A timestamp at which to stop listing batches e.g. 2016-09-24T00:00:05.000Z, 2016-09-21
            alternate_model_class: A pydantic model class to use instead of the
                default pydantic model used by the library to present the data in
                the `Response.data`. The default behaviour of the library is to
                set  `Response.data` to `None` if it fails to serialize the data
                returned from paystack with the model provided in the library.
                Providing a pydantic model class via this parameter overrides
                the library default model with the model class you provide.
                This can come in handy when the models in the library do not
                accurately represent the data returned, and you prefer working with the
                data as a pydantic model instead of as a dict of the response returned
                by  paystack before it is serialized with pydantic models, The original
                data can be accessed via `Response.raw`.

        Returns:
            A pydantic model containing the response gotten from paystack's server.
        """

        url = self._full_url(f"/bulkcharge?perPage={pagination}")
        query_params = [
            ("page", page),
            ("from", start_date),
            ("to", end_date),
        ]
        url = append_query_params(query_params, url)
        return await self._handle_request(  # type: ignore
            HTTPMethod.GET,
            url,
            response_data_model_class=alternate_model_class or BulkCharge,
        )

    async def get_batch(
        self,
        id_or_code: str | int,
        alternate_model_class: type[PaystackDataModel] | None = None,
    ) -> Response[BulkCharge] | Response[PaystackDataModel]:
        """
        This method retrieves a specific batch code. It also returns
        useful information on its progress by way of the total_charges
        and pending_charges attributes in the Response.

        Args:
            id_or_code: An ID or code for the charge whose batches you want to retrieve.
            alternate_model_class: A pydantic model class to use instead of the
                default pydantic model used by the library to present the data in
                the `Response.data`. The default behaviour of the library is to
                set  `Response.data` to `None` if it fails to serialize the data
                returned from paystack with the model provided in the library.
                Providing a pydantic model class via this parameter overrides
                the library default model with the model class you provide.
                This can come in handy when the models in the library do not
                accurately represent the data returned, and you prefer working with the
                data as a pydantic model instead of as a dict of the response returned
                by  paystack before it is serialized with pydantic models, The original
                data can be accessed via `Response.raw`.

        Returns:
            A pydantic model containing the response gotten from paystack's server.
        """

        url = self._full_url(f"/bulkcharge/{id_or_code}")
        return await self._handle_request(  # type: ignore
            HTTPMethod.GET,
            url,
            response_data_model_class=alternate_model_class or BulkCharge,
        )

    async def get_charges_in_batch(
        self,
        id_or_code: str | int,
        status: Status,
        pagination: int = 50,
        page: int = 1,
        start_date: str | None = None,
        end_date: str | None = None,
        alternate_model_class: type[PaystackDataModel] | None = None,
    ) -> Response[list[BulkChargeUnitCharge]] | Response[PaystackDataModel]:
        """
        This method retrieves the charges associated with a specified
        batch code. Pagination parameters are available. You can also
        filter by status. Charge statuses can be `Status.PENDING`,
        `Status.SUCCESS` or `Status.FAILED`.

        Args:
            id_or_code: An ID or code for the batch whose charges you want to retrieve.
            status: Any of the values from the Status enum.
            pagination: Specify how many records you want to retrieve per page.
                If not specified we use a default value of 50.
            page: Specify exactly what transfer you want to page. If not specified we use a default value of 1.
            start_date: A timestamp from which to start listing batches e.g. 2016-09-24T00:00:05.000Z, 2016-09-21
            end_date: A timestamp at which to stop listing batches e.g. 2016-09-24T00:00:05.000Z, 2016-09-21
            alternate_model_class: A pydantic model class to use instead of the
                default pydantic model used by the library to present the data in
                the `Response.data`. The default behaviour of the library is to
                set  `Response.data` to `None` if it fails to serialize the data
                returned from paystack with the model provided in the library.
                Providing a pydantic model class via this parameter overrides
                the library default model with the model class you provide.
                This can come in handy when the models in the library do not
                accurately represent the data returned, and you prefer working with the
                data as a pydantic model instead of as a dict of the response returned
                by  paystack before it is serialized with pydantic models, The original
                data can be accessed via `Response.raw`.

        Returns:
            A pydantic model containing the response gotten from paystack's server.
        """

        url = self._full_url(f"/bulkcharge/{id_or_code}/charges?perPage={pagination}")
        query_params = [
            ("status", status),
            ("page", page),
            ("from", start_date),
            ("to", end_date),
        ]
        url = append_query_params(query_params, url)
        return await self._handle_request(  # type: ignore
            HTTPMethod.GET,
            url,
            response_data_model_class=alternate_model_class or BulkChargeUnitCharge,
        )

    async def pause_batch(
        self,
        batch_code: str,
        alternate_model_class: type[PaystackDataModel] | None = None,
    ) -> Response[None] | Response[PaystackDataModel]:
        """Use this method to pause processing a batch.

        Args:
            batch_code: The batch code for the bulk charge you want to pause.
            alternate_model_class: A pydantic model class to use instead of the
                default pydantic model used by the library to present the data in
                the `Response.data`. The default behaviour of the library is to
                set  `Response.data` to `None` if it fails to serialize the data
                returned from paystack with the model provided in the library.
                Providing a pydantic model class via this parameter overrides
                the library default model with the model class you provide.
                This can come in handy when the models in the library do not
                accurately represent the data returned, and you prefer working with the
                data as a pydantic model instead of as a dict of the response returned
                by  paystack before it is serialized with pydantic models, The original
                data can be accessed via `Response.raw`.

        Returns:
            A pydantic model containing the response gotten from paystack's server.
        """

        url = self._full_url(f"/bulkcharge/pause/{batch_code}")
        return await self._handle_request(  # type: ignore
            HTTPMethod.GET,
            url,
            response_data_model_class=alternate_model_class,
        )

    async def resume_batch(
        self,
        batch_code: str,
        alternate_model_class: type[PaystackDataModel] | None = None,
    ) -> Response[None] | Response[PaystackDataModel]:
        """Use this method to resume processing a batch

        Args:
            batch_code: The batch code for the bulk charge you want to resume.
            alternate_model_class: A pydantic model class to use instead of the
                default pydantic model used by the library to present the data in
                the `Response.data`. The default behaviour of the library is to
                set  `Response.data` to `None` if it fails to serialize the data
                returned from paystack with the model provided in the library.
                Providing a pydantic model class via this parameter overrides
                the library default model with the model class you provide.
                This can come in handy when the models in the library do not
                accurately represent the data returned, and you prefer working with the
                data as a pydantic model instead of as a dict of the response returned
                by  paystack before it is serialized with pydantic models, The original
                data can be accessed via `Response.raw`.

        Returns:
            A pydantic model containing the response gotten from paystack's server.
        """

        url = self._full_url(f"/bulkcharge/resume/{batch_code}")
        return await self._handle_request(  # type: ignore
            HTTPMethod.GET,
            url,
            response_data_model_class=alternate_model_class,
        )

get_batch(id_or_code, alternate_model_class=None) async

This method retrieves a specific bulk charge batch by its ID or code. It also returns useful information on the batch's progress by way of the total_charges and pending_charges attributes in the Response.

Parameters:

Name Type Description Default
id_or_code str | int

An ID or code of the batch you want to retrieve.

required
alternate_model_class type[PaystackDataModel] | None

A pydantic model class to use instead of the default pydantic model used by the library to present the data in Response.data. By default, the library sets Response.data to None if it fails to serialize the data returned from paystack with the model bundled in the library. Providing a pydantic model class via this parameter overrides the library's default model with the model class you provide. This can come in handy when the models in the library do not accurately represent the data returned and you would rather work with the data as a pydantic model than as the raw dict returned by paystack. The original data can be accessed via Response.raw.

None

Returns:

Type Description
Response[BulkCharge] | Response[PaystackDataModel]

A pydantic model containing the response gotten from paystack's server.

Source code in src/pypaystack2/sub_clients/async_clients/bulk_charges.py
async def get_batch(
    self,
    id_or_code: str | int,
    alternate_model_class: type[PaystackDataModel] | None = None,
) -> Response[BulkCharge] | Response[PaystackDataModel]:
    """
    This method retrieves a specific batch code. It also returns
    useful information on its progress by way of the total_charges
    and pending_charges attributes in the Response.

    Args:
        id_or_code: An ID or code for the charge whose batches you want to retrieve.
        alternate_model_class: A pydantic model class to use instead of the
            default pydantic model used by the library to present the data in
            the `Response.data`. The default behaviour of the library is to
            set  `Response.data` to `None` if it fails to serialize the data
            returned from paystack with the model provided in the library.
            Providing a pydantic model class via this parameter overrides
            the library default model with the model class you provide.
            This can come in handy when the models in the library do not
            accurately represent the data returned, and you prefer working with the
            data as a pydantic model instead of as a dict of the response returned
            by  paystack before it is serialized with pydantic models, The original
            data can be accessed via `Response.raw`.

    Returns:
        A pydantic model containing the response gotten from paystack's server.
    """

    url = self._full_url(f"/bulkcharge/{id_or_code}")
    return await self._handle_request(  # type: ignore
        HTTPMethod.GET,
        url,
        response_data_model_class=alternate_model_class or BulkCharge,
    )
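
The async client is used the same way from a coroutine; a minimal sketch under the same assumptions about imports and client construction, with a placeholder batch code.

import asyncio

from pypaystack2.sub_clients.async_clients.bulk_charges import AsyncBulkChargeClient

async def main() -> None:
    client = AsyncBulkChargeClient()  # assumed: secret key supplied via configuration/environment
    response = await client.get_batch(id_or_code="BCH_180tl7oq7cazw8b")  # placeholder batch code
    print(response.data or response.raw)

asyncio.run(main())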

get_batches(page=1, pagination=50, start_date=None, end_date=None, alternate_model_class=None) async

This gets all bulk charge batches created by the integration.

Parameters:

Name Type Description Default
page int

Specify exactly what page you want to retrieve. If not specified, we use a default value of 1.

1
pagination int

Specify how many records you want to retrieve per page. If not specified, we use a default value of 50.

50
start_date str | None

A timestamp from which to start listing batches, e.g. 2016-09-24T00:00:05.000Z or 2016-09-21.

None
end_date str | None

A timestamp at which to stop listing batches, e.g. 2016-09-24T00:00:05.000Z or 2016-09-21.

None
alternate_model_class type[PaystackDataModel] | None

A pydantic model class to use instead of the default pydantic model used by the library to present the data in Response.data. By default, the library sets Response.data to None if it fails to serialize the data returned from paystack with the model bundled in the library. Providing a pydantic model class via this parameter overrides the library's default model with the model class you provide. This can come in handy when the models in the library do not accurately represent the data returned and you would rather work with the data as a pydantic model than as the raw dict returned by paystack. The original data can be accessed via Response.raw.

None

Returns:

Type Description
Response[list[BulkCharge]] | Response[PaystackDataModel]

A pydantic model containing the response gotten from paystack's server.

Source code in src/pypaystack2/sub_clients/async_clients/bulk_charges.py
async def get_batches(
    self,
    page: int = 1,
    pagination: int = 50,
    start_date: str | None = None,
    end_date: str | None = None,
    alternate_model_class: type[PaystackDataModel] | None = None,
) -> Response[list[BulkCharge]] | Response[PaystackDataModel]:
    """This gets all bulk charge batches created by the integration.

    Args:
        page: Specify exactly what transfer you want to page. If not specified, we use a default value of 1.
        pagination: Specify how many records you want to retrieve per page.
            If not specified we use a default value of 50.
        start_date: A timestamp from which to start listing batches e.g. 2016-09-24T00:00:05.000Z, 2016-09-21
        end_date: A timestamp at which to stop listing batches e.g. 2016-09-24T00:00:05.000Z, 2016-09-21
        alternate_model_class: A pydantic model class to use instead of the
            default pydantic model used by the library to present the data in
            the `Response.data`. The default behaviour of the library is to
            set  `Response.data` to `None` if it fails to serialize the data
            returned from paystack with the model provided in the library.
            Providing a pydantic model class via this parameter overrides
            the library default model with the model class you provide.
            This can come in handy when the models in the library do not
            accurately represent the data returned, and you prefer working with the
            data as a pydantic model instead of as a dict of the response returned
            by  paystack before it is serialized with pydantic models, The original
            data can be accessed via `Response.raw`.

    Returns:
        A pydantic model containing the response gotten from paystack's server.
    """

    url = self._full_url(f"/bulkcharge?perPage={pagination}")
    query_params = [
        ("page", page),
        ("from", start_date),
        ("to", end_date),
    ]
    url = append_query_params(query_params, url)
    return await self._handle_request(  # type: ignore
        HTTPMethod.GET,
        url,
        response_data_model_class=alternate_model_class or BulkCharge,
    )
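
Listing batches asynchronously, a sketch with placeholder filter values and the same assumptions as above.

import asyncio

from pypaystack2.sub_clients.async_clients.bulk_charges import AsyncBulkChargeClient

async def main() -> None:
    client = AsyncBulkChargeClient()  # assumed: secret key supplied via configuration/environment
    response = await client.get_batches(pagination=20, start_date="2016-09-21")
    for batch in response.data or []:  # data is None if serialization into BulkCharge failed
        print(batch)

asyncio.run(main())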

get_charges_in_batch(id_or_code, status, pagination=50, page=1, start_date=None, end_date=None, alternate_model_class=None) async

This method retrieves the charges associated with a specified batch code. Pagination parameters are available. You can also filter by status. Charge statuses can be Status.PENDING, Status.SUCCESS or Status.FAILED.

Parameters:

Name Type Description Default
id_or_code str | int

An ID or code for the batch whose charges you want to retrieve.

required
status Status

Any of the values from the Status enum.

required
pagination int

Specify how many records you want to retrieve per page. If not specified, we use a default value of 50.

50
page int

Specify exactly what page you want to retrieve. If not specified, we use a default value of 1.

1
start_date str | None

A timestamp from which to start listing batches, e.g. 2016-09-24T00:00:05.000Z or 2016-09-21.

None
end_date str | None

A timestamp at which to stop listing batches, e.g. 2016-09-24T00:00:05.000Z or 2016-09-21.

None
alternate_model_class type[PaystackDataModel] | None

A pydantic model class to use instead of the default pydantic model used by the library to present the data in Response.data. By default, the library sets Response.data to None if it fails to serialize the data returned from paystack with the model bundled in the library. Providing a pydantic model class via this parameter overrides the library's default model with the model class you provide. This can come in handy when the models in the library do not accurately represent the data returned and you would rather work with the data as a pydantic model than as the raw dict returned by paystack. The original data can be accessed via Response.raw.

None

Returns:

Type Description
Response[list[BulkChargeUnitCharge]] | Response[PaystackDataModel]

A pydantic model containing the response gotten from paystack's server.

Source code in src/pypaystack2/sub_clients/async_clients/bulk_charges.py
async def get_charges_in_batch(
    self,
    id_or_code: str | int,
    status: Status,
    pagination: int = 50,
    page: int = 1,
    start_date: str | None = None,
    end_date: str | None = None,
    alternate_model_class: type[PaystackDataModel] | None = None,
) -> Response[list[BulkChargeUnitCharge]] | Response[PaystackDataModel]:
    """
    This method retrieves the charges associated with a specified
    batch code. Pagination parameters are available. You can also
    filter by status. Charge statuses can be `Status.PENDING`,
    `Status.SUCCESS` or `Status.FAILED`.

    Args:
        id_or_code: An ID or code for the batch whose charges you want to retrieve.
        status: Any of the values from the Status enum.
        pagination: Specify how many records you want to retrieve per page.
            If not specified we use a default value of 50.
        page: Specify exactly what transfer you want to page. If not specified we use a default value of 1.
        start_date: A timestamp from which to start listing batches e.g. 2016-09-24T00:00:05.000Z, 2016-09-21
        end_date: A timestamp at which to stop listing batches e.g. 2016-09-24T00:00:05.000Z, 2016-09-21
        alternate_model_class: A pydantic model class to use instead of the
            default pydantic model used by the library to present the data in
            the `Response.data`. The default behaviour of the library is to
            set  `Response.data` to `None` if it fails to serialize the data
            returned from paystack with the model provided in the library.
            Providing a pydantic model class via this parameter overrides
            the library default model with the model class you provide.
            This can come in handy when the models in the library do not
            accurately represent the data returned, and you prefer working with the
            data as a pydantic model instead of as a dict of the response returned
            by paystack before it is serialized with pydantic models. The original
            data can be accessed via `Response.raw`.

    Returns:
        A pydantic model containing the response gotten from paystack's server.
    """

    url = self._full_url(f"/bulkcharge/{id_or_code}/charges?perPage={pagination}")
    query_params = [
        ("status", status),
        ("page", page),
        ("from", start_date),
        ("to", end_date),
    ]
    url = append_query_params(query_params, url)
    return await self._handle_request(  # type: ignore
        HTTPMethod.GET,
        url,
        response_data_model_class=alternate_model_class or BulkChargeUnitCharge,
    )
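
Example: the sketch below shows one way to fetch only the failed charges in a batch with this method. The client import path, the AsyncBulkChargeClient class name, the secret_key constructor argument, and the Status import path are assumptions for illustration only and are not confirmed by this page.

import asyncio

# NOTE: the import paths, the AsyncBulkChargeClient class name, and the
# secret_key constructor argument below are assumptions; adapt them to how
# the async client is actually exposed by the package.
from pypaystack2.enums import Status
from pypaystack2.sub_clients.async_clients.bulk_charges import AsyncBulkChargeClient


async def list_failed_charges(batch_code: str) -> None:
    client = AsyncBulkChargeClient(secret_key="sk_test_xxxx")  # assumed constructor
    response = await client.get_charges_in_batch(
        id_or_code=batch_code,
        status=Status.FAILED,  # only charges that failed
        pagination=20,  # 20 records per page
        page=1,
    )
    # Response.data holds the serialized BulkChargeUnitCharge models (or None
    # if serialization failed); Response.raw holds the unparsed payload.
    print(response.data)
    print(response.raw)


asyncio.run(list_failed_charges("BCH_xxxxxxxx"))

Passing alternate_model_class with a pydantic model class of your own would replace BulkChargeUnitCharge as the type used to serialize Response.data, as described in the parameter table above.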

initiate(body, alternate_model_class=None) async

Send a list of dictionaries with authorization codes and amount (in kobo if the currency is NGN, pesewas if the currency is GHS, and cents if the currency is ZAR) so paystack can process transactions as a batch.

Parameters:

Name Type Description Default
body list[BulkChargeInstruction]

A list of BulkChargeInstruction.

required
alternate_model_class type[PaystackDataModel] | None

A pydantic model class to use instead of the default pydantic model used by the library to present the data in the Response.data. The default behaviour of the library is to set Response.data to None if it fails to serialize the data returned from paystack with the model provided in the library. Providing a pydantic model class via this parameter overrides the library default model with the model class you provide. This can come in handy when the models in the library do not accurately represent the data returned, and you prefer working with the data as a pydantic model instead of as a dict of the response returned by paystack before it is serialized with pydantic models. The original data can be accessed via Response.raw.

None

Returns:

Type Description
Response[BulkCharge] | Response[PaystackDataModel]

A pydantic model containing the response gotten from paystack's server.

Source code in src/pypaystack2/sub_clients/async_clients/bulk_charges.py
async def initiate(
    self,
    body: list[BulkChargeInstruction],
    alternate_model_class: type[PaystackDataModel] | None = None,
) -> Response[BulkCharge] | Response[PaystackDataModel]:
    """
    Send a list of dictionaries with authorization ``codes`` and ``amount``
    (in kobo if the currency is NGN, pesewas if the currency is GHS, and cents
    if the currency is ZAR) so paystack can process transactions as a batch.

    Args:
        body: A list of BulkChargeInstruction.
        alternate_model_class: A pydantic model class to use instead of the
            default pydantic model used by the library to present the data in
            the `Response.data`. The default behaviour of the library is to
            set  `Response.data` to `None` if it fails to serialize the data
            returned from paystack with the model provided in the library.
            Providing a pydantic model class via this parameter overrides
            the library default model with the model class you provide.
            This can come in handy when the models in the library do not
            accurately represent the data returned, and you prefer working with the
            data as a pydantic model instead of as a dict of the response returned
            by paystack before it is serialized with pydantic models. The original
            data can be accessed via `Response.raw`.

    Returns:
        A pydantic model containing the response gotten from paystack's server.
    """

    url = self._full_url("/bulkcharge")
    payload = [item.model_dump() for item in body]
    return await self._handle_request(  # type: ignore
        HTTPMethod.POST,
        url,
        payload,
        response_data_model_class=alternate_model_class or BulkCharge,
    )
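
Example: a sketch of initiating a batch of charges. The BulkChargeInstruction field names (authorization, amount) and both import paths are assumptions based on the Paystack bulk charge payload; check the actual model for the exact field names. Amounts are in the currency's subunit (e.g. kobo for NGN), as noted above.

import asyncio

# NOTE: the import paths, the AsyncBulkChargeClient class name, the
# secret_key constructor argument, and the BulkChargeInstruction field
# names are assumptions for illustration only.
from pypaystack2.models import BulkChargeInstruction
from pypaystack2.sub_clients.async_clients.bulk_charges import AsyncBulkChargeClient


async def charge_batch() -> None:
    client = AsyncBulkChargeClient(secret_key="sk_test_xxxx")  # assumed constructor
    # Each instruction pairs a reusable authorization code with an amount in
    # the subunit of the integration's currency (e.g. kobo for NGN).
    instructions = [
        BulkChargeInstruction(authorization="AUTH_xxxx1", amount=250_000),
        BulkChargeInstruction(authorization="AUTH_xxxx2", amount=100_000),
    ]
    response = await client.initiate(body=instructions)
    # The returned BulkCharge model carries the batch code that the pause,
    # resume, and charge-listing endpoints expect.
    print(response.data)


asyncio.run(charge_batch())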

pause_batch(batch_code, alternate_model_class=None) async

Use this method to pause processing a batch.

Parameters:

Name Type Description Default
batch_code str

The batch code for the bulk charge you want to pause.

required
alternate_model_class type[PaystackDataModel] | None

A pydantic model class to use instead of the default pydantic model used by the library to present the data in the Response.data. The default behaviour of the library is to set Response.data to None if it fails to serialize the data returned from paystack with the model provided in the library. Providing a pydantic model class via this parameter overrides the library default model with the model class you provide. This can come in handy when the models in the library do not accurately represent the data returned, and you prefer working with the data as a pydantic model instead of as a dict of the response returned by paystack before it is serialized with pydantic models. The original data can be accessed via Response.raw.

None

Returns:

Type Description
Response[None] | Response[PaystackDataModel]

A pydantic model containing the response gotten from paystack's server.

Source code in src/pypaystack2/sub_clients/async_clients/bulk_charges.py
async def pause_batch(
    self,
    batch_code: str,
    alternate_model_class: type[PaystackDataModel] | None = None,
) -> Response[None] | Response[PaystackDataModel]:
    """Use this method to pause processing a batch.

    Args:
        batch_code: The batch code for the bulk charge you want to pause.
        alternate_model_class: A pydantic model class to use instead of the
            default pydantic model used by the library to present the data in
            the `Response.data`. The default behaviour of the library is to
            set  `Response.data` to `None` if it fails to serialize the data
            returned from paystack with the model provided in the library.
            Providing a pydantic model class via this parameter overrides
            the library default model with the model class you provide.
            This can come in handy when the models in the library do not
            accurately represent the data returned, and you prefer working with the
            data as a pydantic model instead of as a dict of the response returned
            by paystack before it is serialized with pydantic models. The original
            data can be accessed via `Response.raw`.

    Returns:
        A pydantic model containing the response gotten from paystack's server.
    """

    url = self._full_url(f"/bulkcharge/pause/{batch_code}")
    return await self._handle_request(  # type: ignore
        HTTPMethod.GET,
        url,
        response_data_model_class=alternate_model_class,
    )
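
Example: a minimal sketch of pausing a running batch, under the same assumptions about the client's import path, class name, and constructor as the earlier examples.

import asyncio

from pypaystack2.sub_clients.async_clients.bulk_charges import AsyncBulkChargeClient  # name and path assumed


async def pause(batch_code: str) -> None:
    client = AsyncBulkChargeClient(secret_key="sk_test_xxxx")  # assumed constructor
    # Paystack stops processing the batch until it is explicitly resumed.
    response = await client.pause_batch(batch_code=batch_code)
    # No data model is set for this endpoint by default, so inspect Response.raw.
    print(response.raw)


asyncio.run(pause("BCH_xxxxxxxx"))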

resume_batch(batch_code, alternate_model_class=None) async

Use this method to resume processing a batch.

Parameters:

Name Type Description Default
batch_code str

The batch code for the bulk charge you want to resume.

required
alternate_model_class type[PaystackDataModel] | None

A pydantic model class to use instead of the default pydantic model used by the library to present the data in the Response.data. The default behaviour of the library is to set Response.data to None if it fails to serialize the data returned from paystack with the model provided in the library. Providing a pydantic model class via this parameter overrides the library default model with the model class you provide. This can come in handy when the models in the library do not accurately represent the data returned, and you prefer working with the data as a pydantic model instead of as a dict of the response returned by paystack before it is serialized with pydantic models. The original data can be accessed via Response.raw.

None

Returns:

Type Description
Response[None] | Response[PaystackDataModel]

A pydantic model containing the response gotten from paystack's server.

Source code in src/pypaystack2/sub_clients/async_clients/bulk_charges.py
async def resume_batch(
    self,
    batch_code: str,
    alternate_model_class: type[PaystackDataModel] | None = None,
) -> Response[None] | Response[PaystackDataModel]:
    """Use this method to resume processing a batch

    Args:
        batch_code: The batch code for the bulk charge you want to resume.
        alternate_model_class: A pydantic model class to use instead of the
            default pydantic model used by the library to present the data in
            the `Response.data`. The default behaviour of the library is to
            set  `Response.data` to `None` if it fails to serialize the data
            returned from paystack with the model provided in the library.
            Providing a pydantic model class via this parameter overrides
            the library default model with the model class you provide.
            This can come in handy when the models in the library do not
            accurately represent the data returned, and you prefer working with the
            data as a pydantic model instead of as a dict of the response returned
            by paystack before it is serialized with pydantic models. The original
            data can be accessed via `Response.raw`.

    Returns:
        A pydantic model containing the response gotten from paystack's server.
    """

    url = self._full_url(f"/bulkcharge/resume/{batch_code}")
    return await self._handle_request(  # type: ignore
        HTTPMethod.GET,
        url,
        response_data_model_class=alternate_model_class,
    )
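
Example: a minimal sketch of resuming a previously paused batch, under the same assumptions about the client's import path, class name, and constructor as the earlier examples.

import asyncio

from pypaystack2.sub_clients.async_clients.bulk_charges import AsyncBulkChargeClient  # name and path assumed


async def resume(batch_code: str) -> None:
    client = AsyncBulkChargeClient(secret_key="sk_test_xxxx")  # assumed constructor
    # Processing of the batch picks up again from where it was paused.
    response = await client.resume_batch(batch_code=batch_code)
    # No data model is set for this endpoint by default, so inspect Response.raw.
    print(response.raw)


asyncio.run(resume("BCH_xxxxxxxx"))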