From 0dc29a5a8647a00eed94a28a0a90bb4e2ff327df Mon Sep 17 00:00:00 2001 From: wailbentafat Date: Fri, 20 Mar 2026 02:03:54 +0100 Subject: [PATCH 01/19] feat: Introduce new audit, notification, and storage cleaner workers, --- .env.staging.example | 8 ++++++++ .gitignore | 2 +- app/service/face_embedding.py | 1 + app/worker/audit.py | 0 app/worker/notification.py | 0 app/worker/storage_cleaner.py | 0 6 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 app/worker/audit.py create mode 100644 app/worker/notification.py create mode 100644 app/worker/storage_cleaner.py diff --git a/.env.staging.example b/.env.staging.example index c5c29d2..ec25849 100644 --- a/.env.staging.example +++ b/.env.staging.example @@ -42,3 +42,11 @@ encryption_key=super_secret_encryption_key totp_issuer=MultiAI + + +GOOGLE_CLIENT_ID= +GOOGLE_CLIENT_SECRET= +GOOGLE_REDIRECT_URI=http://127.0.0.1:8000/staff/drive/callback +GOOGLE_OAUTH_SCOPES=https://www.googleapis.com/auth/drive.readonly openid email profile + +FACE_ENCRYPTION_KEY=base64-encoded-32-byte-key \ No newline at end of file diff --git a/.gitignore b/.gitignore index d87d619..5cc1fe4 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,6 @@ __pycache__ db/schema.sql .vscode/settings.json - +multiai-c9380-firebase-adminsdk-fbsvc-cb6e5ce41b.json db.txt diff --git a/app/service/face_embedding.py b/app/service/face_embedding.py index f71c906..b54347f 100644 --- a/app/service/face_embedding.py +++ b/app/service/face_embedding.py @@ -68,6 +68,7 @@ def prepare(self) -> None: self.init_model() def embed(self, image: np.ndarray, bboxes: Sequence[BBox]) -> list[float]: + if not bboxes: raise ValueError("No faces to embed") diff --git a/app/worker/audit.py b/app/worker/audit.py new file mode 100644 index 0000000..e69de29 diff --git a/app/worker/notification.py b/app/worker/notification.py new file mode 100644 index 0000000..e69de29 diff --git a/app/worker/storage_cleaner.py b/app/worker/storage_cleaner.py new file mode 100644 index 
0000000..e69de29 From bb040454113483233be1754cb9b3d3c445f23e53 Mon Sep 17 00:00:00 2001 From: wailbentafat Date: Fri, 20 Mar 2026 02:25:33 +0100 Subject: [PATCH 02/19] feat: Introduce new services for auditing and user notifications, and workers for notification management and storage cleaning. --- app/service/audit.py | 0 app/service/user_notification.py | 0 app/worker/notification.py | 13 +++++++++++++ app/worker/storage_cleaner.py | 1 + 4 files changed, 14 insertions(+) create mode 100644 app/service/audit.py create mode 100644 app/service/user_notification.py diff --git a/app/service/audit.py b/app/service/audit.py new file mode 100644 index 0000000..e69de29 diff --git a/app/service/user_notification.py b/app/service/user_notification.py new file mode 100644 index 0000000..e69de29 diff --git a/app/worker/notification.py b/app/worker/notification.py index e69de29..8cf1889 100644 --- a/app/worker/notification.py +++ b/app/worker/notification.py @@ -0,0 +1,13 @@ +async def initilize_notifcation_services(): + pass +async def init_fcm(): + pass +async def init_apn(): + pass +async def init_web_push(): + pass + +async def init_container(): + # we use container here to intilzie connection with asyncpg and then we get device_id using the user_id +async def listen_nats_event(): + pass \ No newline at end of file diff --git a/app/worker/storage_cleaner.py b/app/worker/storage_cleaner.py index e69de29..3d61af5 100644 --- a/app/worker/storage_cleaner.py +++ b/app/worker/storage_cleaner.py @@ -0,0 +1 @@ +async def initilize_ \ No newline at end of file From 338b46adc43983f162363c98cac8d4e0cfa4f164 Mon Sep 17 00:00:00 2001 From: wailbentafat Date: Fri, 20 Mar 2026 02:31:06 +0100 Subject: [PATCH 03/19] feat: Add user notification system with API endpoints for listing and marking notifications as read. 
--- app/container.py | 7 ++ app/router/mobile/__init__.py | 5 +- app/router/mobile/notifications.py | 33 ++++++++ app/schema/request/mobile/notifications.py | 9 +++ app/schema/response/mobile/notifications.py | 36 +++++++++ app/service/user_notification.py | 76 +++++++++++++++++++ db/generated/models.py | 2 +- db/generated/notifications.py | 84 +++++++++++++++++++++ db/queries/notifications.sql | 23 ++++++ 9 files changed, 272 insertions(+), 3 deletions(-) create mode 100644 app/router/mobile/notifications.py create mode 100644 app/schema/request/mobile/notifications.py create mode 100644 app/schema/response/mobile/notifications.py create mode 100644 db/generated/notifications.py create mode 100644 db/queries/notifications.sql diff --git a/app/container.py b/app/container.py index 96b8f26..707e466 100644 --- a/app/container.py +++ b/app/container.py @@ -14,6 +14,7 @@ from app.service.upload_requests import UploadRequestsService from app.service.users import AuthService +from app.service.user_notification import UserNotificationService from db.generated import devices as device_queries from db.generated import photos as photo_queries from db.generated import session as session_queries @@ -27,6 +28,7 @@ from db.generated import events as event_queries from db.generated import eventParticipant as participant_queries from db.generated import stuff_user as staff_queries +from db.generated import notifications as notification_queries from app.service.event import EventService class Container: @@ -49,6 +51,7 @@ def __init__( self.upload_request_photo_querier = upload_request_photo_queries.AsyncQuerier(conn) self.photo_querier = photo_queries.AsyncQuerier(conn) self.staff_notification_querier = staff_notification_queries.AsyncQuerier(conn) + self.notification_querier = notification_queries.AsyncQuerier(conn) self.event_querier = event_queries.AsyncQuerier(conn) self.participant_querier = participant_queries.AsyncQuerier(conn) self.staff_querier = 
staff_queries.AsyncQuerier(conn) @@ -94,6 +97,10 @@ def __init__( staff_notifications_service=self.staff_notifications_service, ) + self.user_notifications_service = UserNotificationService( + notification_querier=self.notification_querier, + ) + self.staff_user_service = StaffUserService() self.staff_user_service.init( diff --git a/app/router/mobile/__init__.py b/app/router/mobile/__init__.py index b9be1e0..aa3c807 100644 --- a/app/router/mobile/__init__.py +++ b/app/router/mobile/__init__.py @@ -2,11 +2,12 @@ from app.router.mobile.auth import router as mobile_auth_router from app.router.mobile.enrollement import router as onboarding_router from app.router.mobile.event import router as event_router +from app.router.mobile.notifications import router as mobile_notifications_router -router = APIRouter(prefix="/user",tags=["user"]) +router = APIRouter(prefix="/user", tags=["user"]) router.add_api_route router.include_router(mobile_auth_router) router.include_router(onboarding_router) router.include_router(event_router) - +router.include_router(mobile_notifications_router) diff --git a/app/router/mobile/notifications.py b/app/router/mobile/notifications.py new file mode 100644 index 0000000..ca568cd --- /dev/null +++ b/app/router/mobile/notifications.py @@ -0,0 +1,33 @@ +from fastapi import APIRouter, Depends + +from app.container import Container, get_container +from app.deps.token_auth import MobileUserSchema, get_current_mobile_user +from app.schema.request.mobile.notifications import MarkUserNotificationsReadRequest +from app.schema.response.mobile.notifications import UserNotificationListResponse + + +router = APIRouter(prefix="/notifications") + + +@router.get("", response_model=UserNotificationListResponse) +async def list_user_notifications( + container: Container = Depends(get_container), + current_user: MobileUserSchema = Depends(get_current_mobile_user), +) -> UserNotificationListResponse: + notifications = await 
container.user_notifications_service.list_notifications( + user_id=current_user.user_id, + ) + return UserNotificationListResponse.from_models(notifications) + + +@router.post("/read", response_model=UserNotificationListResponse) +async def mark_user_notifications_as_read( + req: MarkUserNotificationsReadRequest, + container: Container = Depends(get_container), + current_user: MobileUserSchema = Depends(get_current_mobile_user), +) -> UserNotificationListResponse: + notifications = await container.user_notifications_service.mark_many_as_read( + notification_ids=req.notification_ids, + user_id=current_user.user_id, + ) + return UserNotificationListResponse.from_models(notifications) diff --git a/app/schema/request/mobile/notifications.py b/app/schema/request/mobile/notifications.py new file mode 100644 index 0000000..f6a94dc --- /dev/null +++ b/app/schema/request/mobile/notifications.py @@ -0,0 +1,9 @@ +from uuid import UUID + +from pydantic import BaseModel, Field + + +class MarkUserNotificationsReadRequest(BaseModel): + notification_ids: list[UUID] = Field( + ..., min_length=1, max_length=100 + ) diff --git a/app/schema/response/mobile/notifications.py b/app/schema/response/mobile/notifications.py new file mode 100644 index 0000000..d347248 --- /dev/null +++ b/app/schema/response/mobile/notifications.py @@ -0,0 +1,36 @@ +from datetime import datetime +from typing import Any +from uuid import UUID + +from pydantic import BaseModel + +from db.generated.models import Notification + + +class UserNotificationSchema(BaseModel): + id: UUID + type: str + payload: dict[str, Any] + read_at: datetime | None + created_at: datetime + + @classmethod + def from_model(cls, notification: Notification) -> "UserNotificationSchema": + return cls( + id=notification.id, + type=notification.type, + payload=notification.payload, + read_at=notification.read_at, + created_at=notification.created_at, + ) + + +class UserNotificationListResponse(BaseModel): + items: 
list[UserNotificationSchema] + + @classmethod + def from_models( + cls, + notifications: list[Notification], + ) -> "UserNotificationListResponse": + return cls(items=[UserNotificationSchema.from_model(item) for item in notifications]) diff --git a/app/service/user_notification.py b/app/service/user_notification.py index e69de29..e13b4d9 100644 --- a/app/service/user_notification.py +++ b/app/service/user_notification.py @@ -0,0 +1,76 @@ +from typing import Any +import uuid + +from app.core.exceptions import AppException +from db.generated import notifications as notification_queries +from db.generated.models import Notification + + +class UserNotificationService: + def __init__( + self, + notification_querier: notification_queries.AsyncQuerier, + ) -> None: + self.notification_querier = notification_querier + + async def create_notification( + self, + *, + user_id: uuid.UUID, + type: str, + payload: dict[str, Any], + ) -> Notification: + notification = await self.notification_querier.create_notification( + user_id=user_id, + type=type, + payload=payload, + ) + if notification is None: + raise AppException.internal_error("Failed to create user notification") + return notification + + async def list_notifications( + self, + *, + user_id: uuid.UUID, + ) -> list[Notification]: + notifications: list[Notification] = [] + async for notification in self.notification_querier.list_notifications_by_user_id( + user_id=user_id + ): + notifications.append(notification) + return notifications + + async def mark_as_read( + self, + *, + notification_id: uuid.UUID, + user_id: uuid.UUID, + ) -> Notification: + notification = await self.notification_querier.mark_notification_as_read( + id=notification_id, + user_id=user_id, + ) + if notification is None: + raise AppException.not_found("Notification not found or already read") + return notification + + async def mark_many_as_read( + self, + *, + notification_ids: list[uuid.UUID], + user_id: uuid.UUID, + ) -> list[Notification]: + 
notifications: list[Notification] = [] + seen_notification_ids: set[uuid.UUID] = set() + for notification_id in notification_ids: + if notification_id in seen_notification_ids: + continue + seen_notification_ids.add(notification_id) + notifications.append( + await self.mark_as_read( + notification_id=notification_id, + user_id=user_id, + ) + ) + return notifications diff --git a/db/generated/models.py b/db/generated/models.py index 28a9da1..3a87f2b 100644 --- a/db/generated/models.py +++ b/db/generated/models.py @@ -29,8 +29,8 @@ class ProcessingJobStatus(str, enum.Enum): class StaffRole(str, enum.Enum): ADMIN = "admin" - MULTI_TEAM_LEAD = "multi_team_lead" MULTI = "multi" + MULTI_TEAM_LEAD = "multi_team_lead" class UploadRequestStatus(str, enum.Enum): diff --git a/db/generated/notifications.py b/db/generated/notifications.py new file mode 100644 index 0000000..3166cd3 --- /dev/null +++ b/db/generated/notifications.py @@ -0,0 +1,84 @@ +# Code generated by sqlc. DO NOT EDIT. +# versions: +# sqlc v1.30.0 +# source: notifications.sql +from typing import Any, AsyncIterator, Optional +import uuid + +import sqlalchemy +import sqlalchemy.ext.asyncio + +from db.generated import models + + +CREATE_NOTIFICATION = """-- name: create_notification \\:one +INSERT INTO notifications ( + user_id, + type, + payload +) VALUES ( + :p1, :p2, :p3 +) +RETURNING id, user_id, type, payload, read_at, created_at +""" + + +LIST_NOTIFICATIONS_BY_USER_ID = """-- name: list_notifications_by_user_id \\:many +SELECT id, user_id, type, payload, read_at, created_at +FROM notifications +WHERE user_id = :p1 +ORDER BY created_at DESC +""" + + +MARK_NOTIFICATION_AS_READ = """-- name: mark_notification_as_read \\:one +UPDATE notifications +SET read_at = NOW() +WHERE id = :p1 + AND user_id = :p2 + AND read_at IS NULL +RETURNING id, user_id, type, payload, read_at, created_at +""" + + +class AsyncQuerier: + def __init__(self, conn: sqlalchemy.ext.asyncio.AsyncConnection): + self._conn = conn + + async def 
create_notification(self, *, user_id: uuid.UUID, type: str, payload: Any) -> Optional[models.Notification]: + row = (await self._conn.execute(sqlalchemy.text(CREATE_NOTIFICATION), {"p1": user_id, "p2": type, "p3": payload})).first() + if row is None: + return None + return models.Notification( + id=row[0], + user_id=row[1], + type=row[2], + payload=row[3], + read_at=row[4], + created_at=row[5], + ) + + async def list_notifications_by_user_id(self, *, user_id: uuid.UUID) -> AsyncIterator[models.Notification]: + result = await self._conn.stream(sqlalchemy.text(LIST_NOTIFICATIONS_BY_USER_ID), {"p1": user_id}) + async for row in result: + yield models.Notification( + id=row[0], + user_id=row[1], + type=row[2], + payload=row[3], + read_at=row[4], + created_at=row[5], + ) + + async def mark_notification_as_read(self, *, id: uuid.UUID, user_id: uuid.UUID) -> Optional[models.Notification]: + row = (await self._conn.execute(sqlalchemy.text(MARK_NOTIFICATION_AS_READ), {"p1": id, "p2": user_id})).first() + if row is None: + return None + return models.Notification( + id=row[0], + user_id=row[1], + type=row[2], + payload=row[3], + read_at=row[4], + created_at=row[5], + ) diff --git a/db/queries/notifications.sql b/db/queries/notifications.sql new file mode 100644 index 0000000..b7dc9ec --- /dev/null +++ b/db/queries/notifications.sql @@ -0,0 +1,23 @@ +-- name: CreateNotification :one +INSERT INTO notifications ( + user_id, + type, + payload +) VALUES ( + $1, $2, $3 +) +RETURNING id, user_id, type, payload, read_at, created_at; + +-- name: ListNotificationsByUserID :many +SELECT id, user_id, type, payload, read_at, created_at +FROM notifications +WHERE user_id = $1 +ORDER BY created_at DESC; + +-- name: MarkNotificationAsRead :one +UPDATE notifications +SET read_at = NOW() +WHERE id = $1 + AND user_id = $2 + AND read_at IS NULL +RETURNING id, user_id, type, payload, read_at, created_at; From fb5288ae24a8c5a268ff74f4e76c34070a395fd4 Mon Sep 17 00:00:00 2001 From: wailbentafat 
Date: Fri, 20 Mar 2026 02:46:49 +0100 Subject: [PATCH 04/19] feat: Implement push notification support using Firebase Admin and APNS2, adding type stubs, new constants, and updating dependencies. --- app/core/constant.py | 8 + app/infra/nats.py | 4 + app/worker/notification.py | 281 ++++++- pyproject.toml | 3 + typings/apns2/client.pyi | 78 ++ typings/apns2/credentials.pyi | 39 + typings/apns2/errors.pyi | 175 +++++ typings/apns2/payload.pyi | 25 + typings/firebase_admin/__about__.pyi | 10 + typings/firebase_admin/__init__.pyi | 125 +++ typings/firebase_admin/_auth_client.pyi | 618 +++++++++++++++ typings/firebase_admin/_auth_providers.pyi | 192 +++++ typings/firebase_admin/_auth_utils.pyi | 234 ++++++ typings/firebase_admin/_gapic_utils.pyi | 43 ++ typings/firebase_admin/_http_client.pyi | 106 +++ typings/firebase_admin/_messaging_encoder.pyi | 204 +++++ typings/firebase_admin/_messaging_utils.pyi | 402 ++++++++++ typings/firebase_admin/_rfc3339.pyi | 24 + typings/firebase_admin/_sseclient.pyi | 92 +++ typings/firebase_admin/_token_gen.pyi | 177 +++++ typings/firebase_admin/_user_identifier.pyi | 91 +++ typings/firebase_admin/_user_import.pyi | 405 ++++++++++ typings/firebase_admin/_user_mgt.pyi | 527 +++++++++++++ typings/firebase_admin/_utils.pyi | 77 ++ typings/firebase_admin/app_check.pyi | 54 ++ typings/firebase_admin/auth.pyi | 716 ++++++++++++++++++ typings/firebase_admin/credentials.pyi | 155 ++++ typings/firebase_admin/db.pyi | 573 ++++++++++++++ typings/firebase_admin/exceptions.pyi | 191 +++++ typings/firebase_admin/firestore.pyi | 48 ++ typings/firebase_admin/firestore_async.pyi | 48 ++ typings/firebase_admin/functions.pyi | 226 ++++++ typings/firebase_admin/instance_id.pyi | 41 + typings/firebase_admin/messaging.pyi | 285 +++++++ typings/firebase_admin/ml.pyi | 529 +++++++++++++ typings/firebase_admin/project_management.pyi | 422 +++++++++++ typings/firebase_admin/remote_config.pyi | 340 +++++++++ typings/firebase_admin/storage.pyi | 48 ++ 
typings/firebase_admin/tenant_mgt.pyi | 261 +++++++ uv.lock | 465 +++++++++++- 40 files changed, 8320 insertions(+), 22 deletions(-) create mode 100644 typings/apns2/client.pyi create mode 100644 typings/apns2/credentials.pyi create mode 100644 typings/apns2/errors.pyi create mode 100644 typings/apns2/payload.pyi create mode 100644 typings/firebase_admin/__about__.pyi create mode 100644 typings/firebase_admin/__init__.pyi create mode 100644 typings/firebase_admin/_auth_client.pyi create mode 100644 typings/firebase_admin/_auth_providers.pyi create mode 100644 typings/firebase_admin/_auth_utils.pyi create mode 100644 typings/firebase_admin/_gapic_utils.pyi create mode 100644 typings/firebase_admin/_http_client.pyi create mode 100644 typings/firebase_admin/_messaging_encoder.pyi create mode 100644 typings/firebase_admin/_messaging_utils.pyi create mode 100644 typings/firebase_admin/_rfc3339.pyi create mode 100644 typings/firebase_admin/_sseclient.pyi create mode 100644 typings/firebase_admin/_token_gen.pyi create mode 100644 typings/firebase_admin/_user_identifier.pyi create mode 100644 typings/firebase_admin/_user_import.pyi create mode 100644 typings/firebase_admin/_user_mgt.pyi create mode 100644 typings/firebase_admin/_utils.pyi create mode 100644 typings/firebase_admin/app_check.pyi create mode 100644 typings/firebase_admin/auth.pyi create mode 100644 typings/firebase_admin/credentials.pyi create mode 100644 typings/firebase_admin/db.pyi create mode 100644 typings/firebase_admin/exceptions.pyi create mode 100644 typings/firebase_admin/firestore.pyi create mode 100644 typings/firebase_admin/firestore_async.pyi create mode 100644 typings/firebase_admin/functions.pyi create mode 100644 typings/firebase_admin/instance_id.pyi create mode 100644 typings/firebase_admin/messaging.pyi create mode 100644 typings/firebase_admin/ml.pyi create mode 100644 typings/firebase_admin/project_management.pyi create mode 100644 typings/firebase_admin/remote_config.pyi create mode 
100644 typings/firebase_admin/storage.pyi create mode 100644 typings/firebase_admin/tenant_mgt.pyi diff --git a/app/core/constant.py b/app/core/constant.py index 7d3f191..7a3b139 100644 --- a/app/core/constant.py +++ b/app/core/constant.py @@ -5,6 +5,14 @@ class RedisKey(str, Enum): UserSession = "user_session" UserSessionByUser = "user_session:{user_id}" + +class NotificationChannel(str, Enum): + WEB = "web" + MOBILE = "mobile" + + +NOTIFICATION_EVENT_SUBJECT = "notification.event" + IMAGE_ALLOWED_TYPES = { "image/jpeg", "image/png", diff --git a/app/infra/nats.py b/app/infra/nats.py index d2d2454..5394e98 100644 --- a/app/infra/nats.py +++ b/app/infra/nats.py @@ -7,14 +7,18 @@ from pydantic import BaseModel from app.core.config import settings +from app.core.constant import NOTIFICATION_EVENT_SUBJECT class Message(BaseModel): data: dict[str, Any] + + class NatsSubjects(Enum): USER_SIGNUP = "user.signup" USER_LOGIN = "user.login" USER_LOGOUT = "user.logout" + NOTIFICATION_EVENT = NOTIFICATION_EVENT_SUBJECT STAFF_UPLOAD_REQUEST_CREATED = "staff.upload_request.created" STAFF_UPLOAD_REQUEST_APPROVED = "staff.upload_request.approved" STAFF_UPLOAD_REQUEST_REJECTED = "staff.upload_request.rejected" diff --git a/app/worker/notification.py b/app/worker/notification.py index 8cf1889..0a83308 100644 --- a/app/worker/notification.py +++ b/app/worker/notification.py @@ -1,13 +1,268 @@ -async def initilize_notifcation_services(): - pass -async def init_fcm(): - pass -async def init_apn(): - pass -async def init_web_push(): - pass - -async def init_container(): - # we use container here to intilzie connection with asyncpg and then we get device_id using the user_id -async def listen_nats_event(): - pass \ No newline at end of file +"""Forward notifications coming from NATS to the configured push providers.""" +from __future__ import annotations + +import asyncio +import dataclasses +import json +import uuid +from typing import Any + +import sqlalchemy.ext.asyncio + +from 
app.core.constant import NotificationChannel, NOTIFICATION_EVENT_SUBJECT +from app.core.logger import logger +from app.infra.database import engine +from app.infra.nats import NatsClient, NatsSubjects +from app.service.device import DeviceService +from db.generated import devices as device_queries +from db.generated.models import UserDevice + +try: + from firebase_admin import messaging as firebase_messaging +except ImportError: # pragma: no cover - optional dependency + firebase_messaging = None + +try: + from apns2.client import APNsClient + from apns2.payload import Payload as APNPayload +except ImportError: # pragma: no cover - optional dependency + APNsClient = None + APNPayload = None + +try: + from pywebpush import webpush, WebPushException +except ImportError: # pragma: no cover - optional dependency + webpush = None + WebPushException = None + + +@dataclasses.dataclass +class NotificationEventPayload: + user_id: uuid.UUID + channel: NotificationChannel + title: str | None = None + body: str | None = None + data: dict[str, str] = dataclasses.field(default_factory=dict) + device_info: dict[str, Any] | None = None + metadata: dict[str, Any] | None = None + + @classmethod + def from_dict(cls, payload: dict[str, Any]) -> "NotificationEventPayload" | None: + raw_user_id = payload.get("user_id") + raw_channel = payload.get("channel") + if not isinstance(raw_user_id, str) or not isinstance(raw_channel, str): + logger.warning("Notification payload missing user_id or channel: %s", payload) + return None + try: + user_id = uuid.UUID(raw_user_id) + except ValueError as exc: + logger.warning("Invalid user_id %s: %s", raw_user_id, exc) + return None + try: + channel = NotificationChannel(raw_channel) + except ValueError: + logger.warning("Unsupported notification channel %s", raw_channel) + return None + + data = payload.get("data") + data_dict: dict[str, str] = {} + if isinstance(data, dict): + data_dict = {str(k): str(v) for k, v in data.items()} + + device_info = 
payload.get("device_info") + if device_info is not None and not isinstance(device_info, dict): + logger.warning("device_info must be an object: %s", payload) + device_info = None + + metadata = payload.get("metadata") + if metadata is not None and not isinstance(metadata, dict): + metadata = None + + return cls( + user_id=user_id, + channel=channel, + title=payload.get("title"), + body=payload.get("body"), + data=data_dict, + device_info=device_info, + metadata=metadata, + ) + + +async def init_push_integrations() -> None: + """Initialize third-party push clients and perform early validation.""" + if firebase_messaging: + logger.info("Firebase Admin available for FCM delivery") + else: + logger.warning("Firebase Admin not installed; mobile push disabled") + + if APNsClient and APNPayload: + logger.info("APNs client available for iOS delivery") + else: + logger.warning("APNs client not installed; iOS push disabled") + + if webpush: + logger.info("pywebpush available for web push delivery") + else: + logger.warning("pywebpush not installed; web push disabled") + + +async def send_fcm_notification(device: UserDevice, payload: NotificationEventPayload) -> None: + if firebase_messaging is None: + logger.debug("Skipping FCM delivery because firebase_admin is not installed") + return + + token = payload.device_info and payload.device_info.get("fcm_token") + if token is None: + logger.warning("Missing FCM token for payload %s", payload) + return + + message = firebase_messaging.Message( + token=token, + notification=firebase_messaging.Notification( + title=payload.title, body=payload.body + ), + data=payload.data, + ) + + try: + firebase_messaging.send(message) + logger.info("FCM notification queued for user %s token %s", payload.user_id, token) + except Exception as exc: + logger.exception("FCM send failed for token %s: %s", token, exc) + + +async def send_apn_notification(device: UserDevice, payload: NotificationEventPayload) -> None: + if APNsClient is None or 
APNPayload is None: + logger.debug("Skipping APN delivery because APNs client is unavailable") + return + + token = payload.device_info and payload.device_info.get("apn_token") + if token is None: + logger.warning("Missing APN token for payload %s", payload) + return + + apn_payload = APNPayload(alert={"title": payload.title, "body": payload.body}) + client = APNsClient( + credentials="/path/to/certificate.pem", + use_sandbox=True, + use_alternative_port=False, + ) + try: + client.send_notification(token, apn_payload) + logger.info("APN notification queued for user %s token %s", payload.user_id, token) + except Exception as exc: + logger.exception("APN send failed for %s: %s", token, exc) + + +async def send_web_push_notification(payload: NotificationEventPayload) -> None: + if webpush is None or WebPushException is None: + logger.debug("Skipping WebPush delivery because pywebpush is unavailable") + return + + if not payload.device_info: + logger.warning("Web notification missing subscription info: %s", payload) + return + + subscription_info = payload.device_info + vapid_claims = {"sub": "mailto:alerts@example.com"} + try: + webpush( + subscription_info=subscription_info, + data=json.dumps({"title": payload.title, "body": payload.body, "data": payload.data}), + vapid_private_key="/path/to/vapid-private.key", + vapid_claims=vapid_claims, + ) + logger.info("Web push queued for user %s", payload.user_id) + except WebPushException as exc: + logger.exception("Web push failed for user %s: %s", payload.user_id, exc) + + +class NotificationDeliveryWorker: + def __init__(self) -> None: + self._conn: sqlalchemy.ext.asyncio.AsyncConnection | None = None + self._device_service: DeviceService | None = None + + async def start(self) -> None: + if self._conn is not None: + return + self._conn = await engine.connect() + self._device_service = DeviceService() + self._device_service.init(device_querier=device_queries.AsyncQuerier(self._conn)) + + async def stop(self) -> None: + if 
self._conn is not None: + await self._conn.close() + self._conn = None + self._device_service = None + + async def deliver(self, payload: NotificationEventPayload) -> None: + if payload.channel is NotificationChannel.MOBILE: + await self._deliver_to_mobile(payload) + elif payload.channel is NotificationChannel.WEB: + await send_web_push_notification(payload) + else: + logger.warning("Unsupported channel %s for payload %s", payload.channel, payload) + + async def _deliver_to_mobile(self, payload: NotificationEventPayload) -> None: + if self._device_service is None: + logger.warning("Device service missing for mobile delivery") + return + devices, _ = await self._device_service.get_all_devices(user_id=payload.user_id) + if not devices: + logger.debug("No registered devices for user %s", payload.user_id) + return + for device in devices: + device_type = (device.device_type or "").lower() + if device_type == "ios": + await send_apn_notification(device, payload) + elif device_type == "android": + await send_fcm_notification(device, payload) + else: + await send_fcm_notification(device, payload) + + +async def _parse_payload(raw_data: bytes) -> dict[str, Any] | None: + try: + return json.loads(raw_data.decode("utf-8")) + except (UnicodeDecodeError, json.JSONDecodeError) as exc: + logger.error("Cannot parse notification payload: %s", exc) + return None + + +async def _handle_event(worker: NotificationDeliveryWorker, raw_data: bytes) -> None: + raw_payload = await _parse_payload(raw_data) + if raw_payload is None: + return + payload = NotificationEventPayload.from_dict(raw_payload) + if payload is None: + return + try: + await worker.deliver(payload) + except Exception: + logger.exception("Failed to deliver notification payload %s", raw_payload) + + +async def listen_nats_event(worker: NotificationDeliveryWorker) -> None: + await NatsClient.subscribe( + NatsSubjects.NOTIFICATION_EVENT, + lambda data: _handle_event(worker, data), + ) + logger.info("Listening for 
notification events on %s", NOTIFICATION_EVENT_SUBJECT) + + +async def main() -> None: + await init_push_integrations() + worker = NotificationDeliveryWorker() + await worker.start() + await NatsClient.connect() + try: + await listen_nats_event(worker) + await asyncio.Event().wait() + finally: + await worker.stop() + await NatsClient.close() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/pyproject.toml b/pyproject.toml index c9d7300..ca0bd4c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,6 +26,9 @@ dependencies = [ "insightface>=0.7.3", "onnxruntime>=1.24.4", "python-multipart>=0.0.22", + "firebase-admin>=6.8.0", + "apns2>=0.7.1", + "pywebpush>=2.3.0", ] [tool.ruff] diff --git a/typings/apns2/client.pyi b/typings/apns2/client.pyi new file mode 100644 index 0000000..65c9dca --- /dev/null +++ b/typings/apns2/client.pyi @@ -0,0 +1,78 @@ +""" +This type stub file was generated by pyright. +""" + +from enum import Enum +from typing import Dict, Iterable, Optional, Tuple, Union +from .credentials import Credentials +from .payload import Payload + +class NotificationPriority(Enum): + Immediate = ... + Delayed = ... + + +class NotificationType(Enum): + Alert = ... + Background = ... + VoIP = ... + Complication = ... + FileProvider = ... + MDM = ... + + +RequestStream = ... +Notification = ... +DEFAULT_APNS_PRIORITY = ... +CONCURRENT_STREAMS_SAFETY_MAXIMUM = ... +MAX_CONNECTION_RETRIES = ... +logger = ... +class APNsClient: + SANDBOX_SERVER = ... + LIVE_SERVER = ... + DEFAULT_PORT = ... + ALTERNATIVE_PORT = ... + def __init__(self, credentials: Union[Credentials, str], use_sandbox: bool = ..., use_alternative_port: bool = ..., proto: Optional[str] = ..., json_encoder: Optional[type] = ..., password: Optional[str] = ..., proxy_host: Optional[str] = ..., proxy_port: Optional[int] = ..., heartbeat_period: Optional[float] = ...) -> None: + ... 
+ + def send_notification(self, token_hex: str, notification: Payload, topic: Optional[str] = ..., priority: NotificationPriority = ..., expiration: Optional[int] = ..., collapse_id: Optional[str] = ...) -> None: + ... + + def send_notification_async(self, token_hex: str, notification: Payload, topic: Optional[str] = ..., priority: NotificationPriority = ..., expiration: Optional[int] = ..., collapse_id: Optional[str] = ..., push_type: Optional[NotificationType] = ...) -> int: + ... + + def get_notification_result(self, stream_id: int) -> Union[str, Tuple[str, str]]: + """ + Get result for specified stream + The function returns: 'Success' or 'failure reason' or ('Unregistered', timestamp) + """ + ... + + def send_notification_batch(self, notifications: Iterable[Notification], topic: Optional[str] = ..., priority: NotificationPriority = ..., expiration: Optional[int] = ..., collapse_id: Optional[str] = ..., push_type: Optional[NotificationType] = ...) -> Dict[str, Union[str, Tuple[str, str]]]: + """ + Send a notification to a list of tokens in batch. Instead of sending a synchronous request + for each token, send multiple requests concurrently. This is done on the same connection, + using HTTP/2 streams (one request per stream). + + APNs allows many streams simultaneously, but the number of streams can vary depending on + server load. This method reads the SETTINGS frame sent by the server to figure out the + maximum number of concurrent streams. Typically, APNs reports a maximum of 500. + + The function returns a dictionary mapping each token to its result. The result is "Success" + if the token was sent successfully, or the string returned by APNs in the 'reason' field of + the response, if the token generated an error. + """ + ... + + def update_max_concurrent_streams(self) -> None: + ... + + def connect(self) -> None: + """ + Establish a connection to APNs. If already connected, the function does nothing. 
If the + connection fails, the function retries up to MAX_CONNECTION_RETRIES times. + """ + ... + + + diff --git a/typings/apns2/credentials.pyi b/typings/apns2/credentials.pyi new file mode 100644 index 0000000..fa4fb0f --- /dev/null +++ b/typings/apns2/credentials.pyi @@ -0,0 +1,39 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import Optional, TYPE_CHECKING +from hyper import HTTP20Connection +from hyper.ssl_compat import SSLContext + +if TYPE_CHECKING: + ... +DEFAULT_TOKEN_LIFETIME = ... +DEFAULT_TOKEN_ENCRYPTION_ALGORITHM = ... +class Credentials: + def __init__(self, ssl_context: Optional[SSLContext] = ...) -> None: + ... + + def create_connection(self, server: str, port: int, proto: Optional[str], proxy_host: Optional[str] = ..., proxy_port: Optional[int] = ...) -> HTTP20Connection: + ... + + def get_authorization_header(self, topic: Optional[str]) -> Optional[str]: + ... + + + +class CertificateCredentials(Credentials): + def __init__(self, cert_file: Optional[str] = ..., password: Optional[str] = ..., cert_chain: Optional[str] = ...) -> None: + ... + + + +class TokenCredentials(Credentials): + def __init__(self, auth_key_path: str, auth_key_id: str, team_id: str, encryption_algorithm: str = ..., token_lifetime: int = ...) -> None: + ... + + def get_authorization_header(self, topic: Optional[str]) -> str: + ... + + + diff --git a/typings/apns2/errors.pyi b/typings/apns2/errors.pyi new file mode 100644 index 0000000..d8e6670 --- /dev/null +++ b/typings/apns2/errors.pyi @@ -0,0 +1,175 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import Optional, Type + +class APNsException(Exception): + ... + + +class ConnectionFailed(APNsException): + """There was an error connecting to APNs.""" + ... + + +class InternalException(APNsException): + """This exception should not be raised. If it is, please report this as a bug.""" + ... 
+ + +class BadPayloadException(APNsException): + """Something bad with the payload.""" + ... + + +class BadCollapseId(BadPayloadException): + """The collapse identifier exceeds the maximum allowed size""" + ... + + +class BadDeviceToken(APNsException): + """The specified device token was bad. + Verify that the request contains a valid token and that the token matches the environment.""" + ... + + +class BadExpirationDate(BadPayloadException): + """The apns-expiration value is bad.""" + ... + + +class BadMessageId(InternalException): + """The apns-id value is bad.""" + ... + + +class BadPriority(InternalException): + """The apns-priority value is bad.""" + ... + + +class BadTopic(BadPayloadException): + """The apns-topic was invalid.""" + ... + + +class DeviceTokenNotForTopic(APNsException): + """The device token does not match the specified topic.""" + ... + + +class DuplicateHeaders(InternalException): + """One or more headers were repeated.""" + ... + + +class IdleTimeout(APNsException): + """Idle time out.""" + ... + + +class MissingDeviceToken(APNsException): + """The device token is not specified in the request :path. + Verify that the :path header contains the device token.""" + ... + + +class MissingTopic(BadPayloadException): + """The apns-topic header of the request was not specified and was required. + The apns-topic header is mandatory when the client is connected using a certificate + that supports multiple topics.""" + ... + + +class PayloadEmpty(BadPayloadException): + """The message payload was empty.""" + ... + + +class TopicDisallowed(BadPayloadException): + """Pushing to this topic is not allowed.""" + ... + + +class BadCertificate(APNsException): + """The certificate was bad.""" + ... + + +class BadCertificateEnvironment(APNsException): + """The client certificate was for the wrong environment.""" + ... + + +class ExpiredProviderToken(APNsException): + """The provider token is stale and a new token should be generated.""" + ... 
+ + +class Forbidden(APNsException): + """The specified action is not allowed.""" + ... + + +class InvalidProviderToken(APNsException): + """The provider token is not valid or the token signature could not be verified.""" + ... + + +class MissingProviderToken(APNsException): + """No provider certificate was used to connect to APNs and Authorization header was missing or no provider token + was specified. """ + ... + + +class BadPath(APNsException): + """The request contained a bad :path value.""" + ... + + +class MethodNotAllowed(InternalException): + """The specified :method was not POST.""" + ... + + +class Unregistered(APNsException): + """The device token is inactive for the specified topic.""" + def __init__(self, timestamp: Optional[str] = ...) -> None: + ... + + + +class PayloadTooLarge(BadPayloadException): + """The message payload was too large. The maximum payload size is 4096 bytes.""" + ... + + +class TooManyProviderTokenUpdates(APNsException): + """The provider token is being updated too often.""" + ... + + +class TooManyRequests(APNsException): + """Too many requests were made consecutively to the same device token.""" + ... + + +class InternalServerError(APNsException): + """An internal server error occurred.""" + ... + + +class ServiceUnavailable(APNsException): + """The service is unavailable.""" + ... + + +class Shutdown(APNsException): + """The server is shutting down.""" + ... + + +def exception_class_for_reason(reason: str) -> Type[APNsException]: + ... + diff --git a/typings/apns2/payload.pyi b/typings/apns2/payload.pyi new file mode 100644 index 0000000..ea26787 --- /dev/null +++ b/typings/apns2/payload.pyi @@ -0,0 +1,25 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import Any, Dict, Iterable, List, Optional, Union + +MAX_PAYLOAD_SIZE = ... 
+class PayloadAlert: + def __init__(self, title: Optional[str] = ..., title_localized_key: Optional[str] = ..., title_localized_args: Optional[List[str]] = ..., body: Optional[str] = ..., body_localized_key: Optional[str] = ..., body_localized_args: Optional[List[str]] = ..., action_localized_key: Optional[str] = ..., action: Optional[str] = ..., launch_image: Optional[str] = ...) -> None: + ... + + def dict(self) -> Dict[str, Any]: + ... + + + +class Payload: + def __init__(self, alert: Union[PayloadAlert, str, None] = ..., badge: Optional[int] = ..., sound: Optional[str] = ..., category: Optional[str] = ..., url_args: Optional[Iterable[str]] = ..., custom: Optional[Dict[str, Any]] = ..., thread_id: Optional[str] = ..., content_available: bool = ..., mutable_content: bool = ...) -> None: + ... + + def dict(self) -> Dict[str, Any]: + ... + + + diff --git a/typings/firebase_admin/__about__.pyi b/typings/firebase_admin/__about__.pyi new file mode 100644 index 0000000..b3c0997 --- /dev/null +++ b/typings/firebase_admin/__about__.pyi @@ -0,0 +1,10 @@ +""" +This type stub file was generated by pyright. +""" + +"""About information (version, etc) for Firebase Admin SDK.""" +__version__ = ... +__title__ = ... +__author__ = ... +__license__ = ... +__url__ = ... diff --git a/typings/firebase_admin/__init__.pyi b/typings/firebase_admin/__init__.pyi new file mode 100644 index 0000000..4efa37a --- /dev/null +++ b/typings/firebase_admin/__init__.pyi @@ -0,0 +1,125 @@ +""" +This type stub file was generated by pyright. +""" + +import datetime +import json +import os +import threading +from google.auth.credentials import Credentials as GoogleAuthCredentials +from google.auth.exceptions import DefaultCredentialsError +from firebase_admin import credentials +from firebase_admin.__about__ import __version__ + +"""Firebase Admin SDK for Python.""" +_apps = ... +_apps_lock = ... +_clock = ... +_DEFAULT_APP_NAME = ... +_FIREBASE_CONFIG_ENV_VAR = ... +_CONFIG_VALID_KEYS = ... 
+def initialize_app(credential=..., options=..., name=...): # -> App: + """Initializes and returns a new App instance. + + Creates a new App instance using the specified options + and the app name. If an instance already exists by the same + app name a ValueError is raised. + If options are not provided an attempt is made to load the options from the environment. + This is done by looking up the ``FIREBASE_CONFIG`` environment variable. If the value of + the variable starts with ``"{"``, it is parsed as a JSON object. Otherwise it is treated + as a file name and the JSON content is read from the corresponding file. + Use this function whenever a new App instance is required. Do not directly invoke the + App constructor. + + Args: + credential: A credential object used to initialize the SDK (optional). If none is provided, + Google Application Default Credentials are used. + options: A dictionary of configuration options (optional). Supported options include + ``databaseURL``, ``storageBucket``, ``projectId``, ``databaseAuthVariableOverride``, + ``serviceAccountId`` and ``httpTimeout``. If ``httpTimeout`` is not set, the SDK uses + a default timeout of 120 seconds. + + name: Name of the app (optional). + Returns: + App: A newly initialized instance of App. + + Raises: + ValueError: If the app name is already in use, or any of the + provided arguments are invalid. + """ + ... + +def delete_app(app): # -> None: + """Gracefully deletes an App instance. + + Args: + app: The app instance to be deleted. + + Raises: + ValueError: If the app is not initialized. + """ + ... + +def get_app(name=...): + """Retrieves an App instance by name. + + Args: + name: Name of the App instance to retrieve (optional). + + Returns: + App: An App instance with the given name. + + Raises: + ValueError: If the specified name is not a string, or if the specified + app does not exist. + """ + ... 
+ +class _AppOptions: + """A collection of configuration options for an App.""" + def __init__(self, options) -> None: + ... + + def get(self, key, default=...): # -> Any | None: + """Returns the option identified by the provided key.""" + ... + + + +class App: + """The entry point for Firebase Python SDK. + + Represents a Firebase app, while holding the configuration and state + common to all Firebase APIs. + """ + def __init__(self, name, credential, options) -> None: + """Constructs a new App using the provided name and options. + + Args: + name: Name of the application. + credential: A credential object. + options: A dictionary of configuration options. + + Raises: + ValueError: If an argument is None or invalid. + """ + ... + + @property + def name(self): # -> str: + ... + + @property + def credential(self): # -> _ExternalCredentials | Base: + ... + + @property + def options(self): # -> _AppOptions: + ... + + @property + def project_id(self): # -> str | Any | None: + ... + + + diff --git a/typings/firebase_admin/_auth_client.pyi b/typings/firebase_admin/_auth_client.pyi new file mode 100644 index 0000000..374d332 --- /dev/null +++ b/typings/firebase_admin/_auth_client.pyi @@ -0,0 +1,618 @@ +""" +This type stub file was generated by pyright. +""" + +"""Firebase auth client sub module.""" +class Client: + """Firebase Authentication client scoped to a specific tenant.""" + def __init__(self, app, tenant_id=...) -> None: + ... + + @property + def tenant_id(self): # -> None: + """Tenant ID associated with this client.""" + ... + + def create_custom_token(self, uid, developer_claims=...): # -> bytes: + """Builds and signs a Firebase custom auth token. + + Args: + uid: ID of the user for whom the token is created. + developer_claims: A dictionary of claims to be included in the token + (optional). + + Returns: + bytes: A token minted from the input parameters. + + Raises: + ValueError: If input parameters are invalid. 
+ TokenSignError: If an error occurs while signing the token using the remote IAM service. + """ + ... + + def verify_id_token(self, id_token, check_revoked=..., clock_skew_seconds=...): # -> Any | Mapping[str, Any]: + """Verifies the signature and data for the provided JWT. + + Accepts a signed token string, verifies that it is current, was issued + to this project, and that it was correctly signed by Google. + + Args: + id_token: A string of the encoded JWT. + check_revoked: Boolean, If true, checks whether the token has been revoked or + the user disabled (optional). + clock_skew_seconds: The number of seconds to tolerate when checking the token. + Must be between 0-60. Defaults to 0. + + Returns: + dict: A dictionary of key-value pairs parsed from the decoded JWT. + + Raises: + ValueError: If ``id_token`` is a not a string or is empty. + InvalidIdTokenError: If ``id_token`` is not a valid Firebase ID token. + ExpiredIdTokenError: If the specified ID token has expired. + RevokedIdTokenError: If ``check_revoked`` is ``True`` and the ID token has been + revoked. + TenantIdMismatchError: If ``id_token`` belongs to a tenant that is different than + this ``Client`` instance. + CertificateFetchError: If an error occurs while fetching the public key certificates + required to verify the ID token. + UserDisabledError: If ``check_revoked`` is ``True`` and the corresponding user + record is disabled. + """ + ... + + def revoke_refresh_tokens(self, uid): # -> None: + """Revokes all refresh tokens for an existing user. + + This method updates the user's ``tokens_valid_after_timestamp`` to the current UTC + in seconds since the epoch. It is important that the server on which this is called has its + clock set correctly and synchronized. + + While this revokes all sessions for a specified user and disables any new ID tokens for + existing sessions from getting minted, existing ID tokens may remain active until their + natural expiration (one hour). 
To verify that ID tokens are revoked, use + ``verify_id_token(idToken, check_revoked=True)``. + + Args: + uid: A user ID string. + + Raises: + ValueError: If the user ID is None, empty or malformed. + FirebaseError: If an error occurs while revoking the refresh token. + """ + ... + + def get_user(self, uid): # -> UserRecord: + """Gets the user data corresponding to the specified user ID. + + Args: + uid: A user ID string. + + Returns: + UserRecord: A user record instance. + + Raises: + ValueError: If the user ID is None, empty or malformed. + UserNotFoundError: If the specified user ID does not exist. + FirebaseError: If an error occurs while retrieving the user. + """ + ... + + def get_user_by_email(self, email): # -> UserRecord: + """Gets the user data corresponding to the specified user email. + + Args: + email: A user email address string. + + Returns: + UserRecord: A user record instance. + + Raises: + ValueError: If the email is None, empty or malformed. + UserNotFoundError: If no user exists for the specified email address. + FirebaseError: If an error occurs while retrieving the user. + """ + ... + + def get_user_by_phone_number(self, phone_number): # -> UserRecord: + """Gets the user data corresponding to the specified phone number. + + Args: + phone_number: A phone number string. + + Returns: + UserRecord: A user record instance. + + Raises: + ValueError: If the phone number is ``None``, empty or malformed. + UserNotFoundError: If no user exists for the specified phone number. + FirebaseError: If an error occurs while retrieving the user. + """ + ... + + def get_users(self, identifiers): # -> GetUsersResult: + """Gets the user data corresponding to the specified identifiers. + + There are no ordering guarantees; in particular, the nth entry in the + result list is not guaranteed to correspond to the nth entry in the input + parameters list. + + A maximum of 100 identifiers may be supplied. 
If more than 100 + identifiers are supplied, this method raises a `ValueError`. + + Args: + identifiers (list[Identifier]): A list of ``Identifier`` instances used + to indicate which user records should be returned. Must have <= 100 + entries. + + Returns: + GetUsersResult: A ``GetUsersResult`` instance corresponding to the + specified identifiers. + + Raises: + ValueError: If any of the identifiers are invalid or if more than 100 + identifiers are specified. + """ + ... + + def list_users(self, page_token=..., max_results=...): # -> ListUsersPage: + """Retrieves a page of user accounts from a Firebase project. + + The ``page_token`` argument governs the starting point of the page. The ``max_results`` + argument governs the maximum number of user accounts that may be included in the returned + page. This function never returns ``None``. If there are no user accounts in the Firebase + project, this returns an empty page. + + Args: + page_token: A non-empty page token string, which indicates the starting point of the + page (optional). Defaults to ``None``, which will retrieve the first page of users. + max_results: A positive integer indicating the maximum number of users to include in + the returned page (optional). Defaults to 1000, which is also the maximum number + allowed. + + Returns: + ListUsersPage: A page of user accounts. + + Raises: + ValueError: If max_results or page_token are invalid. + FirebaseError: If an error occurs while retrieving the user accounts. + """ + ... + + def create_user(self, **kwargs): # -> UserRecord: + """Creates a new user account with the specified properties. + + Args: + **kwargs: A series of keyword arguments (optional). + + Keyword Args: + uid: User ID to assign to the newly created user (optional). + display_name: The user's display name (optional). + email: The user's primary email (optional). + email_verified: A boolean indicating whether or not the user's primary email is + verified (optional). 
+ phone_number: The user's primary phone number (optional). + photo_url: The user's photo URL (optional). + password: The user's raw, unhashed password. (optional). + disabled: A boolean indicating whether or not the user account is disabled (optional). + + Returns: + UserRecord: A UserRecord instance for the newly created user. + + Raises: + ValueError: If the specified user properties are invalid. + FirebaseError: If an error occurs while creating the user account. + """ + ... + + def update_user(self, uid, **kwargs): # -> UserRecord: + """Updates an existing user account with the specified properties. + + Args: + uid: A user ID string. + **kwargs: A series of keyword arguments (optional). + + Keyword Args: + display_name: The user's display name (optional). Can be removed by explicitly passing + ``auth.DELETE_ATTRIBUTE``. + email: The user's primary email (optional). + email_verified: A boolean indicating whether or not the user's primary email is + verified (optional). + phone_number: The user's primary phone number (optional). Can be removed by explicitly + passing ``auth.DELETE_ATTRIBUTE``. + photo_url: The user's photo URL (optional). Can be removed by explicitly passing + ``auth.DELETE_ATTRIBUTE``. + password: The user's raw, unhashed password. (optional). + disabled: A boolean indicating whether or not the user account is disabled (optional). + custom_claims: A dictionary or a JSON string contining the custom claims to be set on + the user account (optional). To remove all custom claims, pass + ``auth.DELETE_ATTRIBUTE``. + valid_since: An integer signifying the seconds since the epoch (optional). This field + is set by ``revoke_refresh_tokens`` and it is discouraged to set this field + directly. + providers_to_delete: The list of provider IDs to unlink, + eg: 'google.com', 'password', etc. + + Returns: + UserRecord: An updated UserRecord instance for the user. + + Raises: + ValueError: If the specified user ID or properties are invalid. 
+ FirebaseError: If an error occurs while updating the user account. + """ + ... + + def set_custom_user_claims(self, uid, custom_claims): # -> None: + """Sets additional claims on an existing user account. + + Custom claims set via this function can be used to define user roles and privilege levels. + These claims propagate to all the devices where the user is already signed in (after token + expiration or when token refresh is forced), and next time the user signs in. The claims + can be accessed via the user's ID token JWT. If a reserved OIDC claim is specified (sub, + iat, iss, etc), an error is thrown. Claims payload must also not be larger then 1000 + characters when serialized into a JSON string. + + Args: + uid: A user ID string. + custom_claims: A dictionary or a JSON string of custom claims. Pass None to unset any + claims set previously. + + Raises: + ValueError: If the specified user ID or the custom claims are invalid. + FirebaseError: If an error occurs while updating the user account. + """ + ... + + def delete_user(self, uid): # -> None: + """Deletes the user identified by the specified user ID. + + Args: + uid: A user ID string. + + Raises: + ValueError: If the user ID is None, empty or malformed. + FirebaseError: If an error occurs while deleting the user account. + """ + ... + + def delete_users(self, uids): # -> DeleteUsersResult: + """Deletes the users specified by the given identifiers. + + Deleting a non-existing user does not generate an error (the method is + idempotent.) Non-existing users are considered to be successfully + deleted and are therefore included in the + `DeleteUserResult.success_count` value. + + A maximum of 1000 identifiers may be supplied. If more than 1000 + identifiers are supplied, this method raises a `ValueError`. + + Args: + uids: A list of strings indicating the uids of the users to be deleted. + Must have <= 1000 entries. 
+ + Returns: + DeleteUsersResult: The total number of successful/failed deletions, as + well as the array of errors that correspond to the failed + deletions. + + Raises: + ValueError: If any of the identifiers are invalid or if more than 1000 + identifiers are specified. + """ + ... + + def import_users(self, users, hash_alg=...): # -> UserImportResult: + """Imports the specified list of users into Firebase Auth. + + At most 1000 users can be imported at a time. This operation is optimized for bulk imports + and ignores checks on identifier uniqueness, which could result in duplications. The + ``hash_alg`` parameter must be specified when importing users with passwords. Refer to the + ``UserImportHash`` class for supported hash algorithms. + + Args: + users: A list of ``ImportUserRecord`` instances to import. Length of the list must not + exceed 1000. + hash_alg: A ``UserImportHash`` object (optional). Required when importing users with + passwords. + + Returns: + UserImportResult: An object summarizing the result of the import operation. + + Raises: + ValueError: If the provided arguments are invalid. + FirebaseError: If an error occurs while importing users. + """ + ... + + def generate_password_reset_link(self, email, action_code_settings=...): + """Generates the out-of-band email action link for password reset flows for the specified + email address. + + Args: + email: The email of the user whose password is to be reset. + action_code_settings: ``ActionCodeSettings`` instance (optional). Defines whether + the link is to be handled by a mobile app and the additional state information to + be passed in the deep link. + + Returns: + link: The password reset link created by the API + + Raises: + ValueError: If the provided arguments are invalid + EmailNotFoundError: If no user exists for the specified email address. + FirebaseError: If an error occurs while generating the link + """ + ... 
+ + def generate_email_verification_link(self, email, action_code_settings=...): + """Generates the out-of-band email action link for email verification flows for the + specified email address. + + Args: + email: The email of the user to be verified. + action_code_settings: ``ActionCodeSettings`` instance (optional). Defines whether + the link is to be handled by a mobile app and the additional state information to + be passed in the deep link. + + Returns: + link: The email verification link created by the API + + Raises: + ValueError: If the provided arguments are invalid + UserNotFoundError: If no user exists for the specified email address. + FirebaseError: If an error occurs while generating the link + """ + ... + + def generate_sign_in_with_email_link(self, email, action_code_settings): + """Generates the out-of-band email action link for email link sign-in flows, using the + action code settings provided. + + Args: + email: The email of the user signing in. + action_code_settings: ``ActionCodeSettings`` instance. Defines whether + the link is to be handled by a mobile app and the additional state information to be + passed in the deep link. + + Returns: + link: The email sign-in link created by the API + + Raises: + ValueError: If the provided arguments are invalid + FirebaseError: If an error occurs while generating the link + """ + ... + + def get_oidc_provider_config(self, provider_id): # -> OIDCProviderConfig: + """Returns the ``OIDCProviderConfig`` with the given ID. + + Args: + provider_id: Provider ID string. + + Returns: + SAMLProviderConfig: An OIDC provider config instance. + + Raises: + ValueError: If the provider ID is invalid, empty or does not have ``oidc.`` prefix. + ConfigurationNotFoundError: If no OIDC provider is available with the given identifier. + FirebaseError: If an error occurs while retrieving the OIDC provider. + """ + ... 
+ + def create_oidc_provider_config(self, provider_id, client_id, issuer, display_name=..., enabled=..., client_secret=..., id_token_response_type=..., code_response_type=...): # -> OIDCProviderConfig: + """Creates a new OIDC provider config from the given parameters. + + OIDC provider support requires Google Cloud's Identity Platform (GCIP). To learn more about + GCIP, including pricing and features, see https://cloud.google.com/identity-platform. + + Args: + provider_id: Provider ID string. Must have the prefix ``oidc.``. + client_id: Client ID of the new config. + issuer: Issuer of the new config. Must be a valid URL. + display_name: The user-friendly display name to the current configuration (optional). + This name is also used as the provider label in the Cloud Console. + enabled: A boolean indicating whether the provider configuration is enabled or disabled + (optional). A user cannot sign in using a disabled provider. + client_secret: A string which sets the client secret for the new provider. + This is required for the code flow. + code_response_type: A boolean which sets whether to enable the code response flow for + the new provider. By default, this is not enabled if no response type is + specified. A client secret must be set for this response type. + Having both the code and ID token response flows is currently not supported. + id_token_response_type: A boolean which sets whether to enable the ID token response + flow for the new provider. By default, this is enabled if no response type is + specified. + Having both the code and ID token response flows is currently not supported. + + Returns: + OIDCProviderConfig: The newly created OIDC provider config instance. + + Raises: + ValueError: If any of the specified input parameters are invalid. + FirebaseError: If an error occurs while creating the new OIDC provider config. + """ + ... 
+ + def update_oidc_provider_config(self, provider_id, client_id=..., issuer=..., display_name=..., enabled=..., client_secret=..., id_token_response_type=..., code_response_type=...): # -> OIDCProviderConfig: + """Updates an existing OIDC provider config with the given parameters. + + Args: + provider_id: Provider ID string. Must have the prefix ``oidc.``. + client_id: Client ID of the new config (optional). + issuer: Issuer of the new config (optional). Must be a valid URL. + display_name: The user-friendly display name to the current configuration (optional). + Pass ``auth.DELETE_ATTRIBUTE`` to delete the current display name. + enabled: A boolean indicating whether the provider configuration is enabled or disabled + (optional). + client_secret: A string which sets the client secret for the new provider. + This is required for the code flow. + code_response_type: A boolean which sets whether to enable the code response flow for + the new provider. By default, this is not enabled if no response type is specified. + A client secret must be set for this response type. + Having both the code and ID token response flows is currently not supported. + id_token_response_type: A boolean which sets whether to enable the ID token response + flow for the new provider. By default, this is enabled if no response type is + specified. + Having both the code and ID token response flows is currently not supported. + + Returns: + OIDCProviderConfig: The updated OIDC provider config instance. + + Raises: + ValueError: If any of the specified input parameters are invalid. + FirebaseError: If an error occurs while updating the OIDC provider config. + """ + ... + + def delete_oidc_provider_config(self, provider_id): # -> None: + """Deletes the ``OIDCProviderConfig`` with the given ID. + + Args: + provider_id: Provider ID string. + + Raises: + ValueError: If the provider ID is invalid, empty or does not have ``oidc.`` prefix. 
+ ConfigurationNotFoundError: If no OIDC provider is available with the given identifier. + FirebaseError: If an error occurs while deleting the OIDC provider. + """ + ... + + def list_oidc_provider_configs(self, page_token=..., max_results=...): # -> _ListOIDCProviderConfigsPage: + """Retrieves a page of OIDC provider configs from a Firebase project. + + The ``page_token`` argument governs the starting point of the page. The ``max_results`` + argument governs the maximum number of configs that may be included in the returned + page. This function never returns ``None``. If there are no OIDC configs in the Firebase + project, this returns an empty page. + + Args: + page_token: A non-empty page token string, which indicates the starting point of the + page (optional). Defaults to ``None``, which will retrieve the first page of users. + max_results: A positive integer indicating the maximum number of users to include in + the returned page (optional). Defaults to 100, which is also the maximum number + allowed. + + Returns: + ListProviderConfigsPage: A page of OIDC provider config instances. + + Raises: + ValueError: If ``max_results`` or ``page_token`` are invalid. + FirebaseError: If an error occurs while retrieving the OIDC provider configs. + """ + ... + + def get_saml_provider_config(self, provider_id): # -> SAMLProviderConfig: + """Returns the ``SAMLProviderConfig`` with the given ID. + + Args: + provider_id: Provider ID string. + + Returns: + SAMLProviderConfig: A SAML provider config instance. + + Raises: + ValueError: If the provider ID is invalid, empty or does not have ``saml.`` prefix. + ConfigurationNotFoundError: If no SAML provider is available with the given identifier. + FirebaseError: If an error occurs while retrieving the SAML provider. + """ + ... 
+ + def create_saml_provider_config(self, provider_id, idp_entity_id, sso_url, x509_certificates, rp_entity_id, callback_url, display_name=..., enabled=...): # -> SAMLProviderConfig: + """Creates a new SAML provider config from the given parameters. + + SAML provider support requires Google Cloud's Identity Platform (GCIP). To learn more about + GCIP, including pricing and features, see https://cloud.google.com/identity-platform. + + Args: + provider_id: Provider ID string. Must have the prefix ``saml.``. + idp_entity_id: The SAML IdP entity identifier. + sso_url: The SAML IdP SSO URL. Must be a valid URL. + x509_certificates: The list of SAML IdP X.509 certificates issued by CA for this + provider. Multiple certificates are accepted to prevent outages during IdP key + rotation (for example ADFS rotates every 10 days). When the Auth server receives a + SAML response, it will match the SAML response with the certificate on record. + Otherwise the response is rejected. Developers are expected to manage the + certificate updates as keys are rotated. + rp_entity_id: The SAML relying party (service provider) entity ID. This is defined by + the developer but needs to be provided to the SAML IdP. + callback_url: Callback URL string. This is fixed and must always be the same as the + OAuth redirect URL provisioned by Firebase Auth, unless a custom authDomain is + used. + display_name: The user-friendly display name to the current configuration (optional). + This name is also used as the provider label in the Cloud Console. + enabled: A boolean indicating whether the provider configuration is enabled or disabled + (optional). A user cannot sign in using a disabled provider. + + Returns: + SAMLProviderConfig: The newly created SAML provider config instance. + + Raises: + ValueError: If any of the specified input parameters are invalid. + FirebaseError: If an error occurs while creating the new SAML provider config. + """ + ... 
+ + def update_saml_provider_config(self, provider_id, idp_entity_id=..., sso_url=..., x509_certificates=..., rp_entity_id=..., callback_url=..., display_name=..., enabled=...): # -> SAMLProviderConfig: + """Updates an existing SAML provider config with the given parameters. + + Args: + provider_id: Provider ID string. Must have the prefix ``saml.``. + idp_entity_id: The SAML IdP entity identifier (optional). + sso_url: The SAML IdP SSO URL. Must be a valid URL (optional). + x509_certificates: The list of SAML IdP X.509 certificates issued by CA for this + provider (optional). + rp_entity_id: The SAML relying party entity ID (optional). + callback_url: Callback URL string (optional). + display_name: The user-friendly display name of the current configuration (optional). + Pass ``auth.DELETE_ATTRIBUTE`` to delete the current display name. + enabled: A boolean indicating whether the provider configuration is enabled or disabled + (optional). + + Returns: + SAMLProviderConfig: The updated SAML provider config instance. + + Raises: + ValueError: If any of the specified input parameters are invalid. + FirebaseError: If an error occurs while updating the SAML provider config. + """ + ... + + def delete_saml_provider_config(self, provider_id): # -> None: + """Deletes the ``SAMLProviderConfig`` with the given ID. + + Args: + provider_id: Provider ID string. + + Raises: + ValueError: If the provider ID is invalid, empty or does not have ``saml.`` prefix. + ConfigurationNotFoundError: If no SAML provider is available with the given identifier. + FirebaseError: If an error occurs while deleting the SAML provider. + """ + ... + + def list_saml_provider_configs(self, page_token=..., max_results=...): # -> _ListSAMLProviderConfigsPage: + """Retrieves a page of SAML provider configs from a Firebase project. + + The ``page_token`` argument governs the starting point of the page. 
The ``max_results`` + argument governs the maximum number of configs that may be included in the returned + page. This function never returns ``None``. If there are no SAML configs in the Firebase + project, this returns an empty page. + + Args: + page_token: A non-empty page token string, which indicates the starting point of the + page (optional). Defaults to ``None``, which will retrieve the first page of users. + max_results: A positive integer indicating the maximum number of users to include in + the returned page (optional). Defaults to 100, which is also the maximum number + allowed. + + Returns: + ListProviderConfigsPage: A page of SAML provider config instances. + + Raises: + ValueError: If ``max_results`` or ``page_token`` are invalid. + FirebaseError: If an error occurs while retrieving the SAML provider configs. + """ + ... + + + diff --git a/typings/firebase_admin/_auth_providers.pyi b/typings/firebase_admin/_auth_providers.pyi new file mode 100644 index 0000000..ec65f65 --- /dev/null +++ b/typings/firebase_admin/_auth_providers.pyi @@ -0,0 +1,192 @@ +""" +This type stub file was generated by pyright. +""" + +from firebase_admin import _auth_utils + +"""Firebase auth providers management sub module.""" +MAX_LIST_CONFIGS_RESULTS = ... +class ProviderConfig: + """Parent type for all authentication provider config types.""" + def __init__(self, data) -> None: + ... + + @property + def provider_id(self): + ... + + @property + def display_name(self): + ... + + @property + def enabled(self): + ... + + + +class OIDCProviderConfig(ProviderConfig): + """Represents the OIDC auth provider configuration. + + See https://openid.net/specs/openid-connect-core-1_0-final.html. + """ + @property + def issuer(self): + ... + + @property + def client_id(self): + ... + + @property + def client_secret(self): + ... + + @property + def id_token_response_type(self): + ... + + @property + def code_response_type(self): + ... 
+
+
+class SAMLProviderConfig(ProviderConfig):
+    """Represents the SAML auth provider configuration.
+
+    See http://docs.oasis-open.org/security/saml/Post2.0/sstc-saml-tech-overview-2.0.html.
+    """
+    @property
+    def idp_entity_id(self):
+        ...
+
+    @property
+    def sso_url(self):
+        ...
+
+    @property
+    def x509_certificates(self): # -> list[Any]:
+        ...
+
+    @property
+    def callback_url(self):
+        ...
+
+    @property
+    def rp_entity_id(self):
+        ...
+
+
+
+class ListProviderConfigsPage:
+    """Represents a page of AuthProviderConfig instances retrieved from a Firebase project.
+
+    Provides methods for traversing the provider configs included in this page, as well as
+    retrieving subsequent pages. The iterator returned by ``iterate_all()`` can be used to iterate
+    through all provider configs in the Firebase project starting from this page.
+    """
+    def __init__(self, download, page_token, max_results) -> None:
+        ...
+
+    @property
+    def provider_configs(self):
+        """A list of ``AuthProviderConfig`` instances available in this page."""
+        ...
+
+    @property
+    def next_page_token(self):
+        """Page token string for the next page (empty string indicates no more pages)."""
+        ...
+
+    @property
+    def has_next_page(self): # -> bool:
+        """A boolean indicating whether more pages are available."""
+        ...
+
+    def get_next_page(self): # -> Self | None:
+        """Retrieves the next page of provider configs, if available.
+
+        Returns:
+            ListProviderConfigsPage: Next page of provider configs, or None if this is the last
+            page.
+        """
+        ...
+
+    def iterate_all(self): # -> _ProviderConfigIterator:
+        """Retrieves an iterator for provider configs.
+
+        Returned iterator will iterate through all the provider configs in the Firebase project
+        starting from this page. The iterator will never buffer more than one page of configs
+        in memory at a time.
+
+        Returns:
+            iterator: An iterator of AuthProviderConfig instances.
+        """
+        ...
+ + + +class _ListOIDCProviderConfigsPage(ListProviderConfigsPage): + @property + def provider_configs(self): # -> list[OIDCProviderConfig]: + ... + + + +class _ListSAMLProviderConfigsPage(ListProviderConfigsPage): + @property + def provider_configs(self): # -> list[SAMLProviderConfig]: + ... + + + +class _ProviderConfigIterator(_auth_utils.PageIterator): + @property + def items(self): + ... + + + +class ProviderConfigClient: + """Client for managing Auth provider configurations.""" + PROVIDER_CONFIG_URL = ... + def __init__(self, http_client, project_id, tenant_id=..., url_override=...) -> None: + ... + + def get_oidc_provider_config(self, provider_id): # -> OIDCProviderConfig: + ... + + def create_oidc_provider_config(self, provider_id, client_id, issuer, display_name=..., enabled=..., client_secret=..., id_token_response_type=..., code_response_type=...): # -> OIDCProviderConfig: + """Creates a new OIDC provider config from the given parameters.""" + ... + + def update_oidc_provider_config(self, provider_id, client_id=..., issuer=..., display_name=..., enabled=..., client_secret=..., id_token_response_type=..., code_response_type=...): # -> OIDCProviderConfig: + """Updates an existing OIDC provider config with the given parameters.""" + ... + + def delete_oidc_provider_config(self, provider_id): # -> None: + ... + + def list_oidc_provider_configs(self, page_token=..., max_results=...): # -> _ListOIDCProviderConfigsPage: + ... + + def get_saml_provider_config(self, provider_id): # -> SAMLProviderConfig: + ... + + def create_saml_provider_config(self, provider_id, idp_entity_id, sso_url, x509_certificates, rp_entity_id, callback_url, display_name=..., enabled=...): # -> SAMLProviderConfig: + """Creates a new SAML provider config from the given parameters.""" + ... 
+
+    def update_saml_provider_config(self, provider_id, idp_entity_id=..., sso_url=..., x509_certificates=..., rp_entity_id=..., callback_url=..., display_name=..., enabled=...): # -> SAMLProviderConfig:
+        """Updates an existing SAML provider config with the given parameters."""
+        ...
+
+    def delete_saml_provider_config(self, provider_id): # -> None:
+        ...
+
+    def list_saml_provider_configs(self, page_token=..., max_results=...): # -> _ListSAMLProviderConfigsPage:
+        ...
+
+
+
diff --git a/typings/firebase_admin/_auth_utils.pyi b/typings/firebase_admin/_auth_utils.pyi
new file mode 100644
index 0000000..5de7d7b
--- /dev/null
+++ b/typings/firebase_admin/_auth_utils.pyi
@@ -0,0 +1,234 @@
+"""
+This type stub file was generated by pyright.
+"""
+
+from firebase_admin import exceptions
+
+"""Firebase auth utils."""
+EMULATOR_HOST_ENV_VAR = ...
+MAX_CLAIMS_PAYLOAD_SIZE = ...
+RESERVED_CLAIMS = ...
+VALID_EMAIL_ACTION_TYPES = ...
+class PageIterator:
+    """An iterator that allows iterating over a sequence of items, one at a time.
+
+    This implementation loads a page of items into memory, and iterates on them. When the whole
+    page has been traversed, it loads another page. This class never keeps more than one page
+    of entries in memory.
+    """
+    def __init__(self, current_page) -> None:
+        ...
+
+    def __next__(self):
+        ...
+
+    def __iter__(self): # -> Self:
+        ...
+
+    @property
+    def items(self):
+        ...
+
+
+
+def get_emulator_host(): # -> str:
+    ...
+
+def is_emulated(): # -> bool:
+    ...
+
+def validate_uid(uid, required=...): # -> str | None:
+    ...
+
+def validate_email(email, required=...): # -> str | None:
+    ...
+
+def validate_phone(phone, required=...): # -> str | None:
+    """Validates the specified phone number.
+
+    Phone number validation is very lax here. Backend will enforce E.164 spec compliance, and
+    normalize accordingly. Here we check if the number starts with + sign, and contains at
+    least one alphanumeric character.
+    """
+    ...
+ +def validate_password(password, required=...): # -> str | None: + ... + +def validate_bytes(value, label, required=...): # -> bytes | None: + ... + +def validate_display_name(display_name, required=...): # -> str | None: + ... + +def validate_provider_id(provider_id, required=...): # -> str | None: + ... + +def validate_provider_uid(provider_uid, required=...): # -> str | None: + ... + +def validate_photo_url(photo_url, required=...): # -> str | None: + """Parses and validates the given URL string.""" + ... + +def validate_timestamp(timestamp, label, required=...): # -> int | None: + """Validates the given timestamp value. Timestamps must be positive integers.""" + ... + +def validate_int(value, label, low=..., high=...): # -> int: + """Validates that the given value represents an integer. + + There are several ways to represent an integer in Python (e.g. 2, 2L, 2.0). This method allows + for all such representations except for booleans. Booleans also behave like integers, but + always translate to 1 and 0. Passing a boolean to an API that expects integers is most likely + a developer error. + """ + ... + +def validate_string(value, label): # -> str: + """Validates that the given value is a string.""" + ... + +def validate_boolean(value, label): # -> bool: + """Validates that the given value is a boolean.""" + ... + +def validate_custom_claims(custom_claims, required=...): # -> str | None: + """Validates the specified custom claims. + + Custom claims must be specified as a JSON string. The string must not exceed 1000 + characters, and the parsed JSON payload must not contain reserved JWT claims. + """ + ... + +def validate_action_type(action_type): + ... + +def validate_provider_ids(provider_ids, required=...): # -> list[Any]: + ... + +def build_update_mask(params): # -> list[Any]: + """Creates an update mask list from the given dictionary.""" + ... 
+ +class UidAlreadyExistsError(exceptions.AlreadyExistsError): + """The user with the provided uid already exists.""" + default_message = ... + def __init__(self, message, cause, http_response) -> None: + ... + + + +class EmailAlreadyExistsError(exceptions.AlreadyExistsError): + """The user with the provided email already exists.""" + default_message = ... + def __init__(self, message, cause, http_response) -> None: + ... + + + +class InsufficientPermissionError(exceptions.PermissionDeniedError): + """The credential used to initialize the SDK lacks required permissions.""" + default_message = ... + def __init__(self, message, cause, http_response) -> None: + ... + + + +class InvalidDynamicLinkDomainError(exceptions.InvalidArgumentError): + """Dynamic link domain in ActionCodeSettings is not authorized.""" + default_message = ... + def __init__(self, message, cause, http_response) -> None: + ... + + + +class InvalidIdTokenError(exceptions.InvalidArgumentError): + """The provided ID token is not a valid Firebase ID token.""" + default_message = ... + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class PhoneNumberAlreadyExistsError(exceptions.AlreadyExistsError): + """The user with the provided phone number already exists.""" + default_message = ... + def __init__(self, message, cause, http_response) -> None: + ... + + + +class UnexpectedResponseError(exceptions.UnknownError): + """Backend service responded with an unexpected or malformed response.""" + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class UserNotFoundError(exceptions.NotFoundError): + """No user record found for the specified identifier.""" + default_message = ... + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class EmailNotFoundError(exceptions.NotFoundError): + """No user record found for the specified email.""" + default_message = ... + def __init__(self, message, cause=..., http_response=...) 
-> None: + ... + + + +class TenantNotFoundError(exceptions.NotFoundError): + """No tenant found for the specified identifier.""" + default_message = ... + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class TenantIdMismatchError(exceptions.InvalidArgumentError): + """Missing or invalid tenant ID field in the given JWT.""" + def __init__(self, message) -> None: + ... + + + +class ConfigurationNotFoundError(exceptions.NotFoundError): + """No auth provider found for the specified identifier.""" + default_message = ... + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class UserDisabledError(exceptions.InvalidArgumentError): + """An operation failed due to a user record being disabled.""" + default_message = ... + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class TooManyAttemptsTryLaterError(exceptions.ResourceExhaustedError): + """Rate limited because of too many attempts.""" + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class ResetPasswordExceedLimitError(exceptions.ResourceExhaustedError): + """Reset password emails exceeded their limits.""" + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +_CODE_TO_EXC_TYPE = ... +def handle_auth_backend_error(error): # -> DeadlineExceededError | UnavailableError | UnknownError: + """Converts a requests error received from the Firebase Auth service into a FirebaseError.""" + ... + diff --git a/typings/firebase_admin/_gapic_utils.pyi b/typings/firebase_admin/_gapic_utils.pyi new file mode 100644 index 0000000..1b0f444 --- /dev/null +++ b/typings/firebase_admin/_gapic_utils.pyi @@ -0,0 +1,43 @@ +""" +This type stub file was generated by pyright. 
+""" + +"""Internal utilities for interacting with Google API client.""" +def handle_platform_error_from_googleapiclient(error, handle_func=...): # -> DeadlineExceededError | UnavailableError | UnknownError: + """Constructs a ``FirebaseError`` from the given googleapiclient error. + + This can be used to handle errors returned by Google Cloud Platform (GCP) APIs. + + Args: + error: An error raised by the googleapiclient while making an HTTP call to a GCP API. + handle_func: A function that can be used to handle platform errors in a custom way. When + specified, this function will be called with three arguments. It has the same + signature as ```_handle_func_googleapiclient``, but may return ``None``. + + Returns: + FirebaseError: A ``FirebaseError`` that can be raised to the user code. + """ + ... + +def handle_googleapiclient_error(error, message=..., code=..., http_response=...): # -> DeadlineExceededError | UnavailableError | UnknownError: + """Constructs a ``FirebaseError`` from the given googleapiclient error. + + This method is agnostic of the remote service that produced the error, whether it is a GCP + service or otherwise. Therefore, this method does not attempt to parse the error response in + any way. + + Args: + error: An error raised by the googleapiclient module while making an HTTP call. + message: A message to be included in the resulting ``FirebaseError`` (optional). If not + specified the string representation of the ``error`` argument is used as the message. + code: A GCP error code that will be used to determine the resulting error type (optional). + If not specified the HTTP status code on the error response is used to determine a + suitable error code. + http_response: A requests HTTP response object to associate with the exception (optional). + If not specified, one will be created from the ``error``. + + Returns: + FirebaseError: A ``FirebaseError`` that can be raised to the user code. + """ + ... 
+ diff --git a/typings/firebase_admin/_http_client.pyi b/typings/firebase_admin/_http_client.pyi new file mode 100644 index 0000000..def2156 --- /dev/null +++ b/typings/firebase_admin/_http_client.pyi @@ -0,0 +1,106 @@ +""" +This type stub file was generated by pyright. +""" + +from requests.packages.urllib3.util import retry + +"""Internal HTTP client module. + + This module provides utilities for making HTTP calls using the requests library. + """ +if hasattr(retry.Retry.DEFAULT, 'allowed_methods'): + _ANY_METHOD = ... +else: + _ANY_METHOD = ... +DEFAULT_RETRY_CONFIG = ... +DEFAULT_TIMEOUT_SECONDS = ... +METRICS_HEADERS = ... +class HttpClient: + """Base HTTP client used to make HTTP calls. + + HttpClient maintains an HTTP session, and handles request authentication and retries if + necessary. + """ + def __init__(self, credential=..., session=..., base_url=..., headers=..., retries=..., timeout=...) -> None: + """Creates a new HttpClient instance from the provided arguments. + + If a credential is provided, initializes a new HTTP session authorized with it. If neither + a credential nor a session is provided, initializes a new unauthorized session. + + Args: + credential: A Google credential that can be used to authenticate requests (optional). + session: A custom HTTP session (optional). + base_url: A URL prefix to be added to all outgoing requests (optional). + headers: A map of headers to be added to all outgoing requests (optional). + retries: A urllib retry configuration. Default settings would retry once for low-level + connection and socket read errors, and up to 4 times for HTTP 500 and 503 errors. + Pass a False value to disable retries (optional). + timeout: HTTP timeout in seconds. Defaults to 120 seconds when not specified. Set to + None to disable timeouts (optional). + """ + ... + + @property + def session(self): # -> Session | None: + ... + + @property + def base_url(self): # -> str: + ... + + @property + def timeout(self): # -> int: + ... 
+ + def parse_body(self, resp): + ... + + def request(self, method, url, **kwargs): # -> Response: + """Makes an HTTP call using the Python requests library. + + This is the sole entry point to the requests library. All other helper methods in this + class call this method to send HTTP requests out. Refer to + http://docs.python-requests.org/en/master/api/ for more information on supported options + and features. + + Args: + method: HTTP method name as a string (e.g. get, post). + url: URL of the remote endpoint. + **kwargs: An additional set of keyword arguments to be passed into the requests API + (e.g. json, params, timeout). + + Returns: + Response: An HTTP response object. + + Raises: + RequestException: Any requests exceptions encountered while making the HTTP call. + """ + ... + + def headers(self, method, url, **kwargs): # -> CaseInsensitiveDict[str]: + ... + + def body_and_response(self, method, url, **kwargs): # -> tuple[Any, Response | Any]: + ... + + def body(self, method, url, **kwargs): + ... + + def headers_and_body(self, method, url, **kwargs): # -> tuple[CaseInsensitiveDict[str] | Any, Any]: + ... + + def close(self): # -> None: + ... + + + +class JsonHttpClient(HttpClient): + """An HTTP client that parses response messages as JSON.""" + def __init__(self, **kwargs) -> None: + ... + + def parse_body(self, resp): + ... + + + diff --git a/typings/firebase_admin/_messaging_encoder.pyi b/typings/firebase_admin/_messaging_encoder.pyi new file mode 100644 index 0000000..1680e9e --- /dev/null +++ b/typings/firebase_admin/_messaging_encoder.pyi @@ -0,0 +1,204 @@ +""" +This type stub file was generated by pyright. +""" + +import json + +"""Encoding and validation utils for the messaging (FCM) module.""" +class Message: + """A message that can be sent via Firebase Cloud Messaging. + + Contains payload information as well as recipient information. In particular, the message must + contain exactly one of token, topic or condition fields. 
+ + Args: + data: A dictionary of data fields (optional). All keys and values in the dictionary must be + strings. + notification: An instance of ``messaging.Notification`` (optional). + android: An instance of ``messaging.AndroidConfig`` (optional). + webpush: An instance of ``messaging.WebpushConfig`` (optional). + apns: An instance of ``messaging.ApnsConfig`` (optional). + fcm_options: An instance of ``messaging.FCMOptions`` (optional). + token: The registration token of the device to which the message should be sent (optional). + topic: Name of the FCM topic to which the message should be sent (optional). Topic name + may contain the ``/topics/`` prefix. + condition: The FCM condition to which the message should be sent (optional). + """ + def __init__(self, data=..., notification=..., android=..., webpush=..., apns=..., fcm_options=..., token=..., topic=..., condition=...) -> None: + ... + + def __str__(self) -> str: + ... + + + +class MulticastMessage: + """A message that can be sent to multiple tokens via Firebase Cloud Messaging. + + Args: + tokens: A list of registration tokens of targeted devices. + data: A dictionary of data fields (optional). All keys and values in the dictionary must be + strings. + notification: An instance of ``messaging.Notification`` (optional). + android: An instance of ``messaging.AndroidConfig`` (optional). + webpush: An instance of ``messaging.WebpushConfig`` (optional). + apns: An instance of ``messaging.ApnsConfig`` (optional). + fcm_options: An instance of ``messaging.FCMOptions`` (optional). + """ + def __init__(self, tokens, data=..., notification=..., android=..., webpush=..., apns=..., fcm_options=...) -> None: + ... + + + +class _Validators: + """A collection of data validation utilities. + + Methods provided in this class raise ``ValueErrors`` if any validations fail. + """ + @classmethod + def check_string(cls, label, value, non_empty=...): # -> str | None: + """Checks if the given value is a string.""" + ... 
+ + @classmethod + def check_number(cls, label, value): # -> Number | None: + ... + + @classmethod + def check_string_dict(cls, label, value): # -> dict[Any, Any] | None: + """Checks if the given value is a dictionary comprised only of string keys and values.""" + ... + + @classmethod + def check_string_list(cls, label, value): # -> list[Any] | None: + """Checks if the given value is a list comprised only of strings.""" + ... + + @classmethod + def check_number_list(cls, label, value): # -> list[Any] | None: + """Checks if the given value is a list comprised only of numbers.""" + ... + + @classmethod + def check_analytics_label(cls, label, value): # -> str | None: + """Checks if the given value is a valid analytics label.""" + ... + + @classmethod + def check_boolean(cls, label, value): # -> bool | None: + """Checks if the given value is boolean.""" + ... + + @classmethod + def check_datetime(cls, label, value): # -> datetime | None: + """Checks if the given value is a datetime.""" + ... + + + +class MessageEncoder(json.JSONEncoder): + """A custom ``JSONEncoder`` implementation for serializing Message instances into JSON.""" + @classmethod + def remove_null_values(cls, dict_value): # -> dict[Any, Any]: + ... + + @classmethod + def encode_android(cls, android): # -> dict[Any, Any] | None: + """Encodes an ``AndroidConfig`` instance into JSON.""" + ... + + @classmethod + def encode_android_fcm_options(cls, fcm_options): # -> dict[Any, Any] | None: + """Encodes an ``AndroidFCMOptions`` instance into JSON.""" + ... + + @classmethod + def encode_ttl(cls, ttl): # -> str | None: + """Encodes an ``AndroidConfig`` ``TTL`` duration into a string.""" + ... + + @classmethod + def encode_milliseconds(cls, label, msec): # -> str | None: + """Encodes a duration in milliseconds into a string.""" + ... + + @classmethod + def encode_android_notification(cls, notification): # -> dict[Any, Any] | None: + """Encodes an ``AndroidNotification`` instance into JSON.""" + ... 
+ + @classmethod + def encode_light_settings(cls, light_settings): # -> dict[Any, Any] | None: + """Encodes a ``LightSettings`` instance into JSON.""" + ... + + @classmethod + def encode_webpush(cls, webpush): # -> dict[Any, Any] | None: + """Encodes a ``WebpushConfig`` instance into JSON.""" + ... + + @classmethod + def encode_webpush_notification(cls, notification): # -> dict[Any, Any] | None: + """Encodes a ``WebpushNotification`` instance into JSON.""" + ... + + @classmethod + def encode_webpush_notification_actions(cls, actions): # -> list[Any] | None: + """Encodes a list of ``WebpushNotificationActions`` into JSON.""" + ... + + @classmethod + def encode_webpush_fcm_options(cls, options): # -> dict[Any, Any] | None: + """Encodes a ``WebpushFCMOptions`` instance into JSON.""" + ... + + @classmethod + def encode_apns(cls, apns): # -> dict[Any, Any] | None: + """Encodes an ``APNSConfig`` instance into JSON.""" + ... + + @classmethod + def encode_apns_payload(cls, payload): # -> dict[Any, Any] | None: + """Encodes an ``APNSPayload`` instance into JSON.""" + ... + + @classmethod + def encode_apns_fcm_options(cls, fcm_options): # -> dict[Any, Any] | None: + """Encodes an ``APNSFCMOptions`` instance into JSON.""" + ... + + @classmethod + def encode_aps(cls, aps): # -> dict[Any, Any]: + """Encodes an ``Aps`` instance into JSON.""" + ... + + @classmethod + def encode_aps_sound(cls, sound): # -> str | dict[Any, Any] | None: + """Encodes an APNs sound configuration into JSON.""" + ... + + @classmethod + def encode_aps_alert(cls, alert): # -> str | dict[Any, Any] | None: + """Encodes an ``ApsAlert`` instance into JSON.""" + ... + + @classmethod + def encode_notification(cls, notification): # -> dict[Any, Any] | None: + """Encodes a ``Notification`` instance into JSON.""" + ... + + @classmethod + def sanitize_topic_name(cls, topic): # -> None: + """Removes the /topics/ prefix from the topic name, if present.""" + ... 
+ + def default(self, o): # -> Any | dict[Any, Any]: + ... + + @classmethod + def encode_fcm_options(cls, fcm_options): # -> dict[Any, Any] | None: + """Encodes an ``FCMOptions`` instance into JSON.""" + ... + + + diff --git a/typings/firebase_admin/_messaging_utils.pyi b/typings/firebase_admin/_messaging_utils.pyi new file mode 100644 index 0000000..db9da16 --- /dev/null +++ b/typings/firebase_admin/_messaging_utils.pyi @@ -0,0 +1,402 @@ +""" +This type stub file was generated by pyright. +""" + +from firebase_admin import exceptions + +"""Types and utilities used by the messaging (FCM) module.""" +class Notification: + """A notification that can be included in a message. + + Args: + title: Title of the notification (optional). + body: Body of the notification (optional). + image: Image url of the notification (optional) + """ + def __init__(self, title=..., body=..., image=...) -> None: + ... + + + +class AndroidConfig: + """Android-specific options that can be included in a message. + + Args: + collapse_key: Collapse key string for the message (optional). This is an identifier for a + group of messages that can be collapsed, so that only the last message is sent when + delivery can be resumed. A maximum of 4 different collapse keys may be active at a + given time. + priority: Priority of the message (optional). Must be one of ``high`` or ``normal``. + ttl: The time-to-live duration of the message (optional). This can be specified + as a numeric seconds value or a ``datetime.timedelta`` instance. + restricted_package_name: The package name of the application where the registration tokens + must match in order to receive the message (optional). + data: A dictionary of data fields (optional). All keys and values in the dictionary must be + strings. When specified, overrides any data fields set via ``Message.data``. + notification: A ``messaging.AndroidNotification`` to be included in the message (optional). 
+ fcm_options: A ``messaging.AndroidFCMOptions`` to be included in the message (optional). + direct_boot_ok: A boolean indicating whether messages will be allowed to be delivered to + the app while the device is in direct boot mode (optional). + """ + def __init__(self, collapse_key=..., priority=..., ttl=..., restricted_package_name=..., data=..., notification=..., fcm_options=..., direct_boot_ok=...) -> None: + ... + + + +class AndroidNotification: + """Android-specific notification parameters. + + Args: + title: Title of the notification (optional). If specified, overrides the title set via + ``messaging.Notification``. + body: Body of the notification (optional). If specified, overrides the body set via + ``messaging.Notification``. + icon: Icon of the notification (optional). + color: Color of the notification icon expressed in ``#rrggbb`` form (optional). + sound: Sound to be played when the device receives the notification (optional). This is + usually the file name of the sound resource. + tag: Tag of the notification (optional). This is an identifier used to replace existing + notifications in the notification drawer. If not specified, each request creates a new + notification. + click_action: The action associated with a user click on the notification (optional). If + specified, an activity with a matching intent filter is launched when a user clicks on + the notification. + body_loc_key: Key of the body string in the app's string resources to use to localize the + body text (optional). + body_loc_args: A list of resource keys that will be used in place of the format specifiers + in ``body_loc_key`` (optional). + title_loc_key: Key of the title string in the app's string resources to use to localize the + title text (optional). + title_loc_args: A list of resource keys that will be used in place of the format specifiers + in ``title_loc_key`` (optional). + channel_id: channel_id of the notification (optional). 
+ image: Image url of the notification (optional). + ticker: Sets the ``ticker`` text, which is sent to accessibility services. Prior to API + level 21 (Lollipop), sets the text that is displayed in the status bar when the + notification first arrives (optional). + sticky: When set to ``False`` or unset, the notification is automatically dismissed when the + user clicks it in the panel. When set to ``True``, the notification persists even when + the user clicks it (optional). + event_timestamp: For notifications that inform users about events with an absolute time + reference, sets the time that the event in the notification occurred as a + ``datetime.datetime`` instance. If the ``datetime.datetime`` instance is naive, it + defaults to be in the UTC timezone. Notifications in the panel are sorted by this time + (optional). + local_only: Sets whether or not this notification is relevant only to the current device. + Some notifications can be bridged to other devices for remote display, such as a Wear OS + watch. This hint can be set to recommend this notification not be bridged (optional). + See Wear OS guides: + https://developer.android.com/training/wearables/notifications/bridger#existing-method-of-preventing-bridging + priority: Sets the relative priority for this notification. Low-priority notifications may + be hidden from the user in certain situations. Note this priority differs from + ``AndroidMessagePriority``. This priority is processed by the client after the message + has been delivered. Whereas ``AndroidMessagePriority`` is an FCM concept that controls + when the message is delivered (optional). Must be one of ``default``, ``min``, ``low``, + ``high``, ``max`` or ``normal``. + vibrate_timings_millis: Sets the vibration pattern to use. Pass in an array of milliseconds + to turn the vibrator on or off. The first value indicates the duration to wait before + turning the vibrator on. The next value indicates the duration to keep the vibrator on. 
+ Subsequent values alternate between duration to turn the vibrator off and to turn the + vibrator on. If ``vibrate_timings`` is set and ``default_vibrate_timings`` is set to + ``True``, the default value is used instead of the user-specified ``vibrate_timings``. + default_vibrate_timings: If set to ``True``, use the Android framework's default vibrate + pattern for the notification (optional). Default values are specified in ``config.xml`` + https://android.googlesource.com/platform/frameworks/base/+/master/core/res/res/values/config.xml. + If ``default_vibrate_timings`` is set to ``True`` and ``vibrate_timings`` is also set, + the default value is used instead of the user-specified ``vibrate_timings``. + default_sound: If set to ``True``, use the Android framework's default sound for the + notification (optional). Default values are specified in ``config.xml`` + https://android.googlesource.com/platform/frameworks/base/+/master/core/res/res/values/config.xml + light_settings: Settings to control the notification's LED blinking rate and color if LED is + available on the device. The total blinking time is controlled by the OS (optional). + default_light_settings: If set to ``True``, use the Android framework's default LED light + settings for the notification. Default values are specified in ``config.xml`` + https://android.googlesource.com/platform/frameworks/base/+/master/core/res/res/values/config.xml. + If ``default_light_settings`` is set to ``True`` and ``light_settings`` is also set, the + user-specified ``light_settings`` is used instead of the default value. + visibility: Sets the visibility of the notification. Must be either ``private``, ``public``, + or ``secret``. If unspecified, it remains undefined in the Admin SDK, and defers to + the FCM backend's default mapping. + notification_count: Sets the number of items this notification represents. May be displayed + as a badge count for Launchers that support badging. 
See ``NotificationBadge`` + https://developer.android.com/training/notify-user/badges. For example, this might be + useful if you're using just one notification to represent multiple new messages but you + want the count here to represent the number of total new messages. If zero or + unspecified, systems that support badging use the default, which is to increment a + number displayed on the long-press menu each time a new notification arrives. + proxy: Sets if the notification may be proxied. Must be one of ``allow``, ``deny``, or + ``if_priority_lowered``. If unspecified, it remains undefined in the Admin SDK, and + defers to the FCM backend's default mapping. + + + """ + def __init__(self, title=..., body=..., icon=..., color=..., sound=..., tag=..., click_action=..., body_loc_key=..., body_loc_args=..., title_loc_key=..., title_loc_args=..., channel_id=..., image=..., ticker=..., sticky=..., event_timestamp=..., local_only=..., priority=..., vibrate_timings_millis=..., default_vibrate_timings=..., default_sound=..., light_settings=..., default_light_settings=..., visibility=..., notification_count=..., proxy=...) -> None: + ... + + + +class LightSettings: + """Represents settings to control notification LED that can be included in a + ``messaging.AndroidNotification``. + + Args: + color: Sets the color of the LED in ``#rrggbb`` or ``#rrggbbaa`` format. + light_on_duration_millis: Along with ``light_off_duration``, defines the blink rate of LED + flashes. + light_off_duration_millis: Along with ``light_on_duration``, defines the blink rate of LED + flashes. + """ + def __init__(self, color, light_on_duration_millis, light_off_duration_millis) -> None: + ... + + + +class AndroidFCMOptions: + """Options for features provided by the FCM SDK for Android. + + Args: + analytics_label: contains additional options for features provided by the FCM Android SDK + (optional). + """ + def __init__(self, analytics_label=...) -> None: + ... 
+ + + +class WebpushConfig: + """Webpush-specific options that can be included in a message. + + Args: + headers: A dictionary of headers (optional). Refer `Webpush Specification`_ for supported + headers. + data: A dictionary of data fields (optional). All keys and values in the dictionary must be + strings. When specified, overrides any data fields set via ``Message.data``. + notification: A ``messaging.WebpushNotification`` to be included in the message (optional). + fcm_options: A ``messaging.WebpushFCMOptions`` instance to be included in the message + (optional). + + .. _Webpush Specification: https://tools.ietf.org/html/rfc8030#section-5 + """ + def __init__(self, headers=..., data=..., notification=..., fcm_options=...) -> None: + ... + + + +class WebpushNotificationAction: + """An action available to the users when the notification is presented. + + Args: + action: Action string. + title: Title string. + icon: Icon URL for the action (optional). + """ + def __init__(self, action, title, icon=...) -> None: + ... + + + +class WebpushNotification: + """Webpush-specific notification parameters. + + Refer to the `Notification Reference`_ for more information. + + Args: + title: Title of the notification (optional). If specified, overrides the title set via + ``messaging.Notification``. + body: Body of the notification (optional). If specified, overrides the body set via + ``messaging.Notification``. + icon: Icon URL of the notification (optional). + actions: A list of ``messaging.WebpushNotificationAction`` instances (optional). + badge: URL of the image used to represent the notification when there is + not enough space to display the notification itself (optional). + data: Any arbitrary JSON data that should be associated with the notification (optional). + direction: The direction in which to display the notification (optional). Must be either + 'auto', 'ltr' or 'rtl'. + image: The URL of an image to be displayed in the notification (optional). 
+ language: Notification language (optional). + renotify: A boolean indicating whether the user should be notified after a new + notification replaces an old one (optional). + require_interaction: A boolean indicating whether a notification should remain active + until the user clicks or dismisses it, rather than closing automatically (optional). + silent: ``True`` to indicate that the notification should be silent (optional). + tag: An identifying tag on the notification (optional). + timestamp_millis: A timestamp value in milliseconds on the notification (optional). + vibrate: A vibration pattern for the device's vibration hardware to emit when the + notification fires (optional). The pattern is specified as an integer array. + custom_data: A dict of custom key-value pairs to be included in the notification + (optional) + + .. _Notification Reference: https://developer.mozilla.org/en-US/docs/Web/API\ + /notification/Notification + """ + def __init__(self, title=..., body=..., icon=..., actions=..., badge=..., data=..., direction=..., image=..., language=..., renotify=..., require_interaction=..., silent=..., tag=..., timestamp_millis=..., vibrate=..., custom_data=...) -> None: + ... + + + +class WebpushFCMOptions: + """Options for features provided by the FCM SDK for Web. + + Args: + link: The link to open when the user clicks on the notification. Must be an HTTPS URL + (optional). + """ + def __init__(self, link=...) -> None: + ... + + + +class APNSConfig: + """APNS-specific options that can be included in a message. + + Refer to `APNS Documentation`_ for more information. + + Args: + headers: A dictionary of headers (optional). + payload: A ``messaging.APNSPayload`` to be included in the message (optional). + fcm_options: A ``messaging.APNSFCMOptions`` instance to be included in the message + (optional). + + .. 
_APNS Documentation: https://developer.apple.com/library/content/documentation\ + /NetworkingInternet/Conceptual/RemoteNotificationsPG/CommunicatingwithAPNs.html + """ + def __init__(self, headers=..., payload=..., fcm_options=...) -> None: + ... + + + +class APNSPayload: + """Payload of an APNS message. + + Args: + aps: A ``messaging.Aps`` instance to be included in the payload. + **kwargs: Arbitrary keyword arguments to be included as custom fields in the payload + (optional). + """ + def __init__(self, aps, **kwargs) -> None: + ... + + + +class Aps: + """Aps dictionary to be included in an APNS payload. + + Args: + alert: A string or a ``messaging.ApsAlert`` instance (optional). + badge: A number representing the badge to be displayed with the message (optional). + sound: Name of the sound file to be played with the message or a + ``messaging.CriticalSound`` instance (optional). + content_available: A boolean indicating whether to configure a background update + notification (optional). + category: String identifier representing the message type (optional). + thread_id: An app-specific string identifier for grouping messages (optional). + mutable_content: A boolean indicating whether to support mutating notifications at + the client using app extensions (optional). + custom_data: A dict of custom key-value pairs to be included in the Aps dictionary + (optional). + """ + def __init__(self, alert=..., badge=..., sound=..., content_available=..., category=..., thread_id=..., mutable_content=..., custom_data=...) -> None: + ... + + + +class CriticalSound: + """Critical alert sound configuration that can be included in ``messaging.Aps``. + + Args: + name: The name of a sound file in your app's main bundle or in the ``Library/Sounds`` + folder of your app's container directory. Specify the string ``default`` to play the + system sound. + critical: Set to ``True`` to set the critical alert flag on the sound configuration + (optional). 
+ volume: The volume for the critical alert's sound. Must be a value between 0.0 (silent) + and 1.0 (full volume) (optional). + """ + def __init__(self, name, critical=..., volume=...) -> None: + ... + + + +class ApsAlert: + """An alert that can be included in ``messaging.Aps``. + + Args: + title: Title of the alert (optional). If specified, overrides the title set via + ``messaging.Notification``. + subtitle: Subtitle of the alert (optional). + body: Body of the alert (optional). If specified, overrides the body set via + ``messaging.Notification``. + loc_key: Key of the body string in the app's string resources to use to localize the + body text (optional). + loc_args: A list of resource keys that will be used in place of the format specifiers + in ``loc_key`` (optional). + title_loc_key: Key of the title string in the app's string resources to use to localize the + title text (optional). + title_loc_args: A list of resource keys that will be used in place of the format specifiers + in ``title_loc_key`` (optional). + action_loc_key: Key of the text in the app's string resources to use to localize the + action button text (optional). + launch_image: Image for the notification action (optional). + custom_data: A dict of custom key-value pairs to be included in the ApsAlert dictionary + (optional) + """ + def __init__(self, title=..., subtitle=..., body=..., loc_key=..., loc_args=..., title_loc_key=..., title_loc_args=..., action_loc_key=..., launch_image=..., custom_data=...) -> None: + ... + + + +class APNSFCMOptions: + """Options for features provided by the FCM SDK for iOS. + + Args: + analytics_label: contains additional options for features provided by the FCM iOS SDK + (optional). + image: contains the URL of an image that is going to be displayed in a notification + (optional). + """ + def __init__(self, analytics_label=..., image=...) -> None: + ... + + + +class FCMOptions: + """Options for features provided by SDK. 
+ + Args: + analytics_label: contains additional options to use across all platforms (optional). + """ + def __init__(self, analytics_label=...) -> None: + ... + + + +class ThirdPartyAuthError(exceptions.UnauthenticatedError): + """APNs certificate or web push auth key was invalid or missing.""" + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class QuotaExceededError(exceptions.ResourceExhaustedError): + """Sending limit exceeded for the message target.""" + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class SenderIdMismatchError(exceptions.PermissionDeniedError): + """The authenticated sender ID is different from the sender ID for the registration token.""" + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class UnregisteredError(exceptions.NotFoundError): + """App instance was unregistered from FCM. + + This usually means that the token used is no longer valid and a new one must be used.""" + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + diff --git a/typings/firebase_admin/_rfc3339.pyi b/typings/firebase_admin/_rfc3339.pyi new file mode 100644 index 0000000..ff017f7 --- /dev/null +++ b/typings/firebase_admin/_rfc3339.pyi @@ -0,0 +1,24 @@ +""" +This type stub file was generated by pyright. +""" + +"""Parse RFC3339 date strings""" +def parse_to_epoch(datestr): # -> float: + """Parse an RFC3339 date string and return the number of seconds since the + epoch (as a float). + + In particular, this method is meant to parse the strings returned by the + JSON mapping of protobuf google.protobuf.timestamp.Timestamp instances: + https://github.com/protocolbuffers/protobuf/blob/4cf5bfee9546101d98754d23ff378ff718ba8438/src/google/protobuf/timestamp.proto#L99 + + This method has microsecond precision; nanoseconds will be truncated. + + Args: + datestr: A string in RFC3339 format. + Returns: + Float: The number of seconds since the Unix epoch. 
+ Raises: + ValueError: Raised if the `datestr` is not a valid RFC3339 date string. + """ + ... + diff --git a/typings/firebase_admin/_sseclient.pyi b/typings/firebase_admin/_sseclient.pyi new file mode 100644 index 0000000..00b97e1 --- /dev/null +++ b/typings/firebase_admin/_sseclient.pyi @@ -0,0 +1,92 @@ +""" +This type stub file was generated by pyright. +""" + +import requests +from google.auth import transport + +"""SSEClient module to stream realtime updates from the Firebase Database. + +Based on a similar implementation from Pyrebase. +""" +end_of_field = ... +class KeepAuthSession(transport.requests.AuthorizedSession): + """A session that does not drop authentication on redirects between domains.""" + def __init__(self, credential) -> None: + ... + + def rebuild_auth(self, prepared_request, response): # -> None: + ... + + + +class _EventBuffer: + """A helper class for buffering and parsing raw SSE data.""" + def __init__(self) -> None: + ... + + def append(self, char): # -> None: + ... + + def truncate(self): # -> None: + ... + + @property + def is_end_of_field(self): # -> bool: + ... + + @property + def buffer_string(self): # -> str: + ... + + + +class SSEClient: + """SSE client implementation.""" + def __init__(self, url, session, retry=..., **kwargs) -> None: + """Initializes the SSEClient. + + Args: + url: The remote url to connect to. + session: The requests session. + retry: The retry interval in milliseconds (optional). + **kwargs: Extra kwargs that will be sent to ``requests.get()`` (optional). + """ + ... + + def close(self): # -> None: + """Closes the SSEClient instance.""" + ... + + def __iter__(self): # -> Self: + ... + + def __next__(self): # -> Event | None: + ... + + def next(self): # -> Event | None: + ... + + + +class Event: + """Event represents the events fired by SSE.""" + sse_line_pattern = ... + def __init__(self, data=..., event_type=..., event_id=..., retry=...) -> None: + ... 
+ + @classmethod + def parse(cls, raw): # -> Self: + """Given a possibly-multiline string representing an SSE message, parses it + and returns an Event object. + + Args: + raw: the raw data to parse. + + Returns: + Event: A new ``Event`` with the parameters initialized. + """ + ... + + + diff --git a/typings/firebase_admin/_token_gen.pyi b/typings/firebase_admin/_token_gen.pyi new file mode 100644 index 0000000..fe6cc5d --- /dev/null +++ b/typings/firebase_admin/_token_gen.pyi @@ -0,0 +1,177 @@ +""" +This type stub file was generated by pyright. +""" + +from google.auth import transport +from firebase_admin import _auth_utils, exceptions + +"""Firebase token minting and validation sub module.""" +ID_TOKEN_ISSUER_PREFIX = ... +ID_TOKEN_CERT_URI = ... +COOKIE_ISSUER_PREFIX = ... +COOKIE_CERT_URI = ... +MIN_SESSION_COOKIE_DURATION_SECONDS = ... +MAX_SESSION_COOKIE_DURATION_SECONDS = ... +MAX_TOKEN_LIFETIME_SECONDS = ... +FIREBASE_AUDIENCE = ... +RESERVED_CLAIMS = ... +METADATA_SERVICE_URL = ... +ALGORITHM_RS256 = ... +ALGORITHM_NONE = ... +AUTH_EMULATOR_EMAIL = ... +class _EmulatedSigner(google.auth.crypt.Signer): + key_id = ... + def __init__(self) -> None: + ... + + def sign(self, message): # -> Literal[b""]: + ... + + + +class _SigningProvider: + """Stores a reference to a google.auth.crypto.Signer.""" + def __init__(self, signer, signer_email, alg=...) -> None: + ... + + @property + def signer(self): # -> Any: + ... + + @property + def signer_email(self): # -> Any: + ... + + @property + def alg(self): # -> str: + ... + + @classmethod + def from_credential(cls, google_cred): # -> _SigningProvider: + ... + + @classmethod + def from_iam(cls, request, google_cred, service_account): # -> _SigningProvider: + ... + + @classmethod + def for_emulator(cls): # -> _SigningProvider: + ... + + + +class TokenGenerator: + """Generates custom tokens and session cookies.""" + ID_TOOLKIT_URL = ... + def __init__(self, app, http_client, url_override=...) -> None: + ... 
+ + @property + def signing_provider(self): # -> _SigningProvider: + """Initializes and returns the SigningProvider instance to be used.""" + ... + + def create_custom_token(self, uid, developer_claims=..., tenant_id=...): # -> bytes: + """Builds and signs a Firebase custom auth token.""" + ... + + def create_session_cookie(self, id_token, expires_in): + """Creates a session cookie from the provided ID token.""" + ... + + + +class CertificateFetchRequest(transport.Request): + """A google-auth transport that supports HTTP cache-control. + + Also injects a timeout to each outgoing HTTP request. + """ + def __init__(self, timeout_seconds=...) -> None: + ... + + @property + def session(self): # -> Session: + ... + + @property + def timeout_seconds(self): # -> None: + ... + + def __call__(self, url, method=..., body=..., headers=..., timeout=..., **kwargs): + ... + + + +class TokenVerifier: + """Verifies ID tokens and session cookies.""" + def __init__(self, app) -> None: + ... + + def verify_id_token(self, id_token, clock_skew_seconds=...): # -> Any | Mapping[str, Any]: + ... + + def verify_session_cookie(self, cookie, clock_skew_seconds=...): # -> Any | Mapping[str, Any]: + ... + + + +class _JWTVerifier: + """Verifies Firebase JWTs (ID tokens or session cookies).""" + def __init__(self, **kwargs) -> None: + ... + + def verify(self, token, request, clock_skew_seconds=...): # -> Any | Mapping[str, Any]: + """Verifies the signature and data for the provided JWT.""" + ... + + + +class TokenSignError(exceptions.UnknownError): + """Unexpected error while signing a Firebase custom token.""" + def __init__(self, message, cause) -> None: + ... + + + +class CertificateFetchError(exceptions.UnknownError): + """Failed to fetch some public key certificates required to verify a token.""" + def __init__(self, message, cause) -> None: + ... 
+ + + +class ExpiredIdTokenError(_auth_utils.InvalidIdTokenError): + """The provided ID token is expired.""" + def __init__(self, message, cause) -> None: + ... + + + +class RevokedIdTokenError(_auth_utils.InvalidIdTokenError): + """The provided ID token has been revoked.""" + def __init__(self, message) -> None: + ... + + + +class InvalidSessionCookieError(exceptions.InvalidArgumentError): + """The provided string is not a valid Firebase session cookie.""" + def __init__(self, message, cause=...) -> None: + ... + + + +class ExpiredSessionCookieError(InvalidSessionCookieError): + """The provided session cookie is expired.""" + def __init__(self, message, cause) -> None: + ... + + + +class RevokedSessionCookieError(InvalidSessionCookieError): + """The provided session cookie has been revoked.""" + def __init__(self, message) -> None: + ... + + + diff --git a/typings/firebase_admin/_user_identifier.pyi b/typings/firebase_admin/_user_identifier.pyi new file mode 100644 index 0000000..c852707 --- /dev/null +++ b/typings/firebase_admin/_user_identifier.pyi @@ -0,0 +1,91 @@ +""" +This type stub file was generated by pyright. +""" + +"""Classes to uniquely identify a user.""" +class UserIdentifier: + """Identifies a user to be looked up.""" + ... + + +class UidIdentifier(UserIdentifier): + """Used for looking up an account by uid. + + See ``auth.get_user()``. + """ + def __init__(self, uid) -> None: + """Constructs a new `UidIdentifier` object. + + Args: + uid: A user ID string. + """ + ... + + @property + def uid(self): # -> str | None: + ... + + + +class EmailIdentifier(UserIdentifier): + """Used for looking up an account by email. + + See ``auth.get_user()``. + """ + def __init__(self, email) -> None: + """Constructs a new `EmailIdentifier` object. + + Args: + email: A user email address string. + """ + ... + + @property + def email(self): # -> str | None: + ... + + + +class PhoneIdentifier(UserIdentifier): + """Used for looking up an account by phone number. 
+ + See ``auth.get_user()``. + """ + def __init__(self, phone_number) -> None: + """Constructs a new `PhoneIdentifier` object. + + Args: + phone_number: A phone number string. + """ + ... + + @property + def phone_number(self): # -> str | None: + ... + + + +class ProviderIdentifier(UserIdentifier): + """Used for looking up an account by provider. + + See ``auth.get_user()``. + """ + def __init__(self, provider_id, provider_uid) -> None: + """Constructs a new `ProviderIdentifier` object. + +   Args: +     provider_id: A provider ID string. +     provider_uid: A provider UID string. + """ + ... + + @property + def provider_id(self): # -> str | None: + ... + + @property + def provider_uid(self): # -> str | None: + ... + + + diff --git a/typings/firebase_admin/_user_import.pyi b/typings/firebase_admin/_user_import.pyi new file mode 100644 index 0000000..3d8e677 --- /dev/null +++ b/typings/firebase_admin/_user_import.pyi @@ -0,0 +1,405 @@ +""" +This type stub file was generated by pyright. +""" + +"""Firebase user import sub module.""" +def b64_encode(bytes_value): # -> str: + ... + +class UserProvider: + """Represents a user identity provider that can be associated with a Firebase user. + + One or more providers can be specified in an ``ImportUserRecord`` when importing users via + ``auth.import_users()``. + + Args: + uid: User's unique ID assigned by the identity provider. + provider_id: ID of the identity provider. This can be a short domain name or the identifier + of an OpenID identity provider. + email: User's email address (optional). + display_name: User's display name (optional). + photo_url: User's photo URL (optional). + """ + def __init__(self, uid, provider_id, email=..., display_name=..., photo_url=...) -> None: + ... + + @property + def uid(self): # -> str | None: + ... + + @uid.setter + def uid(self, uid): # -> None: + ... + + @property + def provider_id(self): # -> str | None: + ... 
+ + @provider_id.setter + def provider_id(self, provider_id): # -> None: + ... + + @property + def email(self): # -> str | None: + ... + + @email.setter + def email(self, email): # -> None: + ... + + @property + def display_name(self): # -> str | None: + ... + + @display_name.setter + def display_name(self, display_name): # -> None: + ... + + @property + def photo_url(self): # -> str | None: + ... + + @photo_url.setter + def photo_url(self, photo_url): # -> None: + ... + + def to_dict(self): # -> dict[str, str]: + ... + + + +class ImportUserRecord: + """Represents a user account to be imported to Firebase Auth. + + Must specify the ``uid`` field at a minimum. A sequence of ``ImportUserRecord`` objects can be + passed to the ``auth.import_users()`` function, in order to import those users into Firebase + Auth in bulk. If the ``password_hash`` is set on a user, a hash configuration must be + specified when calling ``import_users()``. + + Args: + uid: User's unique ID. Must be a non-empty string not longer than 128 characters. + email: User's email address (optional). + email_verified: A boolean indicating whether the user's email has been verified (optional). + display_name: User's display name (optional). + phone_number: User's phone number (optional). + photo_url: User's photo URL (optional). + disabled: A boolean indicating whether this user account has been disabled (optional). + user_metadata: An ``auth.UserMetadata`` instance with additional user metadata (optional). + provider_data: A list of ``auth.UserProvider`` instances (optional). + custom_claims: A ``dict`` of custom claims to be set on the user account (optional). + password_hash: User's password hash as a ``bytes`` sequence (optional). + password_salt: User's password salt as a ``bytes`` sequence (optional). + + Raises: + ValueError: If provided arguments are invalid. 
+ """ + def __init__(self, uid, email=..., email_verified=..., display_name=..., phone_number=..., photo_url=..., disabled=..., user_metadata=..., provider_data=..., custom_claims=..., password_hash=..., password_salt=...) -> None: + ... + + @property + def uid(self): # -> str | None: + ... + + @uid.setter + def uid(self, uid): # -> None: + ... + + @property + def email(self): # -> str | None: + ... + + @email.setter + def email(self, email): # -> None: + ... + + @property + def display_name(self): # -> str | None: + ... + + @display_name.setter + def display_name(self, display_name): # -> None: + ... + + @property + def phone_number(self): # -> str | None: + ... + + @phone_number.setter + def phone_number(self, phone_number): # -> None: + ... + + @property + def photo_url(self): # -> str | None: + ... + + @photo_url.setter + def photo_url(self, photo_url): # -> None: + ... + + @property + def password_hash(self): # -> bytes | None: + ... + + @password_hash.setter + def password_hash(self, password_hash): # -> None: + ... + + @property + def password_salt(self): # -> bytes | None: + ... + + @password_salt.setter + def password_salt(self, password_salt): # -> None: + ... + + @property + def user_metadata(self): + ... + + @user_metadata.setter + def user_metadata(self, user_metadata): # -> None: + ... + + @property + def provider_data(self): + ... + + @provider_data.setter + def provider_data(self, provider_data): # -> None: + ... + + @property + def custom_claims(self): # -> dict[Any, Any]: + ... + + @custom_claims.setter + def custom_claims(self, custom_claims): # -> None: + ... + + def to_dict(self): # -> dict[str, Any]: + """Returns a dict representation of the user. For internal use only.""" + ... + + + +class UserImportHash: + """Represents a hash algorithm used to hash user passwords. + + An instance of this class must be specified when importing users with passwords via the + ``auth.import_users()`` API. 
Use one of the provided class methods to obtain new + instances when required. Refer to `documentation`_ for more details. + + .. _documentation: https://firebase.google.com/docs/auth/admin/import-users + """ + def __init__(self, name, data=...) -> None: + ... + + def to_dict(self): # -> dict[str, Any]: + ... + + @classmethod + def hmac_sha512(cls, key): # -> UserImportHash: + """Creates a new HMAC SHA512 algorithm instance. + + Args: + key: Signer key as a byte sequence. + + Returns: + UserImportHash: A new ``UserImportHash``. + """ + ... + + @classmethod + def hmac_sha256(cls, key): # -> UserImportHash: + """Creates a new HMAC SHA256 algorithm instance. + + Args: + key: Signer key as a byte sequence. + + Returns: + UserImportHash: A new ``UserImportHash``. + """ + ... + + @classmethod + def hmac_sha1(cls, key): # -> UserImportHash: + """Creates a new HMAC SHA1 algorithm instance. + + Args: + key: Signer key as a byte sequence. + + Returns: + UserImportHash: A new ``UserImportHash``. + """ + ... + + @classmethod + def hmac_md5(cls, key): # -> UserImportHash: + """Creates a new HMAC MD5 algorithm instance. + + Args: + key: Signer key as a byte sequence. + + Returns: + UserImportHash: A new ``UserImportHash``. + """ + ... + + @classmethod + def md5(cls, rounds): # -> UserImportHash: + """Creates a new MD5 algorithm instance. + + Args: + rounds: Number of rounds. Must be an integer between 0 and 8192. + + Returns: + UserImportHash: A new ``UserImportHash``. + """ + ... + + @classmethod + def sha1(cls, rounds): # -> UserImportHash: + """Creates a new SHA1 algorithm instance. + + Args: + rounds: Number of rounds. Must be an integer between 1 and 8192. + + Returns: + UserImportHash: A new ``UserImportHash``. + """ + ... + + @classmethod + def sha256(cls, rounds): # -> UserImportHash: + """Creates a new SHA256 algorithm instance. + + Args: + rounds: Number of rounds. Must be an integer between 1 and 8192. + + Returns: + UserImportHash: A new ``UserImportHash``. 
+ """ + ... + + @classmethod + def sha512(cls, rounds): # -> UserImportHash: + """Creates a new SHA512 algorithm instance. + + Args: + rounds: Number of rounds. Must be an integer between 1 and 8192. + + Returns: + UserImportHash: A new ``UserImportHash``. + """ + ... + + @classmethod + def pbkdf_sha1(cls, rounds): # -> UserImportHash: + """Creates a new PBKDF SHA1 algorithm instance. + + Args: + rounds: Number of rounds. Must be an integer between 0 and 120000. + + Returns: + UserImportHash: A new ``UserImportHash``. + """ + ... + + @classmethod + def pbkdf2_sha256(cls, rounds): # -> UserImportHash: + """Creates a new PBKDF2 SHA256 algorithm instance. + + Args: + rounds: Number of rounds. Must be an integer between 0 and 120000. + + Returns: + UserImportHash: A new ``UserImportHash``. + """ + ... + + @classmethod + def scrypt(cls, key, rounds, memory_cost, salt_separator=...): # -> UserImportHash: + """Creates a new Scrypt algorithm instance. + + This is the modified Scrypt algorithm used by Firebase Auth. See ``standard_scrypt()`` + function for the standard Scrypt algorith, + + Args: + key: Signer key as a byte sequence. + rounds: Number of rounds. Must be an integer between 1 and 8. + memory_cost: Memory cost as an integer between 1 and 14. + salt_separator: Salt separator as a byte sequence (optional). + + Returns: + UserImportHash: A new ``UserImportHash``. + """ + ... + + @classmethod + def bcrypt(cls): # -> UserImportHash: + """Creates a new Bcrypt algorithm instance. + + Returns: + UserImportHash: A new ``UserImportHash``. + """ + ... + + @classmethod + def standard_scrypt(cls, memory_cost, parallelization, block_size, derived_key_length): # -> UserImportHash: + """Creates a new standard Scrypt algorithm instance. + + Args: + memory_cost: CPU Memory cost as a non-negative integer. + parallelization: Parallelization as a non-negative integer. + block_size: Block size as a non-negative integer. 
+ derived_key_length: Derived key length as a non-negative integer. + + Returns: + UserImportHash: A new ``UserImportHash``. + """ + ... + + + +class ErrorInfo: + """Represents an error encountered while performing a batch operation such + as importing users or deleting multiple user accounts. + """ + def __init__(self, error) -> None: + ... + + @property + def index(self): + ... + + @property + def reason(self): + ... + + + +class UserImportResult: + """Represents the result of a bulk user import operation. + + See ``auth.import_users()`` API for more details. + """ + def __init__(self, result, total) -> None: + ... + + @property + def success_count(self): + """Returns the number of users successfully imported.""" + ... + + @property + def failure_count(self): # -> int: + """Returns the number of users that failed to be imported.""" + ... + + @property + def errors(self): # -> list[ErrorInfo]: + """Returns a list of ``auth.ErrorInfo`` instances describing the errors encountered.""" + ... + + + diff --git a/typings/firebase_admin/_user_mgt.pyi b/typings/firebase_admin/_user_mgt.pyi new file mode 100644 index 0000000..11ce7a0 --- /dev/null +++ b/typings/firebase_admin/_user_mgt.pyi @@ -0,0 +1,527 @@ +""" +This type stub file was generated by pyright. +""" + +from firebase_admin import _auth_utils + +"""Firebase user management sub module.""" +MAX_LIST_USERS_RESULTS = ... +MAX_IMPORT_USERS_SIZE = ... +B64_REDACTED = ... +class Sentinel: + def __init__(self, description) -> None: + ... + + + +DELETE_ATTRIBUTE = ... +class UserMetadata: + """Contains additional metadata associated with a user account.""" + def __init__(self, creation_timestamp=..., last_sign_in_timestamp=..., last_refresh_timestamp=...) -> None: + ... + + @property + def creation_timestamp(self): # -> int | None: + """ Creation timestamp in milliseconds since the epoch. + + Returns: + integer: The user creation timestamp in milliseconds since the epoch. + """ + ... 
+ + @property + def last_sign_in_timestamp(self): # -> int | None: + """ Last sign in timestamp in milliseconds since the epoch. + + Returns: + integer: The last sign in timestamp in milliseconds since the epoch. + """ + ... + + @property + def last_refresh_timestamp(self): # -> int | None: + """The time at which the user was last active (ID token refreshed). + + Returns: + integer: Milliseconds since epoch timestamp, or `None` if the user was + never active. + """ + ... + + + +class UserInfo: + """A collection of standard profile information for a user. + + Used to expose profile information returned by an identity provider. + """ + @property + def uid(self): + """Returns the user ID of this user.""" + ... + + @property + def display_name(self): + """Returns the display name of this user.""" + ... + + @property + def email(self): + """Returns the email address associated with this user.""" + ... + + @property + def phone_number(self): + """Returns the phone number associated with this user.""" + ... + + @property + def photo_url(self): + """Returns the photo URL of this user.""" + ... + + @property + def provider_id(self): + """Returns the ID of the identity provider. + + This can be a short domain name (e.g. google.com), or the identity of an OpenID + identity provider. + """ + ... + + + +class UserRecord(UserInfo): + """Contains metadata associated with a Firebase user account.""" + def __init__(self, data) -> None: + ... + + @property + def uid(self): # -> None: + """Returns the user ID of this user. + + Returns: + string: A user ID string. This value is never None or empty. + """ + ... + + @property + def display_name(self): # -> None: + """Returns the display name of this user. + + Returns: + string: A display name string or None. + """ + ... + + @property + def email(self): # -> None: + """Returns the email address associated with this user. + + Returns: + string: An email address string or None. + """ + ... 
+ + @property + def phone_number(self): # -> None: + """Returns the phone number associated with this user. + + Returns: + string: A phone number string or None. + """ + ... + + @property + def photo_url(self): # -> None: + """Returns the photo URL of this user. + + Returns: + string: A URL string or None. + """ + ... + + @property + def provider_id(self): # -> Literal['firebase']: + """Returns the provider ID of this user. + + Returns: + string: A constant provider ID value. + """ + ... + + @property + def email_verified(self): # -> bool: + """Returns whether the email address of this user has been verified. + + Returns: + bool: True if the email has been verified, and False otherwise. + """ + ... + + @property + def disabled(self): # -> bool: + """Returns whether this user account is disabled. + + Returns: + bool: True if the user account is disabled, and False otherwise. + """ + ... + + @property + def tokens_valid_after_timestamp(self): # -> int: + """Returns the time, in milliseconds since the epoch, before which tokens are invalid. + + Note: this is truncated to 1 second accuracy. + + Returns: + int: Timestamp in milliseconds since the epoch, truncated to the second. + All tokens issued before that time are considered revoked. + """ + ... + + @property + def user_metadata(self): # -> UserMetadata: + """Returns additional metadata associated with this user. + + Returns: + UserMetadata: A UserMetadata instance. Does not return None. + """ + ... + + @property + def provider_data(self): # -> list[ProviderUserInfo]: + """Returns a list of UserInfo instances. + + Each object represents an identity from an identity provider that is linked to this user. + + Returns: + list: A list of UserInfo objects, which may be empty. + """ + ... + + @property + def custom_claims(self): # -> Any | None: + """Returns any custom claims set on this user account. + + Returns: + dict: A dictionary of claims or None. + """ + ... 
+ + @property + def tenant_id(self): # -> None: + """Returns the tenant ID of this user. + + Returns: + string: A tenant ID string or None. + """ + ... + + + +class ExportedUserRecord(UserRecord): + """Contains metadata associated with a user including password hash and salt.""" + @property + def password_hash(self): # -> None: + """The user's password hash as a base64-encoded string. + + If the Firebase Auth hashing algorithm (SCRYPT) was used to create the user account, this + is the base64-encoded password hash of the user. If a different hashing algorithm was + used to create this user, as is typical when migrating from another Auth system, this + is an empty string. If no password is set, or if the service account doesn't have permission + to read the password, then this is ``None``. + """ + ... + + @property + def password_salt(self): # -> None: + """The user's password salt as a base64-encoded string. + + If the Firebase Auth hashing algorithm (SCRYPT) was used to create the user account, this + is the base64-encoded password salt of the user. If a different hashing algorithm was + used to create this user, as is typical when migrating from another Auth system, this is + an empty string. If no password is set, or if the service account doesn't have permission to + read the password, then this is ``None``. + """ + ... + + + +class GetUsersResult: + """Represents the result of the ``auth.get_users()`` API.""" + def __init__(self, users, not_found) -> None: + """Constructs a `GetUsersResult` object. + + Args: + users: List of `UserRecord` instances. + not_found: List of `UserIdentifier` instances. + """ + ... + + @property + def users(self): # -> Any: + """Set of `UserRecord` instances, corresponding to the set of users + that were requested. Only users that were found are listed here. The + result set is unordered. + """ + ... + + @property + def not_found(self): # -> Any: + """Set of `UserIdentifier` instances that were requested, but not + found. + """ + ... 
+ + + +class ListUsersPage: + """Represents a page of user records exported from a Firebase project. + + Provides methods for traversing the user accounts included in this page, as well as retrieving + subsequent pages of users. The iterator returned by ``iterate_all()`` can be used to iterate + through all users in the Firebase project starting from this page. + """ + def __init__(self, download, page_token, max_results) -> None: + ... + + @property + def users(self): # -> list[ExportedUserRecord]: + """A list of ``ExportedUserRecord`` instances available in this page.""" + ... + + @property + def next_page_token(self): + """Page token string for the next page (empty string indicates no more pages).""" + ... + + @property + def has_next_page(self): # -> bool: + """A boolean indicating whether more pages are available.""" + ... + + def get_next_page(self): # -> ListUsersPage | None: + """Retrieves the next page of user accounts, if available. + + Returns: + ListUsersPage: Next page of users, or None if this is the last page. + """ + ... + + def iterate_all(self): # -> _UserIterator: + """Retrieves an iterator for user accounts. + + Returned iterator will iterate through all the user accounts in the Firebase project + starting from this page. The iterator will never buffer more than one page of users + in memory at a time. + + Returns: + iterator: An iterator of ExportedUserRecord instances. + """ + ... + + + +class DeleteUsersResult: + """Represents the result of the ``auth.delete_users()`` API.""" + def __init__(self, result, total) -> None: + """Constructs a `DeleteUsersResult` object. + + Args: + result: The proto response, wrapped in a + `BatchDeleteAccountsResponse` instance. + total: Total integer number of deletion attempts. + """ + ... + + @property + def success_count(self): + """Returns the number of users that were deleted successfully (possibly + zero). 
+ + Users that did not exist prior to calling `delete_users()` are + considered to be successfully deleted. + """ + ... + + @property + def failure_count(self): # -> int: + """Returns the number of users that failed to be deleted (possibly + zero). + """ + ... + + @property + def errors(self): + """A list of `auth.ErrorInfo` instances describing the errors that + were encountered during the deletion. Length of this list is equal to + `failure_count`. + """ + ... + + + +class BatchDeleteAccountsResponse: + """Represents the results of a `delete_users()` call.""" + def __init__(self, errors=...) -> None: + """Constructs a `BatchDeleteAccountsResponse` instance, corresponding to + the JSON representing the `BatchDeleteAccountsResponse` proto. + + Args: + errors: List of dictionaries, with each dictionary representing an + `ErrorInfo` instance as returned by the server. `None` implies + an empty list. + """ + ... + + + +class ProviderUserInfo(UserInfo): + """Contains metadata regarding how a user is known by a particular identity provider.""" + def __init__(self, data) -> None: + ... + + @property + def uid(self): # -> None: + ... + + @property + def display_name(self): # -> None: + ... + + @property + def email(self): # -> None: + ... + + @property + def phone_number(self): # -> None: + ... + + @property + def photo_url(self): # -> None: + ... + + @property + def provider_id(self): # -> None: + ... + + + +class ActionCodeSettings: + """Contains required continue/state URL with optional Android and iOS settings. + Used when invoking the email action link generation APIs. + """ + def __init__(self, url, handle_code_in_app=..., dynamic_link_domain=..., ios_bundle_id=..., android_package_name=..., android_install_app=..., android_minimum_version=...) -> None: + ... + + + +def encode_action_code_settings(settings): # -> dict[Any, Any]: + """ Validates the provided action code settings for email link generation and + populates the REST api parameters. 
+ + settings - ``ActionCodeSettings`` object provided to be encoded + returns - dict of parameters to be passed for link gereration. + """ + ... + +class UserManager: + """Provides methods for interacting with the Google Identity Toolkit.""" + ID_TOOLKIT_URL = ... + def __init__(self, http_client, project_id, tenant_id=..., url_override=...) -> None: + ... + + def get_user(self, **kwargs): + """Gets the user data corresponding to the provided key.""" + ... + + def get_users(self, identifiers): # -> list[Any]: + """Looks up multiple users by their identifiers (uid, email, etc.) + + Args: + identifiers: UserIdentifier[]: The identifiers indicating the user + to be looked up. Must have <= 100 entries. + + Returns: + list[dict[string, string]]: List of dicts representing the JSON + `UserInfo` responses from the server. + + Raises: + ValueError: If any of the identifiers are invalid or if more than + 100 identifiers are specified. + UnexpectedResponseError: If the backend server responds with an + unexpected message. + """ + ... + + def list_users(self, page_token=..., max_results=...): + """Retrieves a batch of users.""" + ... + + def create_user(self, uid=..., display_name=..., email=..., phone_number=..., photo_url=..., password=..., disabled=..., email_verified=...): + """Creates a new user account with the specified properties.""" + ... + + def update_user(self, uid, display_name=..., email=..., phone_number=..., photo_url=..., password=..., disabled=..., email_verified=..., valid_since=..., custom_claims=..., providers_to_delete=...): + """Updates an existing user account with the specified properties""" + ... + + def delete_user(self, uid): # -> None: + """Deletes the user identified by the specified user ID.""" + ... + + def delete_users(self, uids, force_delete=...): # -> BatchDeleteAccountsResponse: + """Deletes the users identified by the specified user ids. + + Args: + uids: A list of strings indicating the uids of the users to be deleted. 
+ Must have <= 1000 entries. + force_delete: Optional parameter that indicates if users should be + deleted, even if they're not disabled. Defaults to False. + + + Returns: + BatchDeleteAccountsResponse: Server's proto response, wrapped in a + python object. + + Raises: + ValueError: If any of the identifiers are invalid or if more than 1000 + identifiers are specified. + UnexpectedResponseError: If the backend server responds with an + unexpected message. + """ + ... + + def import_users(self, users, hash_alg=...): # -> dict[Any, Any]: + """Imports the given list of users to Firebase Auth.""" + ... + + def generate_email_action_link(self, action_type, email, action_code_settings=...): + """Fetches the email action links for types + + Args: + action_type: String. Valid values ['VERIFY_EMAIL', 'EMAIL_SIGNIN', 'PASSWORD_RESET'] + email: Email of the user for which the action is performed + action_code_settings: ``ActionCodeSettings`` object or dict (optional). Defines whether + the link is to be handled by a mobile app and the additional state information to be + passed in the deep link, etc. + Returns: + link_url: action url to be emailed to the user + + Raises: + UnexpectedResponseError: If the backend server responds with an unexpected message + FirebaseError: If an error occurs while generating the link + ValueError: If the provided arguments are invalid + """ + ... + + + +class _UserIterator(_auth_utils.PageIterator): + @property + def items(self): + ... + + + diff --git a/typings/firebase_admin/_utils.pyi b/typings/firebase_admin/_utils.pyi new file mode 100644 index 0000000..fee37e5 --- /dev/null +++ b/typings/firebase_admin/_utils.pyi @@ -0,0 +1,77 @@ +""" +This type stub file was generated by pyright. +""" + +import google.auth + +"""Internal utilities common to all modules.""" +_ERROR_CODE_TO_EXCEPTION_TYPE = ... +_HTTP_STATUS_TO_ERROR_CODE = ... +_RPC_CODE_TO_ERROR_CODE = ... +def get_metrics_header(): # -> str: + ... 
+ +def get_app_service(app, name, initializer): + ... + +def handle_platform_error_from_requests(error, handle_func=...): # -> DeadlineExceededError | UnavailableError | UnknownError: + """Constructs a ``FirebaseError`` from the given requests error. + + This can be used to handle errors returned by Google Cloud Platform (GCP) APIs. + + Args: + error: An error raised by the requests module while making an HTTP call to a GCP API. + handle_func: A function that can be used to handle platform errors in a custom way. When + specified, this function will be called with three arguments. It has the same + signature as ```_handle_func_requests``, but may return ``None``. + + Returns: + FirebaseError: A ``FirebaseError`` that can be raised to the user code. + """ + ... + +def handle_operation_error(error): # -> UnknownError: + """Constructs a ``FirebaseError`` from the given operation error. + + Args: + error: An error returned by a long running operation. + + Returns: + FirebaseError: A ``FirebaseError`` that can be raised to the user code. + """ + ... + +def handle_requests_error(error, message=..., code=...): # -> DeadlineExceededError | UnavailableError | UnknownError: + """Constructs a ``FirebaseError`` from the given requests error. + + This method is agnostic of the remote service that produced the error, whether it is a GCP + service or otherwise. Therefore, this method does not attempt to parse the error response in + any way. + + Args: + error: An error raised by the requests module while making an HTTP call. + message: A message to be included in the resulting ``FirebaseError`` (optional). If not + specified the string representation of the ``error`` argument is used as the message. + code: A GCP error code that will be used to determine the resulting error type (optional). + If not specified the HTTP status code on the error response is used to determine a + suitable error code. 
+ + Returns: + FirebaseError: A ``FirebaseError`` that can be raised to the user code. + """ + ... + +class EmulatorAdminCredentials(google.auth.credentials.Credentials): + """ Credentials for use with the firebase local emulator. + + This is used instead of user-supplied credentials or ADC. It will silently do nothing when + asked to refresh credentials. + """ + def __init__(self) -> None: + ... + + def refresh(self, request): # -> None: + ... + + + diff --git a/typings/firebase_admin/app_check.pyi b/typings/firebase_admin/app_check.pyi new file mode 100644 index 0000000..93efe2f --- /dev/null +++ b/typings/firebase_admin/app_check.pyi @@ -0,0 +1,54 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import Any, Dict + +"""Firebase App Check module.""" +_APP_CHECK_ATTRIBUTE = ... +def verify_token(token: str, app=...) -> Dict[str, Any]: + """Verifies a Firebase App Check token. + + Args: + token: A token from App Check. + app: An App instance (optional). + + Returns: + Dict[str, Any]: The token's decoded claims. + + Raises: + ValueError: If the app's ``project_id`` is invalid or unspecified, + or if the token's headers or payload are invalid. + PyJWKClientError: If PyJWKClient fails to fetch a valid signing key. + """ + ... + +class _AppCheckService: + """Service class that implements Firebase App Check functionality.""" + _APP_CHECK_ISSUER = ... + _JWKS_URL = ... + _project_id = ... + _scoped_project_id = ... + _jwks_client = ... + _APP_CHECK_HEADERS = ... + def __init__(self, app) -> None: + ... + + def verify_token(self, token: str) -> Dict[str, Any]: + """Verifies a Firebase App Check token.""" + ... + + + +class _Validators: + """A collection of data validation utilities. + + Methods provided in this class raise ``ValueErrors`` if any validations fail. + """ + @classmethod + def check_string(cls, label: str, value: Any): # -> None: + """Checks if the given value is a string.""" + ... 
+ + + diff --git a/typings/firebase_admin/auth.pyi b/typings/firebase_admin/auth.pyi new file mode 100644 index 0000000..8ebad28 --- /dev/null +++ b/typings/firebase_admin/auth.pyi @@ -0,0 +1,716 @@ +""" +This type stub file was generated by pyright. +""" + +from firebase_admin import _auth_client, _auth_providers, _auth_utils, _token_gen, _user_identifier, _user_import, _user_mgt + +"""Firebase Authentication module. + +This module contains functions for minting and verifying JWTs used for +authenticating against Firebase services. It also provides functions for +creating and managing user accounts in Firebase projects. +""" +_AUTH_ATTRIBUTE = ... +__all__ = ['ActionCodeSettings', 'CertificateFetchError', 'Client', 'ConfigurationNotFoundError', 'DELETE_ATTRIBUTE', 'EmailAlreadyExistsError', 'EmailNotFoundError', 'ErrorInfo', 'ExpiredIdTokenError', 'ExpiredSessionCookieError', 'ExportedUserRecord', 'DeleteUsersResult', 'GetUsersResult', 'ImportUserRecord', 'InsufficientPermissionError', 'InvalidDynamicLinkDomainError', 'InvalidIdTokenError', 'InvalidSessionCookieError', 'ListProviderConfigsPage', 'ListUsersPage', 'OIDCProviderConfig', 'PhoneNumberAlreadyExistsError', 'ProviderConfig', 'ResetPasswordExceedLimitError', 'RevokedIdTokenError', 'RevokedSessionCookieError', 'SAMLProviderConfig', 'TokenSignError', 'TooManyAttemptsTryLaterError', 'UidAlreadyExistsError', 'UnexpectedResponseError', 'UserDisabledError', 'UserImportHash', 'UserImportResult', 'UserInfo', 'UserMetadata', 'UserNotFoundError', 'UserProvider', 'UserRecord', 'UserIdentifier', 'UidIdentifier', 'EmailIdentifier', 'PhoneIdentifier', 'ProviderIdentifier', 'create_custom_token', 'create_oidc_provider_config', 'create_saml_provider_config', 'create_session_cookie', 'create_user', 'delete_oidc_provider_config', 'delete_saml_provider_config', 'delete_user', 'delete_users', 'generate_email_verification_link', 'generate_password_reset_link', 'generate_sign_in_with_email_link', 'get_oidc_provider_config', 
'get_saml_provider_config', 'get_user', 'get_user_by_email', 'get_user_by_phone_number', 'get_users', 'import_users', 'list_saml_provider_configs', 'list_users', 'revoke_refresh_tokens', 'set_custom_user_claims', 'update_oidc_provider_config', 'update_saml_provider_config', 'update_user', 'verify_id_token', 'verify_session_cookie'] +ActionCodeSettings = _user_mgt.ActionCodeSettings +CertificateFetchError = _token_gen.CertificateFetchError +Client = _auth_client.Client +ConfigurationNotFoundError = _auth_utils.ConfigurationNotFoundError +DELETE_ATTRIBUTE = ... +DeleteUsersResult = _user_mgt.DeleteUsersResult +EmailAlreadyExistsError = _auth_utils.EmailAlreadyExistsError +EmailNotFoundError = _auth_utils.EmailNotFoundError +ErrorInfo = _user_import.ErrorInfo +ExpiredIdTokenError = _token_gen.ExpiredIdTokenError +ExpiredSessionCookieError = _token_gen.ExpiredSessionCookieError +ExportedUserRecord = _user_mgt.ExportedUserRecord +GetUsersResult = _user_mgt.GetUsersResult +ImportUserRecord = _user_import.ImportUserRecord +InsufficientPermissionError = _auth_utils.InsufficientPermissionError +InvalidDynamicLinkDomainError = _auth_utils.InvalidDynamicLinkDomainError +InvalidIdTokenError = _auth_utils.InvalidIdTokenError +InvalidSessionCookieError = _token_gen.InvalidSessionCookieError +ListProviderConfigsPage = _auth_providers.ListProviderConfigsPage +ListUsersPage = _user_mgt.ListUsersPage +OIDCProviderConfig = _auth_providers.OIDCProviderConfig +PhoneNumberAlreadyExistsError = _auth_utils.PhoneNumberAlreadyExistsError +ProviderConfig = _auth_providers.ProviderConfig +ResetPasswordExceedLimitError = _auth_utils.ResetPasswordExceedLimitError +RevokedIdTokenError = _token_gen.RevokedIdTokenError +RevokedSessionCookieError = _token_gen.RevokedSessionCookieError +SAMLProviderConfig = _auth_providers.SAMLProviderConfig +TokenSignError = _token_gen.TokenSignError +TooManyAttemptsTryLaterError = _auth_utils.TooManyAttemptsTryLaterError +UidAlreadyExistsError = 
_auth_utils.UidAlreadyExistsError +UnexpectedResponseError = _auth_utils.UnexpectedResponseError +UserDisabledError = _auth_utils.UserDisabledError +UserImportHash = _user_import.UserImportHash +UserImportResult = _user_import.UserImportResult +UserInfo = _user_mgt.UserInfo +UserMetadata = _user_mgt.UserMetadata +UserNotFoundError = _auth_utils.UserNotFoundError +UserProvider = _user_import.UserProvider +UserRecord = _user_mgt.UserRecord +UserIdentifier = _user_identifier.UserIdentifier +UidIdentifier = _user_identifier.UidIdentifier +EmailIdentifier = _user_identifier.EmailIdentifier +PhoneIdentifier = _user_identifier.PhoneIdentifier +ProviderIdentifier = _user_identifier.ProviderIdentifier +def create_custom_token(uid, developer_claims=..., app=...): + """Builds and signs a Firebase custom auth token. + + Args: + uid: ID of the user for whom the token is created. + developer_claims: A dictionary of claims to be included in the token + (optional). + app: An App instance (optional). + + Returns: + bytes: A token minted from the input parameters. + + Raises: + ValueError: If input parameters are invalid. + TokenSignError: If an error occurs while signing the token using the remote IAM service. + """ + ... + +def verify_id_token(id_token, app=..., check_revoked=..., clock_skew_seconds=...): + """Verifies the signature and data for the provided JWT. + + Accepts a signed token string, verifies that it is current, and issued + to this project, and that it was correctly signed by Google. + + Args: + id_token: A string of the encoded JWT. + app: An App instance (optional). + check_revoked: Boolean, If true, checks whether the token has been revoked or + the user disabled (optional). + clock_skew_seconds: The number of seconds to tolerate when checking the token. + Must be between 0-60. Defaults to 0. + Returns: + dict: A dictionary of key-value pairs parsed from the decoded JWT. + + Raises: + ValueError: If ``id_token`` is a not a string or is empty. 
+ InvalidIdTokenError: If ``id_token`` is not a valid Firebase ID token. + ExpiredIdTokenError: If the specified ID token has expired. + RevokedIdTokenError: If ``check_revoked`` is ``True`` and the ID token has been revoked. + CertificateFetchError: If an error occurs while fetching the public key certificates + required to verify the ID token. + UserDisabledError: If ``check_revoked`` is ``True`` and the corresponding user + record is disabled. + """ + ... + +def create_session_cookie(id_token, expires_in, app=...): + """Creates a new Firebase session cookie from the given ID token and options. + + The returned JWT can be set as a server-side session cookie with a custom cookie policy. + + Args: + id_token: The Firebase ID token to exchange for a session cookie. + expires_in: Duration until the cookie is expired. This can be specified + as a numeric seconds value or a ``datetime.timedelta`` instance. + app: An App instance (optional). + + Returns: + bytes: A session cookie generated from the input parameters. + + Raises: + ValueError: If input parameters are invalid. + FirebaseError: If an error occurs while creating the cookie. + """ + ... + +def verify_session_cookie(session_cookie, check_revoked=..., app=..., clock_skew_seconds=...): + """Verifies a Firebase session cookie. + + Accepts a session cookie string, verifies that it is current, and issued + to this project, and that it was correctly signed by Google. + + Args: + session_cookie: A session cookie string to verify. + check_revoked: Boolean, if true, checks whether the cookie has been revoked or the + user disabled (optional). + app: An App instance (optional). + clock_skew_seconds: The number of seconds to tolerate when checking the cookie. + + Returns: + dict: A dictionary of key-value pairs parsed from the decoded JWT. + + Raises: + ValueError: If ``session_cookie`` is a not a string or is empty. + InvalidSessionCookieError: If ``session_cookie`` is not a valid Firebase session cookie. 
+ ExpiredSessionCookieError: If the specified session cookie has expired. + RevokedSessionCookieError: If ``check_revoked`` is ``True`` and the cookie has been revoked. + CertificateFetchError: If an error occurs while fetching the public key certificates + required to verify the session cookie. + UserDisabledError: If ``check_revoked`` is ``True`` and the corresponding user + record is disabled. + """ + ... + +def revoke_refresh_tokens(uid, app=...): # -> None: + """Revokes all refresh tokens for an existing user. + + This function updates the user's ``tokens_valid_after_timestamp`` to the current UTC + in seconds since the epoch. It is important that the server on which this is called has its + clock set correctly and synchronized. + + While this revokes all sessions for a specified user and disables any new ID tokens for + existing sessions from getting minted, existing ID tokens may remain active until their + natural expiration (one hour). To verify that ID tokens are revoked, use + ``verify_id_token(idToken, check_revoked=True)``. + + Args: + uid: A user ID string. + app: An App instance (optional). + + Raises: + ValueError: If the user ID is None, empty or malformed. + FirebaseError: If an error occurs while revoking the refresh token. + """ + ... + +def get_user(uid, app=...): + """Gets the user data corresponding to the specified user ID. + + Args: + uid: A user ID string. + app: An App instance (optional). + + Returns: + UserRecord: A user record instance. + + Raises: + ValueError: If the user ID is None, empty or malformed. + UserNotFoundError: If the specified user ID does not exist. + FirebaseError: If an error occurs while retrieving the user. + """ + ... + +def get_user_by_email(email, app=...): + """Gets the user data corresponding to the specified user email. + + Args: + email: A user email address string. + app: An App instance (optional). + + Returns: + UserRecord: A user record instance. 
+ + Raises: + ValueError: If the email is None, empty or malformed. + UserNotFoundError: If no user exists by the specified email address. + FirebaseError: If an error occurs while retrieving the user. + """ + ... + +def get_user_by_phone_number(phone_number, app=...): + """Gets the user data corresponding to the specified phone number. + + Args: + phone_number: A phone number string. + app: An App instance (optional). + + Returns: + UserRecord: A user record instance. + + Raises: + ValueError: If the phone number is None, empty or malformed. + UserNotFoundError: If no user exists by the specified phone number. + FirebaseError: If an error occurs while retrieving the user. + """ + ... + +def get_users(identifiers, app=...): + """Gets the user data corresponding to the specified identifiers. + + There are no ordering guarantees; in particular, the nth entry in the + result list is not guaranteed to correspond to the nth entry in the input + parameters list. + + A maximum of 100 identifiers may be supplied. If more than 100 + identifiers are supplied, this method raises a `ValueError`. + + Args: + identifiers (list[UserIdentifier]): A list of ``UserIdentifier`` + instances used to indicate which user records should be returned. + Must have <= 100 entries. + app: An App instance (optional). + + Returns: + GetUsersResult: A ``GetUsersResult`` instance corresponding to the + specified identifiers. + + Raises: + ValueError: If any of the identifiers are invalid or if more than 100 + identifiers are specified. + """ + ... + +def list_users(page_token=..., max_results=..., app=...): + """Retrieves a page of user accounts from a Firebase project. + + The ``page_token`` argument governs the starting point of the page. The ``max_results`` + argument governs the maximum number of user accounts that may be included in the returned page. + This function never returns None. If there are no user accounts in the Firebase project, this + returns an empty page. 
+ + Args: + page_token: A non-empty page token string, which indicates the starting point of the page + (optional). Defaults to ``None``, which will retrieve the first page of users. + max_results: A positive integer indicating the maximum number of users to include in the + returned page (optional). Defaults to 1000, which is also the maximum number allowed. + app: An App instance (optional). + + Returns: + ListUsersPage: A page of user accounts. + + Raises: + ValueError: If ``max_results`` or ``page_token`` are invalid. + FirebaseError: If an error occurs while retrieving the user accounts. + """ + ... + +def create_user(**kwargs): + """Creates a new user account with the specified properties. + + Args: + **kwargs: A series of keyword arguments (optional). + + Keyword Args: + uid: User ID to assign to the newly created user (optional). + display_name: The user's display name (optional). + email: The user's primary email (optional). + email_verified: A boolean indicating whether or not the user's primary email is + verified (optional). + phone_number: The user's primary phone number (optional). + photo_url: The user's photo URL (optional). + password: The user's raw, unhashed password. (optional). + disabled: A boolean indicating whether or not the user account is disabled (optional). + app: An App instance (optional). + + Returns: + UserRecord: A user record instance for the newly created user. + + Raises: + ValueError: If the specified user properties are invalid. + FirebaseError: If an error occurs while creating the user account. + """ + ... + +def update_user(uid, **kwargs): + """Updates an existing user account with the specified properties. + + Args: + uid: A user ID string. + **kwargs: A series of keyword arguments (optional). + + Keyword Args: + display_name: The user's display name (optional). Can be removed by explicitly passing + ``auth.DELETE_ATTRIBUTE``. + email: The user's primary email (optional). 
+ email_verified: A boolean indicating whether or not the user's primary email is + verified (optional). + phone_number: The user's primary phone number (optional). Can be removed by explicitly + passing ``auth.DELETE_ATTRIBUTE``. + photo_url: The user's photo URL (optional). Can be removed by explicitly passing + ``auth.DELETE_ATTRIBUTE``. + password: The user's raw, unhashed password. (optional). + disabled: A boolean indicating whether or not the user account is disabled (optional). + custom_claims: A dictionary or a JSON string containing the custom claims to be set on the + user account (optional). To remove all custom claims, pass ``auth.DELETE_ATTRIBUTE``. + valid_since: An integer signifying the seconds since the epoch (optional). This field is + set by ``revoke_refresh_tokens`` and it is discouraged to set this field directly. + app: An App instance (optional). + + Returns: + UserRecord: An updated user record instance for the user. + + Raises: + ValueError: If the specified user ID or properties are invalid. + FirebaseError: If an error occurs while updating the user account. + """ + ... + +def set_custom_user_claims(uid, custom_claims, app=...): # -> None: + """Sets additional claims on an existing user account. + + Custom claims set via this function can be used to define user roles and privilege levels. + These claims propagate to all the devices where the user is already signed in (after token + expiration or when token refresh is forced), and next time the user signs in. The claims + can be accessed via the user's ID token JWT. If a reserved OIDC claim is specified (sub, iat, + iss, etc), an error is thrown. Claims payload must also not be larger then 1000 characters + when serialized into a JSON string. + + Args: + uid: A user ID string. + custom_claims: A dictionary or a JSON string of custom claims. Pass None to unset any + claims set previously. + app: An App instance (optional). 
+ + Raises: + ValueError: If the specified user ID or the custom claims are invalid. + FirebaseError: If an error occurs while updating the user account. + """ + ... + +def delete_user(uid, app=...): # -> None: + """Deletes the user identified by the specified user ID. + + Args: + uid: A user ID string. + app: An App instance (optional). + + Raises: + ValueError: If the user ID is None, empty or malformed. + FirebaseError: If an error occurs while deleting the user account. + """ + ... + +def delete_users(uids, app=...): + """Deletes the users specified by the given identifiers. + + Deleting a non-existing user does not generate an error (the method is + idempotent.) Non-existing users are considered to be successfully deleted + and are therefore included in the `DeleteUserResult.success_count` value. + + A maximum of 1000 identifiers may be supplied. If more than 1000 + identifiers are supplied, this method raises a `ValueError`. + + Args: + uids: A list of strings indicating the uids of the users to be deleted. + Must have <= 1000 entries. + app: An App instance (optional). + + Returns: + DeleteUsersResult: The total number of successful/failed deletions, as + well as the array of errors that correspond to the failed deletions. + + Raises: + ValueError: If any of the identifiers are invalid or if more than 1000 + identifiers are specified. + """ + ... + +def import_users(users, hash_alg=..., app=...): + """Imports the specified list of users into Firebase Auth. + + At most 1000 users can be imported at a time. This operation is optimized for bulk imports and + will ignore checks on identifier uniqueness which could result in duplications. The + ``hash_alg`` parameter must be specified when importing users with passwords. Refer to the + ``UserImportHash`` class for supported hash algorithms. + + Args: + users: A list of ``ImportUserRecord`` instances to import. Length of the list must not + exceed 1000. + hash_alg: A ``UserImportHash`` object (optional). 
Required when importing users with + passwords. + app: An App instance (optional). + + Returns: + UserImportResult: An object summarizing the result of the import operation. + + Raises: + ValueError: If the provided arguments are invalid. + FirebaseError: If an error occurs while importing users. + """ + ... + +def generate_password_reset_link(email, action_code_settings=..., app=...): + """Generates the out-of-band email action link for password reset flows for the specified email + address. + + Args: + email: The email of the user whose password is to be reset. + action_code_settings: ``ActionCodeSettings`` instance (optional). Defines whether + the link is to be handled by a mobile app and the additional state information to be + passed in the deep link. + app: An App instance (optional). + Returns: + link: The password reset link created by the API + + Raises: + ValueError: If the provided arguments are invalid + FirebaseError: If an error occurs while generating the link + """ + ... + +def generate_email_verification_link(email, action_code_settings=..., app=...): + """Generates the out-of-band email action link for email verification flows for the specified + email address. + + Args: + email: The email of the user to be verified. + action_code_settings: ``ActionCodeSettings`` instance (optional). Defines whether + the link is to be handled by a mobile app and the additional state information to be + passed in the deep link. + app: An App instance (optional). + Returns: + link: The email verification link created by the API + + Raises: + ValueError: If the provided arguments are invalid + FirebaseError: If an error occurs while generating the link + """ + ... + +def generate_sign_in_with_email_link(email, action_code_settings, app=...): + """Generates the out-of-band email action link for email link sign-in flows, using the action + code settings provided. + + Args: + email: The email of the user signing in. 
+ action_code_settings: ``ActionCodeSettings`` instance. Defines whether + the link is to be handled by a mobile app and the additional state information to be + passed in the deep link. + app: An App instance (optional). + + Returns: + link: The email sign-in link created by the API + + Raises: + ValueError: If the provided arguments are invalid + FirebaseError: If an error occurs while generating the link + """ + ... + +def get_oidc_provider_config(provider_id, app=...): + """Returns the ``OIDCProviderConfig`` with the given ID. + + Args: + provider_id: Provider ID string. + app: An App instance (optional). + + Returns: + OIDCProviderConfig: An OIDC provider config instance. + + Raises: + ValueError: If the provider ID is invalid, empty or does not have ``oidc.`` prefix. + ConfigurationNotFoundError: If no OIDC provider is available with the given identifier. + FirebaseError: If an error occurs while retrieving the OIDC provider. + """ + ... + +def create_oidc_provider_config(provider_id, client_id, issuer, display_name=..., enabled=..., client_secret=..., id_token_response_type=..., code_response_type=..., app=...): + """Creates a new OIDC provider config from the given parameters. + + OIDC provider support requires Google Cloud's Identity Platform (GCIP). To learn more about + GCIP, including pricing and features, see https://cloud.google.com/identity-platform. + + Args: + provider_id: Provider ID string. Must have the prefix ``oidc.``. + client_id: Client ID of the new config. + issuer: Issuer of the new config. Must be a valid URL. + display_name: The user-friendly display name to the current configuration (optional). + This name is also used as the provider label in the Cloud Console. + enabled: A boolean indicating whether the provider configuration is enabled or disabled + (optional). A user cannot sign in using a disabled provider. + app: An App instance (optional). + client_secret: A string which sets the client secret for the new provider. 
+ This is required for the code flow. + code_response_type: A boolean which sets whether to enable the code response flow for the + new provider. By default, this is not enabled if no response type is specified. + A client secret must be set for this response type. + Having both the code and ID token response flows is currently not supported. + id_token_response_type: A boolean which sets whether to enable the ID token response flow + for the new provider. By default, this is enabled if no response type is specified. + Having both the code and ID token response flows is currently not supported. + + Returns: + OIDCProviderConfig: The newly created OIDC provider config instance. + + Raises: + ValueError: If any of the specified input parameters are invalid. + FirebaseError: If an error occurs while creating the new OIDC provider config. + """ + ... + +def update_oidc_provider_config(provider_id, client_id=..., issuer=..., display_name=..., enabled=..., client_secret=..., id_token_response_type=..., code_response_type=..., app=...): + """Updates an existing OIDC provider config with the given parameters. + + Args: + provider_id: Provider ID string. Must have the prefix ``oidc.``. + client_id: Client ID of the new config (optional). + issuer: Issuer of the new config (optional). Must be a valid URL. + display_name: The user-friendly display name of the current configuration (optional). + Pass ``auth.DELETE_ATTRIBUTE`` to delete the current display name. + enabled: A boolean indicating whether the provider configuration is enabled or disabled + (optional). + app: An App instance (optional). + client_secret: A string which sets the client secret for the new provider. + This is required for the code flow. + code_response_type: A boolean which sets whether to enable the code response flow for the + new provider. By default, this is not enabled if no response type is specified. + A client secret must be set for this response type. 
+ Having both the code and ID token response flows is currently not supported. + id_token_response_type: A boolean which sets whether to enable the ID token response flow + for the new provider. By default, this is enabled if no response type is specified. + Having both the code and ID token response flows is currently not supported. + + Returns: + OIDCProviderConfig: The updated OIDC provider config instance. + + Raises: + ValueError: If any of the specified input parameters are invalid. + FirebaseError: If an error occurs while updating the OIDC provider config. + """ + ... + +def delete_oidc_provider_config(provider_id, app=...): # -> None: + """Deletes the ``OIDCProviderConfig`` with the given ID. + + Args: + provider_id: Provider ID string. + app: An App instance (optional). + + Raises: + ValueError: If the provider ID is invalid, empty or does not have ``oidc.`` prefix. + ConfigurationNotFoundError: If no OIDC provider is available with the given identifier. + FirebaseError: If an error occurs while deleting the OIDC provider. + """ + ... + +def list_oidc_provider_configs(page_token=..., max_results=..., app=...): + """Retrieves a page of OIDC provider configs from a Firebase project. + + The ``page_token`` argument governs the starting point of the page. The ``max_results`` + argument governs the maximum number of configs that may be included in the returned + page. This function never returns ``None``. If there are no OIDC configs in the Firebase + project, this returns an empty page. + + Args: + page_token: A non-empty page token string, which indicates the starting point of the + page (optional). Defaults to ``None``, which will retrieve the first page of users. + max_results: A positive integer indicating the maximum number of users to include in + the returned page (optional). Defaults to 100, which is also the maximum number + allowed. + app: An App instance (optional). + + Returns: + ListProviderConfigsPage: A page of OIDC provider config instances. 
+ + Raises: + ValueError: If ``max_results`` or ``page_token`` are invalid. + FirebaseError: If an error occurs while retrieving the OIDC provider configs. + """ + ... + +def get_saml_provider_config(provider_id, app=...): + """Returns the ``SAMLProviderConfig`` with the given ID. + + Args: + provider_id: Provider ID string. + app: An App instance (optional). + + Returns: + SAMLProviderConfig: A SAML provider config instance. + + Raises: + ValueError: If the provider ID is invalid, empty or does not have ``saml.`` prefix. + ConfigurationNotFoundError: If no SAML provider is available with the given identifier. + FirebaseError: If an error occurs while retrieving the SAML provider. + """ + ... + +def create_saml_provider_config(provider_id, idp_entity_id, sso_url, x509_certificates, rp_entity_id, callback_url, display_name=..., enabled=..., app=...): + """Creates a new SAML provider config from the given parameters. + + SAML provider support requires Google Cloud's Identity Platform (GCIP). To learn more about + GCIP, including pricing and features, see https://cloud.google.com/identity-platform. + + Args: + provider_id: Provider ID string. Must have the prefix ``saml.``. + idp_entity_id: The SAML IdP entity identifier. + sso_url: The SAML IdP SSO URL. Must be a valid URL. + x509_certificates: The list of SAML IdP X.509 certificates issued by CA for this provider. + Multiple certificates are accepted to prevent outages during IdP key rotation (for + example ADFS rotates every 10 days). When the Auth server receives a SAML response, it + will match the SAML response with the certificate on record. Otherwise the response is + rejected. Developers are expected to manage the certificate updates as keys are + rotated. + rp_entity_id: The SAML relying party (service provider) entity ID. This is defined by the + developer but needs to be provided to the SAML IdP. + callback_url: Callback URL string. 
This is fixed and must always be the same as the OAuth + redirect URL provisioned by Firebase Auth, unless a custom authDomain is used. + display_name: The user-friendly display name to the current configuration (optional). This + name is also used as the provider label in the Cloud Console. + enabled: A boolean indicating whether the provider configuration is enabled or disabled + (optional). A user cannot sign in using a disabled provider. + app: An App instance (optional). + + Returns: + SAMLProviderConfig: The newly created SAML provider config instance. + + Raises: + ValueError: If any of the specified input parameters are invalid. + FirebaseError: If an error occurs while creating the new SAML provider config. + """ + ... + +def update_saml_provider_config(provider_id, idp_entity_id=..., sso_url=..., x509_certificates=..., rp_entity_id=..., callback_url=..., display_name=..., enabled=..., app=...): + """Updates an existing SAML provider config with the given parameters. + + Args: + provider_id: Provider ID string. Must have the prefix ``saml.``. + idp_entity_id: The SAML IdP entity identifier (optional). + sso_url: The SAML IdP SSO URL. Must be a valid URL (optional). + x509_certificates: The list of SAML IdP X.509 certificates issued by CA for this + provider (optional). + rp_entity_id: The SAML relying party entity ID (optional). + callback_url: Callback URL string (optional). + display_name: The user-friendly display name of the current configuration (optional). + Pass ``auth.DELETE_ATTRIBUTE`` to delete the current display name. + enabled: A boolean indicating whether the provider configuration is enabled or disabled + (optional). + app: An App instance (optional). + + Returns: + SAMLProviderConfig: The updated SAML provider config instance. + + Raises: + ValueError: If any of the specified input parameters are invalid. + FirebaseError: If an error occurs while updating the SAML provider config. + """ + ... 
+ +def delete_saml_provider_config(provider_id, app=...): # -> None: + """Deletes the ``SAMLProviderConfig`` with the given ID. + + Args: + provider_id: Provider ID string. + app: An App instance (optional). + + Raises: + ValueError: If the provider ID is invalid, empty or does not have ``saml.`` prefix. + ConfigurationNotFoundError: If no SAML provider is available with the given identifier. + FirebaseError: If an error occurs while deleting the SAML provider. + """ + ... + +def list_saml_provider_configs(page_token=..., max_results=..., app=...): + """Retrieves a page of SAML provider configs from a Firebase project. + + The ``page_token`` argument governs the starting point of the page. The ``max_results`` + argument governs the maximum number of configs that may be included in the returned + page. This function never returns ``None``. If there are no SAML configs in the Firebase + project, this returns an empty page. + + Args: + page_token: A non-empty page token string, which indicates the starting point of the + page (optional). Defaults to ``None``, which will retrieve the first page of users. + max_results: A positive integer indicating the maximum number of users to include in + the returned page (optional). Defaults to 100, which is also the maximum number + allowed. + app: An App instance (optional). + + Returns: + ListProviderConfigsPage: A page of SAML provider config instances. + + Raises: + ValueError: If ``max_results`` or ``page_token`` are invalid. + FirebaseError: If an error occurs while retrieving the SAML provider configs. + """ + ... + diff --git a/typings/firebase_admin/credentials.pyi b/typings/firebase_admin/credentials.pyi new file mode 100644 index 0000000..eab4a3c --- /dev/null +++ b/typings/firebase_admin/credentials.pyi @@ -0,0 +1,155 @@ +""" +This type stub file was generated by pyright. +""" + +from google.auth.credentials import Credentials as GoogleAuthCredentials + +"""Firebase credentials module.""" +_request = ... 
+_scopes = ... +AccessTokenInfo = ... +class Base: + """Provides OAuth2 access tokens for accessing Firebase services.""" + def get_access_token(self): # -> AccessTokenInfo: + """Fetches a Google OAuth2 access token using this credential instance. + + Returns: + AccessTokenInfo: An access token obtained using the credential. + """ + ... + + def get_credential(self): + """Returns the Google credential instance used for authentication.""" + ... + + + +class _ExternalCredentials(Base): + """A wrapper for google.auth.credentials.Credentials typed credential instances""" + def __init__(self, credential: GoogleAuthCredentials) -> None: + ... + + def get_credential(self): # -> Credentials: + """Returns the underlying Google Credential + + Returns: + google.auth.credentials.Credentials: A Google Auth credential instance.""" + ... + + + +class Certificate(Base): + """A credential initialized from a JSON certificate keyfile.""" + _CREDENTIAL_TYPE = ... + def __init__(self, cert) -> None: + """Initializes a credential from a Google service account certificate. + + Service account certificates can be downloaded as JSON files from the Firebase console. + To instantiate a credential from a certificate file, either specify the file path or a + dict representing the parsed contents of the file. + + Args: + cert: Path to a certificate file or a dict representing the contents of a certificate. + + Raises: + IOError: If the specified certificate file doesn't exist or cannot be read. + ValueError: If the specified certificate is invalid. + """ + ... + + @property + def project_id(self): # -> None: + ... + + @property + def signer(self): # -> Any: + ... + + @property + def service_account_email(self): # -> Any: + ... + + def get_credential(self): # -> Credentials: + """Returns the underlying Google credential. + + Returns: + google.auth.credentials.Credentials: A Google Auth credential instance.""" + ... 
+ + + +class ApplicationDefault(Base): + """A Google Application Default credential.""" + def __init__(self) -> None: + """Creates an instance that will use Application Default credentials. + + The credentials will be lazily initialized when get_credential() or + project_id() is called. See those methods for possible errors raised. + """ + ... + + def get_credential(self): # -> None: + """Returns the underlying Google credential. + + Raises: + google.auth.exceptions.DefaultCredentialsError: If Application Default + credentials cannot be initialized in the current environment. + Returns: + google.auth.credentials.Credentials: A Google Auth credential instance.""" + ... + + @property + def project_id(self): # -> str | None: + """Returns the project_id from the underlying Google credential. + + Raises: + google.auth.exceptions.DefaultCredentialsError: If Application Default + credentials cannot be initialized in the current environment. + Returns: + str: The project id.""" + ... + + + +class RefreshToken(Base): + """A credential initialized from an existing refresh token.""" + _CREDENTIAL_TYPE = ... + def __init__(self, refresh_token) -> None: + """Initializes a credential from a refresh token JSON file. + + The JSON must consist of client_id, client_secret and refresh_token fields. Refresh + token files are typically created and managed by the gcloud SDK. To instantiate + a credential from a refresh token file, either specify the file path or a dict + representing the parsed contents of the file. + + Args: + refresh_token: Path to a refresh token file or a dict representing the contents of a + refresh token file. + + Raises: + IOError: If the specified file doesn't exist or cannot be read. + ValueError: If the refresh token configuration is invalid. + """ + ... + + @property + def client_id(self): # -> None: + ... + + @property + def client_secret(self): # -> None: + ... + + @property + def refresh_token(self): # -> None: + ... 
+
+    def get_credential(self): # -> Credentials:
+        """Returns the underlying Google credential.
+
+        Returns:
+            google.auth.credentials.Credentials: A Google Auth credential instance."""
+        ...
+
+
+
diff --git a/typings/firebase_admin/db.pyi b/typings/firebase_admin/db.pyi
new file mode 100644
index 0000000..f97ff73
--- /dev/null
+++ b/typings/firebase_admin/db.pyi
@@ -0,0 +1,573 @@
+"""
+This type stub file was generated by pyright.
+"""
+
+from firebase_admin import _http_client, exceptions
+
+"""Firebase Realtime Database module.
+
+This module contains functions and classes that facilitate interacting with the Firebase Realtime
+Database. It supports basic data manipulation operations, as well as complex queries such as
+limit queries and range queries. However, it does not support realtime update notifications. This
+module uses the Firebase REST API underneath.
+"""
+_DB_ATTRIBUTE = ...
+_INVALID_PATH_CHARACTERS = ...
+_RESERVED_FILTERS = ...
+_USER_AGENT = ...
+_TRANSACTION_MAX_RETRIES = ...
+_EMULATOR_HOST_ENV_VAR = ...
+def reference(path=..., app=..., url=...): # -> Reference:
+    """Returns a database ``Reference`` representing the node at the specified path.
+
+    If no path is specified, this function returns a ``Reference`` that represents the database
+    root. By default, the returned References provide access to the Firebase Database specified at
+    app initialization. To connect to a different database instance in the same Firebase project,
+    specify the ``url`` parameter.
+
+    Args:
+        path: Path to a node in the Firebase realtime database (optional).
+        app: An App instance (optional).
+        url: Base URL of the Firebase Database instance (optional). When specified, takes
+            precedence over the ``databaseURL`` option set at app initialization.
+
+    Returns:
+        Reference: A newly initialized Reference.
+
+    Raises:
+        ValueError: If the specified path or app is invalid.
+    """
+    ...
+ +class Event: + """Represents a realtime update event received from the database.""" + def __init__(self, sse_event) -> None: + ... + + @property + def data(self): # -> Any: + """Parsed JSON data of this event.""" + ... + + @property + def path(self): # -> Any: + """Path of the database reference that triggered this event.""" + ... + + @property + def event_type(self): + """Event type string (put, patch).""" + ... + + + +class ListenerRegistration: + """Represents the addition of an event listener to a database reference.""" + def __init__(self, callback, sse) -> None: + """Initializes a new listener with given parameters. + + This is an internal API. Use the ``db.Reference.listen()`` method to start a + new listener. + + Args: + callback: The callback function to fire in case of event. + sse: A transport session to make requests with. + """ + ... + + def close(self): # -> None: + """Stops the event listener represented by this registration + + This closes the SSE HTTP connection, and joins the background thread. + """ + ... + + + +class Reference: + """Reference represents a node in the Firebase realtime database.""" + def __init__(self, **kwargs) -> None: + """Creates a new Reference using the provided parameters. + + This method is for internal use only. Use db.reference() to obtain an instance of + Reference. + """ + ... + + @property + def key(self): # -> str | None: + ... + + @property + def path(self): + ... + + @property + def parent(self): # -> Reference | None: + ... + + def child(self, path): # -> Reference: + """Returns a Reference to the specified child node. + + The path may point to an immediate child of the current Reference, or a deeply nested + child. Child paths must not begin with '/'. + + Args: + path: Path to the child node. + + Returns: + Reference: A database Reference representing the specified child node. + + Raises: + ValueError: If the child path is not a string, not well-formed or begins with '/'. + """ + ... 
+
+    def get(self, etag=..., shallow=...): # -> tuple[Any, Any]:
+        """Returns the value, and optionally the ETag, at the current location of the database.
+
+        Args:
+            etag: A boolean indicating whether the Etag value should be returned or not (optional).
+            shallow: A boolean indicating whether to execute a shallow read (optional). Shallow
+                reads do not retrieve the child nodes of the current database location. Cannot be
+                set to True if ``etag`` is also set to True.
+
+        Returns:
+            object: If etag is False returns the decoded JSON value of the current database location.
+                If etag is True, returns a 2-tuple consisting of the decoded JSON value and the Etag
+                associated with the current database location.
+
+        Raises:
+            ValueError: If both ``etag`` and ``shallow`` are set to True.
+            FirebaseError: If an error occurs while communicating with the remote database server.
+        """
+        ...
+
+    def get_if_changed(self, etag): # -> tuple[Literal[False], None, None] | tuple[Literal[True], Any, Any]:
+        """Gets data in this location only if the specified ETag does not match.
+
+        Args:
+            etag: The ETag value to be checked against the ETag of the current location.
+
+        Returns:
+            tuple: A 3-tuple consisting of a boolean, a decoded JSON value and an ETag. If the ETag
+                specified by the caller did not match, the boolean value will be True and the JSON
+                and ETag values would reflect the corresponding values in the database. If the ETag
+                matched, the boolean value will be False and the other elements of the tuple will be
+                None.
+
+        Raises:
+            ValueError: If the ETag is not a string.
+            FirebaseError: If an error occurs while communicating with the remote database server.
+        """
+        ...
+
+    def set(self, value): # -> None:
+        """Sets the data at this location to the given value.
+
+        The value must be JSON-serializable and not None.
+
+        Args:
+            value: JSON-serializable value to be set at this location.
+
+        Raises:
+            ValueError: If the provided value is None.
+            TypeError: If the value is not JSON-serializable.
+            FirebaseError: If an error occurs while communicating with the remote database server.
+        """
+        ...
+
+    def set_if_unchanged(self, expected_etag, value): # -> tuple[Literal[True], Any, Any] | tuple[Literal[False], Any, Any]:
+        """Conditionally sets the data at this location to the given value.
+
+        Sets the data at this location to the given value only if ``expected_etag`` is same as the
+        ETag value in the database.
+
+        Args:
+            expected_etag: Value of ETag we want to check.
+            value: JSON-serializable value to be set at this location.
+
+        Returns:
+            tuple: A 3-tuple consisting of a boolean, a decoded JSON value and an ETag. The boolean
+                indicates whether the set operation was successful or not. The decoded JSON and the
+                ETag corresponds to the latest value in this database location.
+
+        Raises:
+            ValueError: If the value is None, or if expected_etag is not a string.
+            FirebaseError: If an error occurs while communicating with the remote database server.
+        """
+        ...
+
+    def push(self, value=...): # -> Reference:
+        """Creates a new child node.
+
+        The optional value argument can be used to provide an initial value for the child node. If
+        no value is provided, child node will have empty string as the default value.
+
+        Args:
+            value: JSON-serializable initial value for the child node (optional).
+
+        Returns:
+            Reference: A Reference representing the newly created child node.
+
+        Raises:
+            ValueError: If the value is None.
+            TypeError: If the value is not JSON-serializable.
+            FirebaseError: If an error occurs while communicating with the remote database server.
+        """
+        ...
+
+    def update(self, value): # -> None:
+        """Updates the specified child keys of this Reference to the provided values.
+
+        Args:
+            value: A dictionary containing the child keys to update, and their new values.
+
+        Raises:
+            ValueError: If value is empty or not a dictionary.
+ FirebaseError: If an error occurs while communicating with the remote database server. + """ + ... + + def delete(self): # -> None: + """Deletes this node from the database. + + Raises: + FirebaseError: If an error occurs while communicating with the remote database server. + """ + ... + + def listen(self, callback): # -> ListenerRegistration: + """Registers the ``callback`` function to receive realtime updates. + + The specified callback function will get invoked with ``db.Event`` objects for each + realtime update received from the database. It will also get called whenever the SDK + reconnects to the server due to network issues or credential expiration. In general, + the OAuth2 credentials used to authorize connections to the server expire every hour. + Therefore clients should expect the ``callback`` to fire at least once every hour, even if + there are no updates in the database. + + This API is based on the event streaming support available in the Firebase REST API. Each + call to ``listen()`` starts a new HTTP connection and a background thread. This is an + experimental feature. It currently does not honor the auth overrides and timeout settings. + Cannot be used in thread-constrained environments like Google App Engine. + + Args: + callback: A function to be called when a data change is detected. + + Returns: + ListenerRegistration: An object that can be used to stop the event listener. + + Raises: + FirebaseError: If an error occurs while starting the initial HTTP connection. + """ + ... + + def transaction(self, transaction_update): # -> object: + """Atomically modifies the data at this location. + + Unlike a normal ``set()``, which just overwrites the data regardless of its previous state, + ``transaction()`` is used to modify the existing value to a new value, ensuring there are + no conflicts with other clients simultaneously writing to the same location. 
+
+        This is accomplished by passing an update function which is used to transform the current
+        value of this reference into a new value. If another client writes to this location before
+        the new value is successfully saved, the update function is called again with the new
+        current value, and the write will be retried. In case of repeated failures, this method
+        will retry the transaction up to 25 times before giving up and raising a
+        TransactionAbortedError. The update function may also force an early abort by raising an
+        exception instead of returning a value.
+
+        Args:
+            transaction_update: A function which will be passed the current data stored at this
+                location. The function should return the new value it would like written. If
+                an exception is raised, the transaction will be aborted, and the data at this
+                location will not be modified. The exceptions raised by this function are
+                propagated to the caller of the transaction method.
+
+        Returns:
+            object: New value of the current database Reference (only if the transaction commits).
+
+        Raises:
+            TransactionAbortedError: If the transaction aborts after exhausting all retry attempts.
+            ValueError: If transaction_update is not a function.
+        """
+        ...
+
+    def order_by_child(self, path): # -> Query:
+        """Returns a Query that orders data by child values.
+
+        Returned Query can be used to set additional parameters, and execute complex database
+        queries (e.g. limit queries, range queries).
+
+        Args:
+            path: Path to a valid child of the current Reference.
+
+        Returns:
+            Query: A database Query instance.
+
+        Raises:
+            ValueError: If the child path is not a string, not well-formed or None.
+        """
+        ...
+
+    def order_by_key(self): # -> Query:
+        """Creates a Query that orders data by key.
+
+        Returned Query can be used to set additional parameters, and execute complex database
+        queries (e.g. limit queries, range queries).
+
+        Returns:
+            Query: A database Query instance.
+        """
+        ...
+
+    def order_by_value(self): # -> Query:
+        """Creates a Query that orders data by value.
+
+        Returned Query can be used to set additional parameters, and execute complex database
+        queries (e.g. limit queries, range queries).
+
+        Returns:
+            Query: A database Query instance.
+        """
+        ...
+
+
+
+class Query:
+    """Represents a complex query that can be executed on a Reference.
+
+    Complex queries can consist of up to 2 components: a required ordering constraint, and an
+    optional filtering constraint. At the server, data is first sorted according to the given
+    ordering constraint (e.g. order by child). Then the filtering constraint (e.g. limit, range)
+    is applied on the sorted data to produce the final result. Despite the ordering constraint,
+    the final result is returned by the server as an unordered collection. Therefore the Query
+    interface performs another round of sorting at the client-side before returning the results
+    to the caller. These client-side sorted results are returned to the user as a Python
+    OrderedDict.
+    """
+    def __init__(self, **kwargs) -> None:
+        ...
+
+    def limit_to_first(self, limit): # -> Self:
+        """Creates a query with limit, and anchors it to the start of the window.
+
+        Args:
+            limit: The maximum number of child nodes to return.
+
+        Returns:
+            Query: The updated Query instance.
+
+        Raises:
+            ValueError: If the value is not an integer, or set_limit_last() was called previously.
+        """
+        ...
+
+    def limit_to_last(self, limit): # -> Self:
+        """Creates a query with limit, and anchors it to the end of the window.
+
+        Args:
+            limit: The maximum number of child nodes to return.
+
+        Returns:
+            Query: The updated Query instance.
+
+        Raises:
+            ValueError: If the value is not an integer, or set_limit_first() was called previously.
+        """
+        ...
+
+    def start_at(self, start): # -> Self:
+        """Sets the lower bound for a range query.
+
+        The Query will only return child nodes with a value greater than or equal to the specified
+        value.
+
+        Args:
+            start: JSON-serializable value to start at, inclusive.
+
+        Returns:
+            Query: The updated Query instance.
+
+        Raises:
+            ValueError: If the value is ``None``.
+        """
+        ...
+
+    def end_at(self, end): # -> Self:
+        """Sets the upper bound for a range query.
+
+        The Query will only return child nodes with a value less than or equal to the specified
+        value.
+
+        Args:
+            end: JSON-serializable value to end at, inclusive.
+
+        Returns:
+            Query: The updated Query instance.
+
+        Raises:
+            ValueError: If the value is ``None``.
+        """
+        ...
+
+    def equal_to(self, value): # -> Self:
+        """Sets an equals constraint on the Query.
+
+        The Query will only return child nodes whose value is equal to the specified value.
+
+        Args:
+            value: JSON-serializable value to query for.
+
+        Returns:
+            Query: The updated Query instance.
+
+        Raises:
+            ValueError: If the value is ``None``.
+        """
+        ...
+
+    def get(self): # -> OrderedDict[int, Any] | list[Any] | dict[Any, Any]:
+        """Executes this Query and returns the results.
+
+        The results will be returned as a sorted list or an OrderedDict.
+
+        Returns:
+            object: Decoded JSON result of the Query.
+
+        Raises:
+            FirebaseError: If an error occurs while communicating with the remote database server.
+        """
+        ...
+
+
+class TransactionAbortedError(exceptions.AbortedError):
+    """A transaction was aborted after exceeding the maximum number of retries."""
+    def __init__(self, message) -> None:
+        ...
+
+
+class _Sorter:
+    """Helper class for sorting query results."""
+    def __init__(self, results, order_by) -> None:
+        ...
+
+    def get(self): # -> OrderedDict[int, Any] | list[Any]:
+        ...
+
+
+class _SortEntry:
+    """A wrapper that is capable of sorting items in a dictionary."""
+    _type_none = ...
+    _type_bool_false = ...
+    _type_bool_true = ...
+    _type_numeric = ...
+    _type_string = ...
+    _type_object = ...
+    def __init__(self, key, value, order_by) -> None:
+        ...
+
+    @property
+    def key(self): # -> Any:
+        ...
+ + @property + def index(self): # -> Any | None: + ... + + @property + def index_type(self): # -> int: + ... + + @property + def value(self): # -> Any: + ... + + def __lt__(self, other) -> bool: + ... + + def __le__(self, other) -> bool: + ... + + def __gt__(self, other) -> bool: + ... + + def __ge__(self, other) -> bool: + ... + + def __eq__(self, other) -> bool: + ... + + + +class _DatabaseService: + """Service that maintains a collection of database clients.""" + _DEFAULT_AUTH_OVERRIDE = ... + def __init__(self, app) -> None: + ... + + def get_client(self, db_url=...): + """Creates a client based on the db_url. Clients may be cached.""" + ... + + def close(self): # -> None: + ... + + + +class _Client(_http_client.JsonHttpClient): + """HTTP client used to make REST calls. + + _Client maintains an HTTP session, and handles authenticating HTTP requests along with + marshalling and unmarshalling of JSON data. + """ + def __init__(self, credential, base_url, timeout, params=...) -> None: + """Creates a new _Client from the given parameters. + + This exists primarily to enable testing. For regular use, obtain _Client instances by + calling the from_app() class method. + + Args: + credential: A Google credential that can be used to authenticate requests. + base_url: A URL prefix to be added to all outgoing requests. This is typically the + Firebase Realtime Database URL. + timeout: HTTP request timeout in seconds. If set to None connections will never + timeout, which is the default behavior of the underlying requests library. + params: Dict of query parameters to add to all outgoing requests. + """ + ... + + def request(self, method, url, **kwargs): # -> Response: + """Makes an HTTP call using the Python requests library. + + Extends the request() method of the parent JsonHttpClient class. Handles default + params like auth overrides, and low-level exceptions. + + Args: + method: HTTP method name as a string (e.g. get, post). + url: URL path of the remote endpoint. 
This will be appended to the server's base URL. + **kwargs: An additional set of keyword arguments to be passed into requests API + (e.g. json, params). + + Returns: + Response: An HTTP response object. + + Raises: + FirebaseError: If an error occurs while making the HTTP call. + """ + ... + + def create_listener_session(self): # -> KeepAuthSession: + ... + + @classmethod + def handle_rtdb_error(cls, error): # -> DeadlineExceededError | UnavailableError | UnknownError: + """Converts an error encountered while calling RTDB into a FirebaseError.""" + ... + + + diff --git a/typings/firebase_admin/exceptions.pyi b/typings/firebase_admin/exceptions.pyi new file mode 100644 index 0000000..c01e956 --- /dev/null +++ b/typings/firebase_admin/exceptions.pyi @@ -0,0 +1,191 @@ +""" +This type stub file was generated by pyright. +""" + +"""Firebase Exceptions module. + +This module defines the base types for exceptions and the platform-wide error codes as outlined in +https://cloud.google.com/apis/design/errors. + +:class:`FirebaseError` is the parent class of all exceptions raised by the Admin SDK. It contains +the ``code``, ``http_response`` and ``cause`` properties common to all Firebase exception types. +Each exception also carries a message that outlines what went wrong. This can be logged for +audit or debugging purposes. + +When calling an Admin SDK API, developers can catch the parent ``FirebaseError`` and +inspect its ``code`` to implement fine-grained error handling. Alternatively, developers can +catch one or more subtypes of ``FirebaseError``. Under normal conditions, any given API can raise +only a small subset of the available exception subtypes. However, the SDK also exposes rare error +conditions like connection timeouts and other I/O errors as instances of ``FirebaseError``. +Therefore it is always a good idea to have a handler specified for ``FirebaseError``, after all the +subtype error handlers. +""" +INVALID_ARGUMENT = ... +FAILED_PRECONDITION = ... 
+OUT_OF_RANGE = ... +UNAUTHENTICATED = ... +PERMISSION_DENIED = ... +NOT_FOUND = ... +CONFLICT = ... +ABORTED = ... +ALREADY_EXISTS = ... +RESOURCE_EXHAUSTED = ... +CANCELLED = ... +DATA_LOSS = ... +UNKNOWN = ... +INTERNAL = ... +UNAVAILABLE = ... +DEADLINE_EXCEEDED = ... +class FirebaseError(Exception): + """Base class for all errors raised by the Admin SDK. + + Args: + code: A string error code that represents the type of the exception. Possible error + codes are defined in https://cloud.google.com/apis/design/errors#handling_errors. + message: A human-readable error message string. + cause: The exception that caused this error (optional). + http_response: If this error was caused by an HTTP error response, this property is + set to the ``requests.Response`` object that represents the HTTP response (optional). + See https://docs.python-requests.org/en/master/api/#requests.Response for details of + this object. + """ + def __init__(self, code, message, cause=..., http_response=...) -> None: + ... + + @property + def code(self): # -> Any: + ... + + @property + def cause(self): # -> None: + ... + + @property + def http_response(self): # -> None: + ... + + + +class InvalidArgumentError(FirebaseError): + """Client specified an invalid argument.""" + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class FailedPreconditionError(FirebaseError): + """Request can not be executed in the current system state, such as deleting a non-empty + directory.""" + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class OutOfRangeError(FirebaseError): + """Client specified an invalid range.""" + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class UnauthenticatedError(FirebaseError): + """Request not authenticated due to missing, invalid, or expired OAuth token.""" + def __init__(self, message, cause=..., http_response=...) -> None: + ... 
+ + + +class PermissionDeniedError(FirebaseError): + """Client does not have sufficient permission. + + This can happen because the OAuth token does not have the right scopes, the client doesn't + have permission, or the API has not been enabled for the client project. + """ + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class NotFoundError(FirebaseError): + """A specified resource is not found, or the request is rejected by undisclosed reasons, such + as whitelisting.""" + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class ConflictError(FirebaseError): + """Concurrency conflict, such as read-modify-write conflict.""" + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class AbortedError(FirebaseError): + """Concurrency conflict, such as read-modify-write conflict.""" + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class AlreadyExistsError(FirebaseError): + """The resource that a client tried to create already exists.""" + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class ResourceExhaustedError(FirebaseError): + """Either out of resource quota or reaching rate limiting.""" + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class CancelledError(FirebaseError): + """Request cancelled by the client.""" + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class DataLossError(FirebaseError): + """Unrecoverable data loss or data corruption.""" + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class UnknownError(FirebaseError): + """Unknown server error.""" + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class InternalError(FirebaseError): + """Internal server error.""" + def __init__(self, message, cause=..., http_response=...) -> None: + ... 
+ + + +class UnavailableError(FirebaseError): + """Service unavailable. Typically the server is down.""" + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + +class DeadlineExceededError(FirebaseError): + """Request deadline exceeded. + + This will happen only if the caller sets a deadline that is shorter than the method's + default deadline (i.e. requested deadline is not enough for the server to process the + request) and the request did not finish within the deadline. + """ + def __init__(self, message, cause=..., http_response=...) -> None: + ... + + + diff --git a/typings/firebase_admin/firestore.pyi b/typings/firebase_admin/firestore.pyi new file mode 100644 index 0000000..66ceb79 --- /dev/null +++ b/typings/firebase_admin/firestore.pyi @@ -0,0 +1,48 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import Optional +from firebase_admin import App +from google.cloud import firestore + +"""Cloud Firestore module. + +This module contains utilities for accessing the Google Cloud Firestore databases associated with +Firebase apps. This requires the ``google-cloud-firestore`` Python module. +""" +existing = ... +_FIRESTORE_ATTRIBUTE = ... +def client(app: Optional[App] = ..., database_id: Optional[str] = ...) -> firestore.Client: + """Returns a client that can be used to interact with Google Cloud Firestore. + + Args: + app: An App instance (optional). + database_id: The database ID of the Google Cloud Firestore database to be used. + Defaults to the default Firestore database ID if not specified or an empty string + (optional). + + Returns: + google.cloud.firestore.Firestore: A `Firestore Client`_. + + Raises: + ValueError: If the specified database ID is not a valid string, or if a project ID is not + specified either via options, credentials or environment variables, or if the specified + project ID is not a valid string. + + .. 
_Firestore Client: https://cloud.google.com/python/docs/reference/firestore/latest/\
+        google.cloud.firestore_v1.client.Client
+    """
+    ...
+
+class _FirestoreService:
+    """Service that maintains a collection of firestore clients."""
+    def __init__(self, app: App) -> None:
+        ...
+
+    def get_client(self, database_id: Optional[str]) -> firestore.Client:
+        """Creates a client based on the database_id. These clients are cached."""
+        ...
+
+
diff --git a/typings/firebase_admin/firestore_async.pyi b/typings/firebase_admin/firestore_async.pyi
new file mode 100644
index 0000000..6488317
--- /dev/null
+++ b/typings/firebase_admin/firestore_async.pyi
@@ -0,0 +1,48 @@
+"""
+This type stub file was generated by pyright.
+"""
+
+from typing import Optional
+from firebase_admin import App
+from google.cloud import firestore
+
+"""Cloud Firestore Async module.
+
+This module contains utilities for asynchronously accessing the Google Cloud Firestore databases
+associated with Firebase apps. This requires the ``google-cloud-firestore`` Python module.
+"""
+existing = ...
+_FIRESTORE_ASYNC_ATTRIBUTE: str = ...
+def client(app: Optional[App] = ..., database_id: Optional[str] = ...) -> firestore.AsyncClient:
+    """Returns an async client that can be used to interact with Google Cloud Firestore.
+
+    Args:
+        app: An App instance (optional).
+        database_id: The database ID of the Google Cloud Firestore database to be used.
+            Defaults to the default Firestore database ID if not specified or an empty string
+            (optional).
+
+    Returns:
+        google.cloud.firestore.Firestore_Async: A `Firestore Async Client`_.
+
+    Raises:
+        ValueError: If the specified database ID is not a valid string, or if a project ID is not
+            specified either via options, credentials or environment variables, or if the specified
+            project ID is not a valid string.
+
+    .. _Firestore Async Client: https://cloud.google.com/python/docs/reference/firestore/latest/\
+        google.cloud.firestore_v1.async_client.AsyncClient
+    """
+    ...
+ +class _FirestoreAsyncService: + """Service that maintains a collection of firestore async clients.""" + def __init__(self, app: App) -> None: + ... + + def get_client(self, database_id: Optional[str]) -> firestore.AsyncClient: + """Creates an async client based on the database_id. These clients are cached.""" + ... + + + diff --git a/typings/firebase_admin/functions.pyi b/typings/firebase_admin/functions.pyi new file mode 100644 index 0000000..6f68652 --- /dev/null +++ b/typings/firebase_admin/functions.pyi @@ -0,0 +1,226 @@ +""" +This type stub file was generated by pyright. +""" + +from datetime import datetime +from typing import Any, Dict, Optional +from dataclasses import dataclass +from firebase_admin import App + +"""Firebase Functions module.""" +_FUNCTIONS_ATTRIBUTE = ... +__all__ = ['TaskOptions', 'task_queue'] +_CLOUD_TASKS_API_RESOURCE_PATH = ... +_CLOUD_TASKS_API_URL_FORMAT = ... +_FIREBASE_FUNCTION_URL_FORMAT = ... +_FUNCTIONS_HEADERS = ... +_DEFAULT_LOCATION = ... +def task_queue(function_name: str, extension_id: Optional[str] = ..., app: Optional[App] = ...) -> TaskQueue: + """Creates a reference to a TaskQueue for a given function name. + + The function name can be either: + 1. A fully qualified function resource name: + `projects/{project-id}/locations/{location-id}/functions/{function-name}` + + 2. A partial resource name with location and function name, in which case + the runtime project ID is used: + `locations/{location-id}/functions/{function-name}` + + 3. A partial function name, in which case the runtime project ID and the + default location, `us-central1`, is used: + `{function-name}` + + Args: + function_name: Name of the function. + extension_id: Firebase extension ID (optional). + app: An App instance (optional). + + Returns: + TaskQueue: A TaskQueue instance. + + Raises: + ValueError: If the input arguments are invalid. + """ + ... 
+ +class _FunctionsService: + """Service class that implements Firebase Functions functionality.""" + def __init__(self, app: App) -> None: + ... + + def task_queue(self, function_name: str, extension_id: Optional[str] = ...) -> TaskQueue: + """Creates a TaskQueue instance.""" + ... + + @classmethod + def handle_functions_error(cls, error: Any): # -> DeadlineExceededError | UnavailableError | UnknownError: + """Handles errors received from the Cloud Functions API.""" + ... + + + +class TaskQueue: + """TaskQueue class that implements Firebase Cloud Tasks Queues functionality.""" + def __init__(self, function_name: str, extension_id: Optional[str], project_id, credential, http_client) -> None: + ... + + def enqueue(self, task_data: Any, opts: Optional[TaskOptions] = ...) -> str: + """Creates a task and adds it to the queue. Tasks cannot be updated after creation. + + This action requires `cloudtasks.tasks.create` IAM permission on the service account. + + Args: + task_data: The data payload of the task. + opts: Options when enqueuing a new task (optional). + + Raises: + FirebaseError: If an error occurs while requesting the task to be queued by + the Cloud Functions service. + ValueError: If the input arguments are invalid. + + Returns: + str: The ID of the task relative to this queue. + """ + ... + + def delete(self, task_id: str) -> None: + """Deletes an enqueued task if it has not yet started. + + This action requires `cloudtasks.tasks.delete` IAM permission on the service account. + + Args: + task_id: The ID of the task relative to this queue. + + Raises: + FirebaseError: If an error occurs while requesting the task to be deleted by + the Cloud Functions service. + ValueError: If the input arguments are invalid. + """ + ... 
+ + + +class _Validators: + """A collection of data validation utilities.""" + @classmethod + def check_non_empty_string(cls, label: str, value: Any): # -> None: + """Checks if given value is a non-empty string and throws error if not.""" + ... + + @classmethod + def is_non_empty_string(cls, value: Any): # -> bool: + """Checks if given value is a non-empty string and returns bool.""" + ... + + @classmethod + def is_task_id(cls, task_id: Any): # -> bool: + """Checks if given value is a valid task id.""" + ... + + @classmethod + def is_url(cls, url: Any): # -> bool: + """Checks if given value is a valid url.""" + ... + + + +@dataclass +class TaskOptions: + """Task Options that can be applied to a Task. + + Args: + schedule_delay_seconds: The number of seconds after the current time at which to attempt or + retry the task. Should only be set if ``schedule_time`` is not set. + + schedule_time: The time when the task is scheduled to be attempted or retried. Should only + be set if ``schedule_delay_seconds`` is not set. + + dispatch_deadline_seconds: The deadline for requests sent to the worker. If the worker does + not respond by this deadline then the request is cancelled and the attempt is marked as + a ``DEADLINE_EXCEEDED`` failure. Cloud Tasks will retry the task according to the + ``RetryConfig``. The default is 10 minutes. The deadline must be in the range of 15 + seconds and 30 minutes (1800 seconds). + + task_id: The ID to use for the enqueued task. If not provided, one will be automatically + generated. + + If provided, an explicitly specified task ID enables task de-duplication. + Task IDs should be strings that contain only letters ([A-Za-z]), numbers ([0-9]), + hyphens (-), and underscores (_) with a maximum length of 500 characters. If a task's + ID is identical to that of an existing task or a task that was deleted or executed + recently then the call will throw an error with code `functions/task-already-exists`. 
+ Another task with the same ID can't be created for ~1hour after the original task was + deleted or executed. + + Because there is an extra lookup cost to identify duplicate task IDs, setting ID + significantly increases latency. + + Also, note that the infrastructure relies on an approximately uniform distribution + of task IDs to store and serve tasks efficiently. For this reason, using hashed strings + for the task ID or for the prefix of the task ID is recommended. Choosing task IDs that + are sequential or have sequential prefixes, for example using a timestamp, causes an + increase in latency and error rates in all task commands. + + Push IDs from the Firebase Realtime Database make poor IDs because they are based on + timestamps and will cause contention (slowdowns) in your task queue. Reversed push IDs + however form a perfect distribution and are an ideal key. To reverse a string in Python + use ``reversedString = someString[::-1]`` + + headers: HTTP request headers to include in the request to the task queue function. These + headers represent a subset of the headers that will accompany the task's HTTP request. + Some HTTP request headers will be ignored or replaced: `Authorization`, `Host`, + `Content-Length`, `User-Agent` and others cannot be overridden. + + A complete list of these ignored or replaced headers can be found in the following + definition of the HttpRequest.headers property: + https://cloud.google.com/tasks/docs/reference/rest/v2/projects.locations.queues.tasks#httprequest + + By default, Content-Type is set to 'application/json'. + + The size of the headers must be less than 80KB. + + uri: The full URL that the request will be sent to. Must be a valid RFC3986 https or + http URL. + """ + schedule_delay_seconds: Optional[int] = ... + schedule_time: Optional[datetime] = ... + dispatch_deadline_seconds: Optional[int] = ... + task_id: Optional[str] = ... + headers: Optional[Dict[str, str]] = ... + uri: Optional[str] = ... 
+ + +@dataclass +class Task: + """Contains the relevant fields for enqueueing tasks that trigger Cloud Functions. + + This is a limited subset of the Cloud Functions `Task` resource. See the following + page for definitions of this class's properties: + https://cloud.google.com/tasks/docs/reference/rest/v2/projects.locations.queues.tasks#resource:-task + + Args: + httpRequest: The request to be made by the task worker. + name: The name of the function. See the Cloud docs for the format of this property. + schedule_time: The time when the task is scheduled to be attempted or retried. + dispatch_deadline: The deadline for requests sent to the worker. + """ + http_request: Dict[str, Optional[str | dict]] + name: Optional[str] = ... + schedule_time: Optional[str] = ... + dispatch_deadline: Optional[str] = ... + + +@dataclass +class Resource: + """Contains the parsed address of a resource. + + Args: + resource_id: The ID of the resource. + project_id: The project ID of the resource. + location_id: The location ID of the resource. + """ + resource_id: str + project_id: Optional[str] = ... + location_id: Optional[str] = ... + + diff --git a/typings/firebase_admin/instance_id.pyi b/typings/firebase_admin/instance_id.pyi new file mode 100644 index 0000000..0b7e554 --- /dev/null +++ b/typings/firebase_admin/instance_id.pyi @@ -0,0 +1,41 @@ +""" +This type stub file was generated by pyright. +""" + +"""Firebase Instance ID module. + +This module enables deleting instance IDs associated with Firebase projects. +""" +_IID_SERVICE_URL = ... +_IID_ATTRIBUTE = ... +def delete_instance_id(instance_id, app=...): # -> None: + """Deletes the specified instance ID and the associated data from Firebase. + + Note that Google Analytics for Firebase uses its own form of Instance ID to + keep track of analytics data. Therefore deleting a regular Instance ID does + not delete Analytics data. See `Delete an Instance ID`_ for more information. 
+ + Args: + instance_id: A non-empty instance ID string. + app: An App instance (optional). + + Raises: + InstanceIdError: If an error occurs while invoking the backend instance ID service. + ValueError: If the specified instance ID or app is invalid. + + .. _Delete an Instance ID: https://firebase.google.com/support/privacy\ + /manage-iids#delete_an_instance_id + """ + ... + +class _InstanceIdService: + """Provides methods for interacting with the remote instance ID service.""" + error_codes = ... + def __init__(self, app) -> None: + ... + + def delete_instance_id(self, instance_id): # -> None: + ... + + + diff --git a/typings/firebase_admin/messaging.pyi b/typings/firebase_admin/messaging.pyi new file mode 100644 index 0000000..2050069 --- /dev/null +++ b/typings/firebase_admin/messaging.pyi @@ -0,0 +1,285 @@ +""" +This type stub file was generated by pyright. +""" + +from firebase_admin import _messaging_encoder, _messaging_utils + +"""Firebase Cloud Messaging module.""" +_MESSAGING_ATTRIBUTE = ... 
+__all__ = ['AndroidConfig', 'AndroidFCMOptions', 'AndroidNotification', 'APNSConfig', 'APNSFCMOptions', 'APNSPayload', 'Aps', 'ApsAlert', 'BatchResponse', 'CriticalSound', 'ErrorInfo', 'FCMOptions', 'LightSettings', 'Message', 'MulticastMessage', 'Notification', 'QuotaExceededError', 'SenderIdMismatchError', 'SendResponse', 'ThirdPartyAuthError', 'TopicManagementResponse', 'UnregisteredError', 'WebpushConfig', 'WebpushFCMOptions', 'WebpushNotification', 'WebpushNotificationAction', 'send', 'send_all', 'send_multicast', 'send_each', 'send_each_for_multicast', 'subscribe_to_topic', 'unsubscribe_from_topic'] +AndroidConfig = _messaging_utils.AndroidConfig +AndroidFCMOptions = _messaging_utils.AndroidFCMOptions +AndroidNotification = _messaging_utils.AndroidNotification +APNSConfig = _messaging_utils.APNSConfig +APNSFCMOptions = _messaging_utils.APNSFCMOptions +APNSPayload = _messaging_utils.APNSPayload +Aps = _messaging_utils.Aps +ApsAlert = _messaging_utils.ApsAlert +CriticalSound = _messaging_utils.CriticalSound +FCMOptions = _messaging_utils.FCMOptions +LightSettings = _messaging_utils.LightSettings +Message = _messaging_encoder.Message +MulticastMessage = _messaging_encoder.MulticastMessage +Notification = _messaging_utils.Notification +WebpushConfig = _messaging_utils.WebpushConfig +WebpushFCMOptions = _messaging_utils.WebpushFCMOptions +WebpushNotification = _messaging_utils.WebpushNotification +WebpushNotificationAction = _messaging_utils.WebpushNotificationAction +QuotaExceededError = _messaging_utils.QuotaExceededError +SenderIdMismatchError = _messaging_utils.SenderIdMismatchError +ThirdPartyAuthError = _messaging_utils.ThirdPartyAuthError +UnregisteredError = _messaging_utils.UnregisteredError +def send(message, dry_run=..., app=...): + """Sends the given message via Firebase Cloud Messaging (FCM). + + If the ``dry_run`` mode is enabled, the message will not be actually delivered to the + recipients. 
Instead FCM performs all the usual validations, and emulates the send operation.
+
+    Args:
+        message: An instance of ``messaging.Message``.
+        dry_run: A boolean indicating whether to run the operation in dry run mode (optional).
+        app: An App instance (optional).
+
+    Returns:
+        string: A message ID string that uniquely identifies the sent message.
+
+    Raises:
+        FirebaseError: If an error occurs while sending the message to the FCM service.
+        ValueError: If the input arguments are invalid.
+    """
+    ...
+
+def send_each(messages, dry_run=..., app=...):
+    """Sends each message in the given list via Firebase Cloud Messaging.
+
+    If the ``dry_run`` mode is enabled, the message will not be actually delivered to the
+    recipients. Instead FCM performs all the usual validations, and emulates the send operation.
+
+    Args:
+        messages: A list of ``messaging.Message`` instances.
+        dry_run: A boolean indicating whether to run the operation in dry run mode (optional).
+        app: An App instance (optional).
+
+    Returns:
+        BatchResponse: A ``messaging.BatchResponse`` instance.
+
+    Raises:
+        FirebaseError: If an error occurs while sending the message to the FCM service.
+        ValueError: If the input arguments are invalid.
+    """
+    ...
+
+def send_each_for_multicast(multicast_message, dry_run=..., app=...):
+    """Sends the given multicast message to each token via Firebase Cloud Messaging (FCM).
+
+    If the ``dry_run`` mode is enabled, the message will not be actually delivered to the
+    recipients. Instead FCM performs all the usual validations, and emulates the send operation.
+
+    Args:
+        multicast_message: An instance of ``messaging.MulticastMessage``.
+        dry_run: A boolean indicating whether to run the operation in dry run mode (optional).
+        app: An App instance (optional).
+
+    Returns:
+        BatchResponse: A ``messaging.BatchResponse`` instance.
+
+    Raises:
+        FirebaseError: If an error occurs while sending the message to the FCM service.
+        ValueError: If the input arguments are invalid.
+
+    """
+    ...
+
+def send_all(messages, dry_run=..., app=...):
+    """Sends the given list of messages via Firebase Cloud Messaging as a single batch.
+
+    If the ``dry_run`` mode is enabled, the message will not be actually delivered to the
+    recipients. Instead FCM performs all the usual validations, and emulates the send operation.
+
+    Args:
+        messages: A list of ``messaging.Message`` instances.
+        dry_run: A boolean indicating whether to run the operation in dry run mode (optional).
+        app: An App instance (optional).
+
+    Returns:
+        BatchResponse: A ``messaging.BatchResponse`` instance.
+
+    Raises:
+        FirebaseError: If an error occurs while sending the message to the FCM service.
+        ValueError: If the input arguments are invalid.
+
+    send_all() is deprecated. Use send_each() instead.
+    """
+    ...
+
+def send_multicast(multicast_message, dry_run=..., app=...):
+    """Sends the given multicast message to all tokens via Firebase Cloud Messaging (FCM).
+
+    If the ``dry_run`` mode is enabled, the message will not be actually delivered to the
+    recipients. Instead FCM performs all the usual validations, and emulates the send operation.
+
+    Args:
+        multicast_message: An instance of ``messaging.MulticastMessage``.
+        dry_run: A boolean indicating whether to run the operation in dry run mode (optional).
+        app: An App instance (optional).
+
+    Returns:
+        BatchResponse: A ``messaging.BatchResponse`` instance.
+
+    Raises:
+        FirebaseError: If an error occurs while sending the message to the FCM service.
+        ValueError: If the input arguments are invalid.
+
+    send_multicast() is deprecated. Use send_each_for_multicast() instead.
+    """
+    ...
+
+def subscribe_to_topic(tokens, topic, app=...):
+    """Subscribes a list of registration tokens to an FCM topic.
+
+    Args:
+        tokens: A non-empty list of device registration tokens. List may not have more than 1000
+            elements.
+        topic: Name of the topic to subscribe to. May contain the ``/topics/`` prefix.
+        app: An App instance (optional).
+ + Returns: + TopicManagementResponse: A ``TopicManagementResponse`` instance. + + Raises: + FirebaseError: If an error occurs while communicating with instance ID service. + ValueError: If the input arguments are invalid. + """ + ... + +def unsubscribe_from_topic(tokens, topic, app=...): + """Unsubscribes a list of registration tokens from an FCM topic. + + Args: + tokens: A non-empty list of device registration tokens. List may not have more than 1000 + elements. + topic: Name of the topic to unsubscribe from. May contain the ``/topics/`` prefix. + app: An App instance (optional). + + Returns: + TopicManagementResponse: A ``TopicManagementResponse`` instance. + + Raises: + FirebaseError: If an error occurs while communicating with instance ID service. + ValueError: If the input arguments are invalid. + """ + ... + +class ErrorInfo: + """An error encountered when performing a topic management operation.""" + def __init__(self, index, reason) -> None: + ... + + @property + def index(self): # -> Any: + """Index of the registration token to which this error is related to.""" + ... + + @property + def reason(self): # -> Any: + """String describing the nature of the error.""" + ... + + + +class TopicManagementResponse: + """The response received from a topic management operation.""" + def __init__(self, resp) -> None: + ... + + @property + def success_count(self): # -> int: + """Number of tokens that were successfully subscribed or unsubscribed.""" + ... + + @property + def failure_count(self): # -> int: + """Number of tokens that could not be subscribed or unsubscribed due to errors.""" + ... + + @property + def errors(self): # -> list[Any]: + """A list of ``messaging.ErrorInfo`` objects (possibly empty).""" + ... + + + +class BatchResponse: + """The response received from a batch request to the FCM API.""" + def __init__(self, responses) -> None: + ... 
+ + @property + def responses(self): # -> Any: + """A list of ``messaging.SendResponse`` objects (possibly empty).""" + ... + + @property + def success_count(self): # -> int: + ... + + @property + def failure_count(self): # -> int: + ... + + + +class SendResponse: + """The response received from an individual batched request to the FCM API.""" + def __init__(self, resp, exception) -> None: + ... + + @property + def message_id(self): # -> None: + """A message ID string that uniquely identifies the message.""" + ... + + @property + def success(self): # -> bool: + """A boolean indicating if the request was successful.""" + ... + + @property + def exception(self): # -> Any: + """A ``FirebaseError`` if an error occurs while sending the message to the FCM service.""" + ... + + + +class _MessagingService: + """Service class that implements Firebase Cloud Messaging (FCM) functionality.""" + FCM_URL = ... + FCM_BATCH_URL = ... + IID_URL = ... + IID_HEADERS = ... + JSON_ENCODER = ... + FCM_ERROR_TYPES = ... + def __init__(self, app) -> None: + ... + + @classmethod + def encode_message(cls, message): # -> Any | dict[Any, Any]: + ... + + def send(self, message, dry_run=...): + """Sends the given message to FCM via the FCM v1 API.""" + ... + + def send_each(self, messages, dry_run=...): # -> BatchResponse: + """Sends the given messages to FCM via the FCM v1 API.""" + ... + + def send_all(self, messages, dry_run=...): # -> BatchResponse: + """Sends the given messages to FCM via the batch API.""" + ... + + def make_topic_management_request(self, tokens, topic, operation): # -> TopicManagementResponse: + """Invokes the IID service for topic management functionality.""" + ... + + + diff --git a/typings/firebase_admin/ml.pyi b/typings/firebase_admin/ml.pyi new file mode 100644 index 0000000..f533beb --- /dev/null +++ b/typings/firebase_admin/ml.pyi @@ -0,0 +1,529 @@ +""" +This type stub file was generated by pyright. +""" + +"""Firebase ML module. 
+ +This module contains functions for creating, updating, getting, listing, +deleting, publishing and unpublishing Firebase ML models. +""" +_GCS_ENABLED = ... +_TF_ENABLED = ... +_ML_ATTRIBUTE = ... +_MAX_PAGE_SIZE = ... +_MODEL_ID_PATTERN = ... +_DISPLAY_NAME_PATTERN = ... +_TAG_PATTERN = ... +_GCS_TFLITE_URI_PATTERN = ... +_AUTO_ML_MODEL_PATTERN = ... +_RESOURCE_NAME_PATTERN = ... +_OPERATION_NAME_PATTERN = ... +def create_model(model, app=...): # -> Model: + """Creates a model in the current Firebase project. + + Args: + model: An ml.Model to create. + app: A Firebase app instance (or None to use the default app). + + Returns: + Model: The model that was created in Firebase ML. + """ + ... + +def update_model(model, app=...): # -> Model: + """Updates a model's metadata or model file. + + Args: + model: The ml.Model to update. + app: A Firebase app instance (or None to use the default app). + + Returns: + Model: The updated model. + """ + ... + +def publish_model(model_id, app=...): # -> Model: + """Publishes a Firebase ML model. + + A published model can be downloaded to client apps. + + Args: + model_id: The id of the model to publish. + app: A Firebase app instance (or None to use the default app). + + Returns: + Model: The published model. + """ + ... + +def unpublish_model(model_id, app=...): # -> Model: + """Unpublishes a Firebase ML model. + + Args: + model_id: The id of the model to unpublish. + app: A Firebase app instance (or None to use the default app). + + Returns: + Model: The unpublished model. + """ + ... + +def get_model(model_id, app=...): # -> Model: + """Gets the model specified by the given ID. + + Args: + model_id: The id of the model to get. + app: A Firebase app instance (or None to use the default app). + + Returns: + Model: The requested model. + """ + ... + +def list_models(list_filter=..., page_size=..., page_token=..., app=...): # -> ListModelsPage: + """Lists the current project's models. 
+ + Args: + list_filter: a list filter string such as ``tags:'tag_1'``. None will return all models. + page_size: A number between 1 and 100 inclusive that specifies the maximum + number of models to return per page. None for default. + page_token: A next page token returned from a previous page of results. None + for first page of results. + app: A Firebase app instance (or None to use the default app). + + Returns: + ListModelsPage: A (filtered) list of models. + """ + ... + +def delete_model(model_id, app=...): # -> None: + """Deletes a model from the current project. + + Args: + model_id: The id of the model you wish to delete. + app: A Firebase app instance (or None to use the default app). + """ + ... + +class Model: + """A Firebase ML Model object. + + Args: + display_name: The display name of your model - used to identify your model in code. + tags: Optional list of strings associated with your model. Can be used in list queries. + model_format: A subclass of ModelFormat. (e.g. TFLiteFormat) Specifies the model details. + """ + def __init__(self, display_name=..., tags=..., model_format=...) -> None: + ... + + @classmethod + def from_dict(cls, data, app=...): # -> Model: + """Create an instance of the object from a dict.""" + ... + + def __eq__(self, other) -> bool: + ... + + def __ne__(self, other) -> bool: + ... + + @property + def model_id(self): # -> str | Any | None: + """The model's ID, unique to the project.""" + ... + + @property + def display_name(self): # -> None: + """The model's display name, used to refer to the model in code and in + the Firebase console.""" + ... + + @display_name.setter + def display_name(self, display_name): # -> Self: + ... + + @property + def create_time(self): # -> int | None: + """The time the model was created.""" + ... + + @property + def update_time(self): # -> int | None: + """The time the model was last updated.""" + ... + + @property + def validation_error(self): + """Validation error message.""" + ... 
+ + @property + def published(self): # -> bool: + """True if the model is published and available for clients to + download.""" + ... + + @property + def etag(self): # -> None: + """The entity tag (ETag) of the model resource.""" + ... + + @property + def model_hash(self): # -> None: + """SHA256 hash of the model binary.""" + ... + + @property + def tags(self): # -> None: + """Tag strings, used for filtering query results.""" + ... + + @tags.setter + def tags(self, tags): # -> Self: + ... + + @property + def locked(self): # -> bool: + """True if the Model object is locked by an active operation.""" + ... + + def wait_for_unlocked(self, max_time_seconds=...): # -> None: + """Waits for the model to be unlocked. (All active operations complete) + + Args: + max_time_seconds: The maximum number of seconds to wait for the model to unlock. + (None for no limit) + + Raises: + exceptions.DeadlineExceeded: If max_time_seconds passed and the model is still locked. + """ + ... + + @property + def model_format(self): # -> None: + """The model's ``ModelFormat`` object, which represents the model's + format and storage location.""" + ... + + @model_format.setter + def model_format(self, model_format): # -> Self: + ... + + def as_dict(self, for_upload=...): # -> dict[Any, Any]: + """Returns a serializable representation of the object.""" + ... + + + +class ModelFormat: + """Abstract base class representing a Model Format such as TFLite.""" + def as_dict(self, for_upload=...): + """Returns a serializable representation of the object.""" + ... + + + +class TFLiteFormat(ModelFormat): + """Model format representing a TFLite model. + + Args: + model_source: A TFLiteModelSource sub class. Specifies the details of the model source. + """ + def __init__(self, model_source=...) -> None: + ... + + @classmethod + def from_dict(cls, data): # -> TFLiteFormat: + """Create an instance of the object from a dict.""" + ... + + def __eq__(self, other) -> bool: + ... 
+ + def __ne__(self, other) -> bool: + ... + + @property + def model_source(self): # -> TFLiteModelSource | None: + """The TF Lite model's location.""" + ... + + @model_source.setter + def model_source(self, model_source): # -> None: + ... + + @property + def size_bytes(self): # -> None: + """The size in bytes of the TF Lite model.""" + ... + + def as_dict(self, for_upload=...): # -> dict[str, dict[Any, Any]]: + """Returns a serializable representation of the object.""" + ... + + + +class TFLiteModelSource: + """Abstract base class representing a model source for TFLite format models.""" + def as_dict(self, for_upload=...): + """Returns a serializable representation of the object.""" + ... + + + +class _CloudStorageClient: + """Cloud Storage helper class""" + GCS_URI = ... + BLOB_NAME = ... + @staticmethod + def upload(bucket_name, model_file_name, app): # -> str: + """Upload a model file to the specified Storage bucket.""" + ... + + @staticmethod + def sign_uri(gcs_tflite_uri, app): # -> str: + """Makes the gcs_tflite_uri readable for GET for 10 minutes via signed_uri.""" + ... + + + +class TFLiteGCSModelSource(TFLiteModelSource): + """TFLite model source representing a tflite model file stored in GCS.""" + _STORAGE_CLIENT = ... + def __init__(self, gcs_tflite_uri, app=...) -> None: + ... + + def __eq__(self, other) -> bool: + ... + + def __ne__(self, other) -> bool: + ... + + @classmethod + def from_tflite_model_file(cls, model_file_name, bucket_name=..., app=...): # -> TFLiteGCSModelSource: + """Uploads the model file to an existing Google Cloud Storage bucket. + + Args: + model_file_name: The name of the model file. + bucket_name: The name of an existing bucket. None to use the default bucket configured + in the app. + app: A Firebase app instance (or None to use the default app). + + Returns: + TFLiteGCSModelSource: The source created from the model_file + + Raises: + ImportError: If the Cloud Storage Library has not been installed. + """ + ... 
+ + @classmethod + def from_saved_model(cls, saved_model_dir, model_file_name=..., bucket_name=..., app=...): # -> TFLiteGCSModelSource: + """Creates a Tensor Flow Lite model from the saved model, and uploads the model to GCS. + + Args: + saved_model_dir: The saved model directory. + model_file_name: The name that the tflite model will be saved as in Cloud Storage. + bucket_name: The name of an existing bucket. None to use the default bucket configured + in the app. + app: Optional. A Firebase app instance (or None to use the default app) + + Returns: + TFLiteGCSModelSource: The source created from the saved_model_dir + + Raises: + ImportError: If the Tensor Flow or Cloud Storage Libraries have not been installed. + """ + ... + + @classmethod + def from_keras_model(cls, keras_model, model_file_name=..., bucket_name=..., app=...): # -> TFLiteGCSModelSource: + """Creates a Tensor Flow Lite model from the keras model, and uploads the model to GCS. + + Args: + keras_model: A tf.keras model. + model_file_name: The name that the tflite model will be saved as in Cloud Storage. + bucket_name: The name of an existing bucket. None to use the default bucket configured + in the app. + app: Optional. A Firebase app instance (or None to use the default app) + + Returns: + TFLiteGCSModelSource: The source created from the keras_model + + Raises: + ImportError: If the Tensor Flow or Cloud Storage Libraries have not been installed. + """ + ... + + @property + def gcs_tflite_uri(self): # -> Any: + """URI of the model file in Cloud Storage.""" + ... + + @gcs_tflite_uri.setter + def gcs_tflite_uri(self, gcs_tflite_uri): # -> None: + ... + + def as_dict(self, for_upload=...): # -> dict[str, str] | dict[str, Any]: + """Returns a serializable representation of the object.""" + ... + + + +class TFLiteAutoMlSource(TFLiteModelSource): + """TFLite model source representing a tflite model created with AutoML. 
+ + AutoML model support is deprecated and will be removed in the next major version. + """ + def __init__(self, auto_ml_model, app=...) -> None: + ... + + def __eq__(self, other) -> bool: + ... + + def __ne__(self, other) -> bool: + ... + + @property + def auto_ml_model(self): + """Resource name of the model, created by the AutoML API or Cloud console.""" + ... + + @auto_ml_model.setter + def auto_ml_model(self, auto_ml_model): # -> None: + ... + + def as_dict(self, for_upload=...): # -> dict[str, Any]: + """Returns a serializable representation of the object.""" + ... + + + +class ListModelsPage: + """Represents a page of models in a Firebase project. + + Provides methods for traversing the models included in this page, as well as + retrieving subsequent pages of models. The iterator returned by + ``iterate_all()`` can be used to iterate through all the models in the + Firebase project starting from this page. + """ + def __init__(self, list_models_func, list_filter, page_size, page_token, app) -> None: + ... + + @property + def models(self): # -> list[Model]: + """A list of Models from this page.""" + ... + + @property + def list_filter(self): # -> Any: + """The filter string used to filter the models.""" + ... + + @property + def next_page_token(self): + """Token identifying the next page of results.""" + ... + + @property + def has_next_page(self): # -> bool: + """True if more pages are available.""" + ... + + def get_next_page(self): # -> ListModelsPage | None: + """Retrieves the next page of models if available. + + Returns: + ListModelsPage: Next page of models, or None if this is the last page. + """ + ... + + def iterate_all(self): # -> _ModelIterator: + """Retrieves an iterator for Models. + + Returned iterator will iterate through all the models in the Firebase + project starting from this page. The iterator will never buffer more than + one page of models in memory at a time. + + Returns: + iterator: An iterator of Model instances. + """ + ... 
+ + + +class _ModelIterator: + """An iterator that allows iterating over models, one at a time. + + This implementation loads a page of models into memory, and iterates on them. + When the whole page has been traversed, it loads another page. This class + never keeps more than one page of entries in memory. + """ + def __init__(self, current_page) -> None: + ... + + def next(self): # -> Model: + ... + + def __next__(self): # -> Model: + ... + + def __iter__(self): # -> Self: + ... + + + +class _MLService: + """Firebase ML service.""" + PROJECT_URL = ... + OPERATION_URL = ... + POLL_EXPONENTIAL_BACKOFF_FACTOR = ... + POLL_BASE_WAIT_TIME_SECONDS = ... + def __init__(self, app) -> None: + ... + + def get_operation(self, op_name): + ... + + def handle_operation(self, operation, wait_for_operation=..., max_time_seconds=...): # -> dict[Any, Any] | None: + """Handles long running operations. + + Args: + operation: The operation to handle. + wait_for_operation: Should we allow polling for the operation to complete. + If no polling is requested, a locked model will be returned instead. + max_time_seconds: The maximum seconds to try polling for operation complete. + (None for no limit) + + Returns: + dict: A dictionary of the returned model properties. + + Raises: + TypeError: if the operation is not a dictionary. + ValueError: If the operation is malformed. + UnknownError: If the server responds with an unexpected response. + err: If the operation exceeds polling attempts or stop_time + """ + ... + + def create_model(self, model): # -> dict[Any, Any] | None: + ... + + def update_model(self, model, update_mask=...): # -> dict[Any, Any] | None: + ... + + def set_published(self, model_id, publish): # -> dict[Any, Any] | None: + ... + + def get_model(self, model_id): + ... + + def list_models(self, list_filter, page_size, page_token): + """ lists Firebase ML models.""" + ... + + def delete_model(self, model_id): # -> None: + ... 
+ + + diff --git a/typings/firebase_admin/project_management.pyi b/typings/firebase_admin/project_management.pyi new file mode 100644 index 0000000..1901a7d --- /dev/null +++ b/typings/firebase_admin/project_management.pyi @@ -0,0 +1,422 @@ +""" +This type stub file was generated by pyright. +""" + +"""Firebase Project Management module. + +This module enables management of resources in Firebase projects, such as Android and iOS apps. +""" +_PROJECT_MANAGEMENT_ATTRIBUTE = ... +def android_app(app_id, app=...): # -> AndroidApp: + """Obtains a reference to an Android app in the associated Firebase project. + + Args: + app_id: The app ID that identifies this Android app. + app: An App instance (optional). + + Returns: + AndroidApp: An ``AndroidApp`` instance. + """ + ... + +def ios_app(app_id, app=...): # -> IOSApp: + """Obtains a reference to an iOS app in the associated Firebase project. + + Args: + app_id: The app ID that identifies this iOS app. + app: An App instance (optional). + + Returns: + IOSApp: An ``IOSApp`` instance. + """ + ... + +def list_android_apps(app=...): + """Lists all Android apps in the associated Firebase project. + + Args: + app: An App instance (optional). + + Returns: + list: a list of ``AndroidApp`` instances referring to each Android app in the Firebase + project. + """ + ... + +def list_ios_apps(app=...): + """Lists all iOS apps in the associated Firebase project. + + Args: + app: An App instance (optional). + + Returns: + list: a list of ``IOSApp`` instances referring to each iOS app in the Firebase project. + """ + ... + +def create_android_app(package_name, display_name=..., app=...): + """Creates a new Android app in the associated Firebase project. + + Args: + package_name: The package name of the Android app to be created. + display_name: A nickname for this Android app (optional). + app: An App instance (optional). + + Returns: + AndroidApp: An ``AndroidApp`` instance that is a reference to the newly created app. + """ + ... 
+ +def create_ios_app(bundle_id, display_name=..., app=...): + """Creates a new iOS app in the associated Firebase project. + + Args: + bundle_id: The bundle ID of the iOS app to be created. + display_name: A nickname for this iOS app (optional). + app: An App instance (optional). + + Returns: + IOSApp: An ``IOSApp`` instance that is a reference to the newly created app. + """ + ... + +class AndroidApp: + """A reference to an Android app within a Firebase project. + + Note: Unless otherwise specified, all methods defined in this class make an RPC. + + Please use the module-level function ``android_app(app_id)`` to obtain instances of this class + instead of instantiating it directly. + """ + def __init__(self, app_id, service) -> None: + ... + + @property + def app_id(self): # -> Any: + """Returns the app ID of the Android app to which this instance refers. + + Note: This method does not make an RPC. + + Returns: + string: The app ID of the Android app to which this instance refers. + """ + ... + + def get_metadata(self): + """Retrieves detailed information about this Android app. + + Returns: + AndroidAppMetadata: An ``AndroidAppMetadata`` instance. + + Raises: + FirebaseError: If an error occurs while communicating with the Firebase Project + Management Service. + """ + ... + + def set_display_name(self, new_display_name): + """Updates the display name attribute of this Android app to the one given. + + Args: + new_display_name: The new display name for this Android app. + + Returns: + NoneType: None. + + Raises: + FirebaseError: If an error occurs while communicating with the Firebase Project + Management Service. + """ + ... + + def get_config(self): + """Retrieves the configuration artifact associated with this Android app.""" + ... + + def get_sha_certificates(self): + """Retrieves the entire list of SHA certificates associated with this Android app. + + Returns: + list: A list of ``SHACertificate`` instances. 
+ + Raises: + FirebaseError: If an error occurs while communicating with the Firebase Project + Management Service. + """ + ... + + def add_sha_certificate(self, certificate_to_add): + """Adds a SHA certificate to this Android app. + + Args: + certificate_to_add: The SHA certificate to add. + + Returns: + NoneType: None. + + Raises: + FirebaseError: If an error occurs while communicating with the Firebase Project + Management Service. (For example, if the certificate_to_add already exists.) + """ + ... + + def delete_sha_certificate(self, certificate_to_delete): + """Removes a SHA certificate from this Android app. + + Args: + certificate_to_delete: The SHA certificate to delete. + + Returns: + NoneType: None. + + Raises: + FirebaseError: If an error occurs while communicating with the Firebase Project + Management Service. (For example, if the certificate_to_delete is not found.) + """ + ... + + + +class IOSApp: + """A reference to an iOS app within a Firebase project. + + Note: Unless otherwise specified, all methods defined in this class make an RPC. + + Please use the module-level function ``ios_app(app_id)`` to obtain instances of this class + instead of instantiating it directly. + """ + def __init__(self, app_id, service) -> None: + ... + + @property + def app_id(self): # -> Any: + """Returns the app ID of the iOS app to which this instance refers. + + Note: This method does not make an RPC. + + Returns: + string: The app ID of the iOS app to which this instance refers. + """ + ... + + def get_metadata(self): + """Retrieves detailed information about this iOS app. + + Returns: + IOSAppMetadata: An ``IOSAppMetadata`` instance. + + Raises: + FirebaseError: If an error occurs while communicating with the Firebase Project + Management Service. + """ + ... + + def set_display_name(self, new_display_name): + """Updates the display name attribute of this iOS app to the one given. + + Args: + new_display_name: The new display name for this iOS app. 
+ + Returns: + NoneType: None. + + Raises: + FirebaseError: If an error occurs while communicating with the Firebase Project + Management Service. + """ + ... + + def get_config(self): + """Retrieves the configuration artifact associated with this iOS app.""" + ... + + + +class _AppMetadata: + """Detailed information about a Firebase Android or iOS app.""" + def __init__(self, name, app_id, display_name, project_id) -> None: + ... + + @property + def app_id(self): # -> str: + """The globally unique, Firebase-assigned identifier of this Android or iOS app. + + This ID is unique even across apps of different platforms. + """ + ... + + @property + def display_name(self): # -> str: + """The user-assigned display name of this Android or iOS app. + + Note that the display name can be None if it has never been set by the user.""" + ... + + @property + def project_id(self): # -> str: + """The permanent, globally unique, user-assigned ID of the parent Firebase project.""" + ... + + def __eq__(self, other) -> bool: + ... + + + +class AndroidAppMetadata(_AppMetadata): + """Android-specific information about an Android Firebase app.""" + def __init__(self, package_name, name, app_id, display_name, project_id) -> None: + """Clients should not instantiate this class directly.""" + ... + + @property + def package_name(self): # -> str: + """The canonical package name of this Android app as it would appear in the Play Store.""" + ... + + def __eq__(self, other) -> bool: + ... + + def __ne__(self, other) -> bool: + ... + + def __hash__(self) -> int: + ... + + + +class IOSAppMetadata(_AppMetadata): + """iOS-specific information about an iOS Firebase app.""" + def __init__(self, bundle_id, name, app_id, display_name, project_id) -> None: + """Clients should not instantiate this class directly.""" + ... + + @property + def bundle_id(self): # -> str: + """The canonical bundle ID of this iOS app as it would appear in the iOS AppStore.""" + ... + + def __eq__(self, other) -> bool: + ... 
+ + def __ne__(self, other) -> bool: + ... + + def __hash__(self) -> int: + ... + + + +class SHACertificate: + """Represents a SHA-1 or SHA-256 certificate associated with an Android app.""" + SHA_1 = ... + SHA_256 = ... + _SHA_1_RE = ... + _SHA_256_RE = ... + def __init__(self, sha_hash, name=...) -> None: + """Creates a new SHACertificate instance. + + Args: + sha_hash: A string; the certificate hash for the Android app. + name: The fully qualified resource name of this certificate; note that this field should + be omitted if the instance is being constructed for the purpose of calling the + add_sha_certificate() method on an ``AndroidApp``. + + Raises: + ValueError: If the sha_hash is not a valid SHA-1 or SHA-256 certificate hash. + """ + ... + + @property + def name(self): # -> None: + """Returns the fully qualified resource name of this certificate, if known. + + Returns: + string: The fully qualified resource name of this certificate, if known; otherwise, the + empty string. + """ + ... + + @property + def sha_hash(self): + """Returns the certificate hash. + + Returns: + string: The certificate hash. + """ + ... + + @property + def cert_type(self): # -> str: + """Returns the type of the SHA certificate encoded in the hash. + + Returns: + string: One of 'SHA_1' or 'SHA_256'. + """ + ... + + def __eq__(self, other) -> bool: + ... + + def __ne__(self, other) -> bool: + ... + + def __hash__(self) -> int: + ... + + + +class _ProjectManagementService: + """Provides methods for interacting with the Firebase Project Management Service.""" + BASE_URL = ... + MAXIMUM_LIST_APPS_PAGE_SIZE = ... + MAXIMUM_POLLING_ATTEMPTS = ... + POLL_BASE_WAIT_TIME_SECONDS = ... + POLL_EXPONENTIAL_BACKOFF_FACTOR = ... + ANDROID_APPS_RESOURCE_NAME = ... + ANDROID_APP_IDENTIFIER_NAME = ... + IOS_APPS_RESOURCE_NAME = ... + IOS_APP_IDENTIFIER_NAME = ... + def __init__(self, app) -> None: + ... + + def get_android_app_metadata(self, app_id): # -> AndroidAppMetadata: + ... 
+ + def get_ios_app_metadata(self, app_id): # -> IOSAppMetadata: + ... + + def set_android_app_display_name(self, app_id, new_display_name): # -> None: + ... + + def set_ios_app_display_name(self, app_id, new_display_name): # -> None: + ... + + def list_android_apps(self): # -> list[Any]: + ... + + def list_ios_apps(self): # -> list[Any]: + ... + + def create_android_app(self, package_name, display_name=...): # -> AndroidApp: + ... + + def create_ios_app(self, bundle_id, display_name=...): # -> IOSApp: + ... + + def get_android_app_config(self, app_id): # -> str: + ... + + def get_ios_app_config(self, app_id): # -> str: + ... + + def get_sha_certificates(self, app_id): # -> list[SHACertificate]: + ... + + def add_sha_certificate(self, app_id, certificate_to_add): # -> None: + ... + + def delete_sha_certificate(self, certificate_to_delete): # -> None: + ... + + + diff --git a/typings/firebase_admin/remote_config.pyi b/typings/firebase_admin/remote_config.pyi new file mode 100644 index 0000000..65615e8 --- /dev/null +++ b/typings/firebase_admin/remote_config.pyi @@ -0,0 +1,340 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import Dict, Literal, Optional, Union +from enum import Enum +from firebase_admin import App + +"""Firebase Remote Config Module. +This module has required APIs for the clients to use Firebase Remote Config with python. +""" +logger = ... +_REMOTE_CONFIG_ATTRIBUTE = ... +MAX_CONDITION_RECURSION_DEPTH = ... +ValueSource = Literal['default', 'remote', 'static'] +class PercentConditionOperator(Enum): + """Enum representing the available operators for percent conditions. + """ + LESS_OR_EQUAL = ... + GREATER_THAN = ... + BETWEEN = ... + UNKNOWN = ... + + +class CustomSignalOperator(Enum): + """Enum representing the available operators for custom signal conditions. + """ + STRING_CONTAINS = ... + STRING_DOES_NOT_CONTAIN = ... + STRING_EXACTLY_MATCHES = ... + STRING_CONTAINS_REGEX = ... + NUMERIC_LESS_THAN = ... 
+ NUMERIC_LESS_EQUAL = ... + NUMERIC_EQUAL = ... + NUMERIC_NOT_EQUAL = ... + NUMERIC_GREATER_THAN = ... + NUMERIC_GREATER_EQUAL = ... + SEMANTIC_VERSION_LESS_THAN = ... + SEMANTIC_VERSION_LESS_EQUAL = ... + SEMANTIC_VERSION_EQUAL = ... + SEMANTIC_VERSION_NOT_EQUAL = ... + SEMANTIC_VERSION_GREATER_THAN = ... + SEMANTIC_VERSION_GREATER_EQUAL = ... + UNKNOWN = ... + + +class _ServerTemplateData: + """Parses, validates and encapsulates template data and metadata.""" + def __init__(self, template_data) -> None: + """Initializes a new ServerTemplateData instance. + + Args: + template_data: The data to be parsed for getting the parameters and conditions. + + Raises: + ValueError: If the template data is not valid. + """ + ... + + @property + def parameters(self): # -> dict[Any, Any]: + ... + + @property + def etag(self): # -> str: + ... + + @property + def version(self): # -> str: + ... + + @property + def conditions(self): # -> list[Any]: + ... + + @property + def template_data_json(self): # -> str: + ... + + + +class ServerTemplate: + """Represents a Server Template with implementations for loading and evaluating the template.""" + def __init__(self, app: App = ..., default_config: Optional[Dict[str, str]] = ...) -> None: + """Initializes a ServerTemplate instance. + + Args: + app: App instance to be used. This is optional and the default app instance will + be used if not present. + default_config: The default config to be used in the evaluated config. + """ + ... + + async def load(self): # -> None: + """Fetches the server template and caches the data.""" + ... + + def evaluate(self, context: Optional[Dict[str, Union[str, int]]] = ...) -> ServerConfig: + """Evaluates the cached server template to produce a ServerConfig. + + Args: + context: A dictionary of values to use for evaluating conditions. + + Returns: + A ServerConfig object. + Raises: + ValueError: If the input arguments are invalid. + """ + ... 
+ + def set(self, template_data_json: str): # -> None: + """Updates the cache to store the given template is of type ServerTemplateData. + + Args: + template_data_json: A json string representing ServerTemplateData to be cached. + """ + ... + + def to_json(self): # -> str: + """Provides the server template in a JSON format to be used for initialization later.""" + ... + + + +class ServerConfig: + """Represents a Remote Config Server Side Config.""" + def __init__(self, config_values) -> None: + ... + + def get_boolean(self, key): + """Returns the value as a boolean.""" + ... + + def get_string(self, key): + """Returns the value as a string.""" + ... + + def get_int(self, key): + """Returns the value as an integer.""" + ... + + def get_float(self, key): + """Returns the value as a float.""" + ... + + def get_value_source(self, key): + """Returns the source of the value.""" + ... + + + +class _RemoteConfigService: + """Internal class that facilitates sending requests to the Firebase Remote + Config backend API. + """ + def __init__(self, app) -> None: + """Initialize a JsonHttpClient with necessary inputs. + + Args: + app: App instance to be used for fetching app specific details required + for initializing the http client. + """ + ... + + async def get_server_template(self): # -> _ServerTemplateData: + """Requests for a server template and converts the response to an instance of + ServerTemplateData for storing the template parameters and conditions.""" + ... + + + +class _ConditionEvaluator: + """Internal class that facilitates sending requests to the Firebase Remote + Config backend API.""" + def __init__(self, conditions, parameters, context, config_values) -> None: + ... + + def evaluate(self): # -> Any: + """Internal function that evaluates the cached server template to produce + a ServerConfig""" + ... + + def evaluate_conditions(self, conditions, context) -> Dict[str, bool]: + """Evaluates a list of conditions and returns a dictionary of results. 
+ + Args: + conditions: A list of NamedCondition objects. + context: An EvaluationContext object. + + Returns: + A dictionary that maps condition names to boolean evaluation results. + """ + ... + + def evaluate_condition(self, condition, context, nesting_level: int = ...) -> bool: + """Recursively evaluates a condition. + + Args: + condition: The condition to evaluate. + context: An EvaluationContext object. + nesting_level: The current recursion depth. + + Returns: + The boolean result of the condition evaluation. + """ + ... + + def evaluate_or_condition(self, or_condition, context, nesting_level: int = ...) -> bool: + """Evaluates an OR condition. + + Args: + or_condition: The OR condition to evaluate. + context: An EvaluationContext object. + nesting_level: The current recursion depth. + + Returns: + True if any of the subconditions are true, False otherwise. + """ + ... + + def evaluate_and_condition(self, and_condition, context, nesting_level: int = ...) -> bool: + """Evaluates an AND condition. + + Args: + and_condition: The AND condition to evaluate. + context: An EvaluationContext object. + nesting_level: The current recursion depth. + + Returns: + True if all of the subconditions are met; False otherwise. + """ + ... + + def evaluate_percent_condition(self, percent_condition, context) -> bool: + """Evaluates a percent condition. + + Args: + percent_condition: The percent condition to evaluate. + context: An EvaluationContext object. + + Returns: + True if the condition is met, False otherwise. + """ + ... + + def hash_seeded_randomization_id(self, seeded_randomization_id: str) -> int: + """Hashes a seeded randomization ID. + + Args: + seeded_randomization_id: The seeded randomization ID to hash. + + Returns: + The hashed value. + """ + ... + + def evaluate_custom_signal_condition(self, custom_signal_condition, context) -> bool: + """Evaluates a custom signal condition. + + Args: + custom_signal_condition: The custom signal condition to evaluate. 
+ context: An EvaluationContext object. + + Returns: + True if the condition is met, False otherwise. + """ + ... + + + +async def get_server_template(app: App = ..., default_config: Optional[Dict[str, str]] = ...): # -> ServerTemplate: + """Initializes a new ServerTemplate instance and fetches the server template. + + Args: + app: App instance to be used. This is optional and the default app instance will + be used if not present. + default_config: The default config to be used in the evaluated config. + + Returns: + ServerTemplate: An object having the cached server template to be used for evaluation. + """ + ... + +def init_server_template(app: App = ..., default_config: Optional[Dict[str, str]] = ..., template_data_json: Optional[str] = ...): # -> ServerTemplate: + """Initializes a new ServerTemplate instance. + + Args: + app: App instance to be used. This is optional and the default app instance will + be used if not present. + default_config: The default config to be used in the evaluated config. + template_data_json: An optional template data JSON to be set on initialization. + + Returns: + ServerTemplate: A new ServerTemplate instance initialized with an optional + template and config. + """ + ... + +class _Value: + """Represents a value fetched from Remote Config. + """ + DEFAULT_VALUE_FOR_BOOLEAN = ... + DEFAULT_VALUE_FOR_STRING = ... + DEFAULT_VALUE_FOR_INTEGER = ... + DEFAULT_VALUE_FOR_FLOAT_NUMBER = ... + BOOLEAN_TRUTHY_VALUES = ... + def __init__(self, source: ValueSource, value: str = ...) -> None: + """Initializes a Value instance. + + Args: + source: The source of the value (e.g., 'default', 'remote', 'static'). + "static" indicates the value was defined by a static constant. + "default" indicates the value was defined by default config. + "remote" indicates the value was defined by config produced by evaluating a template. + value: The string value. + """ + ... + + def as_string(self) -> str: + """Returns the value as a string.""" + ... 
+ + def as_boolean(self) -> bool: + """Returns the value as a boolean.""" + ... + + def as_int(self) -> float: + """Returns the value as a number.""" + ... + + def as_float(self) -> float: + """Returns the value as a number.""" + ... + + def get_source(self) -> ValueSource: + """Returns the source of the value.""" + ... + + + diff --git a/typings/firebase_admin/storage.pyi b/typings/firebase_admin/storage.pyi new file mode 100644 index 0000000..d29ff43 --- /dev/null +++ b/typings/firebase_admin/storage.pyi @@ -0,0 +1,48 @@ +""" +This type stub file was generated by pyright. +""" + +from google.cloud import storage + +"""Firebase Cloud Storage module. + +This module contains utilities for accessing Google Cloud Storage buckets associated with +Firebase apps. This requires the ``google-cloud-storage`` Python module. +""" +_STORAGE_ATTRIBUTE = ... +def bucket(name=..., app=...) -> storage.Bucket: + """Returns a handle to a Google Cloud Storage bucket. + + If the name argument is not provided, uses the 'storageBucket' option specified when + initializing the App. If that is also not available raises an error. This function + does not make any RPC calls. + + Args: + name: Name of a cloud storage bucket (optional). + app: An App instance (optional). + + Returns: + google.cloud.storage.Bucket: A handle to the specified bucket. + + Raises: + ValueError: If a bucket name is not specified either via options or method arguments, + or if the specified bucket name is not a valid string. + """ + ... + +class _StorageClient: + """Holds a Google Cloud Storage client instance.""" + STORAGE_HEADERS = ... + def __init__(self, credentials, project, default_bucket) -> None: + ... + + @classmethod + def from_app(cls, app): # -> _StorageClient: + ... + + def bucket(self, name=...): # -> Bucket: + """Returns a handle to the specified Cloud Storage Bucket.""" + ... 
+ + + diff --git a/typings/firebase_admin/tenant_mgt.pyi b/typings/firebase_admin/tenant_mgt.pyi new file mode 100644 index 0000000..1f8f4d6 --- /dev/null +++ b/typings/firebase_admin/tenant_mgt.pyi @@ -0,0 +1,261 @@ +""" +This type stub file was generated by pyright. +""" + +from firebase_admin import _auth_utils + +"""Firebase tenant management module. + +This module contains functions for creating and configuring authentication tenants within a +Google Cloud Identity Platform (GCIP) instance. +""" +_TENANT_MGT_ATTRIBUTE = ... +_MAX_LIST_TENANTS_RESULTS = ... +_DISPLAY_NAME_PATTERN = ... +__all__ = ['ListTenantsPage', 'Tenant', 'TenantIdMismatchError', 'TenantNotFoundError', 'auth_for_tenant', 'create_tenant', 'delete_tenant', 'get_tenant', 'list_tenants', 'update_tenant'] +TenantIdMismatchError = _auth_utils.TenantIdMismatchError +TenantNotFoundError = _auth_utils.TenantNotFoundError +def auth_for_tenant(tenant_id, app=...): + """Gets an Auth Client instance scoped to the given tenant ID. + + Args: + tenant_id: A tenant ID string. + app: An App instance (optional). + + Returns: + auth.Client: An ``auth.Client`` object. + + Raises: + ValueError: If the tenant ID is None, empty or not a string. + """ + ... + +def get_tenant(tenant_id, app=...): + """Gets the tenant corresponding to the given ``tenant_id``. + + Args: + tenant_id: A tenant ID string. + app: An App instance (optional). + + Returns: + Tenant: A tenant object. + + Raises: + ValueError: If the tenant ID is None, empty or not a string. + TenantNotFoundError: If no tenant exists by the given ID. + FirebaseError: If an error occurs while retrieving the tenant. + """ + ... + +def create_tenant(display_name, allow_password_sign_up=..., enable_email_link_sign_in=..., app=...): + """Creates a new tenant from the given options. + + Args: + display_name: Display name string for the new tenant. Must begin with a letter and contain + only letters, digits and hyphens. Length must be between 4 and 20. 
+ allow_password_sign_up: A boolean indicating whether to enable or disable the email sign-in + provider (optional). + enable_email_link_sign_in: A boolean indicating whether to enable or disable email link + sign-in (optional). Disabling this makes the password required for email sign-in. + app: An App instance (optional). + + Returns: + Tenant: A tenant object. + + Raises: + ValueError: If any of the given arguments are invalid. + FirebaseError: If an error occurs while creating the tenant. + """ + ... + +def update_tenant(tenant_id, display_name=..., allow_password_sign_up=..., enable_email_link_sign_in=..., app=...): + """Updates an existing tenant with the given options. + + Args: + tenant_id: ID of the tenant to update. + display_name: Updated display name string for the tenant (optional). + allow_password_sign_up: A boolean indicating whether to enable or disable the email sign-in + provider. + enable_email_link_sign_in: A boolean indicating whether to enable or disable email link + sign-in. Disabling this makes the password required for email sign-in. + app: An App instance (optional). + + Returns: + Tenant: The updated tenant object. + + Raises: + ValueError: If any of the given arguments are invalid. + TenantNotFoundError: If no tenant exists by the given ID. + FirebaseError: If an error occurs while creating the tenant. + """ + ... + +def delete_tenant(tenant_id, app=...): # -> None: + """Deletes the tenant corresponding to the given ``tenant_id``. + + Args: + tenant_id: A tenant ID string. + app: An App instance (optional). + + Raises: + ValueError: If the tenant ID is None, empty or not a string. + TenantNotFoundError: If no tenant exists by the given ID. + FirebaseError: If an error occurs while retrieving the tenant. + """ + ... + +def list_tenants(page_token=..., max_results=..., app=...): # -> ListTenantsPage: + """Retrieves a page of tenants from a Firebase project. + + The ``page_token`` argument governs the starting point of the page. 
The ``max_results`` + argument governs the maximum number of tenants that may be included in the returned page. + This function never returns None. If there are no user accounts in the Firebase project, this + returns an empty page. + + Args: + page_token: A non-empty page token string, which indicates the starting point of the page + (optional). Defaults to ``None``, which will retrieve the first page of users. + max_results: A positive integer indicating the maximum number of users to include in the + returned page (optional). Defaults to 100, which is also the maximum number allowed. + app: An App instance (optional). + + Returns: + ListTenantsPage: A page of tenants. + + Raises: + ValueError: If ``max_results`` or ``page_token`` are invalid. + FirebaseError: If an error occurs while retrieving the user accounts. + """ + ... + +class Tenant: + """Represents a tenant in a multi-tenant application. + + Multi-tenancy support requires Google Cloud Identity Platform (GCIP). To learn more about + GCIP including pricing and features, see https://cloud.google.com/identity-platform. + + Before multi-tenancy can be used in a Google Cloud Identity Platform project, tenants must be + enabled in that project via the Cloud Console UI. A Tenant instance provides information + such as the display name, tenant identifier and email authentication configuration. + """ + def __init__(self, data) -> None: + ... + + @property + def tenant_id(self): + ... + + @property + def display_name(self): # -> None: + ... + + @property + def allow_password_sign_up(self): + ... + + @property + def enable_email_link_sign_in(self): + ... + + + +class _TenantManagementService: + """Firebase tenant management service.""" + TENANT_MGT_URL = ... + def __init__(self, app) -> None: + ... + + def auth_for_tenant(self, tenant_id): # -> Client: + """Gets an Auth Client instance scoped to the given tenant ID.""" + ... 
+ + def get_tenant(self, tenant_id): # -> Tenant: + """Gets the tenant corresponding to the given ``tenant_id``.""" + ... + + def create_tenant(self, display_name, allow_password_sign_up=..., enable_email_link_sign_in=...): # -> Tenant: + """Creates a new tenant from the given parameters.""" + ... + + def update_tenant(self, tenant_id, display_name=..., allow_password_sign_up=..., enable_email_link_sign_in=...): # -> Tenant: + """Updates the specified tenant with the given parameters.""" + ... + + def delete_tenant(self, tenant_id): # -> None: + """Deletes the tenant corresponding to the given ``tenant_id``.""" + ... + + def list_tenants(self, page_token=..., max_results=...): + """Retrieves a batch of tenants.""" + ... + + + +class ListTenantsPage: + """Represents a page of tenants fetched from a Firebase project. + + Provides methods for traversing tenants included in this page, as well as retrieving + subsequent pages of tenants. The iterator returned by ``iterate_all()`` can be used to iterate + through all tenants in the Firebase project starting from this page. + """ + def __init__(self, download, page_token, max_results) -> None: + ... + + @property + def tenants(self): # -> list[Tenant]: + """A list of ``ExportedUserRecord`` instances available in this page.""" + ... + + @property + def next_page_token(self): + """Page token string for the next page (empty string indicates no more pages).""" + ... + + @property + def has_next_page(self): # -> bool: + """A boolean indicating whether more pages are available.""" + ... + + def get_next_page(self): # -> ListTenantsPage | None: + """Retrieves the next page of tenants, if available. + + Returns: + ListTenantsPage: Next page of tenants, or None if this is the last page. + """ + ... + + def iterate_all(self): # -> _TenantIterator: + """Retrieves an iterator for tenants. + + Returned iterator will iterate through all the tenants in the Firebase project + starting from this page. 
The iterator will never buffer more than one page of tenants + in memory at a time. + + Returns: + iterator: An iterator of Tenant instances. + """ + ... + + + +class _TenantIterator: + """An iterator that allows iterating over tenants. + + This implementation loads a page of tenants into memory, and iterates on them. When the whole + page has been traversed, it loads another page. This class never keeps more than one page + of entries in memory. + """ + def __init__(self, current_page) -> None: + ... + + def next(self): + ... + + def __next__(self): + ... + + def __iter__(self): # -> Self: + ... + + + diff --git a/uv.lock b/uv.lock index c6884ac..f15e868 100644 --- a/uv.lock +++ b/uv.lock @@ -203,6 +203,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, ] +[[package]] +name = "apns2" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "hyper" }, + { name = "pyjwt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a8/a1/0c66ac293963b132e7eeb8edf5fa035e96eae3262623305704c88df876e6/apns2-0.7.1.tar.gz", hash = "sha256:8c24207aa96dff4687f8d7c9149fc42086f3506b0a76da1f5bf48d74e5569567", size = 11052, upload-time = "2019-10-08T19:52:56.116Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/91/f27cd1299e00eb6955199856f19327de73c5eb803503356fd238b81d3430/apns2-0.7.1-py2.py3-none-any.whl", hash = "sha256:360bcd1f1d6308348adcb317c0192d1631fe01a2b9b73ce95f57c708de2bb88a", size = 9945, upload-time = "2019-10-08T19:52:54.313Z" }, +] + [[package]] name = "argon2-cffi" version = "23.1.0" @@ -301,10 +315,12 @@ version = "0.1.0" source = { virtual = "." 
} dependencies = [ { name = "alembic" }, + { name = "apns2" }, { name = "asyncpg" }, { name = "bcrypt" }, { name = "cryptography" }, { name = "fastapi", extra = ["standard"] }, + { name = "firebase-admin" }, { name = "greenlet" }, { name = "insightface" }, { name = "miniopy-async" }, @@ -319,6 +335,7 @@ dependencies = [ { name = "pyjwt" }, { name = "pyotp" }, { name = "python-multipart" }, + { name = "pywebpush" }, { name = "redis" }, { name = "setuptools" }, ] @@ -333,10 +350,12 @@ dev = [ [package.metadata] requires-dist = [ { name = "alembic", specifier = ">=1.18.4" }, + { name = "apns2", specifier = ">=0.7.1" }, { name = "asyncpg", specifier = ">=0.31.0" }, { name = "bcrypt", specifier = "==4.3.0" }, { name = "cryptography", specifier = ">=46.0.5" }, { name = "fastapi", extras = ["standard"], specifier = ">=0.135.1" }, + { name = "firebase-admin", specifier = ">=6.8.0" }, { name = "greenlet", specifier = ">=3.3.2" }, { name = "insightface", specifier = ">=0.7.3" }, { name = "miniopy-async", specifier = ">=1.23.4" }, @@ -351,6 +370,7 @@ requires-dist = [ { name = "pyjwt", specifier = ">=2.11.0" }, { name = "pyotp", specifier = ">=2.9.0" }, { name = "python-multipart", specifier = ">=0.0.22" }, + { name = "pywebpush", specifier = ">=2.3.0" }, { name = "redis", specifier = ">=7.2.1" }, { name = "setuptools", specifier = ">=82.0.0" }, ] @@ -412,6 +432,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a9/cf/45fb5261ece3e6b9817d3d82b2f343a505fd58674a92577923bc500bd1aa/bcrypt-4.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:e53e074b120f2877a35cc6c736b8eb161377caae8925c17688bd46ba56daaa5b", size = 152799, upload-time = "2025-02-28T01:23:53.139Z" }, ] +[[package]] +name = "cachecontrol" +version = "0.14.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "msgpack" }, + { name = "requests" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/2d/f6/c972b32d80760fb79d6b9eeb0b3010a46b89c0b23cf6329417ff7886cd22/cachecontrol-0.14.4.tar.gz", hash = "sha256:e6220afafa4c22a47dd0badb319f84475d79108100d04e26e8542ef7d3ab05a1", size = 16150, upload-time = "2025-11-14T04:32:13.138Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/79/c45f2d53efe6ada1110cf6f9fca095e4ff47a0454444aefdde6ac4789179/cachecontrol-0.14.4-py3-none-any.whl", hash = "sha256:b7ac014ff72ee199b5f8af1de29d60239954f223e948196fa3d84adaffc71d2b", size = 22247, upload-time = "2025-11-14T04:32:11.733Z" }, +] + [[package]] name = "certifi" version = "2025.11.12" @@ -896,6 +929,23 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/85/11/0aa8455af26f0ae89e42be67f3a874255ee5d7f0f026fc86e8d56f76b428/fastar-0.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:e59673307b6a08210987059a2bdea2614fe26e3335d0e5d1a3d95f49a05b1418", size = 460467, upload-time = "2025-11-26T02:36:07.978Z" }, ] +[[package]] +name = "firebase-admin" +version = "6.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cachecontrol" }, + { name = "google-api-core", extra = ["grpc"], marker = "platform_python_implementation != 'PyPy'" }, + { name = "google-api-python-client" }, + { name = "google-cloud-firestore", marker = "platform_python_implementation != 'PyPy'" }, + { name = "google-cloud-storage" }, + { name = "pyjwt", extra = ["crypto"] }, +] +sdist = { url = "https://files.pythonhosted.org/packages/04/d8/4230b0770a2cd9d4de53bb1f0a17fa204716a9b271bc3be1fb109dfb8b9d/firebase_admin-6.8.0.tar.gz", hash = "sha256:24a9870219cfd6578586357858e00758aea26a39df74c53be5d803f5654d883e", size = 112211, upload-time = "2025-04-24T18:53:24.956Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f6/e4/a4fea0c28787e6fadfdc6bf76f497c8136fdbb915f2942de1070918c1202/firebase_admin-6.8.0-py3-none-any.whl", hash = "sha256:84d5fd82859c4d27b63338c3fe9724667dfe400aa2fd9fef0efffbf6e23bca82", size 
= 134188, upload-time = "2025-04-24T18:53:23.182Z" }, +] + [[package]] name = "flatbuffers" version = "25.12.19" @@ -1034,6 +1084,164 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, ] +[[package]] +name = "google-api-core" +version = "2.30.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-auth" }, + { name = "googleapis-common-protos" }, + { name = "proto-plus" }, + { name = "protobuf" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/22/98/586ec94553b569080caef635f98a3723db36a38eac0e3d7eb3ea9d2e4b9a/google_api_core-2.30.0.tar.gz", hash = "sha256:02edfa9fab31e17fc0befb5f161b3bf93c9096d99aed584625f38065c511ad9b", size = 176959, upload-time = "2026-02-18T20:28:11.926Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/27/09c33d67f7e0dcf06d7ac17d196594e66989299374bfb0d4331d1038e76b/google_api_core-2.30.0-py3-none-any.whl", hash = "sha256:80be49ee937ff9aba0fd79a6eddfde35fe658b9953ab9b79c57dd7061afa8df5", size = 173288, upload-time = "2026-02-18T20:28:10.367Z" }, +] + +[package.optional-dependencies] +grpc = [ + { name = "grpcio" }, + { name = "grpcio-status" }, +] + +[[package]] +name = "google-api-python-client" +version = "2.193.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core" }, + { name = "google-auth" }, + { name = "google-auth-httplib2" }, + { name = "httplib2" }, + { name = "uritemplate" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/f4/e14b6815d3b1885328dd209676a3a4c704882743ac94e18ef0093894f5c8/google_api_python_client-2.193.0.tar.gz", hash = "sha256:8f88d16e89d11341e0a8b199cafde0fb7e6b44260dffb88d451577cbd1bb5d33", size = 14281006, 
upload-time = "2026-03-17T18:25:29.415Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f0/6d/fe75167797790a56d17799b75e1129bb93f7ff061efc7b36e9731bd4be2b/google_api_python_client-2.193.0-py3-none-any.whl", hash = "sha256:c42aa324b822109901cfecab5dc4fc3915d35a7b376835233c916c70610322db", size = 14856490, upload-time = "2026-03-17T18:25:26.608Z" }, +] + +[[package]] +name = "google-auth" +version = "2.49.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "pyasn1-modules" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ea/80/6a696a07d3d3b0a92488933532f03dbefa4a24ab80fb231395b9a2a1be77/google_auth-2.49.1.tar.gz", hash = "sha256:16d40da1c3c5a0533f57d268fe72e0ebb0ae1cc3b567024122651c045d879b64", size = 333825, upload-time = "2026-03-12T19:30:58.135Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/eb/c6c2478d8a8d633460be40e2a8a6f8f429171997a35a96f81d3b680dec83/google_auth-2.49.1-py3-none-any.whl", hash = "sha256:195ebe3dca18eddd1b3db5edc5189b76c13e96f29e73043b923ebcf3f1a860f7", size = 240737, upload-time = "2026-03-12T19:30:53.159Z" }, +] + +[[package]] +name = "google-auth-httplib2" +version = "0.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-auth" }, + { name = "httplib2" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d5/ad/c1f2b1175096a8d04cf202ad5ea6065f108d26be6fc7215876bde4a7981d/google_auth_httplib2-0.3.0.tar.gz", hash = "sha256:177898a0175252480d5ed916aeea183c2df87c1f9c26705d74ae6b951c268b0b", size = 11134, upload-time = "2025-12-15T22:13:51.825Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/99/d5/3c97526c8796d3caf5f4b3bed2b05e8a7102326f00a334e7a438237f3b22/google_auth_httplib2-0.3.0-py3-none-any.whl", hash = "sha256:426167e5df066e3f5a0fc7ea18768c08e7296046594ce4c8c409c2457dd1f776", size = 9529, upload-time = "2025-12-15T22:13:51.048Z" }, +] + +[[package]] +name = 
"google-cloud-core" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core" }, + { name = "google-auth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a6/03/ef0bc99d0e0faf4fdbe67ac445e18cdaa74824fd93cd069e7bb6548cb52d/google_cloud_core-2.5.0.tar.gz", hash = "sha256:7c1b7ef5c92311717bd05301aa1a91ffbc565673d3b0b4163a52d8413a186963", size = 36027, upload-time = "2025-10-29T23:17:39.513Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/20/bfa472e327c8edee00f04beecc80baeddd2ab33ee0e86fd7654da49d45e9/google_cloud_core-2.5.0-py3-none-any.whl", hash = "sha256:67d977b41ae6c7211ee830c7912e41003ea8194bff15ae7d72fd6f51e57acabc", size = 29469, upload-time = "2025-10-29T23:17:38.548Z" }, +] + +[[package]] +name = "google-cloud-firestore" +version = "2.25.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core", extra = ["grpc"] }, + { name = "google-auth" }, + { name = "google-cloud-core" }, + { name = "grpcio" }, + { name = "proto-plus" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fd/84/4acfdc4d29de4eae4dd6ac1267611421c2a36975c473b2a86fd2e9752e75/google_cloud_firestore-2.25.0.tar.gz", hash = "sha256:9bca3b504f5473048eeab603b9bec69bbeffcdddc4e5fc65cdcc01b449628fc0", size = 621860, upload-time = "2026-03-12T19:31:06.062Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/30/f2/29abde0fcd98f32ce61fc2064a1a3e39b4e64746537409f8ea5521f79afb/google_cloud_firestore-2.25.0-py3-none-any.whl", hash = "sha256:c933a7696b7dd160953d60413ab9481387f6dd8367e77dd750d841689773104a", size = 416714, upload-time = "2026-03-12T19:30:36.674Z" }, +] + +[[package]] +name = "google-cloud-storage" +version = "3.10.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core" }, + { name = "google-auth" }, + { name = "google-cloud-core" }, + { name = "google-crc32c" }, + 
{ name = "google-resumable-media" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7a/e3/747759eebc72e420c25903d6bc231d0ceb110b66ac7e6ee3f350417152cd/google_cloud_storage-3.10.0.tar.gz", hash = "sha256:1aeebf097c27d718d84077059a28d7e87f136f3700212215f1ceeae1d1c5d504", size = 17309829, upload-time = "2026-03-18T15:54:11.875Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/e2/d58442f4daee5babd9255cf492a1f3d114357164072f8339a22a3ad460a2/google_cloud_storage-3.10.0-py3-none-any.whl", hash = "sha256:0072e7783b201e45af78fd9779894cdb6bec2bf922ee932f3fcc16f8bce9b9a3", size = 324382, upload-time = "2026-03-18T15:54:10.091Z" }, +] + +[[package]] +name = "google-crc32c" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/03/41/4b9c02f99e4c5fb477122cd5437403b552873f014616ac1d19ac8221a58d/google_crc32c-1.8.0.tar.gz", hash = "sha256:a428e25fb7691024de47fecfbff7ff957214da51eddded0da0ae0e0f03a2cf79", size = 14192, upload-time = "2025-12-16T00:35:25.142Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/5f/7307325b1198b59324c0fa9807cafb551afb65e831699f2ce211ad5c8240/google_crc32c-1.8.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:4b8286b659c1335172e39563ab0a768b8015e88e08329fa5321f774275fc3113", size = 31300, upload-time = "2025-12-16T00:21:56.723Z" }, + { url = "https://files.pythonhosted.org/packages/21/8e/58c0d5d86e2220e6a37befe7e6a94dd2f6006044b1a33edf1ff6d9f7e319/google_crc32c-1.8.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:2a3dc3318507de089c5384cc74d54318401410f82aa65b2d9cdde9d297aca7cb", size = 30867, upload-time = "2025-12-16T00:38:31.302Z" }, + { url = "https://files.pythonhosted.org/packages/ce/a9/a780cc66f86335a6019f557a8aaca8fbb970728f0efd2430d15ff1beae0e/google_crc32c-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:14f87e04d613dfa218d6135e81b78272c3b904e2a7053b841481b38a7d901411", size = 33364, upload-time = "2025-12-16T00:40:22.96Z" }, + { url = "https://files.pythonhosted.org/packages/21/3f/3457ea803db0198c9aaca2dd373750972ce28a26f00544b6b85088811939/google_crc32c-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cb5c869c2923d56cb0c8e6bcdd73c009c36ae39b652dbe46a05eb4ef0ad01454", size = 33740, upload-time = "2025-12-16T00:40:23.96Z" }, + { url = "https://files.pythonhosted.org/packages/df/c0/87c2073e0c72515bb8733d4eef7b21548e8d189f094b5dad20b0ecaf64f6/google_crc32c-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:3cc0c8912038065eafa603b238abf252e204accab2a704c63b9e14837a854962", size = 34437, upload-time = "2025-12-16T00:35:21.395Z" }, + { url = "https://files.pythonhosted.org/packages/d1/db/000f15b41724589b0e7bc24bc7a8967898d8d3bc8caf64c513d91ef1f6c0/google_crc32c-1.8.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:3ebb04528e83b2634857f43f9bb8ef5b2bbe7f10f140daeb01b58f972d04736b", size = 31297, upload-time = "2025-12-16T00:23:20.709Z" }, + { url = "https://files.pythonhosted.org/packages/d7/0d/8ebed0c39c53a7e838e2a486da8abb0e52de135f1b376ae2f0b160eb4c1a/google_crc32c-1.8.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:450dc98429d3e33ed2926fc99ee81001928d63460f8538f21a5d6060912a8e27", size = 30867, upload-time = "2025-12-16T00:43:14.628Z" }, + { url = "https://files.pythonhosted.org/packages/ce/42/b468aec74a0354b34c8cbf748db20d6e350a68a2b0912e128cabee49806c/google_crc32c-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3b9776774b24ba76831609ffbabce8cdf6fa2bd5e9df37b594221c7e333a81fa", size = 33344, upload-time = "2025-12-16T00:40:24.742Z" }, + { url = "https://files.pythonhosted.org/packages/1c/e8/b33784d6fc77fb5062a8a7854e43e1e618b87d5ddf610a88025e4de6226e/google_crc32c-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:89c17d53d75562edfff86679244830599ee0a48efc216200691de8b02ab6b2b8", size = 33694, upload-time = "2025-12-16T00:40:25.505Z" }, + { url = "https://files.pythonhosted.org/packages/92/b1/d3cbd4d988afb3d8e4db94ca953df429ed6db7282ed0e700d25e6c7bfc8d/google_crc32c-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:57a50a9035b75643996fbf224d6661e386c7162d1dfdab9bc4ca790947d1007f", size = 34435, upload-time = "2025-12-16T00:35:22.107Z" }, + { url = "https://files.pythonhosted.org/packages/21/88/8ecf3c2b864a490b9e7010c84fd203ec8cf3b280651106a3a74dd1b0ca72/google_crc32c-1.8.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:e6584b12cb06796d285d09e33f63309a09368b9d806a551d8036a4207ea43697", size = 31301, upload-time = "2025-12-16T00:24:48.527Z" }, + { url = "https://files.pythonhosted.org/packages/36/c6/f7ff6c11f5ca215d9f43d3629163727a272eabc356e5c9b2853df2bfe965/google_crc32c-1.8.0-cp314-cp314-macosx_12_0_x86_64.whl", hash = "sha256:f4b51844ef67d6cf2e9425983274da75f18b1597bb2c998e1c0a0e8d46f8f651", size = 30868, upload-time = "2025-12-16T00:48:12.163Z" }, + { url = "https://files.pythonhosted.org/packages/56/15/c25671c7aad70f8179d858c55a6ae8404902abe0cdcf32a29d581792b491/google_crc32c-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b0d1a7afc6e8e4635564ba8aa5c0548e3173e41b6384d7711a9123165f582de2", size = 33381, upload-time = "2025-12-16T00:40:26.268Z" }, + { url = "https://files.pythonhosted.org/packages/42/fa/f50f51260d7b0ef5d4898af122d8a7ec5a84e2984f676f746445f783705f/google_crc32c-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8b3f68782f3cbd1bce027e48768293072813469af6a61a86f6bb4977a4380f21", size = 33734, upload-time = "2025-12-16T00:40:27.028Z" }, + { url = "https://files.pythonhosted.org/packages/08/a5/7b059810934a09fb3ccb657e0843813c1fee1183d3bc2c8041800374aa2c/google_crc32c-1.8.0-cp314-cp314-win_amd64.whl", hash = 
"sha256:d511b3153e7011a27ab6ee6bb3a5404a55b994dc1a7322c0b87b29606d9790e2", size = 34878, upload-time = "2025-12-16T00:35:23.142Z" }, +] + +[[package]] +name = "google-resumable-media" +version = "2.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-crc32c" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/64/d7/520b62a35b23038ff005e334dba3ffc75fcf583bee26723f1fd8fd4b6919/google_resumable_media-2.8.0.tar.gz", hash = "sha256:f1157ed8b46994d60a1bc432544db62352043113684d4e030ee02e77ebe9a1ae", size = 2163265, upload-time = "2025-11-17T15:38:06.659Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/0b/93afde9cfe012260e9fe1522f35c9b72d6ee222f316586b1f23ecf44d518/google_resumable_media-2.8.0-py3-none-any.whl", hash = "sha256:dd14a116af303845a8d932ddae161a26e86cc229645bc98b39f026f9b1717582", size = 81340, upload-time = "2025-11-17T15:38:05.594Z" }, +] + +[[package]] +name = "googleapis-common-protos" +version = "1.73.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/99/96/a0205167fa0154f4a542fd6925bdc63d039d88dab3588b875078107e6f06/googleapis_common_protos-1.73.0.tar.gz", hash = "sha256:778d07cd4fbeff84c6f7c72102f0daf98fa2bfd3fa8bea426edc545588da0b5a", size = 147323, upload-time = "2026-03-06T21:53:09.727Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/28/23eea8acd65972bbfe295ce3666b28ac510dfcb115fac089d3edb0feb00a/googleapis_common_protos-1.73.0-py3-none-any.whl", hash = "sha256:dfdaaa2e860f242046be561e6d6cb5c5f1541ae02cfbcb034371aadb2942b4e8", size = 297578, upload-time = "2026-03-06T21:52:33.933Z" }, +] + [[package]] name = "greenlet" version = "3.3.2" @@ -1077,6 +1285,61 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/29/4b/45d90626aef8e65336bed690106d1382f7a43665e2249017e9527df8823b/greenlet-3.3.2-cp314-cp314t-win_amd64.whl", hash = 
"sha256:c04c5e06ec3e022cbfe2cd4a846e1d4e50087444f875ff6d2c2ad8445495cf1a", size = 237086, upload-time = "2026-02-20T20:20:45.786Z" }, ] +[[package]] +name = "grpcio" +version = "1.78.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/8a/3d098f35c143a89520e568e6539cc098fcd294495910e359889ce8741c84/grpcio-1.78.0.tar.gz", hash = "sha256:7382b95189546f375c174f53a5fa873cef91c4b8005faa05cc5b3beea9c4f1c5", size = 12852416, upload-time = "2026-02-06T09:57:18.093Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4e/f4/7384ed0178203d6074446b3c4f46c90a22ddf7ae0b3aee521627f54cfc2a/grpcio-1.78.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:f9ab915a267fc47c7e88c387a3a28325b58c898e23d4995f765728f4e3dedb97", size = 5913985, upload-time = "2026-02-06T09:55:26.832Z" }, + { url = "https://files.pythonhosted.org/packages/81/ed/be1caa25f06594463f685b3790b320f18aea49b33166f4141bfdc2bfb236/grpcio-1.78.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3f8904a8165ab21e07e58bf3e30a73f4dffc7a1e0dbc32d51c61b5360d26f43e", size = 11811853, upload-time = "2026-02-06T09:55:29.224Z" }, + { url = "https://files.pythonhosted.org/packages/24/a7/f06d151afc4e64b7e3cc3e872d331d011c279aaab02831e40a81c691fb65/grpcio-1.78.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:859b13906ce098c0b493af92142ad051bf64c7870fa58a123911c88606714996", size = 6475766, upload-time = "2026-02-06T09:55:31.825Z" }, + { url = "https://files.pythonhosted.org/packages/8a/a8/4482922da832ec0082d0f2cc3a10976d84a7424707f25780b82814aafc0a/grpcio-1.78.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b2342d87af32790f934a79c3112641e7b27d63c261b8b4395350dad43eff1dc7", size = 7170027, upload-time = "2026-02-06T09:55:34.7Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/bf/f4a3b9693e35d25b24b0b39fa46d7d8a3c439e0a3036c3451764678fec20/grpcio-1.78.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:12a771591ae40bc65ba67048fa52ef4f0e6db8279e595fd349f9dfddeef571f9", size = 6690766, upload-time = "2026-02-06T09:55:36.902Z" }, + { url = "https://files.pythonhosted.org/packages/c7/b9/521875265cc99fe5ad4c5a17010018085cae2810a928bf15ebe7d8bcd9cc/grpcio-1.78.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:185dea0d5260cbb2d224c507bf2a5444d5abbb1fa3594c1ed7e4c709d5eb8383", size = 7266161, upload-time = "2026-02-06T09:55:39.824Z" }, + { url = "https://files.pythonhosted.org/packages/05/86/296a82844fd40a4ad4a95f100b55044b4f817dece732bf686aea1a284147/grpcio-1.78.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:51b13f9aed9d59ee389ad666b8c2214cc87b5de258fa712f9ab05f922e3896c6", size = 8253303, upload-time = "2026-02-06T09:55:42.353Z" }, + { url = "https://files.pythonhosted.org/packages/f3/e4/ea3c0caf5468537f27ad5aab92b681ed7cc0ef5f8c9196d3fd42c8c2286b/grpcio-1.78.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fd5f135b1bd58ab088930b3c613455796dfa0393626a6972663ccdda5b4ac6ce", size = 7698222, upload-time = "2026-02-06T09:55:44.629Z" }, + { url = "https://files.pythonhosted.org/packages/d7/47/7f05f81e4bb6b831e93271fb12fd52ba7b319b5402cbc101d588f435df00/grpcio-1.78.0-cp312-cp312-win32.whl", hash = "sha256:94309f498bcc07e5a7d16089ab984d42ad96af1d94b5a4eb966a266d9fcabf68", size = 4066123, upload-time = "2026-02-06T09:55:47.644Z" }, + { url = "https://files.pythonhosted.org/packages/ad/e7/d6914822c88aa2974dbbd10903d801a28a19ce9cd8bad7e694cbbcf61528/grpcio-1.78.0-cp312-cp312-win_amd64.whl", hash = "sha256:9566fe4ababbb2610c39190791e5b829869351d14369603702e890ef3ad2d06e", size = 4797657, upload-time = "2026-02-06T09:55:49.86Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/a9/8f75894993895f361ed8636cd9237f4ab39ef87fd30db17467235ed1c045/grpcio-1.78.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:ce3a90455492bf8bfa38e56fbbe1dbd4f872a3d8eeaf7337dc3b1c8aa28c271b", size = 5920143, upload-time = "2026-02-06T09:55:52.035Z" }, + { url = "https://files.pythonhosted.org/packages/55/06/0b78408e938ac424100100fd081189451b472236e8a3a1f6500390dc4954/grpcio-1.78.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:2bf5e2e163b356978b23652c4818ce4759d40f4712ee9ec5a83c4be6f8c23a3a", size = 11803926, upload-time = "2026-02-06T09:55:55.494Z" }, + { url = "https://files.pythonhosted.org/packages/88/93/b59fe7832ff6ae3c78b813ea43dac60e295fa03606d14d89d2e0ec29f4f3/grpcio-1.78.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8f2ac84905d12918e4e55a16da17939eb63e433dc11b677267c35568aa63fc84", size = 6478628, upload-time = "2026-02-06T09:55:58.533Z" }, + { url = "https://files.pythonhosted.org/packages/ed/df/e67e3734527f9926b7d9c0dde6cd998d1d26850c3ed8eeec81297967ac67/grpcio-1.78.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b58f37edab4a3881bc6c9bca52670610e0c9ca14e2ea3cf9debf185b870457fb", size = 7173574, upload-time = "2026-02-06T09:56:01.786Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/cc03fffb07bfba982a9ec097b164e8835546980aec25ecfa5f9c1a47e022/grpcio-1.78.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:735e38e176a88ce41840c21bb49098ab66177c64c82426e24e0082500cc68af5", size = 6692639, upload-time = "2026-02-06T09:56:04.529Z" }, + { url = "https://files.pythonhosted.org/packages/bf/9a/289c32e301b85bdb67d7ec68b752155e674ee3ba2173a1858f118e399ef3/grpcio-1.78.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2045397e63a7a0ee7957c25f7dbb36ddc110e0cfb418403d110c0a7a68a844e9", size = 7268838, upload-time = "2026-02-06T09:56:08.397Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/79/1be93f32add280461fa4773880196572563e9c8510861ac2da0ea0f892b6/grpcio-1.78.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9f136fbafe7ccf4ac7e8e0c28b31066e810be52d6e344ef954a3a70234e1702", size = 8251878, upload-time = "2026-02-06T09:56:10.914Z" }, + { url = "https://files.pythonhosted.org/packages/65/65/793f8e95296ab92e4164593674ae6291b204bb5f67f9d4a711489cd30ffa/grpcio-1.78.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:748b6138585379c737adc08aeffd21222abbda1a86a0dca2a39682feb9196c20", size = 7695412, upload-time = "2026-02-06T09:56:13.593Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9f/1e233fe697ecc82845942c2822ed06bb522e70d6771c28d5528e4c50f6a4/grpcio-1.78.0-cp313-cp313-win32.whl", hash = "sha256:271c73e6e5676afe4fc52907686670c7cea22ab2310b76a59b678403ed40d670", size = 4064899, upload-time = "2026-02-06T09:56:15.601Z" }, + { url = "https://files.pythonhosted.org/packages/4d/27/d86b89e36de8a951501fb06a0f38df19853210f341d0b28f83f4aa0ffa08/grpcio-1.78.0-cp313-cp313-win_amd64.whl", hash = "sha256:f2d4e43ee362adfc05994ed479334d5a451ab7bc3f3fee1b796b8ca66895acb4", size = 4797393, upload-time = "2026-02-06T09:56:17.882Z" }, + { url = "https://files.pythonhosted.org/packages/29/f2/b56e43e3c968bfe822fa6ce5bca10d5c723aa40875b48791ce1029bb78c7/grpcio-1.78.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:e87cbc002b6f440482b3519e36e1313eb5443e9e9e73d6a52d43bd2004fcfd8e", size = 5920591, upload-time = "2026-02-06T09:56:20.758Z" }, + { url = "https://files.pythonhosted.org/packages/5d/81/1f3b65bd30c334167bfa8b0d23300a44e2725ce39bba5b76a2460d85f745/grpcio-1.78.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:c41bc64626db62e72afec66b0c8a0da76491510015417c127bfc53b2fe6d7f7f", size = 11813685, upload-time = "2026-02-06T09:56:24.315Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/1c/bbe2f8216a5bd3036119c544d63c2e592bdf4a8ec6e4a1867592f4586b26/grpcio-1.78.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8dfffba826efcf366b1e3ccc37e67afe676f290e13a3b48d31a46739f80a8724", size = 6487803, upload-time = "2026-02-06T09:56:27.367Z" }, + { url = "https://files.pythonhosted.org/packages/16/5c/a6b2419723ea7ddce6308259a55e8e7593d88464ce8db9f4aa857aba96fa/grpcio-1.78.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:74be1268d1439eaaf552c698cdb11cd594f0c49295ae6bb72c34ee31abbe611b", size = 7173206, upload-time = "2026-02-06T09:56:29.876Z" }, + { url = "https://files.pythonhosted.org/packages/df/1e/b8801345629a415ea7e26c83d75eb5dbe91b07ffe5210cc517348a8d4218/grpcio-1.78.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:be63c88b32e6c0f1429f1398ca5c09bc64b0d80950c8bb7807d7d7fb36fb84c7", size = 6693826, upload-time = "2026-02-06T09:56:32.305Z" }, + { url = "https://files.pythonhosted.org/packages/34/84/0de28eac0377742679a510784f049738a80424b17287739fc47d63c2439e/grpcio-1.78.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:3c586ac70e855c721bda8f548d38c3ca66ac791dc49b66a8281a1f99db85e452", size = 7277897, upload-time = "2026-02-06T09:56:34.915Z" }, + { url = "https://files.pythonhosted.org/packages/ca/9c/ad8685cfe20559a9edb66f735afdcb2b7d3de69b13666fdfc542e1916ebd/grpcio-1.78.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:35eb275bf1751d2ffbd8f57cdbc46058e857cf3971041521b78b7db94bdaf127", size = 8252404, upload-time = "2026-02-06T09:56:37.553Z" }, + { url = "https://files.pythonhosted.org/packages/3c/05/33a7a4985586f27e1de4803887c417ec7ced145ebd069bc38a9607059e2b/grpcio-1.78.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:207db540302c884b8848036b80db352a832b99dfdf41db1eb554c2c2c7800f65", size = 7696837, upload-time = "2026-02-06T09:56:40.173Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/77/7382241caf88729b106e49e7d18e3116216c778e6a7e833826eb96de22f7/grpcio-1.78.0-cp314-cp314-win32.whl", hash = "sha256:57bab6deef2f4f1ca76cc04565df38dc5713ae6c17de690721bdf30cb1e0545c", size = 4142439, upload-time = "2026-02-06T09:56:43.258Z" }, + { url = "https://files.pythonhosted.org/packages/48/b2/b096ccce418882fbfda4f7496f9357aaa9a5af1896a9a7f60d9f2b275a06/grpcio-1.78.0-cp314-cp314-win_amd64.whl", hash = "sha256:dce09d6116df20a96acfdbf85e4866258c3758180e8c49845d6ba8248b6d0bbb", size = 4929852, upload-time = "2026-02-06T09:56:45.885Z" }, +] + +[[package]] +name = "grpcio-status" +version = "1.78.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos" }, + { name = "grpcio" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8a/cd/89ce482a931b543b92cdd9b2888805518c4620e0094409acb8c81dd4610a/grpcio_status-1.78.0.tar.gz", hash = "sha256:a34cfd28101bfea84b5aa0f936b4b423019e9213882907166af6b3bddc59e189", size = 13808, upload-time = "2026-02-06T10:01:48.034Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/8a/1241ec22c41028bddd4a052ae9369267b4475265ad0ce7140974548dc3fa/grpcio_status-1.78.0-py3-none-any.whl", hash = "sha256:b492b693d4bf27b47a6c32590701724f1d3b9444b36491878fb71f6208857f34", size = 14523, upload-time = "2026-02-06T10:01:32.584Z" }, +] + [[package]] name = "h11" version = "0.16.0" @@ -1086,6 +1349,37 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, ] +[[package]] +name = "h2" +version = "2.6.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "hpack" }, + { name = "hyperframe" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/c9/ad/73a6c1a40eadbf9eef93fe16285a366c834cbd61783c30e6c23ef4b11e53/h2-2.6.2.tar.gz", hash = "sha256:af35878673c83a44afbc12b13ac91a489da2819b5dc1e11768f3c2406f740fe9", size = 169942, upload-time = "2017-04-03T07:56:34.319Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/8b/8d5610e8ddbcde6d014907526b4c6c294520a7233fc456d7be1fcade3bbc/h2-2.6.2-py2.py3-none-any.whl", hash = "sha256:93cbd1013a2218539af05cdf9fc37b786655b93bbc94f5296b7dabd1c5cadf41", size = 71894, upload-time = "2017-04-03T07:56:30.674Z" }, +] + +[[package]] +name = "hpack" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/44/f1/b4440e46e265a29c0cb7b09b6daec6edf93c79eae713cfed93fbbf8716c5/hpack-3.0.0.tar.gz", hash = "sha256:8eec9c1f4bfae3408a3f30500261f7e6a65912dc138526ea054f9ad98892e9d2", size = 43321, upload-time = "2017-03-29T13:00:11.691Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/cc/e53517f4a1e13f74776ca93271caef378dadec14d71c61c949d759d3db69/hpack-3.0.0-py2.py3-none-any.whl", hash = "sha256:0edd79eda27a53ba5be2dfabf3b15780928a0dff6eb0c60a3d6767720e970c89", size = 38552, upload-time = "2017-03-29T13:00:09.659Z" }, +] + +[[package]] +name = "http-ece" +version = "1.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7c/af/249d1576653b69c20b9ac30e284b63bd94af6a175d72d87813235caf2482/http_ece-1.2.1.tar.gz", hash = "sha256:8c6ab23116bbf6affda894acfd5f2ca0fb8facbcbb72121c11c75c33e7ce8cff", size = 8830, upload-time = "2024-08-08T00:10:47.301Z" } + [[package]] name = "httpcore" version = "1.0.9" @@ -1099,6 +1393,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = 
"sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, ] +[[package]] +name = "httplib2" +version = "0.31.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyparsing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c1/1f/e86365613582c027dda5ddb64e1010e57a3d53e99ab8a72093fa13d565ec/httplib2-0.31.2.tar.gz", hash = "sha256:385e0869d7397484f4eab426197a4c020b606edd43372492337c0b4010ae5d24", size = 250800, upload-time = "2026-01-23T11:04:44.165Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2f/90/fd509079dfcab01102c0fdd87f3a9506894bc70afcf9e9785ef6b2b3aff6/httplib2-0.31.2-py3-none-any.whl", hash = "sha256:dbf0c2fa3862acf3c55c078ea9c0bc4481d7dc5117cae71be9514912cf9f8349", size = 91099, upload-time = "2026-01-23T11:04:42.78Z" }, +] + [[package]] name = "httptools" version = "0.7.1" @@ -1143,6 +1449,28 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, ] +[[package]] +name = "hyper" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "h2" }, + { name = "hyperframe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/af/f7/f60d8032f331994f29ce2d79fb5d7fe1e3c1355cac0078c070cf4feb3b52/hyper-0.7.0.tar.gz", hash = "sha256:12c82eacd122a659673484c1ea0d34576430afbe5aa6b8f63fe37fcb06a2458c", size = 631878, upload-time = "2016-09-27T12:58:46.21Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/96/c3/e77072050a8d3a22255695d0cd7fde19bfe962364a6f6870ef47a9f9f66b/hyper-0.7.0-py2.py3-none-any.whl", hash = "sha256:069514f54231fb7b5df2fb910a114663a83306d5296f588fffcb0a9be19407fc", size = 269790, upload-time = 
"2016-09-27T12:58:42.841Z" }, +] + +[[package]] +name = "hyperframe" +version = "3.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/50/96/7080c938d2b06105365bae946c77c78a32d9e763eaa05d0e431b02d7bc12/hyperframe-3.2.0.tar.gz", hash = "sha256:05f0e063e117c16fcdd13c12c93a4424a2c40668abfac3bb419a10f57698204e", size = 16177, upload-time = "2016-02-02T14:45:41.109Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d8/89/44ff46f15dba53a8c16cb8cab89ecb1e44f8aa211628b43d341004cfcf7a/hyperframe-3.2.0-py2.py3-none-any.whl", hash = "sha256:4dcab11967482d400853b396d042038e4c492a15a5d2f57259e2b5f89a32f755", size = 13636, upload-time = "2016-02-02T14:45:48.989Z" }, +] + [[package]] name = "idna" version = "3.11" @@ -1576,6 +1904,50 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198, upload-time = "2023-03-07T16:47:09.197Z" }, ] +[[package]] +name = "msgpack" +version = "1.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4d/f2/bfb55a6236ed8725a96b0aa3acbd0ec17588e6a2c3b62a93eb513ed8783f/msgpack-1.1.2.tar.gz", hash = "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e", size = 173581, upload-time = "2025-10-08T09:15:56.596Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ad/bd/8b0d01c756203fbab65d265859749860682ccd2a59594609aeec3a144efa/msgpack-1.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:70a0dff9d1f8da25179ffcf880e10cf1aad55fdb63cd59c9a49a1b82290062aa", size = 81939, upload-time = "2025-10-08T09:15:01.472Z" }, + { url = "https://files.pythonhosted.org/packages/34/68/ba4f155f793a74c1483d4bdef136e1023f7bcba557f0db4ef3db3c665cf1/msgpack-1.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:446abdd8b94b55c800ac34b102dffd2f6aa0ce643c55dfc017ad89347db3dbdb", size = 85064, upload-time = "2025-10-08T09:15:03.764Z" }, + { url = "https://files.pythonhosted.org/packages/f2/60/a064b0345fc36c4c3d2c743c82d9100c40388d77f0b48b2f04d6041dbec1/msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f", size = 417131, upload-time = "2025-10-08T09:15:05.136Z" }, + { url = "https://files.pythonhosted.org/packages/65/92/a5100f7185a800a5d29f8d14041f61475b9de465ffcc0f3b9fba606e4505/msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42", size = 427556, upload-time = "2025-10-08T09:15:06.837Z" }, + { url = "https://files.pythonhosted.org/packages/f5/87/ffe21d1bf7d9991354ad93949286f643b2bb6ddbeab66373922b44c3b8cc/msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9", size = 404920, upload-time = "2025-10-08T09:15:08.179Z" }, + { url = "https://files.pythonhosted.org/packages/ff/41/8543ed2b8604f7c0d89ce066f42007faac1eaa7d79a81555f206a5cdb889/msgpack-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be52a8fc79e45b0364210eef5234a7cf8d330836d0a64dfbb878efa903d84620", size = 415013, upload-time = "2025-10-08T09:15:09.83Z" }, + { url = "https://files.pythonhosted.org/packages/41/0d/2ddfaa8b7e1cee6c490d46cb0a39742b19e2481600a7a0e96537e9c22f43/msgpack-1.1.2-cp312-cp312-win32.whl", hash = "sha256:1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029", size = 65096, upload-time = "2025-10-08T09:15:11.11Z" }, + { url = "https://files.pythonhosted.org/packages/8c/ec/d431eb7941fb55a31dd6ca3404d41fbb52d99172df2e7707754488390910/msgpack-1.1.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b", size = 72708, upload-time = "2025-10-08T09:15:12.554Z" }, + { url = "https://files.pythonhosted.org/packages/c5/31/5b1a1f70eb0e87d1678e9624908f86317787b536060641d6798e3cf70ace/msgpack-1.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69", size = 64119, upload-time = "2025-10-08T09:15:13.589Z" }, + { url = "https://files.pythonhosted.org/packages/6b/31/b46518ecc604d7edf3a4f94cb3bf021fc62aa301f0cb849936968164ef23/msgpack-1.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4efd7b5979ccb539c221a4c4e16aac1a533efc97f3b759bb5a5ac9f6d10383bf", size = 81212, upload-time = "2025-10-08T09:15:14.552Z" }, + { url = "https://files.pythonhosted.org/packages/92/dc/c385f38f2c2433333345a82926c6bfa5ecfff3ef787201614317b58dd8be/msgpack-1.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42eefe2c3e2af97ed470eec850facbe1b5ad1d6eacdbadc42ec98e7dcf68b4b7", size = 84315, upload-time = "2025-10-08T09:15:15.543Z" }, + { url = "https://files.pythonhosted.org/packages/d3/68/93180dce57f684a61a88a45ed13047558ded2be46f03acb8dec6d7c513af/msgpack-1.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1fdf7d83102bf09e7ce3357de96c59b627395352a4024f6e2458501f158bf999", size = 412721, upload-time = "2025-10-08T09:15:16.567Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ba/459f18c16f2b3fc1a1ca871f72f07d70c07bf768ad0a507a698b8052ac58/msgpack-1.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fac4be746328f90caa3cd4bc67e6fe36ca2bf61d5c6eb6d895b6527e3f05071e", size = 424657, upload-time = "2025-10-08T09:15:17.825Z" }, + { url = "https://files.pythonhosted.org/packages/38/f8/4398c46863b093252fe67368b44edc6c13b17f4e6b0e4929dbf0bdb13f23/msgpack-1.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:fffee09044073e69f2bad787071aeec727183e7580443dfeb8556cbf1978d162", size = 402668, upload-time = "2025-10-08T09:15:19.003Z" }, + { url = "https://files.pythonhosted.org/packages/28/ce/698c1eff75626e4124b4d78e21cca0b4cc90043afb80a507626ea354ab52/msgpack-1.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5928604de9b032bc17f5099496417f113c45bc6bc21b5c6920caf34b3c428794", size = 419040, upload-time = "2025-10-08T09:15:20.183Z" }, + { url = "https://files.pythonhosted.org/packages/67/32/f3cd1667028424fa7001d82e10ee35386eea1408b93d399b09fb0aa7875f/msgpack-1.1.2-cp313-cp313-win32.whl", hash = "sha256:a7787d353595c7c7e145e2331abf8b7ff1e6673a6b974ded96e6d4ec09f00c8c", size = 65037, upload-time = "2025-10-08T09:15:21.416Z" }, + { url = "https://files.pythonhosted.org/packages/74/07/1ed8277f8653c40ebc65985180b007879f6a836c525b3885dcc6448ae6cb/msgpack-1.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:a465f0dceb8e13a487e54c07d04ae3ba131c7c5b95e2612596eafde1dccf64a9", size = 72631, upload-time = "2025-10-08T09:15:22.431Z" }, + { url = "https://files.pythonhosted.org/packages/e5/db/0314e4e2db56ebcf450f277904ffd84a7988b9e5da8d0d61ab2d057df2b6/msgpack-1.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:e69b39f8c0aa5ec24b57737ebee40be647035158f14ed4b40e6f150077e21a84", size = 64118, upload-time = "2025-10-08T09:15:23.402Z" }, + { url = "https://files.pythonhosted.org/packages/22/71/201105712d0a2ff07b7873ed3c220292fb2ea5120603c00c4b634bcdafb3/msgpack-1.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e23ce8d5f7aa6ea6d2a2b326b4ba46c985dbb204523759984430db7114f8aa00", size = 81127, upload-time = "2025-10-08T09:15:24.408Z" }, + { url = "https://files.pythonhosted.org/packages/1b/9f/38ff9e57a2eade7bf9dfee5eae17f39fc0e998658050279cbb14d97d36d9/msgpack-1.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6c15b7d74c939ebe620dd8e559384be806204d73b4f9356320632d783d1f7939", size = 84981, upload-time = "2025-10-08T09:15:25.812Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/a9/3536e385167b88c2cc8f4424c49e28d49a6fc35206d4a8060f136e71f94c/msgpack-1.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99e2cb7b9031568a2a5c73aa077180f93dd2e95b4f8d3b8e14a73ae94a9e667e", size = 411885, upload-time = "2025-10-08T09:15:27.22Z" }, + { url = "https://files.pythonhosted.org/packages/2f/40/dc34d1a8d5f1e51fc64640b62b191684da52ca469da9cd74e84936ffa4a6/msgpack-1.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:180759d89a057eab503cf62eeec0aa61c4ea1200dee709f3a8e9397dbb3b6931", size = 419658, upload-time = "2025-10-08T09:15:28.4Z" }, + { url = "https://files.pythonhosted.org/packages/3b/ef/2b92e286366500a09a67e03496ee8b8ba00562797a52f3c117aa2b29514b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:04fb995247a6e83830b62f0b07bf36540c213f6eac8e851166d8d86d83cbd014", size = 403290, upload-time = "2025-10-08T09:15:29.764Z" }, + { url = "https://files.pythonhosted.org/packages/78/90/e0ea7990abea5764e4655b8177aa7c63cdfa89945b6e7641055800f6c16b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8e22ab046fa7ede9e36eeb4cfad44d46450f37bb05d5ec482b02868f451c95e2", size = 415234, upload-time = "2025-10-08T09:15:31.022Z" }, + { url = "https://files.pythonhosted.org/packages/72/4e/9390aed5db983a2310818cd7d3ec0aecad45e1f7007e0cda79c79507bb0d/msgpack-1.1.2-cp314-cp314-win32.whl", hash = "sha256:80a0ff7d4abf5fecb995fcf235d4064b9a9a8a40a3ab80999e6ac1e30b702717", size = 66391, upload-time = "2025-10-08T09:15:32.265Z" }, + { url = "https://files.pythonhosted.org/packages/6e/f1/abd09c2ae91228c5f3998dbd7f41353def9eac64253de3c8105efa2082f7/msgpack-1.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:9ade919fac6a3e7260b7f64cea89df6bec59104987cbea34d34a2fa15d74310b", size = 73787, upload-time = "2025-10-08T09:15:33.219Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/b0/9d9f667ab48b16ad4115c1935d94023b82b3198064cb84a123e97f7466c1/msgpack-1.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:59415c6076b1e30e563eb732e23b994a61c159cec44deaf584e5cc1dd662f2af", size = 66453, upload-time = "2025-10-08T09:15:34.225Z" }, + { url = "https://files.pythonhosted.org/packages/16/67/93f80545eb1792b61a217fa7f06d5e5cb9e0055bed867f43e2b8e012e137/msgpack-1.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:897c478140877e5307760b0ea66e0932738879e7aa68144d9b78ea4c8302a84a", size = 85264, upload-time = "2025-10-08T09:15:35.61Z" }, + { url = "https://files.pythonhosted.org/packages/87/1c/33c8a24959cf193966ef11a6f6a2995a65eb066bd681fd085afd519a57ce/msgpack-1.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a668204fa43e6d02f89dbe79a30b0d67238d9ec4c5bd8a940fc3a004a47b721b", size = 89076, upload-time = "2025-10-08T09:15:36.619Z" }, + { url = "https://files.pythonhosted.org/packages/fc/6b/62e85ff7193663fbea5c0254ef32f0c77134b4059f8da89b958beb7696f3/msgpack-1.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5559d03930d3aa0f3aacb4c42c776af1a2ace2611871c84a75afe436695e6245", size = 435242, upload-time = "2025-10-08T09:15:37.647Z" }, + { url = "https://files.pythonhosted.org/packages/c1/47/5c74ecb4cc277cf09f64e913947871682ffa82b3b93c8dad68083112f412/msgpack-1.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:70c5a7a9fea7f036b716191c29047374c10721c389c21e9ffafad04df8c52c90", size = 432509, upload-time = "2025-10-08T09:15:38.794Z" }, + { url = "https://files.pythonhosted.org/packages/24/a4/e98ccdb56dc4e98c929a3f150de1799831c0a800583cde9fa022fa90602d/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f2cb069d8b981abc72b41aea1c580ce92d57c673ec61af4c500153a626cb9e20", size = 415957, upload-time = "2025-10-08T09:15:40.238Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/28/6951f7fb67bc0a4e184a6b38ab71a92d9ba58080b27a77d3e2fb0be5998f/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d62ce1f483f355f61adb5433ebfd8868c5f078d1a52d042b0a998682b4fa8c27", size = 422910, upload-time = "2025-10-08T09:15:41.505Z" }, + { url = "https://files.pythonhosted.org/packages/f0/03/42106dcded51f0a0b5284d3ce30a671e7bd3f7318d122b2ead66ad289fed/msgpack-1.1.2-cp314-cp314t-win32.whl", hash = "sha256:1d1418482b1ee984625d88aa9585db570180c286d942da463533b238b98b812b", size = 75197, upload-time = "2025-10-08T09:15:42.954Z" }, + { url = "https://files.pythonhosted.org/packages/15/86/d0071e94987f8db59d4eeb386ddc64d0bb9b10820a8d82bcd3e53eeb2da6/msgpack-1.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:5a46bf7e831d09470ad92dff02b8b1ac92175ca36b087f904a0519857c6be3ff", size = 85772, upload-time = "2025-10-08T09:15:43.954Z" }, + { url = "https://files.pythonhosted.org/packages/81/f2/08ace4142eb281c12701fc3b93a10795e4d4dc7f753911d836675050f886/msgpack-1.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d99ef64f349d5ec3293688e91486c5fdb925ed03807f64d98d205d2713c60b46", size = 70868, upload-time = "2025-10-08T09:15:44.959Z" }, +] + [[package]] name = "multidict" version = "6.7.1" @@ -2069,19 +2441,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, ] +[[package]] +name = "proto-plus" +version = "1.27.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3a/02/8832cde80e7380c600fbf55090b6ab7b62bd6825dbedde6d6657c15a1f8e/proto_plus-1.27.1.tar.gz", hash = "sha256:912a7460446625b792f6448bade9e55cd4e41e6ac10e27009ef71a7f317fa147", size = 56929, upload-time = 
"2026-02-02T17:34:49.035Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/79/ac273cbbf744691821a9cca88957257f41afe271637794975ca090b9588b/proto_plus-1.27.1-py3-none-any.whl", hash = "sha256:e4643061f3a4d0de092d62aa4ad09fa4756b2cbb89d4627f3985018216f9fefc", size = 50480, upload-time = "2026-02-02T17:34:47.339Z" }, +] + [[package]] name = "protobuf" -version = "7.34.0" +version = "6.33.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f2/00/04a2ab36b70a52d0356852979e08b44edde0435f2115dc66e25f2100f3ab/protobuf-7.34.0.tar.gz", hash = "sha256:3871a3df67c710aaf7bb8d214cc997342e63ceebd940c8c7fc65c9b3d697591a", size = 454726, upload-time = "2026-02-27T00:30:25.421Z" } +sdist = { url = "https://files.pythonhosted.org/packages/66/70/e908e9c5e52ef7c3a6c7902c9dfbb34c7e29c25d2f81ade3856445fd5c94/protobuf-6.33.6.tar.gz", hash = "sha256:a6768d25248312c297558af96a9f9c929e8c4cee0659cb07e780731095f38135", size = 444531, upload-time = "2026-03-18T19:05:00.988Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/13/c4/6322ab5c8f279c4c358bc14eb8aefc0550b97222a39f04eb3c1af7a830fa/protobuf-7.34.0-cp310-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e329966799f2c271d5e05e236459fe1cbfdb8755aaa3b0914fa60947ddea408", size = 429248, upload-time = "2026-02-27T00:30:14.924Z" }, - { url = "https://files.pythonhosted.org/packages/45/99/b029bbbc61e8937545da5b79aa405ab2d9cf307a728f8c9459ad60d7a481/protobuf-7.34.0-cp310-abi3-manylinux2014_aarch64.whl", hash = "sha256:9d7a5005fb96f3c1e64f397f91500b0eb371b28da81296ae73a6b08a5b76cdd6", size = 325753, upload-time = "2026-02-27T00:30:17.247Z" }, - { url = "https://files.pythonhosted.org/packages/cc/79/09f02671eb75b251c5550a1c48e7b3d4b0623efd7c95a15a50f6f9fc1e2e/protobuf-7.34.0-cp310-abi3-manylinux2014_s390x.whl", hash = "sha256:4a72a8ec94e7a9f7ef7fe818ed26d073305f347f8b3b5ba31e22f81fd85fca02", size = 340200, upload-time = "2026-02-27T00:30:18.672Z" }, - { 
url = "https://files.pythonhosted.org/packages/b5/57/89727baef7578897af5ed166735ceb315819f1c184da8c3441271dbcfde7/protobuf-7.34.0-cp310-abi3-manylinux2014_x86_64.whl", hash = "sha256:964cf977e07f479c0697964e83deda72bcbc75c3badab506fb061b352d991b01", size = 324268, upload-time = "2026-02-27T00:30:20.088Z" }, - { url = "https://files.pythonhosted.org/packages/1f/3e/38ff2ddee5cc946f575c9d8cc822e34bde205cf61acf8099ad88ef19d7d2/protobuf-7.34.0-cp310-abi3-win32.whl", hash = "sha256:f791ec509707a1d91bd02e07df157e75e4fb9fbdad12a81b7396201ec244e2e3", size = 426628, upload-time = "2026-02-27T00:30:21.555Z" }, - { url = "https://files.pythonhosted.org/packages/cb/71/7c32eaf34a61a1bae1b62a2ac4ffe09b8d1bb0cf93ad505f42040023db89/protobuf-7.34.0-cp310-abi3-win_amd64.whl", hash = "sha256:9f9079f1dde4e32342ecbd1c118d76367090d4aaa19da78230c38101c5b3dd40", size = 437901, upload-time = "2026-02-27T00:30:22.836Z" }, - { url = "https://files.pythonhosted.org/packages/a4/e7/14dc9366696dcb53a413449881743426ed289d687bcf3d5aee4726c32ebb/protobuf-7.34.0-py3-none-any.whl", hash = "sha256:e3b914dd77fa33fa06ab2baa97937746ab25695f389869afdf03e81f34e45dc7", size = 170716, upload-time = "2026-02-27T00:30:23.994Z" }, + { url = "https://files.pythonhosted.org/packages/fc/9f/2f509339e89cfa6f6a4c4ff50438db9ca488dec341f7e454adad60150b00/protobuf-6.33.6-cp310-abi3-win32.whl", hash = "sha256:7d29d9b65f8afef196f8334e80d6bc1d5d4adedb449971fefd3723824e6e77d3", size = 425739, upload-time = "2026-03-18T19:04:48.373Z" }, + { url = "https://files.pythonhosted.org/packages/76/5d/683efcd4798e0030c1bab27374fd13a89f7c2515fb1f3123efdfaa5eab57/protobuf-6.33.6-cp310-abi3-win_amd64.whl", hash = "sha256:0cd27b587afca21b7cfa59a74dcbd48a50f0a6400cfb59391340ad729d91d326", size = 437089, upload-time = "2026-03-18T19:04:50.381Z" }, + { url = "https://files.pythonhosted.org/packages/5c/01/a3c3ed5cd186f39e7880f8303cc51385a198a81469d53d0fdecf1f64d929/protobuf-6.33.6-cp39-abi3-macosx_10_9_universal2.whl", hash = 
"sha256:9720e6961b251bde64edfdab7d500725a2af5280f3f4c87e57c0208376aa8c3a", size = 427737, upload-time = "2026-03-18T19:04:51.866Z" }, + { url = "https://files.pythonhosted.org/packages/ee/90/b3c01fdec7d2f627b3a6884243ba328c1217ed2d978def5c12dc50d328a3/protobuf-6.33.6-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:e2afbae9b8e1825e3529f88d514754e094278bb95eadc0e199751cdd9a2e82a2", size = 324610, upload-time = "2026-03-18T19:04:53.096Z" }, + { url = "https://files.pythonhosted.org/packages/9b/ca/25afc144934014700c52e05103c2421997482d561f3101ff352e1292fb81/protobuf-6.33.6-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:c96c37eec15086b79762ed265d59ab204dabc53056e3443e702d2681f4b39ce3", size = 339381, upload-time = "2026-03-18T19:04:54.616Z" }, + { url = "https://files.pythonhosted.org/packages/16/92/d1e32e3e0d894fe00b15ce28ad4944ab692713f2e7f0a99787405e43533a/protobuf-6.33.6-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:e9db7e292e0ab79dd108d7f1a94fe31601ce1ee3f7b79e0692043423020b0593", size = 323436, upload-time = "2026-03-18T19:04:55.768Z" }, + { url = "https://files.pythonhosted.org/packages/c4/72/02445137af02769918a93807b2b7890047c32bfb9f90371cbc12688819eb/protobuf-6.33.6-py3-none-any.whl", hash = "sha256:77179e006c476e69bf8e8ce866640091ec42e1beb80b213c3900006ecfba6901", size = 170656, upload-time = "2026-03-18T19:04:59.826Z" }, ] [[package]] @@ -2097,6 +2481,39 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c8/5b/181e2e3becb7672b502f0ed7f16ed7352aca7c109cfb94cf3878a9186db9/psycopg-3.3.3-py3-none-any.whl", hash = "sha256:f96525a72bcfade6584ab17e89de415ff360748c766f0106959144dcbb38c698", size = 212768, upload-time = "2026-02-18T16:46:27.365Z" }, ] +[[package]] +name = "py-vapid" +version = "1.9.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/a3/ed/c648c8018fab319951764f4babe68ddcbbff7f2bbcd7ff7e531eac1788c8/py_vapid-1.9.4.tar.gz", hash = "sha256:a004023560cbc54e34fc06380a0580f04ffcc788e84fb6d19e9339eeb6551a28", size = 74750, upload-time = "2026-01-05T22:13:25.201Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/15/f9d0171e1ad863ca49e826d5afb6b50566f20dc9b4f76965096d3555ce9e/py_vapid-1.9.4-py2.py3-none-any.whl", hash = "sha256:f165a5bf90dcf966b226114f01f178f137579a09784c7f0628fa2f0a299741b6", size = 23912, upload-time = "2026-01-05T20:42:05.455Z" }, +] + +[[package]] +name = "pyasn1" +version = "0.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5c/5f/6583902b6f79b399c9c40674ac384fd9cd77805f9e6205075f828ef11fb2/pyasn1-0.6.3.tar.gz", hash = "sha256:697a8ecd6d98891189184ca1fa05d1bb00e2f84b5977c481452050549c8a72cf", size = 148685, upload-time = "2026-03-17T01:06:53.382Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/a0/7d793dce3fa811fe047d6ae2431c672364b462850c6235ae306c0efd025f/pyasn1-0.6.3-py3-none-any.whl", hash = "sha256:a80184d120f0864a52a073acc6fc642847d0be408e7c7252f31390c0f4eadcde", size = 83997, upload-time = "2026-03-17T01:06:52.036Z" }, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892, upload-time = "2025-03-28T02:41:22.17Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259, 
upload-time = "2025-03-28T02:41:19.028Z" }, +] + [[package]] name = "pycparser" version = "3.0" @@ -2272,6 +2689,11 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl", hash = "sha256:94a6bde30eb5c8e04fee991062b534071fd1439ef58d2adc9ccb823e7bcd0469", size = 28224, upload-time = "2026-01-30T19:59:54.539Z" }, ] +[package.optional-dependencies] +crypto = [ + { name = "cryptography" }, +] + [[package]] name = "pyotp" version = "2.9.0" @@ -2320,6 +2742,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1b/d0/397f9626e711ff749a95d96b7af99b9c566a9bb5129b8e4c10fc4d100304/python_multipart-0.0.22-py3-none-any.whl", hash = "sha256:2b2cd894c83d21bf49d702499531c7bafd057d730c201782048f7945d82de155", size = 24579, upload-time = "2026-01-25T10:15:54.811Z" }, ] +[[package]] +name = "pywebpush" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "cryptography" }, + { name = "http-ece" }, + { name = "py-vapid" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/87/d9/e497a24bc9f659bfc0e570382a41e6b2d6726fbcfa4d85aaa23fe9c81ba2/pywebpush-2.3.0.tar.gz", hash = "sha256:d1e27db8de9e6757c1875f67292554bd54c41874c36f4b5c4ebb5442dce204f2", size = 28489, upload-time = "2026-02-09T23:30:18.574Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/d8/ac21241cf8007cb93255eabf318da4f425ec0f75d28c366992253aa8c1b2/pywebpush-2.3.0-py3-none-any.whl", hash = "sha256:3d97469fb14d4323c362319d438183737249a4115b50e146ce233e7f01e3cf98", size = 22851, upload-time = "2026-02-09T23:30:16.093Z" }, +] + [[package]] name = "pyyaml" version = "6.0.3" @@ -2965,6 +3403,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl", hash = 
"sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1", size = 348521, upload-time = "2025-12-13T17:45:33.889Z" }, ] +[[package]] +name = "uritemplate" +version = "4.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/60/f174043244c5306c9988380d2cb10009f91563fc4b31293d27e17201af56/uritemplate-4.2.0.tar.gz", hash = "sha256:480c2ed180878955863323eea31b0ede668795de182617fef9c6ca09e6ec9d0e", size = 33267, upload-time = "2025-06-02T15:12:06.318Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a9/99/3ae339466c9183ea5b8ae87b34c0b897eda475d2aec2307cae60e5cd4f29/uritemplate-4.2.0-py3-none-any.whl", hash = "sha256:962201ba1c4edcab02e60f9a0d3821e82dfc5d2d6662a21abd533879bdb8a686", size = 11488, upload-time = "2025-06-02T15:12:03.405Z" }, +] + [[package]] name = "urllib3" version = "2.6.3" From fc2b2cd7ef04f5928afc970b01ad7e3127e9114d Mon Sep 17 00:00:00 2001 From: wailbentafat Date: Fri, 20 Mar 2026 03:13:39 +0100 Subject: [PATCH 05/19] feat: Add type stubs for googleapiclient and pywebpush, refactor notification and storage cleaner workers into packages, and introduce a new audit worker. 
--- app/worker/{audit.py => audit/main.py} | 0 app/worker/audit/settings.py | 0 app/worker/notification.py | 268 ------ app/worker/notification/__init__.py | 5 + app/worker/notification/main.py | 116 +++ app/worker/notification/providers/__init__.py | 8 + app/worker/notification/providers/apn.py | 43 + app/worker/notification/providers/fcm.py | 38 + app/worker/notification/providers/webpush.py | 41 + app/worker/notification/schema/__init__.py | 6 + .../notification/schema/notification.py | 63 ++ app/worker/notification/settings.py | 31 + app/worker/storage_cleaner.py | 1 - app/worker/storage_cleaner/main.py | 0 app/worker/storage_cleaner/settings.py | 0 typings/googleapiclient/__init__.pyi | 7 + typings/googleapiclient/_auth.pyi | 56 ++ typings/googleapiclient/_helpers.pyi | 120 +++ typings/googleapiclient/channel.pyi | 211 +++++ typings/googleapiclient/discovery.pyi | 333 +++++++ .../discovery_cache/__init__.pyi | 34 + .../discovery_cache/appengine_memcache.pyi | 28 + .../googleapiclient/discovery_cache/base.pyi | 35 + .../discovery_cache/file_cache.pyi | 35 + typings/googleapiclient/errors.pyi | 108 +++ typings/googleapiclient/http.pyi | 857 ++++++++++++++++++ typings/googleapiclient/mimeparse.pyi | 107 +++ typings/googleapiclient/model.pyi | 262 ++++++ typings/googleapiclient/sample_tools.pyi | 36 + typings/googleapiclient/schema.pyi | 160 ++++ typings/googleapiclient/version.pyi | 5 + typings/pywebpush/__init__.pyi | 276 ++++++ typings/pywebpush/__main__.pyi | 13 + typings/pywebpush/foo.pyi | 7 + typings/pywebpush/tests/__init__.pyi | 4 + 35 files changed, 3045 insertions(+), 269 deletions(-) rename app/worker/{audit.py => audit/main.py} (100%) create mode 100644 app/worker/audit/settings.py delete mode 100644 app/worker/notification.py create mode 100644 app/worker/notification/__init__.py create mode 100644 app/worker/notification/main.py create mode 100644 app/worker/notification/providers/__init__.py create mode 100644 
app/worker/notification/providers/apn.py create mode 100644 app/worker/notification/providers/fcm.py create mode 100644 app/worker/notification/providers/webpush.py create mode 100644 app/worker/notification/schema/__init__.py create mode 100644 app/worker/notification/schema/notification.py create mode 100644 app/worker/notification/settings.py delete mode 100644 app/worker/storage_cleaner.py create mode 100644 app/worker/storage_cleaner/main.py create mode 100644 app/worker/storage_cleaner/settings.py create mode 100644 typings/googleapiclient/__init__.pyi create mode 100644 typings/googleapiclient/_auth.pyi create mode 100644 typings/googleapiclient/_helpers.pyi create mode 100644 typings/googleapiclient/channel.pyi create mode 100644 typings/googleapiclient/discovery.pyi create mode 100644 typings/googleapiclient/discovery_cache/__init__.pyi create mode 100644 typings/googleapiclient/discovery_cache/appengine_memcache.pyi create mode 100644 typings/googleapiclient/discovery_cache/base.pyi create mode 100644 typings/googleapiclient/discovery_cache/file_cache.pyi create mode 100644 typings/googleapiclient/errors.pyi create mode 100644 typings/googleapiclient/http.pyi create mode 100644 typings/googleapiclient/mimeparse.pyi create mode 100644 typings/googleapiclient/model.pyi create mode 100644 typings/googleapiclient/sample_tools.pyi create mode 100644 typings/googleapiclient/schema.pyi create mode 100644 typings/googleapiclient/version.pyi create mode 100644 typings/pywebpush/__init__.pyi create mode 100644 typings/pywebpush/__main__.pyi create mode 100644 typings/pywebpush/foo.pyi create mode 100644 typings/pywebpush/tests/__init__.pyi diff --git a/app/worker/audit.py b/app/worker/audit/main.py similarity index 100% rename from app/worker/audit.py rename to app/worker/audit/main.py diff --git a/app/worker/audit/settings.py b/app/worker/audit/settings.py new file mode 100644 index 0000000..e69de29 diff --git a/app/worker/notification.py 
b/app/worker/notification.py deleted file mode 100644 index 0a83308..0000000 --- a/app/worker/notification.py +++ /dev/null @@ -1,268 +0,0 @@ -"""Forward notifications coming from NATS to the configured push providers.""" -from __future__ import annotations - -import asyncio -import dataclasses -import json -import uuid -from typing import Any - -import sqlalchemy.ext.asyncio - -from app.core.constant import NotificationChannel, NOTIFICATION_EVENT_SUBJECT -from app.core.logger import logger -from app.infra.database import engine -from app.infra.nats import NatsClient, NatsSubjects -from app.service.device import DeviceService -from db.generated import devices as device_queries -from db.generated.models import UserDevice - -try: - from firebase_admin import messaging as firebase_messaging -except ImportError: # pragma: no cover - optional dependency - firebase_messaging = None - -try: - from apns2.client import APNsClient - from apns2.payload import Payload as APNPayload -except ImportError: # pragma: no cover - optional dependency - APNsClient = None - APNPayload = None - -try: - from pywebpush import webpush, WebPushException -except ImportError: # pragma: no cover - optional dependency - webpush = None - WebPushException = None - - -@dataclasses.dataclass -class NotificationEventPayload: - user_id: uuid.UUID - channel: NotificationChannel - title: str | None = None - body: str | None = None - data: dict[str, str] = dataclasses.field(default_factory=dict) - device_info: dict[str, Any] | None = None - metadata: dict[str, Any] | None = None - - @classmethod - def from_dict(cls, payload: dict[str, Any]) -> "NotificationEventPayload" | None: - raw_user_id = payload.get("user_id") - raw_channel = payload.get("channel") - if not isinstance(raw_user_id, str) or not isinstance(raw_channel, str): - logger.warning("Notification payload missing user_id or channel: %s", payload) - return None - try: - user_id = uuid.UUID(raw_user_id) - except ValueError as exc: - 
logger.warning("Invalid user_id %s: %s", raw_user_id, exc) - return None - try: - channel = NotificationChannel(raw_channel) - except ValueError: - logger.warning("Unsupported notification channel %s", raw_channel) - return None - - data = payload.get("data") - data_dict: dict[str, str] = {} - if isinstance(data, dict): - data_dict = {str(k): str(v) for k, v in data.items()} - - device_info = payload.get("device_info") - if device_info is not None and not isinstance(device_info, dict): - logger.warning("device_info must be an object: %s", payload) - device_info = None - - metadata = payload.get("metadata") - if metadata is not None and not isinstance(metadata, dict): - metadata = None - - return cls( - user_id=user_id, - channel=channel, - title=payload.get("title"), - body=payload.get("body"), - data=data_dict, - device_info=device_info, - metadata=metadata, - ) - - -async def init_push_integrations() -> None: - """Initialize third-party push clients and perform early validation.""" - if firebase_messaging: - logger.info("Firebase Admin available for FCM delivery") - else: - logger.warning("Firebase Admin not installed; mobile push disabled") - - if APNsClient and APNPayload: - logger.info("APNs client available for iOS delivery") - else: - logger.warning("APNs client not installed; iOS push disabled") - - if webpush: - logger.info("pywebpush available for web push delivery") - else: - logger.warning("pywebpush not installed; web push disabled") - - -async def send_fcm_notification(device: UserDevice, payload: NotificationEventPayload) -> None: - if firebase_messaging is None: - logger.debug("Skipping FCM delivery because firebase_admin is not installed") - return - - token = payload.device_info and payload.device_info.get("fcm_token") - if token is None: - logger.warning("Missing FCM token for payload %s", payload) - return - - message = firebase_messaging.Message( - token=token, - notification=firebase_messaging.Notification( - title=payload.title, 
body=payload.body - ), - data=payload.data, - ) - - try: - firebase_messaging.send(message) - logger.info("FCM notification queued for user %s token %s", payload.user_id, token) - except Exception as exc: - logger.exception("FCM send failed for token %s: %s", token, exc) - - -async def send_apn_notification(device: UserDevice, payload: NotificationEventPayload) -> None: - if APNsClient is None or APNPayload is None: - logger.debug("Skipping APN delivery because APNs client is unavailable") - return - - token = payload.device_info and payload.device_info.get("apn_token") - if token is None: - logger.warning("Missing APN token for payload %s", payload) - return - - apn_payload = APNPayload(alert={"title": payload.title, "body": payload.body}) - client = APNsClient( - credentials="/path/to/certificate.pem", - use_sandbox=True, - use_alternative_port=False, - ) - try: - client.send_notification(token, apn_payload) - logger.info("APN notification queued for user %s token %s", payload.user_id, token) - except Exception as exc: - logger.exception("APN send failed for %s: %s", token, exc) - - -async def send_web_push_notification(payload: NotificationEventPayload) -> None: - if webpush is None or WebPushException is None: - logger.debug("Skipping WebPush delivery because pywebpush is unavailable") - return - - if not payload.device_info: - logger.warning("Web notification missing subscription info: %s", payload) - return - - subscription_info = payload.device_info - vapid_claims = {"sub": "mailto:alerts@example.com"} - try: - webpush( - subscription_info=subscription_info, - data=json.dumps({"title": payload.title, "body": payload.body, "data": payload.data}), - vapid_private_key="/path/to/vapid-private.key", - vapid_claims=vapid_claims, - ) - logger.info("Web push queued for user %s", payload.user_id) - except WebPushException as exc: - logger.exception("Web push failed for user %s: %s", payload.user_id, exc) - - -class NotificationDeliveryWorker: - def __init__(self) -> 
None: - self._conn: sqlalchemy.ext.asyncio.AsyncConnection | None = None - self._device_service: DeviceService | None = None - - async def start(self) -> None: - if self._conn is not None: - return - self._conn = await engine.connect() - self._device_service = DeviceService() - self._device_service.init(device_querier=device_queries.AsyncQuerier(self._conn)) - - async def stop(self) -> None: - if self._conn is not None: - await self._conn.close() - self._conn = None - self._device_service = None - - async def deliver(self, payload: NotificationEventPayload) -> None: - if payload.channel is NotificationChannel.MOBILE: - await self._deliver_to_mobile(payload) - elif payload.channel is NotificationChannel.WEB: - await send_web_push_notification(payload) - else: - logger.warning("Unsupported channel %s for payload %s", payload.channel, payload) - - async def _deliver_to_mobile(self, payload: NotificationEventPayload) -> None: - if self._device_service is None: - logger.warning("Device service missing for mobile delivery") - return - devices, _ = await self._device_service.get_all_devices(user_id=payload.user_id) - if not devices: - logger.debug("No registered devices for user %s", payload.user_id) - return - for device in devices: - device_type = (device.device_type or "").lower() - if device_type == "ios": - await send_apn_notification(device, payload) - elif device_type == "android": - await send_fcm_notification(device, payload) - else: - await send_fcm_notification(device, payload) - - -async def _parse_payload(raw_data: bytes) -> dict[str, Any] | None: - try: - return json.loads(raw_data.decode("utf-8")) - except (UnicodeDecodeError, json.JSONDecodeError) as exc: - logger.error("Cannot parse notification payload: %s", exc) - return None - - -async def _handle_event(worker: NotificationDeliveryWorker, raw_data: bytes) -> None: - raw_payload = await _parse_payload(raw_data) - if raw_payload is None: - return - payload = 
NotificationEventPayload.from_dict(raw_payload) - if payload is None: - return - try: - await worker.deliver(payload) - except Exception: - logger.exception("Failed to deliver notification payload %s", raw_payload) - - -async def listen_nats_event(worker: NotificationDeliveryWorker) -> None: - await NatsClient.subscribe( - NatsSubjects.NOTIFICATION_EVENT, - lambda data: _handle_event(worker, data), - ) - logger.info("Listening for notification events on %s", NOTIFICATION_EVENT_SUBJECT) - - -async def main() -> None: - await init_push_integrations() - worker = NotificationDeliveryWorker() - await worker.start() - await NatsClient.connect() - try: - await listen_nats_event(worker) - await asyncio.Event().wait() - finally: - await worker.stop() - await NatsClient.close() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/app/worker/notification/__init__.py b/app/worker/notification/__init__.py new file mode 100644 index 0000000..51ddbc5 --- /dev/null +++ b/app/worker/notification/__init__.py @@ -0,0 +1,5 @@ +from __future__ import annotations + +from .main import main # noqa: F401 + +__all__ = ["main"] diff --git a/app/worker/notification/main.py b/app/worker/notification/main.py new file mode 100644 index 0000000..66f2880 --- /dev/null +++ b/app/worker/notification/main.py @@ -0,0 +1,116 @@ +import asyncio +import json +from typing import Any + +import sqlalchemy.ext.asyncio + +from app.core.constant import NOTIFICATION_EVENT_SUBJECT, NotificationChannel +from app.core.logger import logger +from app.infra.database import engine +from app.infra.nats import NatsClient, NatsSubjects +from app.service.device import DeviceService +from db.generated import devices as device_queries + +from app.worker.notification.providers import ( + send_apn_notification, + send_fcm_notification, + send_web_push_notification, +) +from app.worker.notification.schema.notification import NotificationEventPayload + + +async def init_push_integrations() -> None: + 
logger.info("Notification worker ready to deliver pushes") + + +class NotificationDeliveryWorker: + def __init__(self) -> None: + self._conn: sqlalchemy.ext.asyncio.AsyncConnection | None = None + self._device_service: DeviceService | None = None + + async def start(self) -> None: + if self._conn is not None: + return + self._conn = await engine.connect() + self._device_service = DeviceService() + self._device_service.init(device_querier=device_queries.AsyncQuerier(self._conn)) + + async def stop(self) -> None: + if self._conn is not None: + await self._conn.close() + self._conn = None + self._device_service = None + + async def deliver(self, payload: NotificationEventPayload) -> None: + if payload.channel == NotificationChannel.MOBILE: + await self._deliver_to_mobile(payload) + return + if payload.channel == NotificationChannel.WEB: + await send_web_push_notification(payload) + return + logger.warning("Unhandled notification channel %s", payload.channel) + + async def _deliver_to_mobile(self, payload: NotificationEventPayload) -> None: + if self._device_service is None: + logger.warning("Device service unavailable for mobile delivery") + return + devices, _ = await self._device_service.get_all_devices(user_id=payload.user_id) + if not devices: + logger.debug("No devices registered for user %s", payload.user_id) + return + for device in devices: + device_type = (device.device_type or "").lower() + if device_type == "ios": + await send_apn_notification(payload) + else: + await send_fcm_notification(payload) + + +def _parse_payload(raw_data: bytes) -> dict[str, Any] | None: + try: + parsed = json.loads(raw_data.decode("utf-8")) + if not isinstance(parsed, dict): + logger.warning("Notification payload must be an object, got %s", type(parsed)) + return None + return parsed + except (UnicodeDecodeError, json.JSONDecodeError) as exc: + logger.error("Cannot parse notification payload: %s", exc) + return None + + +async def _handle_event(worker: NotificationDeliveryWorker, 
raw_data: Any) -> None: + parsed = _parse_payload(raw_data) + if parsed is None: + return + payload = NotificationEventPayload.from_mapping(parsed) + if payload is None: + return + try: + await worker.deliver(payload) + except Exception: + logger.exception("Failed to deliver payload for %s", parsed.get("user_id")) + + +async def listen_nats_event(worker: NotificationDeliveryWorker) -> None: + await NatsClient.subscribe( + NatsSubjects.NOTIFICATION_EVENT, + lambda data: _handle_event(worker, data), + ) + logger.info("Listening for notification events on %s", NOTIFICATION_EVENT_SUBJECT) + + +async def main() -> None: + await init_push_integrations() + worker = NotificationDeliveryWorker() + await worker.start() + await NatsClient.connect() + try: + await listen_nats_event(worker) + await asyncio.Event().wait() + finally: + await worker.stop() + await NatsClient.close() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/app/worker/notification/providers/__init__.py b/app/worker/notification/providers/__init__.py new file mode 100644 index 0000000..32e1188 --- /dev/null +++ b/app/worker/notification/providers/__init__.py @@ -0,0 +1,8 @@ +"""Provider entry points.""" +from __future__ import annotations + +from app.worker.notification.providers.apn import send_apn_notification +from app.worker.notification.providers.fcm import send_fcm_notification +from app.worker.notification.providers.webpush import send_web_push_notification + +__all__ = ["send_apn_notification", "send_fcm_notification", "send_web_push_notification"] diff --git a/app/worker/notification/providers/apn.py b/app/worker/notification/providers/apn.py new file mode 100644 index 0000000..6d6ca15 --- /dev/null +++ b/app/worker/notification/providers/apn.py @@ -0,0 +1,43 @@ +import asyncio +from typing import Any, Mapping + +from apns2.payload import PayloadAlert +from app.core.logger import logger +from app.worker.notification.schema.notification import NotificationEventPayload +from 
app.worker.notification.settings import settings +from apns2.client import APNsClient +from apns2.payload import Payload as APNPayload + + +async def send_apn_notification(payload: NotificationEventPayload) -> None: + if APNsClient is None or APNPayload is None: + logger.debug("APNs client unavailable; skipping APN delivery") + return + + device_info: Mapping[str, Any] | None = payload.device_info + if device_info is None: + logger.warning("Payload missing device_info, cannot send APN message: %s", payload) + return + + token = device_info.get("apn_token") + if not isinstance(token, str): + logger.warning("Missing APN token in device_info for payload %s", payload) + return + + apn_payload = APNPayload(alert=PayloadAlert(title=payload.title, body=payload.body)) + client = APNsClient( + credentials=settings.apn_certificate_path, + use_sandbox=settings.apn_use_sandbox, + use_alternative_port=settings.apn_use_alternative_port, + ) + + send_args = (token, apn_payload) + send_kwargs: dict[str, Any] = {} + if settings.apn_topic is not None: + send_kwargs["topic"] = settings.apn_topic + + try: + await asyncio.to_thread(client.send_notification, *send_args, **send_kwargs) + logger.info("APN notification queued for user %s token %s", payload.user_id, token) + except Exception as exc: + logger.exception("APN send failed for token %s: %s", token, exc) diff --git a/app/worker/notification/providers/fcm.py b/app/worker/notification/providers/fcm.py new file mode 100644 index 0000000..5042ef0 --- /dev/null +++ b/app/worker/notification/providers/fcm.py @@ -0,0 +1,38 @@ + +import asyncio +from typing import Any, Mapping + +from app.core.logger import logger +from app.worker.notification.schema.notification import NotificationEventPayload +from firebase_admin import messaging as firebase_messaging + + +async def send_fcm_notification(payload: NotificationEventPayload) -> None: + + if firebase_messaging is None: + logger.debug("Firebase Admin not installed; skipping FCM delivery") + return + + device_info:
Mapping[str, Any] | None = payload.device_info + if device_info is None: + logger.warning("Payload missing device_info, cannot send FCM message: %s", payload) + return + + token = device_info.get("fcm_token") + if not isinstance(token, str): + logger.warning("Missing FCM token in device_info for payload %s", payload) + return + + message = firebase_messaging.Message( + token=token, + notification=firebase_messaging.Notification( + title=payload.title, body=payload.body + ), + data=payload.data or None, + ) + + try: + await asyncio.to_thread(firebase_messaging.send, message) + logger.info("FCM notification queued for user %s token %s", payload.user_id, token) + except Exception as exc: + logger.exception("FCM send failed for token %s: %s", token, exc) diff --git a/app/worker/notification/providers/webpush.py b/app/worker/notification/providers/webpush.py new file mode 100644 index 0000000..942ba90 --- /dev/null +++ b/app/worker/notification/providers/webpush.py @@ -0,0 +1,41 @@ +"""Web Push integration helpers.""" +from __future__ import annotations + +import asyncio +import json +from typing import Any, Mapping + +from app.core.logger import logger +from app.worker.notification.schema.notification import NotificationEventPayload +from app.worker.notification.settings import settings +from pywebpush import WebPushException, webpush + + +async def send_web_push_notification(payload: NotificationEventPayload) -> None: + if webpush is None or WebPushException is None: + logger.debug("pywebpush unavailable; skipping web push delivery") + return + + if not payload.device_info: + logger.warning("Web notification missing subscription info: %s", payload) + return + + if not settings.webpush_vapid_private_key: + logger.warning("VAPID private key missing, cannot send web push") + return + + subscription_info = payload.device_info + vapid_claims = {"sub": settings.webpush_vapid_claims_subject} + data = json.dumps({"title": payload.title, "body": payload.body, "data": 
payload.data}) + + try: + await asyncio.to_thread( + webpush, + subscription_info=subscription_info, + data=data, + vapid_private_key=settings.webpush_vapid_private_key, + vapid_claims=vapid_claims, + ) + logger.info("Web push queued for user %s", payload.user_id) + except WebPushException as exc: + logger.exception("Web push failed for user %s: %s", payload.user_id, exc) diff --git a/app/worker/notification/schema/__init__.py b/app/worker/notification/schema/__init__.py new file mode 100644 index 0000000..70e23dd --- /dev/null +++ b/app/worker/notification/schema/__init__.py @@ -0,0 +1,6 @@ +"""Notification worker shared schema exports.""" +from __future__ import annotations + +from .notification import NotificationEventPayload + +__all__ = ["NotificationEventPayload"] diff --git a/app/worker/notification/schema/notification.py b/app/worker/notification/schema/notification.py new file mode 100644 index 0000000..85feaaf --- /dev/null +++ b/app/worker/notification/schema/notification.py @@ -0,0 +1,63 @@ +from __future__ import annotations + +import dataclasses +import uuid +from typing import Any, Mapping + +from app.core.constant import NotificationChannel +from app.core.logger import logger + + +@dataclasses.dataclass +class NotificationEventPayload: + + user_id: uuid.UUID + channel: NotificationChannel + title: str | None = None + body: str | None = None + data: dict[str, str] = dataclasses.field(default_factory=dict) + device_info: Mapping[str, Any] | None = None + metadata: Mapping[str, Any] | None = None + + @classmethod + def from_mapping(cls, payload: Mapping[str, Any]) -> "NotificationEventPayload" | None: + raw_user_id = payload.get("user_id") + raw_channel = payload.get("channel") + if not isinstance(raw_user_id, str) or not isinstance(raw_channel, str): + logger.warning("Notification payload missing user_id or channel: %s", payload) + return None + try: + user_id = uuid.UUID(raw_user_id) + except ValueError as exc: + logger.warning("Invalid user_id %s: 
%s", raw_user_id, exc) + return None + try: + channel = NotificationChannel(raw_channel) + except ValueError: + logger.warning("Unsupported notification channel %s", raw_channel) + return None + + raw_data = payload.get("data") + data_dict: dict[str, str] = {} + if isinstance(raw_data, Mapping): + data_dict = {str(k): str(v) for k, v in raw_data.items()} + + device_info = payload.get("device_info") + if device_info is not None and not isinstance(device_info, Mapping): + logger.warning("device_info must be an object, dropping it: %s", payload) + device_info = None + + metadata = payload.get("metadata") + if metadata is not None and not isinstance(metadata, Mapping): + logger.warning("metadata must be an object, dropping it: %s", payload) + metadata = None + + return cls( + user_id=user_id, + channel=channel, + title=payload.get("title"), + body=payload.get("body"), + data=data_dict, + device_info=device_info, + metadata=metadata, + ) diff --git a/app/worker/notification/settings.py b/app/worker/notification/settings.py new file mode 100644 index 0000000..1ef343a --- /dev/null +++ b/app/worker/notification/settings.py @@ -0,0 +1,31 @@ +"""Configuration shared between notification providers.""" +from __future__ import annotations + +from typing import Optional + +from pydantic import BaseSettings, Field + + +class NotificationWorkerSettings(BaseSettings): + """Environment driven configuration for the notification worker.""" + + apn_certificate_path: str = Field( + "/path/to/certificate.pem", description="Path to the APNs certificate in PEM format" + ) + apn_use_sandbox: bool = Field(True, description="Whether to speak to the APNs sandbox endpoint") + apn_use_alternative_port: bool = Field( + False, description="Use the alternative port when connecting to APNs" + ) + apn_topic: Optional[str] = Field( + None, description="APNs topic (i.e. 
bundle ID) to target" + ) + webpush_vapid_private_key: Optional[str] = Field(None, description="VAPID private key for web push") + webpush_vapid_claims_subject: str = Field( + "mailto:alerts@example.com", description="VAPID subject for push subscriptions" + ) + + class Config: + env_prefix = "NOTIFICATION_" + + +settings = NotificationWorkerSettings() diff --git a/app/worker/storage_cleaner.py b/app/worker/storage_cleaner.py deleted file mode 100644 index 3d61af5..0000000 --- a/app/worker/storage_cleaner.py +++ /dev/null @@ -1 +0,0 @@ -async def initilize_ \ No newline at end of file diff --git a/app/worker/storage_cleaner/main.py b/app/worker/storage_cleaner/main.py new file mode 100644 index 0000000..e69de29 diff --git a/app/worker/storage_cleaner/settings.py b/app/worker/storage_cleaner/settings.py new file mode 100644 index 0000000..e69de29 diff --git a/typings/googleapiclient/__init__.pyi b/typings/googleapiclient/__init__.pyi new file mode 100644 index 0000000..2b096b9 --- /dev/null +++ b/typings/googleapiclient/__init__.pyi @@ -0,0 +1,7 @@ +""" +This type stub file was generated by pyright. +""" + +import logging +from logging import NullHandler + diff --git a/typings/googleapiclient/_auth.pyi b/typings/googleapiclient/_auth.pyi new file mode 100644 index 0000000..7d7cb17 --- /dev/null +++ b/typings/googleapiclient/_auth.pyi @@ -0,0 +1,56 @@ +""" +This type stub file was generated by pyright. +""" + +"""Helpers for authentication using oauth2client or google-auth.""" +HAS_GOOGLE_AUTH = ... +HAS_OAUTH2CLIENT = ... +def credentials_from_file(filename, scopes=..., quota_project_id=...): # -> Credentials | ServiceAccountCredentials: + """Returns credentials loaded from a file.""" + ... + +def default_credentials(scopes=..., quota_project_id=...): + """Returns Application Default Credentials.""" + ... + +def with_scopes(credentials, scopes): # -> Scoped: + """Scopes the credentials if necessary. 
+ + Args: + credentials (Union[ + google.auth.credentials.Credentials, + oauth2client.client.Credentials]): The credentials to scope. + scopes (Sequence[str]): The list of scopes. + + Returns: + Union[google.auth.credentials.Credentials, + oauth2client.client.Credentials]: The scoped credentials. + """ + ... + +def authorized_http(credentials): # -> AuthorizedHttp: + """Returns an http client that is authorized with the given credentials. + + Args: + credentials (Union[ + google.auth.credentials.Credentials, + oauth2client.client.Credentials]): The credentials to use. + + Returns: + Union[httplib2.Http, google_auth_httplib2.AuthorizedHttp]: An + authorized http client. + """ + ... + +def refresh_credentials(credentials): + ... + +def apply_credentials(credentials, headers): + ... + +def is_valid(credentials): # -> bool: + ... + +def get_credentials_from_http(http): # -> None: + ... + diff --git a/typings/googleapiclient/_helpers.pyi b/typings/googleapiclient/_helpers.pyi new file mode 100644 index 0000000..2097e75 --- /dev/null +++ b/typings/googleapiclient/_helpers.pyi @@ -0,0 +1,120 @@ +""" +This type stub file was generated by pyright. +""" + +"""Helper functions for commonly used utilities.""" +logger = ... +POSITIONAL_WARNING = ... +POSITIONAL_EXCEPTION = ... +POSITIONAL_IGNORE = ... +POSITIONAL_SET = ... +positional_parameters_enforcement = ... +_SYM_LINK_MESSAGE = ... +_IS_DIR_MESSAGE = ... +_MISSING_FILE_MESSAGE = ... +def positional(max_positional_args): # -> Callable[..., _Wrapped[Callable[..., Any], Any, Callable[..., Any], Any]]: + """A decorator to declare that only the first N arguments may be positional. + + This decorator makes it easy to support Python 3 style keyword-only + parameters. For example, in Python 3 it is possible to write:: + + def fn(pos1, *, kwonly1=None, kwonly2=None): + ... + + All named parameters after ``*`` must be a keyword:: + + fn(10, 'kw1', 'kw2') # Raises exception. + fn(10, kwonly1='kw1') # Ok. 
+ + Example + ^^^^^^^ + + To define a function like above, do:: + + @positional(1) + def fn(pos1, kwonly1=None, kwonly2=None): + ... + + If no default value is provided to a keyword argument, it becomes a + required keyword argument:: + + @positional(0) + def fn(required_kw): + ... + + This must be called with the keyword parameter:: + + fn() # Raises exception. + fn(10) # Raises exception. + fn(required_kw=10) # Ok. + + When defining instance or class methods always remember to account for + ``self`` and ``cls``:: + + class MyClass(object): + + @positional(2) + def my_method(self, pos1, kwonly1=None): + ... + + @classmethod + @positional(2) + def my_method(cls, pos1, kwonly1=None): + ... + + The positional decorator behavior is controlled by + ``_helpers.positional_parameters_enforcement``, which may be set to + ``POSITIONAL_EXCEPTION``, ``POSITIONAL_WARNING`` or + ``POSITIONAL_IGNORE`` to raise an exception, log a warning, or do + nothing, respectively, if a declaration is violated. + + Args: + max_positional_arguments: Maximum number of positional arguments. All + parameters after this index must be + keyword only. + + Returns: + A decorator that prevents using arguments after max_positional_args + from being used as positional parameters. + + Raises: + TypeError: if a keyword-only argument is provided as a positional + parameter, but only if + _helpers.positional_parameters_enforcement is set to + POSITIONAL_EXCEPTION. + """ + ... + +def parse_unique_urlencoded(content): # -> dict[Any, Any]: + """Parses unique key-value parameters from urlencoded content. + + Args: + content: string, URL-encoded key-value pairs. + + Returns: + dict, The key-value pairs from ``content``. + + Raises: + ValueError: if one of the keys is repeated. + """ + ... + +def update_query_params(uri, params): + """Updates a URI with new query parameters. + + If a given key from ``params`` is repeated in the ``uri``, then + the URI will be considered invalid and an error will occur. 
+ + If the URI is valid, then each value from ``params`` will + replace the corresponding value in the query parameters (if + it exists). + + Args: + uri: string, A valid URI, with potential existing query parameters. + params: dict, A dictionary of query parameters. + + Returns: + The same URI but with the new query parameters added. + """ + ... + diff --git a/typings/googleapiclient/channel.pyi b/typings/googleapiclient/channel.pyi new file mode 100644 index 0000000..a4d9ea3 --- /dev/null +++ b/typings/googleapiclient/channel.pyi @@ -0,0 +1,211 @@ +""" +This type stub file was generated by pyright. +""" + +from googleapiclient import _helpers as util + +"""Channel notifications support. + +Classes and functions to support channel subscriptions and notifications +on those channels. + +Notes: + - This code is based on experimental APIs and is subject to change. + - Notification does not do deduplication of notification ids, that's up to + the receiver. + - Storing the Channel between calls is up to the caller. + + +Example setting up a channel: + + # Create a new channel that gets notifications via webhook. + channel = new_webhook_channel("https://example.com/my_web_hook") + + # Store the channel, keyed by 'channel.id'. Store it before calling the + # watch method because notifications may start arriving before the watch + # method returns. + ... + + resp = service.objects().watchAll( + bucket="some_bucket_id", body=channel.body()).execute() + channel.update(resp) + + # Store the channel, keyed by 'channel.id'. Store it after being updated + # since the resource_id value will now be correct, and that's needed to + # stop a subscription. + ... + + +An example Webhook implementation using webapp2. Note that webapp2 puts +headers in a case insensitive dictionary, as headers aren't guaranteed to +always be upper case. + + id = self.request.headers[X_GOOG_CHANNEL_ID] + + # Retrieve the channel by id. + channel = ... 
+ + # Parse notification from the headers, including validating the id. + n = notification_from_headers(channel, self.request.headers) + + # Do app specific stuff with the notification here. + if n.resource_state == 'sync': + # Code to handle sync state. + elif n.resource_state == 'exists': + # Code to handle the exists state. + elif n.resource_state == 'not_exists': + # Code to handle the not exists state. + + +Example of unsubscribing. + + service.channels().stop(channel.body()).execute() +""" +EPOCH = ... +CHANNEL_PARAMS = ... +X_GOOG_CHANNEL_ID = ... +X_GOOG_MESSAGE_NUMBER = ... +X_GOOG_RESOURCE_STATE = ... +X_GOOG_RESOURCE_URI = ... +X_GOOG_RESOURCE_ID = ... +class Notification: + """A Notification from a Channel. + + Notifications are not usually constructed directly, but are returned + from functions like notification_from_headers(). + + Attributes: + message_number: int, The unique id number of this notification. + state: str, The state of the resource being monitored. + uri: str, The address of the resource being monitored. + resource_id: str, The unique identifier of the version of the resource at + this event. + """ + @util.positional(5) + def __init__(self, message_number, state, resource_uri, resource_id) -> None: + """Notification constructor. + + Args: + message_number: int, The unique id number of this notification. + state: str, The state of the resource being monitored. Can be one + of "exists", "not_exists", or "sync". + resource_uri: str, The address of the resource being monitored. + resource_id: str, The identifier of the watched resource. + """ + ... + + + +class Channel: + """A Channel for notifications. + + Usually not constructed directly, instead it is returned from helper + functions like new_webhook_channel(). + + Attributes: + type: str, The type of delivery mechanism used by this channel. For + example, 'web_hook'. + id: str, A UUID for the channel. 
+ token: str, An arbitrary string associated with the channel that + is delivered to the target address with each event delivered + over this channel. + address: str, The address of the receiving entity where events are + delivered. Specific to the channel type. + expiration: int, The time, in milliseconds from the epoch, when this + channel will expire. + params: dict, A dictionary of string to string, with additional parameters + controlling delivery channel behavior. + resource_id: str, An opaque id that identifies the resource that is + being watched. Stable across different API versions. + resource_uri: str, The canonicalized ID of the watched resource. + """ + @util.positional(5) + def __init__(self, type, id, token, address, expiration=..., params=..., resource_id=..., resource_uri=...) -> None: + """Create a new Channel. + + In user code, this Channel constructor will not typically be called + manually since there are functions for creating channels for each specific + type with a more customized set of arguments to pass. + + Args: + type: str, The type of delivery mechanism used by this channel. For + example, 'web_hook'. + id: str, A UUID for the channel. + token: str, An arbitrary string associated with the channel that + is delivered to the target address with each event delivered + over this channel. + address: str, The address of the receiving entity where events are + delivered. Specific to the channel type. + expiration: int, The time, in milliseconds from the epoch, when this + channel will expire. + params: dict, A dictionary of string to string, with additional parameters + controlling delivery channel behavior. + resource_id: str, An opaque id that identifies the resource that is + being watched. Stable across different API versions. + resource_uri: str, The canonicalized ID of the watched resource. + """ + ... + + def body(self): # -> dict[str, Any]: + """Build a body from the Channel. 
+ + Constructs a dictionary that's appropriate for passing into watch() + methods as the value of body argument. + + Returns: + A dictionary representation of the channel. + """ + ... + + def update(self, resp): # -> None: + """Update a channel with information from the response of watch(). + + When a request is sent to watch() a resource, the response returned + from the watch() request is a dictionary with updated channel information, + such as the resource_id, which is needed when stopping a subscription. + + Args: + resp: dict, The response from a watch() method. + """ + ... + + + +def notification_from_headers(channel, headers): # -> Notification: + """Parse a notification from the webhook request headers, validate + the notification, and return a Notification object. + + Args: + channel: Channel, The channel that the notification is associated with. + headers: dict, A dictionary like object that contains the request headers + from the webhook HTTP request. + + Returns: + A Notification object. + + Raises: + errors.InvalidNotificationError if the notification is invalid. + ValueError if the X-GOOG-MESSAGE-NUMBER can't be converted to an int. + """ + ... + +@util.positional(2) +def new_webhook_channel(url, token=..., expiration=..., params=...): # -> Channel: + """Create a new webhook Channel. + + Args: + url: str, URL to post notifications to. + token: str, An arbitrary string associated with the channel that + is delivered to the target address with each notification delivered + over this channel. + expiration: datetime.datetime, A time in the future when the channel + should expire. Can also be None if the subscription should use the + default expiration. Note that different services may have different + limits on how long a subscription lasts. Check the response from the + watch() method to see the value the service has set for an expiration + time. + params: dict, Extra parameters to pass on channel creation. Currently + not used for webhook channels. 
+ """ + ... + diff --git a/typings/googleapiclient/discovery.pyi b/typings/googleapiclient/discovery.pyi new file mode 100644 index 0000000..44c05b1 --- /dev/null +++ b/typings/googleapiclient/discovery.pyi @@ -0,0 +1,333 @@ +""" +This type stub file was generated by pyright. +""" + +from email.generator import BytesGenerator +from googleapiclient._helpers import positional + +"""Client for discovery based APIs. + +A client library for Google's discovery based APIs. +""" +__author__ = ... +__all__ = ["build", "build_from_document", "fix_method_name", "key2param"] +HAS_UNIVERSE = ... +logger = ... +URITEMPLATE = ... +VARNAME = ... +DISCOVERY_URI = ... +V1_DISCOVERY_URI = ... +V2_DISCOVERY_URI = ... +DEFAULT_METHOD_DOC = ... +HTTP_PAYLOAD_METHODS = ... +_MEDIA_SIZE_BIT_SHIFTS = ... +BODY_PARAMETER_DEFAULT_VALUE = ... +MEDIA_BODY_PARAMETER_DEFAULT_VALUE = ... +MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE = ... +_PAGE_TOKEN_NAMES = ... +GOOGLE_API_USE_CLIENT_CERTIFICATE = ... +GOOGLE_API_USE_MTLS_ENDPOINT = ... +GOOGLE_CLOUD_UNIVERSE_DOMAIN = ... +DEFAULT_UNIVERSE = ... +STACK_QUERY_PARAMETERS = ... +STACK_QUERY_PARAMETER_DEFAULT_VALUE = ... +class APICoreVersionError(ValueError): + def __init__(self) -> None: + ... + + + +RESERVED_WORDS = ... +class _BytesGenerator(BytesGenerator): + _write_lines = ... + + +def fix_method_name(name): + """Fix method names to avoid '$' characters and reserved word conflicts. + + Args: + name: string, method name. + + Returns: + The name with '_' appended if the name is a reserved word and '$' and '-' + replaced with '_'. + """ + ... + +def key2param(key): # -> LiteralString: + """Converts key names into parameter names. + + For example, converting "max-results" -> "max_results" + + Args: + key: string, the method key name. + + Returns: + A safe method name based on the key name. + """ + ... 
+ +@positional(2) +def build(serviceName, version, http=..., discoveryServiceUrl=..., developerKey=..., model=..., requestBuilder=..., credentials=..., cache_discovery=..., cache=..., client_options=..., adc_cert_path=..., adc_key_path=..., num_retries=..., static_discovery=..., always_use_jwt_access=...): + """Construct a Resource for interacting with an API. + + Construct a Resource object for interacting with an API. The serviceName and + version are the names from the Discovery service. + + Args: + serviceName: string, name of the service. + version: string, the version of the service. + http: httplib2.Http, An instance of httplib2.Http or something that acts + like it that HTTP requests will be made through. + discoveryServiceUrl: string, a URI Template that points to the location of + the discovery service. It should have two parameters {api} and + {apiVersion} that when filled in produce an absolute URI to the discovery + document for that service. + developerKey: string, key obtained from + https://code.google.com/apis/console. + model: googleapiclient.Model, converts to and from the wire format. + requestBuilder: googleapiclient.http.HttpRequest, encapsulator for an HTTP + request. + credentials: oauth2client.Credentials or + google.auth.credentials.Credentials, credentials to be used for + authentication. + cache_discovery: Boolean, whether or not to cache the discovery doc. + cache: googleapiclient.discovery_cache.base.CacheBase, an optional + cache object for the discovery documents. + client_options: Mapping object or google.api_core.client_options, client + options to set user options on the client. + (1) The API endpoint should be set through client_options. If API endpoint + is not set, `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable can be used + to control which endpoint to use. + (2) client_cert_source is not supported, client cert should be provided using + client_encrypted_cert_source instead. 
In order to use the provided client + cert, `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be + set to `true`. + More details on the environment variables are here: + https://google.aip.dev/auth/4114 + adc_cert_path: str, client certificate file path to save the application + default client certificate for mTLS. This field is required if you want to + use the default client certificate. `GOOGLE_API_USE_CLIENT_CERTIFICATE` + environment variable must be set to `true` in order to use this field, + otherwise this field doesn't nothing. + More details on the environment variables are here: + https://google.aip.dev/auth/4114 + adc_key_path: str, client encrypted private key file path to save the + application default client encrypted private key for mTLS. This field is + required if you want to use the default client certificate. + `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be set to + `true` in order to use this field, otherwise this field doesn't nothing. + More details on the environment variables are here: + https://google.aip.dev/auth/4114 + num_retries: Integer, number of times to retry discovery with + randomized exponential backoff in case of intermittent/connection issues. + static_discovery: Boolean, whether or not to use the static discovery docs + included in the library. The default value for `static_discovery` depends + on the value of `discoveryServiceUrl`. `static_discovery` will default to + `True` when `discoveryServiceUrl` is also not provided, otherwise it will + default to `False`. + always_use_jwt_access: Boolean, whether always use self signed JWT for service + account credentials. This only applies to + google.oauth2.service_account.Credentials. + + Returns: + A Resource object with methods for interacting with the service. + + Raises: + google.auth.exceptions.MutualTLSChannelError: if there are any problems + setting up mutual TLS channel. + """ + ... 
+ +@positional(1) +def build_from_document(service, base=..., future=..., http=..., developerKey=..., model=..., requestBuilder=..., credentials=..., client_options=..., adc_cert_path=..., adc_key_path=..., always_use_jwt_access=...): + """Create a Resource for interacting with an API. + + Same as `build()`, but constructs the Resource object from a discovery + document that is it given, as opposed to retrieving one over HTTP. + + Args: + service: string or object, the JSON discovery document describing the API. + The value passed in may either be the JSON string or the deserialized + JSON. + base: string, base URI for all HTTP requests, usually the discovery URI. + This parameter is no longer used as rootUrl and servicePath are included + within the discovery document. (deprecated) + future: string, discovery document with future capabilities (deprecated). + http: httplib2.Http, An instance of httplib2.Http or something that acts + like it that HTTP requests will be made through. + developerKey: string, Key for controlling API usage, generated + from the API Console. + model: Model class instance that serializes and de-serializes requests and + responses. + requestBuilder: Takes an http request and packages it up to be executed. + credentials: oauth2client.Credentials or + google.auth.credentials.Credentials, credentials to be used for + authentication. + client_options: Mapping object or google.api_core.client_options, client + options to set user options on the client. + (1) The API endpoint should be set through client_options. If API endpoint + is not set, `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable can be used + to control which endpoint to use. + (2) client_cert_source is not supported, client cert should be provided using + client_encrypted_cert_source instead. In order to use the provided client + cert, `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be + set to `true`. 
+ More details on the environment variables are here: + https://google.aip.dev/auth/4114 + adc_cert_path: str, client certificate file path to save the application + default client certificate for mTLS. This field is required if you want to + use the default client certificate. `GOOGLE_API_USE_CLIENT_CERTIFICATE` + environment variable must be set to `true` in order to use this field, + otherwise this field doesn't nothing. + More details on the environment variables are here: + https://google.aip.dev/auth/4114 + adc_key_path: str, client encrypted private key file path to save the + application default client encrypted private key for mTLS. This field is + required if you want to use the default client certificate. + `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be set to + `true` in order to use this field, otherwise this field doesn't nothing. + More details on the environment variables are here: + https://google.aip.dev/auth/4114 + always_use_jwt_access: Boolean, whether always use self signed JWT for service + account credentials. This only applies to + google.oauth2.service_account.Credentials. + + Returns: + A Resource object with methods for interacting with the service. + + Raises: + google.auth.exceptions.MutualTLSChannelError: if there are any problems + setting up mutual TLS channel. + """ + ... + +class ResourceMethodParameters: + """Represents the parameters associated with a method. + + Attributes: + argmap: Map from method parameter name (string) to query parameter name + (string). + required_params: List of required parameters (represented by parameter + name as string). + repeated_params: List of repeated parameters (represented by parameter + name as string). + pattern_params: Map from method parameter name (string) to regular + expression (as a string). If the pattern is set for a parameter, the + value for that parameter must match the regular expression. 
+ query_params: List of parameters (represented by parameter name as string) + that will be used in the query string. + path_params: Set of parameters (represented by parameter name as string) + that will be used in the base URL path. + param_types: Map from method parameter name (string) to parameter type. Type + can be any valid JSON schema type; valid values are 'any', 'array', + 'boolean', 'integer', 'number', 'object', or 'string'. Reference: + http://tools.ietf.org/html/draft-zyp-json-schema-03#section-5.1 + enum_params: Map from method parameter name (string) to list of strings, + where each list of strings is the list of acceptable enum values. + """ + def __init__(self, method_desc) -> None: + """Constructor for ResourceMethodParameters. + + Sets default values and defers to set_parameters to populate. + + Args: + method_desc: Dictionary with metadata describing an API method. Value + comes from the dictionary of methods stored in the 'methods' key in + the deserialized discovery document. + """ + ... + + def set_parameters(self, method_desc): # -> None: + """Populates maps and lists based on method description. + + Iterates through each parameter for the method and parses the values from + the parameter dictionary. + + Args: + method_desc: Dictionary with metadata describing an API method. Value + comes from the dictionary of methods stored in the 'methods' key in + the deserialized discovery document. + """ + ... + + + +def createMethod(methodName, methodDesc, rootDesc, schema): # -> tuple[Any, Callable[..., Any]]: + """Creates a method for attaching to a Resource. + + Args: + methodName: string, name of the method to use. + methodDesc: object, fragment of deserialized discovery document that + describes the method. + rootDesc: object, the entire deserialized discovery document. + schema: object, mapping of schema names to schema descriptions. + """ + ... 
+ +def createNextMethod(methodName, pageTokenName=..., nextPageTokenName=..., isPageTokenParameter=...): # -> tuple[Any, Callable[..., Any | None]]: + """Creates any _next methods for attaching to a Resource. + + The _next methods allow for easy iteration through list() responses. + + Args: + methodName: string, name of the method to use. + pageTokenName: string, name of request page token field. + nextPageTokenName: string, name of response page token field. + isPageTokenParameter: Boolean, True if request page token is a query + parameter, False if request page token is a field of the request body. + """ + ... + +class Resource: + """A class for interacting with a resource.""" + def __init__(self, http, baseUrl, model, requestBuilder, developerKey, resourceDesc, rootDesc, schema, universe_domain=...) -> None: + """Build a Resource from the API description. + + Args: + http: httplib2.Http, Object to make http requests with. + baseUrl: string, base URL for the API. All requests are relative to this + URI. + model: googleapiclient.Model, converts to and from the wire format. + requestBuilder: class or callable that instantiates an + googleapiclient.HttpRequest object. + developerKey: string, key obtained from + https://code.google.com/apis/console + resourceDesc: object, section of deserialized discovery document that + describes a resource. Note that the top level discovery document + is considered a resource. + rootDesc: object, the entire deserialized discovery document. + schema: object, mapping of schema names to schema descriptions. + universe_domain: string, the universe for the API. The default universe + is "googleapis.com". + """ + ... + + def __getstate__(self): # -> dict[str, Any]: + """Trim the state down to something that can be pickled. + + Uses the fact that the instance variable _dynamic_attrs holds attrs that + will be wiped and restored on pickle serialization. + """ + ... 
+ + def __setstate__(self, state): # -> None: + """Reconstitute the state of the object from being pickled. + + Uses the fact that the instance variable _dynamic_attrs holds attrs that + will be wiped and restored on pickle serialization. + """ + ... + + def __enter__(self): # -> Self: + ... + + def __exit__(self, exc_type, exc, exc_tb): # -> None: + ... + + def close(self): # -> None: + """Close httplib2 connections.""" + ... + + + diff --git a/typings/googleapiclient/discovery_cache/__init__.pyi b/typings/googleapiclient/discovery_cache/__init__.pyi new file mode 100644 index 0000000..bf3642a --- /dev/null +++ b/typings/googleapiclient/discovery_cache/__init__.pyi @@ -0,0 +1,34 @@ +""" +This type stub file was generated by pyright. +""" + +import logging +import os + +"""Caching utility for the discovery document.""" +LOGGER = ... +DISCOVERY_DOC_MAX_AGE = ... +DISCOVERY_DOC_DIR = ... +def autodetect(): # -> googleapiclient.discovery_cache.appengine_memcache.Cache | googleapiclient.discovery_cache.file_cache.Cache | None: + """Detects an appropriate cache module and returns it. + + Returns: + googleapiclient.discovery_cache.base.Cache, a cache object which + is auto detected, or None if no cache object is available. + """ + ... + +def get_static_doc(serviceName, version): # -> str | None: + """Retrieves the discovery document from the directory defined in + DISCOVERY_DOC_DIR corresponding to the serviceName and version provided. + + Args: + serviceName: string, name of the service. + version: string, the version of the service. + + Returns: + A string containing the contents of the JSON discovery document, + otherwise None if the JSON discovery document was not found. + """ + ... 
+ diff --git a/typings/googleapiclient/discovery_cache/appengine_memcache.pyi b/typings/googleapiclient/discovery_cache/appengine_memcache.pyi new file mode 100644 index 0000000..ef8bfd8 --- /dev/null +++ b/typings/googleapiclient/discovery_cache/appengine_memcache.pyi @@ -0,0 +1,28 @@ +""" +This type stub file was generated by pyright. +""" + +from . import base + +"""App Engine memcache based cache for the discovery document.""" +LOGGER = ... +NAMESPACE = ... +class Cache(base.Cache): + """A cache with app engine memcache API.""" + def __init__(self, max_age) -> None: + """Constructor. + + Args: + max_age: Cache expiration in seconds. + """ + ... + + def get(self, url): # -> None: + ... + + def set(self, url, content): # -> None: + ... + + + +cache = ... diff --git a/typings/googleapiclient/discovery_cache/base.pyi b/typings/googleapiclient/discovery_cache/base.pyi new file mode 100644 index 0000000..abf479b --- /dev/null +++ b/typings/googleapiclient/discovery_cache/base.pyi @@ -0,0 +1,35 @@ +""" +This type stub file was generated by pyright. +""" + +import abc + +"""An abstract class for caching the discovery document.""" +class Cache: + """A base abstract cache class.""" + __metaclass__ = abc.ABCMeta + @abc.abstractmethod + def get(self, url): + """Gets the content from the memcache with a given key. + + Args: + url: string, the key for the cache. + + Returns: + object, the value in the cache for the given key, or None if the key is + not in the cache. + """ + ... + + @abc.abstractmethod + def set(self, url, content): + """Sets the given key and content in the cache. + + Args: + url: string, the key for the cache. + content: string, the discovery document. + """ + ... 
+ + + diff --git a/typings/googleapiclient/discovery_cache/file_cache.pyi b/typings/googleapiclient/discovery_cache/file_cache.pyi new file mode 100644 index 0000000..04576a0 --- /dev/null +++ b/typings/googleapiclient/discovery_cache/file_cache.pyi @@ -0,0 +1,35 @@ +""" +This type stub file was generated by pyright. +""" + +from . import base + +"""File based cache for the discovery document. + +The cache is stored in a single file so that multiple processes can +share the same cache. It locks the file whenever accessing to the +file. When the cache content is corrupted, it will be initialized with +an empty cache. +""" +LOGGER = ... +FILENAME = ... +EPOCH = ... +class Cache(base.Cache): + """A file based cache for the discovery documents.""" + def __init__(self, max_age) -> None: + """Constructor. + + Args: + max_age: Cache expiration in seconds. + """ + ... + + def get(self, url): # -> Any | None: + ... + + def set(self, url, content): # -> None: + ... + + + +cache = ... diff --git a/typings/googleapiclient/errors.pyi b/typings/googleapiclient/errors.pyi new file mode 100644 index 0000000..097974f --- /dev/null +++ b/typings/googleapiclient/errors.pyi @@ -0,0 +1,108 @@ +""" +This type stub file was generated by pyright. +""" + +from googleapiclient import _helpers as util + +"""Errors for the library. + +All exceptions defined by the library +should be defined in this file. +""" +__author__ = ... +class Error(Exception): + """Base error for this module.""" + ... + + +class HttpError(Error): + """HTTP data was invalid or unexpected.""" + @util.positional(3) + def __init__(self, resp, content, uri=...) -> None: + ... + + @property + def status_code(self): + """Return the HTTP status code from the response content.""" + ... + + def __repr__(self): # -> str: + ... + + __str__ = ... + + +class InvalidJsonError(Error): + """The JSON returned could not be parsed.""" + ... + + +class UnknownFileType(Error): + """File type unknown or unexpected.""" + ... 
+ + +class UnknownLinkType(Error): + """Link type unknown or unexpected.""" + ... + + +class UnknownApiNameOrVersion(Error): + """No API with that name and version exists.""" + ... + + +class UnacceptableMimeTypeError(Error): + """That is an unacceptable mimetype for this operation.""" + ... + + +class MediaUploadSizeError(Error): + """Media is larger than the method can accept.""" + ... + + +class ResumableUploadError(HttpError): + """Error occurred during resumable upload.""" + ... + + +class InvalidChunkSizeError(Error): + """The given chunksize is not valid.""" + ... + + +class InvalidNotificationError(Error): + """The channel Notification is invalid.""" + ... + + +class BatchError(HttpError): + """Error occurred during batch operations.""" + @util.positional(2) + def __init__(self, reason, resp=..., content=...) -> None: + ... + + def __repr__(self): # -> LiteralString: + ... + + __str__ = ... + + +class UnexpectedMethodError(Error): + """Exception raised by RequestMockBuilder on unexpected calls.""" + @util.positional(1) + def __init__(self, methodId=...) -> None: + """Constructor for an UnexpectedMethodError.""" + ... + + + +class UnexpectedBodyError(Error): + """Exception raised by RequestMockBuilder on unexpected bodies.""" + def __init__(self, expected, provided) -> None: + """Constructor for an UnexpectedMethodError.""" + ... + + + diff --git a/typings/googleapiclient/http.pyi b/typings/googleapiclient/http.pyi new file mode 100644 index 0000000..9e36e63 --- /dev/null +++ b/typings/googleapiclient/http.pyi @@ -0,0 +1,857 @@ +""" +This type stub file was generated by pyright. +""" + +from googleapiclient import _helpers as util + +"""Classes to encapsulate a single HTTP request. + +The classes implement a command pattern, with every +object supporting an execute() method that does the +actual HTTP request. +""" +__author__ = ... +LOGGER = ... +DEFAULT_CHUNK_SIZE = ... +MAX_URI_LENGTH = ... +MAX_BATCH_LIMIT = ... +_TOO_MANY_REQUESTS = ... 
+DEFAULT_HTTP_TIMEOUT_SEC = ... +_LEGACY_BATCH_URI = ... +class MediaUploadProgress: + """Status of a resumable upload.""" + def __init__(self, resumable_progress, total_size) -> None: + """Constructor. + + Args: + resumable_progress: int, bytes sent so far. + total_size: int, total bytes in complete upload, or None if the total + upload size isn't known ahead of time. + """ + ... + + def progress(self): # -> float: + """Percent of upload completed, as a float. + + Returns: + the percentage complete as a float, returning 0.0 if the total size of + the upload is unknown. + """ + ... + + + +class MediaDownloadProgress: + """Status of a resumable download.""" + def __init__(self, resumable_progress, total_size) -> None: + """Constructor. + + Args: + resumable_progress: int, bytes received so far. + total_size: int, total bytes in complete download. + """ + ... + + def progress(self): # -> float: + """Percent of download completed, as a float. + + Returns: + the percentage complete as a float, returning 0.0 if the total size of + the download is unknown. + """ + ... + + + +class MediaUpload: + """Describes a media object to upload. + + Base class that defines the interface of MediaUpload subclasses. + + Note that subclasses of MediaUpload may allow you to control the chunksize + when uploading a media object. It is important to keep the size of the chunk + as large as possible to keep the upload efficient. Other factors may influence + the size of the chunk you use, particularly if you are working in an + environment where individual HTTP requests may have a hardcoded time limit, + such as under certain classes of requests under Google App Engine. + + Streams are io.Base compatible objects that support seek(). Some MediaUpload + subclasses support using streams directly to upload data. Support for + streaming may be indicated by a MediaUpload sub-class and if appropriate for a + platform that stream will be used for uploading the media object. 
Size of the body, or None if the size is unknown.
+ + Returns: + string, a JSON representation of this instance, suitable to pass to + from_json(). + """ + ... + + @classmethod + def new_from_json(cls, s): # -> Any: + """Utility class method to instantiate a MediaUpload subclass from a JSON + representation produced by to_json(). + + Args: + s: string, JSON from to_json(). + + Returns: + An instance of the subclass of MediaUpload that was serialized with + to_json(). + """ + ... + + + +class MediaIoBaseUpload(MediaUpload): + """A MediaUpload for a io.Base objects. + + Note that the Python file object is compatible with io.Base and can be used + with this class also. + + fh = BytesIO('...Some data to upload...') + media = MediaIoBaseUpload(fh, mimetype='image/png', + chunksize=1024*1024, resumable=True) + farm.animals().insert( + id='cow', + name='cow.png', + media_body=media).execute() + + Depending on the platform you are working on, you may pass -1 as the + chunksize, which indicates that the entire file should be uploaded in a single + request. If the underlying platform supports streams, such as Python 2.6 or + later, then this can be very efficient as it avoids multiple connections, and + also avoids loading the entire file into memory before sending it. Note that + Google App Engine has a 5MB limit on request size, so you should never set + your chunksize larger than 5MB, or to -1. + """ + @util.positional(3) + def __init__(self, fd, mimetype, chunksize=..., resumable=...) -> None: + """Constructor. + + Args: + fd: io.Base or file object, The source of the bytes to upload. MUST be + opened in blocking mode, do not use streams opened in non-blocking mode. + The given stream must be seekable, that is, it must be able to call + seek() on fd. + mimetype: string, Mime-type of the file. + chunksize: int, File will be uploaded in chunks of this many bytes. Only + used if resumable=True. Pass in a value of -1 if the file is to be + uploaded as a single chunk. 
A string of bytes read. May be shorter than length if EOF was reached
      first.
For example, if we had a service that allowed uploading images: + + media = MediaFileUpload('cow.png', mimetype='image/png', + chunksize=1024*1024, resumable=True) + farm.animals().insert( + id='cow', + name='cow.png', + media_body=media).execute() + + Depending on the platform you are working on, you may pass -1 as the + chunksize, which indicates that the entire file should be uploaded in a single + request. If the underlying platform supports streams, such as Python 2.6 or + later, then this can be very efficient as it avoids multiple connections, and + also avoids loading the entire file into memory before sending it. Note that + Google App Engine has a 5MB limit on request size, so you should never set + your chunksize larger than 5MB, or to -1. + """ + @util.positional(2) + def __init__(self, filename, mimetype=..., chunksize=..., resumable=...) -> None: + """Constructor. + + Args: + filename: string, Name of the file. + mimetype: string, Mime-type of the file. If None then a mime-type will be + guessed from the file extension. + chunksize: int, File will be uploaded in chunks of this many bytes. Only + used if resumable=True. Pass in a value of -1 if the file is to be + uploaded in a single chunk. Note that Google App Engine has a 5MB limit + on request size, so you should never set your chunksize larger than 5MB, + or to -1. + resumable: bool, True if this is a resumable upload. False means upload + in a single request. + """ + ... + + def __del__(self): # -> None: + ... + + def to_json(self): # -> str: + """Creating a JSON representation of an instance of MediaFileUpload. + + Returns: + string, a JSON representation of this instance, suitable to pass to + from_json(). + """ + ... + + @staticmethod + def from_json(s): # -> MediaFileUpload: + ... + + + +class MediaInMemoryUpload(MediaIoBaseUpload): + """MediaUpload for a chunk of bytes. + + DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or io.StringIO for + the stream. 
+ """ + @util.positional(2) + def __init__(self, body, mimetype=..., chunksize=..., resumable=...) -> None: + """Create a new MediaInMemoryUpload. + + DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or io.StringIO for + the stream. + + Args: + body: string, Bytes of body content. + mimetype: string, Mime-type of the file or default of + 'application/octet-stream'. + chunksize: int, File will be uploaded in chunks of this many bytes. Only + used if resumable=True. + resumable: bool, True if this is a resumable upload. False means upload + in a single request. + """ + ... + + + +class MediaIoBaseDownload: + """ "Download media resources. + + Note that the Python file object is compatible with io.Base and can be used + with this class also. + + + Example: + request = farms.animals().get_media(id='cow') + fh = io.FileIO('cow.png', mode='wb') + downloader = MediaIoBaseDownload(fh, request, chunksize=1024*1024) + + done = False + while done is False: + status, done = downloader.next_chunk() + if status: + print "Download %d%%." % int(status.progress() * 100) + print "Download Complete!" + """ + @util.positional(3) + def __init__(self, fd, request, chunksize=...) -> None: + """Constructor. + + Args: + fd: io.Base or file object, The stream in which to write the downloaded + bytes. + request: googleapiclient.http.HttpRequest, the media request to perform in + chunks. + chunksize: int, File will be downloaded in chunks of this many bytes. + """ + ... + + @util.positional(1) + def next_chunk(self, num_retries=...): # -> tuple[MediaDownloadProgress, bool] | tuple[MediaDownloadProgress, Literal[True]]: + """Get the next chunk of the download. + + Args: + num_retries: Integer, number of times to retry with randomized + exponential backoff. If all retries fail, the raised HttpError + represents the last request. If zero (default), we attempt the + request only once. 
+ + Returns: + (status, done): (MediaDownloadProgress, boolean) + The value of 'done' will be True when the media has been fully + downloaded or the total size of the media is unknown. + + Raises: + googleapiclient.errors.HttpError if the response was not a 2xx. + httplib2.HttpLib2Error if a transport error has occurred. + """ + ... + + + +class _StreamSlice: + """Truncated stream. + + Takes a stream and presents a stream that is a slice of the original stream. + This is used when uploading media in chunks. In later versions of Python a + stream can be passed to httplib in place of the string of data to send. The + problem is that httplib just blindly reads to the end of the stream. This + wrapper presents a virtual stream that only reads to the end of the chunk. + """ + def __init__(self, stream, begin, chunksize) -> None: + """Constructor. + + Args: + stream: (io.Base, file object), the stream to wrap. + begin: int, the seek position the chunk begins at. + chunksize: int, the size of the chunk. + """ + ... + + def read(self, n=...): + """Read n bytes. + + Args: + n, int, the number of bytes to read. + + Returns: + A string of length 'n', or less if EOF is reached. + """ + ... + + + +class HttpRequest: + """Encapsulates a single HTTP request.""" + @util.positional(4) + def __init__(self, http, postproc, uri, method=..., body=..., headers=..., methodId=..., resumable=...) -> None: + """Constructor for an HttpRequest. + + Args: + http: httplib2.Http, the transport object to use to make a request + postproc: callable, called on the HTTP response and content to transform + it into a data object before returning, or raising an exception + on an error. + uri: string, the absolute URI to send the request to + method: string, the HTTP method to use + body: string, the request body of the HTTP request, + headers: dict, the HTTP request headers + methodId: string, a unique identifier for the API method being called. 
resumable: MediaUpload, None if this is not a resumable request.
+ num_retries: Integer, number of times to retry with randomized + exponential backoff. If all retries fail, the raised HttpError + represents the last request. If zero (default), we attempt the + request only once. + + Returns: + (status, body): (ResumableMediaStatus, object) + The body will be None until the resumable media is fully uploaded. + + Raises: + googleapiclient.errors.HttpError if the response was not a 2xx. + httplib2.HttpLib2Error if a transport error has occurred. + """ + ... + + def to_json(self): # -> str: + """Returns a JSON representation of the HttpRequest.""" + ... + + @staticmethod + def from_json(s, http, postproc): # -> HttpRequest: + """Returns an HttpRequest populated with info from a JSON object.""" + ... + + @staticmethod + def null_postproc(resp, contents): # -> tuple[Any, Any]: + ... + + + +class BatchHttpRequest: + """Batches multiple HttpRequest objects into a single HTTP request. + + Example: + from googleapiclient.http import BatchHttpRequest + + def list_animals(request_id, response, exception): + \"\"\"Do something with the animals list response.\"\"\" + if exception is not None: + # Do something with the exception. + pass + else: + # Do something with the response. + pass + + def list_farmers(request_id, response, exception): + \"\"\"Do something with the farmers list response.\"\"\" + if exception is not None: + # Do something with the exception. + pass + else: + # Do something with the response. + pass + + service = build('farm', 'v2') + + batch = BatchHttpRequest() + + batch.add(service.animals().list(), list_animals) + batch.add(service.farmers().list(), list_farmers) + batch.execute(http=http) + """ + @util.positional(1) + def __init__(self, callback=..., batch_uri=...) -> None: + """Constructor for a BatchHttpRequest. + + Args: + callback: callable, A callback to be called for each response, of the + form callback(id, response, exception). 
The first parameter is the + request id, and the second is the deserialized response object. The + third is an googleapiclient.errors.HttpError exception object if an HTTP error + occurred while processing the request, or None if no error occurred. + batch_uri: string, URI to send batch requests to. + """ + ... + + @util.positional(2) + def add(self, request, callback=..., request_id=...): # -> None: + """Add a new request. + + Every callback added will be paired with a unique id, the request_id. That + unique id will be passed back to the callback when the response comes back + from the server. The default behavior is to have the library generate it's + own unique id. If the caller passes in a request_id then they must ensure + uniqueness for each request_id, and if they are not an exception is + raised. Callers should either supply all request_ids or never supply a + request id, to avoid such an error. + + Args: + request: HttpRequest, Request to add to the batch. + callback: callable, A callback to be called for this response, of the + form callback(id, response, exception). The first parameter is the + request id, and the second is the deserialized response object. The + third is an googleapiclient.errors.HttpError exception object if an HTTP error + occurred while processing the request, or None if no errors occurred. + request_id: string, A unique id for the request. The id will be passed + to the callback with the response. + + Returns: + None + + Raises: + BatchError if a media request is added to a batch. + KeyError is the request_id is not unique. + """ + ... + + @util.positional(1) + def execute(self, http=...): # -> None: + """Execute all the requests as a single batched HTTP request. + + Args: + http: httplib2.Http, an http object to be used in place of the one the + HttpRequest request object was constructed with. If one isn't supplied + then use a http object from the requests in this batch. 
200 OK with an empty string as the response content or raise an exception
    if check_unexpected is set to True. The methodId is taken from the rpcName
There are special values you can pass in for content to trigger
    behaviours that are helpful in testing.
h = tunnel_patch(h)
('application', 'xhtml', {'q': '0.5'})
For example, the media range 'application/*;q=0.5' would get parsed + into: + + ('application', '*', {'q', '0.5'}) + + In addition this function also guarantees that there is a value for 'q' + in the params dictionary, filling it in with a proper default if + necessary. + """ + ... + +def fitness_and_quality_parsed(mime_type, parsed_ranges): # -> tuple[Any | int, float]: + """Find the best match for a mime-type amongst parsed media-ranges. + + Find the best match for a given mime-type against a list of media_ranges + that have already been parsed by parse_media_range(). Returns a tuple of + the fitness value and the value of the 'q' quality parameter of the best + match, or (-1, 0) if no match was found. Just as for quality_parsed(), + 'parsed_ranges' must be a list of parsed media ranges. + """ + ... + +def quality_parsed(mime_type, parsed_ranges): # -> float: + """Find the best match for a mime-type amongst parsed media-ranges. + + Find the best match for a given mime-type against a list of media_ranges + that have already been parsed by parse_media_range(). Returns the 'q' + quality parameter of the best match, 0 if no match was found. This function + bahaves the same as quality() except that 'parsed_ranges' must be a list of + parsed media ranges. + """ + ... + +def quality(mime_type, ranges): # -> float: + """Return the quality ('q') of a mime-type against a list of media-ranges. + + Returns the quality 'q' of a mime-type when compared against the + media-ranges in ranges. For example: + + >>> quality('text/html','text/*;q=0.3, text/html;q=0.7, + text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5') + 0.7 + + """ + ... + +def best_match(supported, header): # -> Literal['']: + """Return mime-type with the highest quality ('q') from list of candidates. + + Takes a list of supported mime-types and finds the best match for all the + media-ranges listed in header. The value of header must be a string that + conforms to the format of the HTTP Accept: header. 
The value of 'supported' + is a list of mime-types. The list of supported mime-types should be sorted + in order of increasing desirability, in case of a situation where there is + a tie. + + >>> best_match(['application/xbel+xml', 'text/xml'], + 'text/*;q=0.5,*/*; q=0.1') + 'text/xml' + """ + ... + diff --git a/typings/googleapiclient/model.pyi b/typings/googleapiclient/model.pyi new file mode 100644 index 0000000..27ca3dd --- /dev/null +++ b/typings/googleapiclient/model.pyi @@ -0,0 +1,262 @@ +""" +This type stub file was generated by pyright. +""" + +"""Model objects for requests and responses. + +Each API may support one or more serializations, such +as JSON, Atom, etc. The model classes are responsible +for converting between the wire format and the Python +object representation. +""" +__author__ = ... +HAS_API_VERSION = ... +_LIBRARY_VERSION = ... +_PY_VERSION = ... +LOGGER = ... +dump_request_response = ... +class Model: + """Model base class. + + All Model classes should implement this interface. + The Model serializes and de-serializes between a wire + format such as JSON and a Python object representation. + """ + def request(self, headers, path_params, query_params, body_value): # -> None: + """Updates outgoing requests with a serialized body. + + Args: + headers: dict, request headers + path_params: dict, parameters that appear in the request path + query_params: dict, parameters that appear in the query + body_value: object, the request body as a Python object, which must be + serializable. + Returns: + A tuple of (headers, path_params, query, body) + + headers: dict, request headers + path_params: dict, parameters that appear in the request path + query: string, query part of the request URI + body: string, the body serialized in the desired wire format. + """ + ... + + def response(self, resp, content): # -> None: + """Convert the response wire format into a Python object. 
+ + Args: + resp: httplib2.Response, the HTTP response headers and status + content: string, the body of the HTTP response + + Returns: + The body de-serialized as a Python object. + + Raises: + googleapiclient.errors.HttpError if a non 2xx response is received. + """ + ... + + + +class BaseModel(Model): + """Base model class. + + Subclasses should provide implementations for the "serialize" and + "deserialize" methods, as well as values for the following class attributes. + + Attributes: + accept: The value to use for the HTTP Accept header. + content_type: The value to use for the HTTP Content-type header. + no_content_response: The value to return when deserializing a 204 "No + Content" response. + alt_param: The value to supply as the "alt" query parameter for requests. + """ + accept = ... + content_type = ... + no_content_response = ... + alt_param = ... + def request(self, headers, path_params, query_params, body_value, api_version=...): # -> tuple[Any, Any, Any, Any | None]: + """Updates outgoing requests with a serialized body. + + Args: + headers: dict, request headers + path_params: dict, parameters that appear in the request path + query_params: dict, parameters that appear in the query + body_value: object, the request body as a Python object, which must be + serializable by json. + api_version: str, The precise API version represented by this request, + which will result in an API Version header being sent along with the + HTTP request. + Returns: + A tuple of (headers, path_params, query, body) + + headers: dict, request headers + path_params: dict, parameters that appear in the request path + query: string, query part of the request URI + body: string, the body serialized as JSON + """ + ... + + def response(self, resp, content): # -> None: + """Convert the response wire format into a Python object. 
+ + Args: + resp: httplib2.Response, the HTTP response headers and status + content: string, the body of the HTTP response + + Returns: + The body de-serialized as a Python object. + + Raises: + googleapiclient.errors.HttpError if a non 2xx response is received. + """ + ... + + def serialize(self, body_value): # -> None: + """Perform the actual Python object serialization. + + Args: + body_value: object, the request body as a Python object. + + Returns: + string, the body in serialized form. + """ + ... + + def deserialize(self, content): # -> None: + """Perform the actual deserialization from response string to Python + object. + + Args: + content: string, the body of the HTTP response + + Returns: + The body de-serialized as a Python object. + """ + ... + + + +class JsonModel(BaseModel): + """Model class for JSON. + + Serializes and de-serializes between JSON and the Python + object representation of HTTP request and response bodies. + """ + accept = ... + content_type = ... + alt_param = ... + def __init__(self, data_wrapper=...) -> None: + """Construct a JsonModel. + + Args: + data_wrapper: boolean, wrap requests and responses in a data wrapper + """ + ... + + def serialize(self, body_value): # -> str: + ... + + def deserialize(self, content): # -> Any: + ... + + @property + def no_content_response(self): # -> dict[Any, Any]: + ... + + + +class RawModel(JsonModel): + """Model class for requests that don't return JSON. + + Serializes and de-serializes between JSON and the Python + object representation of HTTP request, and returns the raw bytes + of the response body. + """ + accept = ... + content_type = ... + alt_param = ... + def deserialize(self, content): + ... + + @property + def no_content_response(self): # -> Literal['']: + ... + + + +class MediaModel(JsonModel): + """Model class for requests that return Media. 
+ + Serializes and de-serializes between JSON and the Python + object representation of HTTP request, and returns the raw bytes + of the response body. + """ + accept = ... + content_type = ... + alt_param = ... + def deserialize(self, content): + ... + + @property + def no_content_response(self): # -> Literal['']: + ... + + + +class ProtocolBufferModel(BaseModel): + """Model class for protocol buffers. + + Serializes and de-serializes the binary protocol buffer sent in the HTTP + request and response bodies. + """ + accept = ... + content_type = ... + alt_param = ... + def __init__(self, protocol_buffer) -> None: + """Constructs a ProtocolBufferModel. + + The serialized protocol buffer returned in an HTTP response will be + de-serialized using the given protocol buffer class. + + Args: + protocol_buffer: The protocol buffer class used to de-serialize a + response from the API. + """ + ... + + def serialize(self, body_value): + ... + + def deserialize(self, content): + ... + + @property + def no_content_response(self): + ... + + + +def makepatch(original, modified): # -> dict[Any, Any]: + """Create a patch object. + + Some methods support PATCH, an efficient way to send updates to a resource. + This method allows the easy construction of patch bodies by looking at the + differences between a resource before and after it was modified. + + Args: + original: object, the original deserialized resource + modified: object, the modified deserialized resource + Returns: + An object that contains only the changes from original to modified, in a + form suitable to pass to a PATCH method. + + Example usage: + item = service.activities().get(postid=postid, userid=userid).execute() + original = copy.deepcopy(item) + item['object']['content'] = 'This is updated.' + service.activities.patch(postid=postid, userid=userid, + body=makepatch(original, item)).execute() + """ + ... 
+ diff --git a/typings/googleapiclient/sample_tools.pyi b/typings/googleapiclient/sample_tools.pyi new file mode 100644 index 0000000..af85775 --- /dev/null +++ b/typings/googleapiclient/sample_tools.pyi @@ -0,0 +1,36 @@ +""" +This type stub file was generated by pyright. +""" + +"""Utilities for making samples. + +Consolidates a lot of code commonly repeated in sample applications. +""" +__author__ = ... +__all__ = ["init"] +def init(argv, name, version, doc, filename, scope=..., parents=..., discovery_filename=...): # -> tuple[Any, Namespace]: + """A common initialization routine for samples. + + Many of the sample applications do the same initialization, which has now + been consolidated into this function. This function uses common idioms found + in almost all the samples, i.e. for an API with name 'apiname', the + credentials are stored in a file named apiname.dat, and the + client_secrets.json file is stored in the same directory as the application + main file. + + Args: + argv: list of string, the command-line parameters of the application. + name: string, name of the API. + version: string, version of the API. + doc: string, description of the application. Usually set to __doc__. + file: string, filename of the application. Usually set to __file__. + parents: list of argparse.ArgumentParser, additional command-line flags. + scope: string, The OAuth scope used. + discovery_filename: string, name of local discovery file (JSON). Use when discovery doc not available via URL. + + Returns: + A tuple of (service, flags), where service is the service object and flags + is the parsed command-line flags. + """ + ... + diff --git a/typings/googleapiclient/schema.pyi b/typings/googleapiclient/schema.pyi new file mode 100644 index 0000000..0a81ca6 --- /dev/null +++ b/typings/googleapiclient/schema.pyi @@ -0,0 +1,160 @@ +""" +This type stub file was generated by pyright. 
+""" + +from googleapiclient import _helpers as util + +"""Schema processing for discovery based APIs + +Schemas holds an APIs discovery schemas. It can return those schema as +deserialized JSON objects, or pretty print them as prototype objects that +conform to the schema. + +For example, given the schema: + + schema = \"\"\"{ + "Foo": { + "type": "object", + "properties": { + "etag": { + "type": "string", + "description": "ETag of the collection." + }, + "kind": { + "type": "string", + "description": "Type of the collection ('calendar#acl').", + "default": "calendar#acl" + }, + "nextPageToken": { + "type": "string", + "description": "Token used to access the next + page of this result. Omitted if no further results are available." + } + } + } + }\"\"\" + + s = Schemas(schema) + print s.prettyPrintByName('Foo') + + Produces the following output: + + { + "nextPageToken": "A String", # Token used to access the + # next page of this result. Omitted if no further results are available. + "kind": "A String", # Type of the collection ('calendar#acl'). + "etag": "A String", # ETag of the collection. + }, + +The constructor takes a discovery document in which to look up named schema. +""" +__author__ = ... +class Schemas: + """Schemas for an API.""" + def __init__(self, discovery) -> None: + """Constructor. + + Args: + discovery: object, Deserialized discovery document from which we pull + out the named schema. + """ + ... + + def prettyPrintByName(self, name): + """Get pretty printed object prototype from the schema name. + + Args: + name: string, Name of schema in the discovery document. + + Returns: + string, A string that contains a prototype object with + comments that conforms to the given schema. + """ + ... + + def prettyPrintSchema(self, schema): # -> LiteralString: + """Get pretty printed object prototype of schema. + + Args: + schema: object, Parsed JSON schema. 
+ + Returns: + string, A string that contains a prototype object with + comments that conforms to the given schema. + """ + ... + + def get(self, name, default=...): + """Get deserialized JSON schema from the schema name. + + Args: + name: string, Schema name. + default: object, return value if name not found. + """ + ... + + + +class _SchemaToStruct: + """Convert schema to a prototype object.""" + @util.positional(3) + def __init__(self, schema, seen, dent=...) -> None: + """Constructor. + + Args: + schema: object, Parsed JSON schema. + seen: list, List of names of schema already seen while parsing. Used to + handle recursive definitions. + dent: int, Initial indentation depth. + """ + ... + + def emit(self, text): # -> None: + """Add text as a line to the output. + + Args: + text: string, Text to output. + """ + ... + + def emitBegin(self, text): # -> None: + """Add text to the output, but with no line terminator. + + Args: + text: string, Text to output. + """ + ... + + def emitEnd(self, text, comment): # -> None: + """Add text and comment to the output with line terminator. + + Args: + text: string, Text to output. + comment: string, Python comment. + """ + ... + + def indent(self): # -> None: + """Increase indentation level.""" + ... + + def undent(self): # -> None: + """Decrease indentation level.""" + ... + + def to_str(self, from_cache): # -> LiteralString: + """Prototype object based on the schema, in Python code with comments. + + Args: + from_cache: callable(name, seen), Callable that retrieves an object + prototype for a schema with the given name. Seen is a list of schema + names already seen as we recursively descend the schema definition. + + Returns: + Prototype object based on the schema, in Python code with comments. + The lines of the code will all be properly indented. + """ + ... 
+ + + diff --git a/typings/googleapiclient/version.pyi b/typings/googleapiclient/version.pyi new file mode 100644 index 0000000..fc797c9 --- /dev/null +++ b/typings/googleapiclient/version.pyi @@ -0,0 +1,5 @@ +""" +This type stub file was generated by pyright. +""" + +__version__ = ... diff --git a/typings/pywebpush/__init__.pyi b/typings/pywebpush/__init__.pyi new file mode 100644 index 0000000..345c3bc --- /dev/null +++ b/typings/pywebpush/__init__.pyi @@ -0,0 +1,276 @@ +""" +This type stub file was generated by pyright. +""" + +import asyncio +import base64 +import json +import os +import time +import logging +import aiohttp +import http_ece +import requests +from copy import deepcopy +from typing import Dict, Union, cast +from urlparse import urlparse +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.asymmetric import ec +from cryptography.hazmat.primitives import serialization +from functools import partial +from py_vapid import Vapid, Vapid01 +from requests import Response + +class WebPushException(Exception): + """Web Push failure. + + This may contain the requests.Response + + """ + def __init__(self, message, response=...) -> None: + ... + + def __str__(self) -> str: + ... + + + +class NoData(Exception): + """Message contained No Data, no encoding required.""" + ... + + +class CaseInsensitiveDict(dict): + """A dictionary that has case-insensitive keys""" + def __init__(self, data=..., **kwargs) -> None: + ... + + def __contains__(self, key) -> bool: + ... + + def __setitem__(self, key, value): # -> None: + ... + + def __getitem__(self, key): + ... + + def __delitem__(self, key): # -> None: + ... + + def get(self, key, default=...): # -> None: + ... + + def update(self, data) -> None: + ... + + + +class WebPusher: + """WebPusher encrypts a data block using HTTP Encrypted Content Encoding + for WebPush. 
+ + See https://tools.ietf.org/html/draft-ietf-webpush-protocol-04 + for the current specification, and + https://developer.mozilla.org/en-US/docs/Web/API/Push_API for an + overview of Web Push. + + Example of use: + + The javascript promise handler for PushManager.subscribe() + receives a subscription_info object. subscription_info.getJSON() + will return a JSON representation. + (e.g. + .. code-block:: javascript + subscription_info.getJSON() == + {"endpoint": "https://push.server.com/...", + "keys":{"auth": "...", "p256dh": "..."} + } + ) + + This subscription_info block can be stored. + + To send a subscription update: + + .. code-block:: python + # Optional + # headers = py_vapid.sign({"aud": "https://push.server.com/", + "sub": "mailto:your_admin@your.site.com"}) + data = "Mary had a little lamb, with a nice mint jelly" + WebPusher(subscription_info).send(data, headers) + + """ + subscription_info = ... + valid_encodings = ... + verbose = ... + def __init__(self, subscription_info: Dict[str, Union[Union[str, bytes], Dict[str, Union[str, bytes]]]], requests_session: Union[None, requests.Session] = ..., aiohttp_session: Union[None, aiohttp.client.ClientSession] = ..., verbose: bool = ...) -> None: + """Initialize using the info provided by the client PushSubscription + object (See + https://developer.mozilla.org/en-US/docs/Web/API/PushManager/subscribe) + + :param subscription_info: a dict containing the subscription_info from + the client. + :type subscription_info: dict + + :param requests_session: a requests.Session object to optimize requests + to the same client. + :type requests_session: requests.Session + + :param verbose: provide verbose feedback + :type verbose: bool + + """ + ... + + def verb(self, msg: str, *args, **kwargs) -> None: + ... + + def encode(self, data: bytes, content_encoding: str = ...) -> CaseInsensitiveDict: + """Encrypt the data. + + :param data: A serialized block of byte data (String, JSON, bit array, + etc.) 
Make sure that whatever you send, your client knows how + to understand it. + :type data: str + :param content_encoding: The content_encoding type to use to encrypt + the data. Defaults to RFC8188 "aes128gcm". The previous draft-01 is + "aesgcm", however this format is now deprecated. + :type content_encoding: enum("aesgcm", "aes128gcm") + + """ + ... + + def as_curl(self, endpoint: str, encoded_data: bytes, headers: Dict[str, str]) -> str: + """Return the send as a curl command. + + Useful for debugging. This will write out the encoded data to a local + file named `encrypted.data` + + :param endpoint: Push service endpoint URL + :type endpoint: basestring + :param encoded_data: byte array of encoded data + :type encoded_data: bytearray + :param headers: Additional headers for the send + :type headers: dict + :returns string + + """ + ... + + def send(self, *args, **kwargs) -> Union[Response, str]: + """Encode and send the data to the Push Service""" + ... + + async def send_async(self, *args, **kwargs) -> Union[aiohttp.ClientResponse, str]: + ... + + + +def webpush(subscription_info: Dict[str, Union[Union[str, bytes], Dict[str, Union[str, bytes]]]], data: Union[None, str] = ..., vapid_private_key: Union[None, Vapid, str] = ..., vapid_claims: Union[None, Dict[str, Union[str, int]]] = ..., content_encoding: str = ..., curl: bool = ..., timeout: Union[None, float] = ..., ttl: int = ..., verbose: bool = ..., headers: Union[None, Dict[str, Union[str, int, float]]] = ..., requests_session: Union[None, requests.Session] = ...) -> Union[str, requests.Response]: + """ + One call solution to endcode and send `data` to the endpoint + contained in `subscription_info` using optional VAPID auth headers. + + in example: + + .. 
code-block:: python + + from pywebpush import python + + webpush( + subscription_info={ + "endpoint": "https://push.example.com/v1/abcd", + "keys": {"p256dh": "0123abcd...", + "auth": "001122..."} + }, + data="Mary had a little lamb, with a nice mint jelly", + vapid_private_key="path/to/key.pem", + vapid_claims={"sub": "YourNameHere@example.com"} + ) + + No additional method call is required. Any non-success will throw a + `WebPushException`. + + :param subscription_info: Provided by the client call + :type subscription_info: dict + :param data: Serialized data to send + :type data: str + :param vapid_private_key: Vapid instance or path to vapid private key PEM \ + or encoded str + :type vapid_private_key: Union[Vapid, str] + :param vapid_claims: Dictionary of claims ('sub' required) + :type vapid_claims: dict + :param content_encoding: Optional content type string + :type content_encoding: str + :param curl: Return as "curl" string instead of sending + :type curl: bool + :param timeout: POST requests timeout + :type timeout: float + :param ttl: Time To Live + :type ttl: int + :param verbose: Provide verbose feedback + :type verbose: bool + :return requests.Response or string + :param headers: Dictionary of extra HTTP headers to include + :type headers: dict + + """ + ... + +async def webpush_async(subscription_info: Dict[str, Union[Union[str, bytes], Dict[str, Union[str, bytes]]]], data: Union[None, str] = ..., vapid_private_key: Union[None, Vapid, str] = ..., vapid_claims: Union[None, Dict[str, Union[str, int]]] = ..., content_encoding: str = ..., curl: bool = ..., timeout: Union[None, float] = ..., ttl: int = ..., verbose: bool = ..., headers: Union[None, Dict[str, Union[str, int, float]]] = ..., aiohttp_session: Union[None, aiohttp.ClientSession] = ...) -> Union[str, aiohttp.ClientResponse]: + """ + Async version of webpush function. 
One call solution to encode and send + `data` to the endpoint contained in `subscription_info` using optional + VAPID auth headers. + + Example: + + .. code-block:: python + + from pywebpush import webpush_async + import asyncio + + async def send_notification(): + response = await webpush_async( + subscription_info={ + "endpoint": "https://push.example.com/v1/abcd", + "keys": {"p256dh": "0123abcd...", + "auth": "001122..."} + }, + data="Mary had a little lamb, with a nice mint jelly", + vapid_private_key="path/to/key.pem", + vapid_claims={"sub": "YourNameHere@example.com"} + ) + + asyncio.run(send_notification()) + + No additional method call is required. Any non-success will throw a + `WebPushException`. + + :param subscription_info: Provided by the client call + :type subscription_info: dict + :param data: Serialized data to send + :type data: str + :param vapid_private_key: Vapid instance or path to vapid private key PEM \ + or encoded str + :type vapid_private_key: Union[Vapid, str] + :param vapid_claims: Dictionary of claims ('sub' required) + :type vapid_claims: dict + :param content_encoding: Optional content type string + :type content_encoding: str + :param curl: Return as "curl" string instead of sending + :type curl: bool + :param timeout: POST requests timeout + :type timeout: float + :param ttl: Time To Live + :type ttl: int + :param verbose: Provide verbose feedback + :type verbose: bool + :param headers: Dictionary of extra HTTP headers to include + :type headers: dict + :param aiohttp_session: Optional aiohttp ClientSession for connection reuse + :type aiohttp_session: aiohttp.ClientSession + :return aiohttp.ClientResponse or string + + """ + ... + diff --git a/typings/pywebpush/__main__.pyi b/typings/pywebpush/__main__.pyi new file mode 100644 index 0000000..935e773 --- /dev/null +++ b/typings/pywebpush/__main__.pyi @@ -0,0 +1,13 @@ +""" +This type stub file was generated by pyright. +""" + +def get_config(): # -> Namespace: + ... 
+ +def main() -> None: + """Send data""" + ... + +if __name__ == "__main__": + ... diff --git a/typings/pywebpush/foo.pyi b/typings/pywebpush/foo.pyi new file mode 100644 index 0000000..cee36dd --- /dev/null +++ b/typings/pywebpush/foo.pyi @@ -0,0 +1,7 @@ +""" +This type stub file was generated by pyright. +""" + +def send_push_notification(subscription, payload): # -> None: + ... + diff --git a/typings/pywebpush/tests/__init__.pyi b/typings/pywebpush/tests/__init__.pyi new file mode 100644 index 0000000..006bc27 --- /dev/null +++ b/typings/pywebpush/tests/__init__.pyi @@ -0,0 +1,4 @@ +""" +This type stub file was generated by pyright. +""" + From b5700441ba9f12e832a68b373c33702d2ca914f2 Mon Sep 17 00:00:00 2001 From: wailbentafat Date: Fri, 20 Mar 2026 03:18:17 +0100 Subject: [PATCH 06/19] feat: Introduce an audit logging system with a dedicated worker, service, NATS event processing, and database schema. --- app/container.py | 7 ++ app/core/constant.py | 12 +++ app/infra/nats.py | 3 +- app/service/audit.py | 30 ++++++ app/worker/audit/__init__.py | 6 ++ app/worker/audit/main.py | 119 +++++++++++++++++++++++ app/worker/audit/schema/__init__.py | 6 ++ app/worker/audit/schema/audit.py | 21 ++++ app/worker/audit/settings.py | 21 ++++ app/worker/notification/providers/apn.py | 5 +- db/generated/audit.py | 40 ++++++++ db/generated/models.py | 18 ++++ db/queries/audit.sql | 9 ++ 13 files changed, 292 insertions(+), 5 deletions(-) create mode 100644 app/worker/audit/__init__.py create mode 100644 app/worker/audit/schema/__init__.py create mode 100644 app/worker/audit/schema/audit.py create mode 100644 db/generated/audit.py create mode 100644 db/queries/audit.sql diff --git a/app/container.py b/app/container.py index 707e466..fe56610 100644 --- a/app/container.py +++ b/app/container.py @@ -12,6 +12,7 @@ from app.service.staff_notifications import StaffNotificationsService from app.service.staff_user import StaffUserService +from app.service.audit import AuditService from 
app.service.upload_requests import UploadRequestsService from app.service.users import AuthService from app.service.user_notification import UserNotificationService @@ -29,6 +30,7 @@ from db.generated import eventParticipant as participant_queries from db.generated import stuff_user as staff_queries from db.generated import notifications as notification_queries +from db.generated import audit as audit_queries from app.service.event import EventService class Container: @@ -52,6 +54,7 @@ def __init__( self.photo_querier = photo_queries.AsyncQuerier(conn) self.staff_notification_querier = staff_notification_queries.AsyncQuerier(conn) self.notification_querier = notification_queries.AsyncQuerier(conn) + self.audit_querier = audit_queries.AsyncQuerier(conn) self.event_querier = event_queries.AsyncQuerier(conn) self.participant_querier = participant_queries.AsyncQuerier(conn) self.staff_querier = staff_queries.AsyncQuerier(conn) @@ -101,6 +104,10 @@ def __init__( notification_querier=self.notification_querier, ) + self.audit_service = AuditService( + audit_querier=self.audit_querier, + ) + self.staff_user_service = StaffUserService() self.staff_user_service.init( diff --git a/app/core/constant.py b/app/core/constant.py index 7a3b139..36f4951 100644 --- a/app/core/constant.py +++ b/app/core/constant.py @@ -13,6 +13,18 @@ class NotificationChannel(str, Enum): NOTIFICATION_EVENT_SUBJECT = "notification.event" + +class AuditEventType(str, Enum): + USER_SIGNUP = "user.signup" + USER_LOGIN = "user.login" + USER_LOGOUT = "user.logout" + UPLOAD_REQUEST_CREATED = "upload_request.created" + UPLOAD_REQUEST_APPROVED = "upload_request.approved" + UPLOAD_REQUEST_REJECTED = "upload_request.rejected" + + +AUDIT_EVENT_SUBJECT = "audit.event" + IMAGE_ALLOWED_TYPES = { "image/jpeg", "image/png", diff --git a/app/infra/nats.py b/app/infra/nats.py index 5394e98..8e5d1f6 100644 --- a/app/infra/nats.py +++ b/app/infra/nats.py @@ -7,7 +7,7 @@ from pydantic import BaseModel from app.core.config 
import settings -from app.core.constant import NOTIFICATION_EVENT_SUBJECT +from app.core.constant import NOTIFICATION_EVENT_SUBJECT, AUDIT_EVENT_SUBJECT class Message(BaseModel): @@ -19,6 +19,7 @@ class NatsSubjects(Enum): USER_LOGIN = "user.login" USER_LOGOUT = "user.logout" NOTIFICATION_EVENT = NOTIFICATION_EVENT_SUBJECT + AUDIT_EVENT = AUDIT_EVENT_SUBJECT STAFF_UPLOAD_REQUEST_CREATED = "staff.upload_request.created" STAFF_UPLOAD_REQUEST_APPROVED = "staff.upload_request.approved" STAFF_UPLOAD_REQUEST_REJECTED = "staff.upload_request.rejected" diff --git a/app/service/audit.py b/app/service/audit.py index e69de29..95f535d 100644 --- a/app/service/audit.py +++ b/app/service/audit.py @@ -0,0 +1,30 @@ +from __future__ import annotations + +from typing import Any +from uuid import UUID + +from app.core.constant import AuditEventType +from app.core.exceptions import AppException +from db.generated import audit as audit_queries +from db.generated.models import AuditEvent + + +class AuditService: + def __init__(self, audit_querier: audit_queries.AsyncQuerier) -> None: + self.audit_querier = audit_querier + + async def record_event( + self, + *, + event_type: AuditEventType, + user_id: UUID | None = None, + metadata: dict[str, Any] | None = None, + ) -> AuditEvent: + audit = await self.audit_querier.create_audit_event( + event_type=event_type.value, + user_id=user_id, + metadata=metadata, + ) + if audit is None: + raise AppException.internal_error("Failed to persist audit event") + return audit diff --git a/app/worker/audit/__init__.py b/app/worker/audit/__init__.py new file mode 100644 index 0000000..dcd57fc --- /dev/null +++ b/app/worker/audit/__init__.py @@ -0,0 +1,6 @@ +"""Audit worker package exports.""" +from __future__ import annotations + +from .main import main # noqa: F401 + +__all__ = ["main"] diff --git a/app/worker/audit/main.py b/app/worker/audit/main.py index e69de29..ecf3cc2 100644 --- a/app/worker/audit/main.py +++ b/app/worker/audit/main.py @@ -0,0 
+1,119 @@ +"""Audit worker that stores security-relevant events emitted over NATS.""" +from __future__ import annotations + +import asyncio +import json +from typing import Any + +import sqlalchemy.ext.asyncio +from pydantic import ValidationError + +from app.core.constant import AUDIT_EVENT_SUBJECT +from app.core.logger import logger +from app.infra.database import engine +from app.infra.nats import NatsClient, NatsSubjects +from app.service.audit import AuditService +from db.generated import audit as audit_queries + +from app.worker.audit.schema import AuditEventMessage +from app.worker.audit.settings import settings + + +async def init_worker() -> None: + logger.info("Audit worker starting with metadata limit %s", settings.max_metadata_entries) + + +class AuditDeliveryWorker: + def __init__(self) -> None: + self._conn: sqlalchemy.ext.asyncio.AsyncConnection | None = None + self._audit_service: AuditService | None = None + + async def start(self) -> None: + if self._conn is not None: + return + self._conn = await engine.connect() + self._audit_service = AuditService(audit_queries.AsyncQuerier(self._conn)) + + async def stop(self) -> None: + if self._conn is not None: + await self._conn.close() + self._conn = None + self._audit_service = None + + async def persist(self, payload: AuditEventMessage) -> None: + if self._audit_service is None: + logger.warning("Audit service is unavailable for %s", payload.event_type) + return + metadata = self._prune_metadata(payload.metadata) + await self._audit_service.record_event( + event_type=payload.event_type, + user_id=payload.user_id, + metadata=metadata, + ) + logger.info("Persisted audit %s for %s", payload.event_type, payload.user_id) + + @staticmethod + def _prune_metadata(metadata: dict[str, Any] | None) -> dict[str, Any] | None: + if not metadata: + return metadata + if len(metadata) <= settings.max_metadata_entries: + return metadata + trimmed = {} + for idx, key in enumerate(list(metadata)): + if idx >= 
settings.max_metadata_entries: + break + trimmed[key] = metadata[key] + logger.warning("Trimmed audit metadata to %s entries", settings.max_metadata_entries) + return trimmed + + +def _parse_payload(raw_data: bytes) -> dict[str, Any] | None: + try: + parsed = json.loads(raw_data.decode("utf-8")) + if not isinstance(parsed, dict): + logger.warning("Audit payload must be an object, got %s", type(parsed)) + return None + return parsed + except (UnicodeDecodeError, json.JSONDecodeError) as exc: + logger.error("Cannot parse audit payload: %s", exc) + return None + + +async def _handle_event(worker: AuditDeliveryWorker, raw_data: bytes) -> None: + parsed = _parse_payload(raw_data) + if parsed is None: + return + try: + payload = AuditEventMessage.parse_obj(parsed) + except ValidationError as exc: + logger.warning("Audit payload validation failed: %s", exc) + return + try: + await worker.persist(payload) + except Exception: + logger.exception("Failed to persist audit for %s", payload.event_type) + + +async def listen_nats_event(worker: AuditDeliveryWorker) -> None: + await NatsClient.subscribe( + NatsSubjects.AUDIT_EVENT, + lambda data: _handle_event(worker, data), + ) + logger.info("Listening for audit events on %s", AUDIT_EVENT_SUBJECT) + + +async def main() -> None: + await init_worker() + worker = AuditDeliveryWorker() + await worker.start() + await NatsClient.connect() + try: + await listen_nats_event(worker) + await asyncio.Event().wait() + finally: + await worker.stop() + await NatsClient.close() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/app/worker/audit/schema/__init__.py b/app/worker/audit/schema/__init__.py new file mode 100644 index 0000000..2a9aef1 --- /dev/null +++ b/app/worker/audit/schema/__init__.py @@ -0,0 +1,6 @@ +"""Audit worker schema exports.""" +from __future__ import annotations + +from .audit import AuditEventMessage + +__all__ = ["AuditEventMessage"] diff --git a/app/worker/audit/schema/audit.py 
b/app/worker/audit/schema/audit.py new file mode 100644 index 0000000..a6e5cd4 --- /dev/null +++ b/app/worker/audit/schema/audit.py @@ -0,0 +1,21 @@ +"""Pydantic models for audit events.""" +from __future__ import annotations + +from typing import Any +from uuid import UUID + +from pydantic import BaseModel, Extra + +from app.core.constant import AuditEventType + + +class AuditEventMessage(BaseModel): + """Validates the payload sent to the audit worker over NATS.""" + + event_type: AuditEventType + user_id: UUID | None = None + metadata: dict[str, Any] | None = None + description: str | None = None + + class Config: + extra = Extra.forbid diff --git a/app/worker/audit/settings.py b/app/worker/audit/settings.py index e69de29..3ad8cc4 100644 --- a/app/worker/audit/settings.py +++ b/app/worker/audit/settings.py @@ -0,0 +1,21 @@ +"""Configuration for the audit worker.""" +from __future__ import annotations + +from pydantic import BaseSettings, Field + + +class AuditWorkerSettings(BaseSettings): + """Basic feature flags for the audit worker.""" + + max_metadata_entries: int = Field( + 40, + description="Max number of metadata keys kept when persisting audit entries", + ge=1, + le=200, + ) + + class Config: + env_prefix = "AUDIT_" + + +settings = AuditWorkerSettings() diff --git a/app/worker/notification/providers/apn.py b/app/worker/notification/providers/apn.py index 6d6ca15..a2db32c 100644 --- a/app/worker/notification/providers/apn.py +++ b/app/worker/notification/providers/apn.py @@ -10,10 +10,7 @@ async def send_apn_notification(payload: NotificationEventPayload) -> None: - if APNsClient is None or APNPayload is None: - logger.debug("APNs client unavailable; skipping APN delivery") - return - + device_info: Mapping[str, Any] | None = payload.device_info if device_info is None: logger.warning("Payload missing device_info, cannot send APN message: %s", payload) diff --git a/db/generated/audit.py b/db/generated/audit.py new file mode 100644 index 0000000..5e8d260 --- 
/dev/null +++ b/db/generated/audit.py @@ -0,0 +1,40 @@ +# Code generated by sqlc. DO NOT EDIT. +# versions: +# sqlc v1.30.0 +# source: audit.sql +from typing import Any, Optional +import uuid + +import sqlalchemy +import sqlalchemy.ext.asyncio + +from db.generated import models + + +CREATE_AUDIT_EVENT = """-- name: create_audit_event \\:one +INSERT INTO audit_events ( + event_type, + user_id, + metadata +) VALUES ( + :p1, :p2, :p3 +) +RETURNING id, event_type, user_id, metadata, created_at +""" + + +class AsyncQuerier: + def __init__(self, conn: sqlalchemy.ext.asyncio.AsyncConnection): + self._conn = conn + + async def create_audit_event(self, *, event_type: Any, user_id: Optional[uuid.UUID], metadata: Optional[Any]) -> Optional[models.AuditEvent]: + row = (await self._conn.execute(sqlalchemy.text(CREATE_AUDIT_EVENT), {"p1": event_type, "p2": user_id, "p3": metadata})).first() + if row is None: + return None + return models.AuditEvent( + id=row[0], + event_type=row[1], + user_id=row[2], + metadata=row[3], + created_at=row[4], + ) diff --git a/db/generated/models.py b/db/generated/models.py index 3a87f2b..f8353a1 100644 --- a/db/generated/models.py +++ b/db/generated/models.py @@ -8,6 +8,15 @@ import uuid +class AuditEventType(str, enum.Enum): + USERSIGNUP = "user.signup" + USERLOGIN = "user.login" + USERLOGOUT = "user.logout" + UPLOAD_REQUESTCREATED = "upload_request.created" + UPLOAD_REQUESTAPPROVED = "upload_request.approved" + UPLOAD_REQUESTREJECTED = "upload_request.rejected" + + class EventStatus(str, enum.Enum): DRAFT = "draft" SCHEDULED = "scheduled" @@ -44,6 +53,15 @@ class AlembicVersion: version_num: str +@dataclasses.dataclass() +class AuditEvent: + id: uuid.UUID + event_type: Any + user_id: Optional[uuid.UUID] + metadata: Optional[Any] + created_at: datetime.datetime + + @dataclasses.dataclass() class Event: id: uuid.UUID diff --git a/db/queries/audit.sql b/db/queries/audit.sql new file mode 100644 index 0000000..ecdfe5c --- /dev/null +++ 
b/db/queries/audit.sql @@ -0,0 +1,9 @@ +-- name: CreateAuditEvent :one +INSERT INTO audit_events ( + event_type, + user_id, + metadata +) VALUES ( + $1, $2, $3 +) +RETURNING id, event_type, user_id, metadata, created_at; From ec6cd71031acbe97fcff50d31d4dc2883eacc7fa Mon Sep 17 00:00:00 2001 From: wailbentafat Date: Fri, 20 Mar 2026 03:27:23 +0100 Subject: [PATCH 07/19] feat: Add audit table migrations and refactor audit and notification workers for Pydantic V2 compatibility and module cleanup. --- app/worker/audit/main.py | 10 ++------ app/worker/audit/schema/__init__.py | 6 ----- app/worker/audit/schema/audit.py | 12 ++------- app/worker/audit/settings.py | 8 +++--- app/worker/notification/main.py | 11 +++----- app/worker/notification/providers/__init__.py | 8 ------ app/worker/notification/providers/webpush.py | 7 +----- app/worker/notification/schema/__init__.py | 6 ----- .../notification/schema/notification.py | 2 +- app/worker/notification/settings.py | 19 +++++++------- migrations/sql/down/add-audit-table.sql | 5 ++++ migrations/sql/up/add-audit-table.sql | 25 +++++++++++++++++++ .../versions/a1f1d0b6e553_add_audit_table.py | 25 +++++++++++++++++++ 13 files changed, 76 insertions(+), 68 deletions(-) delete mode 100644 app/worker/audit/schema/__init__.py delete mode 100644 app/worker/notification/providers/__init__.py delete mode 100644 app/worker/notification/schema/__init__.py create mode 100644 migrations/sql/down/add-audit-table.sql create mode 100644 migrations/sql/up/add-audit-table.sql create mode 100644 migrations/versions/a1f1d0b6e553_add_audit_table.py diff --git a/app/worker/audit/main.py b/app/worker/audit/main.py index ecf3cc2..10760ec 100644 --- a/app/worker/audit/main.py +++ b/app/worker/audit/main.py @@ -1,21 +1,15 @@ -"""Audit worker that stores security-relevant events emitted over NATS.""" -from __future__ import annotations - import asyncio import json from typing import Any - import sqlalchemy.ext.asyncio from pydantic import 
ValidationError - from app.core.constant import AUDIT_EVENT_SUBJECT from app.core.logger import logger from app.infra.database import engine from app.infra.nats import NatsClient, NatsSubjects from app.service.audit import AuditService from db.generated import audit as audit_queries - -from app.worker.audit.schema import AuditEventMessage +from app.worker.audit.schema.audit import AuditEventMessage from app.worker.audit.settings import settings @@ -84,7 +78,7 @@ async def _handle_event(worker: AuditDeliveryWorker, raw_data: bytes) -> None: if parsed is None: return try: - payload = AuditEventMessage.parse_obj(parsed) + payload = AuditEventMessage.model_validate(parsed) except ValidationError as exc: logger.warning("Audit payload validation failed: %s", exc) return diff --git a/app/worker/audit/schema/__init__.py b/app/worker/audit/schema/__init__.py deleted file mode 100644 index 2a9aef1..0000000 --- a/app/worker/audit/schema/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Audit worker schema exports.""" -from __future__ import annotations - -from .audit import AuditEventMessage - -__all__ = ["AuditEventMessage"] diff --git a/app/worker/audit/schema/audit.py b/app/worker/audit/schema/audit.py index a6e5cd4..145a6fd 100644 --- a/app/worker/audit/schema/audit.py +++ b/app/worker/audit/schema/audit.py @@ -1,21 +1,13 @@ -"""Pydantic models for audit events.""" -from __future__ import annotations - from typing import Any from uuid import UUID - -from pydantic import BaseModel, Extra - +from pydantic import BaseModel from app.core.constant import AuditEventType class AuditEventMessage(BaseModel): - """Validates the payload sent to the audit worker over NATS.""" - event_type: AuditEventType user_id: UUID | None = None metadata: dict[str, Any] | None = None description: str | None = None - class Config: - extra = Extra.forbid + diff --git a/app/worker/audit/settings.py b/app/worker/audit/settings.py index 3ad8cc4..3be6e44 100644 --- a/app/worker/audit/settings.py +++ 
b/app/worker/audit/settings.py @@ -1,15 +1,13 @@ -"""Configuration for the audit worker.""" from __future__ import annotations -from pydantic import BaseSettings, Field +from pydantic import Field +from pydantic_settings import BaseSettings class AuditWorkerSettings(BaseSettings): - """Basic feature flags for the audit worker.""" max_metadata_entries: int = Field( 40, - description="Max number of metadata keys kept when persisting audit entries", ge=1, le=200, ) @@ -18,4 +16,4 @@ class Config: env_prefix = "AUDIT_" -settings = AuditWorkerSettings() +settings = AuditWorkerSettings() # type: ignore diff --git a/app/worker/notification/main.py b/app/worker/notification/main.py index 66f2880..9af3d08 100644 --- a/app/worker/notification/main.py +++ b/app/worker/notification/main.py @@ -1,21 +1,16 @@ import asyncio import json from typing import Any - import sqlalchemy.ext.asyncio - from app.core.constant import NOTIFICATION_EVENT_SUBJECT, NotificationChannel from app.core.logger import logger from app.infra.database import engine from app.infra.nats import NatsClient, NatsSubjects from app.service.device import DeviceService from db.generated import devices as device_queries - -from app.worker.notification.providers import ( - send_apn_notification, - send_fcm_notification, - send_web_push_notification, -) +from app.worker.notification.providers.apn import send_apn_notification +from app.worker.notification.providers.fcm import send_fcm_notification +from app.worker.notification.providers.webpush import send_web_push_notification from app.worker.notification.schema.notification import NotificationEventPayload diff --git a/app/worker/notification/providers/__init__.py b/app/worker/notification/providers/__init__.py deleted file mode 100644 index 32e1188..0000000 --- a/app/worker/notification/providers/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -"""Provider entry points.""" -from __future__ import annotations - -from app.worker.notification.providers.apn import 
send_apn_notification -from app.worker.notification.providers.fcm import send_fcm_notification -from app.worker.notification.providers.webpush import send_web_push_notification - -__all__ = ["send_apn_notification", "send_fcm_notification", "send_web_push_notification"] diff --git a/app/worker/notification/providers/webpush.py b/app/worker/notification/providers/webpush.py index 942ba90..422d545 100644 --- a/app/worker/notification/providers/webpush.py +++ b/app/worker/notification/providers/webpush.py @@ -1,9 +1,6 @@ -"""Web Push integration helpers.""" -from __future__ import annotations import asyncio import json -from typing import Any, Mapping from app.core.logger import logger from app.worker.notification.schema.notification import NotificationEventPayload @@ -12,9 +9,7 @@ async def send_web_push_notification(payload: NotificationEventPayload) -> None: - if webpush is None or WebPushException is None: - logger.debug("pywebpush unavailable; skipping web push delivery") - return + if not payload.device_info: logger.warning("Web notification missing subscription info: %s", payload) diff --git a/app/worker/notification/schema/__init__.py b/app/worker/notification/schema/__init__.py deleted file mode 100644 index 70e23dd..0000000 --- a/app/worker/notification/schema/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Notification worker shared schema exports.""" -from __future__ import annotations - -from .notification import NotificationEventPayload - -__all__ = ["NotificationEventPayload"] diff --git a/app/worker/notification/schema/notification.py b/app/worker/notification/schema/notification.py index 85feaaf..d4c241b 100644 --- a/app/worker/notification/schema/notification.py +++ b/app/worker/notification/schema/notification.py @@ -15,7 +15,7 @@ class NotificationEventPayload: channel: NotificationChannel title: str | None = None body: str | None = None - data: dict[str, str] = dataclasses.field(default_factory=dict) + data: dict[str, str] = 
dataclasses.field(default_factory=dict[str,str]) device_info: Mapping[str, Any] | None = None metadata: Mapping[str, Any] | None = None diff --git a/app/worker/notification/settings.py b/app/worker/notification/settings.py index 1ef343a..4dd2ef2 100644 --- a/app/worker/notification/settings.py +++ b/app/worker/notification/settings.py @@ -1,31 +1,30 @@ -"""Configuration shared between notification providers.""" from __future__ import annotations from typing import Optional -from pydantic import BaseSettings, Field +from pydantic import Field +from pydantic_settings import BaseSettings class NotificationWorkerSettings(BaseSettings): - """Environment driven configuration for the notification worker.""" apn_certificate_path: str = Field( - "/path/to/certificate.pem", description="Path to the APNs certificate in PEM format" + "/path/to/certificate.pem" ) - apn_use_sandbox: bool = Field(True, description="Whether to speak to the APNs sandbox endpoint") + apn_use_sandbox: bool = Field(True) apn_use_alternative_port: bool = Field( - False, description="Use the alternative port when connecting to APNs" + False ) apn_topic: Optional[str] = Field( - None, description="APNs topic (i.e. 
bundle ID) to target" + None ) - webpush_vapid_private_key: Optional[str] = Field(None, description="VAPID private key for web push") + webpush_vapid_private_key: Optional[str] = Field(None) webpush_vapid_claims_subject: str = Field( - "mailto:alerts@example.com", description="VAPID subject for push subscriptions" + "mailto:alerts@example.com" ) class Config: env_prefix = "NOTIFICATION_" -settings = NotificationWorkerSettings() +settings = NotificationWorkerSettings() # type: ignore diff --git a/migrations/sql/down/add-audit-table.sql b/migrations/sql/down/add-audit-table.sql new file mode 100644 index 0000000..0e7800a --- /dev/null +++ b/migrations/sql/down/add-audit-table.sql @@ -0,0 +1,5 @@ +ALTER TABLE public.audit_events DROP CONSTRAINT IF EXISTS audit_events_user_id_fkey; +DROP INDEX IF EXISTS idx_audit_events_event_type; +DROP INDEX IF EXISTS idx_audit_events_user_id; +DROP TABLE IF EXISTS public.audit_events; +DROP TYPE IF EXISTS public.audit_event_type; diff --git a/migrations/sql/up/add-audit-table.sql b/migrations/sql/up/add-audit-table.sql new file mode 100644 index 0000000..430f043 --- /dev/null +++ b/migrations/sql/up/add-audit-table.sql @@ -0,0 +1,25 @@ +CREATE TYPE IF NOT EXISTS public.audit_event_type AS ENUM ( + 'user.signup', + 'user.login', + 'user.logout', + 'upload_request.created', + 'upload_request.approved', + 'upload_request.rejected' +); + +CREATE TABLE IF NOT EXISTS public.audit_events ( + id uuid DEFAULT public.uuid_generate_v4() NOT NULL, + event_type public.audit_event_type NOT NULL, + user_id uuid, + metadata jsonb DEFAULT '{}'::jsonb, + created_at timestamp with time zone DEFAULT now() NOT NULL +); + +CREATE INDEX IF NOT EXISTS idx_audit_events_event_type ON public.audit_events USING btree (event_type); +CREATE INDEX IF NOT EXISTS idx_audit_events_user_id ON public.audit_events USING btree (user_id); + +ALTER TABLE ONLY public.audit_events + ADD CONSTRAINT audit_events_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.audit_events + 
ADD CONSTRAINT audit_events_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE SET NULL; diff --git a/migrations/versions/a1f1d0b6e553_add_audit_table.py b/migrations/versions/a1f1d0b6e553_add_audit_table.py new file mode 100644 index 0000000..11ef5a9 --- /dev/null +++ b/migrations/versions/a1f1d0b6e553_add_audit_table.py @@ -0,0 +1,25 @@ +"""add-audit-table + +Revision ID: a1f1d0b6e553 +Revises: 5ead72a95638 +Create Date: 2026-03-20 00:00:00.000000 + +""" +from typing import Sequence, Union + +from migrations.helper import run_sql_down, run_sql_up + + +# revision identifiers, used by Alembic. +revision: str = 'a1f1d0b6e553' +down_revision: Union[str, Sequence[str], None] = '5ead72a95638' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + run_sql_up("add-audit-table") + + +def downgrade() -> None: + run_sql_down("add-audit-table") From 195f49bd330784b52f9ecbede58e9756dd8a8152 Mon Sep 17 00:00:00 2001 From: wailbentafat Date: Fri, 20 Mar 2026 12:51:22 +0100 Subject: [PATCH 08/19] refactor: migrate notification event payload schema to Pydantic with new mobile and web subscription details and add a makefile target for type checking. 
--- .../notification/schema/notification.py | 94 +++++++++---------- makefile | 3 + 2 files changed, 46 insertions(+), 51 deletions(-) diff --git a/app/worker/notification/schema/notification.py b/app/worker/notification/schema/notification.py index d4c241b..aa07b36 100644 --- a/app/worker/notification/schema/notification.py +++ b/app/worker/notification/schema/notification.py @@ -1,63 +1,55 @@ +"""Payload definitions shared across the notification worker.""" from __future__ import annotations -import dataclasses -import uuid from typing import Any, Mapping +from uuid import UUID + +from pydantic import BaseModel, ConfigDict, Field, root_validator from app.core.constant import NotificationChannel -from app.core.logger import logger -@dataclasses.dataclass -class NotificationEventPayload: +class MobileDeviceInfo(BaseModel): + fcm_token: str | None = None + apn_token: str | None = None + platform: str | None = None + + +class WebPushSubscription(BaseModel): + endpoint: str + keys: dict[str, str] + expiration_time: int | None = None - user_id: uuid.UUID + +class NotificationEventPayload(BaseModel): + user_id: UUID channel: NotificationChannel title: str | None = None body: str | None = None - data: dict[str, str] = dataclasses.field(default_factory=dict[str,str]) - device_info: Mapping[str, Any] | None = None - metadata: Mapping[str, Any] | None = None - - @classmethod - def from_mapping(cls, payload: Mapping[str, Any]) -> "NotificationEventPayload" | None: - raw_user_id = payload.get("user_id") - raw_channel = payload.get("channel") - if not isinstance(raw_user_id, str) or not isinstance(raw_channel, str): - logger.warning("Notification payload missing user_id or channel: %s", payload) - return None - try: - user_id = uuid.UUID(raw_user_id) - except ValueError as exc: - logger.warning("Invalid user_id %s: %s", raw_user_id, exc) - return None - try: - channel = NotificationChannel(raw_channel) - except ValueError: - logger.warning("Unsupported notification channel 
%s", raw_channel) - return None - - raw_data = payload.get("data") - data_dict: dict[str, str] = {} + data: dict[str, str] = Field(default_factory=dict) + mobile_device_info: MobileDeviceInfo | None = None + web_subscription: WebPushSubscription | None = None + metadata: dict[str, Any] | None = None + + model_config = ConfigDict(extra="ignore") + + @root_validator(pre=True) + def _normalize_payload(cls, values: dict[str, Any]) -> dict[str, Any]: + normalized = dict(values) + raw_data = normalized.get("data") if isinstance(raw_data, Mapping): - data_dict = {str(k): str(v) for k, v in raw_data.items()} - - device_info = payload.get("device_info") - if device_info is not None and not isinstance(device_info, Mapping): - logger.warning("device_info must be an object, dropping it: %s", payload) - device_info = None - - metadata = payload.get("metadata") - if metadata is not None and not isinstance(metadata, Mapping): - logger.warning("metadata must be an object, dropping it: %s", payload) - metadata = None - - return cls( - user_id=user_id, - channel=channel, - title=payload.get("title"), - body=payload.get("body"), - data=data_dict, - device_info=device_info, - metadata=metadata, - ) + normalized["data"] = { + key: value + for key, value in raw_data.items() + if isinstance(value, str) + } + raw_metadata = normalized.get("metadata") + if isinstance(raw_metadata, Mapping): + normalized["metadata"] = dict(raw_metadata) + raw_device_info = normalized.pop("device_info", None) + if isinstance(raw_device_info, Mapping): + normalized["mobile_device_info"] = dict(raw_device_info) + raw_subscription = normalized.pop("subscription", None) + if isinstance(raw_subscription, Mapping): + normalized["web_subscription"] = dict(raw_subscription) + return normalized diff --git a/makefile b/makefile index a5d58e5..3acf145 100644 --- a/makefile +++ b/makefile @@ -59,3 +59,6 @@ run-app: lint: uv run ruff check . + +check_type: + uv run mypy . 
From 2e4bb3403c647024840ae3f8d937525aaa379a8b Mon Sep 17 00:00:00 2001 From: wailbentafat Date: Fri, 20 Mar 2026 19:27:44 +0100 Subject: [PATCH 09/19] Refactor notification providers to directly use device/subscription information and remove audit metadata pruning. --- app/core/constant.py | 2 +- app/worker/audit/main.py | 25 +++-------- app/worker/audit/settings.py | 8 +--- app/worker/notification/providers/apn.py | 45 ++++++++++---------- app/worker/notification/providers/fcm.py | 31 ++++++-------- app/worker/notification/providers/webpush.py | 23 ++++++---- 6 files changed, 57 insertions(+), 77 deletions(-) diff --git a/app/core/constant.py b/app/core/constant.py index 36f4951..95bc507 100644 --- a/app/core/constant.py +++ b/app/core/constant.py @@ -11,7 +11,7 @@ class NotificationChannel(str, Enum): MOBILE = "mobile" -NOTIFICATION_EVENT_SUBJECT = "notification.event" +NOTIFICATION_EVENT_SUBJECT = "notification_event" class AuditEventType(str, Enum): diff --git a/app/worker/audit/main.py b/app/worker/audit/main.py index 10760ec..0ba7f1e 100644 --- a/app/worker/audit/main.py +++ b/app/worker/audit/main.py @@ -10,11 +10,10 @@ from app.service.audit import AuditService from db.generated import audit as audit_queries from app.worker.audit.schema.audit import AuditEventMessage -from app.worker.audit.settings import settings async def init_worker() -> None: - logger.info("Audit worker starting with metadata limit %s", settings.max_metadata_entries) + logger.info("Audit worker starting with metadata limit ") class AuditDeliveryWorker: @@ -38,36 +37,22 @@ async def persist(self, payload: AuditEventMessage) -> None: if self._audit_service is None: logger.warning("Audit service is unavailable for %s", payload.event_type) return - metadata = self._prune_metadata(payload.metadata) await self._audit_service.record_event( event_type=payload.event_type, user_id=payload.user_id, - metadata=metadata, + metadata=payload.metadata, ) logger.info("Persisted audit %s for %s", 
payload.event_type, payload.user_id) - @staticmethod - def _prune_metadata(metadata: dict[str, Any] | None) -> dict[str, Any] | None: - if not metadata: - return metadata - if len(metadata) <= settings.max_metadata_entries: - return metadata - trimmed = {} - for idx, key in enumerate(list(metadata)): - if idx >= settings.max_metadata_entries: - break - trimmed[key] = metadata[key] - logger.warning("Trimmed audit metadata to %s entries", settings.max_metadata_entries) - return trimmed - + def _parse_payload(raw_data: bytes) -> dict[str, Any] | None: try: parsed = json.loads(raw_data.decode("utf-8")) if not isinstance(parsed, dict): - logger.warning("Audit payload must be an object, got %s", type(parsed)) + logger.warning("Audit payload must be an object, got %s", type(parsed)) # type: ignore return None - return parsed + return parsed # type: ignore except (UnicodeDecodeError, json.JSONDecodeError) as exc: logger.error("Cannot parse audit payload: %s", exc) return None diff --git a/app/worker/audit/settings.py b/app/worker/audit/settings.py index 3be6e44..e5d1cd6 100644 --- a/app/worker/audit/settings.py +++ b/app/worker/audit/settings.py @@ -1,17 +1,11 @@ from __future__ import annotations -from pydantic import Field from pydantic_settings import BaseSettings class AuditWorkerSettings(BaseSettings): - max_metadata_entries: int = Field( - 40, - ge=1, - le=200, - ) - + class Config: env_prefix = "AUDIT_" diff --git a/app/worker/notification/providers/apn.py b/app/worker/notification/providers/apn.py index a2db32c..3e8d9d6 100644 --- a/app/worker/notification/providers/apn.py +++ b/app/worker/notification/providers/apn.py @@ -1,40 +1,39 @@ import asyncio -from typing import Any, Mapping -from apns2.payload import PayloadAlert -from app.core.logger import logger -from app.worker.notification.schema.notification import NotificationEventPayload -from app.worker.notification.settings import settings from apns2.client import APNsClient -from apns2.payload import Payload as 
APNPayload +from apns2.payload import Payload as APNPayload, PayloadAlert +from app.core.logger import logger +from app.worker.notification.schema.notification import ( + MobileDeviceInfo, + NotificationEventPayload, +) +from app.worker.notification.settings import settings -async def send_apn_notification(payload: NotificationEventPayload) -> None: - - device_info: Mapping[str, Any] | None = payload.device_info - if device_info is None: - logger.warning("Payload missing device_info, cannot send APN message: %s", payload) - return - token = device_info.get("apn_token") - if not isinstance(token, str): - logger.warning("Missing APN token in device_info for payload %s", payload) +async def send_apn_notification( + payload: NotificationEventPayload, + device_info: MobileDeviceInfo, +) -> None: + token = device_info.apn_token + if not token: + logger.warning("Missing APN token for user %s", payload.user_id) return - - apn_payload = APNPayload(alert=PayloadAlert(title=)) + alert = PayloadAlert(title=payload.title or "", body=payload.body) + apn_payload = APNPayload( + alert=alert, + custom=payload.data or None, + ) client = APNsClient( credentials=settings.apn_certificate_path, use_sandbox=settings.apn_use_sandbox, use_alternative_port=settings.apn_use_alternative_port, ) - - send_args = (token, apn_payload) - send_kwargs: dict[str, Any] = {} - if settings.apn_topic is not None: + send_kwargs: dict[str, object] = {} + if settings.apn_topic: send_kwargs["topic"] = settings.apn_topic - try: - await asyncio.to_thread(client.send_notification, *send_args, **send_kwargs) + await asyncio.to_thread(client.send_notification, token, apn_payload, **send_kwargs) logger.info("APN notification queued for user %s token %s", payload.user_id, token) except Exception as exc: logger.exception("APN send failed for token %s: %s", token, exc) diff --git a/app/worker/notification/providers/fcm.py b/app/worker/notification/providers/fcm.py index 5042ef0..895a059 100644 --- 
a/app/worker/notification/providers/fcm.py +++ b/app/worker/notification/providers/fcm.py @@ -1,36 +1,33 @@ - import asyncio -from typing import Any, Mapping +from typing import Any from app.core.logger import logger -from app.worker.notification.schema.notification import NotificationEventPayload +from app.worker.notification.schema.notification import ( + MobileDeviceInfo, + NotificationEventPayload, +) from firebase_admin import messaging as firebase_messaging -async def send_fcm_notification(payload: NotificationEventPayload) -> None: - +async def send_fcm_notification( + payload: NotificationEventPayload, + device_info: MobileDeviceInfo, +) -> None: if firebase_messaging is None: logger.debug("Firebase Admin not installed; skipping FCM delivery") return - - device_info: Mapping[str, Any] | None = payload.device_info - if device_info is None: - logger.warning("Payload missing device_info, cannot send FCM message: %s", payload) + token = device_info.fcm_token + if not token: + logger.warning("Missing FCM token for user %s", payload.user_id) return - - token = device_info.get("fcm_token") - if not isinstance(token, str): - logger.warning("Missing FCM token in device_info for payload %s", payload) - return - message = firebase_messaging.Message( token=token, notification=firebase_messaging.Notification( - title=payload.title, body=payload.body + title=payload.title, + body=payload.body, ), data=payload.data or None, ) - try: await asyncio.to_thread(firebase_messaging.send, message) logger.info("FCM notification queued for user %s token %s", payload.user_id, token) diff --git a/app/worker/notification/providers/webpush.py b/app/worker/notification/providers/webpush.py index 422d545..bc48314 100644 --- a/app/worker/notification/providers/webpush.py +++ b/app/worker/notification/providers/webpush.py @@ -1,4 +1,3 @@ - import asyncio import json @@ -9,20 +8,26 @@ async def send_web_push_notification(payload: NotificationEventPayload) -> None: - - - if not 
payload.device_info: + subscription = payload.web_subscription + if subscription is None: logger.warning("Web notification missing subscription info: %s", payload) return - if not settings.webpush_vapid_private_key: logger.warning("VAPID private key missing, cannot send web push") return - - subscription_info = payload.device_info + subscription_info: dict[str, object] = { + "endpoint": subscription.endpoint, + "keys": subscription.keys, + } + if subscription.expiration_time is not None: + subscription_info["expirationTime"] = subscription.expiration_time + payload_data = { + "title": payload.title, + "body": payload.body, + "data": payload.data, + } + data = json.dumps(payload_data) vapid_claims = {"sub": settings.webpush_vapid_claims_subject} - data = json.dumps({"title": payload.title, "body": payload.body, "data": payload.data}) - try: await asyncio.to_thread( webpush, From 65660efc49f8a6e742e5d5286da938b75feed735 Mon Sep 17 00:00:00 2001 From: wailbentafat Date: Tue, 24 Mar 2026 21:07:50 +0100 Subject: [PATCH 10/19] feat:refactor the worker to unify firebase message sending and handling failed token and token with problemes --- app/core/config.py | 1 + app/core/constant.py | 8 +- app/infra/firebase.py | 84 +++++++ app/infra/invalid_tokens.py | 18 ++ app/infra/nats.py | 16 +- app/infra/notification_queue.py | 43 ++++ app/main.py | 5 + app/router/notifications.py | 19 ++ app/schema/notification.py | 53 ++++ app/worker/notification/main.py | 237 +++++++++++------- app/worker/notification/providers/apn.py | 39 --- app/worker/notification/providers/fcm.py | 35 --- app/worker/notification/providers/webpush.py | 41 --- .../notification/schema/notification.py | 55 ---- app/worker/notification/settings.py | 34 ++- 15 files changed, 399 insertions(+), 289 deletions(-) create mode 100644 app/infra/firebase.py create mode 100644 app/infra/invalid_tokens.py create mode 100644 app/infra/notification_queue.py create mode 100644 app/router/notifications.py create mode 100644 
app/schema/notification.py delete mode 100644 app/worker/notification/providers/apn.py delete mode 100644 app/worker/notification/providers/fcm.py delete mode 100644 app/worker/notification/providers/webpush.py delete mode 100644 app/worker/notification/schema/notification.py diff --git a/app/core/config.py b/app/core/config.py index 3267862..671f6fb 100644 --- a/app/core/config.py +++ b/app/core/config.py @@ -46,6 +46,7 @@ class Settings(BaseSettings): ) FACE_ENCRYPTION_KEY: str + FIREBASE_CREDENTIALS_PATH: str = "multiai-c9380-firebase-adminsdk-fbsvc-cb6e5ce41b.json" class Config: env_file = ".env" diff --git a/app/core/constant.py b/app/core/constant.py index 95bc507..0cae9dc 100644 --- a/app/core/constant.py +++ b/app/core/constant.py @@ -4,14 +4,11 @@ class RedisKey(str, Enum): UserSession = "user_session" UserSessionByUser = "user_session:{user_id}" - - -class NotificationChannel(str, Enum): - WEB = "web" - MOBILE = "mobile" + INVALID_TOKEN_SET_KEY= "notifications:invalid_tokens" NOTIFICATION_EVENT_SUBJECT = "notification_event" +AUDIT_EVENT_SUBJECT = "audit.event" class AuditEventType(str, Enum): @@ -23,7 +20,6 @@ class AuditEventType(str, Enum): UPLOAD_REQUEST_REJECTED = "upload_request.rejected" -AUDIT_EVENT_SUBJECT = "audit.event" IMAGE_ALLOWED_TYPES = { "image/jpeg", diff --git a/app/infra/firebase.py b/app/infra/firebase.py new file mode 100644 index 0000000..e80e765 --- /dev/null +++ b/app/infra/firebase.py @@ -0,0 +1,84 @@ +from __future__ import annotations + +import firebase_admin +from firebase_admin import messaging + +from app.core.config import settings +from app.core.logger import logger +from app.schema.notification import UnifiedNotification + + +INVALID_TOKEN_CODES = { + "messaging/registration-token-not-registered", + "messaging/invalid-registration-token", +} + + +class NotificationDeliveryError(Exception): + def __init__( + self, + *, + failed_tokens: list[str], + invalid_tokens: list[str], + ) -> None: + self.failed_tokens = 
failed_tokens + self.invalid_tokens = invalid_tokens + super().__init__( + f"Failed tokens: {failed_tokens}, invalid tokens: {invalid_tokens}" + ) + + +def init_firebase_app() -> None: + if firebase_admin._apps: + return + credentials_path = settings.FIREBASE_CREDENTIALS_PATH + if credentials_path: + cred = credentials.Certificate(credentials_path) + firebase_admin.initialize_app(cred) + logger.info("Firebase initialized with credentials from %s", credentials_path) + return + firebase_admin.initialize_app() + logger.info("Firebase initialized with default credentials") + + +def _classify_token_failure(error: Exception) -> bool: + if isinstance(error, (messaging.UnregisteredError, messaging.InvalidArgumentError)): + return True + code = getattr(error, "code", None) + return code in INVALID_TOKEN_CODES + + +def send_notification(notification: UnifiedNotification) -> None: + if not notification.tokens: + logger.debug("Skipping notification without tokens: %s", notification) + return + multicast = messaging.MulticastMessage( + tokens=notification.tokens, + notification=messaging.Notification( + title=notification.title, + body=notification.body, + ), + data=notification.data or None, + ) + response = messaging.send_multicast(multicast) + + failed_tokens: list[str] = [] + invalid_tokens: list[str] = [] + + for token, result in zip(notification.tokens, response.responses): + if result.success or result.exception is None: + continue + if _classify_token_failure(result.exception): + invalid_tokens.append(token) + else: + failed_tokens.append(token) + + if failed_tokens or invalid_tokens: + raise NotificationDeliveryError( + failed_tokens=failed_tokens, + invalid_tokens=invalid_tokens, + ) + + logger.info( + "Notification delivered to %d tokens", len(notification.tokens) + ) diff --git a/app/infra/invalid_tokens.py b/app/infra/invalid_tokens.py new file mode 100644 index 0000000..19577cc --- /dev/null +++ b/app/infra/invalid_tokens.py @@ -0,0 +1,18 @@ +from __future__ 
import annotations +from typing import Iterable +from redis.asyncio import Redis +from app.core.logger import logger +from app.core.constant import RedisKey + + + +class InvalidTokenStore: + def __init__(self, redis: Redis) -> None: + self._redis = redis + + async def mark_invalid(self, tokens: Iterable[str]) -> None: + normalized = [token for token in tokens if token] + if not normalized: + return + await self._redis.sadd(RedisKey.INVALID_TOKEN_SET_KEY, *normalized) + logger.warning("Marked %d tokens for cleanup", len(normalized)) diff --git a/app/infra/nats.py b/app/infra/nats.py index 8e5d1f6..a44a908 100644 --- a/app/infra/nats.py +++ b/app/infra/nats.py @@ -50,15 +50,16 @@ async def close() -> None: @staticmethod - async def publish(subject: NatsSubjects, message: bytes) -> None: + async def publish(subject: NatsSubjects | str, message: bytes) -> None: if NatsClient._nc is None: await NatsClient.connect() nc = NatsClient._nc assert nc is not None - await nc.publish(subject.value, message) + subject_name = subject.value if isinstance(subject, NatsSubjects) else subject + await nc.publish(subject_name, message) @staticmethod - async def subscribe(subject: NatsSubjects, callback: Callable[[Any], Any]) -> None: + async def subscribe(subject: NatsSubjects | str, callback: Callable[[Any], Any]) -> None: if NatsClient._nc is None: await NatsClient.connect() nc = NatsClient._nc @@ -66,7 +67,8 @@ async def subscribe(subject: NatsSubjects, callback: Callable[[Any], Any]) -> No async def _wrapper(msg: Msg) -> None: await callback(msg.data) - await nc.subscribe(subject.value, cb=_wrapper) # type: ignore + subject_name = subject.value if isinstance(subject, NatsSubjects) else subject + await nc.subscribe(subject_name, cb=_wrapper) # type: ignore @staticmethod @@ -75,7 +77,8 @@ async def js_publish(subject: NatsSubjects, message: bytes, stream_name: str) -> await NatsClient.connect() js = NatsClient._js assert js is not None - await js.publish(subject.value, message, 
stream=stream_name) + subject_name = subject.value if isinstance(subject, NatsSubjects) else subject # type: ignore + await js.publish(subject_name, message, stream=stream_name) @staticmethod async def js_subscribe( @@ -93,8 +96,9 @@ async def _wrapper(msg: Msg) -> None: await msg.ack() js = NatsClient._js assert js is not None + subject_name = subject.value if isinstance(subject, NatsSubjects) else subject await js.subscribe( - subject=subject.value, + subject=subject_name, stream=stream_name, durable=durable_name, cb=_wrapper, diff --git a/app/infra/notification_queue.py b/app/infra/notification_queue.py new file mode 100644 index 0000000..ee47515 --- /dev/null +++ b/app/infra/notification_queue.py @@ -0,0 +1,43 @@ +from __future__ import annotations + +from typing import Sequence + +from pydantic import BaseModel, ConfigDict, Field + +from app.infra.nats import NatsClient +from app.schema.notification import NotificationPriority, PRIORITY_ORDER, UnifiedNotification +from app.worker.notification.settings import NotificationWorkerSettings + + +class NotificationQueueEntry(BaseModel): + notification: UnifiedNotification + attempts: int = Field(default=0, ge=0) + + model_config = ConfigDict(extra="forbid") + + +class NotificationQueue: + def __init__(self, settings: NotificationWorkerSettings) -> None: + self._settings = settings + + def _subject_for(self, priority: NotificationPriority) -> str: + return self._settings.subject_for(priority) + + async def enqueue(self, notification: UnifiedNotification, attempts: int = 0) -> None: + entry = NotificationQueueEntry(notification=notification, attempts=attempts) + await self._publish(entry) + + async def enqueue_entry(self, entry: NotificationQueueEntry) -> None: + await self._publish(entry) + + async def _publish(self, entry: NotificationQueueEntry) -> None: + subject = self._subject_for(entry.notification.priority) + payload = entry.model_dump_json().encode("utf-8") + await NatsClient.publish(subject, payload) + + 
@staticmethod + def priority_index(priority: NotificationPriority) -> int: + return PRIORITY_ORDER.index(priority) + + def priority_subjects(self) -> Sequence[str]: + return self._settings.priority_subjects() diff --git a/app/main.py b/app/main.py index 41b4f8d..02d59da 100644 --- a/app/main.py +++ b/app/main.py @@ -7,10 +7,12 @@ from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint from app.core.config import settings +from app.infra.firebase import init_firebase_app from app.infra.minio import init_minio_client from app.infra.nats import NatsClient from app.infra.redis import RedisClient from app.router.mobile import router as mobile_router +from app.router.notifications import router as notifications_router from app.router.staff import router as staff_router from app.router.web import router as web_router from app.deps.ai_deps import get_face_embedding_service @@ -73,6 +75,8 @@ async def lifespan(app: FastAPI) -> AsyncIterator[None]: password=settings.REDIS_PASSWORD, ) + init_firebase_app() + await NatsClient.connect() get_face_embedding_service() @@ -114,3 +118,4 @@ def health_check() -> dict[str, str]: app.include_router(mobile_router) app.include_router(staff_router) app.include_router(web_router) +app.include_router(notifications_router) diff --git a/app/router/notifications.py b/app/router/notifications.py new file mode 100644 index 0000000..3e02799 --- /dev/null +++ b/app/router/notifications.py @@ -0,0 +1,19 @@ +from fastapi import APIRouter, status + +from app.core.logger import logger +from app.infra.notification_queue import NotificationQueue +from app.schema.notification import UnifiedNotification +from app.worker.notification.settings import NotificationWorkerSettings + + +settings = NotificationWorkerSettings() +queue = NotificationQueue(settings=settings) + +router = APIRouter(prefix="/notifications", tags=["notifications"]) + + +@router.post("/enqueue", status_code=status.HTTP_202_ACCEPTED) +async def 
enqueue_notification(notification: UnifiedNotification) -> dict[str, str]: + await queue.enqueue(notification) + logger.debug("Enqueued notification priority=%s tokens=%d", notification.priority, len(notification.tokens)) + return {"status": "queued"} diff --git a/app/schema/notification.py b/app/schema/notification.py new file mode 100644 index 0000000..dfcef17 --- /dev/null +++ b/app/schema/notification.py @@ -0,0 +1,53 @@ +from __future__ import annotations + +from enum import Enum +from typing import Mapping + +from pydantic import BaseModel, ConfigDict, Field, field_validator + + +class NotificationPriority(str, Enum): + HIGH = "high" + NORMAL = "normal" + LOW = "low" + + +class UnifiedNotification(BaseModel): + title: str + body: str + data: dict[str, str] = Field(default_factory=dict) + tokens: list[str] + priority: NotificationPriority = NotificationPriority.NORMAL + + model_config = ConfigDict(extra="forbid") + + @field_validator("title", "body", mode="before") + def _normalize_text(cls, value: str) -> str: + normalized = str(value).strip() + if not normalized: + raise ValueError("Text fields must not be empty") + return normalized + + @field_validator("data", mode="before") + def _normalize_data(cls, value: Mapping[str, object] | None) -> dict[str, str]: + if value is None: + return {} + if not isinstance(value, Mapping): + raise ValueError("Data must be a mapping") + return {str(key): str(val) for key, val in value.items()} + + @field_validator("tokens", mode="before") + def _normalize_tokens(cls, value: list[str] | tuple[str, ...]) -> list[str]: + if not isinstance(value, (list, tuple)): + raise ValueError("Tokens must be a list") + cleaned = [str(token).strip() for token in value if str(token).strip()] + if not cleaned: + raise ValueError("At least one FCM token is required") + return cleaned + + +PRIORITY_ORDER: tuple[NotificationPriority, ...] 
= ( + NotificationPriority.HIGH, + NotificationPriority.NORMAL, + NotificationPriority.LOW, +) diff --git a/app/worker/notification/main.py b/app/worker/notification/main.py index 9af3d08..1695568 100644 --- a/app/worker/notification/main.py +++ b/app/worker/notification/main.py @@ -1,110 +1,171 @@ +from __future__ import annotations + import asyncio -import json -from typing import Any -import sqlalchemy.ext.asyncio -from app.core.constant import NOTIFICATION_EVENT_SUBJECT, NotificationChannel +from functools import partial +from typing import Sequence + +from app.core.config import settings from app.core.logger import logger -from app.infra.database import engine -from app.infra.nats import NatsClient, NatsSubjects -from app.service.device import DeviceService -from db.generated import devices as device_queries -from app.worker.notification.providers.apn import send_apn_notification -from app.worker.notification.providers.fcm import send_fcm_notification -from app.worker.notification.providers.webpush import send_web_push_notification -from app.worker.notification.schema.notification import NotificationEventPayload +from app.infra.firebase import ( + NotificationDeliveryError, + init_firebase_app, + send_notification, +) +from app.infra.invalid_tokens import InvalidTokenStore +from app.infra.notification_queue import NotificationQueue, NotificationQueueEntry +from app.infra.redis import RedisClient +from app.infra.nats import NatsClient +from app.worker.notification.settings import NotifSettings, NotificationWorkerSettings + + +MAX_SEND_ATTEMPTS = 5 +RETRY_DELAY_SECONDS = 2 + + +async def _process_loop( + queue: NotificationQueue, + invalid_tokens: InvalidTokenStore, + pending: asyncio.PriorityQueue[tuple[int, NotificationQueueEntry]], +) -> None: + while True: + try: + _, entry = await pending.get() + await _process_entry(entry, queue, invalid_tokens) + except asyncio.CancelledError: + break + except Exception: + logger.exception("Notification worker encountered 
unexpected error") + await asyncio.sleep(RETRY_DELAY_SECONDS) + + +def _parse_entry(raw_payload: bytes | str) -> NotificationQueueEntry | None: + if isinstance(raw_payload, bytes): + raw_payload = raw_payload.decode("utf-8") + try: + return NotificationQueueEntry.model_validate_json(raw_payload) + except Exception: + logger.exception("Failed to deserialize notification entry") + return None -async def init_push_integrations() -> None: - logger.info("Notification worker ready to deliver pushes") +async def _enqueue_from_nats( + raw_payload: bytes | str, + queue: NotificationQueue, + pending: asyncio.PriorityQueue[tuple[int, NotificationQueueEntry]], +) -> None: + entry = _parse_entry(raw_payload) + if entry is None: + return + priority = entry.notification.priority + index = queue.priority_index(priority) + await pending.put((index, entry)) + + +async def _subscribe( + queue: NotificationQueue, + pending: asyncio.PriorityQueue[tuple[int, NotificationQueueEntry]], +) -> None: + subjects = queue.priority_subjects() + for subject in subjects: + handler = partial(_enqueue_from_nats, queue=queue, pending=pending) + await NatsClient.subscribe(subject, handler) + + +async def _process_entry( + entry: NotificationQueueEntry, + queue: NotificationQueue, + invalid_tokens: InvalidTokenStore, +) -> None: + try: + await asyncio.to_thread(send_notification, entry.notification) + except NotificationDeliveryError as exc: + await _handle_partial_failure(entry, invalid_tokens, queue, exc) + except Exception: + logger.exception("Failed to deliver notification, will retry") + await _retry(entry, queue) + + +async def _handle_partial_failure( + entry: NotificationQueueEntry, + invalid_tokens: InvalidTokenStore, + queue: NotificationQueue, + error: NotificationDeliveryError, +) -> None: + if error.invalid_tokens: + await invalid_tokens.mark_invalid(error.invalid_tokens) + logger.warning("Detected %d invalid tokens", len(error.invalid_tokens)) + if error.failed_tokens: + await 
_retry(entry, queue, tokens=error.failed_tokens) + + +async def _retry( + entry: NotificationQueueEntry, + queue: NotificationQueue, + tokens: Sequence[str] | None = None, +) -> None: + attempts = entry.attempts + 1 + if attempts >= MAX_SEND_ATTEMPTS: + logger.warning( + "Dropping notification after %d attempts (tokens=%d)", + attempts, + len(tokens or entry.notification.tokens), + ) + return + notification = entry.notification + if tokens is not None: + notification = entry.notification.model_copy(update={"tokens": list(tokens)}) + if not notification.tokens: + return + await asyncio.sleep(RETRY_DELAY_SECONDS) + await queue.enqueue(notification, attempts=attempts) + logger.info("Requeued notification attempt=%d", attempts) -class NotificationDeliveryWorker: - def __init__(self) -> None: - self._conn: sqlalchemy.ext.asyncio.AsyncConnection | None = None - self._device_service: DeviceService | None = None +async def run_worker(queue: NotificationQueue, invalid_tokens: InvalidTokenStore) -> None: + logger.info("Notification worker started") + pending: asyncio.PriorityQueue[tuple[int, NotificationQueueEntry]] = ( + asyncio.PriorityQueue() + ) + await _subscribe(queue, pending) + try: + await _process_loop(queue, invalid_tokens, pending) + finally: + logger.info("Notification worker shutting down") - async def start(self) -> None: - if self._conn is not None: - return - self._conn = await engine.connect() - self._device_service = DeviceService() - self._device_service.init(device_querier=device_queries.AsyncQuerier(self._conn)) - - async def stop(self) -> None: - if self._conn is not None: - await self._conn.close() - self._conn = None - self._device_service = None - - async def deliver(self, payload: NotificationEventPayload) -> None: - if payload.channel == NotificationChannel.MOBILE: - await self._deliver_to_mobile(payload) - return - if payload.channel == NotificationChannel.WEB: - await send_web_push_notification(payload) - return - logger.warning("Unhandled 
notification channel %s", payload.channel) - async def _deliver_to_mobile(self, payload: NotificationEventPayload) -> None: - if self._device_service is None: - logger.warning("Device service unavailable for mobile delivery") - return - devices, _ = await self._device_service.get_all_devices(user_id=payload.user_id) - if not devices: - logger.debug("No devices registered for user %s", payload.user_id) - return - for device in devices: - device_type = (device.device_type or "").lower() - if device_type == "ios": - await send_apn_notification(payload) - else: - await send_fcm_notification(payload) +def _setup_notification_queue() -> NotificationQueue: + return NotificationQueue(settings=NotifSettings) -def _parse_payload(raw_data: bytes) -> dict[str, Any] | None: - try: - parsed = json.loads(raw_data.decode("utf-8")) - if not isinstance(parsed, dict): - logger.warning("Notification payload must be an object, got %s", type(parsed)) - return None - return parsed - except (UnicodeDecodeError, json.JSONDecodeError) as exc: - logger.error("Cannot parse notification payload: %s", exc) - return None +def _setup_redis() -> RedisClient: + return RedisClient( + host=NotifSettings.REDIS_HOST, + port=NotifSettings.REDIS_PORT, + password=NotifSettings.REDIS_PASSWORD, + ) -async def _handle_event(worker: NotificationDeliveryWorker, raw_data: Any) -> None: - parsed = _parse_payload(raw_data) - if parsed is None: - return - payload = NotificationEventPayload.from_mapping(parsed) - if payload is None: - return - try: - await worker.deliver(payload) - except Exception: - logger.exception("Failed to deliver payload for %s", parsed.get("user_id")) +def _setup_invalid_token_store(redis_client: RedisClient) -> InvalidTokenStore: + return InvalidTokenStore(redis_client.client) -async def listen_nats_event(worker: NotificationDeliveryWorker) -> None: - await NatsClient.subscribe( - NatsSubjects.NOTIFICATION_EVENT, - lambda data: _handle_event(worker, data), - ) - logger.info("Listening for 
notification events on %s", NOTIFICATION_EVENT_SUBJECT) +async def _initialize_infrastructure() -> tuple[NotificationQueue, InvalidTokenStore]: + init_firebase_app() + await NatsClient.connect() + redis_client = _setup_redis() + queue = _setup_notification_queue() + invalid_tokens = _setup_invalid_token_store(redis_client) + return queue, invalid_tokens async def main() -> None: - await init_push_integrations() - worker = NotificationDeliveryWorker() - await worker.start() - await NatsClient.connect() + queue, invalid_tokens = await _initialize_infrastructure() try: - await listen_nats_event(worker) - await asyncio.Event().wait() + await run_worker(queue, invalid_tokens) + except asyncio.CancelledError: + logger.info("Notification worker cancelled") finally: - await worker.stop() - await NatsClient.close() + logger.info("Notification worker stopped") if __name__ == "__main__": diff --git a/app/worker/notification/providers/apn.py b/app/worker/notification/providers/apn.py deleted file mode 100644 index 3e8d9d6..0000000 --- a/app/worker/notification/providers/apn.py +++ /dev/null @@ -1,39 +0,0 @@ -import asyncio - -from apns2.client import APNsClient -from apns2.payload import Payload as APNPayload, PayloadAlert - -from app.core.logger import logger -from app.worker.notification.schema.notification import ( - MobileDeviceInfo, - NotificationEventPayload, -) -from app.worker.notification.settings import settings - - -async def send_apn_notification( - payload: NotificationEventPayload, - device_info: MobileDeviceInfo, -) -> None: - token = device_info.apn_token - if not token: - logger.warning("Missing APN token for user %s", payload.user_id) - return - alert = PayloadAlert(title=payload.title or "", body=payload.body) - apn_payload = APNPayload( - alert=alert, - custom=payload.data or None, - ) - client = APNsClient( - credentials=settings.apn_certificate_path, - use_sandbox=settings.apn_use_sandbox, - use_alternative_port=settings.apn_use_alternative_port, - ) - 
send_kwargs: dict[str, object] = {} - if settings.apn_topic: - send_kwargs["topic"] = settings.apn_topic - try: - await asyncio.to_thread(client.send_notification, token, apn_payload, **send_kwargs) - logger.info("APN notification queued for user %s token %s", payload.user_id, token) - except Exception as exc: - logger.exception("APN send failed for token %s: %s", token, exc) diff --git a/app/worker/notification/providers/fcm.py b/app/worker/notification/providers/fcm.py deleted file mode 100644 index 895a059..0000000 --- a/app/worker/notification/providers/fcm.py +++ /dev/null @@ -1,35 +0,0 @@ -import asyncio -from typing import Any - -from app.core.logger import logger -from app.worker.notification.schema.notification import ( - MobileDeviceInfo, - NotificationEventPayload, -) -from firebase_admin import messaging as firebase_messaging - - -async def send_fcm_notification( - payload: NotificationEventPayload, - device_info: MobileDeviceInfo, -) -> None: - if firebase_messaging is None: - logger.debug("Firebase Admin not installed; skipping FCM delivery") - return - token = device_info.fcm_token - if not token: - logger.warning("Missing FCM token for user %s", payload.user_id) - return - message = firebase_messaging.Message( - token=token, - notification=firebase_messaging.Notification( - title=payload.title, - body=payload.body, - ), - data=payload.data or None, - ) - try: - await asyncio.to_thread(firebase_messaging.send, message) - logger.info("FCM notification queued for user %s token %s", payload.user_id, token) - except Exception as exc: - logger.exception("FCM send failed for token %s: %s", token, exc) diff --git a/app/worker/notification/providers/webpush.py b/app/worker/notification/providers/webpush.py deleted file mode 100644 index bc48314..0000000 --- a/app/worker/notification/providers/webpush.py +++ /dev/null @@ -1,41 +0,0 @@ -import asyncio -import json - -from app.core.logger import logger -from app.worker.notification.schema.notification import 
NotificationEventPayload -from app.worker.notification.settings import settings -from pywebpush import WebPushException, webpush - - -async def send_web_push_notification(payload: NotificationEventPayload) -> None: - subscription = payload.web_subscription - if subscription is None: - logger.warning("Web notification missing subscription info: %s", payload) - return - if not settings.webpush_vapid_private_key: - logger.warning("VAPID private key missing, cannot send web push") - return - subscription_info: dict[str, object] = { - "endpoint": subscription.endpoint, - "keys": subscription.keys, - } - if subscription.expiration_time is not None: - subscription_info["expirationTime"] = subscription.expiration_time - payload_data = { - "title": payload.title, - "body": payload.body, - "data": payload.data, - } - data = json.dumps(payload_data) - vapid_claims = {"sub": settings.webpush_vapid_claims_subject} - try: - await asyncio.to_thread( - webpush, - subscription_info=subscription_info, - data=data, - vapid_private_key=settings.webpush_vapid_private_key, - vapid_claims=vapid_claims, - ) - logger.info("Web push queued for user %s", payload.user_id) - except WebPushException as exc: - logger.exception("Web push failed for user %s: %s", payload.user_id, exc) diff --git a/app/worker/notification/schema/notification.py b/app/worker/notification/schema/notification.py deleted file mode 100644 index aa07b36..0000000 --- a/app/worker/notification/schema/notification.py +++ /dev/null @@ -1,55 +0,0 @@ -"""Payload definitions shared across the notification worker.""" -from __future__ import annotations - -from typing import Any, Mapping -from uuid import UUID - -from pydantic import BaseModel, ConfigDict, Field, root_validator - -from app.core.constant import NotificationChannel - - -class MobileDeviceInfo(BaseModel): - fcm_token: str | None = None - apn_token: str | None = None - platform: str | None = None - - -class WebPushSubscription(BaseModel): - endpoint: str - keys: 
dict[str, str] - expiration_time: int | None = None - - -class NotificationEventPayload(BaseModel): - user_id: UUID - channel: NotificationChannel - title: str | None = None - body: str | None = None - data: dict[str, str] = Field(default_factory=dict) - mobile_device_info: MobileDeviceInfo | None = None - web_subscription: WebPushSubscription | None = None - metadata: dict[str, Any] | None = None - - model_config = ConfigDict(extra="ignore") - - @root_validator(pre=True) - def _normalize_payload(cls, values: dict[str, Any]) -> dict[str, Any]: - normalized = dict(values) - raw_data = normalized.get("data") - if isinstance(raw_data, Mapping): - normalized["data"] = { - key: value - for key, value in raw_data.items() - if isinstance(value, str) - } - raw_metadata = normalized.get("metadata") - if isinstance(raw_metadata, Mapping): - normalized["metadata"] = dict(raw_metadata) - raw_device_info = normalized.pop("device_info", None) - if isinstance(raw_device_info, Mapping): - normalized["mobile_device_info"] = dict(raw_device_info) - raw_subscription = normalized.pop("subscription", None) - if isinstance(raw_subscription, Mapping): - normalized["web_subscription"] = dict(raw_subscription) - return normalized diff --git a/app/worker/notification/settings.py b/app/worker/notification/settings.py index 4dd2ef2..f1e461c 100644 --- a/app/worker/notification/settings.py +++ b/app/worker/notification/settings.py @@ -1,30 +1,26 @@ from __future__ import annotations -from typing import Optional +from typing import Sequence -from pydantic import Field +from pydantic import Field from pydantic_settings import BaseSettings +from app.schema.notification import NotificationPriority, PRIORITY_ORDER class NotificationWorkerSettings(BaseSettings): - - apn_certificate_path: str = Field( - "/path/to/certificate.pem" - ) - apn_use_sandbox: bool = Field(True) - apn_use_alternative_port: bool = Field( - False - ) - apn_topic: Optional[str] = Field( - None - ) - webpush_vapid_private_key: 
Optional[str] = Field(None) - webpush_vapid_claims_subject: str = Field( - "mailto:alerts@example.com" - ) + subject_prefix: str = Field("notifications.delivery") + queue_group: str | None = Field(None) + REDIS_HOST:str + REDIS_PORT:int + REDIS_PASSWORD:str class Config: - env_prefix = "NOTIFICATION_" + env_prefix = "NOTIFICATIONS_" + + def subject_for(self, priority: NotificationPriority) -> str: + return f"{self.subject_prefix}.{priority.value}" + def priority_subjects(self) -> Sequence[str]: + return [self.subject_for(priority) for priority in PRIORITY_ORDER] -settings = NotificationWorkerSettings() # type: ignore +NotifSettings = NotificationWorkerSettings() # type: ignore \ No newline at end of file From aef98612b4151c4d3e3d3b863dafc93e2cf4bd6f Mon Sep 17 00:00:00 2001 From: wailbentafat Date: Wed, 25 Mar 2026 12:19:17 +0100 Subject: [PATCH 11/19] chore:remove firebase stubs replace it with internal typing cast and then update the architectire of notif worker to use periority queue of nats instead of in memory asyncio queue and use rate limiter locks in memory for matching firebase quotas --- app/infra/invalid_tokens.py | 18 - app/infra/nats.py | 16 +- app/infra/redis.py | 67 +- app/main.py | 3 - app/router/notifications.py | 5 +- app/schema/notification.py | 31 +- app/worker/notification/__init__.py | 5 - .../notification}/firebase.py | 39 +- app/worker/notification/invalid_tokens.py | 38 + app/worker/notification/main.py | 192 ++-- app/worker/notification/rate_limiter.py | 26 + app/worker/notification/settings.py | 20 +- typings/apns2/client.pyi | 78 -- typings/apns2/credentials.pyi | 39 - typings/apns2/errors.pyi | 175 ---- typings/apns2/payload.pyi | 25 - typings/firebase_admin/__about__.pyi | 10 - typings/firebase_admin/__init__.pyi | 125 --- typings/firebase_admin/_auth_client.pyi | 618 ------------- typings/firebase_admin/_auth_providers.pyi | 192 ---- typings/firebase_admin/_auth_utils.pyi | 234 ----- typings/firebase_admin/_gapic_utils.pyi | 43 - 
typings/firebase_admin/_http_client.pyi | 106 --- typings/firebase_admin/_messaging_encoder.pyi | 204 ----- typings/firebase_admin/_messaging_utils.pyi | 402 -------- typings/firebase_admin/_rfc3339.pyi | 24 - typings/firebase_admin/_sseclient.pyi | 92 -- typings/firebase_admin/_token_gen.pyi | 177 ---- typings/firebase_admin/_user_identifier.pyi | 91 -- typings/firebase_admin/_user_import.pyi | 405 --------- typings/firebase_admin/_user_mgt.pyi | 527 ----------- typings/firebase_admin/_utils.pyi | 77 -- typings/firebase_admin/app_check.pyi | 54 -- typings/firebase_admin/auth.pyi | 716 --------------- typings/firebase_admin/credentials.pyi | 155 ---- typings/firebase_admin/db.pyi | 573 ------------ typings/firebase_admin/exceptions.pyi | 191 ---- typings/firebase_admin/firestore.pyi | 48 - typings/firebase_admin/firestore_async.pyi | 48 - typings/firebase_admin/functions.pyi | 226 ----- typings/firebase_admin/instance_id.pyi | 41 - typings/firebase_admin/messaging.pyi | 285 ------ typings/firebase_admin/ml.pyi | 529 ----------- typings/firebase_admin/project_management.pyi | 422 --------- typings/firebase_admin/remote_config.pyi | 340 ------- typings/firebase_admin/storage.pyi | 48 - typings/firebase_admin/tenant_mgt.pyi | 261 ------ typings/googleapiclient/__init__.pyi | 7 - typings/googleapiclient/_auth.pyi | 56 -- typings/googleapiclient/_helpers.pyi | 120 --- typings/googleapiclient/channel.pyi | 211 ----- typings/googleapiclient/discovery.pyi | 333 ------- .../discovery_cache/__init__.pyi | 34 - .../discovery_cache/appengine_memcache.pyi | 28 - .../googleapiclient/discovery_cache/base.pyi | 35 - .../discovery_cache/file_cache.pyi | 35 - typings/googleapiclient/errors.pyi | 108 --- typings/googleapiclient/http.pyi | 857 ------------------ typings/googleapiclient/mimeparse.pyi | 107 --- typings/googleapiclient/model.pyi | 262 ------ typings/googleapiclient/sample_tools.pyi | 36 - typings/googleapiclient/schema.pyi | 160 ---- typings/googleapiclient/version.pyi | 
5 - typings/pywebpush/__init__.pyi | 276 ------ typings/pywebpush/__main__.pyi | 13 - typings/pywebpush/foo.pyi | 7 - typings/pywebpush/tests/__init__.pyi | 4 - 67 files changed, 245 insertions(+), 10490 deletions(-) delete mode 100644 app/infra/invalid_tokens.py rename app/{infra => worker/notification}/firebase.py (71%) create mode 100644 app/worker/notification/invalid_tokens.py create mode 100644 app/worker/notification/rate_limiter.py delete mode 100644 typings/apns2/client.pyi delete mode 100644 typings/apns2/credentials.pyi delete mode 100644 typings/apns2/errors.pyi delete mode 100644 typings/apns2/payload.pyi delete mode 100644 typings/firebase_admin/__about__.pyi delete mode 100644 typings/firebase_admin/__init__.pyi delete mode 100644 typings/firebase_admin/_auth_client.pyi delete mode 100644 typings/firebase_admin/_auth_providers.pyi delete mode 100644 typings/firebase_admin/_auth_utils.pyi delete mode 100644 typings/firebase_admin/_gapic_utils.pyi delete mode 100644 typings/firebase_admin/_http_client.pyi delete mode 100644 typings/firebase_admin/_messaging_encoder.pyi delete mode 100644 typings/firebase_admin/_messaging_utils.pyi delete mode 100644 typings/firebase_admin/_rfc3339.pyi delete mode 100644 typings/firebase_admin/_sseclient.pyi delete mode 100644 typings/firebase_admin/_token_gen.pyi delete mode 100644 typings/firebase_admin/_user_identifier.pyi delete mode 100644 typings/firebase_admin/_user_import.pyi delete mode 100644 typings/firebase_admin/_user_mgt.pyi delete mode 100644 typings/firebase_admin/_utils.pyi delete mode 100644 typings/firebase_admin/app_check.pyi delete mode 100644 typings/firebase_admin/auth.pyi delete mode 100644 typings/firebase_admin/credentials.pyi delete mode 100644 typings/firebase_admin/db.pyi delete mode 100644 typings/firebase_admin/exceptions.pyi delete mode 100644 typings/firebase_admin/firestore.pyi delete mode 100644 typings/firebase_admin/firestore_async.pyi delete mode 100644 
typings/firebase_admin/functions.pyi delete mode 100644 typings/firebase_admin/instance_id.pyi delete mode 100644 typings/firebase_admin/messaging.pyi delete mode 100644 typings/firebase_admin/ml.pyi delete mode 100644 typings/firebase_admin/project_management.pyi delete mode 100644 typings/firebase_admin/remote_config.pyi delete mode 100644 typings/firebase_admin/storage.pyi delete mode 100644 typings/firebase_admin/tenant_mgt.pyi delete mode 100644 typings/googleapiclient/__init__.pyi delete mode 100644 typings/googleapiclient/_auth.pyi delete mode 100644 typings/googleapiclient/_helpers.pyi delete mode 100644 typings/googleapiclient/channel.pyi delete mode 100644 typings/googleapiclient/discovery.pyi delete mode 100644 typings/googleapiclient/discovery_cache/__init__.pyi delete mode 100644 typings/googleapiclient/discovery_cache/appengine_memcache.pyi delete mode 100644 typings/googleapiclient/discovery_cache/base.pyi delete mode 100644 typings/googleapiclient/discovery_cache/file_cache.pyi delete mode 100644 typings/googleapiclient/errors.pyi delete mode 100644 typings/googleapiclient/http.pyi delete mode 100644 typings/googleapiclient/mimeparse.pyi delete mode 100644 typings/googleapiclient/model.pyi delete mode 100644 typings/googleapiclient/sample_tools.pyi delete mode 100644 typings/googleapiclient/schema.pyi delete mode 100644 typings/googleapiclient/version.pyi delete mode 100644 typings/pywebpush/__init__.pyi delete mode 100644 typings/pywebpush/__main__.pyi delete mode 100644 typings/pywebpush/foo.pyi delete mode 100644 typings/pywebpush/tests/__init__.pyi diff --git a/app/infra/invalid_tokens.py b/app/infra/invalid_tokens.py deleted file mode 100644 index 19577cc..0000000 --- a/app/infra/invalid_tokens.py +++ /dev/null @@ -1,18 +0,0 @@ -from __future__ import annotations -from typing import Iterable -from redis.asyncio import Redis -from app.core.logger import logger -from app.core.constant import RedisKey - - - -class InvalidTokenStore: - def 
__init__(self, redis: Redis) -> None: - self._redis = redis - - async def mark_invalid(self, tokens: Iterable[str]) -> None: - normalized = [token for token in tokens if token] - if not normalized: - return - await self._redis.sadd(RedisKey.INVALID_TOKEN_SET_KEY, *normalized) - logger.warning("Marked %d tokens for cleanup", len(normalized)) diff --git a/app/infra/nats.py b/app/infra/nats.py index a44a908..5a9a101 100644 --- a/app/infra/nats.py +++ b/app/infra/nats.py @@ -29,13 +29,19 @@ class NatsClient: _js: Optional[JetStreamContext] = None @staticmethod - async def connect() -> None: + async def connect( + *, + host: str | None = None, + port: int | None = None, + user: str | None = None, + password: str | None = None, + ) -> None: if NatsClient._nc is None: nc = NATS() await nc.connect( - servers=[f"nats://{settings.NATS_HOST}:{settings.NATS_PORT}"], - user=settings.NATS_USER, - password=settings.NATS_PASSWORD, + servers=[f"nats://{host or settings.NATS_HOST}:{port or settings.NATS_PORT}"], + user=user or settings.NATS_USER, + password=password or settings.NATS_PASSWORD, ) NatsClient._nc = nc NatsClient._js = nc.jetstream() # type: ignore @@ -96,7 +102,7 @@ async def _wrapper(msg: Msg) -> None: await msg.ack() js = NatsClient._js assert js is not None - subject_name = subject.value if isinstance(subject, NatsSubjects) else subject + subject_name = subject.value await js.subscribe( subject=subject_name, stream=stream_name, diff --git a/app/infra/redis.py b/app/infra/redis.py index 66f9da1..66518e9 100644 --- a/app/infra/redis.py +++ b/app/infra/redis.py @@ -1,4 +1,4 @@ -from typing import Any +from typing import cast, ClassVar from redis.asyncio import Redis @@ -6,19 +6,32 @@ class RedisClient: - client: Redis - _instance = None + _client: Redis + _instance: ClassVar["RedisClient | None"] = None - def __new__(cls, *args: Any, **kwargs: Any) -> "RedisClient": + def __init__(self, host: str, port: int, password: str) -> None: + self._client = Redis.from_url( # 
type: ignore + f"redis://{host}:{port}", + password=password, + decode_responses=True, + ) + + + @classmethod + def init(cls, host: str, port: int, password: str) -> "RedisClient": + if cls._instance is not None: + raise RuntimeError("RedisClient already initialized") + + cls._instance = cls(host, port, password) + return cls._instance + + @classmethod + def get_instance(cls) -> "RedisClient": if cls._instance is None: - cls._instance = super().__new__(cls) + raise RuntimeError("RedisClient not initialized") + return cls._instance - def __init__(self, host: str, port: int, password: str) -> None: - if not hasattr(self, "client"): - self.client = Redis.from_url( # type: ignore - f"redis://{host}:{port}", password=password, decode_responses=True - ) async def set( self, @@ -27,25 +40,37 @@ async def set( expire: int | None = None, nx: bool = False, ) -> bool: - return await self.client.set(key, value, ex=expire, nx=nx) + result = await self._client.set(key, value, ex=expire, nx=nx) + return bool(result) async def get(self, key: RedisKey | str) -> str | None: - return await self.client.get(key) + return await self._client.get(key) async def delete(self, key: RedisKey | str) -> int: - return await self.client.delete(key) + result = await self._client.delete(key) + return int(cast(int, result)) async def exists(self, key: RedisKey | str) -> bool: - return await self.client.exists(key) > 0 + result = await self._client.exists(key) + return int(cast(int, result)) > 0 async def expire(self, key: RedisKey | str, seconds: int) -> bool: - return await self.client.expire(key, seconds) + result = await self._client.expire(key, seconds) + return int(cast(int, result)) == 1 + + + async def sadd(self, key: RedisKey | str, *values: str) -> int: + result = self._client.sadd(key, *values) + return int(cast(int, result)) + + async def sismember(self, key: RedisKey | str, value: str) -> bool: + result = self._client.sismember(key, value) + return int(cast(int, result)) == 1 + + async 
def srem(self, key: RedisKey | str, *values: str) -> int: + result = self._client.srem(key, *values) + return int(cast(int, result)) - @classmethod - def get_instance(cls) -> "RedisClient": - if cls._instance is None: - raise RuntimeError("RedisClient not initialized") - return cls._instance async def close(self) -> None: - await self.client.close() + await self._client.close() \ No newline at end of file diff --git a/app/main.py b/app/main.py index 02d59da..cbf4ffc 100644 --- a/app/main.py +++ b/app/main.py @@ -7,7 +7,6 @@ from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint from app.core.config import settings -from app.infra.firebase import init_firebase_app from app.infra.minio import init_minio_client from app.infra.nats import NatsClient from app.infra.redis import RedisClient @@ -75,8 +74,6 @@ async def lifespan(app: FastAPI) -> AsyncIterator[None]: password=settings.REDIS_PASSWORD, ) - init_firebase_app() - await NatsClient.connect() get_face_embedding_service() diff --git a/app/router/notifications.py b/app/router/notifications.py index 3e02799..f732332 100644 --- a/app/router/notifications.py +++ b/app/router/notifications.py @@ -3,11 +3,10 @@ from app.core.logger import logger from app.infra.notification_queue import NotificationQueue from app.schema.notification import UnifiedNotification -from app.worker.notification.settings import NotificationWorkerSettings +from app.worker.notification.settings import NotifSetting -settings = NotificationWorkerSettings() -queue = NotificationQueue(settings=settings) +queue = NotificationQueue(settings=NotifSetting) router = APIRouter(prefix="/notifications", tags=["notifications"]) diff --git a/app/schema/notification.py b/app/schema/notification.py index dfcef17..574a856 100644 --- a/app/schema/notification.py +++ b/app/schema/notification.py @@ -1,9 +1,5 @@ -from __future__ import annotations - from enum import Enum -from typing import Mapping - -from pydantic import BaseModel, 
ConfigDict, Field, field_validator +from pydantic import BaseModel, ConfigDict, Field class NotificationPriority(str, Enum): @@ -21,31 +17,6 @@ class UnifiedNotification(BaseModel): model_config = ConfigDict(extra="forbid") - @field_validator("title", "body", mode="before") - def _normalize_text(cls, value: str) -> str: - normalized = str(value).strip() - if not normalized: - raise ValueError("Text fields must not be empty") - return normalized - - @field_validator("data", mode="before") - def _normalize_data(cls, value: Mapping[str, object] | None) -> dict[str, str]: - if value is None: - return {} - if not isinstance(value, Mapping): - raise ValueError("Data must be a mapping") - return {str(key): str(val) for key, val in value.items()} - - @field_validator("tokens", mode="before") - def _normalize_tokens(cls, value: list[str] | tuple[str, ...]) -> list[str]: - if not isinstance(value, (list, tuple)): - raise ValueError("Tokens must be a list") - cleaned = [str(token).strip() for token in value if str(token).strip()] - if not cleaned: - raise ValueError("At least one FCM token is required") - return cleaned - - PRIORITY_ORDER: tuple[NotificationPriority, ...] 
= ( NotificationPriority.HIGH, NotificationPriority.NORMAL, diff --git a/app/worker/notification/__init__.py b/app/worker/notification/__init__.py index 51ddbc5..e69de29 100644 --- a/app/worker/notification/__init__.py +++ b/app/worker/notification/__init__.py @@ -1,5 +0,0 @@ -from __future__ import annotations - -from .main import main # noqa: F401 - -__all__ = ["main"] diff --git a/app/infra/firebase.py b/app/worker/notification/firebase.py similarity index 71% rename from app/infra/firebase.py rename to app/worker/notification/firebase.py index e80e765..c1d9c43 100644 --- a/app/infra/firebase.py +++ b/app/worker/notification/firebase.py @@ -1,7 +1,8 @@ from __future__ import annotations +from typing import cast -import firebase_admin -from firebase_admin import messaging +import firebase_admin # pyright: ignore[reportMissingTypeStubs] +from firebase_admin import credentials, messaging # pyright: ignore[reportMissingTypeStubs] from app.core.config import settings from app.core.logger import logger @@ -14,6 +15,15 @@ } +class _SendResponse: + success: bool + exception: Exception | None + + +class _BatchResponse: + responses: list[_SendResponse] + + class NotificationDeliveryError(Exception): def __init__( self, @@ -28,24 +38,28 @@ def __init__( ) -def init_firebase_app() -> None: - if firebase_admin._apps: +def init_firebase_app(credentials_path: str | None = None) -> None: + if firebase_admin._apps: # type: ignore return - credentials_path = settings.FIREBASE_CREDENTIALS_PATH + if credentials_path is None: + credentials_path = settings.FIREBASE_CREDENTIALS_PATH if credentials_path: cred = credentials.Certificate(credentials_path) - firebase_admin.initialize_app(cred) + firebase_admin.initialize_app(cred) # type: ignore logger.info("Firebase initialized with credentials from %s", credentials_path) return - firebase_admin.initialize_app() + firebase_admin.initialize_app() # type: ignore logger.info("Firebase initialized with default credentials") def 
_classify_token_failure(error: Exception) -> bool: - if isinstance(error, (messaging.UnregisteredError, messaging.InvalidArgumentError)): - return True code = getattr(error, "code", None) - return code in INVALID_TOKEN_CODES + + if code in INVALID_TOKEN_CODES: + return True + + name = error.__class__.__name__ + return name in {"UnregisteredError", "InvalidArgumentError"} def send_notification(notification: UnifiedNotification) -> None: @@ -60,7 +74,10 @@ def send_notification(notification: UnifiedNotification) -> None: ), data=notification.data or None, ) - response = messaging.send_multicast(multicast) + response = cast( + _BatchResponse, + messaging.send_multicast(multicast) # type: ignore +) failed_tokens: list[str] = [] invalid_tokens: list[str] = [] diff --git a/app/worker/notification/invalid_tokens.py b/app/worker/notification/invalid_tokens.py new file mode 100644 index 0000000..39a497b --- /dev/null +++ b/app/worker/notification/invalid_tokens.py @@ -0,0 +1,38 @@ +from __future__ import annotations + +from typing import Iterable, Sequence + +from app.core.constant import RedisKey +from app.core.logger import logger +from app.infra.redis import RedisClient + + +class InvalidTokenStore: + def __init__(self, redis: RedisClient) -> None: + self._redis = redis + + async def mark_invalid(self, tokens: Iterable[str]) -> None: + normalized: list[str] = [t for t in tokens if t] + + if not normalized: + return + + await self._redis.sadd(RedisKey.INVALID_TOKEN_SET_KEY, *normalized) + + logger.warning("Marked %d tokens for cleanup", len(normalized)) + + async def is_invalid(self, token: str) -> bool: + if not token: + return False + + return await self._redis.sismember( + RedisKey.INVALID_TOKEN_SET_KEY, token + ) + + async def remove(self, tokens: Sequence[str]) -> None: + if not tokens: + return + + await self._redis.srem( + RedisKey.INVALID_TOKEN_SET_KEY, *tokens + ) \ No newline at end of file diff --git a/app/worker/notification/main.py 
b/app/worker/notification/main.py index 1695568..784cf19 100644 --- a/app/worker/notification/main.py +++ b/app/worker/notification/main.py @@ -1,172 +1,134 @@ from __future__ import annotations import asyncio -from functools import partial from typing import Sequence -from app.core.config import settings from app.core.logger import logger -from app.infra.firebase import ( +from app.worker.notification.firebase import ( NotificationDeliveryError, init_firebase_app, send_notification, ) -from app.infra.invalid_tokens import InvalidTokenStore +from app.worker.notification.invalid_tokens import InvalidTokenStore from app.infra.notification_queue import NotificationQueue, NotificationQueueEntry from app.infra.redis import RedisClient from app.infra.nats import NatsClient -from app.worker.notification.settings import NotifSettings, NotificationWorkerSettings +from app.worker.notification.rate_limiter import RateLimiter +from app.worker.notification.settings import NotifSetting -MAX_SEND_ATTEMPTS = 5 -RETRY_DELAY_SECONDS = 2 - - -async def _process_loop( +async def process_entry( + entry: NotificationQueueEntry, queue: NotificationQueue, invalid_tokens: InvalidTokenStore, - pending: asyncio.PriorityQueue[tuple[int, NotificationQueueEntry]], ) -> None: - while True: - try: - _, entry = await pending.get() - await _process_entry(entry, queue, invalid_tokens) - except asyncio.CancelledError: - break - except Exception: - logger.exception("Notification worker encountered unexpected error") - await asyncio.sleep(RETRY_DELAY_SECONDS) - - -def _parse_entry(raw_payload: bytes | str) -> NotificationQueueEntry | None: - if isinstance(raw_payload, bytes): - raw_payload = raw_payload.decode("utf-8") try: - return NotificationQueueEntry.model_validate_json(raw_payload) - except Exception: - logger.exception("Failed to deserialize notification entry") - return None - - -async def _enqueue_from_nats( - raw_payload: bytes | str, - queue: NotificationQueue, - pending: 
asyncio.PriorityQueue[tuple[int, NotificationQueueEntry]], -) -> None: - entry = _parse_entry(raw_payload) - if entry is None: - return - priority = entry.notification.priority - index = queue.priority_index(priority) - await pending.put((index, entry)) - + await asyncio.to_thread(send_notification, entry.notification) -async def _subscribe( - queue: NotificationQueue, - pending: asyncio.PriorityQueue[tuple[int, NotificationQueueEntry]], -) -> None: - subjects = queue.priority_subjects() - for subject in subjects: - handler = partial(_enqueue_from_nats, queue=queue, pending=pending) - await NatsClient.subscribe(subject, handler) + except NotificationDeliveryError as e: + if e.invalid_tokens: + await invalid_tokens.mark_invalid(e.invalid_tokens) + if e.failed_tokens: + await retry(entry, queue, tokens=e.failed_tokens) -async def _process_entry( - entry: NotificationQueueEntry, - queue: NotificationQueue, - invalid_tokens: InvalidTokenStore, -) -> None: - try: - await asyncio.to_thread(send_notification, entry.notification) - except NotificationDeliveryError as exc: - await _handle_partial_failure(entry, invalid_tokens, queue, exc) except Exception: - logger.exception("Failed to deliver notification, will retry") - await _retry(entry, queue) - + logger.exception("Unexpected error, retrying") + await retry(entry, queue) -async def _handle_partial_failure( - entry: NotificationQueueEntry, - invalid_tokens: InvalidTokenStore, - queue: NotificationQueue, - error: NotificationDeliveryError, -) -> None: - if error.invalid_tokens: - await invalid_tokens.mark_invalid(error.invalid_tokens) - logger.warning("Detected %d invalid tokens", len(error.invalid_tokens)) - if error.failed_tokens: - await _retry(entry, queue, tokens=error.failed_tokens) -async def _retry( +async def retry( entry: NotificationQueueEntry, queue: NotificationQueue, tokens: Sequence[str] | None = None, ) -> None: attempts = entry.attempts + 1 - if attempts >= MAX_SEND_ATTEMPTS: - logger.warning( - 
"Dropping notification after %d attempts (tokens=%d)", - attempts, - len(tokens or entry.notification.tokens), - ) + + if attempts >= NotifSetting.MAX_SEND_ATTEMPTS: + logger.warning("Dropping notification after %d attempts", attempts) return + notification = entry.notification + if tokens is not None: - notification = entry.notification.model_copy(update={"tokens": list(tokens)}) + notification = notification.model_copy(update={"tokens": list(tokens)}) if not notification.tokens: return - await asyncio.sleep(RETRY_DELAY_SECONDS) + + delay = min(NotifSetting.BASE_RETRY_DELAY * (2 ** attempts), 60) + + await asyncio.sleep(delay) await queue.enqueue(notification, attempts=attempts) - logger.info("Requeued notification attempt=%d", attempts) -async def run_worker(queue: NotificationQueue, invalid_tokens: InvalidTokenStore) -> None: - logger.info("Notification worker started") - pending: asyncio.PriorityQueue[tuple[int, NotificationQueueEntry]] = ( - asyncio.PriorityQueue() - ) - await _subscribe(queue, pending) + +async def handle_message( + raw_payload: bytes | str, + queue: NotificationQueue, + invalid_tokens: InvalidTokenStore, +) -> None: try: - await _process_loop(queue, invalid_tokens, pending) - finally: - logger.info("Notification worker shutting down") + if isinstance(raw_payload, bytes): + raw_payload = raw_payload.decode() + entry = NotificationQueueEntry.model_validate_json(raw_payload) -def _setup_notification_queue() -> NotificationQueue: - return NotificationQueue(settings=NotifSettings) + except Exception: + logger.exception("Invalid message payload") + return + await process_entry(entry, queue, invalid_tokens) -def _setup_redis() -> RedisClient: - return RedisClient( - host=NotifSettings.REDIS_HOST, - port=NotifSettings.REDIS_PORT, - password=NotifSettings.REDIS_PASSWORD, - ) -def _setup_invalid_token_store(redis_client: RedisClient) -> InvalidTokenStore: - return InvalidTokenStore(redis_client.client) +async def run_worker( + queue: 
NotificationQueue, + invalid_tokens: InvalidTokenStore, +) -> None: + logger.info("Notification worker started") + + semaphore = asyncio.Semaphore(NotifSetting.CONCURRENCY) + rate_limiter = RateLimiter(NotifSetting.RATE_LIMIT, NotifSetting.RATE_PERIOD) + async def wrapped_handler(msg: bytes | str) -> None: + async with semaphore: + await rate_limiter.acquire() + await handle_message(msg, queue, invalid_tokens) + + for subject in queue.priority_subjects(): + await NatsClient.subscribe(subject, wrapped_handler) + + await asyncio.Event().wait() -async def _initialize_infrastructure() -> tuple[NotificationQueue, InvalidTokenStore]: - init_firebase_app() - await NatsClient.connect() - redis_client = _setup_redis() - queue = _setup_notification_queue() - invalid_tokens = _setup_invalid_token_store(redis_client) - return queue, invalid_tokens async def main() -> None: - queue, invalid_tokens = await _initialize_infrastructure() + init_firebase_app(NotifSetting.firebase_credentials_path) + + await NatsClient.connect( + host=NotifSetting.nats_host, + port=NotifSetting.nats_port, + user=NotifSetting.nats_user, + password=NotifSetting.nats_password, + ) + + redis = RedisClient( + host=NotifSetting.redis_host, + port=NotifSetting.redis_port, + password=NotifSetting.redis_password, + ) + + queue = NotificationQueue(settings=NotifSetting) + invalid_tokens = InvalidTokenStore(redis) + try: await run_worker(queue, invalid_tokens) - except asyncio.CancelledError: - logger.info("Notification worker cancelled") + finally: - logger.info("Notification worker stopped") + await redis.close() + logger.info("Worker shutdown") if __name__ == "__main__": - asyncio.run(main()) + asyncio.run(main()) \ No newline at end of file diff --git a/app/worker/notification/rate_limiter.py b/app/worker/notification/rate_limiter.py new file mode 100644 index 0000000..4e67350 --- /dev/null +++ b/app/worker/notification/rate_limiter.py @@ -0,0 +1,26 @@ +import asyncio +import time + + +class RateLimiter: + 
def __init__(self, rate: int, per: float) -> None: + self._rate = rate + self._per = per + self._tokens = rate + self._last = time.monotonic() + self._lock = asyncio.Lock() + + async def acquire(self) -> None: + async with self._lock: + now = time.monotonic() + elapsed = now - self._last + refill = elapsed * (self._rate / self._per) + self._tokens = min(self._rate, self._tokens + refill) + self._last = now + + if self._tokens < 1: + sleep_time = (1 - self._tokens) * (self._per / self._rate) + await asyncio.sleep(sleep_time) + self._tokens = 0 + else: + self._tokens -= 1 \ No newline at end of file diff --git a/app/worker/notification/settings.py b/app/worker/notification/settings.py index f1e461c..25713b6 100644 --- a/app/worker/notification/settings.py +++ b/app/worker/notification/settings.py @@ -4,15 +4,27 @@ from pydantic import Field from pydantic_settings import BaseSettings + from app.schema.notification import NotificationPriority, PRIORITY_ORDER class NotificationWorkerSettings(BaseSettings): subject_prefix: str = Field("notifications.delivery") queue_group: str | None = Field(None) - REDIS_HOST:str - REDIS_PORT:int - REDIS_PASSWORD:str + redis_host: str = Field("localhost") + redis_port: int = Field(6379) + redis_password: str = Field("") + nats_host: str = Field("localhost") + nats_port: int = Field(4222) + nats_user: str = Field("") + nats_password: str = Field("") + firebase_credentials_path: str | None = Field(None) + MAX_SEND_ATTEMPTS = 5 + BASE_RETRY_DELAY = 2 + + CONCURRENCY = 10 + RATE_LIMIT = 50 + RATE_PERIOD = 1.0 class Config: env_prefix = "NOTIFICATIONS_" @@ -23,4 +35,4 @@ def subject_for(self, priority: NotificationPriority) -> str: def priority_subjects(self) -> Sequence[str]: return [self.subject_for(priority) for priority in PRIORITY_ORDER] -NotifSettings = NotificationWorkerSettings() # type: ignore \ No newline at end of file +NotifSetting = NotificationWorkerSettings() # type: ignore \ No newline at end of file diff --git 
a/typings/apns2/client.pyi b/typings/apns2/client.pyi deleted file mode 100644 index 65c9dca..0000000 --- a/typings/apns2/client.pyi +++ /dev/null @@ -1,78 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from enum import Enum -from typing import Dict, Iterable, Optional, Tuple, Union -from .credentials import Credentials -from .payload import Payload - -class NotificationPriority(Enum): - Immediate = ... - Delayed = ... - - -class NotificationType(Enum): - Alert = ... - Background = ... - VoIP = ... - Complication = ... - FileProvider = ... - MDM = ... - - -RequestStream = ... -Notification = ... -DEFAULT_APNS_PRIORITY = ... -CONCURRENT_STREAMS_SAFETY_MAXIMUM = ... -MAX_CONNECTION_RETRIES = ... -logger = ... -class APNsClient: - SANDBOX_SERVER = ... - LIVE_SERVER = ... - DEFAULT_PORT = ... - ALTERNATIVE_PORT = ... - def __init__(self, credentials: Union[Credentials, str], use_sandbox: bool = ..., use_alternative_port: bool = ..., proto: Optional[str] = ..., json_encoder: Optional[type] = ..., password: Optional[str] = ..., proxy_host: Optional[str] = ..., proxy_port: Optional[int] = ..., heartbeat_period: Optional[float] = ...) -> None: - ... - - def send_notification(self, token_hex: str, notification: Payload, topic: Optional[str] = ..., priority: NotificationPriority = ..., expiration: Optional[int] = ..., collapse_id: Optional[str] = ...) -> None: - ... - - def send_notification_async(self, token_hex: str, notification: Payload, topic: Optional[str] = ..., priority: NotificationPriority = ..., expiration: Optional[int] = ..., collapse_id: Optional[str] = ..., push_type: Optional[NotificationType] = ...) -> int: - ... - - def get_notification_result(self, stream_id: int) -> Union[str, Tuple[str, str]]: - """ - Get result for specified stream - The function returns: 'Success' or 'failure reason' or ('Unregistered', timestamp) - """ - ... 
- - def send_notification_batch(self, notifications: Iterable[Notification], topic: Optional[str] = ..., priority: NotificationPriority = ..., expiration: Optional[int] = ..., collapse_id: Optional[str] = ..., push_type: Optional[NotificationType] = ...) -> Dict[str, Union[str, Tuple[str, str]]]: - """ - Send a notification to a list of tokens in batch. Instead of sending a synchronous request - for each token, send multiple requests concurrently. This is done on the same connection, - using HTTP/2 streams (one request per stream). - - APNs allows many streams simultaneously, but the number of streams can vary depending on - server load. This method reads the SETTINGS frame sent by the server to figure out the - maximum number of concurrent streams. Typically, APNs reports a maximum of 500. - - The function returns a dictionary mapping each token to its result. The result is "Success" - if the token was sent successfully, or the string returned by APNs in the 'reason' field of - the response, if the token generated an error. - """ - ... - - def update_max_concurrent_streams(self) -> None: - ... - - def connect(self) -> None: - """ - Establish a connection to APNs. If already connected, the function does nothing. If the - connection fails, the function retries up to MAX_CONNECTION_RETRIES times. - """ - ... - - - diff --git a/typings/apns2/credentials.pyi b/typings/apns2/credentials.pyi deleted file mode 100644 index fa4fb0f..0000000 --- a/typings/apns2/credentials.pyi +++ /dev/null @@ -1,39 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from typing import Optional, TYPE_CHECKING -from hyper import HTTP20Connection -from hyper.ssl_compat import SSLContext - -if TYPE_CHECKING: - ... -DEFAULT_TOKEN_LIFETIME = ... -DEFAULT_TOKEN_ENCRYPTION_ALGORITHM = ... -class Credentials: - def __init__(self, ssl_context: Optional[SSLContext] = ...) -> None: - ... 
- - def create_connection(self, server: str, port: int, proto: Optional[str], proxy_host: Optional[str] = ..., proxy_port: Optional[int] = ...) -> HTTP20Connection: - ... - - def get_authorization_header(self, topic: Optional[str]) -> Optional[str]: - ... - - - -class CertificateCredentials(Credentials): - def __init__(self, cert_file: Optional[str] = ..., password: Optional[str] = ..., cert_chain: Optional[str] = ...) -> None: - ... - - - -class TokenCredentials(Credentials): - def __init__(self, auth_key_path: str, auth_key_id: str, team_id: str, encryption_algorithm: str = ..., token_lifetime: int = ...) -> None: - ... - - def get_authorization_header(self, topic: Optional[str]) -> str: - ... - - - diff --git a/typings/apns2/errors.pyi b/typings/apns2/errors.pyi deleted file mode 100644 index d8e6670..0000000 --- a/typings/apns2/errors.pyi +++ /dev/null @@ -1,175 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from typing import Optional, Type - -class APNsException(Exception): - ... - - -class ConnectionFailed(APNsException): - """There was an error connecting to APNs.""" - ... - - -class InternalException(APNsException): - """This exception should not be raised. If it is, please report this as a bug.""" - ... - - -class BadPayloadException(APNsException): - """Something bad with the payload.""" - ... - - -class BadCollapseId(BadPayloadException): - """The collapse identifier exceeds the maximum allowed size""" - ... - - -class BadDeviceToken(APNsException): - """The specified device token was bad. - Verify that the request contains a valid token and that the token matches the environment.""" - ... - - -class BadExpirationDate(BadPayloadException): - """The apns-expiration value is bad.""" - ... - - -class BadMessageId(InternalException): - """The apns-id value is bad.""" - ... - - -class BadPriority(InternalException): - """The apns-priority value is bad.""" - ... 
- - -class BadTopic(BadPayloadException): - """The apns-topic was invalid.""" - ... - - -class DeviceTokenNotForTopic(APNsException): - """The device token does not match the specified topic.""" - ... - - -class DuplicateHeaders(InternalException): - """One or more headers were repeated.""" - ... - - -class IdleTimeout(APNsException): - """Idle time out.""" - ... - - -class MissingDeviceToken(APNsException): - """The device token is not specified in the request :path. - Verify that the :path header contains the device token.""" - ... - - -class MissingTopic(BadPayloadException): - """The apns-topic header of the request was not specified and was required. - The apns-topic header is mandatory when the client is connected using a certificate - that supports multiple topics.""" - ... - - -class PayloadEmpty(BadPayloadException): - """The message payload was empty.""" - ... - - -class TopicDisallowed(BadPayloadException): - """Pushing to this topic is not allowed.""" - ... - - -class BadCertificate(APNsException): - """The certificate was bad.""" - ... - - -class BadCertificateEnvironment(APNsException): - """The client certificate was for the wrong environment.""" - ... - - -class ExpiredProviderToken(APNsException): - """The provider token is stale and a new token should be generated.""" - ... - - -class Forbidden(APNsException): - """The specified action is not allowed.""" - ... - - -class InvalidProviderToken(APNsException): - """The provider token is not valid or the token signature could not be verified.""" - ... - - -class MissingProviderToken(APNsException): - """No provider certificate was used to connect to APNs and Authorization header was missing or no provider token - was specified. """ - ... - - -class BadPath(APNsException): - """The request contained a bad :path value.""" - ... - - -class MethodNotAllowed(InternalException): - """The specified :method was not POST.""" - ... 
- - -class Unregistered(APNsException): - """The device token is inactive for the specified topic.""" - def __init__(self, timestamp: Optional[str] = ...) -> None: - ... - - - -class PayloadTooLarge(BadPayloadException): - """The message payload was too large. The maximum payload size is 4096 bytes.""" - ... - - -class TooManyProviderTokenUpdates(APNsException): - """The provider token is being updated too often.""" - ... - - -class TooManyRequests(APNsException): - """Too many requests were made consecutively to the same device token.""" - ... - - -class InternalServerError(APNsException): - """An internal server error occurred.""" - ... - - -class ServiceUnavailable(APNsException): - """The service is unavailable.""" - ... - - -class Shutdown(APNsException): - """The server is shutting down.""" - ... - - -def exception_class_for_reason(reason: str) -> Type[APNsException]: - ... - diff --git a/typings/apns2/payload.pyi b/typings/apns2/payload.pyi deleted file mode 100644 index ea26787..0000000 --- a/typings/apns2/payload.pyi +++ /dev/null @@ -1,25 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from typing import Any, Dict, Iterable, List, Optional, Union - -MAX_PAYLOAD_SIZE = ... -class PayloadAlert: - def __init__(self, title: Optional[str] = ..., title_localized_key: Optional[str] = ..., title_localized_args: Optional[List[str]] = ..., body: Optional[str] = ..., body_localized_key: Optional[str] = ..., body_localized_args: Optional[List[str]] = ..., action_localized_key: Optional[str] = ..., action: Optional[str] = ..., launch_image: Optional[str] = ...) -> None: - ... - - def dict(self) -> Dict[str, Any]: - ... 
- - - -class Payload: - def __init__(self, alert: Union[PayloadAlert, str, None] = ..., badge: Optional[int] = ..., sound: Optional[str] = ..., category: Optional[str] = ..., url_args: Optional[Iterable[str]] = ..., custom: Optional[Dict[str, Any]] = ..., thread_id: Optional[str] = ..., content_available: bool = ..., mutable_content: bool = ...) -> None: - ... - - def dict(self) -> Dict[str, Any]: - ... - - - diff --git a/typings/firebase_admin/__about__.pyi b/typings/firebase_admin/__about__.pyi deleted file mode 100644 index b3c0997..0000000 --- a/typings/firebase_admin/__about__.pyi +++ /dev/null @@ -1,10 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -"""About information (version, etc) for Firebase Admin SDK.""" -__version__ = ... -__title__ = ... -__author__ = ... -__license__ = ... -__url__ = ... diff --git a/typings/firebase_admin/__init__.pyi b/typings/firebase_admin/__init__.pyi deleted file mode 100644 index 4efa37a..0000000 --- a/typings/firebase_admin/__init__.pyi +++ /dev/null @@ -1,125 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -import datetime -import json -import os -import threading -from google.auth.credentials import Credentials as GoogleAuthCredentials -from google.auth.exceptions import DefaultCredentialsError -from firebase_admin import credentials -from firebase_admin.__about__ import __version__ - -"""Firebase Admin SDK for Python.""" -_apps = ... -_apps_lock = ... -_clock = ... -_DEFAULT_APP_NAME = ... -_FIREBASE_CONFIG_ENV_VAR = ... -_CONFIG_VALID_KEYS = ... -def initialize_app(credential=..., options=..., name=...): # -> App: - """Initializes and returns a new App instance. - - Creates a new App instance using the specified options - and the app name. If an instance already exists by the same - app name a ValueError is raised. - If options are not provided an attempt is made to load the options from the environment. - This is done by looking up the ``FIREBASE_CONFIG`` environment variable. 
If the value of - the variable starts with ``"{"``, it is parsed as a JSON object. Otherwise it is treated - as a file name and the JSON content is read from the corresponding file. - Use this function whenever a new App instance is required. Do not directly invoke the - App constructor. - - Args: - credential: A credential object used to initialize the SDK (optional). If none is provided, - Google Application Default Credentials are used. - options: A dictionary of configuration options (optional). Supported options include - ``databaseURL``, ``storageBucket``, ``projectId``, ``databaseAuthVariableOverride``, - ``serviceAccountId`` and ``httpTimeout``. If ``httpTimeout`` is not set, the SDK uses - a default timeout of 120 seconds. - - name: Name of the app (optional). - Returns: - App: A newly initialized instance of App. - - Raises: - ValueError: If the app name is already in use, or any of the - provided arguments are invalid. - """ - ... - -def delete_app(app): # -> None: - """Gracefully deletes an App instance. - - Args: - app: The app instance to be deleted. - - Raises: - ValueError: If the app is not initialized. - """ - ... - -def get_app(name=...): - """Retrieves an App instance by name. - - Args: - name: Name of the App instance to retrieve (optional). - - Returns: - App: An App instance with the given name. - - Raises: - ValueError: If the specified name is not a string, or if the specified - app does not exist. - """ - ... - -class _AppOptions: - """A collection of configuration options for an App.""" - def __init__(self, options) -> None: - ... - - def get(self, key, default=...): # -> Any | None: - """Returns the option identified by the provided key.""" - ... - - - -class App: - """The entry point for Firebase Python SDK. - - Represents a Firebase app, while holding the configuration and state - common to all Firebase APIs. - """ - def __init__(self, name, credential, options) -> None: - """Constructs a new App using the provided name and options. 
- - Args: - name: Name of the application. - credential: A credential object. - options: A dictionary of configuration options. - - Raises: - ValueError: If an argument is None or invalid. - """ - ... - - @property - def name(self): # -> str: - ... - - @property - def credential(self): # -> _ExternalCredentials | Base: - ... - - @property - def options(self): # -> _AppOptions: - ... - - @property - def project_id(self): # -> str | Any | None: - ... - - - diff --git a/typings/firebase_admin/_auth_client.pyi b/typings/firebase_admin/_auth_client.pyi deleted file mode 100644 index 374d332..0000000 --- a/typings/firebase_admin/_auth_client.pyi +++ /dev/null @@ -1,618 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -"""Firebase auth client sub module.""" -class Client: - """Firebase Authentication client scoped to a specific tenant.""" - def __init__(self, app, tenant_id=...) -> None: - ... - - @property - def tenant_id(self): # -> None: - """Tenant ID associated with this client.""" - ... - - def create_custom_token(self, uid, developer_claims=...): # -> bytes: - """Builds and signs a Firebase custom auth token. - - Args: - uid: ID of the user for whom the token is created. - developer_claims: A dictionary of claims to be included in the token - (optional). - - Returns: - bytes: A token minted from the input parameters. - - Raises: - ValueError: If input parameters are invalid. - TokenSignError: If an error occurs while signing the token using the remote IAM service. - """ - ... - - def verify_id_token(self, id_token, check_revoked=..., clock_skew_seconds=...): # -> Any | Mapping[str, Any]: - """Verifies the signature and data for the provided JWT. - - Accepts a signed token string, verifies that it is current, was issued - to this project, and that it was correctly signed by Google. - - Args: - id_token: A string of the encoded JWT. - check_revoked: Boolean, If true, checks whether the token has been revoked or - the user disabled (optional). 
- clock_skew_seconds: The number of seconds to tolerate when checking the token. - Must be between 0-60. Defaults to 0. - - Returns: - dict: A dictionary of key-value pairs parsed from the decoded JWT. - - Raises: - ValueError: If ``id_token`` is a not a string or is empty. - InvalidIdTokenError: If ``id_token`` is not a valid Firebase ID token. - ExpiredIdTokenError: If the specified ID token has expired. - RevokedIdTokenError: If ``check_revoked`` is ``True`` and the ID token has been - revoked. - TenantIdMismatchError: If ``id_token`` belongs to a tenant that is different than - this ``Client`` instance. - CertificateFetchError: If an error occurs while fetching the public key certificates - required to verify the ID token. - UserDisabledError: If ``check_revoked`` is ``True`` and the corresponding user - record is disabled. - """ - ... - - def revoke_refresh_tokens(self, uid): # -> None: - """Revokes all refresh tokens for an existing user. - - This method updates the user's ``tokens_valid_after_timestamp`` to the current UTC - in seconds since the epoch. It is important that the server on which this is called has its - clock set correctly and synchronized. - - While this revokes all sessions for a specified user and disables any new ID tokens for - existing sessions from getting minted, existing ID tokens may remain active until their - natural expiration (one hour). To verify that ID tokens are revoked, use - ``verify_id_token(idToken, check_revoked=True)``. - - Args: - uid: A user ID string. - - Raises: - ValueError: If the user ID is None, empty or malformed. - FirebaseError: If an error occurs while revoking the refresh token. - """ - ... - - def get_user(self, uid): # -> UserRecord: - """Gets the user data corresponding to the specified user ID. - - Args: - uid: A user ID string. - - Returns: - UserRecord: A user record instance. - - Raises: - ValueError: If the user ID is None, empty or malformed. 
- UserNotFoundError: If the specified user ID does not exist. - FirebaseError: If an error occurs while retrieving the user. - """ - ... - - def get_user_by_email(self, email): # -> UserRecord: - """Gets the user data corresponding to the specified user email. - - Args: - email: A user email address string. - - Returns: - UserRecord: A user record instance. - - Raises: - ValueError: If the email is None, empty or malformed. - UserNotFoundError: If no user exists for the specified email address. - FirebaseError: If an error occurs while retrieving the user. - """ - ... - - def get_user_by_phone_number(self, phone_number): # -> UserRecord: - """Gets the user data corresponding to the specified phone number. - - Args: - phone_number: A phone number string. - - Returns: - UserRecord: A user record instance. - - Raises: - ValueError: If the phone number is ``None``, empty or malformed. - UserNotFoundError: If no user exists for the specified phone number. - FirebaseError: If an error occurs while retrieving the user. - """ - ... - - def get_users(self, identifiers): # -> GetUsersResult: - """Gets the user data corresponding to the specified identifiers. - - There are no ordering guarantees; in particular, the nth entry in the - result list is not guaranteed to correspond to the nth entry in the input - parameters list. - - A maximum of 100 identifiers may be supplied. If more than 100 - identifiers are supplied, this method raises a `ValueError`. - - Args: - identifiers (list[Identifier]): A list of ``Identifier`` instances used - to indicate which user records should be returned. Must have <= 100 - entries. - - Returns: - GetUsersResult: A ``GetUsersResult`` instance corresponding to the - specified identifiers. - - Raises: - ValueError: If any of the identifiers are invalid or if more than 100 - identifiers are specified. - """ - ... 
- - def list_users(self, page_token=..., max_results=...): # -> ListUsersPage: - """Retrieves a page of user accounts from a Firebase project. - - The ``page_token`` argument governs the starting point of the page. The ``max_results`` - argument governs the maximum number of user accounts that may be included in the returned - page. This function never returns ``None``. If there are no user accounts in the Firebase - project, this returns an empty page. - - Args: - page_token: A non-empty page token string, which indicates the starting point of the - page (optional). Defaults to ``None``, which will retrieve the first page of users. - max_results: A positive integer indicating the maximum number of users to include in - the returned page (optional). Defaults to 1000, which is also the maximum number - allowed. - - Returns: - ListUsersPage: A page of user accounts. - - Raises: - ValueError: If max_results or page_token are invalid. - FirebaseError: If an error occurs while retrieving the user accounts. - """ - ... - - def create_user(self, **kwargs): # -> UserRecord: - """Creates a new user account with the specified properties. - - Args: - **kwargs: A series of keyword arguments (optional). - - Keyword Args: - uid: User ID to assign to the newly created user (optional). - display_name: The user's display name (optional). - email: The user's primary email (optional). - email_verified: A boolean indicating whether or not the user's primary email is - verified (optional). - phone_number: The user's primary phone number (optional). - photo_url: The user's photo URL (optional). - password: The user's raw, unhashed password. (optional). - disabled: A boolean indicating whether or not the user account is disabled (optional). - - Returns: - UserRecord: A UserRecord instance for the newly created user. - - Raises: - ValueError: If the specified user properties are invalid. - FirebaseError: If an error occurs while creating the user account. - """ - ... 
- - def update_user(self, uid, **kwargs): # -> UserRecord: - """Updates an existing user account with the specified properties. - - Args: - uid: A user ID string. - **kwargs: A series of keyword arguments (optional). - - Keyword Args: - display_name: The user's display name (optional). Can be removed by explicitly passing - ``auth.DELETE_ATTRIBUTE``. - email: The user's primary email (optional). - email_verified: A boolean indicating whether or not the user's primary email is - verified (optional). - phone_number: The user's primary phone number (optional). Can be removed by explicitly - passing ``auth.DELETE_ATTRIBUTE``. - photo_url: The user's photo URL (optional). Can be removed by explicitly passing - ``auth.DELETE_ATTRIBUTE``. - password: The user's raw, unhashed password. (optional). - disabled: A boolean indicating whether or not the user account is disabled (optional). - custom_claims: A dictionary or a JSON string contining the custom claims to be set on - the user account (optional). To remove all custom claims, pass - ``auth.DELETE_ATTRIBUTE``. - valid_since: An integer signifying the seconds since the epoch (optional). This field - is set by ``revoke_refresh_tokens`` and it is discouraged to set this field - directly. - providers_to_delete: The list of provider IDs to unlink, - eg: 'google.com', 'password', etc. - - Returns: - UserRecord: An updated UserRecord instance for the user. - - Raises: - ValueError: If the specified user ID or properties are invalid. - FirebaseError: If an error occurs while updating the user account. - """ - ... - - def set_custom_user_claims(self, uid, custom_claims): # -> None: - """Sets additional claims on an existing user account. - - Custom claims set via this function can be used to define user roles and privilege levels. - These claims propagate to all the devices where the user is already signed in (after token - expiration or when token refresh is forced), and next time the user signs in. 
The claims - can be accessed via the user's ID token JWT. If a reserved OIDC claim is specified (sub, - iat, iss, etc), an error is thrown. Claims payload must also not be larger then 1000 - characters when serialized into a JSON string. - - Args: - uid: A user ID string. - custom_claims: A dictionary or a JSON string of custom claims. Pass None to unset any - claims set previously. - - Raises: - ValueError: If the specified user ID or the custom claims are invalid. - FirebaseError: If an error occurs while updating the user account. - """ - ... - - def delete_user(self, uid): # -> None: - """Deletes the user identified by the specified user ID. - - Args: - uid: A user ID string. - - Raises: - ValueError: If the user ID is None, empty or malformed. - FirebaseError: If an error occurs while deleting the user account. - """ - ... - - def delete_users(self, uids): # -> DeleteUsersResult: - """Deletes the users specified by the given identifiers. - - Deleting a non-existing user does not generate an error (the method is - idempotent.) Non-existing users are considered to be successfully - deleted and are therefore included in the - `DeleteUserResult.success_count` value. - - A maximum of 1000 identifiers may be supplied. If more than 1000 - identifiers are supplied, this method raises a `ValueError`. - - Args: - uids: A list of strings indicating the uids of the users to be deleted. - Must have <= 1000 entries. - - Returns: - DeleteUsersResult: The total number of successful/failed deletions, as - well as the array of errors that correspond to the failed - deletions. - - Raises: - ValueError: If any of the identifiers are invalid or if more than 1000 - identifiers are specified. - """ - ... - - def import_users(self, users, hash_alg=...): # -> UserImportResult: - """Imports the specified list of users into Firebase Auth. - - At most 1000 users can be imported at a time. 
This operation is optimized for bulk imports - and ignores checks on identifier uniqueness, which could result in duplications. The - ``hash_alg`` parameter must be specified when importing users with passwords. Refer to the - ``UserImportHash`` class for supported hash algorithms. - - Args: - users: A list of ``ImportUserRecord`` instances to import. Length of the list must not - exceed 1000. - hash_alg: A ``UserImportHash`` object (optional). Required when importing users with - passwords. - - Returns: - UserImportResult: An object summarizing the result of the import operation. - - Raises: - ValueError: If the provided arguments are invalid. - FirebaseError: If an error occurs while importing users. - """ - ... - - def generate_password_reset_link(self, email, action_code_settings=...): - """Generates the out-of-band email action link for password reset flows for the specified - email address. - - Args: - email: The email of the user whose password is to be reset. - action_code_settings: ``ActionCodeSettings`` instance (optional). Defines whether - the link is to be handled by a mobile app and the additional state information to - be passed in the deep link. - - Returns: - link: The password reset link created by the API - - Raises: - ValueError: If the provided arguments are invalid - EmailNotFoundError: If no user exists for the specified email address. - FirebaseError: If an error occurs while generating the link - """ - ... - - def generate_email_verification_link(self, email, action_code_settings=...): - """Generates the out-of-band email action link for email verification flows for the - specified email address. - - Args: - email: The email of the user to be verified. - action_code_settings: ``ActionCodeSettings`` instance (optional). Defines whether - the link is to be handled by a mobile app and the additional state information to - be passed in the deep link. 
- - Returns: - link: The email verification link created by the API - - Raises: - ValueError: If the provided arguments are invalid - UserNotFoundError: If no user exists for the specified email address. - FirebaseError: If an error occurs while generating the link - """ - ... - - def generate_sign_in_with_email_link(self, email, action_code_settings): - """Generates the out-of-band email action link for email link sign-in flows, using the - action code settings provided. - - Args: - email: The email of the user signing in. - action_code_settings: ``ActionCodeSettings`` instance. Defines whether - the link is to be handled by a mobile app and the additional state information to be - passed in the deep link. - - Returns: - link: The email sign-in link created by the API - - Raises: - ValueError: If the provided arguments are invalid - FirebaseError: If an error occurs while generating the link - """ - ... - - def get_oidc_provider_config(self, provider_id): # -> OIDCProviderConfig: - """Returns the ``OIDCProviderConfig`` with the given ID. - - Args: - provider_id: Provider ID string. - - Returns: - SAMLProviderConfig: An OIDC provider config instance. - - Raises: - ValueError: If the provider ID is invalid, empty or does not have ``oidc.`` prefix. - ConfigurationNotFoundError: If no OIDC provider is available with the given identifier. - FirebaseError: If an error occurs while retrieving the OIDC provider. - """ - ... - - def create_oidc_provider_config(self, provider_id, client_id, issuer, display_name=..., enabled=..., client_secret=..., id_token_response_type=..., code_response_type=...): # -> OIDCProviderConfig: - """Creates a new OIDC provider config from the given parameters. - - OIDC provider support requires Google Cloud's Identity Platform (GCIP). To learn more about - GCIP, including pricing and features, see https://cloud.google.com/identity-platform. - - Args: - provider_id: Provider ID string. Must have the prefix ``oidc.``. 
- client_id: Client ID of the new config. - issuer: Issuer of the new config. Must be a valid URL. - display_name: The user-friendly display name to the current configuration (optional). - This name is also used as the provider label in the Cloud Console. - enabled: A boolean indicating whether the provider configuration is enabled or disabled - (optional). A user cannot sign in using a disabled provider. - client_secret: A string which sets the client secret for the new provider. - This is required for the code flow. - code_response_type: A boolean which sets whether to enable the code response flow for - the new provider. By default, this is not enabled if no response type is - specified. A client secret must be set for this response type. - Having both the code and ID token response flows is currently not supported. - id_token_response_type: A boolean which sets whether to enable the ID token response - flow for the new provider. By default, this is enabled if no response type is - specified. - Having both the code and ID token response flows is currently not supported. - - Returns: - OIDCProviderConfig: The newly created OIDC provider config instance. - - Raises: - ValueError: If any of the specified input parameters are invalid. - FirebaseError: If an error occurs while creating the new OIDC provider config. - """ - ... - - def update_oidc_provider_config(self, provider_id, client_id=..., issuer=..., display_name=..., enabled=..., client_secret=..., id_token_response_type=..., code_response_type=...): # -> OIDCProviderConfig: - """Updates an existing OIDC provider config with the given parameters. - - Args: - provider_id: Provider ID string. Must have the prefix ``oidc.``. - client_id: Client ID of the new config (optional). - issuer: Issuer of the new config (optional). Must be a valid URL. - display_name: The user-friendly display name to the current configuration (optional). - Pass ``auth.DELETE_ATTRIBUTE`` to delete the current display name. 
- enabled: A boolean indicating whether the provider configuration is enabled or disabled - (optional). - client_secret: A string which sets the client secret for the new provider. - This is required for the code flow. - code_response_type: A boolean which sets whether to enable the code response flow for - the new provider. By default, this is not enabled if no response type is specified. - A client secret must be set for this response type. - Having both the code and ID token response flows is currently not supported. - id_token_response_type: A boolean which sets whether to enable the ID token response - flow for the new provider. By default, this is enabled if no response type is - specified. - Having both the code and ID token response flows is currently not supported. - - Returns: - OIDCProviderConfig: The updated OIDC provider config instance. - - Raises: - ValueError: If any of the specified input parameters are invalid. - FirebaseError: If an error occurs while updating the OIDC provider config. - """ - ... - - def delete_oidc_provider_config(self, provider_id): # -> None: - """Deletes the ``OIDCProviderConfig`` with the given ID. - - Args: - provider_id: Provider ID string. - - Raises: - ValueError: If the provider ID is invalid, empty or does not have ``oidc.`` prefix. - ConfigurationNotFoundError: If no OIDC provider is available with the given identifier. - FirebaseError: If an error occurs while deleting the OIDC provider. - """ - ... - - def list_oidc_provider_configs(self, page_token=..., max_results=...): # -> _ListOIDCProviderConfigsPage: - """Retrieves a page of OIDC provider configs from a Firebase project. - - The ``page_token`` argument governs the starting point of the page. The ``max_results`` - argument governs the maximum number of configs that may be included in the returned - page. This function never returns ``None``. If there are no OIDC configs in the Firebase - project, this returns an empty page. 
- - Args: - page_token: A non-empty page token string, which indicates the starting point of the - page (optional). Defaults to ``None``, which will retrieve the first page of users. - max_results: A positive integer indicating the maximum number of users to include in - the returned page (optional). Defaults to 100, which is also the maximum number - allowed. - - Returns: - ListProviderConfigsPage: A page of OIDC provider config instances. - - Raises: - ValueError: If ``max_results`` or ``page_token`` are invalid. - FirebaseError: If an error occurs while retrieving the OIDC provider configs. - """ - ... - - def get_saml_provider_config(self, provider_id): # -> SAMLProviderConfig: - """Returns the ``SAMLProviderConfig`` with the given ID. - - Args: - provider_id: Provider ID string. - - Returns: - SAMLProviderConfig: A SAML provider config instance. - - Raises: - ValueError: If the provider ID is invalid, empty or does not have ``saml.`` prefix. - ConfigurationNotFoundError: If no SAML provider is available with the given identifier. - FirebaseError: If an error occurs while retrieving the SAML provider. - """ - ... - - def create_saml_provider_config(self, provider_id, idp_entity_id, sso_url, x509_certificates, rp_entity_id, callback_url, display_name=..., enabled=...): # -> SAMLProviderConfig: - """Creates a new SAML provider config from the given parameters. - - SAML provider support requires Google Cloud's Identity Platform (GCIP). To learn more about - GCIP, including pricing and features, see https://cloud.google.com/identity-platform. - - Args: - provider_id: Provider ID string. Must have the prefix ``saml.``. - idp_entity_id: The SAML IdP entity identifier. - sso_url: The SAML IdP SSO URL. Must be a valid URL. - x509_certificates: The list of SAML IdP X.509 certificates issued by CA for this - provider. Multiple certificates are accepted to prevent outages during IdP key - rotation (for example ADFS rotates every 10 days). 
When the Auth server receives a - SAML response, it will match the SAML response with the certificate on record. - Otherwise the response is rejected. Developers are expected to manage the - certificate updates as keys are rotated. - rp_entity_id: The SAML relying party (service provider) entity ID. This is defined by - the developer but needs to be provided to the SAML IdP. - callback_url: Callback URL string. This is fixed and must always be the same as the - OAuth redirect URL provisioned by Firebase Auth, unless a custom authDomain is - used. - display_name: The user-friendly display name to the current configuration (optional). - This name is also used as the provider label in the Cloud Console. - enabled: A boolean indicating whether the provider configuration is enabled or disabled - (optional). A user cannot sign in using a disabled provider. - - Returns: - SAMLProviderConfig: The newly created SAML provider config instance. - - Raises: - ValueError: If any of the specified input parameters are invalid. - FirebaseError: If an error occurs while creating the new SAML provider config. - """ - ... - - def update_saml_provider_config(self, provider_id, idp_entity_id=..., sso_url=..., x509_certificates=..., rp_entity_id=..., callback_url=..., display_name=..., enabled=...): # -> SAMLProviderConfig: - """Updates an existing SAML provider config with the given parameters. - - Args: - provider_id: Provider ID string. Must have the prefix ``saml.``. - idp_entity_id: The SAML IdP entity identifier (optional). - sso_url: The SAML IdP SSO URL. Must be a valid URL (optional). - x509_certificates: The list of SAML IdP X.509 certificates issued by CA for this - provider (optional). - rp_entity_id: The SAML relying party entity ID (optional). - callback_url: Callback URL string (optional). - display_name: The user-friendly display name of the current configuration (optional). - Pass ``auth.DELETE_ATTRIBUTE`` to delete the current display name. 
- enabled: A boolean indicating whether the provider configuration is enabled or disabled - (optional). - - Returns: - SAMLProviderConfig: The updated SAML provider config instance. - - Raises: - ValueError: If any of the specified input parameters are invalid. - FirebaseError: If an error occurs while updating the SAML provider config. - """ - ... - - def delete_saml_provider_config(self, provider_id): # -> None: - """Deletes the ``SAMLProviderConfig`` with the given ID. - - Args: - provider_id: Provider ID string. - - Raises: - ValueError: If the provider ID is invalid, empty or does not have ``saml.`` prefix. - ConfigurationNotFoundError: If no SAML provider is available with the given identifier. - FirebaseError: If an error occurs while deleting the SAML provider. - """ - ... - - def list_saml_provider_configs(self, page_token=..., max_results=...): # -> _ListSAMLProviderConfigsPage: - """Retrieves a page of SAML provider configs from a Firebase project. - - The ``page_token`` argument governs the starting point of the page. The ``max_results`` - argument governs the maximum number of configs that may be included in the returned - page. This function never returns ``None``. If there are no SAML configs in the Firebase - project, this returns an empty page. - - Args: - page_token: A non-empty page token string, which indicates the starting point of the - page (optional). Defaults to ``None``, which will retrieve the first page of users. - max_results: A positive integer indicating the maximum number of users to include in - the returned page (optional). Defaults to 100, which is also the maximum number - allowed. - - Returns: - ListProviderConfigsPage: A page of SAML provider config instances. - - Raises: - ValueError: If ``max_results`` or ``page_token`` are invalid. - FirebaseError: If an error occurs while retrieving the SAML provider configs. - """ - ... 
- - - diff --git a/typings/firebase_admin/_auth_providers.pyi b/typings/firebase_admin/_auth_providers.pyi deleted file mode 100644 index ec65f65..0000000 --- a/typings/firebase_admin/_auth_providers.pyi +++ /dev/null @@ -1,192 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from firebase_admin import _auth_utils - -"""Firebase auth providers management sub module.""" -MAX_LIST_CONFIGS_RESULTS = ... -class ProviderConfig: - """Parent type for all authentication provider config types.""" - def __init__(self, data) -> None: - ... - - @property - def provider_id(self): - ... - - @property - def display_name(self): - ... - - @property - def enabled(self): - ... - - - -class OIDCProviderConfig(ProviderConfig): - """Represents the OIDC auth provider configuration. - - See https://openid.net/specs/openid-connect-core-1_0-final.html. - """ - @property - def issuer(self): - ... - - @property - def client_id(self): - ... - - @property - def client_secret(self): - ... - - @property - def id_token_response_type(self): - ... - - @property - def code_response_type(self): - ... - - - -class SAMLProviderConfig(ProviderConfig): - """Represents he SAML auth provider configuration. - - See http://docs.oasis-open.org/security/saml/Post2.0/sstc-saml-tech-overview-2.0.html. - """ - @property - def idp_entity_id(self): - ... - - @property - def sso_url(self): - ... - - @property - def x509_certificates(self): # -> list[Any]: - ... - - @property - def callback_url(self): - ... - - @property - def rp_entity_id(self): - ... - - - -class ListProviderConfigsPage: - """Represents a page of AuthProviderConfig instances retrieved from a Firebase project. - - Provides methods for traversing the provider configs included in this page, as well as - retrieving subsequent pages. The iterator returned by ``iterate_all()`` can be used to iterate - through all provider configs in the Firebase project starting from this page. 
- """ - def __init__(self, download, page_token, max_results) -> None: - ... - - @property - def provider_configs(self): - """A list of ``AuthProviderConfig`` instances available in this page.""" - ... - - @property - def next_page_token(self): - """Page token string for the next page (empty string indicates no more pages).""" - ... - - @property - def has_next_page(self): # -> bool: - """A boolean indicating whether more pages are available.""" - ... - - def get_next_page(self): # -> Self | None: - """Retrieves the next page of provider configs, if available. - - Returns: - ListProviderConfigsPage: Next page of provider configs, or None if this is the last - page. - """ - ... - - def iterate_all(self): # -> _ProviderConfigIterator: - """Retrieves an iterator for provider configs. - - Returned iterator will iterate through all the provider configs in the Firebase project - starting from this page. The iterator will never buffer more than one page of configs - in memory at a time. - - Returns: - iterator: An iterator of AuthProviderConfig instances. - """ - ... - - - -class _ListOIDCProviderConfigsPage(ListProviderConfigsPage): - @property - def provider_configs(self): # -> list[OIDCProviderConfig]: - ... - - - -class _ListSAMLProviderConfigsPage(ListProviderConfigsPage): - @property - def provider_configs(self): # -> list[SAMLProviderConfig]: - ... - - - -class _ProviderConfigIterator(_auth_utils.PageIterator): - @property - def items(self): - ... - - - -class ProviderConfigClient: - """Client for managing Auth provider configurations.""" - PROVIDER_CONFIG_URL = ... - def __init__(self, http_client, project_id, tenant_id=..., url_override=...) -> None: - ... - - def get_oidc_provider_config(self, provider_id): # -> OIDCProviderConfig: - ... 
- - def create_oidc_provider_config(self, provider_id, client_id, issuer, display_name=..., enabled=..., client_secret=..., id_token_response_type=..., code_response_type=...): # -> OIDCProviderConfig: - """Creates a new OIDC provider config from the given parameters.""" - ... - - def update_oidc_provider_config(self, provider_id, client_id=..., issuer=..., display_name=..., enabled=..., client_secret=..., id_token_response_type=..., code_response_type=...): # -> OIDCProviderConfig: - """Updates an existing OIDC provider config with the given parameters.""" - ... - - def delete_oidc_provider_config(self, provider_id): # -> None: - ... - - def list_oidc_provider_configs(self, page_token=..., max_results=...): # -> _ListOIDCProviderConfigsPage: - ... - - def get_saml_provider_config(self, provider_id): # -> SAMLProviderConfig: - ... - - def create_saml_provider_config(self, provider_id, idp_entity_id, sso_url, x509_certificates, rp_entity_id, callback_url, display_name=..., enabled=...): # -> SAMLProviderConfig: - """Creates a new SAML provider config from the given parameters.""" - ... - - def update_saml_provider_config(self, provider_id, idp_entity_id=..., sso_url=..., x509_certificates=..., rp_entity_id=..., callback_url=..., display_name=..., enabled=...): # -> SAMLProviderConfig: - """Updates an existing SAML provider config with the given parameters.""" - ... - - def delete_saml_provider_config(self, provider_id): # -> None: - ... - - def list_saml_provider_configs(self, page_token=..., max_results=...): # -> _ListSAMLProviderConfigsPage: - ... - - - diff --git a/typings/firebase_admin/_auth_utils.pyi b/typings/firebase_admin/_auth_utils.pyi deleted file mode 100644 index 5de7d7b..0000000 --- a/typings/firebase_admin/_auth_utils.pyi +++ /dev/null @@ -1,234 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from firebase_admin import exceptions - -"""Firebase auth utils.""" -EMULATOR_HOST_ENV_VAR = ... -MAX_CLAIMS_PAYLOAD_SIZE = ... 
-RESERVED_CLAIMS = ... -VALID_EMAIL_ACTION_TYPES = ... -class PageIterator: - """An iterator that allows iterating over a sequence of items, one at a time. - - This implementation loads a page of items into memory, and iterates on them. When the whole - page has been traversed, it loads another page. This class never keeps more than one page - of entries in memory. - """ - def __init__(self, current_page) -> None: - ... - - def __next__(self): - ... - - def __iter__(self): # -> Self: - ... - - @property - def items(self): - ... - - - -def get_emulator_host(): # -> str: - ... - -def is_emulated(): # -> bool: - ... - -def validate_uid(uid, required=...): # -> str | None: - ... - -def validate_email(email, required=...): # -> str | None: - ... - -def validate_phone(phone, required=...): # -> str | None: - """Validates the specified phone number. - - Phone number vlidation is very lax here. Backend will enforce E.164 spec compliance, and - normalize accordingly. Here we check if the number starts with + sign, and contains at - least one alphanumeric character. - """ - ... - -def validate_password(password, required=...): # -> str | None: - ... - -def validate_bytes(value, label, required=...): # -> bytes | None: - ... - -def validate_display_name(display_name, required=...): # -> str | None: - ... - -def validate_provider_id(provider_id, required=...): # -> str | None: - ... - -def validate_provider_uid(provider_uid, required=...): # -> str | None: - ... - -def validate_photo_url(photo_url, required=...): # -> str | None: - """Parses and validates the given URL string.""" - ... - -def validate_timestamp(timestamp, label, required=...): # -> int | None: - """Validates the given timestamp value. Timestamps must be positive integers.""" - ... - -def validate_int(value, label, low=..., high=...): # -> int: - """Validates that the given value represents an integer. - - There are several ways to represent an integer in Python (e.g. 2, 2L, 2.0). 
This method allows - for all such representations except for booleans. Booleans also behave like integers, but - always translate to 1 and 0. Passing a boolean to an API that expects integers is most likely - a developer error. - """ - ... - -def validate_string(value, label): # -> str: - """Validates that the given value is a string.""" - ... - -def validate_boolean(value, label): # -> bool: - """Validates that the given value is a boolean.""" - ... - -def validate_custom_claims(custom_claims, required=...): # -> str | None: - """Validates the specified custom claims. - - Custom claims must be specified as a JSON string. The string must not exceed 1000 - characters, and the parsed JSON payload must not contain reserved JWT claims. - """ - ... - -def validate_action_type(action_type): - ... - -def validate_provider_ids(provider_ids, required=...): # -> list[Any]: - ... - -def build_update_mask(params): # -> list[Any]: - """Creates an update mask list from the given dictionary.""" - ... - -class UidAlreadyExistsError(exceptions.AlreadyExistsError): - """The user with the provided uid already exists.""" - default_message = ... - def __init__(self, message, cause, http_response) -> None: - ... - - - -class EmailAlreadyExistsError(exceptions.AlreadyExistsError): - """The user with the provided email already exists.""" - default_message = ... - def __init__(self, message, cause, http_response) -> None: - ... - - - -class InsufficientPermissionError(exceptions.PermissionDeniedError): - """The credential used to initialize the SDK lacks required permissions.""" - default_message = ... - def __init__(self, message, cause, http_response) -> None: - ... - - - -class InvalidDynamicLinkDomainError(exceptions.InvalidArgumentError): - """Dynamic link domain in ActionCodeSettings is not authorized.""" - default_message = ... - def __init__(self, message, cause, http_response) -> None: - ... 
- - - -class InvalidIdTokenError(exceptions.InvalidArgumentError): - """The provided ID token is not a valid Firebase ID token.""" - default_message = ... - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -class PhoneNumberAlreadyExistsError(exceptions.AlreadyExistsError): - """The user with the provided phone number already exists.""" - default_message = ... - def __init__(self, message, cause, http_response) -> None: - ... - - - -class UnexpectedResponseError(exceptions.UnknownError): - """Backend service responded with an unexpected or malformed response.""" - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -class UserNotFoundError(exceptions.NotFoundError): - """No user record found for the specified identifier.""" - default_message = ... - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -class EmailNotFoundError(exceptions.NotFoundError): - """No user record found for the specified email.""" - default_message = ... - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -class TenantNotFoundError(exceptions.NotFoundError): - """No tenant found for the specified identifier.""" - default_message = ... - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -class TenantIdMismatchError(exceptions.InvalidArgumentError): - """Missing or invalid tenant ID field in the given JWT.""" - def __init__(self, message) -> None: - ... - - - -class ConfigurationNotFoundError(exceptions.NotFoundError): - """No auth provider found for the specified identifier.""" - default_message = ... - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -class UserDisabledError(exceptions.InvalidArgumentError): - """An operation failed due to a user record being disabled.""" - default_message = ... - def __init__(self, message, cause=..., http_response=...) -> None: - ... 
- - - -class TooManyAttemptsTryLaterError(exceptions.ResourceExhaustedError): - """Rate limited because of too many attempts.""" - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -class ResetPasswordExceedLimitError(exceptions.ResourceExhaustedError): - """Reset password emails exceeded their limits.""" - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -_CODE_TO_EXC_TYPE = ... -def handle_auth_backend_error(error): # -> DeadlineExceededError | UnavailableError | UnknownError: - """Converts a requests error received from the Firebase Auth service into a FirebaseError.""" - ... - diff --git a/typings/firebase_admin/_gapic_utils.pyi b/typings/firebase_admin/_gapic_utils.pyi deleted file mode 100644 index 1b0f444..0000000 --- a/typings/firebase_admin/_gapic_utils.pyi +++ /dev/null @@ -1,43 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -"""Internal utilities for interacting with Google API client.""" -def handle_platform_error_from_googleapiclient(error, handle_func=...): # -> DeadlineExceededError | UnavailableError | UnknownError: - """Constructs a ``FirebaseError`` from the given googleapiclient error. - - This can be used to handle errors returned by Google Cloud Platform (GCP) APIs. - - Args: - error: An error raised by the googleapiclient while making an HTTP call to a GCP API. - handle_func: A function that can be used to handle platform errors in a custom way. When - specified, this function will be called with three arguments. It has the same - signature as ```_handle_func_googleapiclient``, but may return ``None``. - - Returns: - FirebaseError: A ``FirebaseError`` that can be raised to the user code. - """ - ... - -def handle_googleapiclient_error(error, message=..., code=..., http_response=...): # -> DeadlineExceededError | UnavailableError | UnknownError: - """Constructs a ``FirebaseError`` from the given googleapiclient error. 
- - This method is agnostic of the remote service that produced the error, whether it is a GCP - service or otherwise. Therefore, this method does not attempt to parse the error response in - any way. - - Args: - error: An error raised by the googleapiclient module while making an HTTP call. - message: A message to be included in the resulting ``FirebaseError`` (optional). If not - specified the string representation of the ``error`` argument is used as the message. - code: A GCP error code that will be used to determine the resulting error type (optional). - If not specified the HTTP status code on the error response is used to determine a - suitable error code. - http_response: A requests HTTP response object to associate with the exception (optional). - If not specified, one will be created from the ``error``. - - Returns: - FirebaseError: A ``FirebaseError`` that can be raised to the user code. - """ - ... - diff --git a/typings/firebase_admin/_http_client.pyi b/typings/firebase_admin/_http_client.pyi deleted file mode 100644 index def2156..0000000 --- a/typings/firebase_admin/_http_client.pyi +++ /dev/null @@ -1,106 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from requests.packages.urllib3.util import retry - -"""Internal HTTP client module. - - This module provides utilities for making HTTP calls using the requests library. - """ -if hasattr(retry.Retry.DEFAULT, 'allowed_methods'): - _ANY_METHOD = ... -else: - _ANY_METHOD = ... -DEFAULT_RETRY_CONFIG = ... -DEFAULT_TIMEOUT_SECONDS = ... -METRICS_HEADERS = ... -class HttpClient: - """Base HTTP client used to make HTTP calls. - - HttpClient maintains an HTTP session, and handles request authentication and retries if - necessary. - """ - def __init__(self, credential=..., session=..., base_url=..., headers=..., retries=..., timeout=...) -> None: - """Creates a new HttpClient instance from the provided arguments. 
- - If a credential is provided, initializes a new HTTP session authorized with it. If neither - a credential nor a session is provided, initializes a new unauthorized session. - - Args: - credential: A Google credential that can be used to authenticate requests (optional). - session: A custom HTTP session (optional). - base_url: A URL prefix to be added to all outgoing requests (optional). - headers: A map of headers to be added to all outgoing requests (optional). - retries: A urllib retry configuration. Default settings would retry once for low-level - connection and socket read errors, and up to 4 times for HTTP 500 and 503 errors. - Pass a False value to disable retries (optional). - timeout: HTTP timeout in seconds. Defaults to 120 seconds when not specified. Set to - None to disable timeouts (optional). - """ - ... - - @property - def session(self): # -> Session | None: - ... - - @property - def base_url(self): # -> str: - ... - - @property - def timeout(self): # -> int: - ... - - def parse_body(self, resp): - ... - - def request(self, method, url, **kwargs): # -> Response: - """Makes an HTTP call using the Python requests library. - - This is the sole entry point to the requests library. All other helper methods in this - class call this method to send HTTP requests out. Refer to - http://docs.python-requests.org/en/master/api/ for more information on supported options - and features. - - Args: - method: HTTP method name as a string (e.g. get, post). - url: URL of the remote endpoint. - **kwargs: An additional set of keyword arguments to be passed into the requests API - (e.g. json, params, timeout). - - Returns: - Response: An HTTP response object. - - Raises: - RequestException: Any requests exceptions encountered while making the HTTP call. - """ - ... - - def headers(self, method, url, **kwargs): # -> CaseInsensitiveDict[str]: - ... - - def body_and_response(self, method, url, **kwargs): # -> tuple[Any, Response | Any]: - ... 
- - def body(self, method, url, **kwargs): - ... - - def headers_and_body(self, method, url, **kwargs): # -> tuple[CaseInsensitiveDict[str] | Any, Any]: - ... - - def close(self): # -> None: - ... - - - -class JsonHttpClient(HttpClient): - """An HTTP client that parses response messages as JSON.""" - def __init__(self, **kwargs) -> None: - ... - - def parse_body(self, resp): - ... - - - diff --git a/typings/firebase_admin/_messaging_encoder.pyi b/typings/firebase_admin/_messaging_encoder.pyi deleted file mode 100644 index 1680e9e..0000000 --- a/typings/firebase_admin/_messaging_encoder.pyi +++ /dev/null @@ -1,204 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -import json - -"""Encoding and validation utils for the messaging (FCM) module.""" -class Message: - """A message that can be sent via Firebase Cloud Messaging. - - Contains payload information as well as recipient information. In particular, the message must - contain exactly one of token, topic or condition fields. - - Args: - data: A dictionary of data fields (optional). All keys and values in the dictionary must be - strings. - notification: An instance of ``messaging.Notification`` (optional). - android: An instance of ``messaging.AndroidConfig`` (optional). - webpush: An instance of ``messaging.WebpushConfig`` (optional). - apns: An instance of ``messaging.ApnsConfig`` (optional). - fcm_options: An instance of ``messaging.FCMOptions`` (optional). - token: The registration token of the device to which the message should be sent (optional). - topic: Name of the FCM topic to which the message should be sent (optional). Topic name - may contain the ``/topics/`` prefix. - condition: The FCM condition to which the message should be sent (optional). - """ - def __init__(self, data=..., notification=..., android=..., webpush=..., apns=..., fcm_options=..., token=..., topic=..., condition=...) -> None: - ... - - def __str__(self) -> str: - ... 
- - - -class MulticastMessage: - """A message that can be sent to multiple tokens via Firebase Cloud Messaging. - - Args: - tokens: A list of registration tokens of targeted devices. - data: A dictionary of data fields (optional). All keys and values in the dictionary must be - strings. - notification: An instance of ``messaging.Notification`` (optional). - android: An instance of ``messaging.AndroidConfig`` (optional). - webpush: An instance of ``messaging.WebpushConfig`` (optional). - apns: An instance of ``messaging.ApnsConfig`` (optional). - fcm_options: An instance of ``messaging.FCMOptions`` (optional). - """ - def __init__(self, tokens, data=..., notification=..., android=..., webpush=..., apns=..., fcm_options=...) -> None: - ... - - - -class _Validators: - """A collection of data validation utilities. - - Methods provided in this class raise ``ValueErrors`` if any validations fail. - """ - @classmethod - def check_string(cls, label, value, non_empty=...): # -> str | None: - """Checks if the given value is a string.""" - ... - - @classmethod - def check_number(cls, label, value): # -> Number | None: - ... - - @classmethod - def check_string_dict(cls, label, value): # -> dict[Any, Any] | None: - """Checks if the given value is a dictionary comprised only of string keys and values.""" - ... - - @classmethod - def check_string_list(cls, label, value): # -> list[Any] | None: - """Checks if the given value is a list comprised only of strings.""" - ... - - @classmethod - def check_number_list(cls, label, value): # -> list[Any] | None: - """Checks if the given value is a list comprised only of numbers.""" - ... - - @classmethod - def check_analytics_label(cls, label, value): # -> str | None: - """Checks if the given value is a valid analytics label.""" - ... - - @classmethod - def check_boolean(cls, label, value): # -> bool | None: - """Checks if the given value is boolean.""" - ... 
- - @classmethod - def check_datetime(cls, label, value): # -> datetime | None: - """Checks if the given value is a datetime.""" - ... - - - -class MessageEncoder(json.JSONEncoder): - """A custom ``JSONEncoder`` implementation for serializing Message instances into JSON.""" - @classmethod - def remove_null_values(cls, dict_value): # -> dict[Any, Any]: - ... - - @classmethod - def encode_android(cls, android): # -> dict[Any, Any] | None: - """Encodes an ``AndroidConfig`` instance into JSON.""" - ... - - @classmethod - def encode_android_fcm_options(cls, fcm_options): # -> dict[Any, Any] | None: - """Encodes an ``AndroidFCMOptions`` instance into JSON.""" - ... - - @classmethod - def encode_ttl(cls, ttl): # -> str | None: - """Encodes an ``AndroidConfig`` ``TTL`` duration into a string.""" - ... - - @classmethod - def encode_milliseconds(cls, label, msec): # -> str | None: - """Encodes a duration in milliseconds into a string.""" - ... - - @classmethod - def encode_android_notification(cls, notification): # -> dict[Any, Any] | None: - """Encodes an ``AndroidNotification`` instance into JSON.""" - ... - - @classmethod - def encode_light_settings(cls, light_settings): # -> dict[Any, Any] | None: - """Encodes a ``LightSettings`` instance into JSON.""" - ... - - @classmethod - def encode_webpush(cls, webpush): # -> dict[Any, Any] | None: - """Encodes a ``WebpushConfig`` instance into JSON.""" - ... - - @classmethod - def encode_webpush_notification(cls, notification): # -> dict[Any, Any] | None: - """Encodes a ``WebpushNotification`` instance into JSON.""" - ... - - @classmethod - def encode_webpush_notification_actions(cls, actions): # -> list[Any] | None: - """Encodes a list of ``WebpushNotificationActions`` into JSON.""" - ... - - @classmethod - def encode_webpush_fcm_options(cls, options): # -> dict[Any, Any] | None: - """Encodes a ``WebpushFCMOptions`` instance into JSON.""" - ... 
- - @classmethod - def encode_apns(cls, apns): # -> dict[Any, Any] | None: - """Encodes an ``APNSConfig`` instance into JSON.""" - ... - - @classmethod - def encode_apns_payload(cls, payload): # -> dict[Any, Any] | None: - """Encodes an ``APNSPayload`` instance into JSON.""" - ... - - @classmethod - def encode_apns_fcm_options(cls, fcm_options): # -> dict[Any, Any] | None: - """Encodes an ``APNSFCMOptions`` instance into JSON.""" - ... - - @classmethod - def encode_aps(cls, aps): # -> dict[Any, Any]: - """Encodes an ``Aps`` instance into JSON.""" - ... - - @classmethod - def encode_aps_sound(cls, sound): # -> str | dict[Any, Any] | None: - """Encodes an APNs sound configuration into JSON.""" - ... - - @classmethod - def encode_aps_alert(cls, alert): # -> str | dict[Any, Any] | None: - """Encodes an ``ApsAlert`` instance into JSON.""" - ... - - @classmethod - def encode_notification(cls, notification): # -> dict[Any, Any] | None: - """Encodes a ``Notification`` instance into JSON.""" - ... - - @classmethod - def sanitize_topic_name(cls, topic): # -> None: - """Removes the /topics/ prefix from the topic name, if present.""" - ... - - def default(self, o): # -> Any | dict[Any, Any]: - ... - - @classmethod - def encode_fcm_options(cls, fcm_options): # -> dict[Any, Any] | None: - """Encodes an ``FCMOptions`` instance into JSON.""" - ... - - - diff --git a/typings/firebase_admin/_messaging_utils.pyi b/typings/firebase_admin/_messaging_utils.pyi deleted file mode 100644 index db9da16..0000000 --- a/typings/firebase_admin/_messaging_utils.pyi +++ /dev/null @@ -1,402 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from firebase_admin import exceptions - -"""Types and utilities used by the messaging (FCM) module.""" -class Notification: - """A notification that can be included in a message. - - Args: - title: Title of the notification (optional). - body: Body of the notification (optional). 
- image: Image url of the notification (optional) - """ - def __init__(self, title=..., body=..., image=...) -> None: - ... - - - -class AndroidConfig: - """Android-specific options that can be included in a message. - - Args: - collapse_key: Collapse key string for the message (optional). This is an identifier for a - group of messages that can be collapsed, so that only the last message is sent when - delivery can be resumed. A maximum of 4 different collapse keys may be active at a - given time. - priority: Priority of the message (optional). Must be one of ``high`` or ``normal``. - ttl: The time-to-live duration of the message (optional). This can be specified - as a numeric seconds value or a ``datetime.timedelta`` instance. - restricted_package_name: The package name of the application where the registration tokens - must match in order to receive the message (optional). - data: A dictionary of data fields (optional). All keys and values in the dictionary must be - strings. When specified, overrides any data fields set via ``Message.data``. - notification: A ``messaging.AndroidNotification`` to be included in the message (optional). - fcm_options: A ``messaging.AndroidFCMOptions`` to be included in the message (optional). - direct_boot_ok: A boolean indicating whether messages will be allowed to be delivered to - the app while the device is in direct boot mode (optional). - """ - def __init__(self, collapse_key=..., priority=..., ttl=..., restricted_package_name=..., data=..., notification=..., fcm_options=..., direct_boot_ok=...) -> None: - ... - - - -class AndroidNotification: - """Android-specific notification parameters. - - Args: - title: Title of the notification (optional). If specified, overrides the title set via - ``messaging.Notification``. - body: Body of the notification (optional). If specified, overrides the body set via - ``messaging.Notification``. - icon: Icon of the notification (optional). 
- color: Color of the notification icon expressed in ``#rrggbb`` form (optional). - sound: Sound to be played when the device receives the notification (optional). This is - usually the file name of the sound resource. - tag: Tag of the notification (optional). This is an identifier used to replace existing - notifications in the notification drawer. If not specified, each request creates a new - notification. - click_action: The action associated with a user click on the notification (optional). If - specified, an activity with a matching intent filter is launched when a user clicks on - the notification. - body_loc_key: Key of the body string in the app's string resources to use to localize the - body text (optional). - body_loc_args: A list of resource keys that will be used in place of the format specifiers - in ``body_loc_key`` (optional). - title_loc_key: Key of the title string in the app's string resources to use to localize the - title text (optional). - title_loc_args: A list of resource keys that will be used in place of the format specifiers - in ``title_loc_key`` (optional). - channel_id: channel_id of the notification (optional). - image: Image url of the notification (optional). - ticker: Sets the ``ticker`` text, which is sent to accessibility services. Prior to API - level 21 (Lollipop), sets the text that is displayed in the status bar when the - notification first arrives (optional). - sticky: When set to ``False`` or unset, the notification is automatically dismissed when the - user clicks it in the panel. When set to ``True``, the notification persists even when - the user clicks it (optional). - event_timestamp: For notifications that inform users about events with an absolute time - reference, sets the time that the event in the notification occurred as a - ``datetime.datetime`` instance. If the ``datetime.datetime`` instance is naive, it - defaults to be in the UTC timezone. Notifications in the panel are sorted by this time - (optional). 
- local_only: Sets whether or not this notification is relevant only to the current device. - Some notifications can be bridged to other devices for remote display, such as a Wear OS - watch. This hint can be set to recommend this notification not be bridged (optional). - See Wear OS guides: - https://developer.android.com/training/wearables/notifications/bridger#existing-method-of-preventing-bridging - priority: Sets the relative priority for this notification. Low-priority notifications may - be hidden from the user in certain situations. Note this priority differs from - ``AndroidMessagePriority``. This priority is processed by the client after the message - has been delivered. Whereas ``AndroidMessagePriority`` is an FCM concept that controls - when the message is delivered (optional). Must be one of ``default``, ``min``, ``low``, - ``high``, ``max`` or ``normal``. - vibrate_timings_millis: Sets the vibration pattern to use. Pass in an array of milliseconds - to turn the vibrator on or off. The first value indicates the duration to wait before - turning the vibrator on. The next value indicates the duration to keep the vibrator on. - Subsequent values alternate between duration to turn the vibrator off and to turn the - vibrator on. If ``vibrate_timings`` is set and ``default_vibrate_timings`` is set to - ``True``, the default value is used instead of the user-specified ``vibrate_timings``. - default_vibrate_timings: If set to ``True``, use the Android framework's default vibrate - pattern for the notification (optional). Default values are specified in ``config.xml`` - https://android.googlesource.com/platform/frameworks/base/+/master/core/res/res/values/config.xml. - If ``default_vibrate_timings`` is set to ``True`` and ``vibrate_timings`` is also set, - the default value is used instead of the user-specified ``vibrate_timings``. - default_sound: If set to ``True``, use the Android framework's default sound for the - notification (optional). 
Default values are specified in ``config.xml`` - https://android.googlesource.com/platform/frameworks/base/+/master/core/res/res/values/config.xml - light_settings: Settings to control the notification's LED blinking rate and color if LED is - available on the device. The total blinking time is controlled by the OS (optional). - default_light_settings: If set to ``True``, use the Android framework's default LED light - settings for the notification. Default values are specified in ``config.xml`` - https://android.googlesource.com/platform/frameworks/base/+/master/core/res/res/values/config.xml. - If ``default_light_settings`` is set to ``True`` and ``light_settings`` is also set, the - user-specified ``light_settings`` is used instead of the default value. - visibility: Sets the visibility of the notification. Must be either ``private``, ``public``, - or ``secret``. If unspecified, it remains undefined in the Admin SDK, and defers to - the FCM backend's default mapping. - notification_count: Sets the number of items this notification represents. May be displayed - as a badge count for Launchers that support badging. See ``NotificationBadge`` - https://developer.android.com/training/notify-user/badges. For example, this might be - useful if you're using just one notification to represent multiple new messages but you - want the count here to represent the number of total new messages. If zero or - unspecified, systems that support badging use the default, which is to increment a - number displayed on the long-press menu each time a new notification arrives. - proxy: Sets if the notification may be proxied. Must be one of ``allow``, ``deny``, or - ``if_priority_lowered``. If unspecified, it remains undefined in the Admin SDK, and - defers to the FCM backend's default mapping. 
- - - """ - def __init__(self, title=..., body=..., icon=..., color=..., sound=..., tag=..., click_action=..., body_loc_key=..., body_loc_args=..., title_loc_key=..., title_loc_args=..., channel_id=..., image=..., ticker=..., sticky=..., event_timestamp=..., local_only=..., priority=..., vibrate_timings_millis=..., default_vibrate_timings=..., default_sound=..., light_settings=..., default_light_settings=..., visibility=..., notification_count=..., proxy=...) -> None: - ... - - - -class LightSettings: - """Represents settings to control notification LED that can be included in a - ``messaging.AndroidNotification``. - - Args: - color: Sets the color of the LED in ``#rrggbb`` or ``#rrggbbaa`` format. - light_on_duration_millis: Along with ``light_off_duration``, defines the blink rate of LED - flashes. - light_off_duration_millis: Along with ``light_on_duration``, defines the blink rate of LED - flashes. - """ - def __init__(self, color, light_on_duration_millis, light_off_duration_millis) -> None: - ... - - - -class AndroidFCMOptions: - """Options for features provided by the FCM SDK for Android. - - Args: - analytics_label: contains additional options for features provided by the FCM Android SDK - (optional). - """ - def __init__(self, analytics_label=...) -> None: - ... - - - -class WebpushConfig: - """Webpush-specific options that can be included in a message. - - Args: - headers: A dictionary of headers (optional). Refer `Webpush Specification`_ for supported - headers. - data: A dictionary of data fields (optional). All keys and values in the dictionary must be - strings. When specified, overrides any data fields set via ``Message.data``. - notification: A ``messaging.WebpushNotification`` to be included in the message (optional). - fcm_options: A ``messaging.WebpushFCMOptions`` instance to be included in the message - (optional). - - .. 
_Webpush Specification: https://tools.ietf.org/html/rfc8030#section-5 - """ - def __init__(self, headers=..., data=..., notification=..., fcm_options=...) -> None: - ... - - - -class WebpushNotificationAction: - """An action available to the users when the notification is presented. - - Args: - action: Action string. - title: Title string. - icon: Icon URL for the action (optional). - """ - def __init__(self, action, title, icon=...) -> None: - ... - - - -class WebpushNotification: - """Webpush-specific notification parameters. - - Refer to the `Notification Reference`_ for more information. - - Args: - title: Title of the notification (optional). If specified, overrides the title set via - ``messaging.Notification``. - body: Body of the notification (optional). If specified, overrides the body set via - ``messaging.Notification``. - icon: Icon URL of the notification (optional). - actions: A list of ``messaging.WebpushNotificationAction`` instances (optional). - badge: URL of the image used to represent the notification when there is - not enough space to display the notification itself (optional). - data: Any arbitrary JSON data that should be associated with the notification (optional). - direction: The direction in which to display the notification (optional). Must be either - 'auto', 'ltr' or 'rtl'. - image: The URL of an image to be displayed in the notification (optional). - language: Notification language (optional). - renotify: A boolean indicating whether the user should be notified after a new - notification replaces an old one (optional). - require_interaction: A boolean indicating whether a notification should remain active - until the user clicks or dismisses it, rather than closing automatically (optional). - silent: ``True`` to indicate that the notification should be silent (optional). - tag: An identifying tag on the notification (optional). - timestamp_millis: A timestamp value in milliseconds on the notification (optional). 
- vibrate: A vibration pattern for the device's vibration hardware to emit when the - notification fires (optional). The pattern is specified as an integer array. - custom_data: A dict of custom key-value pairs to be included in the notification - (optional) - - .. _Notification Reference: https://developer.mozilla.org/en-US/docs/Web/API\ - /notification/Notification - """ - def __init__(self, title=..., body=..., icon=..., actions=..., badge=..., data=..., direction=..., image=..., language=..., renotify=..., require_interaction=..., silent=..., tag=..., timestamp_millis=..., vibrate=..., custom_data=...) -> None: - ... - - - -class WebpushFCMOptions: - """Options for features provided by the FCM SDK for Web. - - Args: - link: The link to open when the user clicks on the notification. Must be an HTTPS URL - (optional). - """ - def __init__(self, link=...) -> None: - ... - - - -class APNSConfig: - """APNS-specific options that can be included in a message. - - Refer to `APNS Documentation`_ for more information. - - Args: - headers: A dictionary of headers (optional). - payload: A ``messaging.APNSPayload`` to be included in the message (optional). - fcm_options: A ``messaging.APNSFCMOptions`` instance to be included in the message - (optional). - - .. _APNS Documentation: https://developer.apple.com/library/content/documentation\ - /NetworkingInternet/Conceptual/RemoteNotificationsPG/CommunicatingwithAPNs.html - """ - def __init__(self, headers=..., payload=..., fcm_options=...) -> None: - ... - - - -class APNSPayload: - """Payload of an APNS message. - - Args: - aps: A ``messaging.Aps`` instance to be included in the payload. - **kwargs: Arbitrary keyword arguments to be included as custom fields in the payload - (optional). - """ - def __init__(self, aps, **kwargs) -> None: - ... - - - -class Aps: - """Aps dictionary to be included in an APNS payload. - - Args: - alert: A string or a ``messaging.ApsAlert`` instance (optional). 
- badge: A number representing the badge to be displayed with the message (optional). - sound: Name of the sound file to be played with the message or a - ``messaging.CriticalSound`` instance (optional). - content_available: A boolean indicating whether to configure a background update - notification (optional). - category: String identifier representing the message type (optional). - thread_id: An app-specific string identifier for grouping messages (optional). - mutable_content: A boolean indicating whether to support mutating notifications at - the client using app extensions (optional). - custom_data: A dict of custom key-value pairs to be included in the Aps dictionary - (optional). - """ - def __init__(self, alert=..., badge=..., sound=..., content_available=..., category=..., thread_id=..., mutable_content=..., custom_data=...) -> None: - ... - - - -class CriticalSound: - """Critical alert sound configuration that can be included in ``messaging.Aps``. - - Args: - name: The name of a sound file in your app's main bundle or in the ``Library/Sounds`` - folder of your app's container directory. Specify the string ``default`` to play the - system sound. - critical: Set to ``True`` to set the critical alert flag on the sound configuration - (optional). - volume: The volume for the critical alert's sound. Must be a value between 0.0 (silent) - and 1.0 (full volume) (optional). - """ - def __init__(self, name, critical=..., volume=...) -> None: - ... - - - -class ApsAlert: - """An alert that can be included in ``messaging.Aps``. - - Args: - title: Title of the alert (optional). If specified, overrides the title set via - ``messaging.Notification``. - subtitle: Subtitle of the alert (optional). - body: Body of the alert (optional). If specified, overrides the body set via - ``messaging.Notification``. - loc_key: Key of the body string in the app's string resources to use to localize the - body text (optional). 
- loc_args: A list of resource keys that will be used in place of the format specifiers - in ``loc_key`` (optional). - title_loc_key: Key of the title string in the app's string resources to use to localize the - title text (optional). - title_loc_args: A list of resource keys that will be used in place of the format specifiers - in ``title_loc_key`` (optional). - action_loc_key: Key of the text in the app's string resources to use to localize the - action button text (optional). - launch_image: Image for the notification action (optional). - custom_data: A dict of custom key-value pairs to be included in the ApsAlert dictionary - (optional) - """ - def __init__(self, title=..., subtitle=..., body=..., loc_key=..., loc_args=..., title_loc_key=..., title_loc_args=..., action_loc_key=..., launch_image=..., custom_data=...) -> None: - ... - - - -class APNSFCMOptions: - """Options for features provided by the FCM SDK for iOS. - - Args: - analytics_label: contains additional options for features provided by the FCM iOS SDK - (optional). - image: contains the URL of an image that is going to be displayed in a notification - (optional). - """ - def __init__(self, analytics_label=..., image=...) -> None: - ... - - - -class FCMOptions: - """Options for features provided by SDK. - - Args: - analytics_label: contains additional options to use across all platforms (optional). - """ - def __init__(self, analytics_label=...) -> None: - ... - - - -class ThirdPartyAuthError(exceptions.UnauthenticatedError): - """APNs certificate or web push auth key was invalid or missing.""" - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -class QuotaExceededError(exceptions.ResourceExhaustedError): - """Sending limit exceeded for the message target.""" - def __init__(self, message, cause=..., http_response=...) -> None: - ... 
- - - -class SenderIdMismatchError(exceptions.PermissionDeniedError): - """The authenticated sender ID is different from the sender ID for the registration token.""" - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -class UnregisteredError(exceptions.NotFoundError): - """App instance was unregistered from FCM. - - This usually means that the token used is no longer valid and a new one must be used.""" - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - diff --git a/typings/firebase_admin/_rfc3339.pyi b/typings/firebase_admin/_rfc3339.pyi deleted file mode 100644 index ff017f7..0000000 --- a/typings/firebase_admin/_rfc3339.pyi +++ /dev/null @@ -1,24 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -"""Parse RFC3339 date strings""" -def parse_to_epoch(datestr): # -> float: - """Parse an RFC3339 date string and return the number of seconds since the - epoch (as a float). - - In particular, this method is meant to parse the strings returned by the - JSON mapping of protobuf google.protobuf.timestamp.Timestamp instances: - https://github.com/protocolbuffers/protobuf/blob/4cf5bfee9546101d98754d23ff378ff718ba8438/src/google/protobuf/timestamp.proto#L99 - - This method has microsecond precision; nanoseconds will be truncated. - - Args: - datestr: A string in RFC3339 format. - Returns: - Float: The number of seconds since the Unix epoch. - Raises: - ValueError: Raised if the `datestr` is not a valid RFC3339 date string. - """ - ... - diff --git a/typings/firebase_admin/_sseclient.pyi b/typings/firebase_admin/_sseclient.pyi deleted file mode 100644 index 00b97e1..0000000 --- a/typings/firebase_admin/_sseclient.pyi +++ /dev/null @@ -1,92 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -import requests -from google.auth import transport - -"""SSEClient module to stream realtime updates from the Firebase Database. - -Based on a similar implementation from Pyrebase. 
-""" -end_of_field = ... -class KeepAuthSession(transport.requests.AuthorizedSession): - """A session that does not drop authentication on redirects between domains.""" - def __init__(self, credential) -> None: - ... - - def rebuild_auth(self, prepared_request, response): # -> None: - ... - - - -class _EventBuffer: - """A helper class for buffering and parsing raw SSE data.""" - def __init__(self) -> None: - ... - - def append(self, char): # -> None: - ... - - def truncate(self): # -> None: - ... - - @property - def is_end_of_field(self): # -> bool: - ... - - @property - def buffer_string(self): # -> str: - ... - - - -class SSEClient: - """SSE client implementation.""" - def __init__(self, url, session, retry=..., **kwargs) -> None: - """Initializes the SSEClient. - - Args: - url: The remote url to connect to. - session: The requests session. - retry: The retry interval in milliseconds (optional). - **kwargs: Extra kwargs that will be sent to ``requests.get()`` (optional). - """ - ... - - def close(self): # -> None: - """Closes the SSEClient instance.""" - ... - - def __iter__(self): # -> Self: - ... - - def __next__(self): # -> Event | None: - ... - - def next(self): # -> Event | None: - ... - - - -class Event: - """Event represents the events fired by SSE.""" - sse_line_pattern = ... - def __init__(self, data=..., event_type=..., event_id=..., retry=...) -> None: - ... - - @classmethod - def parse(cls, raw): # -> Self: - """Given a possibly-multiline string representing an SSE message, parses it - and returns an Event object. - - Args: - raw: the raw data to parse. - - Returns: - Event: A new ``Event`` with the parameters initialized. - """ - ... - - - diff --git a/typings/firebase_admin/_token_gen.pyi b/typings/firebase_admin/_token_gen.pyi deleted file mode 100644 index fe6cc5d..0000000 --- a/typings/firebase_admin/_token_gen.pyi +++ /dev/null @@ -1,177 +0,0 @@ -""" -This type stub file was generated by pyright. 
-""" - -from google.auth import transport -from firebase_admin import _auth_utils, exceptions - -"""Firebase token minting and validation sub module.""" -ID_TOKEN_ISSUER_PREFIX = ... -ID_TOKEN_CERT_URI = ... -COOKIE_ISSUER_PREFIX = ... -COOKIE_CERT_URI = ... -MIN_SESSION_COOKIE_DURATION_SECONDS = ... -MAX_SESSION_COOKIE_DURATION_SECONDS = ... -MAX_TOKEN_LIFETIME_SECONDS = ... -FIREBASE_AUDIENCE = ... -RESERVED_CLAIMS = ... -METADATA_SERVICE_URL = ... -ALGORITHM_RS256 = ... -ALGORITHM_NONE = ... -AUTH_EMULATOR_EMAIL = ... -class _EmulatedSigner(google.auth.crypt.Signer): - key_id = ... - def __init__(self) -> None: - ... - - def sign(self, message): # -> Literal[b""]: - ... - - - -class _SigningProvider: - """Stores a reference to a google.auth.crypto.Signer.""" - def __init__(self, signer, signer_email, alg=...) -> None: - ... - - @property - def signer(self): # -> Any: - ... - - @property - def signer_email(self): # -> Any: - ... - - @property - def alg(self): # -> str: - ... - - @classmethod - def from_credential(cls, google_cred): # -> _SigningProvider: - ... - - @classmethod - def from_iam(cls, request, google_cred, service_account): # -> _SigningProvider: - ... - - @classmethod - def for_emulator(cls): # -> _SigningProvider: - ... - - - -class TokenGenerator: - """Generates custom tokens and session cookies.""" - ID_TOOLKIT_URL = ... - def __init__(self, app, http_client, url_override=...) -> None: - ... - - @property - def signing_provider(self): # -> _SigningProvider: - """Initializes and returns the SigningProvider instance to be used.""" - ... - - def create_custom_token(self, uid, developer_claims=..., tenant_id=...): # -> bytes: - """Builds and signs a Firebase custom auth token.""" - ... - - def create_session_cookie(self, id_token, expires_in): - """Creates a session cookie from the provided ID token.""" - ... - - - -class CertificateFetchRequest(transport.Request): - """A google-auth transport that supports HTTP cache-control. 
- - Also injects a timeout to each outgoing HTTP request. - """ - def __init__(self, timeout_seconds=...) -> None: - ... - - @property - def session(self): # -> Session: - ... - - @property - def timeout_seconds(self): # -> None: - ... - - def __call__(self, url, method=..., body=..., headers=..., timeout=..., **kwargs): - ... - - - -class TokenVerifier: - """Verifies ID tokens and session cookies.""" - def __init__(self, app) -> None: - ... - - def verify_id_token(self, id_token, clock_skew_seconds=...): # -> Any | Mapping[str, Any]: - ... - - def verify_session_cookie(self, cookie, clock_skew_seconds=...): # -> Any | Mapping[str, Any]: - ... - - - -class _JWTVerifier: - """Verifies Firebase JWTs (ID tokens or session cookies).""" - def __init__(self, **kwargs) -> None: - ... - - def verify(self, token, request, clock_skew_seconds=...): # -> Any | Mapping[str, Any]: - """Verifies the signature and data for the provided JWT.""" - ... - - - -class TokenSignError(exceptions.UnknownError): - """Unexpected error while signing a Firebase custom token.""" - def __init__(self, message, cause) -> None: - ... - - - -class CertificateFetchError(exceptions.UnknownError): - """Failed to fetch some public key certificates required to verify a token.""" - def __init__(self, message, cause) -> None: - ... - - - -class ExpiredIdTokenError(_auth_utils.InvalidIdTokenError): - """The provided ID token is expired.""" - def __init__(self, message, cause) -> None: - ... - - - -class RevokedIdTokenError(_auth_utils.InvalidIdTokenError): - """The provided ID token has been revoked.""" - def __init__(self, message) -> None: - ... - - - -class InvalidSessionCookieError(exceptions.InvalidArgumentError): - """The provided string is not a valid Firebase session cookie.""" - def __init__(self, message, cause=...) -> None: - ... - - - -class ExpiredSessionCookieError(InvalidSessionCookieError): - """The provided session cookie is expired.""" - def __init__(self, message, cause) -> None: - ... 
- - - -class RevokedSessionCookieError(InvalidSessionCookieError): - """The provided session cookie has been revoked.""" - def __init__(self, message) -> None: - ... - - - diff --git a/typings/firebase_admin/_user_identifier.pyi b/typings/firebase_admin/_user_identifier.pyi deleted file mode 100644 index c852707..0000000 --- a/typings/firebase_admin/_user_identifier.pyi +++ /dev/null @@ -1,91 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -"""Classes to uniquely identify a user.""" -class UserIdentifier: - """Identifies a user to be looked up.""" - ... - - -class UidIdentifier(UserIdentifier): - """Used for looking up an account by uid. - - See ``auth.get_user()``. - """ - def __init__(self, uid) -> None: - """Constructs a new `UidIdentifier` object. - - Args: - uid: A user ID string. - """ - ... - - @property - def uid(self): # -> str | None: - ... - - - -class EmailIdentifier(UserIdentifier): - """Used for looking up an account by email. - - See ``auth.get_user()``. - """ - def __init__(self, email) -> None: - """Constructs a new `EmailIdentifier` object. - - Args: - email: A user email address string. - """ - ... - - @property - def email(self): # -> str | None: - ... - - - -class PhoneIdentifier(UserIdentifier): - """Used for looking up an account by phone number. - - See ``auth.get_user()``. - """ - def __init__(self, phone_number) -> None: - """Constructs a new `PhoneIdentifier` object. - - Args: - phone_number: A phone number string. - """ - ... - - @property - def phone_number(self): # -> str | None: - ... - - - -class ProviderIdentifier(UserIdentifier): - """Used for looking up an account by provider. - - See ``auth.get_user()``. - """ - def __init__(self, provider_id, provider_uid) -> None: - """Constructs a new `ProviderIdentifier` object. - -   Args: -     provider_id: A provider ID string. -     provider_uid: A provider UID string. - """ - ... - - @property - def provider_id(self): # -> str | None: - ... 
- - @property - def provider_uid(self): # -> str | None: - ... - - - diff --git a/typings/firebase_admin/_user_import.pyi b/typings/firebase_admin/_user_import.pyi deleted file mode 100644 index 3d8e677..0000000 --- a/typings/firebase_admin/_user_import.pyi +++ /dev/null @@ -1,405 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -"""Firebase user import sub module.""" -def b64_encode(bytes_value): # -> str: - ... - -class UserProvider: - """Represents a user identity provider that can be associated with a Firebase user. - - One or more providers can be specified in an ``ImportUserRecord`` when importing users via - ``auth.import_users()``. - - Args: - uid: User's unique ID assigned by the identity provider. - provider_id: ID of the identity provider. This can be a short domain name or the identifier - of an OpenID identity provider. - email: User's email address (optional). - display_name: User's display name (optional). - photo_url: User's photo URL (optional). - """ - def __init__(self, uid, provider_id, email=..., display_name=..., photo_url=...) -> None: - ... - - @property - def uid(self): # -> str | None: - ... - - @uid.setter - def uid(self, uid): # -> None: - ... - - @property - def provider_id(self): # -> str | None: - ... - - @provider_id.setter - def provider_id(self, provider_id): # -> None: - ... - - @property - def email(self): # -> str | None: - ... - - @email.setter - def email(self, email): # -> None: - ... - - @property - def display_name(self): # -> str | None: - ... - - @display_name.setter - def display_name(self, display_name): # -> None: - ... - - @property - def photo_url(self): # -> str | None: - ... - - @photo_url.setter - def photo_url(self, photo_url): # -> None: - ... - - def to_dict(self): # -> dict[str, str]: - ... - - - -class ImportUserRecord: - """Represents a user account to be imported to Firebase Auth. - - Must specify the ``uid`` field at a minimum. 
A sequence of ``ImportUserRecord`` objects can be - passed to the ``auth.import_users()`` function, in order to import those users into Firebase - Auth in bulk. If the ``password_hash`` is set on a user, a hash configuration must be - specified when calling ``import_users()``. - - Args: - uid: User's unique ID. Must be a non-empty string not longer than 128 characters. - email: User's email address (optional). - email_verified: A boolean indicating whether the user's email has been verified (optional). - display_name: User's display name (optional). - phone_number: User's phone number (optional). - photo_url: User's photo URL (optional). - disabled: A boolean indicating whether this user account has been disabled (optional). - user_metadata: An ``auth.UserMetadata`` instance with additional user metadata (optional). - provider_data: A list of ``auth.UserProvider`` instances (optional). - custom_claims: A ``dict`` of custom claims to be set on the user account (optional). - password_hash: User's password hash as a ``bytes`` sequence (optional). - password_salt: User's password salt as a ``bytes`` sequence (optional). - - Raises: - ValueError: If provided arguments are invalid. - """ - def __init__(self, uid, email=..., email_verified=..., display_name=..., phone_number=..., photo_url=..., disabled=..., user_metadata=..., provider_data=..., custom_claims=..., password_hash=..., password_salt=...) -> None: - ... - - @property - def uid(self): # -> str | None: - ... - - @uid.setter - def uid(self, uid): # -> None: - ... - - @property - def email(self): # -> str | None: - ... - - @email.setter - def email(self, email): # -> None: - ... - - @property - def display_name(self): # -> str | None: - ... - - @display_name.setter - def display_name(self, display_name): # -> None: - ... - - @property - def phone_number(self): # -> str | None: - ... - - @phone_number.setter - def phone_number(self, phone_number): # -> None: - ... 
- - @property - def photo_url(self): # -> str | None: - ... - - @photo_url.setter - def photo_url(self, photo_url): # -> None: - ... - - @property - def password_hash(self): # -> bytes | None: - ... - - @password_hash.setter - def password_hash(self, password_hash): # -> None: - ... - - @property - def password_salt(self): # -> bytes | None: - ... - - @password_salt.setter - def password_salt(self, password_salt): # -> None: - ... - - @property - def user_metadata(self): - ... - - @user_metadata.setter - def user_metadata(self, user_metadata): # -> None: - ... - - @property - def provider_data(self): - ... - - @provider_data.setter - def provider_data(self, provider_data): # -> None: - ... - - @property - def custom_claims(self): # -> dict[Any, Any]: - ... - - @custom_claims.setter - def custom_claims(self, custom_claims): # -> None: - ... - - def to_dict(self): # -> dict[str, Any]: - """Returns a dict representation of the user. For internal use only.""" - ... - - - -class UserImportHash: - """Represents a hash algorithm used to hash user passwords. - - An instance of this class must be specified when importing users with passwords via the - ``auth.import_users()`` API. Use one of the provided class methods to obtain new - instances when required. Refer to `documentation`_ for more details. - - .. _documentation: https://firebase.google.com/docs/auth/admin/import-users - """ - def __init__(self, name, data=...) -> None: - ... - - def to_dict(self): # -> dict[str, Any]: - ... - - @classmethod - def hmac_sha512(cls, key): # -> UserImportHash: - """Creates a new HMAC SHA512 algorithm instance. - - Args: - key: Signer key as a byte sequence. - - Returns: - UserImportHash: A new ``UserImportHash``. - """ - ... - - @classmethod - def hmac_sha256(cls, key): # -> UserImportHash: - """Creates a new HMAC SHA256 algorithm instance. - - Args: - key: Signer key as a byte sequence. - - Returns: - UserImportHash: A new ``UserImportHash``. - """ - ... 
- - @classmethod - def hmac_sha1(cls, key): # -> UserImportHash: - """Creates a new HMAC SHA1 algorithm instance. - - Args: - key: Signer key as a byte sequence. - - Returns: - UserImportHash: A new ``UserImportHash``. - """ - ... - - @classmethod - def hmac_md5(cls, key): # -> UserImportHash: - """Creates a new HMAC MD5 algorithm instance. - - Args: - key: Signer key as a byte sequence. - - Returns: - UserImportHash: A new ``UserImportHash``. - """ - ... - - @classmethod - def md5(cls, rounds): # -> UserImportHash: - """Creates a new MD5 algorithm instance. - - Args: - rounds: Number of rounds. Must be an integer between 0 and 8192. - - Returns: - UserImportHash: A new ``UserImportHash``. - """ - ... - - @classmethod - def sha1(cls, rounds): # -> UserImportHash: - """Creates a new SHA1 algorithm instance. - - Args: - rounds: Number of rounds. Must be an integer between 1 and 8192. - - Returns: - UserImportHash: A new ``UserImportHash``. - """ - ... - - @classmethod - def sha256(cls, rounds): # -> UserImportHash: - """Creates a new SHA256 algorithm instance. - - Args: - rounds: Number of rounds. Must be an integer between 1 and 8192. - - Returns: - UserImportHash: A new ``UserImportHash``. - """ - ... - - @classmethod - def sha512(cls, rounds): # -> UserImportHash: - """Creates a new SHA512 algorithm instance. - - Args: - rounds: Number of rounds. Must be an integer between 1 and 8192. - - Returns: - UserImportHash: A new ``UserImportHash``. - """ - ... - - @classmethod - def pbkdf_sha1(cls, rounds): # -> UserImportHash: - """Creates a new PBKDF SHA1 algorithm instance. - - Args: - rounds: Number of rounds. Must be an integer between 0 and 120000. - - Returns: - UserImportHash: A new ``UserImportHash``. - """ - ... - - @classmethod - def pbkdf2_sha256(cls, rounds): # -> UserImportHash: - """Creates a new PBKDF2 SHA256 algorithm instance. - - Args: - rounds: Number of rounds. Must be an integer between 0 and 120000. 
- - Returns: - UserImportHash: A new ``UserImportHash``. - """ - ... - - @classmethod - def scrypt(cls, key, rounds, memory_cost, salt_separator=...): # -> UserImportHash: - """Creates a new Scrypt algorithm instance. - - This is the modified Scrypt algorithm used by Firebase Auth. See ``standard_scrypt()`` - function for the standard Scrypt algorith, - - Args: - key: Signer key as a byte sequence. - rounds: Number of rounds. Must be an integer between 1 and 8. - memory_cost: Memory cost as an integer between 1 and 14. - salt_separator: Salt separator as a byte sequence (optional). - - Returns: - UserImportHash: A new ``UserImportHash``. - """ - ... - - @classmethod - def bcrypt(cls): # -> UserImportHash: - """Creates a new Bcrypt algorithm instance. - - Returns: - UserImportHash: A new ``UserImportHash``. - """ - ... - - @classmethod - def standard_scrypt(cls, memory_cost, parallelization, block_size, derived_key_length): # -> UserImportHash: - """Creates a new standard Scrypt algorithm instance. - - Args: - memory_cost: CPU Memory cost as a non-negative integer. - parallelization: Parallelization as a non-negative integer. - block_size: Block size as a non-negative integer. - derived_key_length: Derived key length as a non-negative integer. - - Returns: - UserImportHash: A new ``UserImportHash``. - """ - ... - - - -class ErrorInfo: - """Represents an error encountered while performing a batch operation such - as importing users or deleting multiple user accounts. - """ - def __init__(self, error) -> None: - ... - - @property - def index(self): - ... - - @property - def reason(self): - ... - - - -class UserImportResult: - """Represents the result of a bulk user import operation. - - See ``auth.import_users()`` API for more details. - """ - def __init__(self, result, total) -> None: - ... - - @property - def success_count(self): - """Returns the number of users successfully imported.""" - ... 
- - @property - def failure_count(self): # -> int: - """Returns the number of users that failed to be imported.""" - ... - - @property - def errors(self): # -> list[ErrorInfo]: - """Returns a list of ``auth.ErrorInfo`` instances describing the errors encountered.""" - ... - - - diff --git a/typings/firebase_admin/_user_mgt.pyi b/typings/firebase_admin/_user_mgt.pyi deleted file mode 100644 index 11ce7a0..0000000 --- a/typings/firebase_admin/_user_mgt.pyi +++ /dev/null @@ -1,527 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from firebase_admin import _auth_utils - -"""Firebase user management sub module.""" -MAX_LIST_USERS_RESULTS = ... -MAX_IMPORT_USERS_SIZE = ... -B64_REDACTED = ... -class Sentinel: - def __init__(self, description) -> None: - ... - - - -DELETE_ATTRIBUTE = ... -class UserMetadata: - """Contains additional metadata associated with a user account.""" - def __init__(self, creation_timestamp=..., last_sign_in_timestamp=..., last_refresh_timestamp=...) -> None: - ... - - @property - def creation_timestamp(self): # -> int | None: - """ Creation timestamp in milliseconds since the epoch. - - Returns: - integer: The user creation timestamp in milliseconds since the epoch. - """ - ... - - @property - def last_sign_in_timestamp(self): # -> int | None: - """ Last sign in timestamp in milliseconds since the epoch. - - Returns: - integer: The last sign in timestamp in milliseconds since the epoch. - """ - ... - - @property - def last_refresh_timestamp(self): # -> int | None: - """The time at which the user was last active (ID token refreshed). - - Returns: - integer: Milliseconds since epoch timestamp, or `None` if the user was - never active. - """ - ... - - - -class UserInfo: - """A collection of standard profile information for a user. - - Used to expose profile information returned by an identity provider. - """ - @property - def uid(self): - """Returns the user ID of this user.""" - ... 
- - @property - def display_name(self): - """Returns the display name of this user.""" - ... - - @property - def email(self): - """Returns the email address associated with this user.""" - ... - - @property - def phone_number(self): - """Returns the phone number associated with this user.""" - ... - - @property - def photo_url(self): - """Returns the photo URL of this user.""" - ... - - @property - def provider_id(self): - """Returns the ID of the identity provider. - - This can be a short domain name (e.g. google.com), or the identity of an OpenID - identity provider. - """ - ... - - - -class UserRecord(UserInfo): - """Contains metadata associated with a Firebase user account.""" - def __init__(self, data) -> None: - ... - - @property - def uid(self): # -> None: - """Returns the user ID of this user. - - Returns: - string: A user ID string. This value is never None or empty. - """ - ... - - @property - def display_name(self): # -> None: - """Returns the display name of this user. - - Returns: - string: A display name string or None. - """ - ... - - @property - def email(self): # -> None: - """Returns the email address associated with this user. - - Returns: - string: An email address string or None. - """ - ... - - @property - def phone_number(self): # -> None: - """Returns the phone number associated with this user. - - Returns: - string: A phone number string or None. - """ - ... - - @property - def photo_url(self): # -> None: - """Returns the photo URL of this user. - - Returns: - string: A URL string or None. - """ - ... - - @property - def provider_id(self): # -> Literal['firebase']: - """Returns the provider ID of this user. - - Returns: - string: A constant provider ID value. - """ - ... - - @property - def email_verified(self): # -> bool: - """Returns whether the email address of this user has been verified. - - Returns: - bool: True if the email has been verified, and False otherwise. - """ - ... 
- - @property - def disabled(self): # -> bool: - """Returns whether this user account is disabled. - - Returns: - bool: True if the user account is disabled, and False otherwise. - """ - ... - - @property - def tokens_valid_after_timestamp(self): # -> int: - """Returns the time, in milliseconds since the epoch, before which tokens are invalid. - - Note: this is truncated to 1 second accuracy. - - Returns: - int: Timestamp in milliseconds since the epoch, truncated to the second. - All tokens issued before that time are considered revoked. - """ - ... - - @property - def user_metadata(self): # -> UserMetadata: - """Returns additional metadata associated with this user. - - Returns: - UserMetadata: A UserMetadata instance. Does not return None. - """ - ... - - @property - def provider_data(self): # -> list[ProviderUserInfo]: - """Returns a list of UserInfo instances. - - Each object represents an identity from an identity provider that is linked to this user. - - Returns: - list: A list of UserInfo objects, which may be empty. - """ - ... - - @property - def custom_claims(self): # -> Any | None: - """Returns any custom claims set on this user account. - - Returns: - dict: A dictionary of claims or None. - """ - ... - - @property - def tenant_id(self): # -> None: - """Returns the tenant ID of this user. - - Returns: - string: A tenant ID string or None. - """ - ... - - - -class ExportedUserRecord(UserRecord): - """Contains metadata associated with a user including password hash and salt.""" - @property - def password_hash(self): # -> None: - """The user's password hash as a base64-encoded string. - - If the Firebase Auth hashing algorithm (SCRYPT) was used to create the user account, this - is the base64-encoded password hash of the user. If a different hashing algorithm was - used to create this user, as is typical when migrating from another Auth system, this - is an empty string. 
If no password is set, or if the service account doesn't have permission - to read the password, then this is ``None``. - """ - ... - - @property - def password_salt(self): # -> None: - """The user's password salt as a base64-encoded string. - - If the Firebase Auth hashing algorithm (SCRYPT) was used to create the user account, this - is the base64-encoded password salt of the user. If a different hashing algorithm was - used to create this user, as is typical when migrating from another Auth system, this is - an empty string. If no password is set, or if the service account doesn't have permission to - read the password, then this is ``None``. - """ - ... - - - -class GetUsersResult: - """Represents the result of the ``auth.get_users()`` API.""" - def __init__(self, users, not_found) -> None: - """Constructs a `GetUsersResult` object. - - Args: - users: List of `UserRecord` instances. - not_found: List of `UserIdentifier` instances. - """ - ... - - @property - def users(self): # -> Any: - """Set of `UserRecord` instances, corresponding to the set of users - that were requested. Only users that were found are listed here. The - result set is unordered. - """ - ... - - @property - def not_found(self): # -> Any: - """Set of `UserIdentifier` instances that were requested, but not - found. - """ - ... - - - -class ListUsersPage: - """Represents a page of user records exported from a Firebase project. - - Provides methods for traversing the user accounts included in this page, as well as retrieving - subsequent pages of users. The iterator returned by ``iterate_all()`` can be used to iterate - through all users in the Firebase project starting from this page. - """ - def __init__(self, download, page_token, max_results) -> None: - ... - - @property - def users(self): # -> list[ExportedUserRecord]: - """A list of ``ExportedUserRecord`` instances available in this page.""" - ... 
- - @property - def next_page_token(self): - """Page token string for the next page (empty string indicates no more pages).""" - ... - - @property - def has_next_page(self): # -> bool: - """A boolean indicating whether more pages are available.""" - ... - - def get_next_page(self): # -> ListUsersPage | None: - """Retrieves the next page of user accounts, if available. - - Returns: - ListUsersPage: Next page of users, or None if this is the last page. - """ - ... - - def iterate_all(self): # -> _UserIterator: - """Retrieves an iterator for user accounts. - - Returned iterator will iterate through all the user accounts in the Firebase project - starting from this page. The iterator will never buffer more than one page of users - in memory at a time. - - Returns: - iterator: An iterator of ExportedUserRecord instances. - """ - ... - - - -class DeleteUsersResult: - """Represents the result of the ``auth.delete_users()`` API.""" - def __init__(self, result, total) -> None: - """Constructs a `DeleteUsersResult` object. - - Args: - result: The proto response, wrapped in a - `BatchDeleteAccountsResponse` instance. - total: Total integer number of deletion attempts. - """ - ... - - @property - def success_count(self): - """Returns the number of users that were deleted successfully (possibly - zero). - - Users that did not exist prior to calling `delete_users()` are - considered to be successfully deleted. - """ - ... - - @property - def failure_count(self): # -> int: - """Returns the number of users that failed to be deleted (possibly - zero). - """ - ... - - @property - def errors(self): - """A list of `auth.ErrorInfo` instances describing the errors that - were encountered during the deletion. Length of this list is equal to - `failure_count`. - """ - ... - - - -class BatchDeleteAccountsResponse: - """Represents the results of a `delete_users()` call.""" - def __init__(self, errors=...) 
-> None: - """Constructs a `BatchDeleteAccountsResponse` instance, corresponding to - the JSON representing the `BatchDeleteAccountsResponse` proto. - - Args: - errors: List of dictionaries, with each dictionary representing an - `ErrorInfo` instance as returned by the server. `None` implies - an empty list. - """ - ... - - - -class ProviderUserInfo(UserInfo): - """Contains metadata regarding how a user is known by a particular identity provider.""" - def __init__(self, data) -> None: - ... - - @property - def uid(self): # -> None: - ... - - @property - def display_name(self): # -> None: - ... - - @property - def email(self): # -> None: - ... - - @property - def phone_number(self): # -> None: - ... - - @property - def photo_url(self): # -> None: - ... - - @property - def provider_id(self): # -> None: - ... - - - -class ActionCodeSettings: - """Contains required continue/state URL with optional Android and iOS settings. - Used when invoking the email action link generation APIs. - """ - def __init__(self, url, handle_code_in_app=..., dynamic_link_domain=..., ios_bundle_id=..., android_package_name=..., android_install_app=..., android_minimum_version=...) -> None: - ... - - - -def encode_action_code_settings(settings): # -> dict[Any, Any]: - """ Validates the provided action code settings for email link generation and - populates the REST api parameters. - - settings - ``ActionCodeSettings`` object provided to be encoded - returns - dict of parameters to be passed for link gereration. - """ - ... - -class UserManager: - """Provides methods for interacting with the Google Identity Toolkit.""" - ID_TOOLKIT_URL = ... - def __init__(self, http_client, project_id, tenant_id=..., url_override=...) -> None: - ... - - def get_user(self, **kwargs): - """Gets the user data corresponding to the provided key.""" - ... - - def get_users(self, identifiers): # -> list[Any]: - """Looks up multiple users by their identifiers (uid, email, etc.) 
- - Args: - identifiers: UserIdentifier[]: The identifiers indicating the user - to be looked up. Must have <= 100 entries. - - Returns: - list[dict[string, string]]: List of dicts representing the JSON - `UserInfo` responses from the server. - - Raises: - ValueError: If any of the identifiers are invalid or if more than - 100 identifiers are specified. - UnexpectedResponseError: If the backend server responds with an - unexpected message. - """ - ... - - def list_users(self, page_token=..., max_results=...): - """Retrieves a batch of users.""" - ... - - def create_user(self, uid=..., display_name=..., email=..., phone_number=..., photo_url=..., password=..., disabled=..., email_verified=...): - """Creates a new user account with the specified properties.""" - ... - - def update_user(self, uid, display_name=..., email=..., phone_number=..., photo_url=..., password=..., disabled=..., email_verified=..., valid_since=..., custom_claims=..., providers_to_delete=...): - """Updates an existing user account with the specified properties""" - ... - - def delete_user(self, uid): # -> None: - """Deletes the user identified by the specified user ID.""" - ... - - def delete_users(self, uids, force_delete=...): # -> BatchDeleteAccountsResponse: - """Deletes the users identified by the specified user ids. - - Args: - uids: A list of strings indicating the uids of the users to be deleted. - Must have <= 1000 entries. - force_delete: Optional parameter that indicates if users should be - deleted, even if they're not disabled. Defaults to False. - - - Returns: - BatchDeleteAccountsResponse: Server's proto response, wrapped in a - python object. - - Raises: - ValueError: If any of the identifiers are invalid or if more than 1000 - identifiers are specified. - UnexpectedResponseError: If the backend server responds with an - unexpected message. - """ - ... 
- - def import_users(self, users, hash_alg=...): # -> dict[Any, Any]: - """Imports the given list of users to Firebase Auth.""" - ... - - def generate_email_action_link(self, action_type, email, action_code_settings=...): - """Fetches the email action links for types - - Args: - action_type: String. Valid values ['VERIFY_EMAIL', 'EMAIL_SIGNIN', 'PASSWORD_RESET'] - email: Email of the user for which the action is performed - action_code_settings: ``ActionCodeSettings`` object or dict (optional). Defines whether - the link is to be handled by a mobile app and the additional state information to be - passed in the deep link, etc. - Returns: - link_url: action url to be emailed to the user - - Raises: - UnexpectedResponseError: If the backend server responds with an unexpected message - FirebaseError: If an error occurs while generating the link - ValueError: If the provided arguments are invalid - """ - ... - - - -class _UserIterator(_auth_utils.PageIterator): - @property - def items(self): - ... - - - diff --git a/typings/firebase_admin/_utils.pyi b/typings/firebase_admin/_utils.pyi deleted file mode 100644 index fee37e5..0000000 --- a/typings/firebase_admin/_utils.pyi +++ /dev/null @@ -1,77 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -import google.auth - -"""Internal utilities common to all modules.""" -_ERROR_CODE_TO_EXCEPTION_TYPE = ... -_HTTP_STATUS_TO_ERROR_CODE = ... -_RPC_CODE_TO_ERROR_CODE = ... -def get_metrics_header(): # -> str: - ... - -def get_app_service(app, name, initializer): - ... - -def handle_platform_error_from_requests(error, handle_func=...): # -> DeadlineExceededError | UnavailableError | UnknownError: - """Constructs a ``FirebaseError`` from the given requests error. - - This can be used to handle errors returned by Google Cloud Platform (GCP) APIs. - - Args: - error: An error raised by the requests module while making an HTTP call to a GCP API. 
- handle_func: A function that can be used to handle platform errors in a custom way. When - specified, this function will be called with three arguments. It has the same - signature as ```_handle_func_requests``, but may return ``None``. - - Returns: - FirebaseError: A ``FirebaseError`` that can be raised to the user code. - """ - ... - -def handle_operation_error(error): # -> UnknownError: - """Constructs a ``FirebaseError`` from the given operation error. - - Args: - error: An error returned by a long running operation. - - Returns: - FirebaseError: A ``FirebaseError`` that can be raised to the user code. - """ - ... - -def handle_requests_error(error, message=..., code=...): # -> DeadlineExceededError | UnavailableError | UnknownError: - """Constructs a ``FirebaseError`` from the given requests error. - - This method is agnostic of the remote service that produced the error, whether it is a GCP - service or otherwise. Therefore, this method does not attempt to parse the error response in - any way. - - Args: - error: An error raised by the requests module while making an HTTP call. - message: A message to be included in the resulting ``FirebaseError`` (optional). If not - specified the string representation of the ``error`` argument is used as the message. - code: A GCP error code that will be used to determine the resulting error type (optional). - If not specified the HTTP status code on the error response is used to determine a - suitable error code. - - Returns: - FirebaseError: A ``FirebaseError`` that can be raised to the user code. - """ - ... - -class EmulatorAdminCredentials(google.auth.credentials.Credentials): - """ Credentials for use with the firebase local emulator. - - This is used instead of user-supplied credentials or ADC. It will silently do nothing when - asked to refresh credentials. - """ - def __init__(self) -> None: - ... - - def refresh(self, request): # -> None: - ... 
- - - diff --git a/typings/firebase_admin/app_check.pyi b/typings/firebase_admin/app_check.pyi deleted file mode 100644 index 93efe2f..0000000 --- a/typings/firebase_admin/app_check.pyi +++ /dev/null @@ -1,54 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from typing import Any, Dict - -"""Firebase App Check module.""" -_APP_CHECK_ATTRIBUTE = ... -def verify_token(token: str, app=...) -> Dict[str, Any]: - """Verifies a Firebase App Check token. - - Args: - token: A token from App Check. - app: An App instance (optional). - - Returns: - Dict[str, Any]: The token's decoded claims. - - Raises: - ValueError: If the app's ``project_id`` is invalid or unspecified, - or if the token's headers or payload are invalid. - PyJWKClientError: If PyJWKClient fails to fetch a valid signing key. - """ - ... - -class _AppCheckService: - """Service class that implements Firebase App Check functionality.""" - _APP_CHECK_ISSUER = ... - _JWKS_URL = ... - _project_id = ... - _scoped_project_id = ... - _jwks_client = ... - _APP_CHECK_HEADERS = ... - def __init__(self, app) -> None: - ... - - def verify_token(self, token: str) -> Dict[str, Any]: - """Verifies a Firebase App Check token.""" - ... - - - -class _Validators: - """A collection of data validation utilities. - - Methods provided in this class raise ``ValueErrors`` if any validations fail. - """ - @classmethod - def check_string(cls, label: str, value: Any): # -> None: - """Checks if the given value is a string.""" - ... - - - diff --git a/typings/firebase_admin/auth.pyi b/typings/firebase_admin/auth.pyi deleted file mode 100644 index 8ebad28..0000000 --- a/typings/firebase_admin/auth.pyi +++ /dev/null @@ -1,716 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from firebase_admin import _auth_client, _auth_providers, _auth_utils, _token_gen, _user_identifier, _user_import, _user_mgt - -"""Firebase Authentication module. 
- -This module contains functions for minting and verifying JWTs used for -authenticating against Firebase services. It also provides functions for -creating and managing user accounts in Firebase projects. -""" -_AUTH_ATTRIBUTE = ... -__all__ = ['ActionCodeSettings', 'CertificateFetchError', 'Client', 'ConfigurationNotFoundError', 'DELETE_ATTRIBUTE', 'EmailAlreadyExistsError', 'EmailNotFoundError', 'ErrorInfo', 'ExpiredIdTokenError', 'ExpiredSessionCookieError', 'ExportedUserRecord', 'DeleteUsersResult', 'GetUsersResult', 'ImportUserRecord', 'InsufficientPermissionError', 'InvalidDynamicLinkDomainError', 'InvalidIdTokenError', 'InvalidSessionCookieError', 'ListProviderConfigsPage', 'ListUsersPage', 'OIDCProviderConfig', 'PhoneNumberAlreadyExistsError', 'ProviderConfig', 'ResetPasswordExceedLimitError', 'RevokedIdTokenError', 'RevokedSessionCookieError', 'SAMLProviderConfig', 'TokenSignError', 'TooManyAttemptsTryLaterError', 'UidAlreadyExistsError', 'UnexpectedResponseError', 'UserDisabledError', 'UserImportHash', 'UserImportResult', 'UserInfo', 'UserMetadata', 'UserNotFoundError', 'UserProvider', 'UserRecord', 'UserIdentifier', 'UidIdentifier', 'EmailIdentifier', 'PhoneIdentifier', 'ProviderIdentifier', 'create_custom_token', 'create_oidc_provider_config', 'create_saml_provider_config', 'create_session_cookie', 'create_user', 'delete_oidc_provider_config', 'delete_saml_provider_config', 'delete_user', 'delete_users', 'generate_email_verification_link', 'generate_password_reset_link', 'generate_sign_in_with_email_link', 'get_oidc_provider_config', 'get_saml_provider_config', 'get_user', 'get_user_by_email', 'get_user_by_phone_number', 'get_users', 'import_users', 'list_saml_provider_configs', 'list_users', 'revoke_refresh_tokens', 'set_custom_user_claims', 'update_oidc_provider_config', 'update_saml_provider_config', 'update_user', 'verify_id_token', 'verify_session_cookie'] -ActionCodeSettings = _user_mgt.ActionCodeSettings -CertificateFetchError = 
_token_gen.CertificateFetchError -Client = _auth_client.Client -ConfigurationNotFoundError = _auth_utils.ConfigurationNotFoundError -DELETE_ATTRIBUTE = ... -DeleteUsersResult = _user_mgt.DeleteUsersResult -EmailAlreadyExistsError = _auth_utils.EmailAlreadyExistsError -EmailNotFoundError = _auth_utils.EmailNotFoundError -ErrorInfo = _user_import.ErrorInfo -ExpiredIdTokenError = _token_gen.ExpiredIdTokenError -ExpiredSessionCookieError = _token_gen.ExpiredSessionCookieError -ExportedUserRecord = _user_mgt.ExportedUserRecord -GetUsersResult = _user_mgt.GetUsersResult -ImportUserRecord = _user_import.ImportUserRecord -InsufficientPermissionError = _auth_utils.InsufficientPermissionError -InvalidDynamicLinkDomainError = _auth_utils.InvalidDynamicLinkDomainError -InvalidIdTokenError = _auth_utils.InvalidIdTokenError -InvalidSessionCookieError = _token_gen.InvalidSessionCookieError -ListProviderConfigsPage = _auth_providers.ListProviderConfigsPage -ListUsersPage = _user_mgt.ListUsersPage -OIDCProviderConfig = _auth_providers.OIDCProviderConfig -PhoneNumberAlreadyExistsError = _auth_utils.PhoneNumberAlreadyExistsError -ProviderConfig = _auth_providers.ProviderConfig -ResetPasswordExceedLimitError = _auth_utils.ResetPasswordExceedLimitError -RevokedIdTokenError = _token_gen.RevokedIdTokenError -RevokedSessionCookieError = _token_gen.RevokedSessionCookieError -SAMLProviderConfig = _auth_providers.SAMLProviderConfig -TokenSignError = _token_gen.TokenSignError -TooManyAttemptsTryLaterError = _auth_utils.TooManyAttemptsTryLaterError -UidAlreadyExistsError = _auth_utils.UidAlreadyExistsError -UnexpectedResponseError = _auth_utils.UnexpectedResponseError -UserDisabledError = _auth_utils.UserDisabledError -UserImportHash = _user_import.UserImportHash -UserImportResult = _user_import.UserImportResult -UserInfo = _user_mgt.UserInfo -UserMetadata = _user_mgt.UserMetadata -UserNotFoundError = _auth_utils.UserNotFoundError -UserProvider = _user_import.UserProvider -UserRecord = 
_user_mgt.UserRecord -UserIdentifier = _user_identifier.UserIdentifier -UidIdentifier = _user_identifier.UidIdentifier -EmailIdentifier = _user_identifier.EmailIdentifier -PhoneIdentifier = _user_identifier.PhoneIdentifier -ProviderIdentifier = _user_identifier.ProviderIdentifier -def create_custom_token(uid, developer_claims=..., app=...): - """Builds and signs a Firebase custom auth token. - - Args: - uid: ID of the user for whom the token is created. - developer_claims: A dictionary of claims to be included in the token - (optional). - app: An App instance (optional). - - Returns: - bytes: A token minted from the input parameters. - - Raises: - ValueError: If input parameters are invalid. - TokenSignError: If an error occurs while signing the token using the remote IAM service. - """ - ... - -def verify_id_token(id_token, app=..., check_revoked=..., clock_skew_seconds=...): - """Verifies the signature and data for the provided JWT. - - Accepts a signed token string, verifies that it is current, and issued - to this project, and that it was correctly signed by Google. - - Args: - id_token: A string of the encoded JWT. - app: An App instance (optional). - check_revoked: Boolean, If true, checks whether the token has been revoked or - the user disabled (optional). - clock_skew_seconds: The number of seconds to tolerate when checking the token. - Must be between 0-60. Defaults to 0. - Returns: - dict: A dictionary of key-value pairs parsed from the decoded JWT. - - Raises: - ValueError: If ``id_token`` is a not a string or is empty. - InvalidIdTokenError: If ``id_token`` is not a valid Firebase ID token. - ExpiredIdTokenError: If the specified ID token has expired. - RevokedIdTokenError: If ``check_revoked`` is ``True`` and the ID token has been revoked. - CertificateFetchError: If an error occurs while fetching the public key certificates - required to verify the ID token. 
- UserDisabledError: If ``check_revoked`` is ``True`` and the corresponding user - record is disabled. - """ - ... - -def create_session_cookie(id_token, expires_in, app=...): - """Creates a new Firebase session cookie from the given ID token and options. - - The returned JWT can be set as a server-side session cookie with a custom cookie policy. - - Args: - id_token: The Firebase ID token to exchange for a session cookie. - expires_in: Duration until the cookie is expired. This can be specified - as a numeric seconds value or a ``datetime.timedelta`` instance. - app: An App instance (optional). - - Returns: - bytes: A session cookie generated from the input parameters. - - Raises: - ValueError: If input parameters are invalid. - FirebaseError: If an error occurs while creating the cookie. - """ - ... - -def verify_session_cookie(session_cookie, check_revoked=..., app=..., clock_skew_seconds=...): - """Verifies a Firebase session cookie. - - Accepts a session cookie string, verifies that it is current, and issued - to this project, and that it was correctly signed by Google. - - Args: - session_cookie: A session cookie string to verify. - check_revoked: Boolean, if true, checks whether the cookie has been revoked or the - user disabled (optional). - app: An App instance (optional). - clock_skew_seconds: The number of seconds to tolerate when checking the cookie. - - Returns: - dict: A dictionary of key-value pairs parsed from the decoded JWT. - - Raises: - ValueError: If ``session_cookie`` is a not a string or is empty. - InvalidSessionCookieError: If ``session_cookie`` is not a valid Firebase session cookie. - ExpiredSessionCookieError: If the specified session cookie has expired. - RevokedSessionCookieError: If ``check_revoked`` is ``True`` and the cookie has been revoked. - CertificateFetchError: If an error occurs while fetching the public key certificates - required to verify the session cookie. 
- UserDisabledError: If ``check_revoked`` is ``True`` and the corresponding user - record is disabled. - """ - ... - -def revoke_refresh_tokens(uid, app=...): # -> None: - """Revokes all refresh tokens for an existing user. - - This function updates the user's ``tokens_valid_after_timestamp`` to the current UTC - in seconds since the epoch. It is important that the server on which this is called has its - clock set correctly and synchronized. - - While this revokes all sessions for a specified user and disables any new ID tokens for - existing sessions from getting minted, existing ID tokens may remain active until their - natural expiration (one hour). To verify that ID tokens are revoked, use - ``verify_id_token(idToken, check_revoked=True)``. - - Args: - uid: A user ID string. - app: An App instance (optional). - - Raises: - ValueError: If the user ID is None, empty or malformed. - FirebaseError: If an error occurs while revoking the refresh token. - """ - ... - -def get_user(uid, app=...): - """Gets the user data corresponding to the specified user ID. - - Args: - uid: A user ID string. - app: An App instance (optional). - - Returns: - UserRecord: A user record instance. - - Raises: - ValueError: If the user ID is None, empty or malformed. - UserNotFoundError: If the specified user ID does not exist. - FirebaseError: If an error occurs while retrieving the user. - """ - ... - -def get_user_by_email(email, app=...): - """Gets the user data corresponding to the specified user email. - - Args: - email: A user email address string. - app: An App instance (optional). - - Returns: - UserRecord: A user record instance. - - Raises: - ValueError: If the email is None, empty or malformed. - UserNotFoundError: If no user exists by the specified email address. - FirebaseError: If an error occurs while retrieving the user. - """ - ... - -def get_user_by_phone_number(phone_number, app=...): - """Gets the user data corresponding to the specified phone number. 
- - Args: - phone_number: A phone number string. - app: An App instance (optional). - - Returns: - UserRecord: A user record instance. - - Raises: - ValueError: If the phone number is None, empty or malformed. - UserNotFoundError: If no user exists by the specified phone number. - FirebaseError: If an error occurs while retrieving the user. - """ - ... - -def get_users(identifiers, app=...): - """Gets the user data corresponding to the specified identifiers. - - There are no ordering guarantees; in particular, the nth entry in the - result list is not guaranteed to correspond to the nth entry in the input - parameters list. - - A maximum of 100 identifiers may be supplied. If more than 100 - identifiers are supplied, this method raises a `ValueError`. - - Args: - identifiers (list[UserIdentifier]): A list of ``UserIdentifier`` - instances used to indicate which user records should be returned. - Must have <= 100 entries. - app: An App instance (optional). - - Returns: - GetUsersResult: A ``GetUsersResult`` instance corresponding to the - specified identifiers. - - Raises: - ValueError: If any of the identifiers are invalid or if more than 100 - identifiers are specified. - """ - ... - -def list_users(page_token=..., max_results=..., app=...): - """Retrieves a page of user accounts from a Firebase project. - - The ``page_token`` argument governs the starting point of the page. The ``max_results`` - argument governs the maximum number of user accounts that may be included in the returned page. - This function never returns None. If there are no user accounts in the Firebase project, this - returns an empty page. - - Args: - page_token: A non-empty page token string, which indicates the starting point of the page - (optional). Defaults to ``None``, which will retrieve the first page of users. - max_results: A positive integer indicating the maximum number of users to include in the - returned page (optional). Defaults to 1000, which is also the maximum number allowed. 
- app: An App instance (optional). - - Returns: - ListUsersPage: A page of user accounts. - - Raises: - ValueError: If ``max_results`` or ``page_token`` are invalid. - FirebaseError: If an error occurs while retrieving the user accounts. - """ - ... - -def create_user(**kwargs): - """Creates a new user account with the specified properties. - - Args: - **kwargs: A series of keyword arguments (optional). - - Keyword Args: - uid: User ID to assign to the newly created user (optional). - display_name: The user's display name (optional). - email: The user's primary email (optional). - email_verified: A boolean indicating whether or not the user's primary email is - verified (optional). - phone_number: The user's primary phone number (optional). - photo_url: The user's photo URL (optional). - password: The user's raw, unhashed password. (optional). - disabled: A boolean indicating whether or not the user account is disabled (optional). - app: An App instance (optional). - - Returns: - UserRecord: A user record instance for the newly created user. - - Raises: - ValueError: If the specified user properties are invalid. - FirebaseError: If an error occurs while creating the user account. - """ - ... - -def update_user(uid, **kwargs): - """Updates an existing user account with the specified properties. - - Args: - uid: A user ID string. - **kwargs: A series of keyword arguments (optional). - - Keyword Args: - display_name: The user's display name (optional). Can be removed by explicitly passing - ``auth.DELETE_ATTRIBUTE``. - email: The user's primary email (optional). - email_verified: A boolean indicating whether or not the user's primary email is - verified (optional). - phone_number: The user's primary phone number (optional). Can be removed by explicitly - passing ``auth.DELETE_ATTRIBUTE``. - photo_url: The user's photo URL (optional). Can be removed by explicitly passing - ``auth.DELETE_ATTRIBUTE``. - password: The user's raw, unhashed password. (optional). 
- disabled: A boolean indicating whether or not the user account is disabled (optional). - custom_claims: A dictionary or a JSON string containing the custom claims to be set on the - user account (optional). To remove all custom claims, pass ``auth.DELETE_ATTRIBUTE``. - valid_since: An integer signifying the seconds since the epoch (optional). This field is - set by ``revoke_refresh_tokens`` and it is discouraged to set this field directly. - app: An App instance (optional). - - Returns: - UserRecord: An updated user record instance for the user. - - Raises: - ValueError: If the specified user ID or properties are invalid. - FirebaseError: If an error occurs while updating the user account. - """ - ... - -def set_custom_user_claims(uid, custom_claims, app=...): # -> None: - """Sets additional claims on an existing user account. - - Custom claims set via this function can be used to define user roles and privilege levels. - These claims propagate to all the devices where the user is already signed in (after token - expiration or when token refresh is forced), and next time the user signs in. The claims - can be accessed via the user's ID token JWT. If a reserved OIDC claim is specified (sub, iat, - iss, etc), an error is thrown. Claims payload must also not be larger then 1000 characters - when serialized into a JSON string. - - Args: - uid: A user ID string. - custom_claims: A dictionary or a JSON string of custom claims. Pass None to unset any - claims set previously. - app: An App instance (optional). - - Raises: - ValueError: If the specified user ID or the custom claims are invalid. - FirebaseError: If an error occurs while updating the user account. - """ - ... - -def delete_user(uid, app=...): # -> None: - """Deletes the user identified by the specified user ID. - - Args: - uid: A user ID string. - app: An App instance (optional). - - Raises: - ValueError: If the user ID is None, empty or malformed. 
- FirebaseError: If an error occurs while deleting the user account. - """ - ... - -def delete_users(uids, app=...): - """Deletes the users specified by the given identifiers. - - Deleting a non-existing user does not generate an error (the method is - idempotent.) Non-existing users are considered to be successfully deleted - and are therefore included in the `DeleteUserResult.success_count` value. - - A maximum of 1000 identifiers may be supplied. If more than 1000 - identifiers are supplied, this method raises a `ValueError`. - - Args: - uids: A list of strings indicating the uids of the users to be deleted. - Must have <= 1000 entries. - app: An App instance (optional). - - Returns: - DeleteUsersResult: The total number of successful/failed deletions, as - well as the array of errors that correspond to the failed deletions. - - Raises: - ValueError: If any of the identifiers are invalid or if more than 1000 - identifiers are specified. - """ - ... - -def import_users(users, hash_alg=..., app=...): - """Imports the specified list of users into Firebase Auth. - - At most 1000 users can be imported at a time. This operation is optimized for bulk imports and - will ignore checks on identifier uniqueness which could result in duplications. The - ``hash_alg`` parameter must be specified when importing users with passwords. Refer to the - ``UserImportHash`` class for supported hash algorithms. - - Args: - users: A list of ``ImportUserRecord`` instances to import. Length of the list must not - exceed 1000. - hash_alg: A ``UserImportHash`` object (optional). Required when importing users with - passwords. - app: An App instance (optional). - - Returns: - UserImportResult: An object summarizing the result of the import operation. - - Raises: - ValueError: If the provided arguments are invalid. - FirebaseError: If an error occurs while importing users. - """ - ... 
- -def generate_password_reset_link(email, action_code_settings=..., app=...): - """Generates the out-of-band email action link for password reset flows for the specified email - address. - - Args: - email: The email of the user whose password is to be reset. - action_code_settings: ``ActionCodeSettings`` instance (optional). Defines whether - the link is to be handled by a mobile app and the additional state information to be - passed in the deep link. - app: An App instance (optional). - Returns: - link: The password reset link created by the API - - Raises: - ValueError: If the provided arguments are invalid - FirebaseError: If an error occurs while generating the link - """ - ... - -def generate_email_verification_link(email, action_code_settings=..., app=...): - """Generates the out-of-band email action link for email verification flows for the specified - email address. - - Args: - email: The email of the user to be verified. - action_code_settings: ``ActionCodeSettings`` instance (optional). Defines whether - the link is to be handled by a mobile app and the additional state information to be - passed in the deep link. - app: An App instance (optional). - Returns: - link: The email verification link created by the API - - Raises: - ValueError: If the provided arguments are invalid - FirebaseError: If an error occurs while generating the link - """ - ... - -def generate_sign_in_with_email_link(email, action_code_settings, app=...): - """Generates the out-of-band email action link for email link sign-in flows, using the action - code settings provided. - - Args: - email: The email of the user signing in. - action_code_settings: ``ActionCodeSettings`` instance. Defines whether - the link is to be handled by a mobile app and the additional state information to be - passed in the deep link. - app: An App instance (optional). 
- - Returns: - link: The email sign-in link created by the API - - Raises: - ValueError: If the provided arguments are invalid - FirebaseError: If an error occurs while generating the link - """ - ... - -def get_oidc_provider_config(provider_id, app=...): - """Returns the ``OIDCProviderConfig`` with the given ID. - - Args: - provider_id: Provider ID string. - app: An App instance (optional). - - Returns: - OIDCProviderConfig: An OIDC provider config instance. - - Raises: - ValueError: If the provider ID is invalid, empty or does not have ``oidc.`` prefix. - ConfigurationNotFoundError: If no OIDC provider is available with the given identifier. - FirebaseError: If an error occurs while retrieving the OIDC provider. - """ - ... - -def create_oidc_provider_config(provider_id, client_id, issuer, display_name=..., enabled=..., client_secret=..., id_token_response_type=..., code_response_type=..., app=...): - """Creates a new OIDC provider config from the given parameters. - - OIDC provider support requires Google Cloud's Identity Platform (GCIP). To learn more about - GCIP, including pricing and features, see https://cloud.google.com/identity-platform. - - Args: - provider_id: Provider ID string. Must have the prefix ``oidc.``. - client_id: Client ID of the new config. - issuer: Issuer of the new config. Must be a valid URL. - display_name: The user-friendly display name to the current configuration (optional). - This name is also used as the provider label in the Cloud Console. - enabled: A boolean indicating whether the provider configuration is enabled or disabled - (optional). A user cannot sign in using a disabled provider. - app: An App instance (optional). - client_secret: A string which sets the client secret for the new provider. - This is required for the code flow. - code_response_type: A boolean which sets whether to enable the code response flow for the - new provider. By default, this is not enabled if no response type is specified. 
- A client secret must be set for this response type. - Having both the code and ID token response flows is currently not supported. - id_token_response_type: A boolean which sets whether to enable the ID token response flow - for the new provider. By default, this is enabled if no response type is specified. - Having both the code and ID token response flows is currently not supported. - - Returns: - OIDCProviderConfig: The newly created OIDC provider config instance. - - Raises: - ValueError: If any of the specified input parameters are invalid. - FirebaseError: If an error occurs while creating the new OIDC provider config. - """ - ... - -def update_oidc_provider_config(provider_id, client_id=..., issuer=..., display_name=..., enabled=..., client_secret=..., id_token_response_type=..., code_response_type=..., app=...): - """Updates an existing OIDC provider config with the given parameters. - - Args: - provider_id: Provider ID string. Must have the prefix ``oidc.``. - client_id: Client ID of the new config (optional). - issuer: Issuer of the new config (optional). Must be a valid URL. - display_name: The user-friendly display name of the current configuration (optional). - Pass ``auth.DELETE_ATTRIBUTE`` to delete the current display name. - enabled: A boolean indicating whether the provider configuration is enabled or disabled - (optional). - app: An App instance (optional). - client_secret: A string which sets the client secret for the new provider. - This is required for the code flow. - code_response_type: A boolean which sets whether to enable the code response flow for the - new provider. By default, this is not enabled if no response type is specified. - A client secret must be set for this response type. - Having both the code and ID token response flows is currently not supported. - id_token_response_type: A boolean which sets whether to enable the ID token response flow - for the new provider. 
By default, this is enabled if no response type is specified. - Having both the code and ID token response flows is currently not supported. - - Returns: - OIDCProviderConfig: The updated OIDC provider config instance. - - Raises: - ValueError: If any of the specified input parameters are invalid. - FirebaseError: If an error occurs while updating the OIDC provider config. - """ - ... - -def delete_oidc_provider_config(provider_id, app=...): # -> None: - """Deletes the ``OIDCProviderConfig`` with the given ID. - - Args: - provider_id: Provider ID string. - app: An App instance (optional). - - Raises: - ValueError: If the provider ID is invalid, empty or does not have ``oidc.`` prefix. - ConfigurationNotFoundError: If no OIDC provider is available with the given identifier. - FirebaseError: If an error occurs while deleting the OIDC provider. - """ - ... - -def list_oidc_provider_configs(page_token=..., max_results=..., app=...): - """Retrieves a page of OIDC provider configs from a Firebase project. - - The ``page_token`` argument governs the starting point of the page. The ``max_results`` - argument governs the maximum number of configs that may be included in the returned - page. This function never returns ``None``. If there are no OIDC configs in the Firebase - project, this returns an empty page. - - Args: - page_token: A non-empty page token string, which indicates the starting point of the - page (optional). Defaults to ``None``, which will retrieve the first page of users. - max_results: A positive integer indicating the maximum number of users to include in - the returned page (optional). Defaults to 100, which is also the maximum number - allowed. - app: An App instance (optional). - - Returns: - ListProviderConfigsPage: A page of OIDC provider config instances. - - Raises: - ValueError: If ``max_results`` or ``page_token`` are invalid. - FirebaseError: If an error occurs while retrieving the OIDC provider configs. - """ - ... 
- -def get_saml_provider_config(provider_id, app=...): - """Returns the ``SAMLProviderConfig`` with the given ID. - - Args: - provider_id: Provider ID string. - app: An App instance (optional). - - Returns: - SAMLProviderConfig: A SAML provider config instance. - - Raises: - ValueError: If the provider ID is invalid, empty or does not have ``saml.`` prefix. - ConfigurationNotFoundError: If no SAML provider is available with the given identifier. - FirebaseError: If an error occurs while retrieving the SAML provider. - """ - ... - -def create_saml_provider_config(provider_id, idp_entity_id, sso_url, x509_certificates, rp_entity_id, callback_url, display_name=..., enabled=..., app=...): - """Creates a new SAML provider config from the given parameters. - - SAML provider support requires Google Cloud's Identity Platform (GCIP). To learn more about - GCIP, including pricing and features, see https://cloud.google.com/identity-platform. - - Args: - provider_id: Provider ID string. Must have the prefix ``saml.``. - idp_entity_id: The SAML IdP entity identifier. - sso_url: The SAML IdP SSO URL. Must be a valid URL. - x509_certificates: The list of SAML IdP X.509 certificates issued by CA for this provider. - Multiple certificates are accepted to prevent outages during IdP key rotation (for - example ADFS rotates every 10 days). When the Auth server receives a SAML response, it - will match the SAML response with the certificate on record. Otherwise the response is - rejected. Developers are expected to manage the certificate updates as keys are - rotated. - rp_entity_id: The SAML relying party (service provider) entity ID. This is defined by the - developer but needs to be provided to the SAML IdP. - callback_url: Callback URL string. This is fixed and must always be the same as the OAuth - redirect URL provisioned by Firebase Auth, unless a custom authDomain is used. - display_name: The user-friendly display name to the current configuration (optional). 
This - name is also used as the provider label in the Cloud Console. - enabled: A boolean indicating whether the provider configuration is enabled or disabled - (optional). A user cannot sign in using a disabled provider. - app: An App instance (optional). - - Returns: - SAMLProviderConfig: The newly created SAML provider config instance. - - Raises: - ValueError: If any of the specified input parameters are invalid. - FirebaseError: If an error occurs while creating the new SAML provider config. - """ - ... - -def update_saml_provider_config(provider_id, idp_entity_id=..., sso_url=..., x509_certificates=..., rp_entity_id=..., callback_url=..., display_name=..., enabled=..., app=...): - """Updates an existing SAML provider config with the given parameters. - - Args: - provider_id: Provider ID string. Must have the prefix ``saml.``. - idp_entity_id: The SAML IdP entity identifier (optional). - sso_url: The SAML IdP SSO URL. Must be a valid URL (optional). - x509_certificates: The list of SAML IdP X.509 certificates issued by CA for this - provider (optional). - rp_entity_id: The SAML relying party entity ID (optional). - callback_url: Callback URL string (optional). - display_name: The user-friendly display name of the current configuration (optional). - Pass ``auth.DELETE_ATTRIBUTE`` to delete the current display name. - enabled: A boolean indicating whether the provider configuration is enabled or disabled - (optional). - app: An App instance (optional). - - Returns: - SAMLProviderConfig: The updated SAML provider config instance. - - Raises: - ValueError: If any of the specified input parameters are invalid. - FirebaseError: If an error occurs while updating the SAML provider config. - """ - ... - -def delete_saml_provider_config(provider_id, app=...): # -> None: - """Deletes the ``SAMLProviderConfig`` with the given ID. - - Args: - provider_id: Provider ID string. - app: An App instance (optional). 
- - Raises: - ValueError: If the provider ID is invalid, empty or does not have ``saml.`` prefix. - ConfigurationNotFoundError: If no SAML provider is available with the given identifier. - FirebaseError: If an error occurs while deleting the SAML provider. - """ - ... - -def list_saml_provider_configs(page_token=..., max_results=..., app=...): - """Retrieves a page of SAML provider configs from a Firebase project. - - The ``page_token`` argument governs the starting point of the page. The ``max_results`` - argument governs the maximum number of configs that may be included in the returned - page. This function never returns ``None``. If there are no SAML configs in the Firebase - project, this returns an empty page. - - Args: - page_token: A non-empty page token string, which indicates the starting point of the - page (optional). Defaults to ``None``, which will retrieve the first page of users. - max_results: A positive integer indicating the maximum number of users to include in - the returned page (optional). Defaults to 100, which is also the maximum number - allowed. - app: An App instance (optional). - - Returns: - ListProviderConfigsPage: A page of SAML provider config instances. - - Raises: - ValueError: If ``max_results`` or ``page_token`` are invalid. - FirebaseError: If an error occurs while retrieving the SAML provider configs. - """ - ... - diff --git a/typings/firebase_admin/credentials.pyi b/typings/firebase_admin/credentials.pyi deleted file mode 100644 index eab4a3c..0000000 --- a/typings/firebase_admin/credentials.pyi +++ /dev/null @@ -1,155 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from google.auth.credentials import Credentials as GoogleAuthCredentials - -"""Firebase credentials module.""" -_request = ... -_scopes = ... -AccessTokenInfo = ... 
-class Base: - """Provides OAuth2 access tokens for accessing Firebase services.""" - def get_access_token(self): # -> AccessTokenInfo: - """Fetches a Google OAuth2 access token using this credential instance. - - Returns: - AccessTokenInfo: An access token obtained using the credential. - """ - ... - - def get_credential(self): - """Returns the Google credential instance used for authentication.""" - ... - - - -class _ExternalCredentials(Base): - """A wrapper for google.auth.credentials.Credentials typed credential instances""" - def __init__(self, credential: GoogleAuthCredentials) -> None: - ... - - def get_credential(self): # -> Credentials: - """Returns the underlying Google Credential - - Returns: - google.auth.credentials.Credentials: A Google Auth credential instance.""" - ... - - - -class Certificate(Base): - """A credential initialized from a JSON certificate keyfile.""" - _CREDENTIAL_TYPE = ... - def __init__(self, cert) -> None: - """Initializes a credential from a Google service account certificate. - - Service account certificates can be downloaded as JSON files from the Firebase console. - To instantiate a credential from a certificate file, either specify the file path or a - dict representing the parsed contents of the file. - - Args: - cert: Path to a certificate file or a dict representing the contents of a certificate. - - Raises: - IOError: If the specified certificate file doesn't exist or cannot be read. - ValueError: If the specified certificate is invalid. - """ - ... - - @property - def project_id(self): # -> None: - ... - - @property - def signer(self): # -> Any: - ... - - @property - def service_account_email(self): # -> Any: - ... - - def get_credential(self): # -> Credentials: - """Returns the underlying Google credential. - - Returns: - google.auth.credentials.Credentials: A Google Auth credential instance.""" - ... 
- - - -class ApplicationDefault(Base): - """A Google Application Default credential.""" - def __init__(self) -> None: - """Creates an instance that will use Application Default credentials. - - The credentials will be lazily initialized when get_credential() or - project_id() is called. See those methods for possible errors raised. - """ - ... - - def get_credential(self): # -> None: - """Returns the underlying Google credential. - - Raises: - google.auth.exceptions.DefaultCredentialsError: If Application Default - credentials cannot be initialized in the current environment. - Returns: - google.auth.credentials.Credentials: A Google Auth credential instance.""" - ... - - @property - def project_id(self): # -> str | None: - """Returns the project_id from the underlying Google credential. - - Raises: - google.auth.exceptions.DefaultCredentialsError: If Application Default - credentials cannot be initialized in the current environment. - Returns: - str: The project id.""" - ... - - - -class RefreshToken(Base): - """A credential initialized from an existing refresh token.""" - _CREDENTIAL_TYPE = ... - def __init__(self, refresh_token) -> None: - """Initializes a credential from a refresh token JSON file. - - The JSON must consist of client_id, client_secret and refresh_token fields. Refresh - token files are typically created and managed by the gcloud SDK. To instantiate - a credential from a refresh token file, either specify the file path or a dict - representing the parsed contents of the file. - - Args: - refresh_token: Path to a refresh token file or a dict representing the contents of a - refresh token file. - - Raises: - IOError: If the specified file doesn't exist or cannot be read. - ValueError: If the refresh token configuration is invalid. - """ - ... - - @property - def client_id(self): # -> None: - ... - - @property - def client_secret(self): # -> None: - ... - - @property - def refresh_token(self): # -> None: - ... 
- - def get_credential(self): # -> Credentials: - """Returns the underlying Google credential. - - Returns: - google.auth.credentials.Credentials: A Google Auth credential instance.""" - ... - - - diff --git a/typings/firebase_admin/db.pyi b/typings/firebase_admin/db.pyi deleted file mode 100644 index f97ff73..0000000 --- a/typings/firebase_admin/db.pyi +++ /dev/null @@ -1,573 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from firebase_admin import _http_client, exceptions - -"""Firebase Realtime Database module. - -This module contains functions and classes that facilitate interacting with the Firebase Realtime -Database. It supports basic data manipulation operations, as well as complex queries such as -limit queries and range queries. However, it does not support realtime update notifications. This -module uses the Firebase REST API underneath. -""" -_DB_ATTRIBUTE = ... -_INVALID_PATH_CHARACTERS = ... -_RESERVED_FILTERS = ... -_USER_AGENT = ... -_TRANSACTION_MAX_RETRIES = ... -_EMULATOR_HOST_ENV_VAR = ... -def reference(path=..., app=..., url=...): # -> Reference: - """Returns a database ``Reference`` representing the node at the specified path. - - If no path is specified, this function returns a ``Reference`` that represents the database - root. By default, the returned References provide access to the Firebase Database specified at - app initialization. To connect to a different database instance in the same Firebase project, - specify the ``url`` parameter. - - Args: - path: Path to a node in the Firebase realtime database (optional). - app: An App instance (optional). - url: Base URL of the Firebase Database instance (optional). When specified, takes - precedence over the the ``databaseURL`` option set at app initialization. - - Returns: - Reference: A newly initialized Reference. - - Raises: - ValueError: If the specified path or app is invalid. - """ - ... 
- -class Event: - """Represents a realtime update event received from the database.""" - def __init__(self, sse_event) -> None: - ... - - @property - def data(self): # -> Any: - """Parsed JSON data of this event.""" - ... - - @property - def path(self): # -> Any: - """Path of the database reference that triggered this event.""" - ... - - @property - def event_type(self): - """Event type string (put, patch).""" - ... - - - -class ListenerRegistration: - """Represents the addition of an event listener to a database reference.""" - def __init__(self, callback, sse) -> None: - """Initializes a new listener with given parameters. - - This is an internal API. Use the ``db.Reference.listen()`` method to start a - new listener. - - Args: - callback: The callback function to fire in case of event. - sse: A transport session to make requests with. - """ - ... - - def close(self): # -> None: - """Stops the event listener represented by this registration - - This closes the SSE HTTP connection, and joins the background thread. - """ - ... - - - -class Reference: - """Reference represents a node in the Firebase realtime database.""" - def __init__(self, **kwargs) -> None: - """Creates a new Reference using the provided parameters. - - This method is for internal use only. Use db.reference() to obtain an instance of - Reference. - """ - ... - - @property - def key(self): # -> str | None: - ... - - @property - def path(self): - ... - - @property - def parent(self): # -> Reference | None: - ... - - def child(self, path): # -> Reference: - """Returns a Reference to the specified child node. - - The path may point to an immediate child of the current Reference, or a deeply nested - child. Child paths must not begin with '/'. - - Args: - path: Path to the child node. - - Returns: - Reference: A database Reference representing the specified child node. - - Raises: - ValueError: If the child path is not a string, not well-formed or begins with '/'. - """ - ... 
- - def get(self, etag=..., shallow=...): # -> tuple[Any, Any]: - """Returns the value, and optionally the ETag, at the current location of the database. - - Args: - etag: A boolean indicating whether the Etag value should be returned or not (optional). - shallow: A boolean indicating whether to execute a shallow read (optional). Shallow - reads do not retrieve the child nodes of the current database location. Cannot be - set to True if ``etag`` is also set to True. - - Returns: - object: If etag is False returns the decoded JSON value of the current database location. - If etag is True, returns a 2-tuple consisting of the decoded JSON value and the Etag - associated with the current database location. - - Raises: - ValueError: If both ``etag`` and ``shallow`` are set to True. - FirebaseError: If an error occurs while communicating with the remote database server. - """ - ... - - def get_if_changed(self, etag): # -> tuple[Literal[False], None, None] | tuple[Literal[True], Any, Any]: - """Gets data in this location only if the specified ETag does not match. - - Args: - etag: The ETag value to be checked against the ETag of the current location. - - Returns: - tuple: A 3-tuple consisting of a boolean, a decoded JSON value and an ETag. If the ETag - specified by the caller did not match, the boolen value will be True and the JSON - and ETag values would reflect the corresponding values in the database. If the ETag - matched, the boolean value will be False and the other elements of the tuple will be - None. - - Raises: - ValueError: If the ETag is not a string. - FirebaseError: If an error occurs while communicating with the remote database server. - """ - ... - - def set(self, value): # -> None: - """Sets the data at this location to the given value. - - The value must be JSON-serializable and not None. - - Args: - value: JSON-serializable value to be set at this location. - - Raises: - ValueError: If the provided value is None. 
- TypeError: If the value is not JSON-serializable. - FirebaseError: If an error occurs while communicating with the remote database server. - """ - ... - - def set_if_unchanged(self, expected_etag, value): # -> tuple[Literal[True], Any, Any] | tuple[Literal[False], Any, Any]: - """Conditonally sets the data at this location to the given value. - - Sets the data at this location to the given value only if ``expected_etag`` is same as the - ETag value in the database. - - Args: - expected_etag: Value of ETag we want to check. - value: JSON-serializable value to be set at this location. - - Returns: - tuple: A 3-tuple consisting of a boolean, a decoded JSON value and an ETag. The boolean - indicates whether the set operation was successful or not. The decoded JSON and the - ETag corresponds to the latest value in this database location. - - Raises: - ValueError: If the value is None, or if expected_etag is not a string. - FirebaseError: If an error occurs while communicating with the remote database server. - """ - ... - - def push(self, value=...): # -> Reference: - """Creates a new child node. - - The optional value argument can be used to provide an initial value for the child node. If - no value is provided, child node will have empty string as the default value. - - Args: - value: JSON-serializable initial value for the child node (optional). - - Returns: - Reference: A Reference representing the newly created child node. - - Raises: - ValueError: If the value is None. - TypeError: If the value is not JSON-serializable. - FirebaseError: If an error occurs while communicating with the remote database server. - """ - ... - - def update(self, value): # -> None: - """Updates the specified child keys of this Reference to the provided values. - - Args: - value: A dictionary containing the child keys to update, and their new values. - - Raises: - ValueError: If value is empty or not a dictionary. 
- FirebaseError: If an error occurs while communicating with the remote database server. - """ - ... - - def delete(self): # -> None: - """Deletes this node from the database. - - Raises: - FirebaseError: If an error occurs while communicating with the remote database server. - """ - ... - - def listen(self, callback): # -> ListenerRegistration: - """Registers the ``callback`` function to receive realtime updates. - - The specified callback function will get invoked with ``db.Event`` objects for each - realtime update received from the database. It will also get called whenever the SDK - reconnects to the server due to network issues or credential expiration. In general, - the OAuth2 credentials used to authorize connections to the server expire every hour. - Therefore clients should expect the ``callback`` to fire at least once every hour, even if - there are no updates in the database. - - This API is based on the event streaming support available in the Firebase REST API. Each - call to ``listen()`` starts a new HTTP connection and a background thread. This is an - experimental feature. It currently does not honor the auth overrides and timeout settings. - Cannot be used in thread-constrained environments like Google App Engine. - - Args: - callback: A function to be called when a data change is detected. - - Returns: - ListenerRegistration: An object that can be used to stop the event listener. - - Raises: - FirebaseError: If an error occurs while starting the initial HTTP connection. - """ - ... - - def transaction(self, transaction_update): # -> object: - """Atomically modifies the data at this location. - - Unlike a normal ``set()``, which just overwrites the data regardless of its previous state, - ``transaction()`` is used to modify the existing value to a new value, ensuring there are - no conflicts with other clients simultaneously writing to the same location. 
- - This is accomplished by passing an update function which is used to transform the current - value of this reference into a new value. If another client writes to this location before - the new value is successfully saved, the update function is called again with the new - current value, and the write will be retried. In case of repeated failures, this method - will retry the transaction up to 25 times before giving up and raising a - TransactionAbortedError. The update function may also force an early abort by raising an - exception instead of returning a value. - - Args: - transaction_update: A function which will be passed the current data stored at this - location. The function should return the new value it would like written. If - an exception is raised, the transaction will be aborted, and the data at this - location will not be modified. The exceptions raised by this function are - propagated to the caller of the transaction method. - - Returns: - object: New value of the current database Reference (only if the transaction commits). - - Raises: - TransactionAbortedError: If the transaction aborts after exhausting all retry attempts. - ValueError: If transaction_update is not a function. - """ - ... - - def order_by_child(self, path): # -> Query: - """Returns a Query that orders data by child values. - - Returned Query can be used to set additional parameters, and execute complex database - queries (e.g. limit queries, range queries). - - Args: - path: Path to a valid child of the current Reference. - - Returns: - Query: A database Query instance. - - Raises: - ValueError: If the child path is not a string, not well-formed or None. - """ - ... - - def order_by_key(self): # -> Query: - """Creates a Query that orderes data by key. - - Returned Query can be used to set additional parameters, and execute complex database - queries (e.g. limit queries, range queries). - - Returns: - Query: A database Query instance. - """ - ... 
- - def order_by_value(self): # -> Query: - """Creates a Query that orderes data by value. - - Returned Query can be used to set additional parameters, and execute complex database - queries (e.g. limit queries, range queries). - - Returns: - Query: A database Query instance. - """ - ... - - - -class Query: - """Represents a complex query that can be executed on a Reference. - - Complex queries can consist of up to 2 components: a required ordering constraint, and an - optional filtering constraint. At the server, data is first sorted according to the given - ordering constraint (e.g. order by child). Then the filtering constraint (e.g. limit, range) - is applied on the sorted data to produce the final result. Despite the ordering constraint, - the final result is returned by the server as an unordered collection. Therefore the Query - interface performs another round of sorting at the client-side before returning the results - to the caller. This client-side sorted results are returned to the user as a Python - OrderedDict. - """ - def __init__(self, **kwargs) -> None: - ... - - def limit_to_first(self, limit): # -> Self: - """Creates a query with limit, and anchors it to the start of the window. - - Args: - limit: The maximum number of child nodes to return. - - Returns: - Query: The updated Query instance. - - Raises: - ValueError: If the value is not an integer, or set_limit_last() was called previously. - """ - ... - - def limit_to_last(self, limit): # -> Self: - """Creates a query with limit, and anchors it to the end of the window. - - Args: - limit: The maximum number of child nodes to return. - - Returns: - Query: The updated Query instance. - - Raises: - ValueError: If the value is not an integer, or set_limit_first() was called previously. - """ - ... - - def start_at(self, start): # -> Self: - """Sets the lower bound for a range query. - - The Query will only return child nodes with a value greater than or equal to the specified - value. 
- - Args: - start: JSON-serializable value to start at, inclusive. - - Returns: - Query: The updated Query instance. - - Raises: - ValueError: If the value is ``None``. - """ - ... - - def end_at(self, end): # -> Self: - """Sets the upper bound for a range query. - - The Query will only return child nodes with a value less than or equal to the specified - value. - - Args: - end: JSON-serializable value to end at, inclusive. - - Returns: - Query: The updated Query instance. - - Raises: - ValueError: If the value is ``None``. - """ - ... - - def equal_to(self, value): # -> Self: - """Sets an equals constraint on the Query. - - The Query will only return child nodes whose value is equal to the specified value. - - Args: - value: JSON-serializable value to query for. - - Returns: - Query: The updated Query instance. - - Raises: - ValueError: If the value is ``None``. - """ - ... - - def get(self): # -> OrderedDict[int, Any] | list[Any] | dict[Any, Any]: - """Executes this Query and returns the results. - - The results will be returned as a sorted list or an OrderedDict. - - Returns: - object: Decoded JSON result of the Query. - - Raises: - FirebaseError: If an error occurs while communicating with the remote database server. - """ - ... - - - -class TransactionAbortedError(exceptions.AbortedError): - """A transaction was aborted aftr exceeding the maximum number of retries.""" - def __init__(self, message) -> None: - ... - - - -class _Sorter: - """Helper class for sorting query results.""" - def __init__(self, results, order_by) -> None: - ... - - def get(self): # -> OrderedDict[int, Any] | list[Any]: - ... - - - -class _SortEntry: - """A wrapper that is capable of sorting items in a dictionary.""" - _type_none = ... - _type_bool_false = ... - _type_bool_true = ... - _type_numeric = ... - _type_string = ... - _type_object = ... - def __init__(self, key, value, order_by) -> None: - ... - - @property - def key(self): # -> Any: - ... 
- - @property - def index(self): # -> Any | None: - ... - - @property - def index_type(self): # -> int: - ... - - @property - def value(self): # -> Any: - ... - - def __lt__(self, other) -> bool: - ... - - def __le__(self, other) -> bool: - ... - - def __gt__(self, other) -> bool: - ... - - def __ge__(self, other) -> bool: - ... - - def __eq__(self, other) -> bool: - ... - - - -class _DatabaseService: - """Service that maintains a collection of database clients.""" - _DEFAULT_AUTH_OVERRIDE = ... - def __init__(self, app) -> None: - ... - - def get_client(self, db_url=...): - """Creates a client based on the db_url. Clients may be cached.""" - ... - - def close(self): # -> None: - ... - - - -class _Client(_http_client.JsonHttpClient): - """HTTP client used to make REST calls. - - _Client maintains an HTTP session, and handles authenticating HTTP requests along with - marshalling and unmarshalling of JSON data. - """ - def __init__(self, credential, base_url, timeout, params=...) -> None: - """Creates a new _Client from the given parameters. - - This exists primarily to enable testing. For regular use, obtain _Client instances by - calling the from_app() class method. - - Args: - credential: A Google credential that can be used to authenticate requests. - base_url: A URL prefix to be added to all outgoing requests. This is typically the - Firebase Realtime Database URL. - timeout: HTTP request timeout in seconds. If set to None connections will never - timeout, which is the default behavior of the underlying requests library. - params: Dict of query parameters to add to all outgoing requests. - """ - ... - - def request(self, method, url, **kwargs): # -> Response: - """Makes an HTTP call using the Python requests library. - - Extends the request() method of the parent JsonHttpClient class. Handles default - params like auth overrides, and low-level exceptions. - - Args: - method: HTTP method name as a string (e.g. get, post). - url: URL path of the remote endpoint. 
This will be appended to the server's base URL. - **kwargs: An additional set of keyword arguments to be passed into requests API - (e.g. json, params). - - Returns: - Response: An HTTP response object. - - Raises: - FirebaseError: If an error occurs while making the HTTP call. - """ - ... - - def create_listener_session(self): # -> KeepAuthSession: - ... - - @classmethod - def handle_rtdb_error(cls, error): # -> DeadlineExceededError | UnavailableError | UnknownError: - """Converts an error encountered while calling RTDB into a FirebaseError.""" - ... - - - diff --git a/typings/firebase_admin/exceptions.pyi b/typings/firebase_admin/exceptions.pyi deleted file mode 100644 index c01e956..0000000 --- a/typings/firebase_admin/exceptions.pyi +++ /dev/null @@ -1,191 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -"""Firebase Exceptions module. - -This module defines the base types for exceptions and the platform-wide error codes as outlined in -https://cloud.google.com/apis/design/errors. - -:class:`FirebaseError` is the parent class of all exceptions raised by the Admin SDK. It contains -the ``code``, ``http_response`` and ``cause`` properties common to all Firebase exception types. -Each exception also carries a message that outlines what went wrong. This can be logged for -audit or debugging purposes. - -When calling an Admin SDK API, developers can catch the parent ``FirebaseError`` and -inspect its ``code`` to implement fine-grained error handling. Alternatively, developers can -catch one or more subtypes of ``FirebaseError``. Under normal conditions, any given API can raise -only a small subset of the available exception subtypes. However, the SDK also exposes rare error -conditions like connection timeouts and other I/O errors as instances of ``FirebaseError``. -Therefore it is always a good idea to have a handler specified for ``FirebaseError``, after all the -subtype error handlers. -""" -INVALID_ARGUMENT = ... -FAILED_PRECONDITION = ... 
-OUT_OF_RANGE = ... -UNAUTHENTICATED = ... -PERMISSION_DENIED = ... -NOT_FOUND = ... -CONFLICT = ... -ABORTED = ... -ALREADY_EXISTS = ... -RESOURCE_EXHAUSTED = ... -CANCELLED = ... -DATA_LOSS = ... -UNKNOWN = ... -INTERNAL = ... -UNAVAILABLE = ... -DEADLINE_EXCEEDED = ... -class FirebaseError(Exception): - """Base class for all errors raised by the Admin SDK. - - Args: - code: A string error code that represents the type of the exception. Possible error - codes are defined in https://cloud.google.com/apis/design/errors#handling_errors. - message: A human-readable error message string. - cause: The exception that caused this error (optional). - http_response: If this error was caused by an HTTP error response, this property is - set to the ``requests.Response`` object that represents the HTTP response (optional). - See https://docs.python-requests.org/en/master/api/#requests.Response for details of - this object. - """ - def __init__(self, code, message, cause=..., http_response=...) -> None: - ... - - @property - def code(self): # -> Any: - ... - - @property - def cause(self): # -> None: - ... - - @property - def http_response(self): # -> None: - ... - - - -class InvalidArgumentError(FirebaseError): - """Client specified an invalid argument.""" - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -class FailedPreconditionError(FirebaseError): - """Request can not be executed in the current system state, such as deleting a non-empty - directory.""" - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -class OutOfRangeError(FirebaseError): - """Client specified an invalid range.""" - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -class UnauthenticatedError(FirebaseError): - """Request not authenticated due to missing, invalid, or expired OAuth token.""" - def __init__(self, message, cause=..., http_response=...) -> None: - ... 
- - - -class PermissionDeniedError(FirebaseError): - """Client does not have sufficient permission. - - This can happen because the OAuth token does not have the right scopes, the client doesn't - have permission, or the API has not been enabled for the client project. - """ - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -class NotFoundError(FirebaseError): - """A specified resource is not found, or the request is rejected by undisclosed reasons, such - as whitelisting.""" - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -class ConflictError(FirebaseError): - """Concurrency conflict, such as read-modify-write conflict.""" - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -class AbortedError(FirebaseError): - """Concurrency conflict, such as read-modify-write conflict.""" - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -class AlreadyExistsError(FirebaseError): - """The resource that a client tried to create already exists.""" - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -class ResourceExhaustedError(FirebaseError): - """Either out of resource quota or reaching rate limiting.""" - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -class CancelledError(FirebaseError): - """Request cancelled by the client.""" - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -class DataLossError(FirebaseError): - """Unrecoverable data loss or data corruption.""" - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -class UnknownError(FirebaseError): - """Unknown server error.""" - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -class InternalError(FirebaseError): - """Internal server error.""" - def __init__(self, message, cause=..., http_response=...) -> None: - ... 
- - - -class UnavailableError(FirebaseError): - """Service unavailable. Typically the server is down.""" - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - -class DeadlineExceededError(FirebaseError): - """Request deadline exceeded. - - This will happen only if the caller sets a deadline that is shorter than the method's - default deadline (i.e. requested deadline is not enough for the server to process the - request) and the request did not finish within the deadline. - """ - def __init__(self, message, cause=..., http_response=...) -> None: - ... - - - diff --git a/typings/firebase_admin/firestore.pyi b/typings/firebase_admin/firestore.pyi deleted file mode 100644 index 66ceb79..0000000 --- a/typings/firebase_admin/firestore.pyi +++ /dev/null @@ -1,48 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from typing import Optional -from firebase_admin import App -from google.cloud import firestore - -"""Cloud Firestore module. - -This module contains utilities for accessing the Google Cloud Firestore databases associated with -Firebase apps. This requires the ``google-cloud-firestore`` Python module. -""" -existing = ... -_FIRESTORE_ATTRIBUTE = ... -def client(app: Optional[App] = ..., database_id: Optional[str] = ...) -> firestore.Client: - """Returns a client that can be used to interact with Google Cloud Firestore. - - Args: - app: An App instance (optional). - database_id: The database ID of the Google Cloud Firestore database to be used. - Defaults to the default Firestore database ID if not specified or an empty string - (optional). - - Returns: - google.cloud.firestore.Firestore: A `Firestore Client`_. - - Raises: - ValueError: If the specified database ID is not a valid string, or if a project ID is not - specified either via options, credentials or environment variables, or if the specified - project ID is not a valid string. - - .. 
_Firestore Client: https://cloud.google.com/python/docs/reference/firestore/latest/\ - google.cloud.firestore_v1.client.Client - """ - ... - -class _FirestoreService: - """Service that maintains a collection of firestore clients.""" - def __init__(self, app: App) -> None: - ... - - def get_client(self, database_id: Optional[str]) -> firestore.Client: - """Creates a client based on the database_id. These clients are cached.""" - ... - - - diff --git a/typings/firebase_admin/firestore_async.pyi b/typings/firebase_admin/firestore_async.pyi deleted file mode 100644 index 6488317..0000000 --- a/typings/firebase_admin/firestore_async.pyi +++ /dev/null @@ -1,48 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from typing import Optional -from firebase_admin import App -from google.cloud import firestore - -"""Cloud Firestore Async module. - -This module contains utilities for asynchronusly accessing the Google Cloud Firestore databases -associated with Firebase apps. This requires the ``google-cloud-firestore`` Python module. -""" -existing = ... -_FIRESTORE_ASYNC_ATTRIBUTE: str = ... -def client(app: Optional[App] = ..., database_id: Optional[str] = ...) -> firestore.AsyncClient: - """Returns an async client that can be used to interact with Google Cloud Firestore. - - Args: - app: An App instance (optional). - database_id: The database ID of the Google Cloud Firestore database to be used. - Defaults to the default Firestore database ID if not specified or an empty string - (optional). - - Returns: - google.cloud.firestore.Firestore_Async: A `Firestore Async Client`_. - - Raises: - ValueError: If the specified database ID is not a valid string, or if a project ID is not - specified either via options, credentials or environment variables, or if the specified - project ID is not a valid string. - - .. 
_Firestore Async Client: https://cloud.google.com/python/docs/reference/firestore/latest/\ - google.cloud.firestore_v1.async_client.AsyncClient - """ - ... - -class _FirestoreAsyncService: - """Service that maintains a collection of firestore async clients.""" - def __init__(self, app: App) -> None: - ... - - def get_client(self, database_id: Optional[str]) -> firestore.AsyncClient: - """Creates an async client based on the database_id. These clients are cached.""" - ... - - - diff --git a/typings/firebase_admin/functions.pyi b/typings/firebase_admin/functions.pyi deleted file mode 100644 index 6f68652..0000000 --- a/typings/firebase_admin/functions.pyi +++ /dev/null @@ -1,226 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from datetime import datetime -from typing import Any, Dict, Optional -from dataclasses import dataclass -from firebase_admin import App - -"""Firebase Functions module.""" -_FUNCTIONS_ATTRIBUTE = ... -__all__ = ['TaskOptions', 'task_queue'] -_CLOUD_TASKS_API_RESOURCE_PATH = ... -_CLOUD_TASKS_API_URL_FORMAT = ... -_FIREBASE_FUNCTION_URL_FORMAT = ... -_FUNCTIONS_HEADERS = ... -_DEFAULT_LOCATION = ... -def task_queue(function_name: str, extension_id: Optional[str] = ..., app: Optional[App] = ...) -> TaskQueue: - """Creates a reference to a TaskQueue for a given function name. - - The function name can be either: - 1. A fully qualified function resource name: - `projects/{project-id}/locations/{location-id}/functions/{function-name}` - - 2. A partial resource name with location and function name, in which case - the runtime project ID is used: - `locations/{location-id}/functions/{function-name}` - - 3. A partial function name, in which case the runtime project ID and the - default location, `us-central1`, is used: - `{function-name}` - - Args: - function_name: Name of the function. - extension_id: Firebase extension ID (optional). - app: An App instance (optional). - - Returns: - TaskQueue: A TaskQueue instance. 
- - Raises: - ValueError: If the input arguments are invalid. - """ - ... - -class _FunctionsService: - """Service class that implements Firebase Functions functionality.""" - def __init__(self, app: App) -> None: - ... - - def task_queue(self, function_name: str, extension_id: Optional[str] = ...) -> TaskQueue: - """Creates a TaskQueue instance.""" - ... - - @classmethod - def handle_functions_error(cls, error: Any): # -> DeadlineExceededError | UnavailableError | UnknownError: - """Handles errors received from the Cloud Functions API.""" - ... - - - -class TaskQueue: - """TaskQueue class that implements Firebase Cloud Tasks Queues functionality.""" - def __init__(self, function_name: str, extension_id: Optional[str], project_id, credential, http_client) -> None: - ... - - def enqueue(self, task_data: Any, opts: Optional[TaskOptions] = ...) -> str: - """Creates a task and adds it to the queue. Tasks cannot be updated after creation. - - This action requires `cloudtasks.tasks.create` IAM permission on the service account. - - Args: - task_data: The data payload of the task. - opts: Options when enqueuing a new task (optional). - - Raises: - FirebaseError: If an error occurs while requesting the task to be queued by - the Cloud Functions service. - ValueError: If the input arguments are invalid. - - Returns: - str: The ID of the task relative to this queue. - """ - ... - - def delete(self, task_id: str) -> None: - """Deletes an enqueued task if it has not yet started. - - This action requires `cloudtasks.tasks.delete` IAM permission on the service account. - - Args: - task_id: The ID of the task relative to this queue. - - Raises: - FirebaseError: If an error occurs while requesting the task to be deleted by - the Cloud Functions service. - ValueError: If the input arguments are invalid. - """ - ... 
- - - -class _Validators: - """A collection of data validation utilities.""" - @classmethod - def check_non_empty_string(cls, label: str, value: Any): # -> None: - """Checks if given value is a non-empty string and throws error if not.""" - ... - - @classmethod - def is_non_empty_string(cls, value: Any): # -> bool: - """Checks if given value is a non-empty string and returns bool.""" - ... - - @classmethod - def is_task_id(cls, task_id: Any): # -> bool: - """Checks if given value is a valid task id.""" - ... - - @classmethod - def is_url(cls, url: Any): # -> bool: - """Checks if given value is a valid url.""" - ... - - - -@dataclass -class TaskOptions: - """Task Options that can be applied to a Task. - - Args: - schedule_delay_seconds: The number of seconds after the current time at which to attempt or - retry the task. Should only be set if ``schedule_time`` is not set. - - schedule_time: The time when the task is scheduled to be attempted or retried. Should only - be set if ``schedule_delay_seconds`` is not set. - - dispatch_deadline_seconds: The deadline for requests sent to the worker. If the worker does - not respond by this deadline then the request is cancelled and the attempt is marked as - a ``DEADLINE_EXCEEDED`` failure. Cloud Tasks will retry the task according to the - ``RetryConfig``. The default is 10 minutes. The deadline must be in the range of 15 - seconds and 30 minutes (1800 seconds). - - task_id: The ID to use for the enqueued task. If not provided, one will be automatically - generated. - - If provided, an explicitly specified task ID enables task de-duplication. - Task IDs should be strings that contain only letters ([A-Za-z]), numbers ([0-9]), - hyphens (-), and underscores (_) with a maximum length of 500 characters. If a task's - ID is identical to that of an existing task or a task that was deleted or executed - recently then the call will throw an error with code `functions/task-already-exists`. 
- Another task with the same ID can't be created for ~1hour after the original task was - deleted or executed. - - Because there is an extra lookup cost to identify duplicate task IDs, setting ID - significantly increases latency. - - Also, note that the infrastructure relies on an approximately uniform distribution - of task IDs to store and serve tasks efficiently. For this reason, using hashed strings - for the task ID or for the prefix of the task ID is recommended. Choosing task IDs that - are sequential or have sequential prefixes, for example using a timestamp, causes an - increase in latency and error rates in all task commands. - - Push IDs from the Firebase Realtime Database make poor IDs because they are based on - timestamps and will cause contention (slowdowns) in your task queue. Reversed push IDs - however form a perfect distribution and are an ideal key. To reverse a string in Python - use ``reversedString = someString[::-1]`` - - headers: HTTP request headers to include in the request to the task queue function. These - headers represent a subset of the headers that will accompany the task's HTTP request. - Some HTTP request headers will be ignored or replaced: `Authorization`, `Host`, - `Content-Length`, `User-Agent` and others cannot be overridden. - - A complete list of these ignored or replaced headers can be found in the following - definition of the HttpRequest.headers property: - https://cloud.google.com/tasks/docs/reference/rest/v2/projects.locations.queues.tasks#httprequest - - By default, Content-Type is set to 'application/json'. - - The size of the headers must be less than 80KB. - - uri: The full URL that the request will be sent to. Must be a valid RFC3986 https or - http URL. - """ - schedule_delay_seconds: Optional[int] = ... - schedule_time: Optional[datetime] = ... - dispatch_deadline_seconds: Optional[int] = ... - task_id: Optional[str] = ... - headers: Optional[Dict[str, str]] = ... - uri: Optional[str] = ... 
- - -@dataclass -class Task: - """Contains the relevant fields for enqueueing tasks that trigger Cloud Functions. - - This is a limited subset of the Cloud Functions `Task` resource. See the following - page for definitions of this class's properties: - https://cloud.google.com/tasks/docs/reference/rest/v2/projects.locations.queues.tasks#resource:-task - - Args: - httpRequest: The request to be made by the task worker. - name: The name of the function. See the Cloud docs for the format of this property. - schedule_time: The time when the task is scheduled to be attempted or retried. - dispatch_deadline: The deadline for requests sent to the worker. - """ - http_request: Dict[str, Optional[str | dict]] - name: Optional[str] = ... - schedule_time: Optional[str] = ... - dispatch_deadline: Optional[str] = ... - - -@dataclass -class Resource: - """Contains the parsed address of a resource. - - Args: - resource_id: The ID of the resource. - project_id: The project ID of the resource. - location_id: The location ID of the resource. - """ - resource_id: str - project_id: Optional[str] = ... - location_id: Optional[str] = ... - - diff --git a/typings/firebase_admin/instance_id.pyi b/typings/firebase_admin/instance_id.pyi deleted file mode 100644 index 0b7e554..0000000 --- a/typings/firebase_admin/instance_id.pyi +++ /dev/null @@ -1,41 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -"""Firebase Instance ID module. - -This module enables deleting instance IDs associated with Firebase projects. -""" -_IID_SERVICE_URL = ... -_IID_ATTRIBUTE = ... -def delete_instance_id(instance_id, app=...): # -> None: - """Deletes the specified instance ID and the associated data from Firebase. - - Note that Google Analytics for Firebase uses its own form of Instance ID to - keep track of analytics data. Therefore deleting a regular Instance ID does - not delete Analytics data. See `Delete an Instance ID`_ for more information. 
- - Args: - instance_id: A non-empty instance ID string. - app: An App instance (optional). - - Raises: - InstanceIdError: If an error occurs while invoking the backend instance ID service. - ValueError: If the specified instance ID or app is invalid. - - .. _Delete an Instance ID: https://firebase.google.com/support/privacy\ - /manage-iids#delete_an_instance_id - """ - ... - -class _InstanceIdService: - """Provides methods for interacting with the remote instance ID service.""" - error_codes = ... - def __init__(self, app) -> None: - ... - - def delete_instance_id(self, instance_id): # -> None: - ... - - - diff --git a/typings/firebase_admin/messaging.pyi b/typings/firebase_admin/messaging.pyi deleted file mode 100644 index 2050069..0000000 --- a/typings/firebase_admin/messaging.pyi +++ /dev/null @@ -1,285 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from firebase_admin import _messaging_encoder, _messaging_utils - -"""Firebase Cloud Messaging module.""" -_MESSAGING_ATTRIBUTE = ... 
-__all__ = ['AndroidConfig', 'AndroidFCMOptions', 'AndroidNotification', 'APNSConfig', 'APNSFCMOptions', 'APNSPayload', 'Aps', 'ApsAlert', 'BatchResponse', 'CriticalSound', 'ErrorInfo', 'FCMOptions', 'LightSettings', 'Message', 'MulticastMessage', 'Notification', 'QuotaExceededError', 'SenderIdMismatchError', 'SendResponse', 'ThirdPartyAuthError', 'TopicManagementResponse', 'UnregisteredError', 'WebpushConfig', 'WebpushFCMOptions', 'WebpushNotification', 'WebpushNotificationAction', 'send', 'send_all', 'send_multicast', 'send_each', 'send_each_for_multicast', 'subscribe_to_topic', 'unsubscribe_from_topic'] -AndroidConfig = _messaging_utils.AndroidConfig -AndroidFCMOptions = _messaging_utils.AndroidFCMOptions -AndroidNotification = _messaging_utils.AndroidNotification -APNSConfig = _messaging_utils.APNSConfig -APNSFCMOptions = _messaging_utils.APNSFCMOptions -APNSPayload = _messaging_utils.APNSPayload -Aps = _messaging_utils.Aps -ApsAlert = _messaging_utils.ApsAlert -CriticalSound = _messaging_utils.CriticalSound -FCMOptions = _messaging_utils.FCMOptions -LightSettings = _messaging_utils.LightSettings -Message = _messaging_encoder.Message -MulticastMessage = _messaging_encoder.MulticastMessage -Notification = _messaging_utils.Notification -WebpushConfig = _messaging_utils.WebpushConfig -WebpushFCMOptions = _messaging_utils.WebpushFCMOptions -WebpushNotification = _messaging_utils.WebpushNotification -WebpushNotificationAction = _messaging_utils.WebpushNotificationAction -QuotaExceededError = _messaging_utils.QuotaExceededError -SenderIdMismatchError = _messaging_utils.SenderIdMismatchError -ThirdPartyAuthError = _messaging_utils.ThirdPartyAuthError -UnregisteredError = _messaging_utils.UnregisteredError -def send(message, dry_run=..., app=...): - """Sends the given message via Firebase Cloud Messaging (FCM). - - If the ``dry_run`` mode is enabled, the message will not be actually delivered to the - recipients. 
Instead FCM performs all the usual validations, and emulates the send operation. - - Args: - message: An instance of ``messaging.Message``. - dry_run: A boolean indicating whether to run the operation in dry run mode (optional). - app: An App instance (optional). - - Returns: - string: A message ID string that uniquely identifies the sent message. - - Raises: - FirebaseError: If an error occurs while sending the message to the FCM service. - ValueError: If the input arguments are invalid. - """ - ... - -def send_each(messages, dry_run=..., app=...): - """Sends each message in the given list via Firebase Cloud Messaging. - - If the ``dry_run`` mode is enabled, the message will not be actually delivered to the - recipients. Instead FCM performs all the usual validations, and emulates the send operation. - - Args: - messages: A list of ``messaging.Message`` instances. - dry_run: A boolean indicating whether to run the operation in dry run mode (optional). - app: An App instance (optional). - - Returns: - BatchResponse: A ``messaging.BatchResponse`` instance. - - Raises: - FirebaseError: If an error occurs while sending the message to the FCM service. - ValueError: If the input arguments are invalid. - """ - ... - -def send_each_for_multicast(multicast_message, dry_run=..., app=...): - """Sends the given mutlicast message to each token via Firebase Cloud Messaging (FCM). - - If the ``dry_run`` mode is enabled, the message will not be actually delivered to the - recipients. Instead FCM performs all the usual validations, and emulates the send operation. - - Args: - multicast_message: An instance of ``messaging.MulticastMessage``. - dry_run: A boolean indicating whether to run the operation in dry run mode (optional). - app: An App instance (optional). - - Returns: - BatchResponse: A ``messaging.BatchResponse`` instance. - - Raises: - FirebaseError: If an error occurs while sending the message to the FCM service. - ValueError: If the input arguments are invalid. 
- """ - ... - -def send_all(messages, dry_run=..., app=...): - """Sends the given list of messages via Firebase Cloud Messaging as a single batch. - - If the ``dry_run`` mode is enabled, the message will not be actually delivered to the - recipients. Instead FCM performs all the usual validations, and emulates the send operation. - - Args: - messages: A list of ``messaging.Message`` instances. - dry_run: A boolean indicating whether to run the operation in dry run mode (optional). - app: An App instance (optional). - - Returns: - BatchResponse: A ``messaging.BatchResponse`` instance. - - Raises: - FirebaseError: If an error occurs while sending the message to the FCM service. - ValueError: If the input arguments are invalid. - - send_all() is deprecated. Use send_each() instead. - """ - ... - -def send_multicast(multicast_message, dry_run=..., app=...): - """Sends the given mutlicast message to all tokens via Firebase Cloud Messaging (FCM). - - If the ``dry_run`` mode is enabled, the message will not be actually delivered to the - recipients. Instead FCM performs all the usual validations, and emulates the send operation. - - Args: - multicast_message: An instance of ``messaging.MulticastMessage``. - dry_run: A boolean indicating whether to run the operation in dry run mode (optional). - app: An App instance (optional). - - Returns: - BatchResponse: A ``messaging.BatchResponse`` instance. - - Raises: - FirebaseError: If an error occurs while sending the message to the FCM service. - ValueError: If the input arguments are invalid. - - send_multicast() is deprecated. Use send_each_for_multicast() instead. - """ - ... - -def subscribe_to_topic(tokens, topic, app=...): - """Subscribes a list of registration tokens to an FCM topic. - - Args: - tokens: A non-empty list of device registration tokens. List may not have more than 1000 - elements. - topic: Name of the topic to subscribe to. May contain the ``/topics/`` prefix. - app: An App instance (optional). 
- - Returns: - TopicManagementResponse: A ``TopicManagementResponse`` instance. - - Raises: - FirebaseError: If an error occurs while communicating with instance ID service. - ValueError: If the input arguments are invalid. - """ - ... - -def unsubscribe_from_topic(tokens, topic, app=...): - """Unsubscribes a list of registration tokens from an FCM topic. - - Args: - tokens: A non-empty list of device registration tokens. List may not have more than 1000 - elements. - topic: Name of the topic to unsubscribe from. May contain the ``/topics/`` prefix. - app: An App instance (optional). - - Returns: - TopicManagementResponse: A ``TopicManagementResponse`` instance. - - Raises: - FirebaseError: If an error occurs while communicating with instance ID service. - ValueError: If the input arguments are invalid. - """ - ... - -class ErrorInfo: - """An error encountered when performing a topic management operation.""" - def __init__(self, index, reason) -> None: - ... - - @property - def index(self): # -> Any: - """Index of the registration token to which this error is related to.""" - ... - - @property - def reason(self): # -> Any: - """String describing the nature of the error.""" - ... - - - -class TopicManagementResponse: - """The response received from a topic management operation.""" - def __init__(self, resp) -> None: - ... - - @property - def success_count(self): # -> int: - """Number of tokens that were successfully subscribed or unsubscribed.""" - ... - - @property - def failure_count(self): # -> int: - """Number of tokens that could not be subscribed or unsubscribed due to errors.""" - ... - - @property - def errors(self): # -> list[Any]: - """A list of ``messaging.ErrorInfo`` objects (possibly empty).""" - ... - - - -class BatchResponse: - """The response received from a batch request to the FCM API.""" - def __init__(self, responses) -> None: - ... 
- - @property - def responses(self): # -> Any: - """A list of ``messaging.SendResponse`` objects (possibly empty).""" - ... - - @property - def success_count(self): # -> int: - ... - - @property - def failure_count(self): # -> int: - ... - - - -class SendResponse: - """The response received from an individual batched request to the FCM API.""" - def __init__(self, resp, exception) -> None: - ... - - @property - def message_id(self): # -> None: - """A message ID string that uniquely identifies the message.""" - ... - - @property - def success(self): # -> bool: - """A boolean indicating if the request was successful.""" - ... - - @property - def exception(self): # -> Any: - """A ``FirebaseError`` if an error occurs while sending the message to the FCM service.""" - ... - - - -class _MessagingService: - """Service class that implements Firebase Cloud Messaging (FCM) functionality.""" - FCM_URL = ... - FCM_BATCH_URL = ... - IID_URL = ... - IID_HEADERS = ... - JSON_ENCODER = ... - FCM_ERROR_TYPES = ... - def __init__(self, app) -> None: - ... - - @classmethod - def encode_message(cls, message): # -> Any | dict[Any, Any]: - ... - - def send(self, message, dry_run=...): - """Sends the given message to FCM via the FCM v1 API.""" - ... - - def send_each(self, messages, dry_run=...): # -> BatchResponse: - """Sends the given messages to FCM via the FCM v1 API.""" - ... - - def send_all(self, messages, dry_run=...): # -> BatchResponse: - """Sends the given messages to FCM via the batch API.""" - ... - - def make_topic_management_request(self, tokens, topic, operation): # -> TopicManagementResponse: - """Invokes the IID service for topic management functionality.""" - ... - - - diff --git a/typings/firebase_admin/ml.pyi b/typings/firebase_admin/ml.pyi deleted file mode 100644 index f533beb..0000000 --- a/typings/firebase_admin/ml.pyi +++ /dev/null @@ -1,529 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -"""Firebase ML module. 
- -This module contains functions for creating, updating, getting, listing, -deleting, publishing and unpublishing Firebase ML models. -""" -_GCS_ENABLED = ... -_TF_ENABLED = ... -_ML_ATTRIBUTE = ... -_MAX_PAGE_SIZE = ... -_MODEL_ID_PATTERN = ... -_DISPLAY_NAME_PATTERN = ... -_TAG_PATTERN = ... -_GCS_TFLITE_URI_PATTERN = ... -_AUTO_ML_MODEL_PATTERN = ... -_RESOURCE_NAME_PATTERN = ... -_OPERATION_NAME_PATTERN = ... -def create_model(model, app=...): # -> Model: - """Creates a model in the current Firebase project. - - Args: - model: An ml.Model to create. - app: A Firebase app instance (or None to use the default app). - - Returns: - Model: The model that was created in Firebase ML. - """ - ... - -def update_model(model, app=...): # -> Model: - """Updates a model's metadata or model file. - - Args: - model: The ml.Model to update. - app: A Firebase app instance (or None to use the default app). - - Returns: - Model: The updated model. - """ - ... - -def publish_model(model_id, app=...): # -> Model: - """Publishes a Firebase ML model. - - A published model can be downloaded to client apps. - - Args: - model_id: The id of the model to publish. - app: A Firebase app instance (or None to use the default app). - - Returns: - Model: The published model. - """ - ... - -def unpublish_model(model_id, app=...): # -> Model: - """Unpublishes a Firebase ML model. - - Args: - model_id: The id of the model to unpublish. - app: A Firebase app instance (or None to use the default app). - - Returns: - Model: The unpublished model. - """ - ... - -def get_model(model_id, app=...): # -> Model: - """Gets the model specified by the given ID. - - Args: - model_id: The id of the model to get. - app: A Firebase app instance (or None to use the default app). - - Returns: - Model: The requested model. - """ - ... - -def list_models(list_filter=..., page_size=..., page_token=..., app=...): # -> ListModelsPage: - """Lists the current project's models. 
- - Args: - list_filter: a list filter string such as ``tags:'tag_1'``. None will return all models. - page_size: A number between 1 and 100 inclusive that specifies the maximum - number of models to return per page. None for default. - page_token: A next page token returned from a previous page of results. None - for first page of results. - app: A Firebase app instance (or None to use the default app). - - Returns: - ListModelsPage: A (filtered) list of models. - """ - ... - -def delete_model(model_id, app=...): # -> None: - """Deletes a model from the current project. - - Args: - model_id: The id of the model you wish to delete. - app: A Firebase app instance (or None to use the default app). - """ - ... - -class Model: - """A Firebase ML Model object. - - Args: - display_name: The display name of your model - used to identify your model in code. - tags: Optional list of strings associated with your model. Can be used in list queries. - model_format: A subclass of ModelFormat. (e.g. TFLiteFormat) Specifies the model details. - """ - def __init__(self, display_name=..., tags=..., model_format=...) -> None: - ... - - @classmethod - def from_dict(cls, data, app=...): # -> Model: - """Create an instance of the object from a dict.""" - ... - - def __eq__(self, other) -> bool: - ... - - def __ne__(self, other) -> bool: - ... - - @property - def model_id(self): # -> str | Any | None: - """The model's ID, unique to the project.""" - ... - - @property - def display_name(self): # -> None: - """The model's display name, used to refer to the model in code and in - the Firebase console.""" - ... - - @display_name.setter - def display_name(self, display_name): # -> Self: - ... - - @property - def create_time(self): # -> int | None: - """The time the model was created.""" - ... - - @property - def update_time(self): # -> int | None: - """The time the model was last updated.""" - ... - - @property - def validation_error(self): - """Validation error message.""" - ... 
- - @property - def published(self): # -> bool: - """True if the model is published and available for clients to - download.""" - ... - - @property - def etag(self): # -> None: - """The entity tag (ETag) of the model resource.""" - ... - - @property - def model_hash(self): # -> None: - """SHA256 hash of the model binary.""" - ... - - @property - def tags(self): # -> None: - """Tag strings, used for filtering query results.""" - ... - - @tags.setter - def tags(self, tags): # -> Self: - ... - - @property - def locked(self): # -> bool: - """True if the Model object is locked by an active operation.""" - ... - - def wait_for_unlocked(self, max_time_seconds=...): # -> None: - """Waits for the model to be unlocked. (All active operations complete) - - Args: - max_time_seconds: The maximum number of seconds to wait for the model to unlock. - (None for no limit) - - Raises: - exceptions.DeadlineExceeded: If max_time_seconds passed and the model is still locked. - """ - ... - - @property - def model_format(self): # -> None: - """The model's ``ModelFormat`` object, which represents the model's - format and storage location.""" - ... - - @model_format.setter - def model_format(self, model_format): # -> Self: - ... - - def as_dict(self, for_upload=...): # -> dict[Any, Any]: - """Returns a serializable representation of the object.""" - ... - - - -class ModelFormat: - """Abstract base class representing a Model Format such as TFLite.""" - def as_dict(self, for_upload=...): - """Returns a serializable representation of the object.""" - ... - - - -class TFLiteFormat(ModelFormat): - """Model format representing a TFLite model. - - Args: - model_source: A TFLiteModelSource sub class. Specifies the details of the model source. - """ - def __init__(self, model_source=...) -> None: - ... - - @classmethod - def from_dict(cls, data): # -> TFLiteFormat: - """Create an instance of the object from a dict.""" - ... - - def __eq__(self, other) -> bool: - ... 
- - def __ne__(self, other) -> bool: - ... - - @property - def model_source(self): # -> TFLiteModelSource | None: - """The TF Lite model's location.""" - ... - - @model_source.setter - def model_source(self, model_source): # -> None: - ... - - @property - def size_bytes(self): # -> None: - """The size in bytes of the TF Lite model.""" - ... - - def as_dict(self, for_upload=...): # -> dict[str, dict[Any, Any]]: - """Returns a serializable representation of the object.""" - ... - - - -class TFLiteModelSource: - """Abstract base class representing a model source for TFLite format models.""" - def as_dict(self, for_upload=...): - """Returns a serializable representation of the object.""" - ... - - - -class _CloudStorageClient: - """Cloud Storage helper class""" - GCS_URI = ... - BLOB_NAME = ... - @staticmethod - def upload(bucket_name, model_file_name, app): # -> str: - """Upload a model file to the specified Storage bucket.""" - ... - - @staticmethod - def sign_uri(gcs_tflite_uri, app): # -> str: - """Makes the gcs_tflite_uri readable for GET for 10 minutes via signed_uri.""" - ... - - - -class TFLiteGCSModelSource(TFLiteModelSource): - """TFLite model source representing a tflite model file stored in GCS.""" - _STORAGE_CLIENT = ... - def __init__(self, gcs_tflite_uri, app=...) -> None: - ... - - def __eq__(self, other) -> bool: - ... - - def __ne__(self, other) -> bool: - ... - - @classmethod - def from_tflite_model_file(cls, model_file_name, bucket_name=..., app=...): # -> TFLiteGCSModelSource: - """Uploads the model file to an existing Google Cloud Storage bucket. - - Args: - model_file_name: The name of the model file. - bucket_name: The name of an existing bucket. None to use the default bucket configured - in the app. - app: A Firebase app instance (or None to use the default app). - - Returns: - TFLiteGCSModelSource: The source created from the model_file - - Raises: - ImportError: If the Cloud Storage Library has not been installed. - """ - ... 
- - @classmethod - def from_saved_model(cls, saved_model_dir, model_file_name=..., bucket_name=..., app=...): # -> TFLiteGCSModelSource: - """Creates a Tensor Flow Lite model from the saved model, and uploads the model to GCS. - - Args: - saved_model_dir: The saved model directory. - model_file_name: The name that the tflite model will be saved as in Cloud Storage. - bucket_name: The name of an existing bucket. None to use the default bucket configured - in the app. - app: Optional. A Firebase app instance (or None to use the default app) - - Returns: - TFLiteGCSModelSource: The source created from the saved_model_dir - - Raises: - ImportError: If the Tensor Flow or Cloud Storage Libraries have not been installed. - """ - ... - - @classmethod - def from_keras_model(cls, keras_model, model_file_name=..., bucket_name=..., app=...): # -> TFLiteGCSModelSource: - """Creates a Tensor Flow Lite model from the keras model, and uploads the model to GCS. - - Args: - keras_model: A tf.keras model. - model_file_name: The name that the tflite model will be saved as in Cloud Storage. - bucket_name: The name of an existing bucket. None to use the default bucket configured - in the app. - app: Optional. A Firebase app instance (or None to use the default app) - - Returns: - TFLiteGCSModelSource: The source created from the keras_model - - Raises: - ImportError: If the Tensor Flow or Cloud Storage Libraries have not been installed. - """ - ... - - @property - def gcs_tflite_uri(self): # -> Any: - """URI of the model file in Cloud Storage.""" - ... - - @gcs_tflite_uri.setter - def gcs_tflite_uri(self, gcs_tflite_uri): # -> None: - ... - - def as_dict(self, for_upload=...): # -> dict[str, str] | dict[str, Any]: - """Returns a serializable representation of the object.""" - ... - - - -class TFLiteAutoMlSource(TFLiteModelSource): - """TFLite model source representing a tflite model created with AutoML. 
- - AutoML model support is deprecated and will be removed in the next major version. - """ - def __init__(self, auto_ml_model, app=...) -> None: - ... - - def __eq__(self, other) -> bool: - ... - - def __ne__(self, other) -> bool: - ... - - @property - def auto_ml_model(self): - """Resource name of the model, created by the AutoML API or Cloud console.""" - ... - - @auto_ml_model.setter - def auto_ml_model(self, auto_ml_model): # -> None: - ... - - def as_dict(self, for_upload=...): # -> dict[str, Any]: - """Returns a serializable representation of the object.""" - ... - - - -class ListModelsPage: - """Represents a page of models in a Firebase project. - - Provides methods for traversing the models included in this page, as well as - retrieving subsequent pages of models. The iterator returned by - ``iterate_all()`` can be used to iterate through all the models in the - Firebase project starting from this page. - """ - def __init__(self, list_models_func, list_filter, page_size, page_token, app) -> None: - ... - - @property - def models(self): # -> list[Model]: - """A list of Models from this page.""" - ... - - @property - def list_filter(self): # -> Any: - """The filter string used to filter the models.""" - ... - - @property - def next_page_token(self): - """Token identifying the next page of results.""" - ... - - @property - def has_next_page(self): # -> bool: - """True if more pages are available.""" - ... - - def get_next_page(self): # -> ListModelsPage | None: - """Retrieves the next page of models if available. - - Returns: - ListModelsPage: Next page of models, or None if this is the last page. - """ - ... - - def iterate_all(self): # -> _ModelIterator: - """Retrieves an iterator for Models. - - Returned iterator will iterate through all the models in the Firebase - project starting from this page. The iterator will never buffer more than - one page of models in memory at a time. - - Returns: - iterator: An iterator of Model instances. - """ - ... 
- - - -class _ModelIterator: - """An iterator that allows iterating over models, one at a time. - - This implementation loads a page of models into memory, and iterates on them. - When the whole page has been traversed, it loads another page. This class - never keeps more than one page of entries in memory. - """ - def __init__(self, current_page) -> None: - ... - - def next(self): # -> Model: - ... - - def __next__(self): # -> Model: - ... - - def __iter__(self): # -> Self: - ... - - - -class _MLService: - """Firebase ML service.""" - PROJECT_URL = ... - OPERATION_URL = ... - POLL_EXPONENTIAL_BACKOFF_FACTOR = ... - POLL_BASE_WAIT_TIME_SECONDS = ... - def __init__(self, app) -> None: - ... - - def get_operation(self, op_name): - ... - - def handle_operation(self, operation, wait_for_operation=..., max_time_seconds=...): # -> dict[Any, Any] | None: - """Handles long running operations. - - Args: - operation: The operation to handle. - wait_for_operation: Should we allow polling for the operation to complete. - If no polling is requested, a locked model will be returned instead. - max_time_seconds: The maximum seconds to try polling for operation complete. - (None for no limit) - - Returns: - dict: A dictionary of the returned model properties. - - Raises: - TypeError: if the operation is not a dictionary. - ValueError: If the operation is malformed. - UnknownError: If the server responds with an unexpected response. - err: If the operation exceeds polling attempts or stop_time - """ - ... - - def create_model(self, model): # -> dict[Any, Any] | None: - ... - - def update_model(self, model, update_mask=...): # -> dict[Any, Any] | None: - ... - - def set_published(self, model_id, publish): # -> dict[Any, Any] | None: - ... - - def get_model(self, model_id): - ... - - def list_models(self, list_filter, page_size, page_token): - """ lists Firebase ML models.""" - ... - - def delete_model(self, model_id): # -> None: - ... 
- - - diff --git a/typings/firebase_admin/project_management.pyi b/typings/firebase_admin/project_management.pyi deleted file mode 100644 index 1901a7d..0000000 --- a/typings/firebase_admin/project_management.pyi +++ /dev/null @@ -1,422 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -"""Firebase Project Management module. - -This module enables management of resources in Firebase projects, such as Android and iOS apps. -""" -_PROJECT_MANAGEMENT_ATTRIBUTE = ... -def android_app(app_id, app=...): # -> AndroidApp: - """Obtains a reference to an Android app in the associated Firebase project. - - Args: - app_id: The app ID that identifies this Android app. - app: An App instance (optional). - - Returns: - AndroidApp: An ``AndroidApp`` instance. - """ - ... - -def ios_app(app_id, app=...): # -> IOSApp: - """Obtains a reference to an iOS app in the associated Firebase project. - - Args: - app_id: The app ID that identifies this iOS app. - app: An App instance (optional). - - Returns: - IOSApp: An ``IOSApp`` instance. - """ - ... - -def list_android_apps(app=...): - """Lists all Android apps in the associated Firebase project. - - Args: - app: An App instance (optional). - - Returns: - list: a list of ``AndroidApp`` instances referring to each Android app in the Firebase - project. - """ - ... - -def list_ios_apps(app=...): - """Lists all iOS apps in the associated Firebase project. - - Args: - app: An App instance (optional). - - Returns: - list: a list of ``IOSApp`` instances referring to each iOS app in the Firebase project. - """ - ... - -def create_android_app(package_name, display_name=..., app=...): - """Creates a new Android app in the associated Firebase project. - - Args: - package_name: The package name of the Android app to be created. - display_name: A nickname for this Android app (optional). - app: An App instance (optional). - - Returns: - AndroidApp: An ``AndroidApp`` instance that is a reference to the newly created app. - """ - ... 
- -def create_ios_app(bundle_id, display_name=..., app=...): - """Creates a new iOS app in the associated Firebase project. - - Args: - bundle_id: The bundle ID of the iOS app to be created. - display_name: A nickname for this iOS app (optional). - app: An App instance (optional). - - Returns: - IOSApp: An ``IOSApp`` instance that is a reference to the newly created app. - """ - ... - -class AndroidApp: - """A reference to an Android app within a Firebase project. - - Note: Unless otherwise specified, all methods defined in this class make an RPC. - - Please use the module-level function ``android_app(app_id)`` to obtain instances of this class - instead of instantiating it directly. - """ - def __init__(self, app_id, service) -> None: - ... - - @property - def app_id(self): # -> Any: - """Returns the app ID of the Android app to which this instance refers. - - Note: This method does not make an RPC. - - Returns: - string: The app ID of the Android app to which this instance refers. - """ - ... - - def get_metadata(self): - """Retrieves detailed information about this Android app. - - Returns: - AndroidAppMetadata: An ``AndroidAppMetadata`` instance. - - Raises: - FirebaseError: If an error occurs while communicating with the Firebase Project - Management Service. - """ - ... - - def set_display_name(self, new_display_name): - """Updates the display name attribute of this Android app to the one given. - - Args: - new_display_name: The new display name for this Android app. - - Returns: - NoneType: None. - - Raises: - FirebaseError: If an error occurs while communicating with the Firebase Project - Management Service. - """ - ... - - def get_config(self): - """Retrieves the configuration artifact associated with this Android app.""" - ... - - def get_sha_certificates(self): - """Retrieves the entire list of SHA certificates associated with this Android app. - - Returns: - list: A list of ``SHACertificate`` instances. 
- - Raises: - FirebaseError: If an error occurs while communicating with the Firebase Project - Management Service. - """ - ... - - def add_sha_certificate(self, certificate_to_add): - """Adds a SHA certificate to this Android app. - - Args: - certificate_to_add: The SHA certificate to add. - - Returns: - NoneType: None. - - Raises: - FirebaseError: If an error occurs while communicating with the Firebase Project - Management Service. (For example, if the certificate_to_add already exists.) - """ - ... - - def delete_sha_certificate(self, certificate_to_delete): - """Removes a SHA certificate from this Android app. - - Args: - certificate_to_delete: The SHA certificate to delete. - - Returns: - NoneType: None. - - Raises: - FirebaseError: If an error occurs while communicating with the Firebase Project - Management Service. (For example, if the certificate_to_delete is not found.) - """ - ... - - - -class IOSApp: - """A reference to an iOS app within a Firebase project. - - Note: Unless otherwise specified, all methods defined in this class make an RPC. - - Please use the module-level function ``ios_app(app_id)`` to obtain instances of this class - instead of instantiating it directly. - """ - def __init__(self, app_id, service) -> None: - ... - - @property - def app_id(self): # -> Any: - """Returns the app ID of the iOS app to which this instance refers. - - Note: This method does not make an RPC. - - Returns: - string: The app ID of the iOS app to which this instance refers. - """ - ... - - def get_metadata(self): - """Retrieves detailed information about this iOS app. - - Returns: - IOSAppMetadata: An ``IOSAppMetadata`` instance. - - Raises: - FirebaseError: If an error occurs while communicating with the Firebase Project - Management Service. - """ - ... - - def set_display_name(self, new_display_name): - """Updates the display name attribute of this iOS app to the one given. - - Args: - new_display_name: The new display name for this iOS app. 
- - Returns: - NoneType: None. - - Raises: - FirebaseError: If an error occurs while communicating with the Firebase Project - Management Service. - """ - ... - - def get_config(self): - """Retrieves the configuration artifact associated with this iOS app.""" - ... - - - -class _AppMetadata: - """Detailed information about a Firebase Android or iOS app.""" - def __init__(self, name, app_id, display_name, project_id) -> None: - ... - - @property - def app_id(self): # -> str: - """The globally unique, Firebase-assigned identifier of this Android or iOS app. - - This ID is unique even across apps of different platforms. - """ - ... - - @property - def display_name(self): # -> str: - """The user-assigned display name of this Android or iOS app. - - Note that the display name can be None if it has never been set by the user.""" - ... - - @property - def project_id(self): # -> str: - """The permanent, globally unique, user-assigned ID of the parent Firebase project.""" - ... - - def __eq__(self, other) -> bool: - ... - - - -class AndroidAppMetadata(_AppMetadata): - """Android-specific information about an Android Firebase app.""" - def __init__(self, package_name, name, app_id, display_name, project_id) -> None: - """Clients should not instantiate this class directly.""" - ... - - @property - def package_name(self): # -> str: - """The canonical package name of this Android app as it would appear in the Play Store.""" - ... - - def __eq__(self, other) -> bool: - ... - - def __ne__(self, other) -> bool: - ... - - def __hash__(self) -> int: - ... - - - -class IOSAppMetadata(_AppMetadata): - """iOS-specific information about an iOS Firebase app.""" - def __init__(self, bundle_id, name, app_id, display_name, project_id) -> None: - """Clients should not instantiate this class directly.""" - ... - - @property - def bundle_id(self): # -> str: - """The canonical bundle ID of this iOS app as it would appear in the iOS AppStore.""" - ... - - def __eq__(self, other) -> bool: - ... 
- - def __ne__(self, other) -> bool: - ... - - def __hash__(self) -> int: - ... - - - -class SHACertificate: - """Represents a SHA-1 or SHA-256 certificate associated with an Android app.""" - SHA_1 = ... - SHA_256 = ... - _SHA_1_RE = ... - _SHA_256_RE = ... - def __init__(self, sha_hash, name=...) -> None: - """Creates a new SHACertificate instance. - - Args: - sha_hash: A string; the certificate hash for the Android app. - name: The fully qualified resource name of this certificate; note that this field should - be omitted if the instance is being constructed for the purpose of calling the - add_sha_certificate() method on an ``AndroidApp``. - - Raises: - ValueError: If the sha_hash is not a valid SHA-1 or SHA-256 certificate hash. - """ - ... - - @property - def name(self): # -> None: - """Returns the fully qualified resource name of this certificate, if known. - - Returns: - string: The fully qualified resource name of this certificate, if known; otherwise, the - empty string. - """ - ... - - @property - def sha_hash(self): - """Returns the certificate hash. - - Returns: - string: The certificate hash. - """ - ... - - @property - def cert_type(self): # -> str: - """Returns the type of the SHA certificate encoded in the hash. - - Returns: - string: One of 'SHA_1' or 'SHA_256'. - """ - ... - - def __eq__(self, other) -> bool: - ... - - def __ne__(self, other) -> bool: - ... - - def __hash__(self) -> int: - ... - - - -class _ProjectManagementService: - """Provides methods for interacting with the Firebase Project Management Service.""" - BASE_URL = ... - MAXIMUM_LIST_APPS_PAGE_SIZE = ... - MAXIMUM_POLLING_ATTEMPTS = ... - POLL_BASE_WAIT_TIME_SECONDS = ... - POLL_EXPONENTIAL_BACKOFF_FACTOR = ... - ANDROID_APPS_RESOURCE_NAME = ... - ANDROID_APP_IDENTIFIER_NAME = ... - IOS_APPS_RESOURCE_NAME = ... - IOS_APP_IDENTIFIER_NAME = ... - def __init__(self, app) -> None: - ... - - def get_android_app_metadata(self, app_id): # -> AndroidAppMetadata: - ... 
- - def get_ios_app_metadata(self, app_id): # -> IOSAppMetadata: - ... - - def set_android_app_display_name(self, app_id, new_display_name): # -> None: - ... - - def set_ios_app_display_name(self, app_id, new_display_name): # -> None: - ... - - def list_android_apps(self): # -> list[Any]: - ... - - def list_ios_apps(self): # -> list[Any]: - ... - - def create_android_app(self, package_name, display_name=...): # -> AndroidApp: - ... - - def create_ios_app(self, bundle_id, display_name=...): # -> IOSApp: - ... - - def get_android_app_config(self, app_id): # -> str: - ... - - def get_ios_app_config(self, app_id): # -> str: - ... - - def get_sha_certificates(self, app_id): # -> list[SHACertificate]: - ... - - def add_sha_certificate(self, app_id, certificate_to_add): # -> None: - ... - - def delete_sha_certificate(self, certificate_to_delete): # -> None: - ... - - - diff --git a/typings/firebase_admin/remote_config.pyi b/typings/firebase_admin/remote_config.pyi deleted file mode 100644 index 65615e8..0000000 --- a/typings/firebase_admin/remote_config.pyi +++ /dev/null @@ -1,340 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from typing import Dict, Literal, Optional, Union -from enum import Enum -from firebase_admin import App - -"""Firebase Remote Config Module. -This module has required APIs for the clients to use Firebase Remote Config with python. -""" -logger = ... -_REMOTE_CONFIG_ATTRIBUTE = ... -MAX_CONDITION_RECURSION_DEPTH = ... -ValueSource = Literal['default', 'remote', 'static'] -class PercentConditionOperator(Enum): - """Enum representing the available operators for percent conditions. - """ - LESS_OR_EQUAL = ... - GREATER_THAN = ... - BETWEEN = ... - UNKNOWN = ... - - -class CustomSignalOperator(Enum): - """Enum representing the available operators for custom signal conditions. - """ - STRING_CONTAINS = ... - STRING_DOES_NOT_CONTAIN = ... - STRING_EXACTLY_MATCHES = ... - STRING_CONTAINS_REGEX = ... - NUMERIC_LESS_THAN = ... 
- NUMERIC_LESS_EQUAL = ... - NUMERIC_EQUAL = ... - NUMERIC_NOT_EQUAL = ... - NUMERIC_GREATER_THAN = ... - NUMERIC_GREATER_EQUAL = ... - SEMANTIC_VERSION_LESS_THAN = ... - SEMANTIC_VERSION_LESS_EQUAL = ... - SEMANTIC_VERSION_EQUAL = ... - SEMANTIC_VERSION_NOT_EQUAL = ... - SEMANTIC_VERSION_GREATER_THAN = ... - SEMANTIC_VERSION_GREATER_EQUAL = ... - UNKNOWN = ... - - -class _ServerTemplateData: - """Parses, validates and encapsulates template data and metadata.""" - def __init__(self, template_data) -> None: - """Initializes a new ServerTemplateData instance. - - Args: - template_data: The data to be parsed for getting the parameters and conditions. - - Raises: - ValueError: If the template data is not valid. - """ - ... - - @property - def parameters(self): # -> dict[Any, Any]: - ... - - @property - def etag(self): # -> str: - ... - - @property - def version(self): # -> str: - ... - - @property - def conditions(self): # -> list[Any]: - ... - - @property - def template_data_json(self): # -> str: - ... - - - -class ServerTemplate: - """Represents a Server Template with implementations for loading and evaluating the template.""" - def __init__(self, app: App = ..., default_config: Optional[Dict[str, str]] = ...) -> None: - """Initializes a ServerTemplate instance. - - Args: - app: App instance to be used. This is optional and the default app instance will - be used if not present. - default_config: The default config to be used in the evaluated config. - """ - ... - - async def load(self): # -> None: - """Fetches the server template and caches the data.""" - ... - - def evaluate(self, context: Optional[Dict[str, Union[str, int]]] = ...) -> ServerConfig: - """Evaluates the cached server template to produce a ServerConfig. - - Args: - context: A dictionary of values to use for evaluating conditions. - - Returns: - A ServerConfig object. - Raises: - ValueError: If the input arguments are invalid. - """ - ... 
- - def set(self, template_data_json: str): # -> None: - """Updates the cache to store the given template is of type ServerTemplateData. - - Args: - template_data_json: A json string representing ServerTemplateData to be cached. - """ - ... - - def to_json(self): # -> str: - """Provides the server template in a JSON format to be used for initialization later.""" - ... - - - -class ServerConfig: - """Represents a Remote Config Server Side Config.""" - def __init__(self, config_values) -> None: - ... - - def get_boolean(self, key): - """Returns the value as a boolean.""" - ... - - def get_string(self, key): - """Returns the value as a string.""" - ... - - def get_int(self, key): - """Returns the value as an integer.""" - ... - - def get_float(self, key): - """Returns the value as a float.""" - ... - - def get_value_source(self, key): - """Returns the source of the value.""" - ... - - - -class _RemoteConfigService: - """Internal class that facilitates sending requests to the Firebase Remote - Config backend API. - """ - def __init__(self, app) -> None: - """Initialize a JsonHttpClient with necessary inputs. - - Args: - app: App instance to be used for fetching app specific details required - for initializing the http client. - """ - ... - - async def get_server_template(self): # -> _ServerTemplateData: - """Requests for a server template and converts the response to an instance of - ServerTemplateData for storing the template parameters and conditions.""" - ... - - - -class _ConditionEvaluator: - """Internal class that facilitates sending requests to the Firebase Remote - Config backend API.""" - def __init__(self, conditions, parameters, context, config_values) -> None: - ... - - def evaluate(self): # -> Any: - """Internal function that evaluates the cached server template to produce - a ServerConfig""" - ... - - def evaluate_conditions(self, conditions, context) -> Dict[str, bool]: - """Evaluates a list of conditions and returns a dictionary of results. 
- - Args: - conditions: A list of NamedCondition objects. - context: An EvaluationContext object. - - Returns: - A dictionary that maps condition names to boolean evaluation results. - """ - ... - - def evaluate_condition(self, condition, context, nesting_level: int = ...) -> bool: - """Recursively evaluates a condition. - - Args: - condition: The condition to evaluate. - context: An EvaluationContext object. - nesting_level: The current recursion depth. - - Returns: - The boolean result of the condition evaluation. - """ - ... - - def evaluate_or_condition(self, or_condition, context, nesting_level: int = ...) -> bool: - """Evaluates an OR condition. - - Args: - or_condition: The OR condition to evaluate. - context: An EvaluationContext object. - nesting_level: The current recursion depth. - - Returns: - True if any of the subconditions are true, False otherwise. - """ - ... - - def evaluate_and_condition(self, and_condition, context, nesting_level: int = ...) -> bool: - """Evaluates an AND condition. - - Args: - and_condition: The AND condition to evaluate. - context: An EvaluationContext object. - nesting_level: The current recursion depth. - - Returns: - True if all of the subconditions are met; False otherwise. - """ - ... - - def evaluate_percent_condition(self, percent_condition, context) -> bool: - """Evaluates a percent condition. - - Args: - percent_condition: The percent condition to evaluate. - context: An EvaluationContext object. - - Returns: - True if the condition is met, False otherwise. - """ - ... - - def hash_seeded_randomization_id(self, seeded_randomization_id: str) -> int: - """Hashes a seeded randomization ID. - - Args: - seeded_randomization_id: The seeded randomization ID to hash. - - Returns: - The hashed value. - """ - ... - - def evaluate_custom_signal_condition(self, custom_signal_condition, context) -> bool: - """Evaluates a custom signal condition. - - Args: - custom_signal_condition: The custom signal condition to evaluate. 
- context: An EvaluationContext object. - - Returns: - True if the condition is met, False otherwise. - """ - ... - - - -async def get_server_template(app: App = ..., default_config: Optional[Dict[str, str]] = ...): # -> ServerTemplate: - """Initializes a new ServerTemplate instance and fetches the server template. - - Args: - app: App instance to be used. This is optional and the default app instance will - be used if not present. - default_config: The default config to be used in the evaluated config. - - Returns: - ServerTemplate: An object having the cached server template to be used for evaluation. - """ - ... - -def init_server_template(app: App = ..., default_config: Optional[Dict[str, str]] = ..., template_data_json: Optional[str] = ...): # -> ServerTemplate: - """Initializes a new ServerTemplate instance. - - Args: - app: App instance to be used. This is optional and the default app instance will - be used if not present. - default_config: The default config to be used in the evaluated config. - template_data_json: An optional template data JSON to be set on initialization. - - Returns: - ServerTemplate: A new ServerTemplate instance initialized with an optional - template and config. - """ - ... - -class _Value: - """Represents a value fetched from Remote Config. - """ - DEFAULT_VALUE_FOR_BOOLEAN = ... - DEFAULT_VALUE_FOR_STRING = ... - DEFAULT_VALUE_FOR_INTEGER = ... - DEFAULT_VALUE_FOR_FLOAT_NUMBER = ... - BOOLEAN_TRUTHY_VALUES = ... - def __init__(self, source: ValueSource, value: str = ...) -> None: - """Initializes a Value instance. - - Args: - source: The source of the value (e.g., 'default', 'remote', 'static'). - "static" indicates the value was defined by a static constant. - "default" indicates the value was defined by default config. - "remote" indicates the value was defined by config produced by evaluating a template. - value: The string value. - """ - ... - - def as_string(self) -> str: - """Returns the value as a string.""" - ... 
- - def as_boolean(self) -> bool: - """Returns the value as a boolean.""" - ... - - def as_int(self) -> float: - """Returns the value as a number.""" - ... - - def as_float(self) -> float: - """Returns the value as a number.""" - ... - - def get_source(self) -> ValueSource: - """Returns the source of the value.""" - ... - - - diff --git a/typings/firebase_admin/storage.pyi b/typings/firebase_admin/storage.pyi deleted file mode 100644 index d29ff43..0000000 --- a/typings/firebase_admin/storage.pyi +++ /dev/null @@ -1,48 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from google.cloud import storage - -"""Firebase Cloud Storage module. - -This module contains utilities for accessing Google Cloud Storage buckets associated with -Firebase apps. This requires the ``google-cloud-storage`` Python module. -""" -_STORAGE_ATTRIBUTE = ... -def bucket(name=..., app=...) -> storage.Bucket: - """Returns a handle to a Google Cloud Storage bucket. - - If the name argument is not provided, uses the 'storageBucket' option specified when - initializing the App. If that is also not available raises an error. This function - does not make any RPC calls. - - Args: - name: Name of a cloud storage bucket (optional). - app: An App instance (optional). - - Returns: - google.cloud.storage.Bucket: A handle to the specified bucket. - - Raises: - ValueError: If a bucket name is not specified either via options or method arguments, - or if the specified bucket name is not a valid string. - """ - ... - -class _StorageClient: - """Holds a Google Cloud Storage client instance.""" - STORAGE_HEADERS = ... - def __init__(self, credentials, project, default_bucket) -> None: - ... - - @classmethod - def from_app(cls, app): # -> _StorageClient: - ... - - def bucket(self, name=...): # -> Bucket: - """Returns a handle to the specified Cloud Storage Bucket.""" - ... 
- - - diff --git a/typings/firebase_admin/tenant_mgt.pyi b/typings/firebase_admin/tenant_mgt.pyi deleted file mode 100644 index 1f8f4d6..0000000 --- a/typings/firebase_admin/tenant_mgt.pyi +++ /dev/null @@ -1,261 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from firebase_admin import _auth_utils - -"""Firebase tenant management module. - -This module contains functions for creating and configuring authentication tenants within a -Google Cloud Identity Platform (GCIP) instance. -""" -_TENANT_MGT_ATTRIBUTE = ... -_MAX_LIST_TENANTS_RESULTS = ... -_DISPLAY_NAME_PATTERN = ... -__all__ = ['ListTenantsPage', 'Tenant', 'TenantIdMismatchError', 'TenantNotFoundError', 'auth_for_tenant', 'create_tenant', 'delete_tenant', 'get_tenant', 'list_tenants', 'update_tenant'] -TenantIdMismatchError = _auth_utils.TenantIdMismatchError -TenantNotFoundError = _auth_utils.TenantNotFoundError -def auth_for_tenant(tenant_id, app=...): - """Gets an Auth Client instance scoped to the given tenant ID. - - Args: - tenant_id: A tenant ID string. - app: An App instance (optional). - - Returns: - auth.Client: An ``auth.Client`` object. - - Raises: - ValueError: If the tenant ID is None, empty or not a string. - """ - ... - -def get_tenant(tenant_id, app=...): - """Gets the tenant corresponding to the given ``tenant_id``. - - Args: - tenant_id: A tenant ID string. - app: An App instance (optional). - - Returns: - Tenant: A tenant object. - - Raises: - ValueError: If the tenant ID is None, empty or not a string. - TenantNotFoundError: If no tenant exists by the given ID. - FirebaseError: If an error occurs while retrieving the tenant. - """ - ... - -def create_tenant(display_name, allow_password_sign_up=..., enable_email_link_sign_in=..., app=...): - """Creates a new tenant from the given options. - - Args: - display_name: Display name string for the new tenant. Must begin with a letter and contain - only letters, digits and hyphens. Length must be between 4 and 20. 
- allow_password_sign_up: A boolean indicating whether to enable or disable the email sign-in - provider (optional). - enable_email_link_sign_in: A boolean indicating whether to enable or disable email link - sign-in (optional). Disabling this makes the password required for email sign-in. - app: An App instance (optional). - - Returns: - Tenant: A tenant object. - - Raises: - ValueError: If any of the given arguments are invalid. - FirebaseError: If an error occurs while creating the tenant. - """ - ... - -def update_tenant(tenant_id, display_name=..., allow_password_sign_up=..., enable_email_link_sign_in=..., app=...): - """Updates an existing tenant with the given options. - - Args: - tenant_id: ID of the tenant to update. - display_name: Updated display name string for the tenant (optional). - allow_password_sign_up: A boolean indicating whether to enable or disable the email sign-in - provider. - enable_email_link_sign_in: A boolean indicating whether to enable or disable email link - sign-in. Disabling this makes the password required for email sign-in. - app: An App instance (optional). - - Returns: - Tenant: The updated tenant object. - - Raises: - ValueError: If any of the given arguments are invalid. - TenantNotFoundError: If no tenant exists by the given ID. - FirebaseError: If an error occurs while creating the tenant. - """ - ... - -def delete_tenant(tenant_id, app=...): # -> None: - """Deletes the tenant corresponding to the given ``tenant_id``. - - Args: - tenant_id: A tenant ID string. - app: An App instance (optional). - - Raises: - ValueError: If the tenant ID is None, empty or not a string. - TenantNotFoundError: If no tenant exists by the given ID. - FirebaseError: If an error occurs while retrieving the tenant. - """ - ... - -def list_tenants(page_token=..., max_results=..., app=...): # -> ListTenantsPage: - """Retrieves a page of tenants from a Firebase project. - - The ``page_token`` argument governs the starting point of the page. 
The ``max_results`` - argument governs the maximum number of tenants that may be included in the returned page. - This function never returns None. If there are no user accounts in the Firebase project, this - returns an empty page. - - Args: - page_token: A non-empty page token string, which indicates the starting point of the page - (optional). Defaults to ``None``, which will retrieve the first page of users. - max_results: A positive integer indicating the maximum number of users to include in the - returned page (optional). Defaults to 100, which is also the maximum number allowed. - app: An App instance (optional). - - Returns: - ListTenantsPage: A page of tenants. - - Raises: - ValueError: If ``max_results`` or ``page_token`` are invalid. - FirebaseError: If an error occurs while retrieving the user accounts. - """ - ... - -class Tenant: - """Represents a tenant in a multi-tenant application. - - Multi-tenancy support requires Google Cloud Identity Platform (GCIP). To learn more about - GCIP including pricing and features, see https://cloud.google.com/identity-platform. - - Before multi-tenancy can be used in a Google Cloud Identity Platform project, tenants must be - enabled in that project via the Cloud Console UI. A Tenant instance provides information - such as the display name, tenant identifier and email authentication configuration. - """ - def __init__(self, data) -> None: - ... - - @property - def tenant_id(self): - ... - - @property - def display_name(self): # -> None: - ... - - @property - def allow_password_sign_up(self): - ... - - @property - def enable_email_link_sign_in(self): - ... - - - -class _TenantManagementService: - """Firebase tenant management service.""" - TENANT_MGT_URL = ... - def __init__(self, app) -> None: - ... - - def auth_for_tenant(self, tenant_id): # -> Client: - """Gets an Auth Client instance scoped to the given tenant ID.""" - ... 
- - def get_tenant(self, tenant_id): # -> Tenant: - """Gets the tenant corresponding to the given ``tenant_id``.""" - ... - - def create_tenant(self, display_name, allow_password_sign_up=..., enable_email_link_sign_in=...): # -> Tenant: - """Creates a new tenant from the given parameters.""" - ... - - def update_tenant(self, tenant_id, display_name=..., allow_password_sign_up=..., enable_email_link_sign_in=...): # -> Tenant: - """Updates the specified tenant with the given parameters.""" - ... - - def delete_tenant(self, tenant_id): # -> None: - """Deletes the tenant corresponding to the given ``tenant_id``.""" - ... - - def list_tenants(self, page_token=..., max_results=...): - """Retrieves a batch of tenants.""" - ... - - - -class ListTenantsPage: - """Represents a page of tenants fetched from a Firebase project. - - Provides methods for traversing tenants included in this page, as well as retrieving - subsequent pages of tenants. The iterator returned by ``iterate_all()`` can be used to iterate - through all tenants in the Firebase project starting from this page. - """ - def __init__(self, download, page_token, max_results) -> None: - ... - - @property - def tenants(self): # -> list[Tenant]: - """A list of ``ExportedUserRecord`` instances available in this page.""" - ... - - @property - def next_page_token(self): - """Page token string for the next page (empty string indicates no more pages).""" - ... - - @property - def has_next_page(self): # -> bool: - """A boolean indicating whether more pages are available.""" - ... - - def get_next_page(self): # -> ListTenantsPage | None: - """Retrieves the next page of tenants, if available. - - Returns: - ListTenantsPage: Next page of tenants, or None if this is the last page. - """ - ... - - def iterate_all(self): # -> _TenantIterator: - """Retrieves an iterator for tenants. - - Returned iterator will iterate through all the tenants in the Firebase project - starting from this page. 
The iterator will never buffer more than one page of tenants - in memory at a time. - - Returns: - iterator: An iterator of Tenant instances. - """ - ... - - - -class _TenantIterator: - """An iterator that allows iterating over tenants. - - This implementation loads a page of tenants into memory, and iterates on them. When the whole - page has been traversed, it loads another page. This class never keeps more than one page - of entries in memory. - """ - def __init__(self, current_page) -> None: - ... - - def next(self): - ... - - def __next__(self): - ... - - def __iter__(self): # -> Self: - ... - - - diff --git a/typings/googleapiclient/__init__.pyi b/typings/googleapiclient/__init__.pyi deleted file mode 100644 index 2b096b9..0000000 --- a/typings/googleapiclient/__init__.pyi +++ /dev/null @@ -1,7 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -import logging -from logging import NullHandler - diff --git a/typings/googleapiclient/_auth.pyi b/typings/googleapiclient/_auth.pyi deleted file mode 100644 index 7d7cb17..0000000 --- a/typings/googleapiclient/_auth.pyi +++ /dev/null @@ -1,56 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -"""Helpers for authentication using oauth2client or google-auth.""" -HAS_GOOGLE_AUTH = ... -HAS_OAUTH2CLIENT = ... -def credentials_from_file(filename, scopes=..., quota_project_id=...): # -> Credentials | ServiceAccountCredentials: - """Returns credentials loaded from a file.""" - ... - -def default_credentials(scopes=..., quota_project_id=...): - """Returns Application Default Credentials.""" - ... - -def with_scopes(credentials, scopes): # -> Scoped: - """Scopes the credentials if necessary. - - Args: - credentials (Union[ - google.auth.credentials.Credentials, - oauth2client.client.Credentials]): The credentials to scope. - scopes (Sequence[str]): The list of scopes. - - Returns: - Union[google.auth.credentials.Credentials, - oauth2client.client.Credentials]: The scoped credentials. 
- """ - ... - -def authorized_http(credentials): # -> AuthorizedHttp: - """Returns an http client that is authorized with the given credentials. - - Args: - credentials (Union[ - google.auth.credentials.Credentials, - oauth2client.client.Credentials]): The credentials to use. - - Returns: - Union[httplib2.Http, google_auth_httplib2.AuthorizedHttp]: An - authorized http client. - """ - ... - -def refresh_credentials(credentials): - ... - -def apply_credentials(credentials, headers): - ... - -def is_valid(credentials): # -> bool: - ... - -def get_credentials_from_http(http): # -> None: - ... - diff --git a/typings/googleapiclient/_helpers.pyi b/typings/googleapiclient/_helpers.pyi deleted file mode 100644 index 2097e75..0000000 --- a/typings/googleapiclient/_helpers.pyi +++ /dev/null @@ -1,120 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -"""Helper functions for commonly used utilities.""" -logger = ... -POSITIONAL_WARNING = ... -POSITIONAL_EXCEPTION = ... -POSITIONAL_IGNORE = ... -POSITIONAL_SET = ... -positional_parameters_enforcement = ... -_SYM_LINK_MESSAGE = ... -_IS_DIR_MESSAGE = ... -_MISSING_FILE_MESSAGE = ... -def positional(max_positional_args): # -> Callable[..., _Wrapped[Callable[..., Any], Any, Callable[..., Any], Any]]: - """A decorator to declare that only the first N arguments may be positional. - - This decorator makes it easy to support Python 3 style keyword-only - parameters. For example, in Python 3 it is possible to write:: - - def fn(pos1, *, kwonly1=None, kwonly2=None): - ... - - All named parameters after ``*`` must be a keyword:: - - fn(10, 'kw1', 'kw2') # Raises exception. - fn(10, kwonly1='kw1') # Ok. - - Example - ^^^^^^^ - - To define a function like above, do:: - - @positional(1) - def fn(pos1, kwonly1=None, kwonly2=None): - ... - - If no default value is provided to a keyword argument, it becomes a - required keyword argument:: - - @positional(0) - def fn(required_kw): - ... 
- - This must be called with the keyword parameter:: - - fn() # Raises exception. - fn(10) # Raises exception. - fn(required_kw=10) # Ok. - - When defining instance or class methods always remember to account for - ``self`` and ``cls``:: - - class MyClass(object): - - @positional(2) - def my_method(self, pos1, kwonly1=None): - ... - - @classmethod - @positional(2) - def my_method(cls, pos1, kwonly1=None): - ... - - The positional decorator behavior is controlled by - ``_helpers.positional_parameters_enforcement``, which may be set to - ``POSITIONAL_EXCEPTION``, ``POSITIONAL_WARNING`` or - ``POSITIONAL_IGNORE`` to raise an exception, log a warning, or do - nothing, respectively, if a declaration is violated. - - Args: - max_positional_arguments: Maximum number of positional arguments. All - parameters after this index must be - keyword only. - - Returns: - A decorator that prevents using arguments after max_positional_args - from being used as positional parameters. - - Raises: - TypeError: if a keyword-only argument is provided as a positional - parameter, but only if - _helpers.positional_parameters_enforcement is set to - POSITIONAL_EXCEPTION. - """ - ... - -def parse_unique_urlencoded(content): # -> dict[Any, Any]: - """Parses unique key-value parameters from urlencoded content. - - Args: - content: string, URL-encoded key-value pairs. - - Returns: - dict, The key-value pairs from ``content``. - - Raises: - ValueError: if one of the keys is repeated. - """ - ... - -def update_query_params(uri, params): - """Updates a URI with new query parameters. - - If a given key from ``params`` is repeated in the ``uri``, then - the URI will be considered invalid and an error will occur. - - If the URI is valid, then each value from ``params`` will - replace the corresponding value in the query parameters (if - it exists). - - Args: - uri: string, A valid URI, with potential existing query parameters. - params: dict, A dictionary of query parameters. 
- - Returns: - The same URI but with the new query parameters added. - """ - ... - diff --git a/typings/googleapiclient/channel.pyi b/typings/googleapiclient/channel.pyi deleted file mode 100644 index a4d9ea3..0000000 --- a/typings/googleapiclient/channel.pyi +++ /dev/null @@ -1,211 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from googleapiclient import _helpers as util - -"""Channel notifications support. - -Classes and functions to support channel subscriptions and notifications -on those channels. - -Notes: - - This code is based on experimental APIs and is subject to change. - - Notification does not do deduplication of notification ids, that's up to - the receiver. - - Storing the Channel between calls is up to the caller. - - -Example setting up a channel: - - # Create a new channel that gets notifications via webhook. - channel = new_webhook_channel("https://example.com/my_web_hook") - - # Store the channel, keyed by 'channel.id'. Store it before calling the - # watch method because notifications may start arriving before the watch - # method returns. - ... - - resp = service.objects().watchAll( - bucket="some_bucket_id", body=channel.body()).execute() - channel.update(resp) - - # Store the channel, keyed by 'channel.id'. Store it after being updated - # since the resource_id value will now be correct, and that's needed to - # stop a subscription. - ... - - -An example Webhook implementation using webapp2. Note that webapp2 puts -headers in a case insensitive dictionary, as headers aren't guaranteed to -always be upper case. - - id = self.request.headers[X_GOOG_CHANNEL_ID] - - # Retrieve the channel by id. - channel = ... - - # Parse notification from the headers, including validating the id. - n = notification_from_headers(channel, self.request.headers) - - # Do app specific stuff with the notification here. - if n.resource_state == 'sync': - # Code to handle sync state. 
- elif n.resource_state == 'exists': - # Code to handle the exists state. - elif n.resource_state == 'not_exists': - # Code to handle the not exists state. - - -Example of unsubscribing. - - service.channels().stop(channel.body()).execute() -""" -EPOCH = ... -CHANNEL_PARAMS = ... -X_GOOG_CHANNEL_ID = ... -X_GOOG_MESSAGE_NUMBER = ... -X_GOOG_RESOURCE_STATE = ... -X_GOOG_RESOURCE_URI = ... -X_GOOG_RESOURCE_ID = ... -class Notification: - """A Notification from a Channel. - - Notifications are not usually constructed directly, but are returned - from functions like notification_from_headers(). - - Attributes: - message_number: int, The unique id number of this notification. - state: str, The state of the resource being monitored. - uri: str, The address of the resource being monitored. - resource_id: str, The unique identifier of the version of the resource at - this event. - """ - @util.positional(5) - def __init__(self, message_number, state, resource_uri, resource_id) -> None: - """Notification constructor. - - Args: - message_number: int, The unique id number of this notification. - state: str, The state of the resource being monitored. Can be one - of "exists", "not_exists", or "sync". - resource_uri: str, The address of the resource being monitored. - resource_id: str, The identifier of the watched resource. - """ - ... - - - -class Channel: - """A Channel for notifications. - - Usually not constructed directly, instead it is returned from helper - functions like new_webhook_channel(). - - Attributes: - type: str, The type of delivery mechanism used by this channel. For - example, 'web_hook'. - id: str, A UUID for the channel. - token: str, An arbitrary string associated with the channel that - is delivered to the target address with each event delivered - over this channel. - address: str, The address of the receiving entity where events are - delivered. Specific to the channel type. 
- expiration: int, The time, in milliseconds from the epoch, when this - channel will expire. - params: dict, A dictionary of string to string, with additional parameters - controlling delivery channel behavior. - resource_id: str, An opaque id that identifies the resource that is - being watched. Stable across different API versions. - resource_uri: str, The canonicalized ID of the watched resource. - """ - @util.positional(5) - def __init__(self, type, id, token, address, expiration=..., params=..., resource_id=..., resource_uri=...) -> None: - """Create a new Channel. - - In user code, this Channel constructor will not typically be called - manually since there are functions for creating channels for each specific - type with a more customized set of arguments to pass. - - Args: - type: str, The type of delivery mechanism used by this channel. For - example, 'web_hook'. - id: str, A UUID for the channel. - token: str, An arbitrary string associated with the channel that - is delivered to the target address with each event delivered - over this channel. - address: str, The address of the receiving entity where events are - delivered. Specific to the channel type. - expiration: int, The time, in milliseconds from the epoch, when this - channel will expire. - params: dict, A dictionary of string to string, with additional parameters - controlling delivery channel behavior. - resource_id: str, An opaque id that identifies the resource that is - being watched. Stable across different API versions. - resource_uri: str, The canonicalized ID of the watched resource. - """ - ... - - def body(self): # -> dict[str, Any]: - """Build a body from the Channel. - - Constructs a dictionary that's appropriate for passing into watch() - methods as the value of body argument. - - Returns: - A dictionary representation of the channel. - """ - ... - - def update(self, resp): # -> None: - """Update a channel with information from the response of watch(). 
- - When a request is sent to watch() a resource, the response returned - from the watch() request is a dictionary with updated channel information, - such as the resource_id, which is needed when stopping a subscription. - - Args: - resp: dict, The response from a watch() method. - """ - ... - - - -def notification_from_headers(channel, headers): # -> Notification: - """Parse a notification from the webhook request headers, validate - the notification, and return a Notification object. - - Args: - channel: Channel, The channel that the notification is associated with. - headers: dict, A dictionary like object that contains the request headers - from the webhook HTTP request. - - Returns: - A Notification object. - - Raises: - errors.InvalidNotificationError if the notification is invalid. - ValueError if the X-GOOG-MESSAGE-NUMBER can't be converted to an int. - """ - ... - -@util.positional(2) -def new_webhook_channel(url, token=..., expiration=..., params=...): # -> Channel: - """Create a new webhook Channel. - - Args: - url: str, URL to post notifications to. - token: str, An arbitrary string associated with the channel that - is delivered to the target address with each notification delivered - over this channel. - expiration: datetime.datetime, A time in the future when the channel - should expire. Can also be None if the subscription should use the - default expiration. Note that different services may have different - limits on how long a subscription lasts. Check the response from the - watch() method to see the value the service has set for an expiration - time. - params: dict, Extra parameters to pass on channel creation. Currently - not used for webhook channels. - """ - ... - diff --git a/typings/googleapiclient/discovery.pyi b/typings/googleapiclient/discovery.pyi deleted file mode 100644 index 44c05b1..0000000 --- a/typings/googleapiclient/discovery.pyi +++ /dev/null @@ -1,333 +0,0 @@ -""" -This type stub file was generated by pyright. 
-""" - -from email.generator import BytesGenerator -from googleapiclient._helpers import positional - -"""Client for discovery based APIs. - -A client library for Google's discovery based APIs. -""" -__author__ = ... -__all__ = ["build", "build_from_document", "fix_method_name", "key2param"] -HAS_UNIVERSE = ... -logger = ... -URITEMPLATE = ... -VARNAME = ... -DISCOVERY_URI = ... -V1_DISCOVERY_URI = ... -V2_DISCOVERY_URI = ... -DEFAULT_METHOD_DOC = ... -HTTP_PAYLOAD_METHODS = ... -_MEDIA_SIZE_BIT_SHIFTS = ... -BODY_PARAMETER_DEFAULT_VALUE = ... -MEDIA_BODY_PARAMETER_DEFAULT_VALUE = ... -MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE = ... -_PAGE_TOKEN_NAMES = ... -GOOGLE_API_USE_CLIENT_CERTIFICATE = ... -GOOGLE_API_USE_MTLS_ENDPOINT = ... -GOOGLE_CLOUD_UNIVERSE_DOMAIN = ... -DEFAULT_UNIVERSE = ... -STACK_QUERY_PARAMETERS = ... -STACK_QUERY_PARAMETER_DEFAULT_VALUE = ... -class APICoreVersionError(ValueError): - def __init__(self) -> None: - ... - - - -RESERVED_WORDS = ... -class _BytesGenerator(BytesGenerator): - _write_lines = ... - - -def fix_method_name(name): - """Fix method names to avoid '$' characters and reserved word conflicts. - - Args: - name: string, method name. - - Returns: - The name with '_' appended if the name is a reserved word and '$' and '-' - replaced with '_'. - """ - ... - -def key2param(key): # -> LiteralString: - """Converts key names into parameter names. - - For example, converting "max-results" -> "max_results" - - Args: - key: string, the method key name. - - Returns: - A safe method name based on the key name. - """ - ... - -@positional(2) -def build(serviceName, version, http=..., discoveryServiceUrl=..., developerKey=..., model=..., requestBuilder=..., credentials=..., cache_discovery=..., cache=..., client_options=..., adc_cert_path=..., adc_key_path=..., num_retries=..., static_discovery=..., always_use_jwt_access=...): - """Construct a Resource for interacting with an API. - - Construct a Resource object for interacting with an API. 
The serviceName and - version are the names from the Discovery service. - - Args: - serviceName: string, name of the service. - version: string, the version of the service. - http: httplib2.Http, An instance of httplib2.Http or something that acts - like it that HTTP requests will be made through. - discoveryServiceUrl: string, a URI Template that points to the location of - the discovery service. It should have two parameters {api} and - {apiVersion} that when filled in produce an absolute URI to the discovery - document for that service. - developerKey: string, key obtained from - https://code.google.com/apis/console. - model: googleapiclient.Model, converts to and from the wire format. - requestBuilder: googleapiclient.http.HttpRequest, encapsulator for an HTTP - request. - credentials: oauth2client.Credentials or - google.auth.credentials.Credentials, credentials to be used for - authentication. - cache_discovery: Boolean, whether or not to cache the discovery doc. - cache: googleapiclient.discovery_cache.base.CacheBase, an optional - cache object for the discovery documents. - client_options: Mapping object or google.api_core.client_options, client - options to set user options on the client. - (1) The API endpoint should be set through client_options. If API endpoint - is not set, `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable can be used - to control which endpoint to use. - (2) client_cert_source is not supported, client cert should be provided using - client_encrypted_cert_source instead. In order to use the provided client - cert, `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be - set to `true`. - More details on the environment variables are here: - https://google.aip.dev/auth/4114 - adc_cert_path: str, client certificate file path to save the application - default client certificate for mTLS. This field is required if you want to - use the default client certificate. 
`GOOGLE_API_USE_CLIENT_CERTIFICATE` - environment variable must be set to `true` in order to use this field, - otherwise this field doesn't nothing. - More details on the environment variables are here: - https://google.aip.dev/auth/4114 - adc_key_path: str, client encrypted private key file path to save the - application default client encrypted private key for mTLS. This field is - required if you want to use the default client certificate. - `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be set to - `true` in order to use this field, otherwise this field doesn't nothing. - More details on the environment variables are here: - https://google.aip.dev/auth/4114 - num_retries: Integer, number of times to retry discovery with - randomized exponential backoff in case of intermittent/connection issues. - static_discovery: Boolean, whether or not to use the static discovery docs - included in the library. The default value for `static_discovery` depends - on the value of `discoveryServiceUrl`. `static_discovery` will default to - `True` when `discoveryServiceUrl` is also not provided, otherwise it will - default to `False`. - always_use_jwt_access: Boolean, whether always use self signed JWT for service - account credentials. This only applies to - google.oauth2.service_account.Credentials. - - Returns: - A Resource object with methods for interacting with the service. - - Raises: - google.auth.exceptions.MutualTLSChannelError: if there are any problems - setting up mutual TLS channel. - """ - ... - -@positional(1) -def build_from_document(service, base=..., future=..., http=..., developerKey=..., model=..., requestBuilder=..., credentials=..., client_options=..., adc_cert_path=..., adc_key_path=..., always_use_jwt_access=...): - """Create a Resource for interacting with an API. - - Same as `build()`, but constructs the Resource object from a discovery - document that is it given, as opposed to retrieving one over HTTP. 
- - Args: - service: string or object, the JSON discovery document describing the API. - The value passed in may either be the JSON string or the deserialized - JSON. - base: string, base URI for all HTTP requests, usually the discovery URI. - This parameter is no longer used as rootUrl and servicePath are included - within the discovery document. (deprecated) - future: string, discovery document with future capabilities (deprecated). - http: httplib2.Http, An instance of httplib2.Http or something that acts - like it that HTTP requests will be made through. - developerKey: string, Key for controlling API usage, generated - from the API Console. - model: Model class instance that serializes and de-serializes requests and - responses. - requestBuilder: Takes an http request and packages it up to be executed. - credentials: oauth2client.Credentials or - google.auth.credentials.Credentials, credentials to be used for - authentication. - client_options: Mapping object or google.api_core.client_options, client - options to set user options on the client. - (1) The API endpoint should be set through client_options. If API endpoint - is not set, `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable can be used - to control which endpoint to use. - (2) client_cert_source is not supported, client cert should be provided using - client_encrypted_cert_source instead. In order to use the provided client - cert, `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be - set to `true`. - More details on the environment variables are here: - https://google.aip.dev/auth/4114 - adc_cert_path: str, client certificate file path to save the application - default client certificate for mTLS. This field is required if you want to - use the default client certificate. `GOOGLE_API_USE_CLIENT_CERTIFICATE` - environment variable must be set to `true` in order to use this field, - otherwise this field doesn't nothing. 
- More details on the environment variables are here: - https://google.aip.dev/auth/4114 - adc_key_path: str, client encrypted private key file path to save the - application default client encrypted private key for mTLS. This field is - required if you want to use the default client certificate. - `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be set to - `true` in order to use this field, otherwise this field doesn't nothing. - More details on the environment variables are here: - https://google.aip.dev/auth/4114 - always_use_jwt_access: Boolean, whether always use self signed JWT for service - account credentials. This only applies to - google.oauth2.service_account.Credentials. - - Returns: - A Resource object with methods for interacting with the service. - - Raises: - google.auth.exceptions.MutualTLSChannelError: if there are any problems - setting up mutual TLS channel. - """ - ... - -class ResourceMethodParameters: - """Represents the parameters associated with a method. - - Attributes: - argmap: Map from method parameter name (string) to query parameter name - (string). - required_params: List of required parameters (represented by parameter - name as string). - repeated_params: List of repeated parameters (represented by parameter - name as string). - pattern_params: Map from method parameter name (string) to regular - expression (as a string). If the pattern is set for a parameter, the - value for that parameter must match the regular expression. - query_params: List of parameters (represented by parameter name as string) - that will be used in the query string. - path_params: Set of parameters (represented by parameter name as string) - that will be used in the base URL path. - param_types: Map from method parameter name (string) to parameter type. Type - can be any valid JSON schema type; valid values are 'any', 'array', - 'boolean', 'integer', 'number', 'object', or 'string'. 
Reference: - http://tools.ietf.org/html/draft-zyp-json-schema-03#section-5.1 - enum_params: Map from method parameter name (string) to list of strings, - where each list of strings is the list of acceptable enum values. - """ - def __init__(self, method_desc) -> None: - """Constructor for ResourceMethodParameters. - - Sets default values and defers to set_parameters to populate. - - Args: - method_desc: Dictionary with metadata describing an API method. Value - comes from the dictionary of methods stored in the 'methods' key in - the deserialized discovery document. - """ - ... - - def set_parameters(self, method_desc): # -> None: - """Populates maps and lists based on method description. - - Iterates through each parameter for the method and parses the values from - the parameter dictionary. - - Args: - method_desc: Dictionary with metadata describing an API method. Value - comes from the dictionary of methods stored in the 'methods' key in - the deserialized discovery document. - """ - ... - - - -def createMethod(methodName, methodDesc, rootDesc, schema): # -> tuple[Any, Callable[..., Any]]: - """Creates a method for attaching to a Resource. - - Args: - methodName: string, name of the method to use. - methodDesc: object, fragment of deserialized discovery document that - describes the method. - rootDesc: object, the entire deserialized discovery document. - schema: object, mapping of schema names to schema descriptions. - """ - ... - -def createNextMethod(methodName, pageTokenName=..., nextPageTokenName=..., isPageTokenParameter=...): # -> tuple[Any, Callable[..., Any | None]]: - """Creates any _next methods for attaching to a Resource. - - The _next methods allow for easy iteration through list() responses. - - Args: - methodName: string, name of the method to use. - pageTokenName: string, name of request page token field. - nextPageTokenName: string, name of response page token field. 
- isPageTokenParameter: Boolean, True if request page token is a query - parameter, False if request page token is a field of the request body. - """ - ... - -class Resource: - """A class for interacting with a resource.""" - def __init__(self, http, baseUrl, model, requestBuilder, developerKey, resourceDesc, rootDesc, schema, universe_domain=...) -> None: - """Build a Resource from the API description. - - Args: - http: httplib2.Http, Object to make http requests with. - baseUrl: string, base URL for the API. All requests are relative to this - URI. - model: googleapiclient.Model, converts to and from the wire format. - requestBuilder: class or callable that instantiates an - googleapiclient.HttpRequest object. - developerKey: string, key obtained from - https://code.google.com/apis/console - resourceDesc: object, section of deserialized discovery document that - describes a resource. Note that the top level discovery document - is considered a resource. - rootDesc: object, the entire deserialized discovery document. - schema: object, mapping of schema names to schema descriptions. - universe_domain: string, the universe for the API. The default universe - is "googleapis.com". - """ - ... - - def __getstate__(self): # -> dict[str, Any]: - """Trim the state down to something that can be pickled. - - Uses the fact that the instance variable _dynamic_attrs holds attrs that - will be wiped and restored on pickle serialization. - """ - ... - - def __setstate__(self, state): # -> None: - """Reconstitute the state of the object from being pickled. - - Uses the fact that the instance variable _dynamic_attrs holds attrs that - will be wiped and restored on pickle serialization. - """ - ... - - def __enter__(self): # -> Self: - ... - - def __exit__(self, exc_type, exc, exc_tb): # -> None: - ... - - def close(self): # -> None: - """Close httplib2 connections.""" - ... 
- - - diff --git a/typings/googleapiclient/discovery_cache/__init__.pyi b/typings/googleapiclient/discovery_cache/__init__.pyi deleted file mode 100644 index bf3642a..0000000 --- a/typings/googleapiclient/discovery_cache/__init__.pyi +++ /dev/null @@ -1,34 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -import logging -import os - -"""Caching utility for the discovery document.""" -LOGGER = ... -DISCOVERY_DOC_MAX_AGE = ... -DISCOVERY_DOC_DIR = ... -def autodetect(): # -> googleapiclient.discovery_cache.appengine_memcache.Cache | googleapiclient.discovery_cache.file_cache.Cache | None: - """Detects an appropriate cache module and returns it. - - Returns: - googleapiclient.discovery_cache.base.Cache, a cache object which - is auto detected, or None if no cache object is available. - """ - ... - -def get_static_doc(serviceName, version): # -> str | None: - """Retrieves the discovery document from the directory defined in - DISCOVERY_DOC_DIR corresponding to the serviceName and version provided. - - Args: - serviceName: string, name of the service. - version: string, the version of the service. - - Returns: - A string containing the contents of the JSON discovery document, - otherwise None if the JSON discovery document was not found. - """ - ... - diff --git a/typings/googleapiclient/discovery_cache/appengine_memcache.pyi b/typings/googleapiclient/discovery_cache/appengine_memcache.pyi deleted file mode 100644 index ef8bfd8..0000000 --- a/typings/googleapiclient/discovery_cache/appengine_memcache.pyi +++ /dev/null @@ -1,28 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from . import base - -"""App Engine memcache based cache for the discovery document.""" -LOGGER = ... -NAMESPACE = ... -class Cache(base.Cache): - """A cache with app engine memcache API.""" - def __init__(self, max_age) -> None: - """Constructor. - - Args: - max_age: Cache expiration in seconds. - """ - ... - - def get(self, url): # -> None: - ... 
- - def set(self, url, content): # -> None: - ... - - - -cache = ... diff --git a/typings/googleapiclient/discovery_cache/base.pyi b/typings/googleapiclient/discovery_cache/base.pyi deleted file mode 100644 index abf479b..0000000 --- a/typings/googleapiclient/discovery_cache/base.pyi +++ /dev/null @@ -1,35 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -import abc - -"""An abstract class for caching the discovery document.""" -class Cache: - """A base abstract cache class.""" - __metaclass__ = abc.ABCMeta - @abc.abstractmethod - def get(self, url): - """Gets the content from the memcache with a given key. - - Args: - url: string, the key for the cache. - - Returns: - object, the value in the cache for the given key, or None if the key is - not in the cache. - """ - ... - - @abc.abstractmethod - def set(self, url, content): - """Sets the given key and content in the cache. - - Args: - url: string, the key for the cache. - content: string, the discovery document. - """ - ... - - - diff --git a/typings/googleapiclient/discovery_cache/file_cache.pyi b/typings/googleapiclient/discovery_cache/file_cache.pyi deleted file mode 100644 index 04576a0..0000000 --- a/typings/googleapiclient/discovery_cache/file_cache.pyi +++ /dev/null @@ -1,35 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from . import base - -"""File based cache for the discovery document. - -The cache is stored in a single file so that multiple processes can -share the same cache. It locks the file whenever accessing to the -file. When the cache content is corrupted, it will be initialized with -an empty cache. -""" -LOGGER = ... -FILENAME = ... -EPOCH = ... -class Cache(base.Cache): - """A file based cache for the discovery documents.""" - def __init__(self, max_age) -> None: - """Constructor. - - Args: - max_age: Cache expiration in seconds. - """ - ... - - def get(self, url): # -> Any | None: - ... - - def set(self, url, content): # -> None: - ... 
- - - -cache = ... diff --git a/typings/googleapiclient/errors.pyi b/typings/googleapiclient/errors.pyi deleted file mode 100644 index 097974f..0000000 --- a/typings/googleapiclient/errors.pyi +++ /dev/null @@ -1,108 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from googleapiclient import _helpers as util - -"""Errors for the library. - -All exceptions defined by the library -should be defined in this file. -""" -__author__ = ... -class Error(Exception): - """Base error for this module.""" - ... - - -class HttpError(Error): - """HTTP data was invalid or unexpected.""" - @util.positional(3) - def __init__(self, resp, content, uri=...) -> None: - ... - - @property - def status_code(self): - """Return the HTTP status code from the response content.""" - ... - - def __repr__(self): # -> str: - ... - - __str__ = ... - - -class InvalidJsonError(Error): - """The JSON returned could not be parsed.""" - ... - - -class UnknownFileType(Error): - """File type unknown or unexpected.""" - ... - - -class UnknownLinkType(Error): - """Link type unknown or unexpected.""" - ... - - -class UnknownApiNameOrVersion(Error): - """No API with that name and version exists.""" - ... - - -class UnacceptableMimeTypeError(Error): - """That is an unacceptable mimetype for this operation.""" - ... - - -class MediaUploadSizeError(Error): - """Media is larger than the method can accept.""" - ... - - -class ResumableUploadError(HttpError): - """Error occurred during resumable upload.""" - ... - - -class InvalidChunkSizeError(Error): - """The given chunksize is not valid.""" - ... - - -class InvalidNotificationError(Error): - """The channel Notification is invalid.""" - ... - - -class BatchError(HttpError): - """Error occurred during batch operations.""" - @util.positional(2) - def __init__(self, reason, resp=..., content=...) -> None: - ... - - def __repr__(self): # -> LiteralString: - ... - - __str__ = ... 
- - -class UnexpectedMethodError(Error): - """Exception raised by RequestMockBuilder on unexpected calls.""" - @util.positional(1) - def __init__(self, methodId=...) -> None: - """Constructor for an UnexpectedMethodError.""" - ... - - - -class UnexpectedBodyError(Error): - """Exception raised by RequestMockBuilder on unexpected bodies.""" - def __init__(self, expected, provided) -> None: - """Constructor for an UnexpectedMethodError.""" - ... - - - diff --git a/typings/googleapiclient/http.pyi b/typings/googleapiclient/http.pyi deleted file mode 100644 index 9e36e63..0000000 --- a/typings/googleapiclient/http.pyi +++ /dev/null @@ -1,857 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from googleapiclient import _helpers as util - -"""Classes to encapsulate a single HTTP request. - -The classes implement a command pattern, with every -object supporting an execute() method that does the -actual HTTP request. -""" -__author__ = ... -LOGGER = ... -DEFAULT_CHUNK_SIZE = ... -MAX_URI_LENGTH = ... -MAX_BATCH_LIMIT = ... -_TOO_MANY_REQUESTS = ... -DEFAULT_HTTP_TIMEOUT_SEC = ... -_LEGACY_BATCH_URI = ... -class MediaUploadProgress: - """Status of a resumable upload.""" - def __init__(self, resumable_progress, total_size) -> None: - """Constructor. - - Args: - resumable_progress: int, bytes sent so far. - total_size: int, total bytes in complete upload, or None if the total - upload size isn't known ahead of time. - """ - ... - - def progress(self): # -> float: - """Percent of upload completed, as a float. - - Returns: - the percentage complete as a float, returning 0.0 if the total size of - the upload is unknown. - """ - ... - - - -class MediaDownloadProgress: - """Status of a resumable download.""" - def __init__(self, resumable_progress, total_size) -> None: - """Constructor. - - Args: - resumable_progress: int, bytes received so far. - total_size: int, total bytes in complete download. - """ - ... 
- - def progress(self): # -> float: - """Percent of download completed, as a float. - - Returns: - the percentage complete as a float, returning 0.0 if the total size of - the download is unknown. - """ - ... - - - -class MediaUpload: - """Describes a media object to upload. - - Base class that defines the interface of MediaUpload subclasses. - - Note that subclasses of MediaUpload may allow you to control the chunksize - when uploading a media object. It is important to keep the size of the chunk - as large as possible to keep the upload efficient. Other factors may influence - the size of the chunk you use, particularly if you are working in an - environment where individual HTTP requests may have a hardcoded time limit, - such as under certain classes of requests under Google App Engine. - - Streams are io.Base compatible objects that support seek(). Some MediaUpload - subclasses support using streams directly to upload data. Support for - streaming may be indicated by a MediaUpload sub-class and if appropriate for a - platform that stream will be used for uploading the media object. The support - for streaming is indicated by has_stream() returning True. The stream() method - should return an io.Base object that supports seek(). On platforms where the - underlying httplib module supports streaming, for example Python 2.6 and - later, the stream will be passed into the http library which will result in - less memory being used and possibly faster uploads. - - If you need to upload media that can't be uploaded using any of the existing - MediaUpload sub-class then you can sub-class MediaUpload for your particular - needs. - """ - def chunksize(self): - """Chunk size for resumable uploads. - - Returns: - Chunk size in bytes. - """ - ... - - def mimetype(self): # -> Literal['application/octet-stream']: - """Mime type of the body. - - Returns: - Mime type. - """ - ... - - def size(self): # -> None: - """Size of upload. 
- - Returns: - Size of the body, or None of the size is unknown. - """ - ... - - def resumable(self): # -> Literal[False]: - """Whether this upload is resumable. - - Returns: - True if resumable upload or False. - """ - ... - - def getbytes(self, begin, end): - """Get bytes from the media. - - Args: - begin: int, offset from beginning of file. - length: int, number of bytes to read, starting at begin. - - Returns: - A string of bytes read. May be shorter than length if EOF was reached - first. - """ - ... - - def has_stream(self): # -> Literal[False]: - """Does the underlying upload support a streaming interface. - - Streaming means it is an io.IOBase subclass that supports seek, i.e. - seekable() returns True. - - Returns: - True if the call to stream() will return an instance of a seekable io.Base - subclass. - """ - ... - - def stream(self): - """A stream interface to the data being uploaded. - - Returns: - The returned value is an io.IOBase subclass that supports seek, i.e. - seekable() returns True. - """ - ... - - def to_json(self): # -> str: - """Create a JSON representation of an instance of MediaUpload. - - Returns: - string, a JSON representation of this instance, suitable to pass to - from_json(). - """ - ... - - @classmethod - def new_from_json(cls, s): # -> Any: - """Utility class method to instantiate a MediaUpload subclass from a JSON - representation produced by to_json(). - - Args: - s: string, JSON from to_json(). - - Returns: - An instance of the subclass of MediaUpload that was serialized with - to_json(). - """ - ... - - - -class MediaIoBaseUpload(MediaUpload): - """A MediaUpload for a io.Base objects. - - Note that the Python file object is compatible with io.Base and can be used - with this class also. 
- - fh = BytesIO('...Some data to upload...') - media = MediaIoBaseUpload(fh, mimetype='image/png', - chunksize=1024*1024, resumable=True) - farm.animals().insert( - id='cow', - name='cow.png', - media_body=media).execute() - - Depending on the platform you are working on, you may pass -1 as the - chunksize, which indicates that the entire file should be uploaded in a single - request. If the underlying platform supports streams, such as Python 2.6 or - later, then this can be very efficient as it avoids multiple connections, and - also avoids loading the entire file into memory before sending it. Note that - Google App Engine has a 5MB limit on request size, so you should never set - your chunksize larger than 5MB, or to -1. - """ - @util.positional(3) - def __init__(self, fd, mimetype, chunksize=..., resumable=...) -> None: - """Constructor. - - Args: - fd: io.Base or file object, The source of the bytes to upload. MUST be - opened in blocking mode, do not use streams opened in non-blocking mode. - The given stream must be seekable, that is, it must be able to call - seek() on fd. - mimetype: string, Mime-type of the file. - chunksize: int, File will be uploaded in chunks of this many bytes. Only - used if resumable=True. Pass in a value of -1 if the file is to be - uploaded as a single chunk. Note that Google App Engine has a 5MB limit - on request size, so you should never set your chunksize larger than 5MB, - or to -1. - resumable: bool, True if this is a resumable upload. False means upload - in a single request. - """ - ... - - def chunksize(self): # -> int: - """Chunk size for resumable uploads. - - Returns: - Chunk size in bytes. - """ - ... - - def mimetype(self): # -> Any: - """Mime type of the body. - - Returns: - Mime type. - """ - ... - - def size(self): - """Size of upload. - - Returns: - Size of the body, or None of the size is unknown. - """ - ... - - def resumable(self): # -> bool: - """Whether this upload is resumable. 
- - Returns: - True if resumable upload or False. - """ - ... - - def getbytes(self, begin, length): - """Get bytes from the media. - - Args: - begin: int, offset from beginning of file. - length: int, number of bytes to read, starting at begin. - - Returns: - A string of bytes read. May be shorted than length if EOF was reached - first. - """ - ... - - def has_stream(self): # -> Literal[True]: - """Does the underlying upload support a streaming interface. - - Streaming means it is an io.IOBase subclass that supports seek, i.e. - seekable() returns True. - - Returns: - True if the call to stream() will return an instance of a seekable io.Base - subclass. - """ - ... - - def stream(self): # -> Any: - """A stream interface to the data being uploaded. - - Returns: - The returned value is an io.IOBase subclass that supports seek, i.e. - seekable() returns True. - """ - ... - - def to_json(self): - """This upload type is not serializable.""" - ... - - - -class MediaFileUpload(MediaIoBaseUpload): - """A MediaUpload for a file. - - Construct a MediaFileUpload and pass as the media_body parameter of the - method. For example, if we had a service that allowed uploading images: - - media = MediaFileUpload('cow.png', mimetype='image/png', - chunksize=1024*1024, resumable=True) - farm.animals().insert( - id='cow', - name='cow.png', - media_body=media).execute() - - Depending on the platform you are working on, you may pass -1 as the - chunksize, which indicates that the entire file should be uploaded in a single - request. If the underlying platform supports streams, such as Python 2.6 or - later, then this can be very efficient as it avoids multiple connections, and - also avoids loading the entire file into memory before sending it. Note that - Google App Engine has a 5MB limit on request size, so you should never set - your chunksize larger than 5MB, or to -1. - """ - @util.positional(2) - def __init__(self, filename, mimetype=..., chunksize=..., resumable=...) 
-> None: - """Constructor. - - Args: - filename: string, Name of the file. - mimetype: string, Mime-type of the file. If None then a mime-type will be - guessed from the file extension. - chunksize: int, File will be uploaded in chunks of this many bytes. Only - used if resumable=True. Pass in a value of -1 if the file is to be - uploaded in a single chunk. Note that Google App Engine has a 5MB limit - on request size, so you should never set your chunksize larger than 5MB, - or to -1. - resumable: bool, True if this is a resumable upload. False means upload - in a single request. - """ - ... - - def __del__(self): # -> None: - ... - - def to_json(self): # -> str: - """Creating a JSON representation of an instance of MediaFileUpload. - - Returns: - string, a JSON representation of this instance, suitable to pass to - from_json(). - """ - ... - - @staticmethod - def from_json(s): # -> MediaFileUpload: - ... - - - -class MediaInMemoryUpload(MediaIoBaseUpload): - """MediaUpload for a chunk of bytes. - - DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or io.StringIO for - the stream. - """ - @util.positional(2) - def __init__(self, body, mimetype=..., chunksize=..., resumable=...) -> None: - """Create a new MediaInMemoryUpload. - - DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or io.StringIO for - the stream. - - Args: - body: string, Bytes of body content. - mimetype: string, Mime-type of the file or default of - 'application/octet-stream'. - chunksize: int, File will be uploaded in chunks of this many bytes. Only - used if resumable=True. - resumable: bool, True if this is a resumable upload. False means upload - in a single request. - """ - ... - - - -class MediaIoBaseDownload: - """ "Download media resources. - - Note that the Python file object is compatible with io.Base and can be used - with this class also. 
- - - Example: - request = farms.animals().get_media(id='cow') - fh = io.FileIO('cow.png', mode='wb') - downloader = MediaIoBaseDownload(fh, request, chunksize=1024*1024) - - done = False - while done is False: - status, done = downloader.next_chunk() - if status: - print "Download %d%%." % int(status.progress() * 100) - print "Download Complete!" - """ - @util.positional(3) - def __init__(self, fd, request, chunksize=...) -> None: - """Constructor. - - Args: - fd: io.Base or file object, The stream in which to write the downloaded - bytes. - request: googleapiclient.http.HttpRequest, the media request to perform in - chunks. - chunksize: int, File will be downloaded in chunks of this many bytes. - """ - ... - - @util.positional(1) - def next_chunk(self, num_retries=...): # -> tuple[MediaDownloadProgress, bool] | tuple[MediaDownloadProgress, Literal[True]]: - """Get the next chunk of the download. - - Args: - num_retries: Integer, number of times to retry with randomized - exponential backoff. If all retries fail, the raised HttpError - represents the last request. If zero (default), we attempt the - request only once. - - Returns: - (status, done): (MediaDownloadProgress, boolean) - The value of 'done' will be True when the media has been fully - downloaded or the total size of the media is unknown. - - Raises: - googleapiclient.errors.HttpError if the response was not a 2xx. - httplib2.HttpLib2Error if a transport error has occurred. - """ - ... - - - -class _StreamSlice: - """Truncated stream. - - Takes a stream and presents a stream that is a slice of the original stream. - This is used when uploading media in chunks. In later versions of Python a - stream can be passed to httplib in place of the string of data to send. The - problem is that httplib just blindly reads to the end of the stream. This - wrapper presents a virtual stream that only reads to the end of the chunk. - """ - def __init__(self, stream, begin, chunksize) -> None: - """Constructor. 
- - Args: - stream: (io.Base, file object), the stream to wrap. - begin: int, the seek position the chunk begins at. - chunksize: int, the size of the chunk. - """ - ... - - def read(self, n=...): - """Read n bytes. - - Args: - n, int, the number of bytes to read. - - Returns: - A string of length 'n', or less if EOF is reached. - """ - ... - - - -class HttpRequest: - """Encapsulates a single HTTP request.""" - @util.positional(4) - def __init__(self, http, postproc, uri, method=..., body=..., headers=..., methodId=..., resumable=...) -> None: - """Constructor for an HttpRequest. - - Args: - http: httplib2.Http, the transport object to use to make a request - postproc: callable, called on the HTTP response and content to transform - it into a data object before returning, or raising an exception - on an error. - uri: string, the absolute URI to send the request to - method: string, the HTTP method to use - body: string, the request body of the HTTP request, - headers: dict, the HTTP request headers - methodId: string, a unique identifier for the API method being called. - resumable: MediaUpload, None if this is not a resumbale request. - """ - ... - - @util.positional(1) - def execute(self, http=..., num_retries=...): - """Execute the request. - - Args: - http: httplib2.Http, an http object to be used in place of the - one the HttpRequest request object was constructed with. - num_retries: Integer, number of times to retry with randomized - exponential backoff. If all retries fail, the raised HttpError - represents the last request. If zero (default), we attempt the - request only once. - - Returns: - A deserialized object model of the response body as determined - by the postproc. - - Raises: - googleapiclient.errors.HttpError if the response was not a 2xx. - httplib2.HttpLib2Error if a transport error has occurred. - """ - ... 
- - @util.positional(2) - def add_response_callback(self, cb): # -> None: - """add_response_headers_callback - - Args: - cb: Callback to be called on receiving the response headers, of signature: - - def cb(resp): - # Where resp is an instance of httplib2.Response - """ - ... - - @util.positional(1) - def next_chunk(self, http=..., num_retries=...): # -> tuple[MediaUploadProgress | None, Any] | tuple[None, Any] | tuple[MediaUploadProgress, None]: - """Execute the next step of a resumable upload. - - Can only be used if the method being executed supports media uploads and - the MediaUpload object passed in was flagged as using resumable upload. - - Example: - - media = MediaFileUpload('cow.png', mimetype='image/png', - chunksize=1000, resumable=True) - request = farm.animals().insert( - id='cow', - name='cow.png', - media_body=media) - - response = None - while response is None: - status, response = request.next_chunk() - if status: - print "Upload %d%% complete." % int(status.progress() * 100) - - - Args: - http: httplib2.Http, an http object to be used in place of the - one the HttpRequest request object was constructed with. - num_retries: Integer, number of times to retry with randomized - exponential backoff. If all retries fail, the raised HttpError - represents the last request. If zero (default), we attempt the - request only once. - - Returns: - (status, body): (ResumableMediaStatus, object) - The body will be None until the resumable media is fully uploaded. - - Raises: - googleapiclient.errors.HttpError if the response was not a 2xx. - httplib2.HttpLib2Error if a transport error has occurred. - """ - ... - - def to_json(self): # -> str: - """Returns a JSON representation of the HttpRequest.""" - ... - - @staticmethod - def from_json(s, http, postproc): # -> HttpRequest: - """Returns an HttpRequest populated with info from a JSON object.""" - ... - - @staticmethod - def null_postproc(resp, contents): # -> tuple[Any, Any]: - ... 
- - - -class BatchHttpRequest: - """Batches multiple HttpRequest objects into a single HTTP request. - - Example: - from googleapiclient.http import BatchHttpRequest - - def list_animals(request_id, response, exception): - \"\"\"Do something with the animals list response.\"\"\" - if exception is not None: - # Do something with the exception. - pass - else: - # Do something with the response. - pass - - def list_farmers(request_id, response, exception): - \"\"\"Do something with the farmers list response.\"\"\" - if exception is not None: - # Do something with the exception. - pass - else: - # Do something with the response. - pass - - service = build('farm', 'v2') - - batch = BatchHttpRequest() - - batch.add(service.animals().list(), list_animals) - batch.add(service.farmers().list(), list_farmers) - batch.execute(http=http) - """ - @util.positional(1) - def __init__(self, callback=..., batch_uri=...) -> None: - """Constructor for a BatchHttpRequest. - - Args: - callback: callable, A callback to be called for each response, of the - form callback(id, response, exception). The first parameter is the - request id, and the second is the deserialized response object. The - third is an googleapiclient.errors.HttpError exception object if an HTTP error - occurred while processing the request, or None if no error occurred. - batch_uri: string, URI to send batch requests to. - """ - ... - - @util.positional(2) - def add(self, request, callback=..., request_id=...): # -> None: - """Add a new request. - - Every callback added will be paired with a unique id, the request_id. That - unique id will be passed back to the callback when the response comes back - from the server. The default behavior is to have the library generate it's - own unique id. If the caller passes in a request_id then they must ensure - uniqueness for each request_id, and if they are not an exception is - raised. 
Callers should either supply all request_ids or never supply a - request id, to avoid such an error. - - Args: - request: HttpRequest, Request to add to the batch. - callback: callable, A callback to be called for this response, of the - form callback(id, response, exception). The first parameter is the - request id, and the second is the deserialized response object. The - third is an googleapiclient.errors.HttpError exception object if an HTTP error - occurred while processing the request, or None if no errors occurred. - request_id: string, A unique id for the request. The id will be passed - to the callback with the response. - - Returns: - None - - Raises: - BatchError if a media request is added to a batch. - KeyError is the request_id is not unique. - """ - ... - - @util.positional(1) - def execute(self, http=...): # -> None: - """Execute all the requests as a single batched HTTP request. - - Args: - http: httplib2.Http, an http object to be used in place of the one the - HttpRequest request object was constructed with. If one isn't supplied - then use a http object from the requests in this batch. - - Returns: - None - - Raises: - httplib2.HttpLib2Error if a transport error has occurred. - googleapiclient.errors.BatchError if the response is the wrong format. - """ - ... - - - -class HttpRequestMock: - """Mock of HttpRequest. - - Do not construct directly, instead use RequestMockBuilder. - """ - def __init__(self, resp, content, postproc) -> None: - """Constructor for HttpRequestMock - - Args: - resp: httplib2.Response, the response to emulate coming from the request - content: string, the response body - postproc: callable, the post processing function usually supplied by - the model class. See model.JsonModel.response() as an example. - """ - ... - - def execute(self, http=...): - """Execute the request. - - Same behavior as HttpRequest.execute(), but the response is - mocked and not really from an HTTP request/response. - """ - ... 
- - - -class RequestMockBuilder: - """A simple mock of HttpRequest - - Pass in a dictionary to the constructor that maps request methodIds to - tuples of (httplib2.Response, content, opt_expected_body) that should be - returned when that method is called. None may also be passed in for the - httplib2.Response, in which case a 200 OK response will be generated. - If an opt_expected_body (str or dict) is provided, it will be compared to - the body and UnexpectedBodyError will be raised on inequality. - - Example: - response = '{"data": {"id": "tag:google.c...' - requestBuilder = RequestMockBuilder( - { - 'plus.activities.get': (None, response), - } - ) - googleapiclient.discovery.build("plus", "v1", requestBuilder=requestBuilder) - - Methods that you do not supply a response for will return a - 200 OK with an empty string as the response content or raise an excpetion - if check_unexpected is set to True. The methodId is taken from the rpcName - in the discovery document. - - For more details see the project wiki. - """ - def __init__(self, responses, check_unexpected=...) -> None: - """Constructor for RequestMockBuilder - - The constructed object should be a callable object - that can replace the class HttpResponse. - - responses - A dictionary that maps methodIds into tuples - of (httplib2.Response, content). The methodId - comes from the 'rpcName' field in the discovery - document. - check_unexpected - A boolean setting whether or not UnexpectedMethodError - should be raised on unsupplied method. - """ - ... - - def __call__(self, http, postproc, uri, method=..., body=..., headers=..., methodId=..., resumable=...): # -> HttpRequestMock: - """Implements the callable interface that discovery.build() expects - of requestBuilder, which is to build an object compatible with - HttpRequest.execute(). See that method for the description of the - parameters and the expected response. - """ - ... 
- - - -class HttpMock: - """Mock of httplib2.Http""" - def __init__(self, filename=..., headers=...) -> None: - """ - Args: - filename: string, absolute filename to read response from - headers: dict, header to return with response - """ - ... - - def request(self, uri, method=..., body=..., headers=..., redirections=..., connection_type=...): # -> tuple[Response[str], bytes | None]: - ... - - def close(self): # -> None: - ... - - - -class HttpMockSequence: - """Mock of httplib2.Http - - Mocks a sequence of calls to request returning different responses for each - call. Create an instance initialized with the desired response headers - and content and then use as if an httplib2.Http instance. - - http = HttpMockSequence([ - ({'status': '401'}, ''), - ({'status': '200'}, '{"access_token":"1/3w","expires_in":3600}'), - ({'status': '200'}, 'echo_request_headers'), - ]) - resp, content = http.request("http://examples.com") - - There are special values you can pass in for content to trigger - behavours that are helpful in testing. - - 'echo_request_headers' means return the request headers in the response body - 'echo_request_headers_as_json' means return the request headers in - the response body - 'echo_request_body' means return the request body in the response body - 'echo_request_uri' means return the request uri in the response body - """ - def __init__(self, iterable) -> None: - """ - Args: - iterable: iterable, a sequence of pairs of (headers, body) - """ - ... - - def request(self, uri, method=..., body=..., headers=..., redirections=..., connection_type=...): # -> tuple[Response[Any], bytes | Any | bytearray | memoryview[_I] | None]: - ... - - - -def set_user_agent(http, user_agent): - """Set the user-agent on every request. - - Args: - http - An instance of httplib2.Http - or something that acts like it. - user_agent: string, the value for the user-agent header. - - Returns: - A modified instance of http that was passed in. 
- - Example: - - h = httplib2.Http() - h = set_user_agent(h, "my-app-name/6.0") - - Most of the time the user-agent will be set doing auth, this is for the rare - cases where you are accessing an unauthenticated endpoint. - """ - ... - -def tunnel_patch(http): - """Tunnel PATCH requests over POST. - Args: - http - An instance of httplib2.Http - or something that acts like it. - - Returns: - A modified instance of http that was passed in. - - Example: - - h = httplib2.Http() - h = tunnel_patch(h, "my-app-name/6.0") - - Useful if you are running on a platform that doesn't support PATCH. - Apply this last if you are using OAuth 1.0, as changing the method - will result in a different signature. - """ - ... - -def build_http(): # -> Http: - """Builds httplib2.Http object - - Returns: - A httplib2.Http object, which is used to make http requests, and which has timeout set by default. - To override default timeout call - - socket.setdefaulttimeout(timeout_in_sec) - - before interacting with this method. - """ - ... - diff --git a/typings/googleapiclient/mimeparse.pyi b/typings/googleapiclient/mimeparse.pyi deleted file mode 100644 index 5f99ab7..0000000 --- a/typings/googleapiclient/mimeparse.pyi +++ /dev/null @@ -1,107 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -"""MIME-Type Parser - -This module provides basic functions for handling mime-types. It can handle -matching mime-types against a list of media-ranges. See section 14.1 of the -HTTP specification [RFC 2616] for a complete explanation. - - http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1 - -Contents: - - parse_mime_type(): Parses a mime-type into its component parts. - - parse_media_range(): Media-ranges are mime-types with wild-cards and a 'q' - quality parameter. - - quality(): Determines the quality ('q') of a mime-type when - compared against a list of media-ranges. - - quality_parsed(): Just like quality() except the second parameter must be - pre-parsed. 
- - best_match(): Choose the mime-type with the highest quality ('q') - from a list of candidates. -""" -__version__ = ... -__author__ = ... -__email__ = ... -__license__ = ... -__credits__ = ... -def parse_mime_type(mime_type): # -> tuple[LiteralString | Any, LiteralString | Any, dict[Any, Any]]: - """Parses a mime-type into its component parts. - - Carves up a mime-type and returns a tuple of the (type, subtype, params) - where 'params' is a dictionary of all the parameters for the media range. - For example, the media range 'application/xhtml;q=0.5' would get parsed - into: - - ('application', 'xhtml', {'q', '0.5'}) - """ - ... - -def parse_media_range(range): # -> tuple[LiteralString | Any, LiteralString | Any, dict[Any, Any]]: - """Parse a media-range into its component parts. - - Carves up a media range and returns a tuple of the (type, subtype, - params) where 'params' is a dictionary of all the parameters for the media - range. For example, the media range 'application/*;q=0.5' would get parsed - into: - - ('application', '*', {'q', '0.5'}) - - In addition this function also guarantees that there is a value for 'q' - in the params dictionary, filling it in with a proper default if - necessary. - """ - ... - -def fitness_and_quality_parsed(mime_type, parsed_ranges): # -> tuple[Any | int, float]: - """Find the best match for a mime-type amongst parsed media-ranges. - - Find the best match for a given mime-type against a list of media_ranges - that have already been parsed by parse_media_range(). Returns a tuple of - the fitness value and the value of the 'q' quality parameter of the best - match, or (-1, 0) if no match was found. Just as for quality_parsed(), - 'parsed_ranges' must be a list of parsed media ranges. - """ - ... - -def quality_parsed(mime_type, parsed_ranges): # -> float: - """Find the best match for a mime-type amongst parsed media-ranges. 
- - Find the best match for a given mime-type against a list of media_ranges - that have already been parsed by parse_media_range(). Returns the 'q' - quality parameter of the best match, 0 if no match was found. This function - bahaves the same as quality() except that 'parsed_ranges' must be a list of - parsed media ranges. - """ - ... - -def quality(mime_type, ranges): # -> float: - """Return the quality ('q') of a mime-type against a list of media-ranges. - - Returns the quality 'q' of a mime-type when compared against the - media-ranges in ranges. For example: - - >>> quality('text/html','text/*;q=0.3, text/html;q=0.7, - text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5') - 0.7 - - """ - ... - -def best_match(supported, header): # -> Literal['']: - """Return mime-type with the highest quality ('q') from list of candidates. - - Takes a list of supported mime-types and finds the best match for all the - media-ranges listed in header. The value of header must be a string that - conforms to the format of the HTTP Accept: header. The value of 'supported' - is a list of mime-types. The list of supported mime-types should be sorted - in order of increasing desirability, in case of a situation where there is - a tie. - - >>> best_match(['application/xbel+xml', 'text/xml'], - 'text/*;q=0.5,*/*; q=0.1') - 'text/xml' - """ - ... - diff --git a/typings/googleapiclient/model.pyi b/typings/googleapiclient/model.pyi deleted file mode 100644 index 27ca3dd..0000000 --- a/typings/googleapiclient/model.pyi +++ /dev/null @@ -1,262 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -"""Model objects for requests and responses. - -Each API may support one or more serializations, such -as JSON, Atom, etc. The model classes are responsible -for converting between the wire format and the Python -object representation. -""" -__author__ = ... -HAS_API_VERSION = ... -_LIBRARY_VERSION = ... -_PY_VERSION = ... -LOGGER = ... -dump_request_response = ... 
-class Model: - """Model base class. - - All Model classes should implement this interface. - The Model serializes and de-serializes between a wire - format such as JSON and a Python object representation. - """ - def request(self, headers, path_params, query_params, body_value): # -> None: - """Updates outgoing requests with a serialized body. - - Args: - headers: dict, request headers - path_params: dict, parameters that appear in the request path - query_params: dict, parameters that appear in the query - body_value: object, the request body as a Python object, which must be - serializable. - Returns: - A tuple of (headers, path_params, query, body) - - headers: dict, request headers - path_params: dict, parameters that appear in the request path - query: string, query part of the request URI - body: string, the body serialized in the desired wire format. - """ - ... - - def response(self, resp, content): # -> None: - """Convert the response wire format into a Python object. - - Args: - resp: httplib2.Response, the HTTP response headers and status - content: string, the body of the HTTP response - - Returns: - The body de-serialized as a Python object. - - Raises: - googleapiclient.errors.HttpError if a non 2xx response is received. - """ - ... - - - -class BaseModel(Model): - """Base model class. - - Subclasses should provide implementations for the "serialize" and - "deserialize" methods, as well as values for the following class attributes. - - Attributes: - accept: The value to use for the HTTP Accept header. - content_type: The value to use for the HTTP Content-type header. - no_content_response: The value to return when deserializing a 204 "No - Content" response. - alt_param: The value to supply as the "alt" query parameter for requests. - """ - accept = ... - content_type = ... - no_content_response = ... - alt_param = ... 
- def request(self, headers, path_params, query_params, body_value, api_version=...): # -> tuple[Any, Any, Any, Any | None]: - """Updates outgoing requests with a serialized body. - - Args: - headers: dict, request headers - path_params: dict, parameters that appear in the request path - query_params: dict, parameters that appear in the query - body_value: object, the request body as a Python object, which must be - serializable by json. - api_version: str, The precise API version represented by this request, - which will result in an API Version header being sent along with the - HTTP request. - Returns: - A tuple of (headers, path_params, query, body) - - headers: dict, request headers - path_params: dict, parameters that appear in the request path - query: string, query part of the request URI - body: string, the body serialized as JSON - """ - ... - - def response(self, resp, content): # -> None: - """Convert the response wire format into a Python object. - - Args: - resp: httplib2.Response, the HTTP response headers and status - content: string, the body of the HTTP response - - Returns: - The body de-serialized as a Python object. - - Raises: - googleapiclient.errors.HttpError if a non 2xx response is received. - """ - ... - - def serialize(self, body_value): # -> None: - """Perform the actual Python object serialization. - - Args: - body_value: object, the request body as a Python object. - - Returns: - string, the body in serialized form. - """ - ... - - def deserialize(self, content): # -> None: - """Perform the actual deserialization from response string to Python - object. - - Args: - content: string, the body of the HTTP response - - Returns: - The body de-serialized as a Python object. - """ - ... - - - -class JsonModel(BaseModel): - """Model class for JSON. - - Serializes and de-serializes between JSON and the Python - object representation of HTTP request and response bodies. - """ - accept = ... - content_type = ... - alt_param = ... 
- def __init__(self, data_wrapper=...) -> None: - """Construct a JsonModel. - - Args: - data_wrapper: boolean, wrap requests and responses in a data wrapper - """ - ... - - def serialize(self, body_value): # -> str: - ... - - def deserialize(self, content): # -> Any: - ... - - @property - def no_content_response(self): # -> dict[Any, Any]: - ... - - - -class RawModel(JsonModel): - """Model class for requests that don't return JSON. - - Serializes and de-serializes between JSON and the Python - object representation of HTTP request, and returns the raw bytes - of the response body. - """ - accept = ... - content_type = ... - alt_param = ... - def deserialize(self, content): - ... - - @property - def no_content_response(self): # -> Literal['']: - ... - - - -class MediaModel(JsonModel): - """Model class for requests that return Media. - - Serializes and de-serializes between JSON and the Python - object representation of HTTP request, and returns the raw bytes - of the response body. - """ - accept = ... - content_type = ... - alt_param = ... - def deserialize(self, content): - ... - - @property - def no_content_response(self): # -> Literal['']: - ... - - - -class ProtocolBufferModel(BaseModel): - """Model class for protocol buffers. - - Serializes and de-serializes the binary protocol buffer sent in the HTTP - request and response bodies. - """ - accept = ... - content_type = ... - alt_param = ... - def __init__(self, protocol_buffer) -> None: - """Constructs a ProtocolBufferModel. - - The serialized protocol buffer returned in an HTTP response will be - de-serialized using the given protocol buffer class. - - Args: - protocol_buffer: The protocol buffer class used to de-serialize a - response from the API. - """ - ... - - def serialize(self, body_value): - ... - - def deserialize(self, content): - ... - - @property - def no_content_response(self): - ... - - - -def makepatch(original, modified): # -> dict[Any, Any]: - """Create a patch object. 
- - Some methods support PATCH, an efficient way to send updates to a resource. - This method allows the easy construction of patch bodies by looking at the - differences between a resource before and after it was modified. - - Args: - original: object, the original deserialized resource - modified: object, the modified deserialized resource - Returns: - An object that contains only the changes from original to modified, in a - form suitable to pass to a PATCH method. - - Example usage: - item = service.activities().get(postid=postid, userid=userid).execute() - original = copy.deepcopy(item) - item['object']['content'] = 'This is updated.' - service.activities.patch(postid=postid, userid=userid, - body=makepatch(original, item)).execute() - """ - ... - diff --git a/typings/googleapiclient/sample_tools.pyi b/typings/googleapiclient/sample_tools.pyi deleted file mode 100644 index af85775..0000000 --- a/typings/googleapiclient/sample_tools.pyi +++ /dev/null @@ -1,36 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -"""Utilities for making samples. - -Consolidates a lot of code commonly repeated in sample applications. -""" -__author__ = ... -__all__ = ["init"] -def init(argv, name, version, doc, filename, scope=..., parents=..., discovery_filename=...): # -> tuple[Any, Namespace]: - """A common initialization routine for samples. - - Many of the sample applications do the same initialization, which has now - been consolidated into this function. This function uses common idioms found - in almost all the samples, i.e. for an API with name 'apiname', the - credentials are stored in a file named apiname.dat, and the - client_secrets.json file is stored in the same directory as the application - main file. - - Args: - argv: list of string, the command-line parameters of the application. - name: string, name of the API. - version: string, version of the API. - doc: string, description of the application. Usually set to __doc__. 
- file: string, filename of the application. Usually set to __file__. - parents: list of argparse.ArgumentParser, additional command-line flags. - scope: string, The OAuth scope used. - discovery_filename: string, name of local discovery file (JSON). Use when discovery doc not available via URL. - - Returns: - A tuple of (service, flags), where service is the service object and flags - is the parsed command-line flags. - """ - ... - diff --git a/typings/googleapiclient/schema.pyi b/typings/googleapiclient/schema.pyi deleted file mode 100644 index 0a81ca6..0000000 --- a/typings/googleapiclient/schema.pyi +++ /dev/null @@ -1,160 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from googleapiclient import _helpers as util - -"""Schema processing for discovery based APIs - -Schemas holds an APIs discovery schemas. It can return those schema as -deserialized JSON objects, or pretty print them as prototype objects that -conform to the schema. - -For example, given the schema: - - schema = \"\"\"{ - "Foo": { - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "ETag of the collection." - }, - "kind": { - "type": "string", - "description": "Type of the collection ('calendar#acl').", - "default": "calendar#acl" - }, - "nextPageToken": { - "type": "string", - "description": "Token used to access the next - page of this result. Omitted if no further results are available." - } - } - } - }\"\"\" - - s = Schemas(schema) - print s.prettyPrintByName('Foo') - - Produces the following output: - - { - "nextPageToken": "A String", # Token used to access the - # next page of this result. Omitted if no further results are available. - "kind": "A String", # Type of the collection ('calendar#acl'). - "etag": "A String", # ETag of the collection. - }, - -The constructor takes a discovery document in which to look up named schema. -""" -__author__ = ... 
-class Schemas: - """Schemas for an API.""" - def __init__(self, discovery) -> None: - """Constructor. - - Args: - discovery: object, Deserialized discovery document from which we pull - out the named schema. - """ - ... - - def prettyPrintByName(self, name): - """Get pretty printed object prototype from the schema name. - - Args: - name: string, Name of schema in the discovery document. - - Returns: - string, A string that contains a prototype object with - comments that conforms to the given schema. - """ - ... - - def prettyPrintSchema(self, schema): # -> LiteralString: - """Get pretty printed object prototype of schema. - - Args: - schema: object, Parsed JSON schema. - - Returns: - string, A string that contains a prototype object with - comments that conforms to the given schema. - """ - ... - - def get(self, name, default=...): - """Get deserialized JSON schema from the schema name. - - Args: - name: string, Schema name. - default: object, return value if name not found. - """ - ... - - - -class _SchemaToStruct: - """Convert schema to a prototype object.""" - @util.positional(3) - def __init__(self, schema, seen, dent=...) -> None: - """Constructor. - - Args: - schema: object, Parsed JSON schema. - seen: list, List of names of schema already seen while parsing. Used to - handle recursive definitions. - dent: int, Initial indentation depth. - """ - ... - - def emit(self, text): # -> None: - """Add text as a line to the output. - - Args: - text: string, Text to output. - """ - ... - - def emitBegin(self, text): # -> None: - """Add text to the output, but with no line terminator. - - Args: - text: string, Text to output. - """ - ... - - def emitEnd(self, text, comment): # -> None: - """Add text and comment to the output with line terminator. - - Args: - text: string, Text to output. - comment: string, Python comment. - """ - ... - - def indent(self): # -> None: - """Increase indentation level.""" - ... 
- - def undent(self): # -> None: - """Decrease indentation level.""" - ... - - def to_str(self, from_cache): # -> LiteralString: - """Prototype object based on the schema, in Python code with comments. - - Args: - from_cache: callable(name, seen), Callable that retrieves an object - prototype for a schema with the given name. Seen is a list of schema - names already seen as we recursively descend the schema definition. - - Returns: - Prototype object based on the schema, in Python code with comments. - The lines of the code will all be properly indented. - """ - ... - - - diff --git a/typings/googleapiclient/version.pyi b/typings/googleapiclient/version.pyi deleted file mode 100644 index fc797c9..0000000 --- a/typings/googleapiclient/version.pyi +++ /dev/null @@ -1,5 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -__version__ = ... diff --git a/typings/pywebpush/__init__.pyi b/typings/pywebpush/__init__.pyi deleted file mode 100644 index 345c3bc..0000000 --- a/typings/pywebpush/__init__.pyi +++ /dev/null @@ -1,276 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -import asyncio -import base64 -import json -import os -import time -import logging -import aiohttp -import http_ece -import requests -from copy import deepcopy -from typing import Dict, Union, cast -from urlparse import urlparse -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives.asymmetric import ec -from cryptography.hazmat.primitives import serialization -from functools import partial -from py_vapid import Vapid, Vapid01 -from requests import Response - -class WebPushException(Exception): - """Web Push failure. - - This may contain the requests.Response - - """ - def __init__(self, message, response=...) -> None: - ... - - def __str__(self) -> str: - ... - - - -class NoData(Exception): - """Message contained No Data, no encoding required.""" - ... 
- - -class CaseInsensitiveDict(dict): - """A dictionary that has case-insensitive keys""" - def __init__(self, data=..., **kwargs) -> None: - ... - - def __contains__(self, key) -> bool: - ... - - def __setitem__(self, key, value): # -> None: - ... - - def __getitem__(self, key): - ... - - def __delitem__(self, key): # -> None: - ... - - def get(self, key, default=...): # -> None: - ... - - def update(self, data) -> None: - ... - - - -class WebPusher: - """WebPusher encrypts a data block using HTTP Encrypted Content Encoding - for WebPush. - - See https://tools.ietf.org/html/draft-ietf-webpush-protocol-04 - for the current specification, and - https://developer.mozilla.org/en-US/docs/Web/API/Push_API for an - overview of Web Push. - - Example of use: - - The javascript promise handler for PushManager.subscribe() - receives a subscription_info object. subscription_info.getJSON() - will return a JSON representation. - (e.g. - .. code-block:: javascript - subscription_info.getJSON() == - {"endpoint": "https://push.server.com/...", - "keys":{"auth": "...", "p256dh": "..."} - } - ) - - This subscription_info block can be stored. - - To send a subscription update: - - .. code-block:: python - # Optional - # headers = py_vapid.sign({"aud": "https://push.server.com/", - "sub": "mailto:your_admin@your.site.com"}) - data = "Mary had a little lamb, with a nice mint jelly" - WebPusher(subscription_info).send(data, headers) - - """ - subscription_info = ... - valid_encodings = ... - verbose = ... - def __init__(self, subscription_info: Dict[str, Union[Union[str, bytes], Dict[str, Union[str, bytes]]]], requests_session: Union[None, requests.Session] = ..., aiohttp_session: Union[None, aiohttp.client.ClientSession] = ..., verbose: bool = ...) 
-> None: - """Initialize using the info provided by the client PushSubscription - object (See - https://developer.mozilla.org/en-US/docs/Web/API/PushManager/subscribe) - - :param subscription_info: a dict containing the subscription_info from - the client. - :type subscription_info: dict - - :param requests_session: a requests.Session object to optimize requests - to the same client. - :type requests_session: requests.Session - - :param verbose: provide verbose feedback - :type verbose: bool - - """ - ... - - def verb(self, msg: str, *args, **kwargs) -> None: - ... - - def encode(self, data: bytes, content_encoding: str = ...) -> CaseInsensitiveDict: - """Encrypt the data. - - :param data: A serialized block of byte data (String, JSON, bit array, - etc.) Make sure that whatever you send, your client knows how - to understand it. - :type data: str - :param content_encoding: The content_encoding type to use to encrypt - the data. Defaults to RFC8188 "aes128gcm". The previous draft-01 is - "aesgcm", however this format is now deprecated. - :type content_encoding: enum("aesgcm", "aes128gcm") - - """ - ... - - def as_curl(self, endpoint: str, encoded_data: bytes, headers: Dict[str, str]) -> str: - """Return the send as a curl command. - - Useful for debugging. This will write out the encoded data to a local - file named `encrypted.data` - - :param endpoint: Push service endpoint URL - :type endpoint: basestring - :param encoded_data: byte array of encoded data - :type encoded_data: bytearray - :param headers: Additional headers for the send - :type headers: dict - :returns string - - """ - ... - - def send(self, *args, **kwargs) -> Union[Response, str]: - """Encode and send the data to the Push Service""" - ... - - async def send_async(self, *args, **kwargs) -> Union[aiohttp.ClientResponse, str]: - ... 
- - - -def webpush(subscription_info: Dict[str, Union[Union[str, bytes], Dict[str, Union[str, bytes]]]], data: Union[None, str] = ..., vapid_private_key: Union[None, Vapid, str] = ..., vapid_claims: Union[None, Dict[str, Union[str, int]]] = ..., content_encoding: str = ..., curl: bool = ..., timeout: Union[None, float] = ..., ttl: int = ..., verbose: bool = ..., headers: Union[None, Dict[str, Union[str, int, float]]] = ..., requests_session: Union[None, requests.Session] = ...) -> Union[str, requests.Response]: - """ - One call solution to endcode and send `data` to the endpoint - contained in `subscription_info` using optional VAPID auth headers. - - in example: - - .. code-block:: python - - from pywebpush import python - - webpush( - subscription_info={ - "endpoint": "https://push.example.com/v1/abcd", - "keys": {"p256dh": "0123abcd...", - "auth": "001122..."} - }, - data="Mary had a little lamb, with a nice mint jelly", - vapid_private_key="path/to/key.pem", - vapid_claims={"sub": "YourNameHere@example.com"} - ) - - No additional method call is required. Any non-success will throw a - `WebPushException`. - - :param subscription_info: Provided by the client call - :type subscription_info: dict - :param data: Serialized data to send - :type data: str - :param vapid_private_key: Vapid instance or path to vapid private key PEM \ - or encoded str - :type vapid_private_key: Union[Vapid, str] - :param vapid_claims: Dictionary of claims ('sub' required) - :type vapid_claims: dict - :param content_encoding: Optional content type string - :type content_encoding: str - :param curl: Return as "curl" string instead of sending - :type curl: bool - :param timeout: POST requests timeout - :type timeout: float - :param ttl: Time To Live - :type ttl: int - :param verbose: Provide verbose feedback - :type verbose: bool - :return requests.Response or string - :param headers: Dictionary of extra HTTP headers to include - :type headers: dict - - """ - ... 
- -async def webpush_async(subscription_info: Dict[str, Union[Union[str, bytes], Dict[str, Union[str, bytes]]]], data: Union[None, str] = ..., vapid_private_key: Union[None, Vapid, str] = ..., vapid_claims: Union[None, Dict[str, Union[str, int]]] = ..., content_encoding: str = ..., curl: bool = ..., timeout: Union[None, float] = ..., ttl: int = ..., verbose: bool = ..., headers: Union[None, Dict[str, Union[str, int, float]]] = ..., aiohttp_session: Union[None, aiohttp.ClientSession] = ...) -> Union[str, aiohttp.ClientResponse]: - """ - Async version of webpush function. One call solution to encode and send - `data` to the endpoint contained in `subscription_info` using optional - VAPID auth headers. - - Example: - - .. code-block:: python - - from pywebpush import webpush_async - import asyncio - - async def send_notification(): - response = await webpush_async( - subscription_info={ - "endpoint": "https://push.example.com/v1/abcd", - "keys": {"p256dh": "0123abcd...", - "auth": "001122..."} - }, - data="Mary had a little lamb, with a nice mint jelly", - vapid_private_key="path/to/key.pem", - vapid_claims={"sub": "YourNameHere@example.com"} - ) - - asyncio.run(send_notification()) - - No additional method call is required. Any non-success will throw a - `WebPushException`. 
- - :param subscription_info: Provided by the client call - :type subscription_info: dict - :param data: Serialized data to send - :type data: str - :param vapid_private_key: Vapid instance or path to vapid private key PEM \ - or encoded str - :type vapid_private_key: Union[Vapid, str] - :param vapid_claims: Dictionary of claims ('sub' required) - :type vapid_claims: dict - :param content_encoding: Optional content type string - :type content_encoding: str - :param curl: Return as "curl" string instead of sending - :type curl: bool - :param timeout: POST requests timeout - :type timeout: float - :param ttl: Time To Live - :type ttl: int - :param verbose: Provide verbose feedback - :type verbose: bool - :param headers: Dictionary of extra HTTP headers to include - :type headers: dict - :param aiohttp_session: Optional aiohttp ClientSession for connection reuse - :type aiohttp_session: aiohttp.ClientSession - :return aiohttp.ClientResponse or string - - """ - ... - diff --git a/typings/pywebpush/__main__.pyi b/typings/pywebpush/__main__.pyi deleted file mode 100644 index 935e773..0000000 --- a/typings/pywebpush/__main__.pyi +++ /dev/null @@ -1,13 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -def get_config(): # -> Namespace: - ... - -def main() -> None: - """Send data""" - ... - -if __name__ == "__main__": - ... diff --git a/typings/pywebpush/foo.pyi b/typings/pywebpush/foo.pyi deleted file mode 100644 index cee36dd..0000000 --- a/typings/pywebpush/foo.pyi +++ /dev/null @@ -1,7 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -def send_push_notification(subscription, payload): # -> None: - ... - diff --git a/typings/pywebpush/tests/__init__.pyi b/typings/pywebpush/tests/__init__.pyi deleted file mode 100644 index 006bc27..0000000 --- a/typings/pywebpush/tests/__init__.pyi +++ /dev/null @@ -1,4 +0,0 @@ -""" -This type stub file was generated by pyright. 
-""" - From 2de9900c3f21ae7beda8daa8c1e5f34a69830435 Mon Sep 17 00:00:00 2001 From: wailbentafat Date: Wed, 25 Mar 2026 15:17:53 +0100 Subject: [PATCH 12/19] Add device token lifecycle tracking --- app/router/mobile/auth.py | 38 ++++++- app/schema/request/mobile/auth.py | 8 ++ app/service/device.py | 48 +++++++- app/service/users.py | 37 +++++-- app/worker/notification/invalid_tokens.py | 29 ++++- app/worker/notification/main.py | 47 ++++++-- app/worker/notification/notification_queue.py | 34 ++++++ db/generated/devices.py | 103 +++++++++++++++--- db/generated/models.py | 3 + db/queries/devices.sql | 29 +++++ .../sql/down/add_device_push_token_fields.sql | 6 + .../sql/up/add_device_push_token_fields.sql | 6 + ...c3e1f4a5b6_add_device_push_token_fields.py | 25 +++++ 13 files changed, 372 insertions(+), 41 deletions(-) create mode 100644 app/worker/notification/notification_queue.py create mode 100644 migrations/sql/down/add_device_push_token_fields.sql create mode 100644 migrations/sql/up/add_device_push_token_fields.sql create mode 100644 migrations/versions/d2c3e1f4a5b6_add_device_push_token_fields.py diff --git a/app/router/mobile/auth.py b/app/router/mobile/auth.py index 1784300..52e34a1 100644 --- a/app/router/mobile/auth.py +++ b/app/router/mobile/auth.py @@ -7,7 +7,12 @@ from app.core.exceptions import AppException from app.deps.token_auth import MobileUserSchema, get_current_mobile_user -from app.schema.request.mobile.auth import MobileAuthRequest, RefreshTokenRequest +from app.schema.request.mobile.auth import ( + MobileAuthRequest, + RefreshTokenRequest, + UpdateDeviceTokenRequest, + InactivateDeviceRequest, +) from app.schema.response.mobile.auth import MeResponse, DeviceSchema, MobileAuthResponse, SessionSchema, UserSchema router = APIRouter(prefix="/auth") @@ -58,6 +63,37 @@ async def revoke_device( return {"message": "Device revoked successfully"} +@router.post("/devices/token") +async def update_device_token( + req: UpdateDeviceTokenRequest, + 
container: Container = Depends(get_container), + current_user: MobileUserSchema = Depends(get_current_mobile_user), +) -> dict[str, str]: + + await container.device_service.update_device_push_token( + device_id=req.device_id, + user_id=current_user.user_id, + push_token=req.push_token, + ) + + return {"message": "Device token updated"} + + +@router.post("/devices/inactivate") +async def inactivate_device( + req: InactivateDeviceRequest, + container: Container = Depends(get_container), + current_user: MobileUserSchema = Depends(get_current_mobile_user), +) -> dict[str, str]: + + await container.device_service.inactivate_device( + device_id=req.device_id, + user_id=current_user.user_id, + ) + + return {"message": "Device marked as inactive"} + + @router.get("/me", response_model=MeResponse) async def get_me( current_user: MobileUserSchema = Depends(get_current_mobile_user), diff --git a/app/schema/request/mobile/auth.py b/app/schema/request/mobile/auth.py index f83ffa8..fcbf1c5 100644 --- a/app/schema/request/mobile/auth.py +++ b/app/schema/request/mobile/auth.py @@ -16,3 +16,11 @@ class MobileAuthRequest(BaseModel): class RefreshTokenRequest(BaseModel): refresh_token: str + +class UpdateDeviceTokenRequest(BaseModel): + device_id: UUID + push_token: str + + +class InactivateDeviceRequest(BaseModel): + device_id: UUID diff --git a/app/service/device.py b/app/service/device.py index 928dfb5..18c2928 100644 --- a/app/service/device.py +++ b/app/service/device.py @@ -35,6 +35,19 @@ async def create_device( except Exception as e : raise DBException.handle(e) + async def activate_device( + self: "DeviceService", + device_id: uuid.UUID, + user_id: uuid.UUID, + ) -> None: + try: + await self.device_querier.activate_device( + id=device_id, + user_id=user_id, + ) + except Exception as e: + raise DBException.handle(e) + async def revoke_device( self: "DeviceService", device_id: uuid.UUID, @@ -49,6 +62,40 @@ async def revoke_device( except Exception as e : raise 
DBException.handle(e) + async def update_device_push_token( + self: "DeviceService", + device_id: uuid.UUID, + user_id: uuid.UUID, + push_token: str, + ) -> UserDevice: + try: + device = await self.device_querier.update_device_push_token( + id=device_id, + push_token=push_token, + user_id=user_id, + ) + if device is None: + raise AppException.not_found("Device not found") + return device + except Exception as e: + raise DBException.handle(e) + + async def inactivate_device( + self: "DeviceService", + device_id: uuid.UUID, + user_id: uuid.UUID, + ) -> None: + try: + device = await self.device_querier.get_device__by_id(id=device_id) + if device is None or device.user_id != user_id: + raise AppException.not_found("Device not found") + await self.device_querier.deactivate_device( + id=device_id, + user_id=user_id, + ) + except Exception as e: + raise DBException.handle(e) + async def get_all_devices(self: "DeviceService", user_id: uuid.UUID) -> tuple[list[UserDevice], int]: devices: list[UserDevice] = [] @@ -81,4 +128,3 @@ async def count_devices(self: "DeviceService", user_id: uuid.UUID) -> int: except Exception as e : raise DBExceptionImpl.handle(e) - diff --git a/app/service/users.py b/app/service/users.py index ecfaf91..ee26556 100644 --- a/app/service/users.py +++ b/app/service/users.py @@ -86,20 +86,33 @@ async def mobile_register_login( device_id = req.device_id expires_at = datetime.now(timezone.utc) + timedelta(days=7) - device = await self.device_querier.create_device( - arg=device_queries.CreateDeviceParams( - column_1=device_id, - user_id=user_id, - device_name=req.device_name, - device_type=req.device_type, - totp_secret=None, - + existing_device = await self.device_querier.get_device__by_id(id=device_id) + + if existing_device: + if existing_device.user_id != user_id: + raise AppException.forbidden("Device already registered to another user") + if existing_device.is_invalid_token: + raise AppException.forbidden( + "Device push token is invalid. 
Update the token before logging in." + ) + if not existing_device.is_active: + await self.device_querier.activate_device( + id=device_id, + user_id=user_id, + ) + else: + device = await self.device_querier.create_device( + arg=device_queries.CreateDeviceParams( + column_1=device_id, + user_id=user_id, + device_name=req.device_name, + device_type=req.device_type, + totp_secret=None, + ) ) - ) - - if not device: - raise AppException.internal_error("Failed to create device") + if not device: + raise AppException.internal_error("Failed to create device") session = await self.session_querier.upsert_session( user_id=user_id, diff --git a/app/worker/notification/invalid_tokens.py b/app/worker/notification/invalid_tokens.py index 39a497b..02e24c9 100644 --- a/app/worker/notification/invalid_tokens.py +++ b/app/worker/notification/invalid_tokens.py @@ -2,9 +2,12 @@ from typing import Iterable, Sequence +from db.generated import devices as device_queries + from app.core.constant import RedisKey from app.core.logger import logger from app.infra.redis import RedisClient +from app.worker.notification.settings import NotifSetting class InvalidTokenStore: @@ -18,6 +21,7 @@ async def mark_invalid(self, tokens: Iterable[str]) -> None: return await self._redis.sadd(RedisKey.INVALID_TOKEN_SET_KEY, *normalized) + await self._redis.expire(RedisKey.INVALID_TOKEN_SET_KEY, NotifSetting.TTL_SECONDS) logger.warning("Marked %d tokens for cleanup", len(normalized)) @@ -35,4 +39,27 @@ async def remove(self, tokens: Sequence[str]) -> None: await self._redis.srem( RedisKey.INVALID_TOKEN_SET_KEY, *tokens - ) \ No newline at end of file + ) + + +class DeviceInvalidationStore: + def __init__(self, device_querier: device_queries.AsyncQuerier) -> None: + self._device_querier = device_querier + + async def mark_invalid(self, tokens: Iterable[str]) -> None: + normalized: list[str] = [t for t in tokens if t] + + if not normalized: + return + + failed: list[str] = [] + for token in normalized: + try: + 
await self._device_querier.mark_device_token_invalid(push_token=token) + except Exception: + failed.append(token) + logger.exception("Failed to flag device for invalid token %s", token) + + marked = len(normalized) - len(failed) + if marked: + logger.warning("Flagged %d devices as invalid", marked) diff --git a/app/worker/notification/main.py b/app/worker/notification/main.py index 784cf19..1705d94 100644 --- a/app/worker/notification/main.py +++ b/app/worker/notification/main.py @@ -3,32 +3,51 @@ import asyncio from typing import Sequence +from db.generated import devices as device_queries + from app.core.logger import logger from app.worker.notification.firebase import ( NotificationDeliveryError, init_firebase_app, send_notification, ) -from app.worker.notification.invalid_tokens import InvalidTokenStore -from app.infra.notification_queue import NotificationQueue, NotificationQueueEntry -from app.infra.redis import RedisClient -from app.infra.nats import NatsClient +from app.worker.notification.invalid_tokens import ( + DeviceInvalidationStore, + InvalidTokenStore, +) +from app.worker.notification.notification_queue import NotificationQueue, NotificationQueueEntry from app.worker.notification.rate_limiter import RateLimiter from app.worker.notification.settings import NotifSetting +from app.infra.database import engine +from app.infra.redis import RedisClient +from app.infra.nats import NatsClient async def process_entry( entry: NotificationQueueEntry, queue: NotificationQueue, invalid_tokens: InvalidTokenStore, + invalid_devices: DeviceInvalidationStore, ) -> None: try: - await asyncio.to_thread(send_notification, entry.notification) + valid_tokens = [ + t for t in entry.notification.tokens + if not await invalid_tokens.is_invalid(t)] + + + if not valid_tokens: + logger.info("All tokens are invalid, skipping notification") + return + + notification = entry.notification.model_copy(update={"tokens": valid_tokens}) + + + await asyncio.to_thread(send_notification, 
notification) except NotificationDeliveryError as e: if e.invalid_tokens: await invalid_tokens.mark_invalid(e.invalid_tokens) - + await invalid_devices.mark_invalid(e.invalid_tokens) if e.failed_tokens: await retry(entry, queue, tokens=e.failed_tokens) @@ -59,7 +78,7 @@ async def retry( delay = min(NotifSetting.BASE_RETRY_DELAY * (2 ** attempts), 60) await asyncio.sleep(delay) - await queue.enqueue(notification, attempts=attempts) + await queue.enqueue_notification(notification, attempts=attempts) @@ -67,6 +86,7 @@ async def handle_message( raw_payload: bytes | str, queue: NotificationQueue, invalid_tokens: InvalidTokenStore, + invalid_devices: DeviceInvalidationStore, ) -> None: try: if isinstance(raw_payload, bytes): @@ -78,13 +98,14 @@ async def handle_message( logger.exception("Invalid message payload") return - await process_entry(entry, queue, invalid_tokens) + await process_entry(entry, queue, invalid_tokens, invalid_devices) async def run_worker( queue: NotificationQueue, invalid_tokens: InvalidTokenStore, + invalid_devices: DeviceInvalidationStore, ) -> None: logger.info("Notification worker started") @@ -94,7 +115,7 @@ async def run_worker( async def wrapped_handler(msg: bytes | str) -> None: async with semaphore: await rate_limiter.acquire() - await handle_message(msg, queue, invalid_tokens) + await handle_message(msg, queue, invalid_tokens, invalid_devices) for subject in queue.priority_subjects(): await NatsClient.subscribe(subject, wrapped_handler) @@ -121,14 +142,18 @@ async def main() -> None: queue = NotificationQueue(settings=NotifSetting) invalid_tokens = InvalidTokenStore(redis) + db_conn = await engine.connect() + device_querier = device_queries.AsyncQuerier(db_conn) + invalid_devices = DeviceInvalidationStore(device_querier) try: - await run_worker(queue, invalid_tokens) + await run_worker(queue, invalid_tokens, invalid_devices) finally: await redis.close() + await db_conn.close() logger.info("Worker shutdown") if __name__ == "__main__": - 
asyncio.run(main()) \ No newline at end of file + asyncio.run(main()) diff --git a/app/worker/notification/notification_queue.py b/app/worker/notification/notification_queue.py new file mode 100644 index 0000000..033e82e --- /dev/null +++ b/app/worker/notification/notification_queue.py @@ -0,0 +1,34 @@ +from typing import Sequence +from pydantic import BaseModel, ConfigDict, Field +from app.infra.nats import NatsClient +from app.schema.notification import NotificationPriority, PRIORITY_ORDER, UnifiedNotification +from app.worker.notification.settings import NotificationWorkerSettings + + +class NotificationQueueEntry(BaseModel): + notification: UnifiedNotification + attempts: int = Field(default=0, ge=0) + + model_config = ConfigDict(extra="forbid") + + +class NotificationQueue: + def __init__(self, settings: NotificationWorkerSettings) -> None: + self._settings = settings + + async def enqueue_notification( + self, + notification: UnifiedNotification, + attempts: int = 0 + ) -> None: + entry = NotificationQueueEntry(notification=notification, attempts=attempts) + subject = self._settings.subject_for(entry.notification.priority) + payload = entry.model_dump_json().encode("utf-8") + await NatsClient.publish(subject, payload) + + @staticmethod + def priority_index(priority: NotificationPriority) -> int: + return PRIORITY_ORDER.index(priority) + + def priority_subjects(self) -> Sequence[str]: + return self._settings.priority_subjects() \ No newline at end of file diff --git a/db/generated/devices.py b/db/generated/devices.py index 2df5e9b..2514ff9 100644 --- a/db/generated/devices.py +++ b/db/generated/devices.py @@ -12,6 +12,14 @@ from db.generated import models +ACTIVATE_DEVICE = """-- name: activate_device \\:exec +UPDATE user_devices +SET is_active = TRUE +WHERE id = :p1 +AND user_id = :p2 +""" + + COUNT__USER__DEVICES = """-- name: count__user__devices \\:one SELECT COUNT(*) FROM user_devices @@ -29,7 +37,7 @@ ) VALUES ( COALESCE(:p1, uuid_generate_v4()), :p2, 
:p3, :p4, :p5 ) -RETURNING id, user_id, device_name, device_type, totp_secret, is_2fa_enabled, last_active, created_at +RETURNING id, user_id, device_name, device_type, push_token, totp_secret, is_active, is_invalid_token, is_2fa_enabled, last_active, created_at """ @@ -42,6 +50,14 @@ class CreateDeviceParams: totp_secret: Optional[str] +DEACTIVATE_DEVICE = """-- name: deactivate_device \\:exec +UPDATE user_devices +SET is_active = FALSE +WHERE id = :p1 +AND user_id = :p2 +""" + + ENABLE_DEVICE2_FA = """-- name: enable_device2_fa \\:exec UPDATE user_devices SET is_2fa_enabled = TRUE @@ -52,19 +68,28 @@ class CreateDeviceParams: GET_DEVICE__BY_ID = """-- name: get_device__by_id \\:one -SELECT id, user_id, device_name, device_type, totp_secret, is_2fa_enabled, last_active, created_at from user_devices +SELECT id, user_id, device_name, device_type, push_token, totp_secret, is_active, is_invalid_token, is_2fa_enabled, last_active, created_at from user_devices WHERE id =:p1 """ LIST_USER_DEVICES = """-- name: list_user_devices \\:many -SELECT id, user_id, device_name, device_type, totp_secret, is_2fa_enabled, last_active, created_at +SELECT id, user_id, device_name, device_type, push_token, totp_secret, is_active, is_invalid_token, is_2fa_enabled, last_active, created_at FROM user_devices WHERE user_id = :p1 ORDER BY last_active DESC """ +MARK_DEVICE_TOKEN_INVALID = """-- name: mark_device_token_invalid \\:exec +UPDATE user_devices +SET + is_invalid_token = TRUE, + is_active = FALSE +WHERE push_token = :p1 +""" + + REVOKE_DEVICE = """-- name: revoke_device \\:exec DELETE FROM user_devices WHERE id = :p1 @@ -79,10 +104,25 @@ class CreateDeviceParams: """ +UPDATE_DEVICE_PUSH_TOKEN = """-- name: update_device_push_token \\:one +UPDATE user_devices +SET + push_token = :p2, + is_active = TRUE, + is_invalid_token = FALSE +WHERE id = :p1 +AND user_id = :p3 +RETURNING id, user_id, device_name, device_type, push_token, totp_secret, is_active, is_invalid_token, is_2fa_enabled, 
last_active, created_at +""" + + class AsyncQuerier: def __init__(self, conn: sqlalchemy.ext.asyncio.AsyncConnection): self._conn = conn + async def activate_device(self, *, id: uuid.UUID, user_id: uuid.UUID) -> None: + await self._conn.execute(sqlalchemy.text(ACTIVATE_DEVICE), {"p1": id, "p2": user_id}) + async def count__user__devices(self, *, user_id: uuid.UUID) -> Optional[int]: row = (await self._conn.execute(sqlalchemy.text(COUNT__USER__DEVICES), {"p1": user_id})).first() if row is None: @@ -104,12 +144,18 @@ async def create_device(self, arg: CreateDeviceParams) -> Optional[models.UserDe user_id=row[1], device_name=row[2], device_type=row[3], - totp_secret=row[4], - is_2fa_enabled=row[5], - last_active=row[6], - created_at=row[7], + push_token=row[4], + totp_secret=row[5], + is_active=row[6], + is_invalid_token=row[7], + is_2fa_enabled=row[8], + last_active=row[9], + created_at=row[10], ) + async def deactivate_device(self, *, id: uuid.UUID, user_id: uuid.UUID) -> None: + await self._conn.execute(sqlalchemy.text(DEACTIVATE_DEVICE), {"p1": id, "p2": user_id}) + async def enable_device2_fa(self, *, id: uuid.UUID, user_id: uuid.UUID) -> None: await self._conn.execute(sqlalchemy.text(ENABLE_DEVICE2_FA), {"p1": id, "p2": user_id}) @@ -122,10 +168,13 @@ async def get_device__by_id(self, *, id: uuid.UUID) -> Optional[models.UserDevic user_id=row[1], device_name=row[2], device_type=row[3], - totp_secret=row[4], - is_2fa_enabled=row[5], - last_active=row[6], - created_at=row[7], + push_token=row[4], + totp_secret=row[5], + is_active=row[6], + is_invalid_token=row[7], + is_2fa_enabled=row[8], + last_active=row[9], + created_at=row[10], ) async def list_user_devices(self, *, user_id: uuid.UUID) -> AsyncIterator[models.UserDevice]: @@ -136,14 +185,38 @@ async def list_user_devices(self, *, user_id: uuid.UUID) -> AsyncIterator[models user_id=row[1], device_name=row[2], device_type=row[3], - totp_secret=row[4], - is_2fa_enabled=row[5], - last_active=row[6], - 
created_at=row[7], + push_token=row[4], + totp_secret=row[5], + is_active=row[6], + is_invalid_token=row[7], + is_2fa_enabled=row[8], + last_active=row[9], + created_at=row[10], ) + async def mark_device_token_invalid(self, *, push_token: Optional[str]) -> None: + await self._conn.execute(sqlalchemy.text(MARK_DEVICE_TOKEN_INVALID), {"p1": push_token}) + async def revoke_device(self, *, id: uuid.UUID, user_id: uuid.UUID) -> None: await self._conn.execute(sqlalchemy.text(REVOKE_DEVICE), {"p1": id, "p2": user_id}) async def update_device_last_active(self, *, id: uuid.UUID) -> None: await self._conn.execute(sqlalchemy.text(UPDATE_DEVICE_LAST_ACTIVE), {"p1": id}) + + async def update_device_push_token(self, *, id: uuid.UUID, push_token: Optional[str], user_id: uuid.UUID) -> Optional[models.UserDevice]: + row = (await self._conn.execute(sqlalchemy.text(UPDATE_DEVICE_PUSH_TOKEN), {"p1": id, "p2": push_token, "p3": user_id})).first() + if row is None: + return None + return models.UserDevice( + id=row[0], + user_id=row[1], + device_name=row[2], + device_type=row[3], + push_token=row[4], + totp_secret=row[5], + is_active=row[6], + is_invalid_token=row[7], + is_2fa_enabled=row[8], + last_active=row[9], + created_at=row[10], + ) diff --git a/db/generated/models.py b/db/generated/models.py index f8353a1..db5740d 100644 --- a/db/generated/models.py +++ b/db/generated/models.py @@ -230,7 +230,10 @@ class UserDevice: user_id: uuid.UUID device_name: Optional[str] device_type: Optional[str] + push_token: Optional[str] totp_secret: Optional[str] + is_active: bool + is_invalid_token: bool is_2fa_enabled: bool last_active: datetime.datetime created_at: datetime.datetime diff --git a/db/queries/devices.sql b/db/queries/devices.sql index 91e9534..2b512d8 100644 --- a/db/queries/devices.sql +++ b/db/queries/devices.sql @@ -42,3 +42,32 @@ WHERE id =$1; SELECT COUNT(*) FROM user_devices WHERE user_id = $1; + +-- name: UpdateDevicePushToken :one +UPDATE user_devices +SET + push_token = $2, 
+ is_active = TRUE, + is_invalid_token = FALSE +WHERE id = $1 +AND user_id = $3 +RETURNING *; + +-- name: ActivateDevice :exec +UPDATE user_devices +SET is_active = TRUE +WHERE id = $1 +AND user_id = $2; + +-- name: DeactivateDevice :exec +UPDATE user_devices +SET is_active = FALSE +WHERE id = $1 +AND user_id = $2; + +-- name: MarkDeviceTokenInvalid :exec +UPDATE user_devices +SET + is_invalid_token = TRUE, + is_active = FALSE +WHERE push_token = $1; diff --git a/migrations/sql/down/add_device_push_token_fields.sql b/migrations/sql/down/add_device_push_token_fields.sql new file mode 100644 index 0000000..00d98b2 --- /dev/null +++ b/migrations/sql/down/add_device_push_token_fields.sql @@ -0,0 +1,6 @@ +DROP INDEX IF EXISTS idx_user_devices_push_token; + +ALTER TABLE user_devices + DROP COLUMN IF EXISTS is_invalid_token, + DROP COLUMN IF EXISTS is_active, + DROP COLUMN IF EXISTS push_token; diff --git a/migrations/sql/up/add_device_push_token_fields.sql b/migrations/sql/up/add_device_push_token_fields.sql new file mode 100644 index 0000000..1cf361e --- /dev/null +++ b/migrations/sql/up/add_device_push_token_fields.sql @@ -0,0 +1,6 @@ +ALTER TABLE user_devices + ADD COLUMN push_token TEXT, + ADD COLUMN is_active BOOLEAN DEFAULT TRUE NOT NULL, + ADD COLUMN is_invalid_token BOOLEAN DEFAULT FALSE NOT NULL; + +CREATE UNIQUE INDEX IF NOT EXISTS idx_user_devices_push_token ON user_devices (push_token) WHERE push_token IS NOT NULL; diff --git a/migrations/versions/d2c3e1f4a5b6_add_device_push_token_fields.py b/migrations/versions/d2c3e1f4a5b6_add_device_push_token_fields.py new file mode 100644 index 0000000..204b0b7 --- /dev/null +++ b/migrations/versions/d2c3e1f4a5b6_add_device_push_token_fields.py @@ -0,0 +1,25 @@ +"""add_device_push_token_fields + +Revision ID: d2c3e1f4a5b6 +Revises: 5ead72a95638 +Create Date: 2026-03-25 00:00:00.000000 + +""" +from typing import Sequence, Union + +from migrations.helper import run_sql_down, run_sql_up + + +# revision identifiers, used by 
Alembic. +revision: str = 'd2c3e1f4a5b6' +down_revision: Union[str, Sequence[str], None] = '5ead72a95638' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + run_sql_up("add_device_push_token_fields") + + +def downgrade() -> None: + run_sql_down("add_device_push_token_fields") From 53f04d98c3192f7d4eef1e2e73e567e00a064c33 Mon Sep 17 00:00:00 2001 From: wailbentafat Date: Wed, 25 Mar 2026 15:22:09 +0100 Subject: [PATCH 13/19] Fix lint issues --- app/infra/redis.py | 2 +- app/service/face_embedding.py | 1 - app/service/users.py | 61 ++++++++++--------- app/worker/audit/main.py | 1 - app/worker/audit/schema/audit.py | 2 - app/worker/audit/settings.py | 1 - app/worker/notification/firebase.py | 2 - app/worker/notification/main.py | 13 ++-- app/worker/notification/notification_queue.py | 2 +- app/worker/notification/rate_limiter.py | 2 +- app/worker/notification/settings.py | 8 +-- 11 files changed, 46 insertions(+), 49 deletions(-) diff --git a/app/infra/redis.py b/app/infra/redis.py index 66518e9..0ce5e68 100644 --- a/app/infra/redis.py +++ b/app/infra/redis.py @@ -73,4 +73,4 @@ async def srem(self, key: RedisKey | str, *values: str) -> int: async def close(self) -> None: - await self._client.close() \ No newline at end of file + await self._client.close() diff --git a/app/service/face_embedding.py b/app/service/face_embedding.py index b54347f..f71c906 100644 --- a/app/service/face_embedding.py +++ b/app/service/face_embedding.py @@ -68,7 +68,6 @@ def prepare(self) -> None: self.init_model() def embed(self, image: np.ndarray, bboxes: Sequence[BBox]) -> list[float]: - if not bboxes: raise ValueError("No faces to embed") diff --git a/app/service/users.py b/app/service/users.py index ee26556..c706159 100644 --- a/app/service/users.py +++ b/app/service/users.py @@ -19,7 +19,7 @@ from db.generated import user as user_queries from db.generated import devices as device_queries from 
db.generated import session as session_queries -from db.generated.models import User +from db.generated.models import User, UserDevice from app.core.logger import logger from app.service.face_embedding import FaceImagePayload, FaceEmbeddingService @@ -43,6 +43,37 @@ def __init__( self.session_querier = session_querier self.face_embedding_service = face_embedding_service + async def _ensure_device_for_login( + self, + user_id: uuid.UUID, + req: MobileAuthRequest, + ) -> UserDevice: + existing_device = await self.device_querier.get_device__by_id(id=req.device_id) + + if existing_device: + if existing_device.user_id != user_id: + raise AppException.forbidden("Device already registered to another user") + if existing_device.is_invalid_token: + raise AppException.forbidden( + "Device push token is invalid. Update the token before logging in." + ) + if not existing_device.is_active: + await self.device_querier.activate_device(id=req.device_id, user_id=user_id) + return existing_device + + device = await self.device_querier.create_device( + arg=device_queries.CreateDeviceParams( + column_1=req.device_id, + user_id=user_id, + device_name=req.device_name, + device_type=req.device_type, + totp_secret=None, + ) + ) + if not device: + raise AppException.internal_error("Failed to create device") + return device + async def mobile_register_login( self, redis: RedisClient, @@ -86,33 +117,7 @@ async def mobile_register_login( device_id = req.device_id expires_at = datetime.now(timezone.utc) + timedelta(days=7) - existing_device = await self.device_querier.get_device__by_id(id=device_id) - - if existing_device: - if existing_device.user_id != user_id: - raise AppException.forbidden("Device already registered to another user") - if existing_device.is_invalid_token: - raise AppException.forbidden( - "Device push token is invalid. Update the token before logging in." 
- ) - if not existing_device.is_active: - await self.device_querier.activate_device( - id=device_id, - user_id=user_id, - ) - else: - device = await self.device_querier.create_device( - arg=device_queries.CreateDeviceParams( - column_1=device_id, - user_id=user_id, - device_name=req.device_name, - device_type=req.device_type, - totp_secret=None, - ) - ) - - if not device: - raise AppException.internal_error("Failed to create device") + await self._ensure_device_for_login(user_id, req) session = await self.session_querier.upsert_session( user_id=user_id, diff --git a/app/worker/audit/main.py b/app/worker/audit/main.py index 0ba7f1e..fc46e1b 100644 --- a/app/worker/audit/main.py +++ b/app/worker/audit/main.py @@ -44,7 +44,6 @@ async def persist(self, payload: AuditEventMessage) -> None: ) logger.info("Persisted audit %s for %s", payload.event_type, payload.user_id) - def _parse_payload(raw_data: bytes) -> dict[str, Any] | None: try: diff --git a/app/worker/audit/schema/audit.py b/app/worker/audit/schema/audit.py index 145a6fd..e0f1c9b 100644 --- a/app/worker/audit/schema/audit.py +++ b/app/worker/audit/schema/audit.py @@ -9,5 +9,3 @@ class AuditEventMessage(BaseModel): user_id: UUID | None = None metadata: dict[str, Any] | None = None description: str | None = None - - diff --git a/app/worker/audit/settings.py b/app/worker/audit/settings.py index e5d1cd6..6d081c8 100644 --- a/app/worker/audit/settings.py +++ b/app/worker/audit/settings.py @@ -5,7 +5,6 @@ class AuditWorkerSettings(BaseSettings): - class Config: env_prefix = "AUDIT_" diff --git a/app/worker/notification/firebase.py b/app/worker/notification/firebase.py index c1d9c43..9ef3a50 100644 --- a/app/worker/notification/firebase.py +++ b/app/worker/notification/firebase.py @@ -22,8 +22,6 @@ class _SendResponse: class _BatchResponse: responses: list[_SendResponse] - - class NotificationDeliveryError(Exception): def __init__( self, diff --git a/app/worker/notification/main.py b/app/worker/notification/main.py 
index 1705d94..3f4711d 100644 --- a/app/worker/notification/main.py +++ b/app/worker/notification/main.py @@ -31,17 +31,16 @@ async def process_entry( ) -> None: try: valid_tokens = [ - t for t in entry.notification.tokens - if not await invalid_tokens.is_invalid(t)] - - + t + for t in entry.notification.tokens + if not await invalid_tokens.is_invalid(t) + ] + if not valid_tokens: logger.info("All tokens are invalid, skipping notification") return - + notification = entry.notification.model_copy(update={"tokens": valid_tokens}) - - await asyncio.to_thread(send_notification, notification) except NotificationDeliveryError as e: diff --git a/app/worker/notification/notification_queue.py b/app/worker/notification/notification_queue.py index 033e82e..bf1589d 100644 --- a/app/worker/notification/notification_queue.py +++ b/app/worker/notification/notification_queue.py @@ -31,4 +31,4 @@ def priority_index(priority: NotificationPriority) -> int: return PRIORITY_ORDER.index(priority) def priority_subjects(self) -> Sequence[str]: - return self._settings.priority_subjects() \ No newline at end of file + return self._settings.priority_subjects() diff --git a/app/worker/notification/rate_limiter.py b/app/worker/notification/rate_limiter.py index 4e67350..c5660b5 100644 --- a/app/worker/notification/rate_limiter.py +++ b/app/worker/notification/rate_limiter.py @@ -23,4 +23,4 @@ async def acquire(self) -> None: await asyncio.sleep(sleep_time) self._tokens = 0 else: - self._tokens -= 1 \ No newline at end of file + self._tokens -= 1 diff --git a/app/worker/notification/settings.py b/app/worker/notification/settings.py index 25713b6..4d23dd7 100644 --- a/app/worker/notification/settings.py +++ b/app/worker/notification/settings.py @@ -21,10 +21,10 @@ class NotificationWorkerSettings(BaseSettings): firebase_credentials_path: str | None = Field(None) MAX_SEND_ATTEMPTS = 5 BASE_RETRY_DELAY = 2 - + TTL_SECONDS = 30 * 24 * 3600 CONCURRENCY = 10 - RATE_LIMIT = 50 - RATE_PERIOD = 1.0 + 
RATE_LIMIT = 50 + RATE_PERIOD = 1.0 class Config: env_prefix = "NOTIFICATIONS_" @@ -35,4 +35,4 @@ def subject_for(self, priority: NotificationPriority) -> str: def priority_subjects(self) -> Sequence[str]: return [self.subject_for(priority) for priority in PRIORITY_ORDER] -NotifSetting = NotificationWorkerSettings() # type: ignore \ No newline at end of file +NotifSetting = NotificationWorkerSettings() # type: ignore From a30c44b197d44fe0e0ab5be4fb51443c498fad00 Mon Sep 17 00:00:00 2001 From: wailbentafat Date: Wed, 25 Mar 2026 15:23:11 +0100 Subject: [PATCH 14/19] feat: fix file structure --- app/infra/notification_queue.py | 43 --------------------------------- app/main.py | 3 --- app/router/notifications.py | 18 -------------- 3 files changed, 64 deletions(-) delete mode 100644 app/infra/notification_queue.py delete mode 100644 app/router/notifications.py diff --git a/app/infra/notification_queue.py b/app/infra/notification_queue.py deleted file mode 100644 index ee47515..0000000 --- a/app/infra/notification_queue.py +++ /dev/null @@ -1,43 +0,0 @@ -from __future__ import annotations - -from typing import Sequence - -from pydantic import BaseModel, ConfigDict, Field - -from app.infra.nats import NatsClient -from app.schema.notification import NotificationPriority, PRIORITY_ORDER, UnifiedNotification -from app.worker.notification.settings import NotificationWorkerSettings - - -class NotificationQueueEntry(BaseModel): - notification: UnifiedNotification - attempts: int = Field(default=0, ge=0) - - model_config = ConfigDict(extra="forbid") - - -class NotificationQueue: - def __init__(self, settings: NotificationWorkerSettings) -> None: - self._settings = settings - - def _subject_for(self, priority: NotificationPriority) -> str: - return self._settings.subject_for(priority) - - async def enqueue(self, notification: UnifiedNotification, attempts: int = 0) -> None: - entry = NotificationQueueEntry(notification=notification, attempts=attempts) - await 
self._publish(entry) - - async def enqueue_entry(self, entry: NotificationQueueEntry) -> None: - await self._publish(entry) - - async def _publish(self, entry: NotificationQueueEntry) -> None: - subject = self._subject_for(entry.notification.priority) - payload = entry.model_dump_json().encode("utf-8") - await NatsClient.publish(subject, payload) - - @staticmethod - def priority_index(priority: NotificationPriority) -> int: - return PRIORITY_ORDER.index(priority) - - def priority_subjects(self) -> Sequence[str]: - return self._settings.priority_subjects() diff --git a/app/main.py b/app/main.py index cbf4ffc..26c4965 100644 --- a/app/main.py +++ b/app/main.py @@ -5,13 +5,11 @@ from fastapi import FastAPI, Request, Response from fastapi.middleware.cors import CORSMiddleware from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint - from app.core.config import settings from app.infra.minio import init_minio_client from app.infra.nats import NatsClient from app.infra.redis import RedisClient from app.router.mobile import router as mobile_router -from app.router.notifications import router as notifications_router from app.router.staff import router as staff_router from app.router.web import router as web_router from app.deps.ai_deps import get_face_embedding_service @@ -115,4 +113,3 @@ def health_check() -> dict[str, str]: app.include_router(mobile_router) app.include_router(staff_router) app.include_router(web_router) -app.include_router(notifications_router) diff --git a/app/router/notifications.py b/app/router/notifications.py deleted file mode 100644 index f732332..0000000 --- a/app/router/notifications.py +++ /dev/null @@ -1,18 +0,0 @@ -from fastapi import APIRouter, status - -from app.core.logger import logger -from app.infra.notification_queue import NotificationQueue -from app.schema.notification import UnifiedNotification -from app.worker.notification.settings import NotifSetting - - -queue = NotificationQueue(settings=NotifSetting) - 
-router = APIRouter(prefix="/notifications", tags=["notifications"]) - - -@router.post("/enqueue", status_code=status.HTTP_202_ACCEPTED) -async def enqueue_notification(notification: UnifiedNotification) -> dict[str, str]: - await queue.enqueue(notification) - logger.debug("Enqueued notification priority=%s tokens=%d", notification.priority, len(notification.tokens)) - return {"status": "queued"} From e8a8da710834383da09db8d5ffc1f0d2045a3c27 Mon Sep 17 00:00:00 2001 From: wailbentafat Date: Wed, 25 Mar 2026 15:26:35 +0100 Subject: [PATCH 15/19] Fix type checking errors --- app/worker/notification/firebase.py | 6 ++++-- app/worker/notification/rate_limiter.py | 2 +- db/__init__.py | 2 ++ 3 files changed, 7 insertions(+), 3 deletions(-) create mode 100644 db/__init__.py diff --git a/app/worker/notification/firebase.py b/app/worker/notification/firebase.py index 9ef3a50..ffc2360 100644 --- a/app/worker/notification/firebase.py +++ b/app/worker/notification/firebase.py @@ -1,8 +1,10 @@ from __future__ import annotations from typing import cast -import firebase_admin # pyright: ignore[reportMissingTypeStubs] -from firebase_admin import credentials, messaging # pyright: ignore[reportMissingTypeStubs] +# pyright: ignore[reportMissingTypeStubs] +import firebase_admin # type: ignore[import-untyped] +# pyright: ignore[reportMissingTypeStubs] +from firebase_admin import credentials, messaging # type: ignore[import-untyped] from app.core.config import settings from app.core.logger import logger diff --git a/app/worker/notification/rate_limiter.py b/app/worker/notification/rate_limiter.py index c5660b5..0828527 100644 --- a/app/worker/notification/rate_limiter.py +++ b/app/worker/notification/rate_limiter.py @@ -6,7 +6,7 @@ class RateLimiter: def __init__(self, rate: int, per: float) -> None: self._rate = rate self._per = per - self._tokens = rate + self._tokens: float = float(rate) self._last = time.monotonic() self._lock = asyncio.Lock() diff --git a/db/__init__.py 
b/db/__init__.py new file mode 100644 index 0000000..05f2bf4 --- /dev/null +++ b/db/__init__.py @@ -0,0 +1,2 @@ +"""Database package placeholder used for tooling.""" + From fa023db4f80736fde27faf8251890f79439a45d8 Mon Sep 17 00:00:00 2001 From: wailbentafat Date: Wed, 25 Mar 2026 15:33:20 +0100 Subject: [PATCH 16/19] Add audit gateway and notification service --- app/container.py | 6 ++++ app/router/mobile/__init__.py | 2 ++ app/router/mobile/audit.py | 37 ++++++++++++++++++++++++ app/schema/response/mobile/audit.py | 32 +++++++++++++++++++++ app/service/audit.py | 42 +++++++++++++++++++++++++++ app/service/notification_gateway.py | 11 ++++++++ db/generated/audit.py | 44 ++++++++++++++++++++++++++++- db/queries/audit.sql | 11 ++++++++ 8 files changed, 184 insertions(+), 1 deletion(-) create mode 100644 app/router/mobile/audit.py create mode 100644 app/schema/response/mobile/audit.py create mode 100644 app/service/notification_gateway.py diff --git a/app/container.py b/app/container.py index fe56610..956c176 100644 --- a/app/container.py +++ b/app/container.py @@ -13,6 +13,7 @@ from app.service.staff_user import StaffUserService from app.service.audit import AuditService +from app.service.notification_gateway import NotificationGatewayService from app.service.upload_requests import UploadRequestsService from app.service.users import AuthService from app.service.user_notification import UserNotificationService @@ -32,6 +33,8 @@ from db.generated import notifications as notification_queries from db.generated import audit as audit_queries from app.service.event import EventService +from app.worker.notification.notification_queue import NotificationQueue +from app.worker.notification.settings import NotifSetting class Container: def __init__( @@ -118,6 +121,9 @@ def __init__( p_querier=self.participant_querier, ) + notification_queue = NotificationQueue(settings=NotifSetting) + self.notification_gateway_service = NotificationGatewayService(notification_queue) + diff 
--git a/app/router/mobile/__init__.py b/app/router/mobile/__init__.py index aa3c807..d82d02a 100644 --- a/app/router/mobile/__init__.py +++ b/app/router/mobile/__init__.py @@ -3,6 +3,7 @@ from app.router.mobile.enrollement import router as onboarding_router from app.router.mobile.event import router as event_router from app.router.mobile.notifications import router as mobile_notifications_router +from app.router.mobile.audit import router as audit_router router = APIRouter(prefix="/user", tags=["user"]) @@ -11,3 +12,4 @@ router.include_router(onboarding_router) router.include_router(event_router) router.include_router(mobile_notifications_router) +router.include_router(audit_router) diff --git a/app/router/mobile/audit.py b/app/router/mobile/audit.py new file mode 100644 index 0000000..41db10f --- /dev/null +++ b/app/router/mobile/audit.py @@ -0,0 +1,37 @@ +from __future__ import annotations + +from datetime import datetime +from uuid import UUID + +from fastapi import APIRouter, Depends, Query + +from app.container import Container, get_container +from app.core.constant import AuditEventType +from app.deps.token_auth import MobileUserSchema, get_current_mobile_user +from app.schema.response.mobile.audit import AuditEventListResponse, AuditEventSchema + +router = APIRouter(prefix="/audits", tags=["audits"]) + + +@router.get("", response_model=AuditEventListResponse) +async def list_audits( + event_type: AuditEventType | None = Query(None), + user_id: UUID | None = Query(None), + created_from: datetime | None = Query(None, alias="from"), + created_to: datetime | None = Query(None, alias="to"), + limit: int = Query(50, ge=1, le=200), + offset: int = Query(0, ge=0), + container: Container = Depends(get_container), + _: MobileUserSchema = Depends(get_current_mobile_user), +) -> AuditEventListResponse: + events = await container.audit_service.list_audit_events( + event_type=event_type, + user_id=user_id, + created_from=created_from, + created_to=created_to, + 
limit=limit, + offset=offset, + ) + return AuditEventListResponse( + items=[AuditEventSchema.from_model(event) for event in events] + ) diff --git a/app/schema/response/mobile/audit.py b/app/schema/response/mobile/audit.py new file mode 100644 index 0000000..a6ea88a --- /dev/null +++ b/app/schema/response/mobile/audit.py @@ -0,0 +1,32 @@ +from __future__ import annotations + +from datetime import datetime +from typing import Any +from uuid import UUID + +from pydantic import BaseModel + +from db.generated.models import AuditEvent +from app.core.constant import AuditEventType + + +class AuditEventSchema(BaseModel): + id: UUID + event_type: AuditEventType + user_id: UUID | None + metadata: dict[str, Any] | None + created_at: datetime + + @classmethod + def from_model(cls, event: AuditEvent) -> "AuditEventSchema": + return cls( + id=event.id, + event_type=AuditEventType(event.event_type), + user_id=event.user_id, + metadata=event.metadata, + created_at=event.created_at, + ) + + +class AuditEventListResponse(BaseModel): + items: list[AuditEventSchema] diff --git a/app/service/audit.py b/app/service/audit.py index 95f535d..61b4b4e 100644 --- a/app/service/audit.py +++ b/app/service/audit.py @@ -1,5 +1,6 @@ from __future__ import annotations +from datetime import datetime from typing import Any from uuid import UUID @@ -7,6 +8,8 @@ from app.core.exceptions import AppException from db.generated import audit as audit_queries from db.generated.models import AuditEvent +from app.worker.audit.schema.audit import AuditEventMessage +from app.infra.nats import NatsClient, NatsSubjects class AuditService: @@ -28,3 +31,42 @@ async def record_event( if audit is None: raise AppException.internal_error("Failed to persist audit event") return audit + + async def publish_event( + self, + *, + event_type: AuditEventType, + user_id: UUID | None = None, + metadata: dict[str, Any] | None = None, + description: str | None = None, + ) -> None: + message = AuditEventMessage( + 
event_type=event_type, + user_id=user_id, + metadata=metadata, + description=description, + ).model_dump_json() + await NatsClient.publish(NatsSubjects.AUDIT_EVENT, message.encode("utf-8")) + + async def list_audit_events( + self, + *, + event_type: AuditEventType | None = None, + user_id: UUID | None = None, + created_from: datetime | None = None, + created_to: datetime | None = None, + limit: int = 50, + offset: int = 0, + ) -> list[AuditEvent]: + params = audit_queries.ListAuditEventsParams( + column_1=event_type.value if event_type else None, + column_2=user_id, + column_3=created_from, + column_4=created_to, + limit=limit, + offset=offset, + ) + events: list[AuditEvent] = [] + async for event in self.audit_querier.list_audit_events(arg=params): + events.append(event) + return events diff --git a/app/service/notification_gateway.py b/app/service/notification_gateway.py new file mode 100644 index 0000000..6c57ebd --- /dev/null +++ b/app/service/notification_gateway.py @@ -0,0 +1,11 @@ +from app.schema.notification import UnifiedNotification +from app.worker.notification.notification_queue import NotificationQueue +from app.worker.notification.settings import NotifSetting + + +class NotificationGatewayService: + def __init__(self, queue: NotificationQueue | None = None) -> None: + self._queue = queue or NotificationQueue(settings=NotifSetting) + + async def send_notification(self, notification: UnifiedNotification) -> None: + await self._queue.enqueue_notification(notification) diff --git a/db/generated/audit.py b/db/generated/audit.py index 5e8d260..048edbe 100644 --- a/db/generated/audit.py +++ b/db/generated/audit.py @@ -2,7 +2,8 @@ # versions: # sqlc v1.30.0 # source: audit.sql -from typing import Any, Optional +import dataclasses +from typing import Any, AsyncIterator, Optional import uuid import sqlalchemy @@ -23,6 +24,29 @@ """ +LIST_AUDIT_EVENTS = """-- name: list_audit_events \\:many +SELECT id, event_type, user_id, metadata, created_at +FROM 
audit_events +WHERE (:p1 IS NULL OR event_type = :p1) + AND (:p2 IS NULL OR user_id = :p2) + AND (:p3 IS NULL OR created_at >= :p3) + AND (:p4 IS NULL OR created_at <= :p4) +ORDER BY created_at DESC +LIMIT :p5 +OFFSET :p6 +""" + + +@dataclasses.dataclass() +class ListAuditEventsParams: + column_1: Optional[Any] + column_2: Optional[Any] + column_3: Optional[Any] + column_4: Optional[Any] + limit: int + offset: int + + class AsyncQuerier: def __init__(self, conn: sqlalchemy.ext.asyncio.AsyncConnection): self._conn = conn @@ -38,3 +62,21 @@ async def create_audit_event(self, *, event_type: Any, user_id: Optional[uuid.UU metadata=row[3], created_at=row[4], ) + + async def list_audit_events(self, arg: ListAuditEventsParams) -> AsyncIterator[models.AuditEvent]: + result = await self._conn.stream(sqlalchemy.text(LIST_AUDIT_EVENTS), { + "p1": arg.column_1, + "p2": arg.column_2, + "p3": arg.column_3, + "p4": arg.column_4, + "p5": arg.limit, + "p6": arg.offset, + }) + async for row in result: + yield models.AuditEvent( + id=row[0], + event_type=row[1], + user_id=row[2], + metadata=row[3], + created_at=row[4], + ) diff --git a/db/queries/audit.sql b/db/queries/audit.sql index ecdfe5c..3234bda 100644 --- a/db/queries/audit.sql +++ b/db/queries/audit.sql @@ -7,3 +7,14 @@ INSERT INTO audit_events ( $1, $2, $3 ) RETURNING id, event_type, user_id, metadata, created_at; + +-- name: ListAuditEvents :many +SELECT id, event_type, user_id, metadata, created_at +FROM audit_events +WHERE ($1 IS NULL OR event_type = $1) + AND ($2 IS NULL OR user_id = $2) + AND ($3 IS NULL OR created_at >= $3) + AND ($4 IS NULL OR created_at <= $4) +ORDER BY created_at DESC +LIMIT $5 +OFFSET $6; From 152a3c47d7c4476977be5ed11acee543039748de Mon Sep 17 00:00:00 2001 From: wailbentafat Date: Wed, 25 Mar 2026 15:37:56 +0100 Subject: [PATCH 17/19] add notifcation gateway in the service --- app/container.py | 2 +- app/service/{notification_gateway.py => notification.py} | 0 2 files changed, 1 insertion(+), 1 
deletion(-) rename app/service/{notification_gateway.py => notification.py} (100%) diff --git a/app/container.py b/app/container.py index 956c176..c7289f6 100644 --- a/app/container.py +++ b/app/container.py @@ -13,7 +13,7 @@ from app.service.staff_user import StaffUserService from app.service.audit import AuditService -from app.service.notification_gateway import NotificationGatewayService +from app.service.notification import NotificationGatewayService from app.service.upload_requests import UploadRequestsService from app.service.users import AuthService from app.service.user_notification import UserNotificationService diff --git a/app/service/notification_gateway.py b/app/service/notification.py similarity index 100% rename from app/service/notification_gateway.py rename to app/service/notification.py From 6cc396959e8350d2fc1d3e16ba2b34b4e465808e Mon Sep 17 00:00:00 2001 From: wailbentafat Date: Wed, 25 Mar 2026 15:39:43 +0100 Subject: [PATCH 18/19] Enhance audit filters --- app/container.py | 1 + app/router/mobile/audit.py | 5 ++- app/schema/response/mobile/audit.py | 25 ++++++++++++--- app/service/audit.py | 49 ++++++++++++++++++++++------- app/worker/audit/main.py | 6 +++- db/generated/audit.py | 25 ++++++++------- db/queries/audit.sql | 8 ++--- 7 files changed, 86 insertions(+), 33 deletions(-) diff --git a/app/container.py b/app/container.py index c7289f6..4147b32 100644 --- a/app/container.py +++ b/app/container.py @@ -109,6 +109,7 @@ def __init__( self.audit_service = AuditService( audit_querier=self.audit_querier, + user_querier=self.user_querier, ) self.staff_user_service = StaffUserService() diff --git a/app/router/mobile/audit.py b/app/router/mobile/audit.py index 41db10f..fe3226a 100644 --- a/app/router/mobile/audit.py +++ b/app/router/mobile/audit.py @@ -33,5 +33,8 @@ async def list_audits( offset=offset, ) return AuditEventListResponse( - items=[AuditEventSchema.from_model(event) for event in events] + items=[ + 
AuditEventSchema.from_model(audit_event, actor=actor) + for audit_event, actor in events + ] ) diff --git a/app/schema/response/mobile/audit.py b/app/schema/response/mobile/audit.py index a6ea88a..0119653 100644 --- a/app/schema/response/mobile/audit.py +++ b/app/schema/response/mobile/audit.py @@ -6,25 +6,42 @@ from pydantic import BaseModel -from db.generated.models import AuditEvent +from db.generated.models import AuditEvent, User from app.core.constant import AuditEventType +from app.schema.response.mobile.auth import UserSchema + + +class AuditActorSchema(UserSchema): + display_name: str | None + + @classmethod + def from_user(cls, user: User) -> "AuditActorSchema": + return cls( + id=user.id, + email=user.email, + display_name=user.display_name, + ) class AuditEventSchema(BaseModel): id: UUID event_type: AuditEventType - user_id: UUID | None metadata: dict[str, Any] | None created_at: datetime + actor: AuditActorSchema | None @classmethod - def from_model(cls, event: AuditEvent) -> "AuditEventSchema": + def from_model( + cls, + event: AuditEvent, + actor: User | None, + ) -> "AuditEventSchema": return cls( id=event.id, event_type=AuditEventType(event.event_type), - user_id=event.user_id, metadata=event.metadata, created_at=event.created_at, + actor=AuditActorSchema.from_user(actor) if actor else None, ) diff --git a/app/service/audit.py b/app/service/audit.py index 61b4b4e..d8179d6 100644 --- a/app/service/audit.py +++ b/app/service/audit.py @@ -1,20 +1,29 @@ from __future__ import annotations -from datetime import datetime +from datetime import datetime, timezone from typing import Any from uuid import UUID from app.core.constant import AuditEventType from app.core.exceptions import AppException from db.generated import audit as audit_queries -from db.generated.models import AuditEvent +from db.generated import user as user_queries +from db.generated.models import AuditEvent, User from app.worker.audit.schema.audit import AuditEventMessage from 
app.infra.nats import NatsClient, NatsSubjects class AuditService: - def __init__(self, audit_querier: audit_queries.AsyncQuerier) -> None: + def __init__( + self, + audit_querier: audit_queries.AsyncQuerier, + user_querier: user_queries.AsyncQuerier, + ) -> None: self.audit_querier = audit_querier + self.user_querier = user_querier + + _DEFAULT_CREATED_FROM = datetime(1970, 1, 1, tzinfo=timezone.utc) + _DEFAULT_CREATED_TO = datetime(9999, 12, 31, 23, 59, 59, tzinfo=timezone.utc) async def record_event( self, @@ -57,16 +66,34 @@ async def list_audit_events( created_to: datetime | None = None, limit: int = 50, offset: int = 0, - ) -> list[AuditEvent]: + ) -> list[tuple[AuditEvent, User | None]]: params = audit_queries.ListAuditEventsParams( - column_1=event_type.value if event_type else None, - column_2=user_id, - column_3=created_from, - column_4=created_to, - limit=limit, - offset=offset, + event_type.value if event_type else None, + user_id, + created_from or self._DEFAULT_CREATED_FROM, + created_to or self._DEFAULT_CREATED_TO, + limit, + offset, ) events: list[AuditEvent] = [] async for event in self.audit_querier.list_audit_events(arg=params): events.append(event) - return events + + user_ids = {event.user_id for event in events if event.user_id is not None} + actors = await self._load_actors(user_ids) + + return [ + ( + event, + actors.get(event.user_id) if event.user_id is not None else None, + ) + for event in events + ] + + async def _load_actors(self, user_ids: set[UUID]) -> dict[UUID, User]: + actors: dict[UUID, User] = {} + for user_id in user_ids: + user = await self.user_querier.get_user_by_id(id=user_id) + if user: + actors[user_id] = user + return actors diff --git a/app/worker/audit/main.py b/app/worker/audit/main.py index fc46e1b..95ceae8 100644 --- a/app/worker/audit/main.py +++ b/app/worker/audit/main.py @@ -9,6 +9,7 @@ from app.infra.nats import NatsClient, NatsSubjects from app.service.audit import AuditService from db.generated import audit as 
audit_queries +from db.generated import user as user_queries from app.worker.audit.schema.audit import AuditEventMessage @@ -25,7 +26,10 @@ async def start(self) -> None: if self._conn is not None: return self._conn = await engine.connect() - self._audit_service = AuditService(audit_queries.AsyncQuerier(self._conn)) + self._audit_service = AuditService( + audit_queries.AsyncQuerier(self._conn), + user_queries.AsyncQuerier(self._conn), + ) async def stop(self) -> None: if self._conn is not None: diff --git a/db/generated/audit.py b/db/generated/audit.py index 048edbe..47bf3f4 100644 --- a/db/generated/audit.py +++ b/db/generated/audit.py @@ -3,6 +3,7 @@ # sqlc v1.30.0 # source: audit.sql import dataclasses +import datetime from typing import Any, AsyncIterator, Optional import uuid @@ -27,10 +28,10 @@ LIST_AUDIT_EVENTS = """-- name: list_audit_events \\:many SELECT id, event_type, user_id, metadata, created_at FROM audit_events -WHERE (:p1 IS NULL OR event_type = :p1) - AND (:p2 IS NULL OR user_id = :p2) - AND (:p3 IS NULL OR created_at >= :p3) - AND (:p4 IS NULL OR created_at <= :p4) +WHERE event_type = COALESCE(:p1, event_type) + AND user_id = COALESCE(:p2, user_id) + AND created_at >= COALESCE(:p3, created_at) + AND created_at <= COALESCE(:p4, created_at) ORDER BY created_at DESC LIMIT :p5 OFFSET :p6 @@ -39,10 +40,10 @@ @dataclasses.dataclass() class ListAuditEventsParams: - column_1: Optional[Any] - column_2: Optional[Any] - column_3: Optional[Any] - column_4: Optional[Any] + event_type: Any + user_id: Optional[uuid.UUID] + created_at: datetime.datetime + created_at_2: datetime.datetime limit: int offset: int @@ -65,10 +66,10 @@ async def create_audit_event(self, *, event_type: Any, user_id: Optional[uuid.UU async def list_audit_events(self, arg: ListAuditEventsParams) -> AsyncIterator[models.AuditEvent]: result = await self._conn.stream(sqlalchemy.text(LIST_AUDIT_EVENTS), { - "p1": arg.column_1, - "p2": arg.column_2, - "p3": arg.column_3, - "p4": arg.column_4, 
+ "p1": arg.event_type, + "p2": arg.user_id, + "p3": arg.created_at, + "p4": arg.created_at_2, "p5": arg.limit, "p6": arg.offset, }) diff --git a/db/queries/audit.sql b/db/queries/audit.sql index 3234bda..b244581 100644 --- a/db/queries/audit.sql +++ b/db/queries/audit.sql @@ -11,10 +11,10 @@ RETURNING id, event_type, user_id, metadata, created_at; -- name: ListAuditEvents :many SELECT id, event_type, user_id, metadata, created_at FROM audit_events -WHERE ($1 IS NULL OR event_type = $1) - AND ($2 IS NULL OR user_id = $2) - AND ($3 IS NULL OR created_at >= $3) - AND ($4 IS NULL OR created_at <= $4) +WHERE event_type = COALESCE($1, event_type) + AND user_id = COALESCE($2, user_id) + AND created_at >= COALESCE($3, created_at) + AND created_at <= COALESCE($4, created_at) ORDER BY created_at DESC LIMIT $5 OFFSET $6; From 5a4b52f55cf523b4deea20a343fd74b6924fbe2e Mon Sep 17 00:00:00 2001 From: wailbentafat Date: Wed, 25 Mar 2026 16:07:29 +0100 Subject: [PATCH 19/19] feat: add notifcaation and audit endpoint with propre filter link the workers with the logique --- app/container.py | 10 ++---- app/router/mobile/__init__.py | 2 -- app/router/mobile/notifications.py | 8 ++--- app/router/web/__init__.py | 3 +- app/router/web/audit.py | 40 ++++++++++++++++++++++++ app/schema/response/web/audit.py | 49 ++++++++++++++++++++++++++++++ app/service/audit.py | 17 ++++------- app/service/face_embedding.py | 2 +- app/service/notification.py | 11 ------- app/service/user_notification.py | 44 ++++++++++++--------------- 10 files changed, 125 insertions(+), 61 deletions(-) create mode 100644 app/router/web/audit.py create mode 100644 app/schema/response/web/audit.py delete mode 100644 app/service/notification.py diff --git a/app/container.py b/app/container.py index 4147b32..fd94a4f 100644 --- a/app/container.py +++ b/app/container.py @@ -13,7 +13,6 @@ from app.service.staff_user import StaffUserService from app.service.audit import AuditService -from app.service.notification import 
NotificationGatewayService from app.service.upload_requests import UploadRequestsService from app.service.users import AuthService from app.service.user_notification import UserNotificationService @@ -103,8 +102,11 @@ def __init__( staff_notifications_service=self.staff_notifications_service, ) + notification_queue = NotificationQueue(settings=NotifSetting) + self.user_notifications_service = UserNotificationService( notification_querier=self.notification_querier, + notification_queue=notification_queue, ) self.audit_service = AuditService( @@ -122,12 +124,6 @@ def __init__( p_querier=self.participant_querier, ) - notification_queue = NotificationQueue(settings=NotifSetting) - self.notification_gateway_service = NotificationGatewayService(notification_queue) - - - - async def get_container( conn: sqlalchemy.ext.asyncio.AsyncConnection = Depends(get_db), ) -> Container: diff --git a/app/router/mobile/__init__.py b/app/router/mobile/__init__.py index d82d02a..aa3c807 100644 --- a/app/router/mobile/__init__.py +++ b/app/router/mobile/__init__.py @@ -3,7 +3,6 @@ from app.router.mobile.enrollement import router as onboarding_router from app.router.mobile.event import router as event_router from app.router.mobile.notifications import router as mobile_notifications_router -from app.router.mobile.audit import router as audit_router router = APIRouter(prefix="/user", tags=["user"]) @@ -12,4 +11,3 @@ router.include_router(onboarding_router) router.include_router(event_router) router.include_router(mobile_notifications_router) -router.include_router(audit_router) diff --git a/app/router/mobile/notifications.py b/app/router/mobile/notifications.py index ca568cd..f8d4a56 100644 --- a/app/router/mobile/notifications.py +++ b/app/router/mobile/notifications.py @@ -10,23 +10,23 @@ @router.get("", response_model=UserNotificationListResponse) -async def list_user_notifications( +async def get_all_notifications( container: Container = Depends(get_container), current_user: 
MobileUserSchema = Depends(get_current_mobile_user), ) -> UserNotificationListResponse: - notifications = await container.user_notifications_service.list_notifications( + notifications = await container.user_notifications_service.get_all_notifications( user_id=current_user.user_id, ) return UserNotificationListResponse.from_models(notifications) @router.post("/read", response_model=UserNotificationListResponse) -async def mark_user_notifications_as_read( +async def mark_as_read( req: MarkUserNotificationsReadRequest, container: Container = Depends(get_container), current_user: MobileUserSchema = Depends(get_current_mobile_user), ) -> UserNotificationListResponse: - notifications = await container.user_notifications_service.mark_many_as_read( + notifications = await container.user_notifications_service.mark_notifications_as_read( notification_ids=req.notification_ids, user_id=current_user.user_id, ) diff --git a/app/router/web/__init__.py b/app/router/web/__init__.py index 396add6..9b1e12e 100644 --- a/app/router/web/__init__.py +++ b/app/router/web/__init__.py @@ -2,8 +2,9 @@ from app.router.web.staff_users import router as staff_users_router from app.router.web.event import router as event_router from app.router.web.auth import router as auth_routes - +from app.router.web.audit import router as audit_router router = APIRouter(prefix="/admin", tags=["admin"]) router.include_router(staff_users_router) router.include_router(event_router) router.include_router(auth_routes) +router.include_router(audit_router) diff --git a/app/router/web/audit.py b/app/router/web/audit.py new file mode 100644 index 0000000..2256756 --- /dev/null +++ b/app/router/web/audit.py @@ -0,0 +1,40 @@ +from __future__ import annotations + +from datetime import datetime +from uuid import UUID + +from fastapi import APIRouter, Depends, Query + +from app.container import Container, get_container +from app.core.constant import AuditEventType +from app.deps.token_auth import MobileUserSchema, 
get_current_mobile_user +from app.schema.response.web.audit import AuditEventListResponse, AuditEventSchema + +router = APIRouter(prefix="/audits", tags=["audits"]) + + +@router.get("", response_model=AuditEventListResponse) +async def list_audits( + event_type: AuditEventType | None = Query(None), + user_id: UUID | None = Query(None), + created_from: datetime | None = Query(None, alias="from"), + created_to: datetime | None = Query(None, alias="to"), + limit: int = Query(50, ge=1, le=200), + offset: int = Query(0, ge=0), + container: Container = Depends(get_container), + _: MobileUserSchema = Depends(get_current_mobile_user), +) -> AuditEventListResponse: + events = await container.audit_service.list_audit_events( + event_type=event_type, + user_id=user_id, + created_from=created_from, + created_to=created_to, + limit=limit, + offset=offset, + ) + return AuditEventListResponse( + items=[ + AuditEventSchema.from_model(audit_event, actor=actor) + for audit_event, actor in events + ] + ) diff --git a/app/schema/response/web/audit.py b/app/schema/response/web/audit.py new file mode 100644 index 0000000..0119653 --- /dev/null +++ b/app/schema/response/web/audit.py @@ -0,0 +1,49 @@ +from __future__ import annotations + +from datetime import datetime +from typing import Any +from uuid import UUID + +from pydantic import BaseModel + +from db.generated.models import AuditEvent, User +from app.core.constant import AuditEventType +from app.schema.response.mobile.auth import UserSchema + + +class AuditActorSchema(UserSchema): + display_name: str | None + + @classmethod + def from_user(cls, user: User) -> "AuditActorSchema": + return cls( + id=user.id, + email=user.email, + display_name=user.display_name, + ) + + +class AuditEventSchema(BaseModel): + id: UUID + event_type: AuditEventType + metadata: dict[str, Any] | None + created_at: datetime + actor: AuditActorSchema | None + + @classmethod + def from_model( + cls, + event: AuditEvent, + actor: User | None, + ) -> 
"AuditEventSchema": + return cls( + id=event.id, + event_type=AuditEventType(event.event_type), + metadata=event.metadata, + created_at=event.created_at, + actor=AuditActorSchema.from_user(actor) if actor else None, + ) + + +class AuditEventListResponse(BaseModel): + items: list[AuditEventSchema] diff --git a/app/service/audit.py b/app/service/audit.py index d8179d6..24a4b55 100644 --- a/app/service/audit.py +++ b/app/service/audit.py @@ -41,7 +41,7 @@ async def record_event( raise AppException.internal_error("Failed to persist audit event") return audit - async def publish_event( + async def create_record( self, *, event_type: AuditEventType, @@ -80,8 +80,11 @@ async def list_audit_events( events.append(event) user_ids = {event.user_id for event in events if event.user_id is not None} - actors = await self._load_actors(user_ids) - + actors: dict[UUID, User] = {} + for user_id in user_ids: + user = await self.user_querier.get_user_by_id(id=user_id) + if user: + actors[user_id] = user return [ ( event, @@ -89,11 +92,3 @@ async def list_audit_events( ) for event in events ] - - async def _load_actors(self, user_ids: set[UUID]) -> dict[UUID, User]: - actors: dict[UUID, User] = {} - for user_id in user_ids: - user = await self.user_querier.get_user_by_id(id=user_id) - if user: - actors[user_id] = user - return actors diff --git a/app/service/face_embedding.py b/app/service/face_embedding.py index f71c906..5b01d7c 100644 --- a/app/service/face_embedding.py +++ b/app/service/face_embedding.py @@ -81,7 +81,7 @@ def embed(self, image: np.ndarray, bboxes: Sequence[BBox]) -> list[float]: if not faces: raise ValueError("No faces detected by the model") - x1, y1, x2, y2 = bboxes[0] + x1, y1, x2, y2 = bboxes[0] # type: ignore target_cx = (x1 + x2) / 2 target_cy = (y1 + y2) / 2 diff --git a/app/service/notification.py b/app/service/notification.py deleted file mode 100644 index 6c57ebd..0000000 --- a/app/service/notification.py +++ /dev/null @@ -1,11 +0,0 @@ -from 
app.schema.notification import UnifiedNotification -from app.worker.notification.notification_queue import NotificationQueue -from app.worker.notification.settings import NotifSetting - - -class NotificationGatewayService: - def __init__(self, queue: NotificationQueue | None = None) -> None: - self._queue = queue or NotificationQueue(settings=NotifSetting) - - async def send_notification(self, notification: UnifiedNotification) -> None: - await self._queue.enqueue_notification(notification) diff --git a/app/service/user_notification.py b/app/service/user_notification.py index e13b4d9..1d73971 100644 --- a/app/service/user_notification.py +++ b/app/service/user_notification.py @@ -2,6 +2,8 @@ import uuid from app.core.exceptions import AppException +from app.schema.notification import UnifiedNotification +from app.worker.notification.notification_queue import NotificationQueue from db.generated import notifications as notification_queries from db.generated.models import Notification @@ -10,8 +12,10 @@ class UserNotificationService: def __init__( self, notification_querier: notification_queries.AsyncQuerier, + notification_queue: NotificationQueue, ) -> None: self.notification_querier = notification_querier + self._notification_queue = notification_queue async def create_notification( self, @@ -19,17 +23,22 @@ async def create_notification( user_id: uuid.UUID, type: str, payload: dict[str, Any], + notification: UnifiedNotification | None = None, ) -> Notification: - notification = await self.notification_querier.create_notification( + notification_record = await self.notification_querier.create_notification( user_id=user_id, type=type, payload=payload, ) - if notification is None: + if notification_record is None: raise AppException.internal_error("Failed to create user notification") - return notification - async def list_notifications( + if notification is not None: + await self._notification_queue.enqueue_notification(notification) + + return notification_record + 
+ async def get_all_notifications( self, *, user_id: uuid.UUID, @@ -41,21 +50,7 @@ async def list_notifications( notifications.append(notification) return notifications - async def mark_as_read( - self, - *, - notification_id: uuid.UUID, - user_id: uuid.UUID, - ) -> Notification: - notification = await self.notification_querier.mark_notification_as_read( - id=notification_id, - user_id=user_id, - ) - if notification is None: - raise AppException.not_found("Notification not found or already read") - return notification - - async def mark_many_as_read( + async def mark_notifications_as_read( self, *, notification_ids: list[uuid.UUID], @@ -67,10 +62,11 @@ async def mark_many_as_read( if notification_id in seen_notification_ids: continue seen_notification_ids.add(notification_id) - notifications.append( - await self.mark_as_read( - notification_id=notification_id, - user_id=user_id, - ) + notification = await self.notification_querier.mark_notification_as_read( + id=notification_id, + user_id=user_id, ) + if notification is None: + raise AppException.not_found("Notification not found or already read") + notifications.append(notification) return notifications