prefect.server.api.events

count_account_events(filter, countable=Path(...), time_unit=Body(default=TimeUnit.day), time_interval=Body(default=1.0, ge=0.01), db=Depends(provide_database_interface)) async

Returns distinct objects and the count of events associated with them. Objects that can be counted include the day the event occurred, the type of event, or the IDs of the resources associated with the event.

Source code in src/prefect/server/api/events.py
@router.post(
    "/count-by/{countable}",
)
async def count_account_events(
    filter: EventFilter,
    countable: Countable = Path(...),
    time_unit: TimeUnit = Body(default=TimeUnit.day),
    time_interval: float = Body(default=1.0, ge=0.01),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> List[EventCount]:
    """
    Returns distinct objects and the count of events associated with them.  Objects
    that can be counted include the day the event occurred, the type of event, or
    the IDs of the resources associated with the event.
    """
    async with db.session_context() as session:
        return await handle_event_count_request(
            session=session,
            filter=filter,
            countable=countable,
            time_unit=time_unit,
            time_interval=time_interval,
        )
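
For illustration, a minimal client-side sketch of calling this endpoint with httpx, assuming a local Prefect server at http://127.0.0.1:4200 with this router mounted under /api/events, and assuming "day" is a valid countable; the empty filter and the printed response fields are illustrative only:

import httpx

with httpx.Client(base_url="http://127.0.0.1:4200/api") as client:
    # Count events bucketed by the day they occurred; an empty filter matches all events.
    # With multiple body parameters, filter/time_unit/time_interval are embedded by key
    # in a single JSON object.
    response = client.post(
        "/events/count-by/day",
        json={"filter": {}, "time_unit": "day", "time_interval": 1.0},
    )
    response.raise_for_status()
    for bucket in response.json():
        print(bucket)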

create_events(events, ephemeral_request=Depends(is_ephemeral_request), db=Depends(provide_database_interface)) async

Record a batch of Events

Source code in src/prefect/server/api/events.py
@router.post("", status_code=status.HTTP_204_NO_CONTENT, response_class=Response)
async def create_events(
    events: List[Event],
    ephemeral_request: bool = Depends(is_ephemeral_request),
    db: PrefectDBInterface = Depends(provide_database_interface),
):
    """Record a batch of Events"""
    received_events = [event.receive() for event in events]
    if ephemeral_request:
        async with db.session_context() as session:
            try:
                await database.write_events(session, received_events)
            except RuntimeError as exc:
                if "can't create new thread at interpreter shutdown" in str(exc):
                    # Background events sometimes fail to write when the interpreter is shutting down.
                    # This is a known issue in Python 3.12.2 that can be ignored and is fixed in Python 3.12.3.
                    # see e.g. https://github.com/python/cpython/issues/113964
                    logger.debug("Received event during interpreter shutdown, ignoring")
                else:
                    raise
    else:
        await messaging.publish(received_events)
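
As a hedged sketch of what a client might send, assuming the same local server and that "event" and "resource" (with a "prefect.resource.id" key) are the required fields of an Event; the event name, resource ID, and payload below are placeholders:

import httpx
from datetime import datetime, timezone
from uuid import uuid4

event = {
    "id": str(uuid4()),
    "occurred": datetime.now(timezone.utc).isoformat(),
    "event": "my-app.job.completed",
    "resource": {"prefect.resource.id": "my-app.job.123"},
    "payload": {"exit_code": 0},
}

with httpx.Client(base_url="http://127.0.0.1:4200/api") as client:
    # The endpoint accepts a JSON array of events and returns 204 No Content on success.
    response = client.post("/events", json=[event])
    response.raise_for_status()

In practice, the prefect.events.emit_event helper in the Python client is the usual way to emit events rather than posting to this endpoint directly.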

read_account_events_page(request, page_token=Depends(verified_page_token), db=Depends(provide_database_interface)) async

Returns the next page of Events for a previous query against the given Account, and the URL to request the next page (if there are more results).

Source code in src/prefect/server/api/events.py
@router.get(
    "/filter/next",
)
async def read_account_events_page(
    request: Request,
    page_token: str = Depends(verified_page_token),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> EventPage:
    """
    Returns the next page of Events for a previous query against the given Account, and
    the URL to request the next page (if there are more results).
    """
    async with db.session_context() as session:
        try:
            events, total, next_token = await database.query_next_page(
                session=session, page_token=page_token
            )
        except InvalidTokenError:
            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)

        return EventPage(
            events=events,
            total=total,
            next_page=generate_next_page_link(request, next_token),
        )
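
A sketch of paginating with this endpoint, assuming the first page was obtained from POST /api/events/filter on a local server; because next_page is a fully qualified URL carrying the page token, the client only needs to follow it until it is null:

import httpx

with httpx.Client() as client:
    # Fetch the first page, then follow the `next_page` links served by this endpoint.
    page = client.post(
        "http://127.0.0.1:4200/api/events/filter", json={"filter": {}}
    ).json()
    while True:
        for event in page["events"]:
            print(event.get("event"), event.get("occurred"))
        if not page.get("next_page"):
            break
        page = client.get(page["next_page"]).json()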

read_events(request, filter=Body(None, description='Additional optional filter criteria to narrow down the set of Events'), limit=Body(INTERACTIVE_PAGE_SIZE, ge=0, le=INTERACTIVE_PAGE_SIZE, embed=True, description='The number of events to return with each page'), db=Depends(provide_database_interface)) async

Queries for Events matching the given filter criteria in the given Account. Returns the first page of results, and the URL to request the next page (if there are more results).

Source code in src/prefect/server/api/events.py
@router.post(
    "/filter",
)
async def read_events(
    request: Request,
    filter: Optional[EventFilter] = Body(
        None,
        description=(
            "Additional optional filter criteria to narrow down the set of Events"
        ),
    ),
    limit: int = Body(
        INTERACTIVE_PAGE_SIZE,
        ge=0,
        le=INTERACTIVE_PAGE_SIZE,
        embed=True,
        description="The number of events to return with each page",
    ),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> EventPage:
    """
    Queries for Events matching the given filter criteria in the given Account.  Returns
    the first page of results, and the URL to request the next page (if there are more
    results).
    """
    filter = filter or EventFilter()
    async with db.session_context() as session:
        events, total, next_token = await database.query_events(
            session=session,
            filter=filter,
            page_size=limit,
        )

        return EventPage(
            events=events,
            total=total,
            next_page=generate_next_page_link(request, next_token),
        )
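
A minimal sketch of requesting the first page, assuming a local server and that EventFilter accepts an "event" section with a "prefix" list (an assumption about the filter schema); the prefix shown is only an example:

import httpx

with httpx.Client(base_url="http://127.0.0.1:4200/api") as client:
    # Ask for the first page of events whose names start with "prefect.flow-run.",
    # capped at 10 results per page.
    response = client.post(
        "/events/filter",
        json={"filter": {"event": {"prefix": ["prefect.flow-run."]}}, "limit": 10},
    )
    response.raise_for_status()
    page = response.json()
    print(page["total"], "matching events")
    for event in page["events"]:
        print(event.get("event"))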

stream_events_in(websocket) async

Open a WebSocket to stream incoming Events

Source code in src/prefect/server/api/events.py
@router.websocket("/in")
async def stream_events_in(websocket: WebSocket) -> None:
    """Open a WebSocket to stream incoming Events"""

    await websocket.accept()

    try:
        async with messaging.create_event_publisher() as publisher:
            async for event_json in websocket.iter_text():
                event = Event.model_validate_json(event_json)
                await publisher.publish_event(event.receive())
    except subscriptions.NORMAL_DISCONNECT_EXCEPTIONS:  # pragma: no cover
        pass  # it's fine if a client disconnects either normally or abnormally

    return None
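
A hedged sketch of a client pushing events over this socket with the websockets library, assuming the endpoint is reachable at ws://127.0.0.1:4200/api/events/in and that each text frame is one Event serialized as JSON (which is what the handler above parses); field values are placeholders:

import asyncio
import json
from datetime import datetime, timezone
from uuid import uuid4

import websockets

async def main() -> None:
    async with websockets.connect("ws://127.0.0.1:4200/api/events/in") as ws:
        # One event per text frame; the server validates each frame as an Event.
        await ws.send(
            json.dumps(
                {
                    "id": str(uuid4()),
                    "occurred": datetime.now(timezone.utc).isoformat(),
                    "event": "my-app.sensor.reading",
                    "resource": {"prefect.resource.id": "my-app.sensor.42"},
                }
            )
        )

asyncio.run(main())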

stream_workspace_events_out(websocket) async

Open a WebSocket to stream Events

Source code in src/prefect/server/api/events.py
@router.websocket("/out")
async def stream_workspace_events_out(
    websocket: WebSocket,
) -> None:
    """Open a WebSocket to stream Events"""
    websocket = await subscriptions.accept_prefect_socket(
        websocket,
    )
    if not websocket:
        return

    try:
        # After authentication, the next message is expected to be a filter message, any
        # other type of message will close the connection.
        message = await websocket.receive_json()

        if message["type"] != "filter":
            return await websocket.close(
                WS_1002_PROTOCOL_ERROR, reason="Expected 'filter' message"
            )

        wants_backfill = message.get("backfill", True)

        try:
            filter = EventFilter.model_validate(message["filter"])
        except Exception as e:
            return await websocket.close(
                WS_1002_PROTOCOL_ERROR, reason=f"Invalid filter: {e}"
            )

        filter.occurred.clamp(PREFECT_EVENTS_MAXIMUM_WEBSOCKET_BACKFILL.value())
        filter.order = EventOrder.ASC

        # subscribe to the ongoing event stream first so we don't miss events...
        async with stream.events(filter) as event_stream:
            # ...then if the user wants, backfill up to the last 1k events...
            if wants_backfill:
                backfilled_ids = set()

                async with automations_session() as session:
                    backfill, _, next_page = await database.query_events(
                        session=session,
                        filter=filter,
                        page_size=PREFECT_EVENTS_WEBSOCKET_BACKFILL_PAGE_SIZE.value(),
                    )

                    while backfill:
                        for event in backfill:
                            backfilled_ids.add(event.id)
                            await websocket.send_json(
                                {
                                    "type": "event",
                                    "event": event.model_dump(mode="json"),
                                }
                            )

                        if not next_page:
                            break

                        backfill, _, next_page = await database.query_next_page(
                            session=session,
                            page_token=next_page,
                        )

            # ...before resuming the ongoing stream of events
            async for event in event_stream:
                if not event:
                    if await subscriptions.still_connected(websocket):
                        continue
                    break

                if wants_backfill and event.id in backfilled_ids:
                    backfilled_ids.remove(event.id)
                    continue

                await websocket.send_json(
                    {"type": "event", "event": event.model_dump(mode="json")}
                )

    except subscriptions.NORMAL_DISCONNECT_EXCEPTIONS:  # pragma: no cover
        pass  # it's fine if a client disconnects either normally or abnormally

    return None
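
A hedged sketch of consuming this stream directly with the websockets library, assuming the endpoint is reachable at ws://127.0.0.1:4200/api/events/out; the authentication handshake expected by accept_prefect_socket is assumed here to be a single "auth" message and may differ by server configuration, and Prefect's own event subscriber client normally handles it:

import asyncio
import json

import websockets

async def main() -> None:
    async with websockets.connect("ws://127.0.0.1:4200/api/events/out") as ws:
        # Assumed authentication handshake; handled server-side by accept_prefect_socket.
        await ws.send(json.dumps({"type": "auth", "token": None}))
        await ws.recv()

        # Per the handler above, the first message after authentication must be a
        # "filter" message; backfill of recent matching events defaults to on.
        await ws.send(json.dumps({"type": "filter", "filter": {}, "backfill": True}))

        # Each subsequent frame is {"type": "event", "event": {...}}.
        async for frame in ws:
            message = json.loads(frame)
            if message.get("type") == "event":
                print(message["event"].get("event"), message["event"].get("occurred"))

asyncio.run(main())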