@@ -1,3 +1,4 @@
+from datetime import datetime, timedelta, timezone
 import re
 from unittest.mock import call
 
@@ -7,6 +8,7 @@
 from connect.eaas.core.inject.models import Context
 from sqlalchemy.exc import DBAPIError
 
+from connect_bi_reporter.constants import SECONDS_BACKOFF_FACTOR, SECONDS_DELAY
 from connect_bi_reporter.events import ConnectBiReporterEventsApplication
 
 
@@ -229,6 +231,11 @@ def test_create_upload_schedule_task(
         ),
     )
     ext.get_installation_admin_client = lambda self: connect_client
+
+    _now = datetime(2024, 10, 15, 10, 0, 0, tzinfo=timezone.utc)
+    p_datetime = mocker.patch('connect_bi_reporter.uploads.services.datetime')
+    p_datetime.utcnow = lambda: _now
+
     mocker.patch(
         'connect_bi_reporter.uploads.tasks.get_extension_owner_client',
         return_value=connect_client,
@@ -245,6 +252,9 @@ def test_create_upload_schedule_task(
         'connect_bi_reporter.scheduler.create_schedule_task',
         return_value=eaas_schedule_task,
     )
+    p_get_task_payload = mocker.patch(
+        'connect_bi_reporter.scheduler.EaasScheduleTask.get_task_payload',
+    )
     feed1 = feed_factory(
         schedule_id=report_schedule['id'],
         account_id=installation['owner']['id'],
@@ -274,6 +284,44 @@ def test_create_upload_schedule_task(
             ),
         ],
     )
+    delay = SECONDS_DELAY
+    new_delay = SECONDS_DELAY + SECONDS_BACKOFF_FACTOR
+    p_get_task_payload.assert_has_calls(
+        [
+            call(
+                trigger_type='onetime',
+                trigger_data={
+                    'date': (_now + timedelta(seconds=delay)).isoformat(),
+                },
+                method_payload={
+                    'method': 'process_upload',
+                    'description': 'This task will download the report from'
+                    ' connect and published it in the respective storage.',
+                    'parameter': {
+                        'installation_id': 'EIN-8436-7221-8308',
+                        'upload_id': f'ULF-{feed1.id.split("-", 1)[-1]}-000',
+                    },
+                    'name': 'Process Uploads - PA-000-000',
+                },
+            ),
+            call(
+                trigger_type='onetime',
+                trigger_data={
+                    'date': (_now + timedelta(seconds=new_delay)).isoformat(),
+                },
+                method_payload={
+                    'method': 'process_upload',
+                    'description': 'This task will download the report from'
+                    ' connect and published it in the respective storage.',
+                    'parameter': {
+                        'installation_id': 'EIN-8436-7221-8308',
+                        'upload_id': f'ULF-{feed2.id.split("-", 1)[-1]}-000',
+                    },
+                    'name': 'Process Uploads - PA-000-000',
+                },
+            ),
+        ],
+    )
     for idx, zipped in enumerate(zip(uploads, [feed1, feed2])):
         upload, feed = zipped
         assert result.status == 'success'
@@ -298,6 +346,8 @@ def test_create_upload_schedule_task(
         f' created for Upload `{uploads[1].id}`: '
         f'Will process Report File `{report_file[1]["id"]}`'
     )
+    assert delay == 120
+    assert new_delay == 240
 
 
 def test_create_upload_schedule_task_no_feeds(
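Note on the new delay assertions: delay == 120 and new_delay == 240 imply SECONDS_DELAY == 120 and SECONDS_BACKOFF_FACTOR == 120. Below is a minimal sketch of the trigger-date arithmetic the two expected get_task_payload calls encode, assuming a linear per-feed backoff; the constant values and the trigger_date_for_feed helper are illustrative only and not part of this diff.

from datetime import datetime, timedelta, timezone

# Values implied by the assertions above; the real ones live in
# connect_bi_reporter.constants (assumption, not verified here).
SECONDS_DELAY = 120
SECONDS_BACKOFF_FACTOR = 120


def trigger_date_for_feed(now: datetime, feed_index: int) -> str:
    # Hypothetical helper: the n-th processed feed's one-time task is delayed
    # by SECONDS_DELAY plus SECONDS_BACKOFF_FACTOR for each feed scheduled
    # before it, matching the two payloads the test expects.
    delay = SECONDS_DELAY + feed_index * SECONDS_BACKOFF_FACTOR
    return (now + timedelta(seconds=delay)).isoformat()


now = datetime(2024, 10, 15, 10, 0, 0, tzinfo=timezone.utc)
assert trigger_date_for_feed(now, 0) == '2024-10-15T10:02:00+00:00'  # feed1
assert trigger_date_for_feed(now, 1) == '2024-10-15T10:04:00+00:00'  # feed2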