@@ -1,3 +1,4 @@
+from datetime import datetime, timedelta, timezone
 import re
 from unittest.mock import call

@@ -7,6 +8,7 @@
 from connect.eaas.core.inject.models import Context
 from sqlalchemy.exc import DBAPIError

+from connect_bi_reporter.constants import SECONDS_BACKOFF_FACTOR, SECONDS_DELAY
 from connect_bi_reporter.events import ConnectBiReporterEventsApplication


@@ -16,6 +18,8 @@ def test_process_upload(dbsession, connect_client, installation, logger, mocker,
         logger,
         config={},
     )
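+    # time.monotonic is patched so its two readings (10, 12) yield the 2-second
+    # duration reported in the log message asserted below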
+    p_time = mocker.patch('connect_bi_reporter.uploads.tasks.time')
+    p_time.monotonic.side_effect = [10, 12]
     ext.get_installation_admin_client = lambda self: connect_client

     with open('./tests/uploads/test-zip.zip', 'rb') as zf:
@@ -41,6 +45,10 @@ def test_process_upload(dbsession, connect_client, installation, logger, mocker,
     assert re.match(feed.file_name + '_\\d{8} \\d{2}:\\d{2}:\\d{2}.csv', upload.name)
     assert upload.size == 1024
     assert upload.status == upload.STATUSES.uploaded
+    assert logger.method_calls[0].args[0] == (
+        f'Execution of `process_upload` task for Upload {upload.id} '
+        f'finished (took "2"): Upload status: `uploaded`, Taks result: `done`.'
+    )


 def test_process_upload_report_download_failed(
@@ -229,6 +237,11 @@ def test_create_upload_schedule_task(
         ),
     )
     ext.get_installation_admin_client = lambda self: connect_client
+
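+    # freeze utcnow so the 'onetime' trigger dates asserted further down are deterministic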
+    _now = datetime(2024, 10, 15, 10, 0, 0, tzinfo=timezone.utc)
+    p_datetime = mocker.patch('connect_bi_reporter.uploads.services.datetime')
+    p_datetime.utcnow = lambda: _now
+
     mocker.patch(
         'connect_bi_reporter.uploads.tasks.get_extension_owner_client',
         return_value=connect_client,
@@ -245,6 +258,9 @@ def test_create_upload_schedule_task(
         'connect_bi_reporter.scheduler.create_schedule_task',
         return_value=eaas_schedule_task,
     )
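+    # capture the payloads built by get_task_payload for each scheduled upload task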
+    p_get_task_payload = mocker.patch(
+        'connect_bi_reporter.scheduler.EaasScheduleTask.get_task_payload',
+    )
     feed1 = feed_factory(
         schedule_id=report_schedule['id'],
         account_id=installation['owner']['id'],
@@ -274,6 +290,44 @@ def test_create_upload_schedule_task(
             ),
         ],
     )
+    delay = SECONDS_DELAY
+    new_delay = SECONDS_DELAY + SECONDS_BACKOFF_FACTOR
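+    # the first upload is scheduled SECONDS_DELAY from "now"; each subsequent one adds SECONDS_BACKOFF_FACTOR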
+    p_get_task_payload.assert_has_calls(
+        [
+            call(
+                trigger_type='onetime',
+                trigger_data={
+                    'date': (_now + timedelta(seconds=delay)).isoformat(),
+                },
+                method_payload={
+                    'method': 'process_upload',
+                    'description': 'This task will download the report from'
+                    ' connect and published it in the respective storage.',
+                    'parameter': {
+                        'installation_id': 'EIN-8436-7221-8308',
+                        'upload_id': f'ULF-{feed1.id.split("-", 1)[-1]}-000',
+                    },
+                    'name': 'Process Uploads - PA-000-000',
+                },
+            ),
+            call(
+                trigger_type='onetime',
+                trigger_data={
+                    'date': (_now + timedelta(seconds=new_delay)).isoformat(),
+                },
+                method_payload={
+                    'method': 'process_upload',
+                    'description': 'This task will download the report from'
+                    ' connect and published it in the respective storage.',
+                    'parameter': {
+                        'installation_id': 'EIN-8436-7221-8308',
+                        'upload_id': f'ULF-{feed2.id.split("-", 1)[-1]}-000',
+                    },
+                    'name': 'Process Uploads - PA-000-000',
+                },
+            ),
+        ],
+    )
     for idx, zipped in enumerate(zip(uploads, [feed1, feed2])):
         upload, feed = zipped
         assert result.status == 'success'
@@ -298,6 +352,8 @@ def test_create_upload_schedule_task(
         f' created for Upload `{uploads[1].id}`: '
         f'Will process Report File `{report_file[1]["id"]}`'
     )
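+    # sanity-check the constant-derived delays used for the scheduled tasks above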
+    assert delay == 120
+    assert new_delay == 240


 def test_create_upload_schedule_task_no_feeds(