Skip to content

Commit 2d65764

Browse files
authored
Merge pull request #47 from cloudblue/LITE-30091-invalid-rql-filter-when-fetching-reports-for-upload-creation
LITE-30091 Invalid RQL filter when fetching reports for upload creation
2 parents ff2c593 + b7a876e commit 2d65764

File tree

5 files changed

+54
-22
lines changed

5 files changed

+54
-22
lines changed

connect_bi_reporter/uploads/services.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -118,13 +118,13 @@ def disable_feeds(db, feeds, logger):
118118
def create_uploads(db, client, logger, feeds):
119119
feeds_to_disable = []
120120
uploads = []
121-
rql = R().status.eq('succeeded')
122121
feed_ids = [f.id for f in feeds]
123122
existing_reports_ids = util.flatten_iterator(db.query(Upload.report_id).filter(
124123
Upload.feed_id.in_(feed_ids),
125124
))
126125
for feed in feeds:
127126
report_file = None
127+
rql = R().status.eq('succeeded')
128128
rql &= R().account.id.eq(feed.account_id) & R().schedule.id.eq(feed.schedule_id)
129129
reason, mark_as_disabled = _get_report_schedule_reason(client, feed.schedule_id)
130130
if mark_as_disabled:

docker-compose.yml

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -36,8 +36,6 @@ services:
3636
DATABASE_URL: postgresql+psycopg2://postgres:1q2w3e@db/bi_reporter
3737
depends_on:
3838
- db
39-
links:
40-
- "db_ram:db"
4139

4240
connect_bi_reporter_test:
4341
container_name: connect_bi_reporter_test

poetry.lock

Lines changed: 5 additions & 5 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ sqlalchemy = "^1.3.12"
2020
psycopg2-binary = "^2.9.6"
2121
pyjwt = "^2.8.0"
2222
azure-storage-blob = "^12.19.0"
23-
connect-extension-utils = "^1.0.0"
23+
connect-extension-utils = "^1.2.0"
2424

2525
[tool.poetry.dev-dependencies]
2626
pytest = ">=6.1.2,<8"

tests/uploads/test_tasks.py

Lines changed: 47 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,9 @@
11
import re
2+
from unittest.mock import call
23

34
import pytest
45
from connect.client import ClientError
6+
from connect.client.rql import R
57
from connect.eaas.core.inject.models import Context
68
from sqlalchemy.exc import DBAPIError
79

@@ -213,7 +215,10 @@ def test_create_upload_schedule_task(
213215
'account_id': installation['owner']['id'],
214216
},
215217
}
216-
report_file = {'id': 'RP-262-019-481', 'renderer': 'csv'}
218+
report_file = [
219+
{'id': 'RP-262-019-481', 'renderer': 'csv'},
220+
{'id': 'RP-262-019-482', 'renderer': 'csv'},
221+
]
217222
ext = ConnectBiReporterEventsApplication(
218223
connect_client,
219224
logger,
@@ -232,37 +237,66 @@ def test_create_upload_schedule_task(
232237
'connect_bi_reporter.uploads.services.get_report_schedule',
233238
return_value=report_schedule,
234239
)
235-
mocker.patch(
240+
p_get_reporting_report = mocker.patch(
236241
'connect_bi_reporter.uploads.services.get_reporting_report',
237-
return_value=report_file,
242+
side_effect=report_file,
238243
)
239244
mocker.patch(
240245
'connect_bi_reporter.scheduler.create_schedule_task',
241246
return_value=eaas_schedule_task,
242247
)
243-
feed = feed_factory(
248+
feed1 = feed_factory(
249+
schedule_id=report_schedule['id'],
250+
account_id=installation['owner']['id'],
251+
status=feed_factory._meta.model.STATUSES.enabled,
252+
)
253+
feed2 = feed_factory(
244254
schedule_id=report_schedule['id'],
245255
account_id=installation['owner']['id'],
246256
status=feed_factory._meta.model.STATUSES.enabled,
247257
)
248258

249259
result = ext.create_uploads(schedule)
250-
upload = dbsession.query(upload_factory._meta.model).first()
251-
assert result.status == 'success'
252-
assert upload.report_id == report_file['id']
253-
assert upload.status == upload_factory._meta.model.STATUSES.pending
254-
assert upload.feed_id == feed.id
260+
uploads = dbsession.query(upload_factory._meta.model).all()
261+
p_get_reporting_report.assert_has_calls(
262+
[
263+
call(
264+
connect_client, (
265+
R().status.eq('succeeded') & R().account.id.eq(feed1.account_id)
266+
& R().schedule.id.eq(feed1.schedule_id)
267+
),
268+
),
269+
call(
270+
connect_client, (
271+
R().status.eq('succeeded') & R().account.id.eq(feed2.account_id)
272+
& R().schedule.id.eq(feed2.schedule_id)
273+
),
274+
),
275+
],
276+
)
277+
for idx, zipped in enumerate(zip(uploads, [feed1, feed2])):
278+
upload, feed = zipped
279+
assert result.status == 'success'
280+
assert upload.report_id == report_file[idx]['id']
281+
assert upload.status == upload_factory._meta.model.STATUSES.pending
282+
assert upload.feed_id == feed.id
283+
255284
assert logger.method_calls[0].args[0] == (
256-
f'New Uploads were created: `Upload={upload.id}'
257-
f' for Feed={feed.id}`.'
285+
f'New Uploads were created: `Upload={uploads[0].id} for Feed={feed1.id}, '
286+
f'Upload={uploads[1].id} for Feed={feed2.id}`.'
258287
)
259288
assert logger.method_calls[1].args[0] == (
260289
f'Periodic Schedule Task created: `{eaas_schedule_task["id"]}`.'
261290
)
262291
assert logger.method_calls[2].args[0] == (
263292
f'New Scheduled Task `{eaas_schedule_task["id"]}`'
264-
f' created for Upload `{upload.id}`: '
265-
f'Will process Report File `{report_file["id"]}`'
293+
f' created for Upload `{uploads[0].id}`: '
294+
f'Will process Report File `{report_file[0]["id"]}`'
295+
)
296+
assert logger.method_calls[4].args[0] == (
297+
f'New Scheduled Task `{eaas_schedule_task["id"]}`'
298+
f' created for Upload `{uploads[1].id}`: '
299+
f'Will process Report File `{report_file[1]["id"]}`'
266300
)
267301

268302

0 commit comments

Comments (0)