Commit 9713172

Merge pull request #54 from openimis/release/24.10
MERGING release/24.10 into develop
2 parents ef8cc36 + 263dfd7 commit 9713172

13 files changed: +220 −29 lines changed

MANIFEST.in

Lines changed: 2 additions & 1 deletion

@@ -1,2 +1,3 @@
 include LICENSE.md
-include README.md
+include README.md
+include tools/tests/*

tools/resources.py

Lines changed: 20 additions & 12 deletions

@@ -10,10 +10,10 @@
 
 def process_imported_patient_categories(row):
     # Transform the patient category data
-    adult_cat = int(row["adult_cat"])
-    minor_cat = int(row["minor_cat"])
-    female_cat = int(row["female_cat"])
-    male_cat = int(row["male_cat"])
+    adult_cat = int(row.pop("adult_cat", 1))
+    minor_cat = int(row.pop("minor_cat", 1))
+    female_cat = int(row.pop("female_cat", 1))
+    male_cat = int(row.pop("male_cat", 1))
 
     category = 0
     if male_cat:
@@ -26,10 +26,6 @@ def process_imported_patient_categories(row):
         category = category | PATIENT_CATEGORY_MASK_MINOR
 
     # Remove the now useless fields
-    row.pop("adult_cat")
-    row.pop("minor_cat")
-    row.pop("female_cat")
-    row.pop("male_cat")
 
     # Add the merged patient category value
     row["patient_category"] = category
@@ -78,7 +74,7 @@ def dehydrate_minor_cat(self, item_service):
     # This method is called once before importing data
     # This is used to add the two mandatory fields that are required for creating a medical.Item
     # If self.fields do not have a fields.Field, with the column_name set up, then these columns are ignored during import
-    def before_import(self, dataset, using_transactions, dry_run, **kwargs):
+    def before_import(self, dataset, **kwargs):
         if "patient_category" not in self.fields:
             self.fields["patient_category"] = fields.Field(attribute='patient_category', column_name="patient_category",
                                                            saves_null_values=False,
@@ -87,13 +83,21 @@ def before_import(self, dataset, using_transactions, dry_run, **kwargs):
             self.fields["audit_user_id"] = fields.Field(attribute='audit_user_id', column_name="audit_user_id",
                                                         saves_null_values=False,
                                                         widget=IntegerWidget())
+        super().before_import(dataset, **kwargs)
 
     # This method is called when the user flags a row to be deleted (the "delete" column value is '1')
     def for_delete(self, row, instance):
         if "delete" in row:
             return self.fields['delete'].clean(row)
 
-
+    def __init__(self, user, queryset=None, ):
+        """
+        @param user: User to be used for location rights for import and export, and for audit_user_id
+        @param queryset: Queryset to use for export, Default to full quetyset
+        """
+        super().__init__()
+        self._user = user
+
 # This class is responsible for customizing the import and export processes
 class ItemResource(ItemServiceResource):
 
@@ -102,10 +106,12 @@ class Meta:
 
         # These are the fields that are going to get exported/
         fields = ('code', 'name', 'type', 'package', 'price', 'quantity',
-                  'care_type', 'frequency', 'patient_category')
+                  'care_type', 'frequency', 'patient_category',
+                  'male_cat', 'female_cat', 'adult_cat', 'minor_cat')
 
         # You can customize the order for exports, but this order is also used during upload
         # (to know which fields will be there, instead of reading the headers)
+        export_order = fields
         # export_order = ('code', 'name', 'type', 'package', 'price', 'quantity',
         #                 'care_type', 'frequency', 'male_cat', 'female_cat', 'adult_cat', 'minor_cat')
 
@@ -131,7 +137,9 @@ class Meta:
 
         # These are the fields that are going to get exported/
         fields = ('code', 'name', 'type', 'level', 'price', 'category',
-                  'care_type', 'frequency', 'patient_category')
+                  'care_type', 'frequency', 'patient_category',
+                  'male_cat', 'female_cat', 'adult_cat', 'minor_cat')
+        export_order = fields
 
     # This method is called once for each row during import
     # This is where you can do some data validation/modification + add the missing data
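
For readers skimming the diff: the net effect of the resources.py changes is that the four per-sex/per-age columns become optional on import (a missing column now defaults to 1, meaning "applies") and pop() both reads and removes the column in one step before the merged bitmask is written to patient_category. Below is a minimal standalone sketch of that behaviour; only PATIENT_CATEGORY_MASK_MINOR appears in this diff, so the other constant names and all numeric values are assumptions for illustration, and the function is renamed so it is not mistaken for the module code itself.

# Assumed mask values -- only PATIENT_CATEGORY_MASK_MINOR is visible in the diff above.
PATIENT_CATEGORY_MASK_MALE = 1
PATIENT_CATEGORY_MASK_FEMALE = 2
PATIENT_CATEGORY_MASK_ADULT = 4
PATIENT_CATEGORY_MASK_MINOR = 8

def merge_patient_categories(row):
    # Missing columns default to 1, so an omitted category counts as "applies";
    # pop() also strips the column from the row, replacing the old explicit pops.
    adult_cat = int(row.pop("adult_cat", 1))
    minor_cat = int(row.pop("minor_cat", 1))
    female_cat = int(row.pop("female_cat", 1))
    male_cat = int(row.pop("male_cat", 1))

    category = 0
    if male_cat:
        category |= PATIENT_CATEGORY_MASK_MALE
    if female_cat:
        category |= PATIENT_CATEGORY_MASK_FEMALE
    if adult_cat:
        category |= PATIENT_CATEGORY_MASK_ADULT
    if minor_cat:
        category |= PATIENT_CATEGORY_MASK_MINOR

    row["patient_category"] = category
    return row

# A row without any *_cat column no longer raises KeyError; with the assumed
# values above it gets the full mask, 15.
print(merge_patient_categories({"code": "TEST1"})["patient_category"])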

tools/services.py

Lines changed: 6 additions & 1 deletion

@@ -1641,7 +1641,12 @@ def upload_feedbacks(archive, user):
 
 def validate_imported_item_row(row):
     # TODO : refactor this function and the code used in validating XML uploads
-    categories = [row["adult_cat"], row["minor_cat"], row["male_cat"], row["female_cat"]]
+    categories = [
+        row.get("adult_cat", 1),
+        row.get("minor_cat", 1),
+        row.get("male_cat", 1),
+        row.get("female_cat", 1)
+    ]
     if len(row["code"]) < 1 or len(row["code"]) > 6:
         raise ValidationError(f"Item '{row['code']}': code is invalid. Must be between 1 and 6 characters")
     elif len(row["name"]) < 1 or len(row["name"]) > 100:
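
One nuance worth calling out: the validation above uses row.get(...), which reads a default without touching the row, while the transformation in resources.py uses row.pop(...), which consumes the column as it reads it. Both fall back to 1 when a column is absent. A quick illustration with a plain dict, no project code involved:

row = {"code": "TEST1", "adult_cat": 0}

# get(): read with a default, keep the key (validation path in services.py)
categories = [row.get("adult_cat", 1), row.get("minor_cat", 1)]
assert categories == [0, 1] and "adult_cat" in row

# pop(): read with a default, remove the key (transformation path in resources.py)
adult_cat = int(row.pop("adult_cat", 1))
assert adult_cat == 0 and "adult_cat" not in row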

tools/tests/__init__.py

Lines changed: 0 additions & 1 deletion

@@ -1 +0,0 @@
-from tools.tests.test_services import *

tools/tests/item_example.json

Lines changed: 23 additions & 0 deletions

@@ -0,0 +1,23 @@
+[
+  {
+    "code": "TEST1",
+    "name": "Test item 1",
+    "type": "D",
+    "package": "10 TABLETS",
+    "price": 720,
+    "quantity": 1,
+    "care_type": "B",
+    "frequency": 0
+  },
+  {
+    "code": "TEST2",
+    "name": "Test item 2",
+    "type": "D",
+    "package": "10 TABLETS",
+    "price": 720,
+    "quantity": 1,
+    "care_type": "I",
+    "frequency": 0
+  }
+
+]

tools/tests/service_example.json

Lines changed: 31 additions & 0 deletions

@@ -0,0 +1,31 @@
+[
+  {
+    "code": "TEST1",
+    "name": "Test service 1",
+    "type": "C",
+    "level": "S",
+    "price": 720,
+    "category": "O",
+    "care_type": "B",
+    "frequency": 0,
+    "adult_cat": 0,
+    "minor_cat": 1,
+    "male_cat": 1,
+    "female_cat": 1
+  },
+  {
+    "code": "TEST2",
+    "name": "Test service 2",
+    "type": "C",
+    "level": "S",
+    "price": 720,
+    "category": "O",
+    "care_type": "B",
+    "frequency": 180,
+    "adult_cat": 1,
+    "minor_cat": 0,
+    "male_cat": 0,
+    "female_cat": 1
+  }
+
+]
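
The two fixtures exercise the two sides of this change: item_example.json omits the four category columns entirely, so an import relies on the new defaults of 1, while service_example.json sets them explicitly. Under the same assumed mask values as in the sketch further up (male=1, female=2, adult=4, minor=8, not confirmed by this diff), the TEST1 service row would merge like this:

# Assumed mask values, for illustration only.
MASKS = {"male_cat": 1, "female_cat": 2, "adult_cat": 4, "minor_cat": 8}

# First row of service_example.json: excluded for adults, applies to minors of both sexes.
service_row = {"adult_cat": 0, "minor_cat": 1, "male_cat": 1, "female_cat": 1}

patient_category = 0
for column, mask in MASKS.items():
    if int(service_row.get(column, 1)):  # absent columns count as "applies"
        patient_category |= mask

print(patient_category)  # 1 | 2 | 8 = 11 with the assumed values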

tools/tests/test_item.py

Lines changed: 30 additions & 0 deletions

@@ -0,0 +1,30 @@
+import os
+from tablib import Dataset
+from core.test_helpers import create_test_interactive_user
+from django.test import TestCase
+from location.test_helpers import create_test_location
+from django.conf import settings
+from tools.resources import ItemResource
+
+class ImportItemTest(TestCase):
+
+    def setUp(self) -> None:
+        super(ImportItemTest, self).setUp()
+        self.user = create_test_interactive_user()
+
+    def test_simple_import(self):
+        dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+        resource = ItemResource(user=self.user)
+        dataset = Dataset()
+        with open(os.path.join(dir_path, 'tests/item_example.json'), 'r') as f:
+            dataset.load(f.read())
+        result = resource.import_data(
+            dataset, dry_run=True, use_transactions=True,
+            collect_failed_rows=False,
+        )
+        self.assertEqual(result.has_errors(), False)
+
+    def test_simple_export(self):
+        result = ItemResource(self.user).export().dict
+        self.assertTrue(result)
+
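test_simple_export above only checks that the export is non-empty. Since Meta.fields now also lists the four per-category columns (and dehydrate_* helpers such as dehydrate_minor_cat already exist in the resource), a slightly stricter check could assert that those columns actually appear in the exported rows. This is only a sketch under the same setup as the tests above, not part of the commit:

    # Hypothetical extra test method for ImportItemTest; assumes at least one
    # item exists in the test database, as test_simple_export already does.
    def test_export_contains_category_columns(self):
        rows = ItemResource(self.user).export().dict
        self.assertTrue(rows)
        for column in ("male_cat", "female_cat", "adult_cat", "minor_cat"):
            self.assertIn(column, rows[0])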