From d4b215d06c077dd149311854f72b419cff72c868 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 16 Nov 2022 14:29:31 +0300 Subject: [PATCH 001/270] Add pyxform entities functionality Signed-off-by: Kipchirchir Sigei --- requirements/base.in | 1 + requirements/base.pip | 9 +++++---- requirements/dev.pip | 9 +++++---- 3 files changed, 11 insertions(+), 8 deletions(-) diff --git a/requirements/base.in b/requirements/base.in index 9f61c60d57..1ff49b9a8d 100644 --- a/requirements/base.in +++ b/requirements/base.in @@ -9,3 +9,4 @@ -e git+https://github.com/onaio/python-json2xlsclient.git@62b4645f7b4f2684421a13ce98da0331a9dd66a0#egg=python-json2xlsclient -e git+https://github.com/onaio/ona-oidc.git@v1.0.1#egg=ona-oidc -e git+https://github.com/onaio/savreaderwriter.git@updates#egg=savreaderwriter +-e git+https://github.com/XLSForm/pyxform.git@8e45fd2613e1c0aabfc194a122150d453b1e3cc2#egg=pyxform diff --git a/requirements/base.pip b/requirements/base.pip index 68408c276d..81767cd619 100644 --- a/requirements/base.pip +++ b/requirements/base.pip @@ -16,6 +16,11 @@ # via -r requirements/base.in -e git+https://github.com/onaio/python-json2xlsclient.git@62b4645f7b4f2684421a13ce98da0331a9dd66a0#egg=python-json2xlsclient # via -r requirements/base.in +-e git+https://github.com/XLSForm/pyxform.git@8e45fd2613e1c0aabfc194a122150d453b1e3cc2#egg=pyxform + # via + # -r requirements/base.in + # onadata + # pyfloip -e git+https://github.com/onaio/savreaderwriter.git@updates#egg=savreaderwriter # via -r requirements/base.in alabaster==0.7.12 @@ -305,10 +310,6 @@ pytz==2022.1 # djangorestframework # fleming # onadata -pyxform==1.10.1 - # via - # onadata - # pyfloip recaptcha-client==1.0.6 # via onadata redis==4.3.4 diff --git a/requirements/dev.pip b/requirements/dev.pip index 6878ae392d..f9e7821022 100644 --- a/requirements/dev.pip +++ b/requirements/dev.pip @@ -16,6 +16,11 @@ # via -r requirements/base.in -e 
git+https://github.com/onaio/python-json2xlsclient.git@62b4645f7b4f2684421a13ce98da0331a9dd66a0#egg=python-json2xlsclient # via -r requirements/base.in +-e git+https://github.com/XLSForm/pyxform.git@8e45fd2613e1c0aabfc194a122150d453b1e3cc2#egg=pyxform + # via + # -r requirements/base.in + # onadata + # pyfloip -e git+https://github.com/onaio/savreaderwriter.git@updates#egg=savreaderwriter # via -r requirements/base.in alabaster==0.7.12 @@ -411,10 +416,6 @@ pytz==2022.1 # djangorestframework # fleming # onadata -pyxform==1.10.1 - # via - # onadata - # pyfloip pyyaml==6.0 # via prospector recaptcha-client==1.0.6 From 3628bae20424dd92b24520c7b467318107d9b1f5 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 7 Dec 2022 18:25:25 +0300 Subject: [PATCH 002/270] Update pyxform to v1.11.1 Signed-off-by: Kipchirchir Sigei --- requirements/base.in | 1 - requirements/base.pip | 9 ++++----- requirements/dev.pip | 9 ++++----- 3 files changed, 8 insertions(+), 11 deletions(-) diff --git a/requirements/base.in b/requirements/base.in index 1ff49b9a8d..9f61c60d57 100644 --- a/requirements/base.in +++ b/requirements/base.in @@ -9,4 +9,3 @@ -e git+https://github.com/onaio/python-json2xlsclient.git@62b4645f7b4f2684421a13ce98da0331a9dd66a0#egg=python-json2xlsclient -e git+https://github.com/onaio/ona-oidc.git@v1.0.1#egg=ona-oidc -e git+https://github.com/onaio/savreaderwriter.git@updates#egg=savreaderwriter --e git+https://github.com/XLSForm/pyxform.git@8e45fd2613e1c0aabfc194a122150d453b1e3cc2#egg=pyxform diff --git a/requirements/base.pip b/requirements/base.pip index 81767cd619..61b8109c4c 100644 --- a/requirements/base.pip +++ b/requirements/base.pip @@ -16,11 +16,6 @@ # via -r requirements/base.in -e git+https://github.com/onaio/python-json2xlsclient.git@62b4645f7b4f2684421a13ce98da0331a9dd66a0#egg=python-json2xlsclient # via -r requirements/base.in --e git+https://github.com/XLSForm/pyxform.git@8e45fd2613e1c0aabfc194a122150d453b1e3cc2#egg=pyxform - # via - # -r 
requirements/base.in - # onadata - # pyfloip -e git+https://github.com/onaio/savreaderwriter.git@updates#egg=savreaderwriter # via -r requirements/base.in alabaster==0.7.12 @@ -310,6 +305,10 @@ pytz==2022.1 # djangorestframework # fleming # onadata +pyxform==1.11.1 + # via + # onadata + # pyfloip recaptcha-client==1.0.6 # via onadata redis==4.3.4 diff --git a/requirements/dev.pip b/requirements/dev.pip index f9e7821022..d6de72e875 100644 --- a/requirements/dev.pip +++ b/requirements/dev.pip @@ -16,11 +16,6 @@ # via -r requirements/base.in -e git+https://github.com/onaio/python-json2xlsclient.git@62b4645f7b4f2684421a13ce98da0331a9dd66a0#egg=python-json2xlsclient # via -r requirements/base.in --e git+https://github.com/XLSForm/pyxform.git@8e45fd2613e1c0aabfc194a122150d453b1e3cc2#egg=pyxform - # via - # -r requirements/base.in - # onadata - # pyfloip -e git+https://github.com/onaio/savreaderwriter.git@updates#egg=savreaderwriter # via -r requirements/base.in alabaster==0.7.12 @@ -416,6 +411,10 @@ pytz==2022.1 # djangorestframework # fleming # onadata +pyxform==1.11.1 + # via + # onadata + # pyfloip pyyaml==6.0 # via prospector recaptcha-client==1.0.6 From a223a4256529554ab3cec0fd9acf82183803d4ed Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 7 Dec 2022 20:01:29 +0300 Subject: [PATCH 003/270] Add tests for publishing forms with entities Signed-off-by: Kipchirchir Sigei --- onadata/apps/logger/tests/test_publish_xls.py | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/onadata/apps/logger/tests/test_publish_xls.py b/onadata/apps/logger/tests/test_publish_xls.py index 10192e3087..5b1b25f572 100644 --- a/onadata/apps/logger/tests/test_publish_xls.py +++ b/onadata/apps/logger/tests/test_publish_xls.py @@ -76,6 +76,45 @@ def test_xform_hash(self): self.xform.save(update_fields=["title"]) self.assertFalse(self.xform.hash == "" or self.xform.hash is None) self.assertFalse(self.xform.hash == xform_old_hash) + + def 
test_xform_with_entities(self): + md=""" + | survey | | | | + | | type | name | label | + | | text | a | A | + | entities | | | | + | | dataset | label | | + | | trees | a | | + """ + self._create_user_and_login() + self.xform = self._publish_markdown(md, self.user) + # assert has entities namespace + self.assertIn( + 'xmlns:entities="http://www.opendatakit.org/xforms/entities"', + self.xform.xml + ) + # assert has entities version + self.assertIn( + 'entities:entities-version="2022.1.0"', + self.xform.xml + ) + + def test_xform_with_entities_save_to(self): + md=""" + | survey | | | | | + | | type | name | label | save_to | + | | text | a | A | foo | + | entities | | | | | + | | dataset | label | | | + | | trees | a | | | + """ + self._create_user_and_login() + self.xform = self._publish_markdown(md, self.user) + # assert has save_to column in xml + self.assertIn( + 'entities:saveto="foo"', + self.xform.xml + ) def test_report_exception_with_exc_info(self): e = Exception("A test exception") From 859fbbb88d1c0d95c7947dd593c94f7ce0eac2fb Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 7 Dec 2022 21:37:36 +0300 Subject: [PATCH 004/270] Update pyxform error messages Error messages have been updated on pyxform, see https://github.com/XLSForm/pyxform/pull/624/files#diff-61e9fd63d6e4bf29ba55beef5eaae1095994e11820ca7c86abce96c880cdc09cR885-R886 https://github.com/XLSForm/pyxform/pull/624/files#diff-56f86b46598649c7a80d76590152faa89d9b2bf632b1531bcd93749dcf24c8f6R161 Signed-off-by: Kipchirchir Sigei --- .../api/tests/viewsets/test_floip_viewset.py | 7 ++++-- .../api/tests/viewsets/test_xform_viewset.py | 6 ++--- onadata/apps/logger/tests/test_publish_xls.py | 23 ++++++++++++++++++- 3 files changed, 30 insertions(+), 6 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_floip_viewset.py b/onadata/apps/api/tests/viewsets/test_floip_viewset.py index 579d9feee4..57d677f682 100644 --- a/onadata/apps/api/tests/viewsets/test_floip_viewset.py +++ 
b/onadata/apps/api/tests/viewsets/test_floip_viewset.py @@ -218,8 +218,11 @@ def test_publish_number_question_names(self): # pylint: disable=C0103 self.assertEqual(response.status_code, 400) self.assertEqual(response['Content-Type'], 'application/vnd.api+json') - self.assertIn(u"The name '1448506769745_42' is an invalid XML tag", - response.data['text']) + self.assertIn( + "The name '1448506769745_42' " + "contains an invalid character '1'", + response.data['text'] + ) def test_responses_endpoint_format(self): """ diff --git a/onadata/apps/api/tests/viewsets/test_xform_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_viewset.py index 4969a97bf9..d50764a351 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_viewset.py @@ -3496,9 +3496,9 @@ def test_survey_preview_endpoint(self): response = view(request) self.assertEqual(response.status_code, 400, response.data) error_message = ( - "[row : 2] Invalid question name [sdfasdfaf " - "sdf] Names must begin with a letter, colon, or underscore." - "Subsequent characters can include numbers, dashes, and periods." + "[row : 2] Invalid question name 'sdfasdfaf sdf'. " + "Names must begin with a letter, colon, or underscore. " + "Other characters can include numbers, dashes, and periods." 
) self.assertEqual(response.data.get("detail"), error_message) diff --git a/onadata/apps/logger/tests/test_publish_xls.py b/onadata/apps/logger/tests/test_publish_xls.py index 5b1b25f572..3dfccf89f5 100644 --- a/onadata/apps/logger/tests/test_publish_xls.py +++ b/onadata/apps/logger/tests/test_publish_xls.py @@ -76,7 +76,7 @@ def test_xform_hash(self): self.xform.save(update_fields=["title"]) self.assertFalse(self.xform.hash == "" or self.xform.hash is None) self.assertFalse(self.xform.hash == xform_old_hash) - + def test_xform_with_entities(self): md=""" | survey | | | | @@ -116,6 +116,27 @@ def test_xform_with_entities_save_to(self): self.xform.xml ) + def test_xform_create_if_in_entities(self): + md=""" + | survey | | | | + | | type | name | label | + | | text | a | A | + | entities | | | | + | | dataset | create_if | label | + | | trees | string-length(a) > 3 | a | + """ + self._create_user_and_login() + self.xform = self._publish_markdown(md, self.user) + # assert has create_if entity expression + self.assertIn( + 'calculate="string-length(a) > 3"', + self.xform.xml + ) + self.assertIn( + '', + self.xform.xml + ) + def test_report_exception_with_exc_info(self): e = Exception("A test exception") try: From b7cea08d6fb62538837c44aeb19ed2dcdf74a674 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Fri, 9 Dec 2022 10:41:12 +0300 Subject: [PATCH 005/270] Add check form empty geom field Signed-off-by: Kipchirchir Sigei --- onadata/apps/logger/migrations/0004_update_instance_geoms.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/onadata/apps/logger/migrations/0004_update_instance_geoms.py b/onadata/apps/logger/migrations/0004_update_instance_geoms.py index a313561509..59fb98c4db 100644 --- a/onadata/apps/logger/migrations/0004_update_instance_geoms.py +++ b/onadata/apps/logger/migrations/0004_update_instance_geoms.py @@ -13,7 +13,7 @@ def update_instance_geoms(apps, schema_editor): xform__downloadable=True, xform__deleted_at__isnull=True, ): - 
if inst.geom.empty: + if inst.geom and inst.geom.empty: inst.geom = None inst.save() From 70e2a4f43d7154d8893d8f9132bdbc2b03fdec12 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Mon, 30 Jan 2023 11:47:20 +0300 Subject: [PATCH 006/270] Update pyxform to 1.12.0 Signed-off-by: Kipchirchir Sigei --- requirements/base.pip | 2 +- requirements/dev.pip | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/base.pip b/requirements/base.pip index 61b8109c4c..0484616170 100644 --- a/requirements/base.pip +++ b/requirements/base.pip @@ -305,7 +305,7 @@ pytz==2022.1 # djangorestframework # fleming # onadata -pyxform==1.11.1 +pyxform==1.12.0 # via # onadata # pyfloip diff --git a/requirements/dev.pip b/requirements/dev.pip index d6de72e875..b23a9f3048 100644 --- a/requirements/dev.pip +++ b/requirements/dev.pip @@ -411,7 +411,7 @@ pytz==2022.1 # djangorestframework # fleming # onadata -pyxform==1.11.1 +pyxform==1.12.0 # via # onadata # pyfloip From 45c2aaef9338335585bedfa56219422347ccee5a Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Mon, 30 Jan 2023 12:16:36 +0300 Subject: [PATCH 007/270] Add big image and single entity tests Signed-off-by: Kipchirchir Sigei --- onadata/apps/logger/tests/test_publish_xls.py | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/onadata/apps/logger/tests/test_publish_xls.py b/onadata/apps/logger/tests/test_publish_xls.py index 3dfccf89f5..808ba5c659 100644 --- a/onadata/apps/logger/tests/test_publish_xls.py +++ b/onadata/apps/logger/tests/test_publish_xls.py @@ -4,6 +4,7 @@ from django.core.management import call_command from django.core.management.base import CommandError +from pyxform.errors import PyXFormError from onadata.apps.main.tests.test_base import TestBase from onadata.apps.logger.models.xform import XForm @@ -137,6 +138,35 @@ def test_xform_create_if_in_entities(self): self.xform.xml ) + def test_xform_big_image_invalid_if_no_image(self): + md=""" + | survey | | | | + | | 
type | name | media::big-image | + | | text | c | m.png | + """ + self._create_user_and_login() + msg = ("To use big-image, you must also specify" + " an image for the survey element") + with self.assertRaisesMessage(PyXFormError, msg): + self.xform = self._publish_markdown(md, self.user) + + def test_single_entity_allowed_per_form(self): + md=""" + | survey | | | | + | | type | name | label | + | | text | a | A | + | entities | | | | + | | dataset | | | + | | trees | | | + | | shovels | | | + """ + self._create_user_and_login() + msg = ("Currently, you can only declare a single entity per form." + " Please make sure your entities sheet only declares" + " one entity.") + with self.assertRaisesMessage(PyXFormError, msg): + self.xform = self._publish_markdown(md, self.user) + def test_report_exception_with_exc_info(self): e = Exception("A test exception") try: From 55ea8041e2d5019774cc5b53d67a970ff32236bd Mon Sep 17 00:00:00 2001 From: Davis Raymond Date: Wed, 7 Jun 2023 14:59:30 +0300 Subject: [PATCH 008/270] Tag release v3.9.1 (#2432) * chore(release): Tag release v3.9.1 * refactor(throttling): support having a list of values for headers * refactor(throttle): fix an issue where settings would not override default * chore(docs): Update release date --- CHANGES.rst | 19 ++++++++++++++++ onadata/__init__.py | 2 +- onadata/libs/tests/test_throttle.py | 16 ++++++++++++++ onadata/libs/throttle.py | 34 +++++++++++++++++------------ setup.cfg | 2 +- 5 files changed, 57 insertions(+), 16 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index b2acfb21ed..b7bfcdd0ab 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,25 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.9.1(2023-06-07) +------------------ + +- Add configurable timeouts for webhook requests + `PR #2419 ` + [@DavisRayM] +- Handle AttributeError returned when element is None + `PR #2421 ` + [@DavisRayM] +- Fix xlsx export bug on UI + `PR #2426 ` + [@KipSigei] +- Fix unbound 
local error exception + `PR #2428 ` + [@KipSigei] +- Upgrade django to v3.2.19 + `PR #2429 ` + [@KipSigei] + v3.9.0(2023-05-02) ----------------- diff --git a/onadata/__init__.py b/onadata/__init__.py index 8790b019da..dc0b7a1d80 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.9.0" +__version__ = "3.9.1" # This will make sure the app is always imported when diff --git a/onadata/libs/tests/test_throttle.py b/onadata/libs/tests/test_throttle.py index 05ade65421..c7b2c2a8e0 100644 --- a/onadata/libs/tests/test_throttle.py +++ b/onadata/libs/tests/test_throttle.py @@ -30,3 +30,19 @@ def test_requests_are_throttled(self): # get cached key key = self.throttle.get_cache_key(request, None) self.assertEqual(key, "throttle_header_Google-HTTP-Java-Client/1.35.0(gzip)") + + @override_settings( + THROTTLE_HEADERS={ + "HTTP_USER_AGENT": ["Google-HTTP-Java-Client/1.35.0 (gzip)", "Mozilla/5.0"], + } + ) + def test_request_throttling_multiple_headers(self): + extra = {"HTTP_USER_AGENT": "Mozilla/5.0"} + request = self.factory.get("/", **extra) + key = self.throttle.get_cache_key(request, None) + self.assertEqual(key, "throttle_header_Mozilla/5.0") + + extra = {"HTTP_USER_AGENT": "Google-HTTP-Java-Client/1.35.0 (gzip)"} + request = self.factory.get("/", **extra) + key = self.throttle.get_cache_key(request, None) + self.assertEqual(key, "throttle_header_Google-HTTP-Java-Client/1.35.0(gzip)") diff --git a/onadata/libs/throttle.py b/onadata/libs/throttle.py index 137d28039f..ff79442a0b 100644 --- a/onadata/libs/throttle.py +++ b/onadata/libs/throttle.py @@ -2,7 +2,7 @@ Module containing throttling utilities """ -from django.conf import Settings +from django.conf import settings from rest_framework.throttling import SimpleRateThrottle @@ -14,21 +14,27 @@ class RequestHeaderThrottle(SimpleRateThrottle): """ scope = "header" - throttled_headers = getattr( - Settings, - 
"THROTTLE_HEADERS", - {"HTTP_USER_AGENT": "Google-HTTP-Java-Client/1.35.0 (gzip)"}, - ) + + @property + def throttled_headers(self): + return getattr( + settings, + "THROTTLE_HEADERS", + {"HTTP_USER_AGENT": "Google-HTTP-Java-Client/1.35.0 (gzip)"}, + ) + + def get_ident_from_header(self, value): + cleaned_ident = value.replace(" ", "") + return self.cache_format % {"scope": self.scope, "ident": cleaned_ident} def get_cache_key(self, request, _): for header, value in self.throttled_headers.items(): header_value = request.META.get(header, None) - if header_value == value: - ident = header_value - # remove whitespace from key - cleaned_ident = ident.replace(" ", "") - return self.cache_format % { - "scope": self.scope, - "ident": cleaned_ident - } + if isinstance(value, str): + if header_value == value: + return self.get_ident_from_header(header_value) + elif isinstance(value, list): + for val in value: + if header_value == val: + return self.get_ident_from_header(header_value) return None diff --git a/setup.cfg b/setup.cfg index e6185c0fe8..c5d1e2e421 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.9.0 +version = 3.9.1 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 6c4ac6dff09a3a28191c24924a6e704ebb0560ee Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Wed, 14 Jun 2023 09:24:52 +0300 Subject: [PATCH 009/270] fix open-data endpoint timeout bug (#2435) * fix open-data endpoint timeout bug there currently exists a peculiar intermittent bug where after ordering the queryset and the first item is accessed such as instances[0] or by slicing instances[0:1] (as in the the pagination implementation) the execution freezes and no result is returned. This causes the server to timeout. 
The workaround only ensures we order and paginate the results only when the queryset returns more than 1 item * address linting error * add test case add test case for count query parameter on endpoint /api/v2/open-data//data * address linting errors * address linting errors --- .../tests/viewsets/test_tableau_viewset.py | 397 +++++++++--------- .../apps/api/viewsets/v2/tableau_viewset.py | 27 +- onadata/settings/github_actions_test.py | 2 +- 3 files changed, 222 insertions(+), 204 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_tableau_viewset.py b/onadata/apps/api/tests/viewsets/test_tableau_viewset.py index 6d7b0d54d8..8c860bdced 100644 --- a/onadata/apps/api/tests/viewsets/test_tableau_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_tableau_viewset.py @@ -11,39 +11,39 @@ from onadata.apps.main.tests.test_base import TestBase from onadata.apps.logger.models.open_data import get_or_create_opendata from onadata.apps.api.viewsets.v2.tableau_viewset import ( - TableauViewSet, unpack_select_multiple_data, - unpack_gps_data, clean_xform_headers) + TableauViewSet, + unpack_select_multiple_data, + unpack_gps_data, + clean_xform_headers, +) from onadata.libs.renderers.renderers import pairing def streaming_data(response): - return json.loads(u''.join( - [i.decode('utf-8') for i in response.streaming_content])) + return json.loads("".join([i.decode("utf-8") for i in response.streaming_content])) class TestTableauViewSet(TestBase): - def setUp(self): super(TestTableauViewSet, self).setUp() self._create_user_and_login() - self._submission_time = parse_datetime('2020-02-18 15:54:01Z') - self.fixture_dir = os.path.join( - self.this_directory, 'fixtures', 'csv_export') - path = os.path.join(self.fixture_dir, 'tutorial_w_repeats.xlsx') + self._submission_time = parse_datetime("2020-02-18 15:54:01Z") + self.fixture_dir = os.path.join(self.this_directory, "fixtures", "csv_export") + path = os.path.join(self.fixture_dir, "tutorial_w_repeats.xlsx") 
self._publish_xls_file_and_set_xform(path) - path = os.path.join(self.fixture_dir, 'repeats_sub.xml') + path = os.path.join(self.fixture_dir, "repeats_sub.xml") self.factory = RequestFactory() - self.extra = { - 'HTTP_AUTHORIZATION': 'Token %s' % self.user.auth_token} - self._make_submission( - path, forced_submission_time=self._submission_time) + self.extra = {"HTTP_AUTHORIZATION": f"Token {self.user.auth_token}"} + self._make_submission(path, forced_submission_time=self._submission_time) - self.view = TableauViewSet.as_view({ - 'post': 'create', - 'patch': 'partial_update', - 'delete': 'destroy', - 'get': 'data' - }) + self.view = TableauViewSet.as_view( + { + "post": "create", + "patch": "partial_update", + "delete": "destroy", + "get": "data", + } + ) def get_open_data_object(self): return get_or_create_opendata(self.xform)[0] @@ -52,154 +52,111 @@ def test_tableau_data_and_fetch(self): # pylint: disable=invalid-name """ Test the schema and data endpoint and data returned by each. """ - self.view = TableauViewSet.as_view({ - 'get': 'schema' - }) + self.view = TableauViewSet.as_view({"get": "schema"}) _open_data = get_or_create_opendata(self.xform) uuid = _open_data[0].uuid expected_schema = [ { - 'table_alias': 'data', - 'connection_name': f'{self.xform.project_id}_{self.xform.id_string}', # noqa - 'column_headers': [ - { - 'id': '_id', - 'dataType': 'int', - 'alias': '_id' - }, - { - 'id': 'name', - 'dataType': 'string', - 'alias': 'name' - }, - { - 'id': 'age', - 'dataType': 'int', - 'alias': 'age' - }, + "table_alias": "data", + "connection_name": f"{self.xform.project_id}_{self.xform.id_string}", # noqa pylint: disable=line-too-long + "column_headers": [ + {"id": "_id", "dataType": "int", "alias": "_id"}, + {"id": "name", "dataType": "string", "alias": "name"}, + {"id": "age", "dataType": "int", "alias": "age"}, + {"id": "picture", "dataType": "string", "alias": "picture"}, { - 'id': 'picture', - 'dataType': 'string', - 'alias': 'picture' + "id": 
"has_children", + "dataType": "string", + "alias": "has_children", }, { - 'id': 'has_children', - 'dataType': 'string', - 'alias': 'has_children' + "id": "_gps_latitude", + "dataType": "string", + "alias": "_gps_latitude", }, { - 'id': '_gps_latitude', - 'dataType': 'string', - 'alias': '_gps_latitude' + "id": "_gps_longitude", + "dataType": "string", + "alias": "_gps_longitude", }, { - 'id': '_gps_longitude', - 'dataType': 'string', - 'alias': '_gps_longitude' + "id": "_gps_altitude", + "dataType": "string", + "alias": "_gps_altitude", }, { - 'id': '_gps_altitude', - 'dataType': 'string', - 'alias': '_gps_altitude' + "id": "_gps_precision", + "dataType": "string", + "alias": "_gps_precision", }, { - 'id': '_gps_precision', - 'dataType': 'string', - 'alias': '_gps_precision' + "id": "browsers_firefox", + "dataType": "string", + "alias": "browsers_firefox", }, { - 'id': 'browsers_firefox', - 'dataType': 'string', - 'alias': 'browsers_firefox' + "id": "browsers_chrome", + "dataType": "string", + "alias": "browsers_chrome", }, + {"id": "browsers_ie", "dataType": "string", "alias": "browsers_ie"}, { - 'id': 'browsers_chrome', - 'dataType': 'string', - 'alias': 'browsers_chrome' + "id": "browsers_safari", + "dataType": "string", + "alias": "browsers_safari", }, { - 'id': 'browsers_ie', - 'dataType': 'string', - 'alias': 'browsers_ie' + "id": "meta_instanceID", + "dataType": "string", + "alias": "meta_instanceID", }, - { - 'id': 'browsers_safari', - 'dataType': 'string', - 'alias': 'browsers_safari' - }, - { - 'id': 'meta_instanceID', - 'dataType': 'string', - 'alias': 'meta_instanceID' - } - ] + ], }, { - 'table_alias': 'children', - 'connection_name': f'{self.xform.project_id}_{self.xform.id_string}_children', # noqa - 'column_headers': [ - { - 'id': '_id', - 'dataType': 'int', - 'alias': '_id' - }, - { - 'id': '__parent_id', - 'dataType': 'int', - 'alias': '__parent_id' - }, + "table_alias": "children", + "connection_name": 
f"{self.xform.project_id}_{self.xform.id_string}_children", # noqa pylint: disable=line-too-long + "column_headers": [ + {"id": "_id", "dataType": "int", "alias": "_id"}, + {"id": "__parent_id", "dataType": "int", "alias": "__parent_id"}, { - 'id': '__parent_table', - 'dataType': 'string', - 'alias': '__parent_table' + "id": "__parent_table", + "dataType": "string", + "alias": "__parent_table", }, - { - 'id': 'childs_name', - 'dataType': 'string', - 'alias': 'childs_name' - }, - { - 'id': 'childs_age', - 'dataType': 'int', - 'alias': 'childs_age' - } - ] - }] + {"id": "childs_name", "dataType": "string", "alias": "childs_name"}, + {"id": "childs_age", "dataType": "int", "alias": "childs_age"}, + ], + }, + ] - request1 = self.factory.get('/', **self.extra) + request1 = self.factory.get("/", **self.extra) response1 = self.view(request1, uuid=uuid) self.assertEqual(response1.status_code, 200) self.assertEqual(response1.data, expected_schema) # Test that multiple schemas are generated for each repeat self.assertEqual(len(response1.data), 2) self.assertListEqual( - ['column_headers', 'connection_name', 'table_alias'], - sorted(list(response1.data[0].keys())) + ["column_headers", "connection_name", "table_alias"], + sorted(list(response1.data[0].keys())), ) - connection_name = f'{self.xform.project_id}_{self.xform.id_string}' - self.assertEqual( - connection_name, response1.data[0].get('connection_name')) + connection_name = f"{self.xform.project_id}_{self.xform.id_string}" + self.assertEqual(connection_name, response1.data[0].get("connection_name")) # Test that the table alias field being sent to Tableau # for each schema contains the right table name - self.assertEqual( - u'data', response1.data[0].get('table_alias') - ) - self.assertEqual( - u'children', response1.data[1].get('table_alias') - ) + self.assertEqual("data", response1.data[0].get("table_alias")) + self.assertEqual("children", response1.data[1].get("table_alias")) _id_datatype = [ - a.get('dataType') - for 
a in response1.data[0]['column_headers'] - if a.get('id') == '_id'][0] - self.assertEqual(_id_datatype, 'int') + a.get("dataType") + for a in response1.data[0]["column_headers"] + if a.get("id") == "_id" + ][0] + self.assertEqual(_id_datatype, "int") - self.view = TableauViewSet.as_view({ - 'get': 'data' - }) - request2 = self.factory.get('/', **self.extra) + self.view = TableauViewSet.as_view({"get": "data"}) + request2 = self.factory.get("/", **self.extra) response2 = self.view(request2, uuid=uuid) self.assertEqual(response2.status_code, 200) @@ -207,41 +164,44 @@ def test_tableau_data_and_fetch(self): # pylint: disable=invalid-name row_data = streaming_data(response2) expected_data = [ { - '_gps_altitude': '0', - '_gps_latitude': '26.431228', - '_gps_longitude': '58.157921', - '_gps_precision': '0', - '_id': self.xform.instances.first().id, - 'age': 32, - 'browsers_chrome': 'TRUE', - 'browsers_firefox': 'TRUE', - 'browsers_ie': 'TRUE', - 'browsers_safari': 'TRUE', - 'children': [ + "_gps_altitude": "0", + "_gps_latitude": "26.431228", + "_gps_longitude": "58.157921", + "_gps_precision": "0", + "_id": self.xform.instances.first().id, + "age": 32, + "browsers_chrome": "TRUE", + "browsers_firefox": "TRUE", + "browsers_ie": "TRUE", + "browsers_safari": "TRUE", + "children": [ { - '__parent_id': self.xform.instances.first().id, - '__parent_table': 'data', - '_id': int(pairing( - self.xform.instances.first().id, 1)), - 'childs_age': 2, - 'childs_name': 'Harry'}, + "__parent_id": self.xform.instances.first().id, + "__parent_table": "data", + "_id": int(pairing(self.xform.instances.first().id, 1)), + "childs_age": 2, + "childs_name": "Harry", + }, { - '__parent_id': self.xform.instances.first().id, - '__parent_table': 'data', - '_id': int(pairing( - self.xform.instances.first().id, 2)), - 'childs_age': 5, - 'childs_name': 'Potter'}], - 'has_children': '1', - 'name': 'Tom', - 'picture': 'wotm_01_green_desktop-10_36_1.jpg' - }] + "__parent_id": 
self.xform.instances.first().id, + "__parent_table": "data", + "_id": int(pairing(self.xform.instances.first().id, 2)), + "childs_age": 5, + "childs_name": "Potter", + }, + ], + "has_children": "1", + "name": "Tom", + "picture": "wotm_01_green_desktop-10_36_1.jpg", + } + ] # Test to confirm that the repeat tables generated # are related to the main table self.assertEqual( - row_data[0]['children'][0]['__parent_table'], - response1.data[0]['table_alias']) + row_data[0]["children"][0]["__parent_table"], + response1.data[0]["table_alias"], + ) self.assertEqual(row_data, expected_data) def test_unpack_select_multiple_data(self): @@ -249,34 +209,36 @@ def test_unpack_select_multiple_data(self): Test expected output when `unpack_select_multiple_data` function is run. """ - picked_choices = ['firefox', 'chrome', 'ie', 'safari'] - list_name = 'browsers' - choices_names = ['firefox', 'chrome', 'ie', 'safari'] - prefix = '' + picked_choices = ["firefox", "chrome", "ie", "safari"] + list_name = "browsers" + choices_names = ["firefox", "chrome", "ie", "safari"] + prefix = "" expected_data = { - 'browsers_chrome': 'TRUE', - 'browsers_firefox': 'TRUE', - 'browsers_ie': 'TRUE', - 'browsers_safari': 'TRUE' - } + "browsers_chrome": "TRUE", + "browsers_firefox": "TRUE", + "browsers_ie": "TRUE", + "browsers_safari": "TRUE", + } select_multiple_data = unpack_select_multiple_data( - picked_choices, list_name, choices_names, prefix) + picked_choices, list_name, choices_names, prefix + ) self.assertEqual(select_multiple_data, expected_data) # Confirm expected data when 2 choices are selected - picked_choices = ['firefox', 'safari'] + picked_choices = ["firefox", "safari"] select_multiple_data = unpack_select_multiple_data( - picked_choices, list_name, choices_names, prefix) + picked_choices, list_name, choices_names, prefix + ) expected_data = { - 'browsers_chrome': 'FALSE', - 'browsers_firefox': 'TRUE', - 'browsers_ie': 'FALSE', - 'browsers_safari': 'TRUE' - } + "browsers_chrome": 
"FALSE", + "browsers_firefox": "TRUE", + "browsers_ie": "FALSE", + "browsers_safari": "TRUE", + } self.assertEqual(select_multiple_data, expected_data) @@ -287,18 +249,17 @@ def test_unpack_gps_data(self): """ # We receive gps data as a string # with 4 space separated values - gps_data = '26.431228 58.157921 0 0' + gps_data = "26.431228 58.157921 0 0" qstn_name = "gps" prefix = "" - data = unpack_gps_data( - gps_data, qstn_name, prefix) + data = unpack_gps_data(gps_data, qstn_name, prefix) expected_data = { - '_gps_latitude': '26.431228', - '_gps_longitude': '58.157921', - '_gps_altitude': '0', - '_gps_precision': '0' - } + "_gps_latitude": "26.431228", + "_gps_longitude": "58.157921", + "_gps_altitude": "0", + "_gps_precision": "0", + } self.assertEqual(data, expected_data) def test_clean_xform_headers(self): @@ -308,15 +269,13 @@ def test_clean_xform_headers(self): are being pushed to Tableau. """ headers = self.xform.get_headers(repeat_iterations=1) - group_columns = [ - field for field in headers if search(r"\[+\d+\]", field)] - self.assertEqual(group_columns, - ['children[1]/childs_name', - 'children[1]/childs_age']) + group_columns = [field for field in headers if search(r"\[+\d+\]", field)] + self.assertEqual( + group_columns, ["children[1]/childs_name", "children[1]/childs_age"] + ) cleaned_data = clean_xform_headers(group_columns) - self.assertEqual(cleaned_data, - ['childs_name', 'childs_age']) + self.assertEqual(cleaned_data, ["childs_name", "childs_age"]) def test_replace_media_links(self): """ @@ -330,27 +289,73 @@ def test_replace_media_links(self): """ xform_w_attachments = self._publish_markdown(images_md, self.user) submission_file = NamedTemporaryFile(delete=False) - with open(submission_file.name, 'w') as xml_file: + with open(submission_file.name, "w", encoding="utf-8") as xml_file: xml_file.write( "" "1335783522563.jpg" "1442323232322.jpg" "uuid:729f173c688e482486a48661700455ff" - "" % - (xform_w_attachments.id_string)) + "" % 
(xform_w_attachments.id_string) + ) media_file = "1335783522563.jpg" self._make_submission_w_attachment( submission_file.name, - os.path.join(self.this_directory, 'fixtures', 'transportation', - 'instances', self.surveys[0], media_file)) + os.path.join( + self.this_directory, + "fixtures", + "transportation", + "instances", + self.surveys[0], + media_file, + ), + ) submission_data = xform_w_attachments.instances.first().json _open_data = get_or_create_opendata(xform_w_attachments) uuid = _open_data[0].uuid - request = self.factory.get('/', **self.extra) + request = self.factory.get("/", **self.extra) response = self.view(request, uuid=uuid) self.assertEqual(response.status_code, 200) # cast generator response to list for easy manipulation row_data = streaming_data(response) self.assertEqual( - row_data[0]['image1'], - f"example.com{submission_data['_attachments'][0]['download_url']}") + row_data[0]["image1"], + f"example.com{submission_data['_attachments'][0]['download_url']}", + ) + + def test_pagination(self): + """Pagination works correctly""" + self.view = TableauViewSet.as_view({"get": "data"}) + # test 1 submission + _open_data = get_or_create_opendata(self.xform) + uuid = _open_data[0].uuid + request = self.factory.get( + "/", data={"page": 1, "page_size": 100}, **self.extra + ) + response = self.view(request, uuid=uuid) + self.assertEqual(response.status_code, 200) + row_data = streaming_data(response) + self.assertEqual(len(row_data), 1) + + # multiple submissions are ordered by primary key + path = os.path.join(self.fixture_dir, "repeats_sub.xml") + self._make_submission(path, forced_submission_time=self._submission_time) + response = self.view(request, uuid=uuid) + self.assertEqual(response.status_code, 200) + row_data = streaming_data(response) + self.assertEqual(len(row_data), 2) + instances = self.xform.instances.all().order_by("pk") + self.assertEqual(row_data[0]["_id"], instances[0].pk) + self.assertEqual(row_data[1]["_id"], instances[1].pk) + + def 
test_count_query_param(self): + """count query param works""" + self.view = TableauViewSet.as_view({"get": "data"}) + path = os.path.join(self.fixture_dir, "repeats_sub.xml") + # make submission number 2 + self._make_submission(path, forced_submission_time=self._submission_time) + _open_data = get_or_create_opendata(self.xform) + uuid = _open_data[0].uuid + request = self.factory.get("/", data={"count": True}, **self.extra) + response = self.view(request, uuid=uuid) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.data, {"count": 2}) diff --git a/onadata/apps/api/viewsets/v2/tableau_viewset.py b/onadata/apps/api/viewsets/v2/tableau_viewset.py index 80de83a8c5..74aeff03ba 100644 --- a/onadata/apps/api/viewsets/v2/tableau_viewset.py +++ b/onadata/apps/api/viewsets/v2/tableau_viewset.py @@ -53,7 +53,7 @@ def process_tableau_data( else: flat_dict[ID] = row_id - for (key, value) in row.items(): + for key, value in row.items(): qstn = xform.get_element(key) if qstn: qstn_type = qstn.get("type") @@ -181,8 +181,8 @@ def data(self, request, **kwargs): ] query_param_keys = request.query_params should_paginate = any(k in query_param_keys for k in pagination_keys) - data = [] + if isinstance(self.object.content_object, XForm): if not self.object.active: return Response(status=status.HTTP_404_NOT_FOUND) @@ -202,16 +202,29 @@ def data(self, request, **kwargs): # Filter out deleted submissions instances = Instance.objects.filter( **qs_kwargs, deleted_at__isnull=True - ).order_by("pk") + ).only("json") + # we prefer to use len(instances) instead of instances.count() as using + # len is less expensive as no db query is made. 
Read more + # https://docs.djangoproject.com/en/4.2/topics/db/optimization/ + num_instances = len(instances) if count: - return Response({"count": instances.count()}) - - if should_paginate: + return Response({"count": num_instances}) + + # there currently exists a peculiar intermittent bug where after ordering + # the queryset and the first item is accessed such as instances[0] or by + # slicing instances[0:1] (as in the the pagination implementation) the + # execution freezes and no result is returned. This causes the server to + # timeout. The workaround below only ensures we order and paginate + # the results only when the queryset returns more than 1 item + if num_instances > 1: + instances = instances.order_by("pk") + + if should_paginate and num_instances > 1: instances = self.paginate_queryset(instances) + # Switch out media file names for url links in queryset data = replace_attachment_name_with_url(instances) - data = process_tableau_data( TableauDataSerializer(data, many=True).data, xform ) diff --git a/onadata/settings/github_actions_test.py b/onadata/settings/github_actions_test.py index 28a2cafdb2..68d2dd503f 100644 --- a/onadata/settings/github_actions_test.py +++ b/onadata/settings/github_actions_test.py @@ -15,7 +15,7 @@ "NAME": "onadata", "USER": "onadata", "PASSWORD": "onadata", - "HOST": "localhost", + "HOST": os.environ.get("DB_HOST", "localhost"), } } From 25eaba1083a531a699adf6a2f5377d39a2e9cfae Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Mon, 19 Jun 2023 20:11:31 +0300 Subject: [PATCH 010/270] Tag release v3.9.2 Signed-off-by: Kipchirchir Sigei --- CHANGES.rst | 7 +++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index b7bfcdd0ab..84a0e3c601 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,13 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.9.2(2023-06-19) +------------------ + +- Fix open-data 
endpoint timeout bug + `PR #2435 ` + [@kelvin-muchiri] + v3.9.1(2023-06-07) ------------------ diff --git a/onadata/__init__.py b/onadata/__init__.py index dc0b7a1d80..7bddb7affb 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.9.1" +__version__ = "3.9.2" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index c5d1e2e421..10156d4de2 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.9.1 +version = 3.9.2 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From fdc34950c30c0a40916db39c408fdaf7ea5774cc Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Tue, 13 Jun 2023 19:56:22 +0300 Subject: [PATCH 011/270] Add ability to permanently delete submissions Signed-off-by: Kipchirchir Sigei --- onadata/apps/api/viewsets/data_viewset.py | 24 +++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/onadata/apps/api/viewsets/data_viewset.py b/onadata/apps/api/viewsets/data_viewset.py index e6f35fa034..889882997d 100644 --- a/onadata/apps/api/viewsets/data_viewset.py +++ b/onadata/apps/api/viewsets/data_viewset.py @@ -341,6 +341,11 @@ def destroy(self, request, *args, **kwargs): """Soft deletes submissions data.""" instance_ids = request.data.get("instance_ids") delete_all_submissions = strtobool(request.data.get("delete_all", "False")) + # get param to trigger permanent submission deletion + permanent_delete = strtobool(request.data.get("permanent_delete", "False")) + enable_submission_permanent_delete = getattr( + settings, "ENABLE_SUBMISSION_PERMANENT_DELETE", False + ) # pylint: disable=attribute-defined-outside-init self.object = self.get_object() @@ -365,7 +370,11 @@ def destroy(self, request, *args, **kwargs): ) for instance in queryset.iterator(): - delete_instance(instance, 
request.user) + if permanent_delete and enable_submission_permanent_delete: + instance.delete() + else: + # enable soft deletion + delete_instance(instance, request.user) # updates the num_of_submissions for the form. after_count = self.object.submission_count(force_update=True) @@ -393,7 +402,18 @@ def destroy(self, request, *args, **kwargs): if request.user.has_perm(CAN_DELETE_SUBMISSION, self.object.xform): instance_id = self.object.pk - delete_instance(self.object, request.user) + if permanent_delete and enable_submission_permanent_delete: + self.object.delete() + else: + # enable soft deletion + delete_instance(self.object, request.user) + + # updates the num_of_submissions for the form. + self.object.xform.submission_count(force_update=True) + + # update the date modified field of the project + self.object.xform.project.date_modified = timezone.now() + self.object.xform.project.save(update_fields=["date_modified"]) # send message send_message( From 8fd6c330a3d256636fb986b34aeb33fb8278cab7 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Tue, 13 Jun 2023 19:56:45 +0300 Subject: [PATCH 012/270] Add tests Signed-off-by: Kipchirchir Sigei --- .../api/tests/viewsets/test_data_viewset.py | 137 ++++++++++++++++++ onadata/apps/api/viewsets/data_viewset.py | 5 +- 2 files changed, 140 insertions(+), 2 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_data_viewset.py b/onadata/apps/api/tests/viewsets/test_data_viewset.py index d4a8f9cecc..69638e18ed 100644 --- a/onadata/apps/api/tests/viewsets/test_data_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_data_viewset.py @@ -1729,6 +1729,143 @@ def test_deletion_of_bulk_submissions(self, send_message_mock): self.assertEqual(current_count, 2) self.assertEqual(self.xform.num_of_submissions, 2) + @override_settings(ENABLE_SUBMISSION_PERMANENT_DELETE=True) + @patch("onadata.apps.api.viewsets.data_viewset.send_message") + def test_submissions_permanent_deletion(self, send_message_mock): + """ + Test that 
permanent submission deletions work + """ + self._make_submissions() + self.xform.refresh_from_db() + formid = self.xform.pk + dataid = self.xform.instances.all().order_by("id")[0].pk + view = DataViewSet.as_view({"delete": "destroy", "get": "list"}) + + # initial count = 4 submissions + request = self.factory.get("/", **self.extra) + response = view(request, pk=formid) + self.assertEqual(len(response.data), 4) + + request = self.factory.delete( + "/", **self.extra, data={"permanent_delete": True} + ) + response = view(request, pk=formid, dataid=dataid) + self.assertEqual(response.status_code, 204) + + # test that xform submission count is updated + self.xform.refresh_from_db() + self.assertEqual(self.xform.num_of_submissions, 3) + + # Test project details updated successfully + self.assertEqual( + self.xform.project.date_modified.strftime("%Y-%m-%d %H:%M:%S"), + timezone.now().strftime("%Y-%m-%d %H:%M:%S"), + ) + + # message sent upon delete + self.assertTrue(send_message_mock.called) + send_message_mock.assert_called_with( + instance_id=dataid, + target_id=formid, + target_type=XFORM, + user=request.user, + message_verb=SUBMISSION_DELETED, + ) + + # second delete of same submission should return 404 + request = self.factory.delete( + "/", **self.extra, data={"permanent_delete": True} + ) + response = view(request, pk=formid, dataid=dataid) + self.assertEqual(response.status_code, 404) + + # remaining 3 submissions + request = self.factory.get("/", **self.extra) + response = view(request, pk=formid) + self.assertEqual(len(response.data), 3) + + @override_settings(ENABLE_SUBMISSION_PERMANENT_DELETE=True) + @patch("onadata.apps.api.viewsets.data_viewset.send_message") + def test_permanent_deletions_bulk_submissions(self, send_message_mock): + """ + Test that permanent bulk submission deletions work + """ + self._make_submissions() + self.xform.refresh_from_db() + + formid = self.xform.pk + initial_count = self.xform.num_of_submissions + view = 
DataViewSet.as_view({"delete": "destroy"}) + + # test with valid instance id's + records_to_be_deleted = self.xform.instances.all()[:2] + instance_ids = ",".join([str(i.pk) for i in records_to_be_deleted]) + data = {"instance_ids": instance_ids, "permanent_delete": True} + + request = self.factory.delete("/", data=data, **self.extra) + response = view(request, pk=formid) + + self.assertEqual(response.status_code, 200) + self.assertEqual( + response.data.get("message"), + "%d records were deleted" % len(records_to_be_deleted), + ) + self.assertTrue(send_message_mock.called) + send_message_mock.called_with( + [str(i.pk) for i in records_to_be_deleted], + formid, + XFORM, + request.user, + SUBMISSION_DELETED, + ) + self.xform.refresh_from_db() + current_count = self.xform.num_of_submissions + self.assertNotEqual(current_count, initial_count) + self.assertEqual(current_count, 2) + self.assertEqual(self.xform.num_of_submissions, 2) + + @override_settings(ENABLE_SUBMISSION_PERMANENT_DELETE=True) + @patch("onadata.apps.api.viewsets.data_viewset.send_message") + def test_permanent_instance_delete_inactive_form(self, send_message_mock): + """ + Test that permanent submission deletions works on inactive forms + """ + self._make_submissions() + formid = self.xform.pk + dataid = self.xform.instances.all().order_by("id")[0].pk + view = DataViewSet.as_view( + { + "delete": "destroy", + } + ) + + request = self.factory.delete( + "/", **self.extra, data={"permanent_delete": True} + ) + response = view(request, pk=formid, dataid=dataid) + + self.assertEqual(response.status_code, 204) + + # test that xform submission count is updated + self.xform.refresh_from_db() + self.assertEqual(self.xform.num_of_submissions, 3) + + # make form inactive + self.xform.downloadable = False + self.xform.save() + + dataid = self.xform.instances.filter(deleted_at=None).order_by("id")[0].pk + + request = self.factory.delete("/", **self.extra, data={"permanent_delete": True}) + response = view(request, 
pk=formid, dataid=dataid) + + self.assertEqual(response.status_code, 204) + + # test that xform submission count is updated + self.xform.refresh_from_db() + self.assertEqual(self.xform.num_of_submissions, 2) + self.assertTrue(send_message_mock.called) + @patch("onadata.apps.api.viewsets.data_viewset.send_message") def test_delete_submission_inactive_form(self, send_message_mock): self._make_submissions() diff --git a/onadata/apps/api/viewsets/data_viewset.py b/onadata/apps/api/viewsets/data_viewset.py index 889882997d..f8200468d5 100644 --- a/onadata/apps/api/viewsets/data_viewset.py +++ b/onadata/apps/api/viewsets/data_viewset.py @@ -337,8 +337,9 @@ def enketo(self, request, *args, **kwargs): return Response(data=data) + # pylint: disable=too-many-branches def destroy(self, request, *args, **kwargs): - """Soft deletes submissions data.""" + """Deletes submissions data.""" instance_ids = request.data.get("instance_ids") delete_all_submissions = strtobool(request.data.get("delete_all", "False")) # get param to trigger permanent submission deletion @@ -407,7 +408,7 @@ def destroy(self, request, *args, **kwargs): else: # enable soft deletion delete_instance(self.object, request.user) - + # updates the num_of_submissions for the form. 
self.object.xform.submission_count(force_update=True) From 1a40fbc9de19b1c6e1e3fad412e4d8220c44311d Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 14 Jun 2023 18:40:07 +0300 Subject: [PATCH 013/270] Cleanup Signed-off-by: Kipchirchir Sigei --- .../api/tests/viewsets/test_data_viewset.py | 20 +++++++++++++++ onadata/apps/api/viewsets/data_viewset.py | 25 ++++++++++++++++--- 2 files changed, 41 insertions(+), 4 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_data_viewset.py b/onadata/apps/api/tests/viewsets/test_data_viewset.py index 69638e18ed..d05b55a879 100644 --- a/onadata/apps/api/tests/viewsets/test_data_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_data_viewset.py @@ -1866,6 +1866,26 @@ def test_permanent_instance_delete_inactive_form(self, send_message_mock): self.assertEqual(self.xform.num_of_submissions, 2) self.assertTrue(send_message_mock.called) + @override_settings(ENABLE_SUBMISSION_PERMANENT_DELETE=False) + def test_failed_permanent_deletion(self): + """ + Test that permanent submission deletion throws bad request when + functionality is disabled + """ + self._make_submissions() + formid = self.xform.pk + dataid = self.xform.instances.all().order_by("id")[0].pk + view = DataViewSet.as_view({"delete": "destroy"}) + + request = self.factory.delete( + "/", **self.extra, data={"permanent_delete": True} + ) + response = view(request, pk=formid, dataid=dataid) + self.assertEqual(response.status_code, 400) + self.assertEqual( + response.data, {"error": "Permanent Submission deletion not allowed"} + ) + @patch("onadata.apps.api.viewsets.data_viewset.send_message") def test_delete_submission_inactive_form(self, send_message_mock): self._make_submissions() diff --git a/onadata/apps/api/viewsets/data_viewset.py b/onadata/apps/api/viewsets/data_viewset.py index f8200468d5..05169074e5 100644 --- a/onadata/apps/api/viewsets/data_viewset.py +++ b/onadata/apps/api/viewsets/data_viewset.py @@ -370,13 +370,24 @@ def destroy(self, 
request, *args, **kwargs): deleted_at__isnull=True, ) + error_msg = None for instance in queryset.iterator(): - if permanent_delete and enable_submission_permanent_delete: - instance.delete() + if permanent_delete: + if enable_submission_permanent_delete: + instance.delete() + else: + error_msg = { + "error": _("Permanent Submission deletion not allowed") + } + break else: # enable soft deletion delete_instance(instance, request.user) + if error_msg: + # return error msg if permanent deletion not enabled + return Response(error_msg, status=status.HTTP_400_BAD_REQUEST) + # updates the num_of_submissions for the form. after_count = self.object.submission_count(force_update=True) number_of_records_deleted = initial_count - after_count @@ -403,8 +414,14 @@ def destroy(self, request, *args, **kwargs): if request.user.has_perm(CAN_DELETE_SUBMISSION, self.object.xform): instance_id = self.object.pk - if permanent_delete and enable_submission_permanent_delete: - self.object.delete() + if permanent_delete: + if enable_submission_permanent_delete: + self.object.delete() + else: + error_msg = { + "error": _("Permanent Submission deletion not allowed") + } + return Response(error_msg, status=status.HTTP_400_BAD_REQUEST) else: # enable soft deletion delete_instance(self.object, request.user) From 1a17d089ec44251fd899172786653f720183e956 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Tue, 20 Jun 2023 09:54:00 +0300 Subject: [PATCH 014/270] Add documentation Signed-off-by: Kipchirchir Sigei --- docs/data.rst | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/docs/data.rst b/docs/data.rst index fa6587b3c3..cda282e25d 100644 --- a/docs/data.rst +++ b/docs/data.rst @@ -1099,6 +1099,43 @@ Response {"status_code": 200, "message": "3 records were deleted"} +Permanent Deletion of Submissions +------------------------------------ + +**Permanently Delete a specific submission instance** + +`DELETE /api/v1/data/{pk}/{dataid}` + +A POST payload 
of parameter `permanent_delete` with the value 'True'. The value is 'False' by default. + +**Payload** +:: + + permanent_delete = 'True' + + +Response +^^^^^^^^^ + +:: + HTTP 204 No Content + +**Permanently Delete a subset of submissions** + +`DELETE /api/v1/data/{pk}?permanent_delete=true&instance_ids=1,2,3` + +Response +^^^^^^^^^ + +:: + + { + "status_code": "200", + "message": "3 records were deleted" + } + + + GEOJSON ------- From 146daeeecec7d42e9d0b64d8180600aba46f7a7b Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Tue, 20 Jun 2023 10:49:15 +0300 Subject: [PATCH 015/270] Correctly parse param to boolean Signed-off-by: Kipchirchir Sigei --- onadata/apps/api/viewsets/data_viewset.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/onadata/apps/api/viewsets/data_viewset.py b/onadata/apps/api/viewsets/data_viewset.py index 05169074e5..90763cb16d 100644 --- a/onadata/apps/api/viewsets/data_viewset.py +++ b/onadata/apps/api/viewsets/data_viewset.py @@ -66,7 +66,7 @@ OSMSerializer, ) from onadata.libs.utils.api_export_tools import custom_response_handler -from onadata.libs.utils.common_tools import json_stream +from onadata.libs.utils.common_tools import json_stream, str_to_bool from onadata.libs.utils.viewer_tools import get_enketo_urls, get_form_url SAFE_METHODS = ["GET", "HEAD", "OPTIONS"] @@ -343,7 +343,7 @@ def destroy(self, request, *args, **kwargs): instance_ids = request.data.get("instance_ids") delete_all_submissions = strtobool(request.data.get("delete_all", "False")) # get param to trigger permanent submission deletion - permanent_delete = strtobool(request.data.get("permanent_delete", "False")) + permanent_delete = str_to_bool(request.data.get("permanent_delete")) enable_submission_permanent_delete = getattr( settings, "ENABLE_SUBMISSION_PERMANENT_DELETE", False ) From 4d92e746d4fe21fe9466536954f2dc7d4d906046 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Tue, 20 Jun 2023 12:39:29 +0300 Subject: [PATCH 016/270] 
Documentation cleanup Signed-off-by: Kipchirchir Sigei --- docs/data.rst | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/docs/data.rst b/docs/data.rst index cda282e25d..7305562f7c 100644 --- a/docs/data.rst +++ b/docs/data.rst @@ -1113,6 +1113,11 @@ A POST payload of parameter `permanent_delete` with the value 'True'. The value permanent_delete = 'True' +Example +^^^^^^^^^ +:: + + `curl -X DELETE https://api.ona.io/api/v1/data/28058' -d permanent_delete = 'True'` Response ^^^^^^^^^ @@ -1122,7 +1127,13 @@ Response **Permanently Delete a subset of submissions** -`DELETE /api/v1/data/{pk}?permanent_delete=true&instance_ids=1,2,3` +`DELETE /api/v1/data/{pk}` + +Example +^^^^^^^^^ +:: + + `curl -X DELETE https://api.ona.io/api/v1/data/28058' -d permanent_delete = 'True' -d instance_ids=101425,108428,1974624` Response ^^^^^^^^^ @@ -1135,7 +1146,6 @@ Response } - GEOJSON ------- From 5fef3b7ff9398652a575d7ec4ef2c5e1722133d2 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 21 Jun 2023 14:28:51 +0300 Subject: [PATCH 017/270] Update error msg Signed-off-by: Kipchirchir Sigei --- onadata/apps/api/viewsets/data_viewset.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/onadata/apps/api/viewsets/data_viewset.py b/onadata/apps/api/viewsets/data_viewset.py index 90763cb16d..850778f40f 100644 --- a/onadata/apps/api/viewsets/data_viewset.py +++ b/onadata/apps/api/viewsets/data_viewset.py @@ -377,7 +377,9 @@ def destroy(self, request, *args, **kwargs): instance.delete() else: error_msg = { - "error": _("Permanent Submission deletion not allowed") + "error": _( + "Permanent submission deletion is not enabled for this server." + ) } break else: @@ -419,7 +421,9 @@ def destroy(self, request, *args, **kwargs): self.object.delete() else: error_msg = { - "error": _("Permanent Submission deletion not allowed") + "error": _( + "Permanent submission deletion is not enabled for this server." 
+ ) } return Response(error_msg, status=status.HTTP_400_BAD_REQUEST) else: From f929305682bdab35f0acde385e096732a860a8c3 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 21 Jun 2023 15:17:20 +0300 Subject: [PATCH 018/270] Test enhancements and cleanup Signed-off-by: Kipchirchir Sigei --- docs/data.rst | 6 ++++-- .../api/tests/viewsets/test_data_viewset.py | 17 ++++++++++++++--- onadata/apps/api/viewsets/data_viewset.py | 17 ++++++----------- 3 files changed, 24 insertions(+), 16 deletions(-) diff --git a/docs/data.rst b/docs/data.rst index 7305562f7c..851e351b57 100644 --- a/docs/data.rst +++ b/docs/data.rst @@ -1108,6 +1108,8 @@ Permanent Deletion of Submissions A POST payload of parameter `permanent_delete` with the value 'True'. The value is 'False' by default. +Note: This functionality is only enabled when the ``ENABLE_SUBMISSION_PERMANENT_DELETE`` setting is set to `True` within the application + **Payload** :: @@ -1117,7 +1119,7 @@ Example ^^^^^^^^^ :: - `curl -X DELETE https://api.ona.io/api/v1/data/28058' -d permanent_delete = 'True'` + `curl -X DELETE https://api.ona.io/api/v1/data/28058' -d 'permanent_delete=True'` Response ^^^^^^^^^ @@ -1133,7 +1135,7 @@ Example ^^^^^^^^^ :: - `curl -X DELETE https://api.ona.io/api/v1/data/28058' -d permanent_delete = 'True' -d instance_ids=101425,108428,1974624` + `curl -X DELETE https://api.ona.io/api/v1/data/28058' -d 'permanent_delete=True' -d 'instance_ids=101425,108428,1974624'` Response ^^^^^^^^^ diff --git a/onadata/apps/api/tests/viewsets/test_data_viewset.py b/onadata/apps/api/tests/viewsets/test_data_viewset.py index d05b55a879..cb751b7888 100644 --- a/onadata/apps/api/tests/viewsets/test_data_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_data_viewset.py @@ -1755,6 +1755,7 @@ def test_submissions_permanent_deletion(self, send_message_mock): # test that xform submission count is updated self.xform.refresh_from_db() self.assertEqual(self.xform.num_of_submissions, 3) + 
self.assertEqual(self.xform.instances.count(), 3) # Test project details updated successfully self.assertEqual( @@ -1784,6 +1785,10 @@ def test_submissions_permanent_deletion(self, send_message_mock): response = view(request, pk=formid) self.assertEqual(len(response.data), 3) + # check number of instances and num_of_submissions field + self.assertEqual(self.xform.instances.count(), 3) + self.assertEqual(self.xform.num_of_submissions, 3) + @override_settings(ENABLE_SUBMISSION_PERMANENT_DELETE=True) @patch("onadata.apps.api.viewsets.data_viewset.send_message") def test_permanent_deletions_bulk_submissions(self, send_message_mock): @@ -1824,6 +1829,9 @@ def test_permanent_deletions_bulk_submissions(self, send_message_mock): self.assertEqual(current_count, 2) self.assertEqual(self.xform.num_of_submissions, 2) + # check number of xform instances + self.assertEqual(self.xform.instances.count(), 2) + @override_settings(ENABLE_SUBMISSION_PERMANENT_DELETE=True) @patch("onadata.apps.api.viewsets.data_viewset.send_message") def test_permanent_instance_delete_inactive_form(self, send_message_mock): @@ -1849,6 +1857,7 @@ def test_permanent_instance_delete_inactive_form(self, send_message_mock): # test that xform submission count is updated self.xform.refresh_from_db() self.assertEqual(self.xform.num_of_submissions, 3) + self.assertEqual(self.xform.instances.count(), 3) # make form inactive self.xform.downloadable = False @@ -1866,6 +1875,9 @@ def test_permanent_instance_delete_inactive_form(self, send_message_mock): self.assertEqual(self.xform.num_of_submissions, 2) self.assertTrue(send_message_mock.called) + # check number of instances and num_of_submissions field + self.assertEqual(self.xform.instances.count(), 2) + @override_settings(ENABLE_SUBMISSION_PERMANENT_DELETE=False) def test_failed_permanent_deletion(self): """ @@ -1882,9 +1894,8 @@ def test_failed_permanent_deletion(self): ) response = view(request, pk=formid, dataid=dataid) self.assertEqual(response.status_code, 
400) - self.assertEqual( - response.data, {"error": "Permanent Submission deletion not allowed"} - ) + error_msg = "Permanent submission deletion is not enabled for this server." + self.assertEqual(response.data, {"error": error_msg}) @patch("onadata.apps.api.viewsets.data_viewset.send_message") def test_delete_submission_inactive_form(self, send_message_mock): diff --git a/onadata/apps/api/viewsets/data_viewset.py b/onadata/apps/api/viewsets/data_viewset.py index 850778f40f..b13e512a7e 100644 --- a/onadata/apps/api/viewsets/data_viewset.py +++ b/onadata/apps/api/viewsets/data_viewset.py @@ -337,7 +337,7 @@ def enketo(self, request, *args, **kwargs): return Response(data=data) - # pylint: disable=too-many-branches + # pylint: disable=too-many-branches,too-many-locals def destroy(self, request, *args, **kwargs): """Deletes submissions data.""" instance_ids = request.data.get("instance_ids") @@ -347,6 +347,9 @@ def destroy(self, request, *args, **kwargs): enable_submission_permanent_delete = getattr( settings, "ENABLE_SUBMISSION_PERMANENT_DELETE", False ) + permanent_delete_disabled_msg = _( + "Permanent submission deletion is not enabled for this server." + ) # pylint: disable=attribute-defined-outside-init self.object = self.get_object() @@ -376,11 +379,7 @@ def destroy(self, request, *args, **kwargs): if enable_submission_permanent_delete: instance.delete() else: - error_msg = { - "error": _( - "Permanent submission deletion is not enabled for this server." - ) - } + error_msg = {"error": permanent_delete_disabled_msg} break else: # enable soft deletion @@ -420,11 +419,7 @@ def destroy(self, request, *args, **kwargs): if enable_submission_permanent_delete: self.object.delete() else: - error_msg = { - "error": _( - "Permanent submission deletion is not enabled for this server." 
- ) - } + error_msg = {"error": permanent_delete_disabled_msg} return Response(error_msg, status=status.HTTP_400_BAD_REQUEST) else: # enable soft deletion From 22476de418ac71121ef3e6778401f61b8a6189fc Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Fri, 30 Jun 2023 16:35:39 +0300 Subject: [PATCH 019/270] Update sqlparse package Signed-off-by: Kipchirchir Sigei --- requirements/azure.pip | 2 +- requirements/base.pip | 2 +- requirements/s3.pip | 2 +- requirements/ses.pip | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/azure.pip b/requirements/azure.pip index 7dd4659300..7265472daa 100644 --- a/requirements/azure.pip +++ b/requirements/azure.pip @@ -40,7 +40,7 @@ six==1.16.0 # via # azure-core # isodate -sqlparse==0.4.3 +sqlparse==0.4.4 # via django typing-extensions==4.5.0 # via diff --git a/requirements/base.pip b/requirements/base.pip index 70fc6b35ff..da5b382126 100644 --- a/requirements/base.pip +++ b/requirements/base.pip @@ -367,7 +367,7 @@ sphinxcontrib-serializinghtml==1.1.5 # via sphinx sqlalchemy==2.0.7 # via tabulator -sqlparse==0.4.3 +sqlparse==0.4.4 # via # django # django-debug-toolbar diff --git a/requirements/s3.pip b/requirements/s3.pip index 1abbe7b226..0787ff5893 100644 --- a/requirements/s3.pip +++ b/requirements/s3.pip @@ -30,7 +30,7 @@ s3transfer==0.6.0 # via boto3 six==1.16.0 # via python-dateutil -sqlparse==0.4.3 +sqlparse==0.4.4 # via django urllib3==1.26.15 # via botocore diff --git a/requirements/ses.pip b/requirements/ses.pip index b3c93e22bc..36227cf03b 100644 --- a/requirements/ses.pip +++ b/requirements/ses.pip @@ -34,7 +34,7 @@ s3transfer==0.6.0 # via boto3 six==1.16.0 # via python-dateutil -sqlparse==0.4.3 +sqlparse==0.4.4 # via django urllib3==1.26.15 # via botocore From c0ed0ae27f45ac313359438d8dc9deda68f2c3c3 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 22 Jun 2023 10:41:00 +0300 Subject: [PATCH 020/270] Add task to delete old submissions Signed-off-by: Kipchirchir Sigei --- 
onadata/apps/api/tasks.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/onadata/apps/api/tasks.py b/onadata/apps/api/tasks.py index d74cf87521..7fbd4c5569 100644 --- a/onadata/apps/api/tasks.py +++ b/onadata/apps/api/tasks.py @@ -4,16 +4,19 @@ """ import os import sys +from datetime import timedelta from celery.result import AsyncResult +from django.conf import settings from django.core.files.uploadedfile import TemporaryUploadedFile from django.core.files.storage import default_storage from django.contrib.auth import get_user_model +from django.utils import timezone from django.utils.datastructures import MultiValueDict from onadata.apps.api import tools from onadata.libs.utils.email import send_generic_email -from onadata.apps.logger.models.xform import XForm +from onadata.apps.logger.models import Instance, XForm from onadata.celeryapp import app User = get_user_model() @@ -106,3 +109,14 @@ def send_verification_email(email, message_txt, subject): def send_account_lockout_email(email, message_txt, subject): """Sends account locked email.""" send_generic_email(email, message_txt, subject) + +@app.task(ignore_result=True) +def delete_inactive_submissions(): + """ + Task to periodically delete soft deleted submissions + """ + submissions_lifespan = getattr(settings, "INACTIVE_SUBMISSIONS_LIFESPAN", 360) + time_threshold = timezone.now() - timedelta(days=submissions_lifespan) + # deletes soft deleted submissions that are older than time threshold + instances = Instance.objects.filter(deleted_at__gt=time_threshold) + instances.delete() From 6613dea0eca2c21cfd7090c858fc90624f285992 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Fri, 23 Jun 2023 17:42:09 +0300 Subject: [PATCH 021/270] Add tests Signed-off-by: Kipchirchir Sigei --- onadata/apps/api/tasks.py | 10 ++++-- onadata/apps/api/tests/views/test_tasks.py | 42 ++++++++++++++++++++++ 2 files changed, 49 insertions(+), 3 deletions(-) create mode 100644 
onadata/apps/api/tests/views/test_tasks.py diff --git a/onadata/apps/api/tasks.py b/onadata/apps/api/tasks.py index 7fbd4c5569..e29992a364 100644 --- a/onadata/apps/api/tasks.py +++ b/onadata/apps/api/tasks.py @@ -11,6 +11,7 @@ from django.core.files.uploadedfile import TemporaryUploadedFile from django.core.files.storage import default_storage from django.contrib.auth import get_user_model +from django.db.models import Q from django.utils import timezone from django.utils.datastructures import MultiValueDict @@ -110,13 +111,16 @@ def send_account_lockout_email(email, message_txt, subject): """Sends account locked email.""" send_generic_email(email, message_txt, subject) -@app.task(ignore_result=True) + +@app.task() def delete_inactive_submissions(): """ - Task to periodically delete soft deleted submissions + Task to periodically delete soft deleted submissions from db """ submissions_lifespan = getattr(settings, "INACTIVE_SUBMISSIONS_LIFESPAN", 360) time_threshold = timezone.now() - timedelta(days=submissions_lifespan) # deletes soft deleted submissions that are older than time threshold - instances = Instance.objects.filter(deleted_at__gt=time_threshold) + instances = Instance.objects.filter( + Q(deleted_at__isnull=False) | Q(deleted_at__gte=time_threshold), + ) instances.delete() diff --git a/onadata/apps/api/tests/views/test_tasks.py b/onadata/apps/api/tests/views/test_tasks.py new file mode 100644 index 0000000000..2a200fbc8c --- /dev/null +++ b/onadata/apps/api/tests/views/test_tasks.py @@ -0,0 +1,42 @@ +""" +API tasks test +""" +from datetime import timedelta + +from celery import current_app +from django.conf import settings +from django.test import override_settings + +from onadata.apps.api.tasks import delete_inactive_submissions +from onadata.apps.main.tests.test_base import TestBase + + +class TestAPITasks(TestBase): + """ + Test api tasks + """ + + def setUp(self): + super().setUp() + settings.CELERY_TASK_ALWAYS_EAGER = True + 
current_app.conf.CELERY_TASK_ALWAYS_EAGER = True + + # pylint: disable=invalid-name + @override_settings(INACTIVE_SUBMISSIONS_LIFESPAN=20) + def test_delete_inactive_submissions_async(self): # noqa + """Test delete_inactive_submissions() task""" + self._publish_transportation_form() + self._make_submissions() + self.xform.refresh_from_db() + # check submissions count + self.assertEqual(self.xform.instances.count(), 4) + # soft delete one of the instances + instance = self.xform.instances.last() + deleted_at = instance.date_created - timedelta(days=60) + instance.set_deleted(deleted_at, self.user) + # test that theres one soft deleted submission + self.assertEqual(self.xform.instances.filter(deleted_at__isnull=False).count(), 1) + delete_inactive_submissions() + # test that the soft deleted submission is deleted + # since deleted_at is greater than specified lifespan + self.assertEqual(self.xform.instances.filter(deleted_at__isnull=False).count(), 0) From b5d761c88cb32f8c5830fccfc8952746c66e1dcd Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Tue, 27 Jun 2023 11:08:05 +0300 Subject: [PATCH 022/270] Cleanup Signed-off-by: Kipchirchir Sigei --- onadata/apps/api/tasks.py | 4 +++- onadata/apps/api/tests/views/test_tasks.py | 18 ++++++++++++++---- 2 files changed, 17 insertions(+), 5 deletions(-) diff --git a/onadata/apps/api/tasks.py b/onadata/apps/api/tasks.py index e29992a364..d279ecd949 100644 --- a/onadata/apps/api/tasks.py +++ b/onadata/apps/api/tasks.py @@ -117,10 +117,12 @@ def delete_inactive_submissions(): """ Task to periodically delete soft deleted submissions from db """ - submissions_lifespan = getattr(settings, "INACTIVE_SUBMISSIONS_LIFESPAN", 360) + submissions_lifespan = getattr(settings, "INACTIVE_SUBMISSIONS_LIFESPAN", 365) time_threshold = timezone.now() - timedelta(days=submissions_lifespan) # deletes soft deleted submissions that are older than time threshold instances = Instance.objects.filter( Q(deleted_at__isnull=False) | 
Q(deleted_at__gte=time_threshold), + date_created__lte=time_threshold ) + # perform a bulk delete, to avoid triggering model signals instances.delete() diff --git a/onadata/apps/api/tests/views/test_tasks.py b/onadata/apps/api/tests/views/test_tasks.py index 2a200fbc8c..f665a7d71b 100644 --- a/onadata/apps/api/tests/views/test_tasks.py +++ b/onadata/apps/api/tests/views/test_tasks.py @@ -6,6 +6,7 @@ from celery import current_app from django.conf import settings from django.test import override_settings +from django.utils import timezone from onadata.apps.api.tasks import delete_inactive_submissions from onadata.apps.main.tests.test_base import TestBase @@ -23,7 +24,7 @@ def setUp(self): # pylint: disable=invalid-name @override_settings(INACTIVE_SUBMISSIONS_LIFESPAN=20) - def test_delete_inactive_submissions_async(self): # noqa + def test_delete_inactive_submissions_async(self): # noqa """Test delete_inactive_submissions() task""" self._publish_transportation_form() self._make_submissions() @@ -32,11 +33,20 @@ def test_delete_inactive_submissions_async(self): # noqa self.assertEqual(self.xform.instances.count(), 4) # soft delete one of the instances instance = self.xform.instances.last() - deleted_at = instance.date_created - timedelta(days=60) + # set submission date_created to be 90 days from now + instance.date_created = timezone.now() - timedelta(days=90) + instance.save() + # set deleted_at to be 60 days from now + # meaning the submission is soft deleted 30 days after being created + deleted_at = timezone.now() - timedelta(days=60) instance.set_deleted(deleted_at, self.user) # test that theres one soft deleted submission - self.assertEqual(self.xform.instances.filter(deleted_at__isnull=False).count(), 1) + self.assertEqual( + self.xform.instances.filter(deleted_at__isnull=False).count(), 1 + ) delete_inactive_submissions() # test that the soft deleted submission is deleted # since deleted_at is greater than specified lifespan - 
self.assertEqual(self.xform.instances.filter(deleted_at__isnull=False).count(), 0) + self.assertEqual( + self.xform.instances.filter(deleted_at__isnull=False).count(), 0 + ) From 10aede3245e03a41baae51978a28823b157c4daa Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Fri, 30 Jun 2023 09:57:01 +0300 Subject: [PATCH 023/270] Enhance tests Signed-off-by: Kipchirchir Sigei --- onadata/apps/api/tests/views/test_tasks.py | 47 ++++++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/onadata/apps/api/tests/views/test_tasks.py b/onadata/apps/api/tests/views/test_tasks.py index f665a7d71b..5e7bf7fdca 100644 --- a/onadata/apps/api/tests/views/test_tasks.py +++ b/onadata/apps/api/tests/views/test_tasks.py @@ -9,6 +9,8 @@ from django.utils import timezone from onadata.apps.api.tasks import delete_inactive_submissions +from onadata.apps.logger.models import Attachment +from onadata.apps.logger.models.instance import InstanceHistory from onadata.apps.main.tests.test_base import TestBase @@ -50,3 +52,48 @@ def test_delete_inactive_submissions_async(self): # noqa self.assertEqual( self.xform.instances.filter(deleted_at__isnull=False).count(), 0 ) + + # pylint: disable=invalid-name + @override_settings(INACTIVE_SUBMISSIONS_LIFESPAN=20) + def test_delete_inactive_submissions_with_attachments(self): # noqa + """Test delete_inactive_submissions() task""" + self._publish_transportation_form() + self._submit_transport_instance_w_attachment() + self._submit_transport_instance_w_uuid("transport_2011-07-25_19-05-36") + self.xform.refresh_from_db() + # check submissions count + self.assertEqual(self.xform.instances.count(), 2) + # check attachments count + self.assertEqual(Attachment.objects.all().count(), 1) + instance = self.xform.instances.first() + # create instance history + InstanceHistory.objects.create( + xml=instance.xml, + checksum=instance.checksum, + xform_instance=instance, + ) + history_count = InstanceHistory.objects.filter( + xform_instance__id=instance.pk 
+ ).count() + self.assertEqual(history_count, 1) + # set submission date_created to be 90 days from now + instance.date_created = timezone.now() - timedelta(days=90) + instance.save() + # soft delete submission + # set deleted_at to be 60 days from now + # meaning the submission is soft deleted 30 days after being created + deleted_at = timezone.now() - timedelta(days=60) + instance.set_deleted(deleted_at, self.user) + # test that theres one soft deleted submission + self.assertEqual( + self.xform.instances.filter(deleted_at__isnull=False).count(), 1 + ) + delete_inactive_submissions() + # test that the soft deleted submission is deleted + # since deleted_at is greater than specified lifespan + self.assertEqual( + self.xform.instances.filter(deleted_at__isnull=False).count(), 0 + ) + # test that the deletion cascades to InstanceHistory & attachments + self.assertEqual(Attachment.objects.all().count(), 0) + self.assertEqual(Attachment.objects.all().count(), 0) From 445df7a3f5d56465bcb5f7c8595d86c59a57184d Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Fri, 30 Jun 2023 15:41:11 +0300 Subject: [PATCH 024/270] Delete attachments from storage Signed-off-by: Kipchirchir Sigei --- onadata/apps/api/tasks.py | 37 ++++++++++++++++++++++++++----------- 1 file changed, 26 insertions(+), 11 deletions(-) diff --git a/onadata/apps/api/tasks.py b/onadata/apps/api/tasks.py index d279ecd949..11b0bb360c 100644 --- a/onadata/apps/api/tasks.py +++ b/onadata/apps/api/tasks.py @@ -9,7 +9,7 @@ from celery.result import AsyncResult from django.conf import settings from django.core.files.uploadedfile import TemporaryUploadedFile -from django.core.files.storage import default_storage +from django.core.files.storage import default_storage, get_storage_class from django.contrib.auth import get_user_model from django.db.models import Q from django.utils import timezone @@ -17,7 +17,8 @@ from onadata.apps.api import tools from onadata.libs.utils.email import send_generic_email -from 
onadata.apps.logger.models import Instance, XForm +from onadata.libs.utils.model_tools import queryset_iterator +from onadata.apps.logger.models import Instance, XForm, Attachment from onadata.celeryapp import app User = get_user_model() @@ -112,17 +113,31 @@ def send_account_lockout_email(email, message_txt, subject): send_generic_email(email, message_txt, subject) +def delete_attachments(attachment): + storage = get_storage_class()() + storage.exists(attachment.media_file.name) and storage.delete( + attachment.media_file.name + ) + + @app.task() def delete_inactive_submissions(): """ Task to periodically delete soft deleted submissions from db """ - submissions_lifespan = getattr(settings, "INACTIVE_SUBMISSIONS_LIFESPAN", 365) - time_threshold = timezone.now() - timedelta(days=submissions_lifespan) - # deletes soft deleted submissions that are older than time threshold - instances = Instance.objects.filter( - Q(deleted_at__isnull=False) | Q(deleted_at__gte=time_threshold), - date_created__lte=time_threshold - ) - # perform a bulk delete, to avoid triggering model signals - instances.delete() + submissions_lifespan = getattr(settings, "INACTIVE_SUBMISSIONS_LIFESPAN", None) + if submissions_lifespan: + time_threshold = timezone.now() - timedelta(days=submissions_lifespan) + # delete instance attachments + instances = Instance.objects.filter( + Q(deleted_at__isnull=False) | Q(deleted_at__gte=time_threshold), + date_created__lte=time_threshold, + ) + for instance in queryset_iterator(instances): + attachments = Attachment.objects.filter(instance=instance) + _ = [ + delete_attachments(attachment) + for attachment in queryset_iterator(attachments) + ] + # delete submission + instance.delete() From 31c7d1eea5625fc1d9fe64f436c7bfe49358e6c1 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Fri, 30 Jun 2023 15:41:33 +0300 Subject: [PATCH 025/270] Update tests Signed-off-by: Kipchirchir Sigei --- onadata/apps/api/tasks.py | 4 ++- 
onadata/apps/api/tests/views/test_tasks.py | 27 +++++++++++++++---- .../transport_2011-07-25_19-05-49_edited.xml | 1 + 3 files changed, 26 insertions(+), 6 deletions(-) create mode 100755 onadata/apps/main/tests/fixtures/transportation/instances/transport_2011-07-25_19-05-49/transport_2011-07-25_19-05-49_edited.xml diff --git a/onadata/apps/api/tasks.py b/onadata/apps/api/tasks.py index 11b0bb360c..21b10e520b 100644 --- a/onadata/apps/api/tasks.py +++ b/onadata/apps/api/tasks.py @@ -22,6 +22,7 @@ from onadata.celeryapp import app User = get_user_model() +storage = get_storage_class()() def recreate_tmp_file(name, path, mime_type): @@ -114,7 +115,8 @@ def send_account_lockout_email(email, message_txt, subject): def delete_attachments(attachment): - storage = get_storage_class()() + """Util function to delete attachments""" + # pylint: disable=expression-not-assigned storage.exists(attachment.media_file.name) and storage.delete( attachment.media_file.name ) diff --git a/onadata/apps/api/tests/views/test_tasks.py b/onadata/apps/api/tests/views/test_tasks.py index 5e7bf7fdca..3aadb609f4 100644 --- a/onadata/apps/api/tests/views/test_tasks.py +++ b/onadata/apps/api/tests/views/test_tasks.py @@ -1,10 +1,12 @@ """ API tasks test """ +import os from datetime import timedelta from celery import current_app from django.conf import settings +from django.core.files.storage import get_storage_class from django.test import override_settings from django.utils import timezone @@ -65,13 +67,24 @@ def test_delete_inactive_submissions_with_attachments(self): # noqa self.assertEqual(self.xform.instances.count(), 2) # check attachments count self.assertEqual(Attachment.objects.all().count(), 1) + # check if attachment file exists in file system + default_storage = get_storage_class()() + self.assertTrue( + default_storage.exists(self.attachment.media_file.name) + ) instance = self.xform.instances.first() # create instance history - InstanceHistory.objects.create( - xml=instance.xml, - 
checksum=instance.checksum, - xform_instance=instance, + s = self.surveys[0] + xml_edit_submission_file_path = os.path.join( + self.this_directory, + "fixtures", + "transportation", + "instances", + s, + s + "_edited.xml", ) + # edit submission + self._make_submission(xml_edit_submission_file_path) history_count = InstanceHistory.objects.filter( xform_instance__id=instance.pk ).count() @@ -96,4 +109,8 @@ def test_delete_inactive_submissions_with_attachments(self): # noqa ) # test that the deletion cascades to InstanceHistory & attachments self.assertEqual(Attachment.objects.all().count(), 0) - self.assertEqual(Attachment.objects.all().count(), 0) + self.assertEqual(InstanceHistory.objects.all().count(), 0) + # check that attachment doesn't exist in storage + self.assertFalse( + default_storage.exists(self.attachment.media_file.name) + ) diff --git a/onadata/apps/main/tests/fixtures/transportation/instances/transport_2011-07-25_19-05-49/transport_2011-07-25_19-05-49_edited.xml b/onadata/apps/main/tests/fixtures/transportation/instances/transport_2011-07-25_19-05-49/transport_2011-07-25_19-05-49_edited.xml new file mode 100755 index 0000000000..c08177e29b --- /dev/null +++ b/onadata/apps/main/tests/fixtures/transportation/instances/transport_2011-07-25_19-05-49/transport_2011-07-25_19-05-49_edited.xml @@ -0,0 +1 @@ +none1335783522563.jpguuid:6b2cc313-fc09-437e-8139-fcd32f695d41uuid:5b2cc313-fc09-437e-8149-fcd32f695d41 From a709f15ba51ed78979fc4c412debbdb15f0e3a25 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Fri, 30 Jun 2023 18:12:26 +0300 Subject: [PATCH 026/270] Add pre_delete signal to delete attachments Signed-off-by: Kipchirchir Sigei --- onadata/apps/api/tasks.py | 21 +++------------------ onadata/apps/logger/models/instance.py | 22 +++++++++++++++++++++- 2 files changed, 24 insertions(+), 19 deletions(-) diff --git a/onadata/apps/api/tasks.py b/onadata/apps/api/tasks.py index 21b10e520b..1d6c0ef46d 100644 --- a/onadata/apps/api/tasks.py +++ 
b/onadata/apps/api/tasks.py @@ -9,7 +9,7 @@ from celery.result import AsyncResult from django.conf import settings from django.core.files.uploadedfile import TemporaryUploadedFile -from django.core.files.storage import default_storage, get_storage_class +from django.core.files.storage import default_storage from django.contrib.auth import get_user_model from django.db.models import Q from django.utils import timezone @@ -18,11 +18,10 @@ from onadata.apps.api import tools from onadata.libs.utils.email import send_generic_email from onadata.libs.utils.model_tools import queryset_iterator -from onadata.apps.logger.models import Instance, XForm, Attachment +from onadata.apps.logger.models import Instance, XForm from onadata.celeryapp import app User = get_user_model() -storage = get_storage_class()() def recreate_tmp_file(name, path, mime_type): @@ -114,14 +113,6 @@ def send_account_lockout_email(email, message_txt, subject): send_generic_email(email, message_txt, subject) -def delete_attachments(attachment): - """Util function to delete attachments""" - # pylint: disable=expression-not-assigned - storage.exists(attachment.media_file.name) and storage.delete( - attachment.media_file.name - ) - - @app.task() def delete_inactive_submissions(): """ @@ -132,14 +123,8 @@ def delete_inactive_submissions(): time_threshold = timezone.now() - timedelta(days=submissions_lifespan) # delete instance attachments instances = Instance.objects.filter( - Q(deleted_at__isnull=False) | Q(deleted_at__gte=time_threshold), - date_created__lte=time_threshold, + Q(deleted_at__isnull=False) | Q(deleted_at__gte=time_threshold) ) for instance in queryset_iterator(instances): - attachments = Attachment.objects.filter(instance=instance) - _ = [ - delete_attachments(attachment) - for attachment in queryset_iterator(attachments) - ] # delete submission instance.delete() diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index e03fc06d8f..3bc59b1221 100644 
--- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -12,9 +12,10 @@ from django.contrib.gis.db import models from django.contrib.gis.geos import GeometryCollection, Point from django.core.cache import cache +from django.core.files.storage import get_storage_class from django.db import connection, transaction from django.db.models import Q -from django.db.models.signals import post_delete, post_save +from django.db.models.signals import post_delete, post_save, pre_delete from django.urls import reverse from django.utils import timezone from django.utils.translation import gettext as _ @@ -33,6 +34,7 @@ ) from onadata.celeryapp import app from onadata.libs.utils.common_tools import report_exception +from onadata.libs.utils.model_tools import queryset_iterator from onadata.libs.data.query import get_numeric_fields from onadata.libs.utils.cache_tools import ( DATAVIEW_COUNT, @@ -83,6 +85,7 @@ ) # pylint: disable=invalid-name User = get_user_model() +storage = get_storage_class()() def get_attachment_url(attachment, suffix=None): @@ -831,6 +834,17 @@ def post_save_submission(sender, instance=None, created=False, **kwargs): update_project_date_modified(instance.pk, created) +# pylint: disable=unused-argument +def permanently_delete_attachments(sender, instance=None, created=False, **kwargs): + if instance: + attachments = instance.attachments.all() + for attachment in queryset_iterator(attachments): + # pylint: disable=expression-not-assigned + storage.exists(attachment.media_file.name) and storage.delete( + attachment.media_file.name + ) + + post_save.connect( post_save_submission, sender=Instance, dispatch_uid="post_save_submission" ) @@ -841,6 +855,12 @@ def post_save_submission(sender, instance=None, created=False, **kwargs): dispatch_uid="update_xform_submission_count_delete", ) +pre_delete.connect( + permanently_delete_attachments, + sender=Instance, + dispatch_uid="permanently_delete_attachments" +) + class 
InstanceHistory(models.Model, InstanceBaseClass): """Stores deleted submission XML to maintain a history of edits.""" From 00fe7831a5f7252acec6b3bd32a585dcbafe50cf Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Mon, 3 Jul 2023 12:11:30 +0300 Subject: [PATCH 027/270] Update instance filter query to AND operation Signed-off-by: Kipchirchir Sigei --- onadata/apps/api/tasks.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/onadata/apps/api/tasks.py b/onadata/apps/api/tasks.py index 1d6c0ef46d..90a5f49869 100644 --- a/onadata/apps/api/tasks.py +++ b/onadata/apps/api/tasks.py @@ -11,7 +11,6 @@ from django.core.files.uploadedfile import TemporaryUploadedFile from django.core.files.storage import default_storage from django.contrib.auth import get_user_model -from django.db.models import Q from django.utils import timezone from django.utils.datastructures import MultiValueDict @@ -123,7 +122,7 @@ def delete_inactive_submissions(): time_threshold = timezone.now() - timedelta(days=submissions_lifespan) # delete instance attachments instances = Instance.objects.filter( - Q(deleted_at__isnull=False) | Q(deleted_at__gte=time_threshold) + deleted_at__isnull=False, deleted_at__lte=time_threshold ) for instance in queryset_iterator(instances): # delete submission From 9ab17f1b65039519fff215411d74008a84c213c4 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Mon, 3 Jul 2023 21:34:40 +0300 Subject: [PATCH 028/270] Tag release v3.10.0 Signed-off-by: Kipchirchir Sigei --- CHANGES.rst | 16 ++++++++++++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 18 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 84a0e3c601..1af7c8c63b 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,22 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.10.0(2023-07-03) +------------------- + +- Add task to permanently delete soft-deleted submissions + `PR #2446 ` + [@KipSigei] +- Update sqlparse
package to fix security vulnerability + `PR #2447 ` + [@KipSigei] +- Add ability to permanently delete submissions + `PR #2437 ` + [@KipSigei] +- Update pyxform version to v1.12.0 with case management support + `PR #2341 ` + [@KipSigei] + v3.9.2(2023-06-19) ------------------ diff --git a/onadata/__init__.py b/onadata/__init__.py index 7bddb7affb..337c3e0055 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.9.2" +__version__ = "3.10.0" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index 10156d4de2..4483c1525b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.9.2 +version = 3.10.0 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 719cd82687bda00ddd1592d4584b3f730168e79e Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 19 Jul 2023 16:22:34 +0300 Subject: [PATCH 029/270] Optimize attachments endpoint: Filter attachments using media pk Signed-off-by: Kipchirchir Sigei --- .../apps/logger/templates/downloadSubmission.xml | 2 +- onadata/apps/viewer/views.py | 13 ++++++++----- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/onadata/apps/logger/templates/downloadSubmission.xml b/onadata/apps/logger/templates/downloadSubmission.xml index 189fb5d4f4..4dbee8df4f 100644 --- a/onadata/apps/logger/templates/downloadSubmission.xml +++ b/onadata/apps/logger/templates/downloadSubmission.xml @@ -6,6 +6,6 @@ {% for media in media_files %} {{ media.name|safe }} md5:{{ media.file_hash }} - {{ host }}{% url "attachment_url" 'original' %}?media_file={{ media.media_file.name|safe }} + {{ host }}{% url "attachment_url" 'original' %}?media_file={{ media.media_file.name|safe }}&attachment_id={{ media.pk }} {% endfor %} diff --git a/onadata/apps/viewer/views.py b/onadata/apps/viewer/views.py index 
427a1ebcc1..5f7f2cdcc4 100644 --- a/onadata/apps/viewer/views.py +++ b/onadata/apps/viewer/views.py @@ -866,13 +866,16 @@ def attachment_url(request, size="medium"): """ media_file = request.GET.get("media_file") no_redirect = request.GET.get("no_redirect") + attachment_id = request.GET.get("attachment_id") if not media_file: return HttpResponseNotFound(_("Attachment not found")) - - result = Attachment.objects.filter(media_file=media_file).order_by()[0:1] - if not result: - return HttpResponseNotFound(_("Attachment not found")) - attachment = result[0] + if attachment_id: + attachment = get_object_or_404(Attachment, pk=attachment_id) + else: + result = Attachment.objects.filter(media_file=media_file).order_by()[0:1] + if not result: + return HttpResponseNotFound(_("Attachment not found")) + attachment = result[0] if size == "original" and no_redirect == "true": response = response_with_mimetype_and_name( From 0c4bdd9f9d3a39e83fb4c7b18b4bed47044ef7b5 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 19 Jul 2023 17:30:32 +0300 Subject: [PATCH 030/270] Add tests Signed-off-by: Kipchirchir Sigei --- onadata/apps/logger/tests/test_briefcase_api.py | 2 +- .../transportation/view/downloadSubmission.xml | 2 +- onadata/apps/viewer/tests/test_attachment_url.py | 15 +++++++++++++++ onadata/apps/viewer/views.py | 3 ++- 4 files changed, 19 insertions(+), 3 deletions(-) diff --git a/onadata/apps/logger/tests/test_briefcase_api.py b/onadata/apps/logger/tests/test_briefcase_api.py index b99a21060c..93bb54e82f 100644 --- a/onadata/apps/logger/tests/test_briefcase_api.py +++ b/onadata/apps/logger/tests/test_briefcase_api.py @@ -223,9 +223,9 @@ def test_view_downloadSubmission(self): for var in ( ("{{submissionDate}}", instance.date_created.isoformat()), ("{{form_id}}", str(self.xform.id)), + ("{{attachment_id}}", str(self.attachment.id)), ): text = text.replace(*var) - self.assertContains(response, instanceId, status_code=200) 
self.assertMultiLineEqual(response.content.decode("utf-8"), text) diff --git a/onadata/apps/main/tests/fixtures/transportation/view/downloadSubmission.xml b/onadata/apps/main/tests/fixtures/transportation/view/downloadSubmission.xml index 75eb71b3c7..0deba37c3d 100644 --- a/onadata/apps/main/tests/fixtures/transportation/view/downloadSubmission.xml +++ b/onadata/apps/main/tests/fixtures/transportation/view/downloadSubmission.xml @@ -6,6 +6,6 @@ 1335783522563.jpg md5:2ca0d22073a9b6b4ebe51368b08da60c - http://testserver/attachment/original?media_file=bob/attachments/{{form_id}}_transportation_2011_07_25/1335783522563.jpg + http://testserver/attachment/original?media_file=bob/attachments/{{form_id}}_transportation_2011_07_25/1335783522563.jpg&attachment_id={{attachment_id}} diff --git a/onadata/apps/viewer/tests/test_attachment_url.py b/onadata/apps/viewer/tests/test_attachment_url.py index fa2c504fc7..8a96beed8e 100644 --- a/onadata/apps/viewer/tests/test_attachment_url.py +++ b/onadata/apps/viewer/tests/test_attachment_url.py @@ -43,6 +43,21 @@ def test_attachment_has_mimetype(self): attachment = Attachment.objects.all().reverse()[0] self.assertEqual(attachment.mimetype, 'image/jpeg') + def test_attachment_url_w_media_id(self): + self.assertEqual( + Attachment.objects.count(), self.attachment_count + 1) + response = self.client.get( + self.url, {"attachment_id": self.attachment.id}) + self.assertEqual(response.status_code, 302) # redirects to amazon + + def test_attachment_url_w_media_id_no_redirect(self): + self.assertEqual( + Attachment.objects.count(), self.attachment_count + 1) + response = self.client.get( + self.url, {"attachment_id": self.attachment.id, + 'no_redirect': 'true'}) + self.assertEqual(response.status_code, 200) # no redirects to amazon + def tearDown(self): path = os.path.join(settings.MEDIA_ROOT, self.user.username) for root, dirs, files in os.walk(path, topdown=False): diff --git a/onadata/apps/viewer/views.py b/onadata/apps/viewer/views.py 
index 5f7f2cdcc4..53e8804394 100644 --- a/onadata/apps/viewer/views.py +++ b/onadata/apps/viewer/views.py @@ -867,7 +867,8 @@ def attachment_url(request, size="medium"): media_file = request.GET.get("media_file") no_redirect = request.GET.get("no_redirect") attachment_id = request.GET.get("attachment_id") - if not media_file: + + if not media_file and not attachment_id: return HttpResponseNotFound(_("Attachment not found")) if attachment_id: attachment = get_object_or_404(Attachment, pk=attachment_id) From ef55fcb63ea194dded398a55521920d43dc80c4e Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 19 Jul 2023 18:13:59 +0300 Subject: [PATCH 031/270] Cleanup Signed-off-by: Kipchirchir Sigei --- .../apps/api/tests/viewsets/test_briefcase_viewset.py | 9 ++++++--- onadata/apps/logger/templates/downloadSubmission.xml | 2 +- onadata/apps/logger/tests/test_briefcase_api.py | 2 +- .../fixtures/transportation/view/downloadSubmission.xml | 2 +- onadata/apps/viewer/tests/test_attachment_url.py | 3 +++ 5 files changed, 12 insertions(+), 6 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py b/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py index cc8ce908a7..bfb984a611 100644 --- a/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py @@ -301,7 +301,8 @@ def test_view_downloadSubmission(self): text = f.read() for var in ((u'{{submissionDate}}', instance.date_created.isoformat()), - (u'{{form_id}}', str(self.xform.id))): + (u'{{form_id}}', str(self.xform.id)), + (u'{{media_id}}', str(self.attachment.id))): text = text.replace(*var) self.assertContains(response, instanceId, status_code=200) self.assertMultiLineEqual(response.content.decode('utf-8'), text) @@ -466,7 +467,8 @@ def test_view_downloadSubmission_no_xmlns(self, mock_get_object): text = f.read() for var in ((u'{{submissionDate}}', instance.date_created.isoformat()), - (u'{{form_id}}', str(self.xform.id))): 
+ (u'{{form_id}}', str(self.xform.id)), + (u'{{media_id}}', str(self.attachment.id))): text = text.replace(*var) self.assertNotIn( 'transportation id="transportation_2011_07_25"' @@ -521,7 +523,8 @@ def test_view_downloadSubmission_multiple_nodes(self, mock_get_object): text = f.read() for var in ((u'{{submissionDate}}', instance.date_created.isoformat()), - (u'{{form_id}}', str(self.xform.id))): + (u'{{form_id}}', str(self.xform.id)), + (u'{{media_id}}', str(self.attachment.id))): text = text.replace(*var) self.assertContains(response, instanceId, status_code=200) diff --git a/onadata/apps/logger/templates/downloadSubmission.xml b/onadata/apps/logger/templates/downloadSubmission.xml index 4dbee8df4f..2b92207870 100644 --- a/onadata/apps/logger/templates/downloadSubmission.xml +++ b/onadata/apps/logger/templates/downloadSubmission.xml @@ -6,6 +6,6 @@ {% for media in media_files %} {{ media.name|safe }} md5:{{ media.file_hash }} - {{ host }}{% url "attachment_url" 'original' %}?media_file={{ media.media_file.name|safe }}&attachment_id={{ media.pk }} + {{ host }}{% url "attachment_url" 'original' %}?media_file={{ media.media_file.name|safe }}&attachment_id={{ media.pk|safe }} {% endfor %} diff --git a/onadata/apps/logger/tests/test_briefcase_api.py b/onadata/apps/logger/tests/test_briefcase_api.py index 93bb54e82f..1f9875665d 100644 --- a/onadata/apps/logger/tests/test_briefcase_api.py +++ b/onadata/apps/logger/tests/test_briefcase_api.py @@ -223,7 +223,7 @@ def test_view_downloadSubmission(self): for var in ( ("{{submissionDate}}", instance.date_created.isoformat()), ("{{form_id}}", str(self.xform.id)), - ("{{attachment_id}}", str(self.attachment.id)), + ("{{media_id}}", str(self.attachment.id)), ): text = text.replace(*var) self.assertContains(response, instanceId, status_code=200) diff --git a/onadata/apps/main/tests/fixtures/transportation/view/downloadSubmission.xml b/onadata/apps/main/tests/fixtures/transportation/view/downloadSubmission.xml index 
0deba37c3d..7439014a46 100644 --- a/onadata/apps/main/tests/fixtures/transportation/view/downloadSubmission.xml +++ b/onadata/apps/main/tests/fixtures/transportation/view/downloadSubmission.xml @@ -6,6 +6,6 @@ 1335783522563.jpg md5:2ca0d22073a9b6b4ebe51368b08da60c - http://testserver/attachment/original?media_file=bob/attachments/{{form_id}}_transportation_2011_07_25/1335783522563.jpg&attachment_id={{attachment_id}} + http://testserver/attachment/original?media_file=bob/attachments/{{form_id}}_transportation_2011_07_25/1335783522563.jpg&attachment_id={{media_id}} diff --git a/onadata/apps/viewer/tests/test_attachment_url.py b/onadata/apps/viewer/tests/test_attachment_url.py index 8a96beed8e..166a72568d 100644 --- a/onadata/apps/viewer/tests/test_attachment_url.py +++ b/onadata/apps/viewer/tests/test_attachment_url.py @@ -44,13 +44,16 @@ def test_attachment_has_mimetype(self): self.assertEqual(attachment.mimetype, 'image/jpeg') def test_attachment_url_w_media_id(self): + """Test attachment url with attachment id""" self.assertEqual( Attachment.objects.count(), self.attachment_count + 1) response = self.client.get( self.url, {"attachment_id": self.attachment.id}) self.assertEqual(response.status_code, 302) # redirects to amazon + # pylint: disable=invalid-name def test_attachment_url_w_media_id_no_redirect(self): + """Test attachment url with attachment id no redirect""" self.assertEqual( Attachment.objects.count(), self.attachment_count + 1) response = self.client.get( From 58fbee8e86b765c96ec80eddb6312b2efa246ca0 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 20 Jul 2023 11:14:02 +0300 Subject: [PATCH 032/270] Fix media id formatting issue Signed-off-by: Kipchirchir Sigei --- onadata/apps/logger/templates/downloadSubmission.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/onadata/apps/logger/templates/downloadSubmission.xml b/onadata/apps/logger/templates/downloadSubmission.xml index 2b92207870..790ff68894 100644 --- 
a/onadata/apps/logger/templates/downloadSubmission.xml +++ b/onadata/apps/logger/templates/downloadSubmission.xml @@ -6,6 +6,6 @@ {% for media in media_files %} {{ media.name|safe }} md5:{{ media.file_hash }} - {{ host }}{% url "attachment_url" 'original' %}?media_file={{ media.media_file.name|safe }}&attachment_id={{ media.pk|safe }} + {{ host }}{% url "attachment_url" 'original' %}?media_file={{ media.media_file.name|safe }}&attachment_id={{ media.pk|stringformat:"d" }} {% endfor %} From a538ad789b4e53b1451ee2c196f11b3b006e2c95 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 20 Jul 2023 08:50:21 +0300 Subject: [PATCH 033/270] Bump to django v3.2.20 Signed-off-by: Kipchirchir Sigei --- requirements/azure.in | 2 +- requirements/azure.pip | 2 +- requirements/base.pip | 2 +- requirements/dev.pip | 2 +- requirements/s3.in | 2 +- requirements/s3.pip | 2 +- requirements/ses.in | 2 +- requirements/ses.pip | 2 +- setup.cfg | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/requirements/azure.in b/requirements/azure.in index 6c6e5d293d..d748531d54 100644 --- a/requirements/azure.in +++ b/requirements/azure.in @@ -1,3 +1,3 @@ django-storages[azure] cryptography>=39.0.1 -django >=3.2.19,<4 +django >=3.2.20,<4 diff --git a/requirements/azure.pip b/requirements/azure.pip index 7265472daa..81bc6b2a44 100644 --- a/requirements/azure.pip +++ b/requirements/azure.pip @@ -20,7 +20,7 @@ cryptography==40.0.1 # via # -r requirements/azure.in # azure-storage-blob -django==3.2.19 +django==3.2.20 # via # -r requirements/azure.in # django-storages diff --git a/requirements/base.pip b/requirements/base.pip index da5b382126..485387f4aa 100644 --- a/requirements/base.pip +++ b/requirements/base.pip @@ -97,7 +97,7 @@ deprecated==1.2.13 # onadata dict2xml==1.7.3 # via onadata -django==3.2.19 +django==3.2.20 # via # django-activity-stream # django-cors-headers diff --git a/requirements/dev.pip b/requirements/dev.pip index 7121f9cb33..0cb7ca6faf 100644 --- 
a/requirements/dev.pip +++ b/requirements/dev.pip @@ -114,7 +114,7 @@ dict2xml==1.7.1 # via onadata dill==0.3.5.1 # via pylint -django==3.2.19 +django==3.2.20 # via # django-cors-headers # django-csp diff --git a/requirements/s3.in b/requirements/s3.in index d83a493085..fd471546b4 100644 --- a/requirements/s3.in +++ b/requirements/s3.in @@ -1,3 +1,3 @@ django-storages -django >=3.2.19,<4 +django >=3.2.20,<4 boto3 diff --git a/requirements/s3.pip b/requirements/s3.pip index 0787ff5893..a051f86a47 100644 --- a/requirements/s3.pip +++ b/requirements/s3.pip @@ -12,7 +12,7 @@ botocore==1.29.103 # via # boto3 # s3transfer -django==3.2.19 +django==3.2.20 # via # -r requirements/s3.in # django-storages diff --git a/requirements/ses.in b/requirements/ses.in index 30520026f1..1f0304a6b8 100644 --- a/requirements/ses.in +++ b/requirements/ses.in @@ -1,3 +1,3 @@ boto -django >=3.2.19,<4 +django >=3.2.20,<4 django-ses diff --git a/requirements/ses.pip b/requirements/ses.pip index 36227cf03b..51e19aa96b 100644 --- a/requirements/ses.pip +++ b/requirements/ses.pip @@ -14,7 +14,7 @@ botocore==1.29.103 # via # boto3 # s3transfer -django==3.2.19 +django==3.2.20 # via # -r requirements/ses.in # django-ses diff --git a/setup.cfg b/setup.cfg index 4483c1525b..c923121fa2 100644 --- a/setup.cfg +++ b/setup.cfg @@ -27,7 +27,7 @@ tests_require = mock requests-mock install_requires = - Django>=3.2.19,<4 + Django>=3.2.20,<4 django-guardian django-registration-redux django-templated-email From 12bf5d36abf91b33161b4e2c4a7d0b85a1c7d4a1 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 20 Jul 2023 12:53:09 +0300 Subject: [PATCH 034/270] Tag release v3.10.1 Signed-off-by: Kipchirchir Sigei --- CHANGES.rst | 10 ++++++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 1af7c8c63b..7aafe57b90 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,16 @@ Changelog for Onadata ``* represents releases 
that introduce new migrations`` +v3.10.1(2023-07-20) +------------------- + +- Upgrade django to v3.2.20 + `PR #2454 ` + [@KipSigei] +- Optimize attachments endpoint + `PR #2452 ` + [@KipSigei] + v3.10.0(2023-07-03) ------------------- diff --git a/onadata/__init__.py b/onadata/__init__.py index 337c3e0055..671e4cfd42 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.10.0" +__version__ = "3.10.1" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index c923121fa2..02ce64bd06 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.10.0 +version = 3.10.1 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From ea3ac4f77b868115274d94e972c4c6b1e465b394 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Thu, 20 Jul 2023 16:20:00 +0300 Subject: [PATCH 035/270] Add ability to create, update project invitations (#2430) * add ProjectInvitation model model to store a project invitation details * APIRequestFactory test * add tests for get project invitations list * add tests for get project invitations list * add create project invitation endpoint * add tests for create project invitation endpoint * update project invitation role * add endpoint to revoke project invitation * add endpoint to resend project invitation * update comments * restore onadata/libs/filters.py * make project invitation status readonly have status field in create project invitation endpoint readonly * add project invitation endpoints documentation add API documentation remove project key from API response * format project invitations documentation * format project invitations documentation * format project invitations documentation * format project invitations documentation * format project invitations documentation * update path for revoke, resend 
project invitation * expose ProjectInvitation model to Django admin * revert changes to expose ProjectInvitation in Django admin * fix lint errors fix lint errors for file onadata/libs/serializers/project_invitation_serializer * fix lint errors fix lint errors for file onadata/apps/logger/models/project_invitation.py * fix cylic dependency * fix linting errors * refactor code * add code comments * refactor code * suppress linting error suppress linting error abstract-method / Method 'create' is abstract in class 'BaseSerializer' but is not overridden * remove duplicate variable declaration * separate update project invitation from create * add test case to update project invitation * fix typo in docs * fix typo in docs * Send and accept project invitation (#2443) * send project invitation email * add tests for ProjectInvitationEmailTestCase and refactor * accept project invitation accept all pending project invitations when user creates account * have invitation_id, invitation_token as query params change invitation_id, invitation_token from being submitted as part of the payload but instead received from query params * add tests for tasks add tests for send_project_invitation_email_async, accept_project_invitation_async * add documentation for accept project invitation * enhance project invitation docs * enhance project invitation docs * update method docstring * update method docstring * fix rst typos in docs * fix rst typos in docs * fix rst typos in docs * fix rst typos in docs * add fields invited_by, accepted_by for ProjectInvitation * remove unused code * update docs * add test case * provide flexibility to add extra context data to invitation email templates * catch exceptions * refactor code * refactor code * fix linting error * fix linting errors * fix linting erros * fix linting erros * fix linting errors * fix linting errors * fix linting errors * fix linting errors * fix linting errors * Update invitations url path Signed-off-by: Kipchirchir Sigei * 
Fix typon in invitations endpoint methods Signed-off-by: Kipchirchir Sigei * Cleanup Signed-off-by: Kipchirchir Sigei * remove HTML ampersand character from invitation mail * remove unique together ProjectInvitation model there can be multiple revoked invitations. To support this, unique together integrity check has been removed. To prevent duplicate invitations from being created, a validation check has been added to the create invitation endpoint * refactor code * add temporary logging for debugging * log temporarily for debugging * log temporarily for debugging * log temp for debuggig * remove debugging logs * fix linting error add missing method docstring * share projects if invitation invalid/missing If id and token are invalid or are not provided but the user registers using an email that matches a pending invitation, then that project is shared with the user. * refactor code * fix failing test fix failing test remove PATCH support endpoint /api/v1/projects/{pk}/invitations update documentation * update documentatio * update documentation * fix bug when working with multipart/formdata * fix typo in docs * fix Invitation already exists when updating invitation when the email does not change when updating invitation, the error 'Invitation already exists' occurred. The fix was to have the check for uniqueness only when creating * fix 'User already exists' when updating an accepted invitation ensure only pending invitations can be updated * send project invtation email when email is updated * fix typo * Only accept project invitations whose email match new user email (#2449) * remove project invitation id and token verification remove invitation_id and invitation_token query params from invitation email link. 
remove support for allowing a user to register using a different email from the one the invite was sent to add a post_save signal to accept only invitations that match the new user email and remove implementation for accepting invitation from the UserProfileSerializer. This is because a user can also be created using OIDC * update project invitation documentation * fix linting errors * fix error when creating user with no password fix AttributeError: 'NoneType' object has no attribute 'lower' when creating a user with password field missing from the payload * validate password if not None when creating user * refactor cod * use queryset_iterator to iterate queryset --------- Signed-off-by: Kipchirchir Sigei Co-authored-by: Kipchirchir Sigei --- docs/projects.rst | 237 ++++++ onadata/apps/api/permissions.py | 9 +- onadata/apps/api/tasks.py | 20 +- onadata/apps/api/tests/test_tasks.py | 32 + .../tests/viewsets/test_project_viewset.py | 773 +++++++++++++++++- .../viewsets/test_user_profile_viewset.py | 199 ++++- onadata/apps/api/urls/v1_urls.py | 4 +- onadata/apps/api/viewsets/project_viewset.py | 86 +- .../migrations/0005_projectinvitation.py | 32 + .../migrations/0006_auto_20230622_0533.py | 26 + ...alter_projectinvitation_unique_together.py | 17 + onadata/apps/logger/models/__init__.py | 1 + .../apps/logger/models/project_invitation.py | 69 ++ .../tests/models/test_project_invitation.py | 127 +++ onadata/apps/main/signals.py | 29 +- onadata/apps/main/tests/test_signals.py | 78 ++ .../project_invitation_serializer.py | 166 ++++ .../serializers/user_profile_serializer.py | 42 +- .../libs/templates/projects/invitation.txt | 11 + .../templates/projects/invitation_subject.txt | 1 + onadata/libs/tests/utils/test_email.py | 87 +- onadata/libs/utils/email.py | 58 ++ onadata/libs/utils/user_auth.py | 16 +- 23 files changed, 2023 insertions(+), 97 deletions(-) create mode 100644 onadata/apps/api/tests/test_tasks.py create mode 100644 
onadata/apps/logger/migrations/0005_projectinvitation.py create mode 100644 onadata/apps/logger/migrations/0006_auto_20230622_0533.py create mode 100644 onadata/apps/logger/migrations/0007_alter_projectinvitation_unique_together.py create mode 100644 onadata/apps/logger/models/project_invitation.py create mode 100644 onadata/apps/logger/tests/models/test_project_invitation.py create mode 100644 onadata/apps/main/tests/test_signals.py create mode 100644 onadata/libs/serializers/project_invitation_serializer.py create mode 100644 onadata/libs/templates/projects/invitation.txt create mode 100644 onadata/libs/templates/projects/invitation_subject.txt diff --git a/docs/projects.rst b/docs/projects.rst index 75eb64f906..677f76ae1e 100644 --- a/docs/projects.rst +++ b/docs/projects.rst @@ -8,6 +8,7 @@ Where: - ``pk`` - is the project id - ``formid`` - is the form id - ``owner`` - is the username for the user or organization of the project +- ``invitation_pk`` - is the project invitation id Register a new Project ----------------------- @@ -515,3 +516,239 @@ Get user profiles that have starred a project
 	GET /api/v1/projects/{pk}/star
+ +Get Project Invitation List +--------------------------- + +.. raw:: html + +
GET /api/v1/projects/{pk}/invitations
+ +Example +^^^^^^^ + +:: + + curl -X GET https://api.ona.io/api/v1/projects/1/invitations + +Response +^^^^^^^^ + +:: + + [ + { + "id": 1, + "email":"janedoe@example.com", + "role":"readonly", + "status": 1 + + }, + { + "id": 2, + "email":"johndoe@example.com", + "role":"editor", + "status": 2, + } + ] + +Get a list of project invitations with a specific status +-------------------------------------------------------- + +The available choices are: + +- ``1`` - Pending. Invitations which have not been accepted by recipients. +- ``2`` - Accepted. Invitations which have been accepted by recipients. +- ``3`` - Revoked. Invitations which were cancelled. + + +.. raw:: html + +
GET /api/v1/projects/{pk}/invitations?status=2
+ + +Example +^^^^^^^ + +:: + + curl -X GET https://api.ona.io/api/v1/projects/1/invitations?status=2 + +Response +^^^^^^^^ + +:: + + [ + + { + "id": 2, + "email":"johndoe@example.com", + "role":"editor", + "status": 2, + } + ] + + +Create a new project invitation +------------------------------- + +Invite an **unregistered** user to a project. An email will be sent to the user which has a link for them to +create an account. + +.. raw:: html + +
POST /api/v1/projects/{pk}/invitations
+ +Example +^^^^^^^ + +:: + + curl -X POST -d "email=janedoe@example.com" -d "role=readonly" https://api.ona.io/api/v1/projects/1/invitations + + +``email``: The email address of the unregistered user. + +- Should be a valid email. If the ``PROJECT_INVITATION_EMAIL_DOMAIN_WHITELIST`` setting has been enabled, then the email domain has to be in the whitelist for it to be also valid + +**Example** + +:: + + PROJECT_INVITATION_EMAIL_DOMAIN_WHITELIST=["foo.com", "bar.com"] + +- Email should not be that of a registered user + +``role``: The user's role for the project. + +- Must be a valid role + + +Response +^^^^^^^^ + +:: + + { + "id": 1, + "email": "janedoe@example.com", + "role": "readonly", + "status": 1, + } + + +The link embedded in the email will be of the format ``http://{url}`` +where: + +- ``url`` - is the URL the recipient will be redirected to on clicking the link. The default is ``{domain}/api/v1/profiles`` where ``domain`` is domain where the API is hosted. + +Normally, you would want the email recipient to be redirected to a web app. This can be achieved by +adding the setting ``PROJECT_INVITATION_URL`` + +**Example** + +:: + + PROJECT_INVITATION_URL = 'https://example.com/register' + + +Update a project invitation +--------------------------- + +.. raw:: html + +
+    PUT /api/v1/projects/{pk}/invitations
+    
+ + +Example +^^^^^^^ + +:: + + curl -X PUT -d "email=janedoe@example.com" -d "role=editor" -d "invitation_id=1" https://api.ona.io/api/v1/projects/1/invitations/1 + +Response +^^^^^^^^ + +:: + + { + "id": 1, + "email": "janedoe@example.com", + "role": "editor", + "status": 1, + } + + +Resend a project invitation +--------------------------- + +Resend a project invitation email + +.. raw:: html + +
POST /api/v1/projects/{pk}/resend-invitation
+ +Example +^^^^^^^ + +:: + + curl -X POST -d "invitation_id=6" https://api.ona.io/api/v1/projects/1/resend-invitation + + +``invitation_id``: The primary key of the ``ProjectInvitation`` to resend. + +- Must be a ``ProjectInvitation`` whose status is **Pending** + +Response +^^^^^^^^ + +:: + + { + "message": "Success" + } + +Revoke a project invitation +--------------------------- + +Cancel a project invitation. A revoked invitation means that project will **not** be shared with the new user +even if they accept the invitation. + +.. raw:: html + +
POST /api/v1/projects/{pk}/revoke-invitation
+ +Example +^^^^^^^ + +:: + + curl -X POST -d "invitation_id=6" https://api.ona.io/api/v1/projects/1/revoke-invitation + +``invitation_id``: The primary key of the ``ProjectInvitation`` to resend. + +- Must be a ``ProjectInvitation`` whose status is **Pending** + +Response +^^^^^^^^ + +:: + + { + "message": "Success" + } + + +Accept a project invitation +--------------------------- + +Since a project invitation is sent to an unregistered user, acceptance of the invitation is handled +when `creating a new user `_. + +All pending invitations whose email match the new user's email will be accepted and projects shared with the +user \ No newline at end of file diff --git a/onadata/apps/api/permissions.py b/onadata/apps/api/permissions.py index 8d5db54b2b..efe64ad4cc 100644 --- a/onadata/apps/api/permissions.py +++ b/onadata/apps/api/permissions.py @@ -191,7 +191,6 @@ def has_permission(self, request, view): is_authenticated = request and request.user.is_authenticated if is_authenticated and view.action == "create": - # Handle bulk create # if doing a bulk create we will fail the entire process if the # user lacks permissions for even one instance @@ -278,6 +277,12 @@ def has_object_permission(self, request, view, obj): if remove and request.user.username.lower() == username.lower(): return True + if view.action == "invitations" and not ( + ManagerRole.user_has_role(request.user, obj) + or OwnerRole.user_has_role(request.user, obj) + ): + return False + return super().has_object_permission(request, view, obj) @@ -306,7 +311,6 @@ def has_permission(self, request, view): and (request.user.is_authenticated or not self.authenticated_users_only) and request.user.has_perms(perms) ): - return True return False @@ -387,7 +391,6 @@ class UserViewSetPermissions(DjangoModelPermissionsOrAnonReadOnly): """ def has_permission(self, request, view): - if request.user.is_anonymous and view.action == "list": if request.GET.get("search"): raise exceptions.NotAuthenticated() diff --git 
a/onadata/apps/api/tasks.py b/onadata/apps/api/tasks.py index 90a5f49869..268cbe1931 100644 --- a/onadata/apps/api/tasks.py +++ b/onadata/apps/api/tasks.py @@ -4,6 +4,7 @@ """ import os import sys +import logging from datetime import timedelta from celery.result import AsyncResult @@ -17,7 +18,8 @@ from onadata.apps.api import tools from onadata.libs.utils.email import send_generic_email from onadata.libs.utils.model_tools import queryset_iterator -from onadata.apps.logger.models import Instance, XForm +from onadata.apps.logger.models import Instance, ProjectInvitation, XForm +from onadata.libs.utils.email import ProjectInvitationEmail from onadata.celeryapp import app User = get_user_model() @@ -127,3 +129,19 @@ def delete_inactive_submissions(): for instance in queryset_iterator(instances): # delete submission instance.delete() + + +@app.task() +def send_project_invitation_email_async( + invitation_id: str, url: str +): # pylint: disable=invalid-name + """Sends project invitation email asynchronously""" + try: + invitation = ProjectInvitation.objects.get(id=invitation_id) + + except ProjectInvitation.DoesNotExist as err: + logging.exception(err) + + else: + email = ProjectInvitationEmail(invitation, url) + email.send() diff --git a/onadata/apps/api/tests/test_tasks.py b/onadata/apps/api/tests/test_tasks.py new file mode 100644 index 0000000000..d1a1a3e648 --- /dev/null +++ b/onadata/apps/api/tests/test_tasks.py @@ -0,0 +1,32 @@ +"""Tests for module onadata.apps.api.tasks""" + +from unittest.mock import patch + +from onadata.apps.main.tests.test_base import TestBase +from onadata.apps.api.tasks import ( + send_project_invitation_email_async, +) +from onadata.apps.logger.models import ProjectInvitation +from onadata.libs.utils.user_auth import get_user_default_project +from onadata.libs.utils.email import ProjectInvitationEmail + + +class SendProjectInivtationEmailAsyncTestCase(TestBase): + """Tests for send_project_invitation_email_async""" + + def setUp(self) -> 
None: + super().setUp() + + project = get_user_default_project(self.user) + self.invitation = ProjectInvitation.objects.create( + project=project, + email="janedoe@example.com", + role="manager", + ) + + @patch.object(ProjectInvitationEmail, "send") + def test_sends_email(self, mock_send): + """Test email is sent""" + url = "https://example.com/register" + send_project_invitation_email_async(self.invitation.id, url) + mock_send.assert_called_once() diff --git a/onadata/apps/api/tests/viewsets/test_project_viewset.py b/onadata/apps/api/tests/viewsets/test_project_viewset.py index f0f8d29e7d..7bea96d600 100644 --- a/onadata/apps/api/tests/viewsets/test_project_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_project_viewset.py @@ -3,19 +3,22 @@ Test ProjectViewSet module. """ import json +import pytz import os from collections import OrderedDict from six import iteritems from operator import itemgetter +from datetime import datetime from django.conf import settings from django.db.models import Q from django.core.cache import cache from django.test import override_settings +from django.contrib.auth import get_user_model from rest_framework.authtoken.models import Token from httmock import HTTMock, urlmatch -from mock import MagicMock, patch +from mock import MagicMock, patch, Mock import dateutil.parser import requests @@ -31,7 +34,7 @@ from onadata.apps.api.viewsets.project_viewset import ProjectViewSet from onadata.apps.api.viewsets.team_viewset import TeamViewSet from onadata.apps.api.viewsets.xform_viewset import XFormViewSet -from onadata.apps.logger.models import Project, XForm, XFormVersion +from onadata.apps.logger.models import Project, XForm, XFormVersion, ProjectInvitation from onadata.apps.main.models import MetaData from onadata.libs import permissions as role from onadata.libs.models.share_project import ShareProject @@ -53,6 +56,8 @@ ProjectSerializer, ) +User = get_user_model() + ROLES = [ ReadOnlyRoleNoDownload, ReadOnlyRole, @@ -174,10 +179,7 @@ 
def test_projects_list_with_pagination(self): self.assertEqual(len(response.data), 2) # test with pagination enabled - params = { - "page": 1, - "page_size": 1 - } + params = {"page": 1, "page_size": 1} request = self.factory.get("/", data=params, **self.extra) request.user = self.user response = view(request) @@ -539,21 +541,21 @@ def test_projects_create_many_users(self): def test_form_publish_odk_validation_errors(self): self._project_create() path = os.path.join( - settings.PROJECT_ROOT, - "apps", - "main", - "tests", - "fixtures", - "transportation", - "error_test_form.xlsx", - ) + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "transportation", + "error_test_form.xlsx", + ) with open(path, "rb") as xlsx_file: view = ProjectViewSet.as_view({"post": "forms"}) post_data = {"xls_file": xlsx_file} request = self.factory.post("/", data=post_data, **self.extra) response = view(request, pk=self.project.pk) self.assertEqual(response.status_code, 400) - self.assertIn("ODK Validate Errors:", response.data.get('text')) + self.assertIn("ODK Validate Errors:", response.data.get("text")) # pylint: disable=invalid-name def test_publish_xls_form_to_project(self): @@ -597,8 +599,8 @@ def test_view_xls_form(self): resultset = MetaData.objects.filter( Q(object_id=self.xform.pk), Q(data_type="enketo_url") - | Q(data_type="enketo_preview_url") - | Q(data_type="enketo_single_submit_url"), + | Q(data_type="enketo_preview_url") # noqa W503 + | Q(data_type="enketo_single_submit_url"), # noqa W503 ) url = resultset.get(data_type="enketo_url") preview_url = resultset.get(data_type="enketo_preview_url") @@ -1774,8 +1776,7 @@ def test_project_all_users_can_share_remove_themselves(self): view = ProjectViewSet.as_view({"put": "share"}) data = {"username": "alice", "remove": True} - for (role_name, role_class) in iteritems(role.ROLES): - + for role_name, role_class in iteritems(role.ROLES): ShareProject(self.project, "alice", role_name).save() 
self.assertTrue(role_class.user_has_role(self.user, self.project)) @@ -2274,7 +2275,6 @@ def test_public_project_on_creation(self): self.assertTrue(project.shared) def test_permission_passed_to_dataview_parent_form(self): - self._project_create() project1 = self.project self._publish_xls_form_to_project() @@ -2308,8 +2308,7 @@ def test_permission_passed_to_dataview_parent_form(self): view = ProjectViewSet.as_view({"put": "share"}) data = {"username": "alice", "remove": True} - for (role_name, role_class) in iteritems(role.ROLES): - + for role_name, role_class in iteritems(role.ROLES): ShareProject(self.project, "alice", role_name).save() self.assertFalse(role_class.user_has_role(self.user, project1)) @@ -2327,7 +2326,6 @@ def test_permission_passed_to_dataview_parent_form(self): self.assertFalse(role_class.user_has_role(self.user, self.xform)) def test_permission_not_passed_to_dataview_parent_form(self): - self._project_create() project1 = self.project self._publish_xls_form_to_project() @@ -2361,8 +2359,7 @@ def test_permission_not_passed_to_dataview_parent_form(self): view = ProjectViewSet.as_view({"put": "share"}) data = {"username": "alice", "remove": True} - for (role_name, role_class) in iteritems(role.ROLES): - + for role_name, role_class in iteritems(role.ROLES): ShareProject(self.project, "alice", role_name).save() self.assertFalse(role_class.user_has_role(self.user, project1)) @@ -2693,3 +2690,729 @@ def test_project_caching(self): self.xform.num_of_submissions, ) self.assertEqual(response.data["num_datasets"], 1) + + +class GetProjectInvitationListTestCase(TestAbstractViewSet): + """Tests for get project invitation list""" + + def setUp(self): + super().setUp() + self._project_create() + self.view = ProjectViewSet.as_view({"get": "invitations"}) + + def test_authentication(self): + """Authentication is required""" + request = self.factory.get("/") + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 404) + + def 
test_invalid_project(self): + """Invalid project is handled""" + request = self.factory.get("/", **self.extra) + response = self.view(request, pk=817) + self.assertEqual(response.status_code, 404) + + def test_only_admins_allowed(self): + """Only project admins are allowed to get invitation list""" + # login as editor alice + alice_data = {"username": "alice", "email": "alice@localhost.com"} + alice_profile = self._create_user_profile(alice_data) + self._login_user_and_profile(alice_data) + request = self.factory.get("/", **self.extra) + + # only owner and manager roles have permission + for role_class in ROLES_ORDERED: + ShareProject(self.project, "alice", role_class.name).save() + self.assertTrue(role_class.user_has_role(alice_profile.user, self.project)) + response = self.view(request, pk=self.project.pk) + + if role_class.name in [ManagerRole.name, OwnerRole.name]: + self.assertEqual(response.status_code, 200) + else: + self.assertEqual(response.status_code, 403) + + def test_invitation_list(self): + """Returns project invitation list""" + jane_invitation = ProjectInvitation.objects.create( + email="janedoe@example.com", + project=self.project, + role="editor", + status=ProjectInvitation.Status.PENDING, + ) + john_invitation = ProjectInvitation.objects.create( + email="johndoe@example.com", + project=self.project, + role="editor", + status=ProjectInvitation.Status.ACCEPTED, + ) + request = self.factory.get("/", **self.extra) + response = self.view(request, pk=self.project.pk) + expected_response = [ + { + "id": jane_invitation.pk, + "email": "janedoe@example.com", + "role": "editor", + "status": 1, + }, + { + "id": john_invitation.pk, + "email": "johndoe@example.com", + "role": "editor", + "status": 2, + }, + ] + self.assertEqual(response.status_code, 200) + self.assertEqual(response.data, expected_response) + + def test_no_invitations_available(self): + """Returns an empty list if no invitations available""" + request = self.factory.get("/", **self.extra) + 
response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.data, []) + + def test_status_query_param_works(self): + """Filtering by status query parameter works""" + jane_invitation = ProjectInvitation.objects.create( + email="janedoe@example.com", + project=self.project, + role="editor", + status=ProjectInvitation.Status.PENDING, + ) + ProjectInvitation.objects.create( + email="johndoe@example.com", + project=self.project, + role="editor", + status=ProjectInvitation.Status.ACCEPTED, + ) + request = self.factory.get("/", data={"status": "1"}, **self.extra) + response = self.view(request, pk=self.project.pk) + expected_response = [ + { + "id": jane_invitation.pk, + "email": "janedoe@example.com", + "role": "editor", + "status": 1, + } + ] + self.assertEqual(response.status_code, 200) + self.assertEqual(response.data, expected_response) + + +@patch( + "onadata.libs.serializers.project_invitation_serializer.send_project_invitation_email_async.delay" +) +class CreateProjectInvitationTestCase(TestAbstractViewSet): + """Tests for create project invitation""" + + def setUp(self): + super().setUp() + self._project_create() + self.view = ProjectViewSet.as_view({"post": "invitations"}) + + def test_authentication(self, mock_send_mail): + """Authentication is required""" + request = self.factory.post("/", data={}) + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 401) + + def test_invalid_project(self, mock_send_mail): + """Invalid project is handled""" + request = self.factory.post("/", data={}, **self.extra) + response = self.view(request, pk=817) + self.assertEqual(response.status_code, 404) + + def test_only_admins_allowed(self, mock_send_mail): + """Only project admins are allowed to create project invitation""" + # login as editor alice + alice_data = {"username": "alice", "email": "alice@localhost.com"} + alice_profile = self._create_user_profile(alice_data) + 
self._login_user_and_profile(alice_data) + request = self.factory.post("/", data={}, **self.extra) + + # only owner and manager roles have permission + for role_class in ROLES_ORDERED: + ShareProject(self.project, "alice", role_class.name).save() + self.assertTrue(role_class.user_has_role(alice_profile.user, self.project)) + response = self.view(request, pk=self.project.pk) + + if role_class.name in [ManagerRole.name, OwnerRole.name]: + self.assertEqual(response.status_code, 400) + else: + self.assertEqual(response.status_code, 403) + + @override_settings(PROJECT_INVITATION_URL="https://example.com/register") + def test_create_invitation(self, mock_send_mail): + """Project invitation can be created""" + post_data = { + "email": "janedoe@example.com", + "role": "editor", + } + request = self.factory.post( + "/", + data=json.dumps(post_data), + content_type="application/json", + **self.extra, + ) + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 200) + self.assertEqual(self.project.invitations.count(), 1) + invitation = self.project.invitations.first() + self.assertEqual( + response.data, + { + "id": invitation.pk, + "email": "janedoe@example.com", + "role": "editor", + "status": 1, + }, + ) + mock_send_mail.assert_called_once_with( + invitation.pk, "https://example.com/register" + ) + self.assertEqual(invitation.invited_by, self.user) + + # duplicate invitation not allowed + request = self.factory.post( + "/", + data=json.dumps(post_data), + content_type="application/json", + **self.extra, + ) + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 400) + + def test_email_required(self, mock_send_mail): + """email is required""" + # blank string + post_data = {"email": "", "role": "editor"} + request = self.factory.post( + "/", + data=json.dumps(post_data), + content_type="application/json", + **self.extra, + ) + response = self.view(request, pk=self.project.pk) + 
self.assertEqual(response.status_code, 400) + + # missing field + post_data = {"role": "editor"} + request = self.factory.post( + "/", + data=json.dumps(post_data), + content_type="application/json", + **self.extra, + ) + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 400) + mock_send_mail.assert_not_called() + + def test_email_valid(self, mock_send_mail): + """email should be a valid email""" + # a valid email + post_data = {"email": "akalkal", "role": "editor"} + request = self.factory.post( + "/", + data=json.dumps(post_data), + content_type="application/json", + **self.extra, + ) + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 400) + mock_send_mail.assert_not_called() + + @override_settings(PROJECT_INVITATION_EMAIL_DOMAIN_WHITELIST=["foo.com"]) + def test_email_whitelist(self, mock_send_mail): + """Email address domain whitelist works""" + # email domain should be in whitelist + post_data = {"email": "janedoe@xample.com", "role": "editor"} + request = self.factory.post( + "/", + data=json.dumps(post_data), + content_type="application/json", + **self.extra, + ) + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 400) + + # email in whitelist is successful + post_data = {"email": "janedoe@foo.com", "role": "editor"} + request = self.factory.post( + "/", + data=json.dumps(post_data), + content_type="application/json", + **self.extra, + ) + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 200) + mock_send_mail.assert_called_once() + + @override_settings(PROJECT_INVITATION_EMAIL_DOMAIN_WHITELIST=["FOo.com"]) + def test_email_whitelist_case_insenstive(self, mock_send_mail): + """Email domain whitelist check should be case insenstive""" + post_data = {"email": "janedoe@FOO.com", "role": "editor"} + request = self.factory.post( + "/", + data=json.dumps(post_data), + content_type="application/json", + 
**self.extra, + ) + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 200) + mock_send_mail.assert_called_once() + + def test_user_unregistered(self, mock_send_mail): + """You cannot invite an existing user + + The email should be of a user who is not registered + """ + alice_data = {"username": "alice", "email": "alice@localhost.com"} + self._create_user_profile(alice_data) + post_data = {"email": alice_data["email"], "role": "editor"} + request = self.factory.post( + "/", + data=json.dumps(post_data), + content_type="application/json", + **self.extra, + ) + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 400) + mock_send_mail.assert_not_called() + + def test_role_required(self, mock_send_mail): + """role field is required""" + # blank role + post_data = {"email": "janedoe@example.com", "role": ""} + request = self.factory.post( + "/", + data=json.dumps(post_data), + content_type="application/json", + **self.extra, + ) + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 400) + + # missing role + post_data = {"email": "janedoe@example.com"} + request = self.factory.post( + "/", + data=json.dumps(post_data), + content_type="application/json", + **self.extra, + ) + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 400) + mock_send_mail.assert_not_called() + + def test_role_valid(self, mock_send_mail): + """Role should be a valid choice""" + post_data = {"email": "janedoe@example.com", "role": "abracadbra"} + request = self.factory.post( + "/", + data=json.dumps(post_data), + content_type="application/json", + **self.extra, + ) + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 400) + mock_send_mail.assert_not_called() + + +@patch( + "onadata.libs.serializers.project_invitation_serializer.send_project_invitation_email_async.delay" +) +class 
UpdateProjectInvitationTestCase(TestAbstractViewSet): + """Tests for update project invitation""" + + def setUp(self): + super().setUp() + self._project_create() + self.view = ProjectViewSet.as_view({"put": "invitations"}) + self.invitation = self.project.invitations.create( + email="janedoe@example.com", + role="editor", + status=ProjectInvitation.Status.PENDING, + ) + + def test_authentication(self, mock_send_mail): + """Authentication is required""" + request = self.factory.put("/", data={}) + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 401) + + def test_invalid_project(self, mock_send_mail): + """Invalid project is handled""" + request = self.factory.put("/", data={}, **self.extra) + response = self.view(request, pk=817) + self.assertEqual(response.status_code, 404) + + def test_invalid_invitation_id(self, mock_send_mail): + """Invalid project invitation is handled""" + request = self.factory.put("/", data={}, **self.extra) + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 404) + + def test_only_admins_allowed(self, mock_send_mail): + """Only project admins are allowed to update project invitation""" + # login as editor alice + alice_data = {"username": "alice", "email": "alice@localhost.com"} + alice_profile = self._create_user_profile(alice_data) + self._login_user_and_profile(alice_data) + request = self.factory.put( + "/", data={"invitation_id": self.invitation.id}, **self.extra + ) + + # only owner and manager roles have permission + for role_class in ROLES_ORDERED: + ShareProject(self.project, "alice", role_class.name).save() + self.assertTrue(role_class.user_has_role(alice_profile.user, self.project)) + response = self.view(request, pk=self.project.pk) + + if role_class.name in [ManagerRole.name, OwnerRole.name]: + self.assertEqual(response.status_code, 400) + else: + self.assertEqual(response.status_code, 403) + + 
@override_settings(PROJECT_INVITATION_URL="https://example.com/register") + def test_update(self, mock_send_mail): + """We can update an invitation""" + payload = { + "email": "rihanna@example.com", + "role": "readonly", + "invitation_id": self.invitation.id, + } + request = self.factory.put( + "/", + data=json.dumps(payload), + content_type="application/json", + **self.extra, + ) + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 200) + self.invitation.refresh_from_db() + self.assertEqual(self.invitation.email, "rihanna@example.com") + self.assertEqual(self.invitation.role, "readonly") + self.assertEqual( + response.data, + { + "id": self.invitation.pk, + "email": "rihanna@example.com", + "role": "readonly", + "status": 1, + }, + ) + mock_send_mail.assert_called_once_with( + self.invitation.pk, "https://example.com/register" + ) + + def test_update_role_only(self, mock_send_mail): + """We can update role only""" + payload = { + "email": self.invitation.email, + "role": "readonly", + "invitation_id": self.invitation.id, + } + request = self.factory.put( + "/", + data=json.dumps(payload), + content_type="application/json", + **self.extra, + ) + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 200) + self.invitation.refresh_from_db() + self.assertEqual(self.invitation.role, "readonly") + self.assertEqual( + response.data, + { + "id": self.invitation.pk, + "email": "janedoe@example.com", + "role": "readonly", + "status": 1, + }, + ) + mock_send_mail.assert_not_called() + + @override_settings(PROJECT_INVITATION_URL="https://example.com/register") + def test_update_email_only(self, mock_send_mail): + """We can update email only""" + payload = { + "email": "rihanna@example.com", + "role": self.invitation.role, + "invitation_id": self.invitation.id, + } + request = self.factory.put( + "/", + data=json.dumps(payload), + content_type="application/json", + **self.extra, + ) + response = 
self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 200) + self.invitation.refresh_from_db() + self.assertEqual(self.invitation.email, "rihanna@example.com") + self.assertEqual( + response.data, + { + "id": self.invitation.pk, + "email": "rihanna@example.com", + "role": "editor", + "status": 1, + }, + ) + mock_send_mail.assert_called_once_with( + self.invitation.pk, "https://example.com/register" + ) + + def test_only_pending_allowed(self, mock_send_mail): + """Only pending invitation can be updated""" + for value, _ in ProjectInvitation.Status.choices: + invitation = self.project.invitations.create( + email=f"jandoe-{value}@example.com", + role="editor", + status=value, + ) + payload = { + "email": "rihanna@example.com", + "role": "readonly", + "invitation_id": invitation.id, + } + request = self.factory.put("/", data=payload, **self.extra) + response = self.view(request, pk=self.project.pk) + + if value == ProjectInvitation.Status.PENDING: + self.assertEqual(response.status_code, 200) + + else: + self.assertEqual(response.status_code, 400) + + def test_user_unregistered(self, mock_send_mail): + """Email cannot be updated to that of an existing user""" + alice_data = {"username": "alice", "email": "alice@example.com"} + self._create_user_profile(alice_data) + post_data = { + "email": alice_data["email"], + "role": "editor", + "invitation_id": self.invitation.id, + } + request = self.factory.put( + "/", + data=json.dumps(post_data), + content_type="application/json", + **self.extra, + ) + response = self.view(request, pk=self.project.pk) + print("Helleo", response.data) + self.assertEqual(response.status_code, 400) + self.invitation.refresh_from_db() + # invitation email not updated + self.assertEqual(self.invitation.email, "janedoe@example.com") + + +class RevokeInvitationTestCase(TestAbstractViewSet): + """Tests for revoke invitation""" + + def setUp(self): + super().setUp() + self._project_create() + self.view = 
ProjectViewSet.as_view({"post": "revoke_invitation"}) + + def test_authentication(self): + """Authentication is required""" + request = self.factory.post("/", data={}) + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 401) + + def test_invalid_project(self): + """Invalid project is handled""" + request = self.factory.post("/", data={}, **self.extra) + response = self.view(request, pk=817) + self.assertEqual(response.status_code, 404) + + def test_only_admins_allowed(self): + """Only project admins are allowed to create project invitation""" + # login as editor alice + alice_data = {"username": "alice", "email": "alice@localhost.com"} + alice_profile = self._create_user_profile(alice_data) + self._login_user_and_profile(alice_data) + request = self.factory.post("/", data={}, **self.extra) + + # only owner and manager roles have permission + for role_class in ROLES_ORDERED: + ShareProject(self.project, "alice", role_class.name).save() + self.assertTrue(role_class.user_has_role(alice_profile.user, self.project)) + response = self.view(request, pk=self.project.pk) + + if role_class.name in [ManagerRole.name, OwnerRole.name]: + self.assertEqual(response.status_code, 400) + else: + self.assertEqual(response.status_code, 403) + + def test_revoke_invite(self): + """Invitation is revoked""" + invitation = self.project.invitations.create( + email="jandoe@example.com", role="editor" + ) + post_data = {"invitation_id": invitation.pk} + request = self.factory.post("/", data=post_data, **self.extra) + mocked_now = datetime(2023, 5, 25, 10, 51, 0, tzinfo=pytz.utc) + + with patch("django.utils.timezone.now", Mock(return_value=mocked_now)): + response = self.view(request, pk=self.project.pk) + + self.assertEqual(response.status_code, 200) + invitation.refresh_from_db() + self.assertEqual(invitation.status, ProjectInvitation.Status.REVOKED) + self.assertEqual(invitation.revoked_at, mocked_now) + self.assertEqual(response.data, {"message": 
"Success"}) + + def test_invitation_id_required(self): + """`invitation_id` field is required""" + # blank + post_data = {"invitation_id": ""} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 400) + # missing + request = self.factory.post("/", data={}, **self.extra) + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 400) + + def test_invitation_id_valid(self): + """`invitation_id` should valid""" + post_data = {"invitation_id": "89"} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 400) + + def test_only_pending_allowed(self): + """Only invitations whose status is pending can be revoked""" + + for value, _ in ProjectInvitation.Status.choices: + invitation = self.project.invitations.create( + email=f"jandoe-{value}@example.com", + role="editor", + status=value, + ) + post_data = {"invitation_id": invitation.pk} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request, pk=self.project.pk) + + if value == ProjectInvitation.Status.PENDING: + self.assertEqual(response.status_code, 200) + + else: + self.assertEqual(response.status_code, 400) + + +@patch( + "onadata.libs.serializers.project_invitation_serializer.send_project_invitation_email_async.delay" +) +class ResendInvitationTestCase(TestAbstractViewSet): + """Tests for resend invitation""" + + def setUp(self): + super().setUp() + self._project_create() + self.view = ProjectViewSet.as_view({"post": "resend_invitation"}) + + def test_authentication(self, mock_send_mail): + """Authentication is required""" + request = self.factory.post("/", data={}) + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 401) + mock_send_mail.assert_not_called() + + def test_invalid_project(self, 
mock_send_mail): + """Invalid project is handled""" + request = self.factory.post("/", data={}, **self.extra) + response = self.view(request, pk=817) + self.assertEqual(response.status_code, 404) + mock_send_mail.assert_not_called() + + def test_only_admins_allowed(self, mock_send_mail): + """Only project admins are allowed to resend a project invitation""" + # login as editor alice + alice_data = {"username": "alice", "email": "alice@localhost.com"} + alice_profile = self._create_user_profile(alice_data) + self._login_user_and_profile(alice_data) + request = self.factory.post("/", data={}, **self.extra) + + # only owner and manager have permission + for role_class in ROLES_ORDERED: + ShareProject(self.project, "alice", role_class.name).save() + self.assertTrue(role_class.user_has_role(alice_profile.user, self.project)) + response = self.view(request, pk=self.project.pk) + + if role_class.name in [ManagerRole.name, OwnerRole.name]: + self.assertEqual(response.status_code, 400) + else: + self.assertEqual(response.status_code, 403) + + mock_send_mail.assert_not_called() + + @override_settings(PROJECT_INVITATION_URL="https://example.com/register") + def test_resend_invite(self, mock_send_mail): + """Invitation is resent""" + invitation = self.project.invitations.create( + email="jandoe@example.com", role="editor" + ) + post_data = {"invitation_id": invitation.pk} + mocked_now = datetime(2023, 5, 25, 10, 51, 0, tzinfo=pytz.utc) + request = self.factory.post("/", data=post_data, **self.extra) + + with patch("django.utils.timezone.now", Mock(return_value=mocked_now)): + response = self.view(request, pk=self.project.pk) + + self.assertEqual(response.status_code, 200) + invitation.refresh_from_db() + self.assertEqual(invitation.resent_at, mocked_now) + self.assertEqual(response.data, {"message": "Success"}) + mock_send_mail.assert_called_once_with( + invitation.id, + "https://example.com/register", + ) + + def test_invitation_id_required(self, mock_send_mail): + 
"""`invitation_id` field is required""" + # blank + post_data = {"invitation_id": ""} + request = self.factory.post("/", data=post_data, **self.extra) + view = ProjectViewSet.as_view({"post": "resend_invitation"}) + response = view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 400) + # missing + request = self.factory.post("/", data={}, **self.extra) + view = ProjectViewSet.as_view({"post": "resend_invitation"}) + response = view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 400) + mock_send_mail.assert_not_called() + + def test_invitation_id_valid(self, mock_send_mail): + """`invitation_id` should valid""" + post_data = {"invitation_id": "89"} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 400) + mock_send_mail.assert_not_called() + + def test_only_pending_allowed(self, mock_send_mail): + """Only invitations whose status is pending can be resent""" + + for value, _ in ProjectInvitation.Status.choices: + invitation = self.project.invitations.create( + email=f"jandoe-{value}@example.com", + role="editor", + status=value, + ) + post_data = {"invitation_id": invitation.pk} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request, pk=self.project.pk) + + if value == ProjectInvitation.Status.PENDING: + self.assertEqual(response.status_code, 200) + + else: + self.assertEqual(response.status_code, 400) + + mock_send_mail.assert_called_once() diff --git a/onadata/apps/api/tests/viewsets/test_user_profile_viewset.py b/onadata/apps/api/tests/viewsets/test_user_profile_viewset.py index fc64bfe894..aaed53b8d5 100644 --- a/onadata/apps/api/tests/viewsets/test_user_profile_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_user_profile_viewset.py @@ -26,10 +26,12 @@ from onadata.apps.api.viewsets.connect_viewset import ConnectViewSet from 
onadata.apps.api.viewsets.user_profile_viewset import UserProfileViewSet from onadata.apps.logger.models.instance import Instance +from onadata.apps.logger.models.project_invitation import ProjectInvitation from onadata.apps.main.models import UserProfile from onadata.apps.main.models.user_profile import set_kpi_formbuilder_permissions from onadata.libs.authentication import DigestAuthentication from onadata.libs.serializers.user_profile_serializer import _get_first_last_names +from onadata.libs.permissions import EditorRole User = get_user_model() @@ -91,7 +93,7 @@ def test_user_profile_list(self): "/api/v1/profiles", data=json.dumps(_profile_data()), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request) self.assertEqual(response.status_code, 201) @@ -137,7 +139,7 @@ def test_user_profile_list_with_and_without_users_param(self): "/api/v1/profiles", data=json.dumps(_profile_data()), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request) self.assertEqual(response.status_code, 201) @@ -267,7 +269,7 @@ def test_profile_create(self, mock_send_verification_email): "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request) self.assertEqual(response.status_code, 201) @@ -294,7 +296,7 @@ def _create_user_using_profiles_endpoint(self, data): "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request) self.assertEqual(response.status_code, 201) @@ -422,7 +424,7 @@ def test_profile_require_auth(self): "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request) self.assertEqual(response.status_code, 201) @@ -439,7 +441,7 @@ def test_profile_create_without_last_name(self): "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra 
+ **self.extra, ) response = self.view(request) self.assertEqual(response.status_code, 201) @@ -452,7 +454,7 @@ def test_disallow_profile_create_w_same_username(self): "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request) self.assertEqual(response.status_code, 400) @@ -481,7 +483,7 @@ def test_profile_create_with_malfunctioned_email(self): "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request) self.assertEqual(response.status_code, 201) @@ -514,7 +516,7 @@ def test_profile_create_with_invalid_username(self): "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request) self.assertEqual(response.status_code, 400) @@ -554,7 +556,7 @@ def test_profile_create_missing_name_field(self): "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request) response.render() @@ -678,14 +680,13 @@ def test_partial_update_metadata_field(self): self.assertEqual(profile.metadata, {"b": "caah"}) def test_put_update(self): - data = _profile_data() # create profile request = self.factory.post( "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request) self.assertEqual(response.status_code, 201) @@ -696,7 +697,7 @@ def test_put_update(self): "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request, user="deno") self.assertEqual(response.status_code, 400) @@ -708,7 +709,7 @@ def test_put_update(self): "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request, user="deno") @@ -724,7 +725,7 @@ def test_profile_create_mixed_case(self): 
"/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request) self.assertEqual(response.status_code, 201) @@ -746,13 +747,17 @@ def test_profile_create_mixed_case(self): "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request) self.assertEqual(response.status_code, 400) self.assertIn("%s already exists" % data["username"], response.data["username"]) - @override_settings(AUTH_PASSWORD_VALIDATORS=[{"NAME": "onadata.libs.utils.validators.PreviousPasswordValidator"}]) + @override_settings( + AUTH_PASSWORD_VALIDATORS=[ + {"NAME": "onadata.libs.utils.validators.PreviousPasswordValidator"} + ] + ) def test_change_password(self): view = UserProfileViewSet.as_view({"post": "change_password"}) current_password = "bobbob" @@ -764,26 +769,39 @@ def test_change_password(self): request = self.factory.post("/", data=post_data, **self.extra) response = view(request, user="bob") self.assertEqual(response.status_code, 400) - self.assertEqual(response.data, {"error": "Invalid password. You have 9 attempts left."}) + self.assertEqual( + response.data, {"error": "Invalid password. 
You have 9 attempts left."} + ) post_data = {"current_password": current_password, "new_password": ""} request = self.factory.post("/", data=post_data, **self.extra) response = view(request, user="bob") self.assertEqual(response.status_code, 400) - self.assertEqual(response.data, {"error": "current_password and new_password fields cannot be blank"}) + self.assertEqual( + response.data, + {"error": "current_password and new_password fields cannot be blank"}, + ) post_data = {"current_password": "", "new_password": new_password} request = self.factory.post("/", data=post_data, **self.extra) response = view(request, user="bob") self.assertEqual(response.status_code, 400) - self.assertEqual(response.data, {"error": "current_password and new_password fields cannot be blank"}) + self.assertEqual( + response.data, + {"error": "current_password and new_password fields cannot be blank"}, + ) # Assert new password can not be the same as the current password - post_data = {"current_password": current_password, "new_password": current_password} + post_data = { + "current_password": current_password, + "new_password": current_password, + } request = self.factory.post("/", data=post_data, **self.extra) response = view(request, user="bob") self.assertEqual(response.status_code, 400) - self.assertEqual(response.data, {"errors": ["You cannot use a previously used password."]}) + self.assertEqual( + response.data, {"errors": ["You cannot use a previously used password."]} + ) # Assert password is changed when current_password and new_password meet criteria post_data = {"current_password": current_password, "new_password": new_password} @@ -813,10 +831,14 @@ def test_change_password(self): # Assert user can not set old password as their password post_data = {"current_password": new_password, "new_password": current_password} - request = self.factory.post("/", data=post_data, **{"HTTP_AUTHORIZATION": f"Token {new_token}"}) + request = self.factory.post( + "/", data=post_data, 
**{"HTTP_AUTHORIZATION": f"Token {new_token}"} + ) response = view(request, user="bob") self.assertEqual(response.status_code, 400) - self.assertEqual(response.data, {"errors": ["You cannot use a previously used password."]}) + self.assertEqual( + response.data, {"errors": ["You cannot use a previously used password."]} + ) def test_change_password_wrong_current_password(self): view = UserProfileViewSet.as_view({"post": "change_password"}) @@ -851,7 +873,7 @@ def test_profile_create_with_name(self): "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request) @@ -891,7 +913,7 @@ def test_twitter_username_validation(self): "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request) @@ -914,7 +936,7 @@ def test_twitter_username_validation(self): "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request) @@ -934,7 +956,7 @@ def test_put_patch_method_on_names(self): "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request) self.assertEqual(response.status_code, 201) @@ -946,7 +968,7 @@ def test_put_patch_method_on_names(self): "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request, user="deno") @@ -975,7 +997,7 @@ def test_send_email_activation_api(self, mock_send_mail): "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request) self.assertEqual(response.status_code, 201) @@ -1040,7 +1062,7 @@ def test_update_first_last_name_password_not_affected(self): "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request, 
user=self.user.username) @@ -1090,7 +1112,7 @@ def test_partial_update_unique_email_api(self, mock_send_verification_email): "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **deno_extra + **deno_extra, ) response = self.view(request, user=rp.user.username) @@ -1109,7 +1131,7 @@ def test_partial_update_unique_email_api(self, mock_send_verification_email): "/api/v1/profiles", data=json.dumps(_profile_data()), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request) self.assertEqual(response.status_code, 201) @@ -1119,7 +1141,7 @@ def test_partial_update_unique_email_api(self, mock_send_verification_email): "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request, user=user.username) @@ -1138,7 +1160,7 @@ def test_profile_create_fails_with_long_first_and_last_names(self): "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request) self.assertEqual( @@ -1155,7 +1177,6 @@ def test_profile_create_fails_with_long_first_and_last_names(self): def grant_perms_form_builder( self, url, request ): # pylint: disable=no-self-use,unused-argument - assert "X-ONADATA-KOBOCAT-AUTH" in request.headers response = requests.Response() @@ -1402,7 +1423,7 @@ def test_account_activation_emails(self, mock_send_mail): "/api/v1/profiles", data=json.dumps(data), content_type="application/json", - **self.extra + **self.extra, ) response = self.view(request) self.assertEqual(response.status_code, 201) @@ -1433,3 +1454,107 @@ def test_account_activation_emails(self, mock_send_mail): ), ] ) + + @override_settings(CELERY_TASK_ALWAYS_EAGER=True) + @override_settings(ENABLE_EMAIL_VERIFICATION=True) + @patch( + ( + "onadata.libs.serializers.user_profile_serializer." 
+ "send_verification_email.delay" + ) + ) + def test_accept_invitaton(self, mock_send_email): + """An invitation is accepted successfully""" + self._project_create() + invitation = ProjectInvitation.objects.create( + email="janedoe@example.com", + project=self.project, + role="editor", + ) + # user registers using same email as invitation email + data = _profile_data() + del data["name"] + data["email"] = invitation.email + request = self.factory.post( + "/api/v1/profiles", + data=json.dumps(data), + content_type="application/json", + **self.extra, + ) + response = self.view(request) + self.assertEqual(response.status_code, 201) + user = User.objects.get(username="deno") + mock_send_email.assert_called_once() + invitation.refresh_from_db() + self.assertEqual(invitation.status, ProjectInvitation.Status.ACCEPTED) + self.assertTrue(EditorRole.user_has_role(user, self.project)) + + @override_settings(CELERY_TASK_ALWAYS_EAGER=True) + @override_settings(ENABLE_EMAIL_VERIFICATION=True) + @override_settings( + AUTH_PASSWORD_VALIDATORS=[ + { + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", # noqa + }, + ] + ) + @patch( + ( + "onadata.libs.serializers.user_profile_serializer." 
+ "send_verification_email.delay" + ) + ) + def test_password_optional(self, mock_send_verification_email): + """Field `password` is optional""" + # password not provided + data = _profile_data() + del data["name"] + del data["password"] + request = self.factory.post( + "/api/v1/profiles", + data=json.dumps(data), + content_type="application/json", + **self.extra, + ) + response = self.view(request) + self.assertEqual(response.status_code, 201) + profile = UserProfile.objects.get(user__username=data["username"]) + data["id"] = profile.user.pk + data["gravatar"] = profile.gravatar + data["url"] = "http://testserver/api/v1/profiles/deno" + data["user"] = "http://testserver/api/v1/users/deno" + data["metadata"] = {} + data["metadata"]["last_password_edit"] = profile.metadata["last_password_edit"] + data["joined_on"] = profile.user.date_joined + data["name"] = "%s %s" % ("Dennis", "erama") + self.assertEqual(response.data, data) + self.assertTrue(mock_send_verification_email.called) + user = User.objects.get(username="deno") + self.assertTrue(user.is_active) + # password blank + data = _profile_data() + user.delete() + del data["name"] + data["password"] = "" + request = self.factory.post( + "/api/v1/profiles", + data=json.dumps(data), + content_type="application/json", + **self.extra, + ) + response = self.view(request) + self.assertEqual(response.status_code, 201) + profile = UserProfile.objects.get(user__username=data["username"]) + data["id"] = profile.user.pk + data["gravatar"] = profile.gravatar + data["url"] = "http://testserver/api/v1/profiles/deno" + data["user"] = "http://testserver/api/v1/users/deno" + data["metadata"] = {} + data["metadata"]["last_password_edit"] = profile.metadata["last_password_edit"] + data["joined_on"] = profile.user.date_joined + data["name"] = "%s %s" % ("Dennis", "erama") + del data["password"] + self.assertEqual(response.data, data) + self.assertTrue(mock_send_verification_email.called) + user = User.objects.get(username="deno") + 
self.assertTrue(user.is_active) diff --git a/onadata/apps/api/urls/v1_urls.py b/onadata/apps/api/urls/v1_urls.py index af7f9b84bc..c27e1069f6 100644 --- a/onadata/apps/api/urls/v1_urls.py +++ b/onadata/apps/api/urls/v1_urls.py @@ -133,7 +133,7 @@ def get_urls(self): router.register(r"orgs", OrganizationProfileViewSet) router.register(r"osm", OsmViewSet, basename="osm") router.register(r"private-data", AuthenticatedDataViewSet, basename="private-data") -router.register(r"profiles", UserProfileViewSet) +router.register(r"profiles", UserProfileViewSet, basename="userprofile") router.register(r"projects", ProjectViewSet) router.register(r"restservices", RestServicesViewSet, basename="restservices") router.register(r"stats/messaging", MessagingStatsViewSet, basename="messagingstats") @@ -146,6 +146,6 @@ def get_urls(self): ) router.register(r"submissions", XFormSubmissionViewSet, basename="submissions") router.register(r"teams", TeamViewSet) -router.register(r"user", ConnectViewSet) +router.register(r"user", ConnectViewSet, basename="connect") router.register(r"users", UserViewSet, basename="user") router.register(r"widgets", WidgetViewSet, basename="widgets") diff --git a/onadata/apps/api/viewsets/project_viewset.py b/onadata/apps/api/viewsets/project_viewset.py index d30386312d..b5d168708a 100644 --- a/onadata/apps/api/viewsets/project_viewset.py +++ b/onadata/apps/api/viewsets/project_viewset.py @@ -5,6 +5,7 @@ from django.core.cache import cache from django.core.mail import send_mail from django.shortcuts import get_object_or_404 +from django.utils.translation import gettext as _ from rest_framework import status from rest_framework.decorators import action @@ -14,7 +15,7 @@ from onadata.apps.api import tools as utils from onadata.apps.api.permissions import ProjectPermissions from onadata.apps.api.tools import get_baseviewset_class -from onadata.apps.logger.models import Project, XForm +from onadata.apps.logger.models import Project, XForm, ProjectInvitation from 
onadata.apps.main.models import UserProfile from onadata.apps.main.models.meta_data import MetaData from onadata.libs.data import strtobool @@ -39,6 +40,11 @@ XFormCreateSerializer, XFormSerializer, ) +from onadata.libs.serializers.project_invitation_serializer import ( + ProjectInvitationSerializer, + ProjectInvitationRevokeSerializer, + ProjectInvitationResendSerializer, +) from onadata.libs.utils.cache_tools import PROJ_OWNER_CACHE, safe_delete from onadata.libs.utils.common_tools import merge_dicts from onadata.libs.utils.export_tools import str_to_bool @@ -76,6 +82,16 @@ def get_serializer_class(self): """Return BaseProjectSerializer class when listing projects.""" if self.action == "list": return BaseProjectSerializer + + if self.action == "invitations": + return ProjectInvitationSerializer + + if self.action == "revoke_invitation": + return ProjectInvitationRevokeSerializer + + if self.action == "resend_invitation": + return ProjectInvitationResendSerializer + return super().get_serializer_class() def get_queryset(self): @@ -226,8 +242,74 @@ def star(self, request, *args, **kwargs): return Response(status=status.HTTP_204_NO_CONTENT) + @action( + detail=True, + methods=["GET", "POST", "PUT"], + url_path="invitations", + ) + def invitations(self, request, *args, **kwargs): + """List, Create. 
Update project invitations""" + project = self.get_object() + method = request.method.upper() + + if method == "GET": + invitations = project.invitations.all() + invitation_status = request.query_params.get("status") + + if invitation_status: + invitations = invitations.filter(status=invitation_status) + + serializer = self.get_serializer(invitations, many=True) + return Response(serializer.data) + + if method == "POST": + draft_request_data = self.request.data.copy() + draft_request_data["project"] = project.pk + data = draft_request_data + serializer = self.get_serializer(data=data) + serializer.is_valid(raise_exception=True) + serializer.save() + return Response(serializer.data) + + if method == "PUT": + invitation_id = request.data.get("invitation_id") + invitation = get_object_or_404( + ProjectInvitation, + pk=invitation_id, + ) + draft_request_data = self.request.data.copy() + draft_request_data["project"] = project.pk + data = draft_request_data + serializer = self.get_serializer( + invitation, + data=data, + ) + serializer.is_valid(raise_exception=True) + serializer.save() + return Response(serializer.data) + + return Response(status=status.HTTP_204_NO_CONTENT) + + @action(detail=True, methods=["POST"], url_path="revoke-invitation") + def revoke_invitation(self, request, *args, **kwargs): + """Revoke a project invitation object""" + self.get_object() + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + serializer.save() + return Response({"message": _("Success")}) + + @action(detail=True, methods=["POST"], url_path="resend-invitation") + def resend_invitation(self, request, *args, **kwargs): + """Resend a project invitation object""" + self.get_object() + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + serializer.save() + return Response({"message": _("Success")}) + def destroy(self, request, *args, **kwargs): - """ "Soft deletes a project""" + """Soft deletes a 
project""" project = self.get_object() user = request.user project.soft_delete(user) diff --git a/onadata/apps/logger/migrations/0005_projectinvitation.py b/onadata/apps/logger/migrations/0005_projectinvitation.py new file mode 100644 index 0000000000..0256e22cd8 --- /dev/null +++ b/onadata/apps/logger/migrations/0005_projectinvitation.py @@ -0,0 +1,32 @@ +# Generated by Django 3.2.18 on 2023-05-17 10:05 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('logger', '0004_update_instance_geoms'), + ] + + operations = [ + migrations.CreateModel( + name='ProjectInvitation', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('email', models.EmailField(max_length=254)), + ('role', models.CharField(max_length=100)), + ('status', models.PositiveSmallIntegerField(choices=[(1, 'Pending'), (2, 'Accepted'), (3, 'Revoked')], default=1)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('accepted_at', models.DateTimeField(blank=True, null=True)), + ('resent_at', models.DateTimeField(blank=True, null=True)), + ('revoked_at', models.DateTimeField(blank=True, null=True)), + ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='invitations', to='logger.project')), + ], + options={ + 'abstract': False, + 'unique_together': {('email', 'project', 'status')}, + }, + ), + ] diff --git a/onadata/apps/logger/migrations/0006_auto_20230622_0533.py b/onadata/apps/logger/migrations/0006_auto_20230622_0533.py new file mode 100644 index 0000000000..bc89cc6607 --- /dev/null +++ b/onadata/apps/logger/migrations/0006_auto_20230622_0533.py @@ -0,0 +1,26 @@ +# Generated by Django 3.2.19 on 2023-06-22 09:33 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + 
migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('logger', '0005_projectinvitation'), + ] + + operations = [ + migrations.AddField( + model_name='projectinvitation', + name='accepted_by', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='project_invitations_accepted', to=settings.AUTH_USER_MODEL), + ), + migrations.AddField( + model_name='projectinvitation', + name='invited_by', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='project_invitations_created', to=settings.AUTH_USER_MODEL), + ), + ] diff --git a/onadata/apps/logger/migrations/0007_alter_projectinvitation_unique_together.py b/onadata/apps/logger/migrations/0007_alter_projectinvitation_unique_together.py new file mode 100644 index 0000000000..fa68191e20 --- /dev/null +++ b/onadata/apps/logger/migrations/0007_alter_projectinvitation_unique_together.py @@ -0,0 +1,17 @@ +# Generated by Django 3.2.19 on 2023-07-11 12:36 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('logger', '0006_auto_20230622_0533'), + ] + + operations = [ + migrations.AlterUniqueTogether( + name='projectinvitation', + unique_together=set(), + ), + ] diff --git a/onadata/apps/logger/models/__init__.py b/onadata/apps/logger/models/__init__.py index 8ddb3ffa20..3f76ae73f4 100644 --- a/onadata/apps/logger/models/__init__.py +++ b/onadata/apps/logger/models/__init__.py @@ -16,3 +16,4 @@ from onadata.apps.logger.models.xform import XForm # noqa from onadata.apps.logger.models.xform_version import XFormVersion # noqa from onadata.apps.logger.xform_instance_parser import InstanceParseError # noqa +from onadata.apps.logger.models.project_invitation import ProjectInvitation # noqa diff --git a/onadata/apps/logger/models/project_invitation.py b/onadata/apps/logger/models/project_invitation.py new file mode 100644 index 0000000000..08003b81a2 --- /dev/null +++ 
b/onadata/apps/logger/models/project_invitation.py @@ -0,0 +1,69 @@ +""" +ProjectInvitation class +""" +from django.db import models +from django.contrib.auth import get_user_model +from django.utils.translation import gettext_lazy as _ +from django.utils import timezone +from onadata.apps.logger.models.project import Project +from onadata.libs.models.base_model import BaseModel + +User = get_user_model() + + +class ProjectInvitation(BaseModel): + """ProjectInvitation model class""" + + class Meta(BaseModel.Meta): + app_label = "logger" + + class Status(models.IntegerChoices): + """Choices for `status` field""" + + PENDING = 1, _("Pending") + ACCEPTED = 2, _("Accepted") + REVOKED = 3, _("Revoked") + + email = models.EmailField() + project = models.ForeignKey( + Project, on_delete=models.CASCADE, related_name="invitations" + ) + role = models.CharField(max_length=100) + status = models.PositiveSmallIntegerField( + choices=Status.choices, default=Status.PENDING + ) + invited_by = models.ForeignKey( + User, + related_name="project_invitations_created", + null=True, + blank=True, + on_delete=models.SET_NULL, + ) + accepted_by = models.ForeignKey( + User, + related_name="project_invitations_accepted", + null=True, + blank=True, + on_delete=models.SET_NULL, + ) + created_at = models.DateTimeField(auto_now_add=True) + accepted_at = models.DateTimeField(null=True, blank=True) + resent_at = models.DateTimeField(null=True, blank=True) + revoked_at = models.DateTimeField(null=True, blank=True) + + def __str__(self): + return f"{self.email}|{self.project}" + + def accept(self, accepted_by=None, accepted_at=None) -> None: + """Accept invitation""" + + self.accepted_at = accepted_at or timezone.now() + self.accepted_by = accepted_by + self.status = ProjectInvitation.Status.ACCEPTED + self.save() + + def revoke(self, revoked_at=None) -> None: + """Revoke invitation""" + self.revoked_at = revoked_at or timezone.now() + self.status = ProjectInvitation.Status.REVOKED + self.save() 
diff --git a/onadata/apps/logger/tests/models/test_project_invitation.py b/onadata/apps/logger/tests/models/test_project_invitation.py new file mode 100644 index 0000000000..0c0646c1e1 --- /dev/null +++ b/onadata/apps/logger/tests/models/test_project_invitation.py @@ -0,0 +1,127 @@ +""" +Tests for ProjectInvitation model +""" +from datetime import datetime +from unittest.mock import patch, Mock +import pytz +from onadata.apps.logger.models import ProjectInvitation +from onadata.apps.main.tests.test_base import TestBase +from onadata.libs.utils.user_auth import get_user_default_project + + +class ProjectInvitationTestCase(TestBase): + + """Tests for ProjectInvitation model""" + + def setUp(self) -> None: + super().setUp() + + self.project = get_user_default_project(self.user) + + def test_creation(self): + """We can create a ProjectInvitation object""" + created_at = datetime(2023, 5, 17, 14, 21, 0, tzinfo=pytz.utc) + resent_at = datetime(2023, 5, 17, 14, 24, 0, tzinfo=pytz.utc) + accepted_at = datetime(2023, 5, 17, 14, 25, 0, tzinfo=pytz.utc) + revoked_at = datetime(2023, 5, 17, 14, 26, 0, tzinfo=pytz.utc) + jane = self._create_user("jane", "1234") + + with patch("django.utils.timezone.now", Mock(return_value=created_at)): + invitation = ProjectInvitation.objects.create( + email="janedoe@example.com", + project=self.project, + role="editor", + status=ProjectInvitation.Status.ACCEPTED, + accepted_at=accepted_at, + resent_at=resent_at, + revoked_at=revoked_at, + invited_by=self.user, + accepted_by=jane, + ) + + self.assertEqual(f"{invitation}", f"janedoe@example.com|{self.project}") + self.assertEqual(invitation.email, "janedoe@example.com") + self.assertEqual(invitation.project, self.project) + self.assertEqual(invitation.role, "editor") + self.assertEqual(invitation.status, ProjectInvitation.Status.ACCEPTED) + self.assertEqual(invitation.created_at, created_at) + self.assertEqual(invitation.accepted_at, accepted_at) + self.assertEqual(invitation.resent_at, 
resent_at) + self.assertEqual(invitation.revoked_at, revoked_at) + self.assertEqual(invitation.invited_by, self.user) + self.assertEqual(invitation.accepted_by, jane) + + def test_defaults(self): + """Defaults for optional fields are correct""" + invitation = ProjectInvitation.objects.create( + email="janedoe@example.com", + project=self.project, + role="editor", + ) + self.assertIsNone(invitation.invited_by) + self.assertIsNone(invitation.accepted_by) + self.assertIsNone(invitation.accepted_at) + self.assertIsNone(invitation.revoked_at) + self.assertIsNone(invitation.resent_at) + self.assertEqual(invitation.status, ProjectInvitation.Status.PENDING) + + def test_revoke(self): + """Calling revoke method works correctly""" + mocked_now = datetime(2023, 5, 25, 11, 17, 0, tzinfo=pytz.utc) + + with patch("django.utils.timezone.now", Mock(return_value=mocked_now)): + invitation = ProjectInvitation.objects.create( + email="janedoe@example.com", + project=self.project, + role="editor", + status=ProjectInvitation.Status.PENDING, + ) + invitation.revoke() + invitation.refresh_from_db() + self.assertEqual(invitation.revoked_at, mocked_now) + self.assertEqual(invitation.status, ProjectInvitation.Status.REVOKED) + + # setting revoked_at explicitly works + revoked_at = datetime(2023, 5, 10, 11, 17, 0, tzinfo=pytz.utc) + invitation = ProjectInvitation.objects.create( + email="john@example.com", + project=self.project, + role="editor", + status=ProjectInvitation.Status.PENDING, + ) + invitation.revoke(revoked_at=revoked_at) + invitation.refresh_from_db() + self.assertEqual(invitation.revoked_at, revoked_at) + self.assertEqual(invitation.status, ProjectInvitation.Status.REVOKED) + + def test_accept(self): + """Calling accept method works correctly""" + mocked_now = datetime(2023, 5, 25, 11, 17, 0, tzinfo=pytz.utc) + jane = self._create_user("jane", "1234") + + with patch("django.utils.timezone.now", Mock(return_value=mocked_now)): + invitation = ProjectInvitation.objects.create( + 
email="janedoe@example.com", + project=self.project, + role="editor", + status=ProjectInvitation.Status.PENDING, + ) + invitation.accept() + invitation.refresh_from_db() + self.assertEqual(invitation.accepted_at, mocked_now) + self.assertIsNone(invitation.accepted_by) + self.assertEqual(invitation.status, ProjectInvitation.Status.ACCEPTED) + + # setting accepted_at explicitly works + accepted_at = datetime(2023, 5, 10, 11, 17, 0, tzinfo=pytz.utc) + invitation = ProjectInvitation.objects.create( + email="john@example.com", + project=self.project, + role="editor", + status=ProjectInvitation.Status.PENDING, + ) + invitation.accept(accepted_at=accepted_at, accepted_by=jane) + invitation.refresh_from_db() + self.assertEqual(invitation.accepted_at, accepted_at) + self.assertEqual(invitation.accepted_by, jane) + self.assertEqual(invitation.status, ProjectInvitation.Status.ACCEPTED) diff --git a/onadata/apps/main/signals.py b/onadata/apps/main/signals.py index 824d03b7d6..f7e1febc15 100644 --- a/onadata/apps/main/signals.py +++ b/onadata/apps/main/signals.py @@ -4,10 +4,14 @@ """ from django.conf import settings from django.contrib.auth import get_user_model +from django.db.models.signals import post_save +from django.dispatch import receiver from django.template.loader import render_to_string +from django.utils import timezone from onadata.libs.utils.email import send_generic_email - +from onadata.libs.utils.model_tools import queryset_iterator +from onadata.apps.logger.models import ProjectInvitation User = get_user_model() @@ -62,3 +66,26 @@ def send_activation_email(sender, instance=None, **kwargs): send_generic_email( instance.email, email, f"{deployment_name} account activated" ) + + +@receiver(post_save, sender=User, dispatch_uid="accept_project_invitation") +def accept_project_invitation(sender, instance=None, created=False, **kwargs): + """Accept project invitations that match user email""" + if created: + invitation_qs = ProjectInvitation.objects.filter( + 
email=instance.email, + status=ProjectInvitation.Status.PENDING, + ) + now = timezone.now() + # ShareProject needs to be imported inline because otherwise we get + # django.core.exceptions.AppRegistryNotReady: Apps aren't loaded yet. + # pylint: disable=import-outside-toplevel + from onadata.libs.models.share_project import ShareProject + + for invitation in queryset_iterator(invitation_qs): + ShareProject( + invitation.project, + instance.username, + invitation.role, + ).save() + invitation.accept(accepted_at=now, accepted_by=instance) diff --git a/onadata/apps/main/tests/test_signals.py b/onadata/apps/main/tests/test_signals.py new file mode 100644 index 0000000000..0484910d29 --- /dev/null +++ b/onadata/apps/main/tests/test_signals.py @@ -0,0 +1,78 @@ + +"""Tests for onadata.apps.main.signals module""" + +from datetime import datetime +from unittest.mock import Mock, patch + +import pytz + +from django.contrib.auth import get_user_model + +from onadata.apps.main.tests.test_base import TestBase +from onadata.apps.logger.models import ProjectInvitation, Project +from onadata.libs.utils.user_auth import get_user_default_project +from onadata.libs.permissions import EditorRole, ManagerRole + +User = get_user_model() + + +class AcceptProjectInvitationTestCase(TestBase): + """Tests for accept_project_inviation""" + + def setUp(self): + super().setUp() + self.project = get_user_default_project(self.user) + self.invitation = ProjectInvitation.objects.create( + email="mike@example.com", + project=self.project, + role="editor", + ) + self.mocked_now = datetime(2023, 6, 21, 14, 29, 0, tzinfo=pytz.utc) + + def test_accept_invitation(self): + """Accept invitation works""" + john_invitation = ProjectInvitation.objects.create( + email="johndoe@example.com", + project=self.project, + role="manager", + ) + project = Project.objects.create( + name="Project 2", + created_by=self.user, + organization=self.user, + ) + mike_invitation = ProjectInvitation.objects.create( + 
email="mike@example.com", + project=project, + role="manager", + ) + + with patch("django.utils.timezone.now", Mock(return_value=self.mocked_now)): + mike = User.objects.create(username="mike", email="mike@example.com") + self.invitation.refresh_from_db() + self.assertEqual(self.invitation.status, ProjectInvitation.Status.ACCEPTED) + self.assertEqual(self.invitation.accepted_at, self.mocked_now) + self.assertEqual(self.invitation.accepted_by, mike) + self.assertTrue(EditorRole.user_has_role(mike, self.project)) + # other invitations are not touched + john_invitation.refresh_from_db() + self.assertEqual(john_invitation.status, ProjectInvitation.Status.PENDING) + # other projects are shared + mike_invitation.refresh_from_db() + self.assertEqual(mike_invitation.status, ProjectInvitation.Status.ACCEPTED) + self.assertEqual(mike_invitation.accepted_at, self.mocked_now) + self.assertEqual(mike_invitation.accepted_by, mike) + self.assertTrue(ManagerRole.user_has_role(mike, project)) + + def test_only_pending_accepted(self): + """Only pending invitations are accepted""" + self.invitation.status = ProjectInvitation.Status.REVOKED + self.invitation.save() + + with patch("django.utils.timezone.now", Mock(return_value=self.mocked_now)): + mike = User.objects.create(username="mike", email="mike@example.com") + self.invitation.refresh_from_db() + self.assertEqual(self.invitation.status, ProjectInvitation.Status.REVOKED) + self.assertIsNone(self.invitation.accepted_at) + self.assertIsNone(self.invitation.accepted_by) + self.assertFalse(EditorRole.user_has_role(mike, self.project)) diff --git a/onadata/libs/serializers/project_invitation_serializer.py b/onadata/libs/serializers/project_invitation_serializer.py new file mode 100644 index 0000000000..31f0601ecd --- /dev/null +++ b/onadata/libs/serializers/project_invitation_serializer.py @@ -0,0 +1,166 @@ +import re +from django.conf import settings +from django.utils.translation import gettext as _ +from django.contrib.auth import 
get_user_model +from django.utils import timezone +from rest_framework import serializers +from onadata.apps.logger.models import ProjectInvitation +from onadata.libs.permissions import ROLES +from onadata.apps.api.tasks import send_project_invitation_email_async +from onadata.libs.utils.email import get_project_invitation_url + + +User = get_user_model() + + +class ProjectInvitationSerializer(serializers.ModelSerializer): + """Serializer for ProjectInvitation model object""" + + class Meta: + model = ProjectInvitation + fields = ( + "id", + "email", + "project", + "role", + "status", + ) + read_only_fields = ("status",) + extra_kwargs = {"project": {"write_only": True}} + + def validate_email(self, email): + """Validate `email` field""" + # Regular expression pattern for email validation + pattern = r"^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$" + err_msg = "Invalid email." + + # Check if the email matches the pattern + if not re.match(pattern, email): + raise serializers.ValidationError(_(err_msg)) + + domain_whitelist = getattr( + settings, "PROJECT_INVITATION_EMAIL_DOMAIN_WHITELIST", [] + ) + + if domain_whitelist: + # Extract the domain from the email address + domain = email.split("@")[1] + + # Check if the domain matches "foo.com" + if not domain.lower() in [ + allowed_domain.lower() for allowed_domain in domain_whitelist + ]: + raise serializers.ValidationError(_(err_msg)) + + return email + + def validate_role(self, role): + """Validate `role` field""" + if role not in ROLES: + raise serializers.ValidationError(_("Invalid role.")) + + return role + + def _validate_email_exists(self, email): + """Email should not be of an existing user""" + if User.objects.filter(email=email).exists(): + raise serializers.ValidationError(_("User already exists")) + + def _send_project_invitation_email(self, invitation_id: str) -> None: + """Send project invitation email""" + project_activation_url = get_project_invitation_url(self.context["request"]) + 
send_project_invitation_email_async.delay(invitation_id, project_activation_url) + + def create(self, validated_data): + if ProjectInvitation.objects.filter( + email=validated_data["email"], + project=validated_data["project"], + status=ProjectInvitation.Status.PENDING, + ).exists(): + raise serializers.ValidationError(_("Invitation already exists.")) + + self._validate_email_exists(validated_data["email"]) + instance = super().create(validated_data) + instance.invited_by = self.context["request"].user + instance.save() + self._send_project_invitation_email(instance.id) + + return instance + + def update(self, instance, validated_data): + # only a pending invitation can be updated + if instance.status != ProjectInvitation.Status.PENDING: + raise serializers.ValidationError( + _("Only pending invitations can be updated") + ) + + self._validate_email_exists(validated_data["email"]) + has_email_changed = instance.email != validated_data["email"] + updated_instance = super().update(instance, validated_data) + + if has_email_changed: + self._send_project_invitation_email(instance.id) + + return updated_instance + + +# pylint: disable=abstract-method +class ProjectInvitationUpdateBaseSerializer(serializers.Serializer): + """Base serializer for project invitation updates""" + + invitation_id = serializers.IntegerField() + + def validate_invitation_id(self, invitation_id): + """Validate `invitation_id` field""" + try: + ProjectInvitation.objects.get(pk=invitation_id) + + except ProjectInvitation.DoesNotExist as e: + raise serializers.ValidationError(_("Invalid invitation_id.")) from e + + return invitation_id + + +class ProjectInvitationRevokeSerializer(ProjectInvitationUpdateBaseSerializer): + """Serializer for revoking a project invitation""" + + def validate_invitation_id(self, invitation_id): + super().validate_invitation_id(invitation_id) + + invitation = ProjectInvitation.objects.get(pk=invitation_id) + + if invitation.status != ProjectInvitation.Status.PENDING: + 
raise serializers.ValidationError( + _("You cannot revoke an invitation which is not pending") + ) + + return invitation_id + + def save(self, **kwargs): + invitation_id = self.validated_data.get("invitation_id") + invitation = ProjectInvitation.objects.get(pk=invitation_id) + invitation.revoke() + + +class ProjectInvitationResendSerializer(ProjectInvitationUpdateBaseSerializer): + """Serializer for resending a project invitation""" + + def validate_invitation_id(self, invitation_id): + super().validate_invitation_id(invitation_id) + + invitation = ProjectInvitation.objects.get(pk=invitation_id) + + if invitation.status != ProjectInvitation.Status.PENDING: + raise serializers.ValidationError( + _("You cannot resend an invitation which is not pending") + ) + + return invitation_id + + def save(self, **kwargs): + invitation_id = self.validated_data.get("invitation_id") + invitation = ProjectInvitation.objects.get(pk=invitation_id) + invitation.resent_at = timezone.now() + invitation.save() + project_activation_url = get_project_invitation_url(self.context["request"]) + send_project_invitation_email_async.delay(invitation_id, project_activation_url) diff --git a/onadata/libs/serializers/user_profile_serializer.py b/onadata/libs/serializers/user_profile_serializer.py index 8df4508239..36ea893b9b 100644 --- a/onadata/libs/serializers/user_profile_serializer.py +++ b/onadata/libs/serializers/user_profile_serializer.py @@ -22,7 +22,9 @@ from rest_framework import serializers from onadata.apps.api.models.temp_token import TempToken -from onadata.apps.api.tasks import send_verification_email +from onadata.apps.api.tasks import ( + send_verification_email, +) from onadata.apps.main.forms import RegistrationFormUserProfile from onadata.apps.main.models import UserProfile from onadata.libs.authentication import expired @@ -30,7 +32,10 @@ from onadata.libs.serializers.fields.json_field import JsonField from onadata.libs.utils.analytics import TrackObjectEvent from 
onadata.libs.utils.cache_tools import IS_ORG -from onadata.libs.utils.email import get_verification_email_data, get_verification_url +from onadata.libs.utils.email import ( + get_verification_email_data, + get_verification_url, +) RESERVED_NAMES = RegistrationFormUserProfile.RESERVED_USERNAMES LEGAL_USERNAMES_REGEX = RegistrationFormUserProfile.legal_usernames_re @@ -175,8 +180,8 @@ def __init__(self, *args, **kwargs): request = self.context.get("request") if ( isinstance(self.instance, QuerySet) - or (request and request.user != self.instance.user) - or not request + or (request and request.user != self.instance.user) # noqa W503 + or not request # noqa W503 ): for field in getattr(self.Meta, "owner_only_fields"): self.fields.pop(field) @@ -206,9 +211,9 @@ def to_representation(self, instance): if ( "email" in ret - and request is None - or request.user - and not request.user.has_perm(CAN_VIEW_PROFILE, instance) + and request is None # noqa W503 + or request.user # noqa W503 + and not request.user.has_perm(CAN_VIEW_PROFILE, instance) # noqa W503 ): del ret["email"] @@ -266,36 +271,37 @@ def create(self, validated_data): request = self.context.get("request") metadata = {} username = params.get("username") + password = params.get("password1", "") site = Site.objects.get(pk=settings.SITE_ID) new_user = None + try: new_user = RegistrationProfile.objects.create_inactive_user( username=username, - password=params.get("password1"), + password=password, email=params.get("email"), site=site, send_email=settings.SEND_EMAIL_ACTIVATION_API, ) - validate_password(params.get("password1"), user=new_user) except IntegrityError as e: raise serializers.ValidationError( _(f"User account {username} already exists") ) from e + + try: + validate_password(password, user=new_user) + except ValidationError as e: # Delete created user object if created # to allow re-registration if new_user: new_user.delete() raise serializers.ValidationError({"password": e.messages}) + new_user.is_active 
= True new_user.first_name = params.get("first_name") new_user.last_name = params.get("last_name") new_user.save() - - if getattr(settings, "ENABLE_EMAIL_VERIFICATION", False): - redirect_url = params.get("redirect_url") - _send_verification_email(redirect_url, new_user, request) - created_by = request.user created_by = None if created_by.is_anonymous else created_by metadata["last_password_edit"] = timezone.now().isoformat() @@ -312,6 +318,10 @@ def create(self, validated_data): ) profile.save() + if getattr(settings, "ENABLE_EMAIL_VERIFICATION", False): + redirect_url = params.get("redirect_url") + _send_verification_email(redirect_url, new_user, request) + return profile def validate_username(self, value): @@ -377,8 +387,8 @@ def validate(self, attrs): params = _get_registration_params(attrs) if ( not self.instance - and params.get("name") is None - and params.get("first_name") is None + and params.get("name") is None # noqa W503 + and params.get("first_name") is None # noqa W503 ): raise serializers.ValidationError( {"name": _("Either name or first_name should be provided")} diff --git a/onadata/libs/templates/projects/invitation.txt b/onadata/libs/templates/projects/invitation.txt new file mode 100644 index 0000000000..fb812e8017 --- /dev/null +++ b/onadata/libs/templates/projects/invitation.txt @@ -0,0 +1,11 @@ +{% load i18n %}{% blocktranslate %} +Hello, + +You have been added to {{project_name}} by a project admin allowing you to begin data collection. 
+ +To begin using {{deployment_name}}, please create an account first by clicking the link below: +{{invitation_url}} + +Thanks, +The Team at {{ deployment_name }} +{% endblocktranslate %} \ No newline at end of file diff --git a/onadata/libs/templates/projects/invitation_subject.txt b/onadata/libs/templates/projects/invitation_subject.txt new file mode 100644 index 0000000000..63553ef886 --- /dev/null +++ b/onadata/libs/templates/projects/invitation_subject.txt @@ -0,0 +1 @@ +{% load i18n %}{% blocktranslate %}Invitation to Join a Project on {{deployment_name}}{% endblocktranslate %} \ No newline at end of file diff --git a/onadata/libs/tests/utils/test_email.py b/onadata/libs/tests/utils/test_email.py index 5236df217d..55ff86ffa6 100644 --- a/onadata/libs/tests/utils/test_email.py +++ b/onadata/libs/tests/utils/test_email.py @@ -1,9 +1,17 @@ from six.moves.urllib.parse import urlencode - +from mock import patch from django.test import RequestFactory from django.test.utils import override_settings from onadata.apps.main.tests.test_base import TestBase -from onadata.libs.utils.email import get_verification_email_data, get_verification_url +from onadata.libs.utils.email import ( + get_verification_email_data, + get_verification_url, + get_project_invitation_url, +) +from onadata.libs.utils.email import ProjectInvitationEmail +from onadata.apps.logger.models import ProjectInvitation +from onadata.libs.utils.user_auth import get_user_default_project + VERIFICATION_URL = "http://ab.cd.ef" @@ -114,3 +122,78 @@ def test_get_verification_email_data_with_verification_and_redirect_urls(self): def test_email_data_does_not_contain_newline_chars(self): email_data = self._get_email_data(include_redirect_url=True) self.assertNotIn("\n", email_data.get("subject")) + + +class ProjectInvitationEmailTestCase(TestBase): + """Tests for class ProjectInvitationEmail""" + + def setUp(self) -> None: + super().setUp() + + self.project = get_user_default_project(self.user) + 
self.user.profile.name = "Test User" + self.user.profile.save() + self.project.name = "Test Invitation" + self.project.save() + self.invitation = ProjectInvitation.objects.create( + email="janedoe@example.com", + project=self.project, + role="editor", + status=ProjectInvitation.Status.PENDING, + ) + self.email = ProjectInvitationEmail( + self.invitation, "https://example.com/register" + ) + + @override_settings(DEPLOYMENT_NAME="Misfit") + @patch("onadata.libs.utils.email.send_generic_email") + def test_send(self, mock_send): + """Email is sent successfully""" + self.email.send() + email_data = { + "subject": "Invitation to Join a Project on Misfit", + "message_txt": "\nHello,\n\nYou have been added to Test Invitation by" + " a project admin allowing you to begin data collection.\n\nTo begin" + " using Misfit, please create an account first by clicking the link below:" + "\nhttps://example.com/register" + "\n\nThanks,\nThe Team at Misfit\n", + } + mock_send.assert_called_with( + self.invitation.email, + **email_data, + ) + + @override_settings(DEPLOYMENT_NAME="Misfit") + def test_get_template_data(self): + """Context data for the email templates is correct""" + expected_data = { + "subject": {"deployment_name": "Misfit"}, + "body": { + "deployment_name": "Misfit", + "project_name": "Test Invitation", + "invitation_url": "https://example.com/register", + "organization": "Test User", + }, + } + data = self.email.get_template_data() + self.assertEqual(data, expected_data) + + +class ProjectInvitationURLTestCase(TestBase): + """Tests for get_project_invitation_url""" + + def setUp(self): + super().setUp() + + self.custom_request = RequestFactory().get("/path", data={"name": "test"}) + + @override_settings(PROJECT_INVITATION_URL="https://example.com/register") + def test_url_configured(self): + """settings.PROJECT_INVITATION_URL is set""" + url = get_project_invitation_url(self.custom_request) + self.assertEqual(url, "https://example.com/register") + + def 
test_url_not_configured(self): + """settings.PROJECT_INVITATION_URL not set""" + url = get_project_invitation_url(self.custom_request) + self.assertEqual(url, "http://testserver/api/v1/profiles") diff --git a/onadata/libs/utils/email.py b/onadata/libs/utils/email.py index d725941b6f..527e85c246 100644 --- a/onadata/libs/utils/email.py +++ b/onadata/libs/utils/email.py @@ -4,9 +4,11 @@ """ from django.conf import settings from django.core.mail import EmailMultiAlternatives +from django.http import HttpRequest from django.template.loader import render_to_string from six.moves.urllib.parse import urlencode from rest_framework.reverse import reverse +from onadata.apps.logger.models import ProjectInvitation def get_verification_url(redirect_url, request, verification_key): @@ -75,3 +77,59 @@ def send_generic_email(email, message_txt, subject): email_message = EmailMultiAlternatives(subject, message_txt, from_email, [email]) email_message.send() + + +def get_project_invitation_url(request: HttpRequest): + """Get project invitation url""" + url: str = getattr(settings, "PROJECT_INVITATION_URL", "") + + if not url: + url = reverse("userprofile-list", request=request) + + return url + + +class ProjectInvitationEmail: + """ + A class to send a project invitation email + """ + + def __init__(self, invitation: ProjectInvitation, url: str) -> None: + super().__init__() + + self.invitation = invitation + self.url = url + + def get_template_data(self) -> dict[str, str]: + """Get context data for the templates""" + deployment_name = getattr(settings, "DEPLOYMENT_NAME", "Ona") + organization = self.invitation.project.organization.profile.name + data = { + "subject": {"deployment_name": deployment_name}, + "body": { + "deployment_name": deployment_name, + "project_name": self.invitation.project.name, + "invitation_url": self.url, + "organization": organization, + }, + } + + return data + + def get_email_data(self) -> dict[str, str]: + """Get the email data to be sent""" + 
message_path = "projects/invitation.txt" + subject_path = "projects/invitation_subject.txt" + template_data = self.get_template_data() + email_data = { + "subject": render_to_string(subject_path, template_data["subject"]), + "message_txt": render_to_string( + message_path, + template_data["body"], + ), + } + return email_data + + def send(self) -> None: + """Send project invitation email""" + send_generic_email(self.invitation.email, **self.get_email_data()) diff --git a/onadata/libs/utils/user_auth.py b/onadata/libs/utils/user_auth.py index d7b55dc428..5e3d9c75cc 100644 --- a/onadata/libs/utils/user_auth.py +++ b/onadata/libs/utils/user_auth.py @@ -87,14 +87,14 @@ def has_permission(xform, owner, request, shared=False): user = request.user return ( shared - or xform.shared_data - or ( + or xform.shared_data # noqa W503 + or ( # noqa W503 hasattr(request, "session") - and request.session.get("public_link") == xform.uuid + and request.session.get("public_link") == xform.uuid # noqa W503 ) - or owner == user - or user.has_perm("logger.view_xform", xform) - or user.has_perm("logger.change_xform", xform) + or owner == user # noqa W503 + or user.has_perm("logger.view_xform", xform) # noqa W503 + or user.has_perm("logger.change_xform", xform) # noqa W503 ) @@ -103,8 +103,8 @@ def has_edit_permission(xform, owner, request, shared=False): user = request.user return ( (shared and xform.shared_data) - or owner == user - or user.has_perm("logger.change_xform", xform) + or owner == user # noqa W503 + or user.has_perm("logger.change_xform", xform) # noqa W503 ) From 28d2990b2f7f162513cce5142112f76d091880d5 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Fri, 21 Jul 2023 10:21:05 +0300 Subject: [PATCH 036/270] Fix AttributeError: 'NoneType' object has no attribute 'strip' when exporting form data (#2453) * fix bug when exporting SAV fix bug AttributeError: 'NoneType' object has no attribute 'strip'. 
* enhance test * refactor code * update docstring --- .../childrens_survey_sw_missing_en_label.xlsx | Bin 0 -> 9127 bytes .../libs/tests/utils/test_export_builder.py | 50 ++++++++ onadata/libs/utils/export_builder.py | 113 +++++++++++------- 3 files changed, 121 insertions(+), 42 deletions(-) create mode 100644 onadata/apps/logger/tests/fixtures/childrens_survey_sw_missing_en_label.xlsx diff --git a/onadata/apps/logger/tests/fixtures/childrens_survey_sw_missing_en_label.xlsx b/onadata/apps/logger/tests/fixtures/childrens_survey_sw_missing_en_label.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..9b5461ef07b3e5aa43badcff9450e7fc126606a4 GIT binary patch literal 9127 zcmbVSWmsHWvc=ssI3#!=bb#O?xVsY^f_pdap5X58?j9gG!QI{6-Fc9iH#d_vbAP-# zU;C*#r`KM)s&>_u5r>4r0)vBt17i<9Qv&-<5TD;`0rky54DVi^iz8a4+5t#@N6rDX zS05I5;L>v%Bt#m>n2B4+EeNXpGdsLbF9k3$kb*1PL#*B0A9b@AB`oL`0hHpJ{)(#p z5KWYNuDKn3kJgS1ECU&CUFdAWa+dtGz))$i^Zpgy-YjREFfoMfGfD9g@GZUlFJK>e zz5+EA?HM1xeB1O<-4DXBHdM-(c86;8ku*2`U?ECE=)}#ms{A^V%&uEZBGV<8tq}uR z-w(lYQQx(`Z|Nus1rG@zB8eVLwt3i%Ggccy_7KBmd1y?zsDrF#xD(1_s!Ftl-& zeB&x2ahQrt#H1w__&MdiZxd5S92U+ufjQyu8H7OyFff__3KQz{84hL)_Le|XT}w;T zcMj%eor(+6a{x5=C9QDB(ZyMpDw}N6fbXMlhj&N$3yS>E5KJbsu1bHMt&RZK1H;Uo z(1=%>^*u1V5hSCv9k$*T^=4{96a{)hL)D8%lv0rwIl^RRR}Ng#7DXH35GIAy3b7Z; zm@a7iX*x``3xh32RM7(vj1HZIFHj0nGGW9s3~IN<4CC)RMWjc&3_A6xizM0yZFDf# z7xuzgekIRThlEM!o0z)hB!_vEk%UQ=4Rg<5x}@o-&4=IUZxN=-RDxG@-qc80+=-b# zElq>85$hk3^tB_;WDnUBKZc94z6jG+P;Y^8id7;Dh>bxvVsO-0@sl7Xfe?{x8~Kj@ z2x|H@D;eJHS8!{jV1``*-}M&$>L42PB1}_XU2<_AccaXZ(S^Y7$)X32HcAC$6Zk}Y z+Eb~q&xMDW-We2QberE)f?pww8{?1TTvbP^>k?h0I34>q9a-8UcWHYc?p>t}AI*8p zQpA#{yVKJ{Dq6DXO!+miJn`}fZ z$aZA#v$B1g)dJGg)~8t&Qj37DZxy1V4_Rv6wh}Mr(Ipuyl_e6Df%OR|ZEoVBmaZgAR9+Beb5W74^%WP69HJROQmJ^KlnzO&8gCMIo==W7fdQ7 zFdwu+Ln=LZn&l!tTr_m~@O-)q=cbE}z`Y|rWan0&>p)8`h7z_{AjtU=Ljb}pBTGdsI=Q*J9qpv6vG<+CA{hxP;?)vlJR0sJDRYCK^n)OF5$9Nm}y$4}OotuaJXd2kk%2j5N(+-2qQx{?yJi7_3`M}po z492i7X7WPA?1tG`=d!EWN-+jC3(NFL0$-B9(Pqy;iZsB}H6U-Kz{cpKoL&WnXTQCZ 
zR9#hU8Ec}vfN1&gO=9z)#cDtyp=ZulOw}E)SK*2UO?ifXV2bi>45V!sUQvK>GyRlcqqf0bQ#EVl{~p2dGb`^Np($p_83Bt-Go0 z=XK#7UmL7`gDRs%{JHFLlvTP9>E3Ty~i+0$5z8T!N zPa097db4-}Y7{SEit|nZa%5Z&K_}|+mEFWe-$T66oPZIlspy&J=$L;;^H+cQZ;+9` zz72@s*US4C#y6?<0O#1ynue6i9J-BPm&{^_<_q9U%dK_fcgHIXAXAdT>qlsVi3B`c zjDgVv!0^n$e&wE~Jf7||WxnW0X}XIx90X^QGJ0?C?RmseCt@TPa4`Si=6d3^%Aub2 zS`d%^c)qE9-nI5x&n{~}<_Lg;@{0O;egR(g#|c-;zE#MzM@*eZGO|89hE-1uF$11Z z{&$+8#^Ebr%@C0lC7i{wt?9HxWYOaQ$~!{b48}GqKPaCe`>b^;)8Xv^IE_g|#x9|uI7L=lh*y$<#|Ezj$+W`wmNLX@1>J|RUV zShCb`m#y6^j2ITT5xxqDbyBWrN6xMV ziq2l>$1C7REMxRc=4V0&eJ`d{Q;l!W*4()$*hZ zlLWj_t4K_bs6~j|uTOI%Qc+AN5|vk1S)fg2zlD}<(~`?;R-eyD0H2DEx8nE)Av)am z9mzB{y;C0zl`=Bc9&h5^o4`J5Vg-$xHdPei>apn^3JLGKPBAJZWes0JV!rEpMaxDUCO6h+^Az34Zh--w*) z9$T%$@ngWxwJ&xU3%eaf7X6Z*mM<_5pqUjHm;T7E+rC}U=C|XNJYHJ>mwLk8bNXEb z7n>#M9j#g1KTdAT74(H>9Os<$8+m^0>`wns6f|-hB_P_X)`W-+!dfj*;Ab`b8lHrc z@|ADyj7LOERd1v*Cnsorj4#d1AJJ2FFZ&Xli5COZUf89Uiav*7m6Tr|{^^x;SG!Y; z%@3?9g|&ViHVZe8S^`=R>bXIwfr3F8mZ*Tq>Zzc~%WU}!jLHx7rJ4z6llvmU^Se_g zysk4r>gvJ?b-KOMVxgTED7eaD^y2~OB1*yUw=6{CwZFl^vGVsE+`WD%f0Gfv%1*;y z?5XNiX(kF{N8JFfD{Ak=T9d&lYF*wSmn=>lvHuv3y^t#sW3+!0-<=P$73~uPRVWXk zu3)A?9g-Af#v+r;M}4;iknU49zMWce`HahykTYRqb%L<5F_(S&u)CEKP;07i{Hb~6 zfSbSQvuxti_3l=}dK{q65yzt>l8q$`?rGqD4 zf0p{XNUMw)mnvKkMkRHw=Jwqi24SE2$#!3dP}Gzh@-A^=OHhB5?Q4J_!e}fW&3fyc z{OLP)e;#M*i!f>W&g|s;duZ>N=A;fU@stA1{ximEyJ$>HpqwFbGgUdE|4;K1bV6L2 z`2?Aa6v>c77n7tVcq$47os(#);aDtLf1MH@g2uL8XPwOn$Y#iPo!La`Cg6!n0$Q3( z?E*Sma(!%TCnFql2k9f31B2-jQT)ln2lBdX5#oEiwNx*1Rv(=*+H(5fE0^Zwb3_1m zsFLfd7E%-ul9w5y$O%kt0Wg2q$(JSsTWWXDqBejpYy$LSvkDltjAfobO2fnU4&{>Q z3l}JUGtx_3a~lm&BEMwJRyX(o@jeu$48&4taC7{IWZ~u+%*V6h^z5XG z2AzSMOcvwjABPe7d}nH;O`u*nby!c^ujVX)I<9819H+TEaQ%wNVsKxaxp-AW7 zzd)#5^I}8~%!@LdzG)`$X%C<@>cuUh`C8Pj35Cfp9PFLHo~o8%d5j{jqK#27N9t8P z#3`!H4K0`LICU*;IHQ@UG)50IUx3P7 z)K0^@ttM+<$wR`ogFd+8!yK>Nm`z2<8g`+*UPOm6ntI4a=ViBZ&W)1B(XnXCg}Z^G{--;ogvCyn06lA%S$Mcq{Ji3{|g)9@%{t4tIo z?_o*N2d?_allF>|N7#!~d^|pt=0v6JcL~B{{OdT&T?3dBJQe^-8 
z!&@Q463cTtS96i^`Awp*P3|L)`yrvwK?&Y-7HD%3W}|0yV>6$P!Z1t(O2ZuO(ZQl} z!BP%VS#K{C5%2C_-$OjjZzVsmz-z>bo^_k*b-A4lJ7a@-+h>LzF8${>(}VG9Lmp8i z_Kw}L^&F2)l+*dpax4(v^d=z*fWhTw8ch>I^N&IFW`8fT=GA1A|Mre&M@P0{)>B=P z+(Ozx>Cr_{m!A~yp`z_bP|tM&J7%jL&0|}>;_?L}rX6mmK$@8{VfoeLd&odFlrDC)Y#c zijhLd=}OApJM=`5;K@e!f_TO!p%jtYbX!5ivDuRX`#$sfyvuDF@|F&{bA273jzN7Bx!Ag=SI&*# zIy6O{5|0OoQ;>NIJ1&smPD7v$DbI9XO{u0#XQCd;8`C$&Gh45&li`)S;+$+XwN)M% z7@vN*x3y+U|9B`cFg5i5hkIlC<=&JeE!SAkny!@Lxc2i+UD-k+i?Re)nHMrcgI|Bw4PMlDCOhgC z##-blAkvY3=~I9XrEVO2m--(MXM`I!GV508A?z4Zr}(7wSBkO)bs>C1|}PO{o4be*!yD(;|E5;gkND~GpgKgGAIb9HDoDnHCL zh9^guwx7;vI|Dhes9Ur(6AJgS)I5cZ|LAUD9JfJjp@J+Rbgub}=EQJB~kcO?$}}SMX-jJS-Ww@=|kK^6jMi z-u(| zVUv!YhwAOT9VIr~i|aTNn4Z#lULE@uC*CB z_8x1X!oDJc_YIOlYC-FyS7Be->NvU+&3(W`0I$v=GbO$xLr$cW!=*S|{NZjQ`Pd`u z`MYdz?xj-&sp(62F`5BwqD#hk7OQK50-XH`OO20c_*6p+>E9ak-MMA8<@=)x`PYTG z6czf@)n4W64i`0?GLDPrV;PXX6%PsAGUats=syo2X@O>ynSu~7pfq*1Hz^EV^_ZXY z(>F3pKTugvLzn^VZlWyQJRYe}f$QkQ_alOSMlHtf4uJUF)Q)P6Q^ff02g2N2Y|o50 zQ%KxuMqNH@r@Ommy{*%as%6_s4Zr)?8l3HIX+`j{7eBDQr9)6NcZIvDc%ApLHhLc$ zt?n3Bxuh`Mk6%-J=1I8s3H&AXK}Ga_H3*Ie^f`6}a{;CQ$MgFTQ z+-;vwL-0}+Ce70L>EJh&k&KAU^U z%DnX%;XLUVpnxgLs-I}~6-o4KDEZWW!R&; zIj+l+r(OqhMoLV))@?zfdR>yPx`|s^1p5GKR2BpNp+rPWJ&dLrsEE{gP$u`;pE||O z3Ee89%}^hW+2U2ucdYQJ6NLzDo${y~M#+^oM;^A|M(`XK`sGlIAam?}U6is6IlQuQ zv|h7b?=xSDHxQ{=Rz&{pyhACz+dbC~u&Wt+?03xkMCr^#(L4zqDdj^#J|S-;>$N6} zFAGMXOy?^UQ)1XnYoyMDTKqjzv#6{pV?FoS7Fn6IA(*dcL(?EoIWl|OPui*qVs((Z zO#m{Zp05!v?TmSAyZw;5Ej7nV+y!gE z(P4isB?xf8C|FOE^2OLSwN0M-N6xP4P?>mkk#mvqtHVWQC@P|Lvb?CNJ*#RsMapAa z4{!HCSqYcGQi8R;PmaY&Ht7?_18=jlozfX#uAktm7R$o11>KYTgca`1(7`RUTXgKY z12pj0VQFNzOHfqd=z9q3j*P|<_$J@IUFbb^!Um5LW$iT+Xo00U_Q19Y6f&k4pS^-B zI>WJlTcK1V__3??EP#I{EmnV>1EJ|jl;4NmC|Ku7YxfvpL`*CH(P?ie=`2v|WkzAr zKS`jy%%;ChOP+rO`(mjl1GKaPG3Z$W^EzRf9N>A5Vky z84U#~bo`{Whq;=>>oOWgk2Bsv&msU)m?HduANV;_ziX{XQuQO?*Fp~2Hj6)Nl{>HwD2)v|9(k+Xo|OC>D|wd7#1Oo^vz_eVpX2H?jt{7>Ba zh*3ghWNvI1nW93J%#_T8%~aP@cU@(HZB?WJRQhppIAElHymD1%>RJPU8kX=7ANE3V!h#Xvzr`{V}Byv&tq);y_qN3kbb 
zxDUoHV)~hxK`hF1MMAYuj&6z6gENSH9-< zHvYn*u(LEco)|9ToCNW^Sumd!B{51U9POk;2X#+fEIUv8H#WsdZb=KquW}tvy}90e`vCm{oXEiN{@pVy zNzdT@_o(q)oBoaCwjdiz^B3%jBg&t$0aT1_LfY^I!MA(Ing2Xci>OM+KL>=!%xsoBW<=Dd1j%QLeeTAm{VM=jKy@w z)TB{_ome>t#VHX-NxQD|MD8OXd@phZHlfU^Er1F4Y}7D_ZG6L*M-+0Uj#Qy2W<#vr z11rILT7KsW@`J3{v5p2DuA4$<_M$@Yb$zjJCOR=oBO1km*nCG=i%l4TSN#~%JIq6@ zhvDTsIF?gY(i2z{NL7qmcQ>szZX*i^qh6Vfgti|9-Kzn;tRI|+ElxYmdOK)oV&z)M zn)OL!5sc6{Ms|qO?jH8`)e|O3uKT(k=i?pdMqIg7aU7!gilx!MNqU{zJ~q@NRn=VG zfrg&YV)diFKrSVk=S9XH?LHRd#YjHc4_CtL^jWi=#u(u~OJnXpRL0T!YRb}=;o=y_ zVX}B0u63A~=>ZOb1@_lAgx@>wFWV6QU4Gk?@TcPMoz0iV{9i)#e5pU$^nWV<-Y^%q6|6t|UGXGC2zvsL!OMt%w7U!3h|FR(X)6Vaa{v|{HO8~gP?128vm;bcz zdjNXL2>ueU=NtP!`N5y+zq{d=DD{`n6aJ?D&zSY6_U}&R#dZ8821LJU>;21d|7qZN fn*Y0jN#g(LiDbl~pD6 Date: Fri, 14 Jul 2023 13:34:28 +0300 Subject: [PATCH 037/270] Ensure verification URL works for multiple domains --- onadata/libs/tests/utils/test_email.py | 35 +++++++++++++++++++++++--- onadata/libs/utils/email.py | 18 +++++++++++-- 2 files changed, 48 insertions(+), 5 deletions(-) diff --git a/onadata/libs/tests/utils/test_email.py b/onadata/libs/tests/utils/test_email.py index 55ff86ffa6..36c8677738 100644 --- a/onadata/libs/tests/utils/test_email.py +++ b/onadata/libs/tests/utils/test_email.py @@ -24,7 +24,13 @@ def setUp(self): self.redirect_url = "http://red.ir.ect" self.custom_request = RequestFactory().get("/path", data={"name": "test"}) - @override_settings(VERIFICATION_URL=None) + @override_settings( + VERIFICATION_URL={ + "stage-testserver": 'https://stage-testserver/email-verification-confirmation', + "*": None, + } + ) + @override_settings(ALLOWED_HOSTS="*") def test_get_verification_url(self): # without redirect_url verification_url = get_verification_url( @@ -64,6 +70,28 @@ def test_get_verification_url(self): ("http://testserver/api/v1/profiles/verify_email?%s" % string_query_params), ) + # with redirect_url + self.custom_request.META["HTTP_HOST"] = "stage-testserver" + verification_url = 
get_verification_url( + **{ + "redirect_url": self.redirect_url, + "request": self.custom_request, + "verification_key": self.verification_key, + } + ) + + string_query_params = urlencode( + { + "verification_key": self.verification_key, + "redirect_url": self.redirect_url, + } + ) + + self.assertEqual( + verification_url, + ("https://stage-testserver/email-verification-confirmation?%s" % string_query_params) + ) + def _get_email_data(self, include_redirect_url=False): verification_url = get_verification_url( **{ @@ -100,7 +128,7 @@ def test_get_verification_email_data_without_verification_url_set(self): email_data.get("message_txt"), ) - @override_settings(VERIFICATION_URL=VERIFICATION_URL) + @override_settings(VERIFICATION_URL={"*": VERIFICATION_URL}) def test_get_verification_email_data_with_verification_url_set(self): email_data = self._get_email_data() self.assertIn( @@ -108,7 +136,7 @@ def test_get_verification_email_data_with_verification_url_set(self): email_data.get("message_txt"), ) - @override_settings(VERIFICATION_URL=VERIFICATION_URL) + @override_settings(VERIFICATION_URL={"*": VERIFICATION_URL}) def test_get_verification_email_data_with_verification_and_redirect_urls(self): email_data = self._get_email_data(include_redirect_url=True) encoded_url = urlencode( @@ -197,3 +225,4 @@ def test_url_not_configured(self): """settings.PROJECT_INVITATION_URL not set""" url = get_project_invitation_url(self.custom_request) self.assertEqual(url, "http://testserver/api/v1/profiles") +# Add test case for using a different host diff --git a/onadata/libs/utils/email.py b/onadata/libs/utils/email.py index 527e85c246..559d3c699d 100644 --- a/onadata/libs/utils/email.py +++ b/onadata/libs/utils/email.py @@ -13,8 +13,22 @@ def get_verification_url(redirect_url, request, verification_key): """Returns the verification_url""" - verification_url = getattr(settings, "VERIFICATION_URL", None) - url = verification_url or reverse("userprofile-verify-email", request=request) + # 
get verification URL based on host + verification_url_map = getattr(settings, "VERIFICATION_URL", {}) + host = request.get_host() + url = ( + ( + verification_url_map + and host in verification_url_map + and verification_url_map[host] + ) + or ( + verification_url_map + and "*" in verification_url_map + and verification_url_map["*"] + ) + or reverse("userprofile-verify-email", request=request) + ) query_params_dict = {"verification_key": verification_key} if redirect_url: query_params_dict.update({"redirect_url": redirect_url}) From c12367698bb65c8afc4eafb091e49cc54a0f0340 Mon Sep 17 00:00:00 2001 From: apiyo Date: Thu, 20 Jul 2023 16:04:49 +0300 Subject: [PATCH 038/270] Set ENKETO_LOGIN_URL for multiple domains --- .../api/tests/viewsets/test_xform_viewset.py | 20 +++++++++++++++---- onadata/apps/api/viewsets/xform_viewset.py | 12 ++++++++++- 2 files changed, 27 insertions(+), 5 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_xform_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_viewset.py index 0e66f0c357..40ab829e5c 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_viewset.py @@ -871,7 +871,7 @@ def test_existing_form_format(self): formid = self.xform.pk request = self.factory.get("/", **self.extra) # get existing form format - exsting_format = get_existing_file_format(self.xform.xls, 'xls') + exsting_format = get_existing_file_format(self.xform.xls, "xls") # XLSX format response = view(request, pk=formid, format="xlsx") @@ -1226,7 +1226,13 @@ def test_login_enketo_no_redirect(self): "Authentication failure, cannot redirect", ) - @override_settings(ENKETO_CLIENT_LOGIN_URL="http://test.ona.io/login") + @override_settings( + ENKETO_CLIENT_LOGIN_URL={ + "*": "http://test.ona.io/login", + "stage-testserver": "http://gh.ij.kl/login", + } + ) + @override_settings(ALLOWED_HOSTS=["*"]) def test_login_enketo_no_jwt_but_with_return_url(self): with HTTMock(enketo_urls_mock): 
self._publish_xls_form_to_project() @@ -1237,9 +1243,16 @@ def test_login_enketo_no_jwt_but_with_return_url(self): url = "https://enketo.ona.io/::YY8M" query_data = {"return": url} request = self.factory.get("/", data=query_data) + + # user is redirected to default login page "*" response = view(request, pk=formid) + self.assertTrue(response.url.startswith("http://test.ona.io/login")) + self.assertEqual(response.status_code, 302) - # user is redirected to the set login page in settings file + # user is redirected to login page for "stage-testserver" + request.META["HTTP_HOST"] = "stage-testserver" + response = view(request, pk=formid) + self.assertTrue(response.url.startswith("http://gh.ij.kl/login")) self.assertEqual(response.status_code, 302) @override_settings(JWT_SECRET_KEY=JWT_SECRET_KEY, JWT_ALGORITHM=JWT_ALGORITHM) @@ -5239,7 +5252,6 @@ def test_share_auto_xform_meta_perms(self): MetaData.xform_meta_permission(self.xform, data_value=data_value) for role_class in ROLES_ORDERED: - data = {"username": "alice", "role": role_class.name} request = self.factory.post("/", data=data, **self.extra) response = view(request, pk=formid) diff --git a/onadata/apps/api/viewsets/xform_viewset.py b/onadata/apps/api/viewsets/xform_viewset.py index 4a92d99b1a..f6cabe7ee3 100644 --- a/onadata/apps/api/viewsets/xform_viewset.py +++ b/onadata/apps/api/viewsets/xform_viewset.py @@ -467,8 +467,18 @@ def login(self, request, **kwargs): if redirect: return redirect + # get value of login URL based on host + host = request.get_host() + enketo_client_login_url_setting = settings.ENKETO_CLIENT_LOGIN_URL or {} + enketo_client_login_url = ( + host in enketo_client_login_url_setting + and enketo_client_login_url_setting[host] + ) or ( + "*" in enketo_client_login_url_setting + and enketo_client_login_url_setting["*"] + ) login_vars = { - "login_url": settings.ENKETO_CLIENT_LOGIN_URL, + "login_url": enketo_client_login_url, "return_url": urlencode({"return_url": return_url}), } client_login 
= "{login_url}?{return_url}".format(**login_vars) From 30dc934d9ff31fc200e76695dd53d6d944dd38a4 Mon Sep 17 00:00:00 2001 From: apiyo Date: Thu, 20 Jul 2023 16:18:59 +0300 Subject: [PATCH 039/270] Fix ci formatting errors --- onadata/apps/api/tools.py | 33 +++++++++++++++++++++- onadata/apps/api/viewsets/xform_viewset.py | 33 +--------------------- 2 files changed, 33 insertions(+), 33 deletions(-) diff --git a/onadata/apps/api/tools.py b/onadata/apps/api/tools.py index 35e58897a5..c147a80f13 100644 --- a/onadata/apps/api/tools.py +++ b/onadata/apps/api/tools.py @@ -739,7 +739,6 @@ def update_role_by_meta_xform_perms(xform): users = get_xform_users(xform) for user in users: - role = users.get(user).get("role") if role in editor_role: role = ROLES.get(meta_perms[0]) @@ -773,3 +772,35 @@ def replace_attachment_name_with_url(data): except ValueError: pass return data + + +ENKETO_AUTH_COOKIE = getattr(settings, "ENKETO_AUTH_COOKIE", "__enketo") +ENKETO_META_UID_COOKIE = getattr( + settings, "ENKETO_META_UID_COOKIE", "__enketo_meta_uid" +) +ENKETO_META_USERNAME_COOKIE = getattr( + settings, "ENKETO_META_USERNAME_COOKIE", "__enketo_meta_username" +) + + +def set_enketo_signed_cookies(resp, username=None, json_web_token=None): + """Set signed cookies for JWT token in the HTTPResponse resp object.""" + if not username and not json_web_token: + return None + + max_age = 30 * 24 * 60 * 60 * 1000 + enketo_meta_uid = {"max_age": max_age, "salt": settings.ENKETO_API_SALT} + enketo = {"secure": False, "salt": settings.ENKETO_API_SALT} + + # add domain attribute if ENKETO_AUTH_COOKIE_DOMAIN is set in settings + # i.e. 
don't add in development environment because cookie automatically + # assigns 'localhost' as domain + if getattr(settings, "ENKETO_AUTH_COOKIE_DOMAIN", None): + enketo_meta_uid["domain"] = settings.ENKETO_AUTH_COOKIE_DOMAIN + enketo["domain"] = settings.ENKETO_AUTH_COOKIE_DOMAIN + + resp.set_signed_cookie(ENKETO_META_UID_COOKIE, username, **enketo_meta_uid) + resp.set_signed_cookie(ENKETO_META_USERNAME_COOKIE, username, **enketo_meta_uid) + resp.set_signed_cookie(ENKETO_AUTH_COOKIE, json_web_token, **enketo) + + return resp diff --git a/onadata/apps/api/viewsets/xform_viewset.py b/onadata/apps/api/viewsets/xform_viewset.py index f6cabe7ee3..b7e9bc1a3a 100644 --- a/onadata/apps/api/viewsets/xform_viewset.py +++ b/onadata/apps/api/viewsets/xform_viewset.py @@ -99,14 +99,6 @@ ) from onadata.settings.common import CSV_EXTENSION, XLS_EXTENSIONS -ENKETO_AUTH_COOKIE = getattr(settings, "ENKETO_AUTH_COOKIE", "__enketo") -ENKETO_META_UID_COOKIE = getattr( - settings, "ENKETO_META_UID_COOKIE", "__enketo_meta_uid" -) -ENKETO_META_USERNAME_COOKIE = getattr( - settings, "ENKETO_META_USERNAME_COOKIE", "__enketo_meta_username" -) - # pylint: disable=invalid-name BaseViewset = get_baseviewset_class() User = get_user_model() @@ -186,29 +178,6 @@ def get_survey_xml(csv_name): return survey.to_xml() -def set_enketo_signed_cookies(resp, username=None, json_web_token=None): - """Set signed cookies for JWT token in the HTTPResponse resp object.""" - if not username and not json_web_token: - return None - - max_age = 30 * 24 * 60 * 60 * 1000 - enketo_meta_uid = {"max_age": max_age, "salt": settings.ENKETO_API_SALT} - enketo = {"secure": False, "salt": settings.ENKETO_API_SALT} - - # add domain attribute if ENKETO_AUTH_COOKIE_DOMAIN is set in settings - # i.e. 
don't add in development environment because cookie automatically - # assigns 'localhost' as domain - if getattr(settings, "ENKETO_AUTH_COOKIE_DOMAIN", None): - enketo_meta_uid["domain"] = settings.ENKETO_AUTH_COOKIE_DOMAIN - enketo["domain"] = settings.ENKETO_AUTH_COOKIE_DOMAIN - - resp.set_signed_cookie(ENKETO_META_UID_COOKIE, username, **enketo_meta_uid) - resp.set_signed_cookie(ENKETO_META_USERNAME_COOKIE, username, **enketo_meta_uid) - resp.set_signed_cookie(ENKETO_AUTH_COOKIE, json_web_token, **enketo) - - return resp - - def parse_webform_return_url(return_url, request): """ Given a webform url and request containing authentication information @@ -251,7 +220,7 @@ def parse_webform_return_url(return_url, request): else: username = request.user.username - response_redirect = set_enketo_signed_cookies( + response_redirect = utils.set_enketo_signed_cookies( response_redirect, username=username, json_web_token=jwt_param ) From 7f0c5d8570db1f7c584178f45673ac5b08dc36fd Mon Sep 17 00:00:00 2001 From: apiyo Date: Mon, 24 Jul 2023 04:41:38 +0300 Subject: [PATCH 040/270] Use host from request in place of Site url --- .../tests/viewsets/test_tableau_viewset.py | 3 ++ .../api/tests/viewsets/test_xform_viewset.py | 4 +++ onadata/apps/api/tools.py | 10 ++++-- onadata/apps/api/viewsets/dataview_viewset.py | 1 + .../apps/api/viewsets/v2/tableau_viewset.py | 2 +- onadata/apps/api/viewsets/xform_viewset.py | 1 + onadata/apps/logger/views.py | 2 +- onadata/apps/main/context_processors.py | 6 ++-- onadata/apps/viewer/tests/test_exports.py | 4 +-- onadata/apps/viewer/views.py | 10 ++++-- .../serializers/organization_serializer.py | 17 ++++++---- .../serializers/user_profile_serializer.py | 2 +- onadata/libs/tests/utils/test_email.py | 1 - .../libs/tests/utils/test_export_builder.py | 18 ++++++---- onadata/libs/tests/utils/test_export_tools.py | 34 ++++++++++++++++--- onadata/libs/utils/api_export_tools.py | 3 ++ onadata/libs/utils/common_tools.py | 11 +++--- 
onadata/libs/utils/csv_builder.py | 23 +++++++++---- onadata/libs/utils/export_builder.py | 27 +++++++++++++-- onadata/libs/utils/user_auth.py | 2 +- 20 files changed, 131 insertions(+), 50 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_tableau_viewset.py b/onadata/apps/api/tests/viewsets/test_tableau_viewset.py index 8c860bdced..f4cd9fa1f3 100644 --- a/onadata/apps/api/tests/viewsets/test_tableau_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_tableau_viewset.py @@ -8,6 +8,7 @@ from django.test import RequestFactory from tempfile import NamedTemporaryFile from django.utils.dateparse import parse_datetime +from django.test.utils import override_settings from onadata.apps.main.tests.test_base import TestBase from onadata.apps.logger.models.open_data import get_or_create_opendata from onadata.apps.api.viewsets.v2.tableau_viewset import ( @@ -277,6 +278,7 @@ def test_clean_xform_headers(self): cleaned_data = clean_xform_headers(group_columns) self.assertEqual(cleaned_data, ["childs_name", "childs_age"]) + @override_settings(ALLOWED_HOSTS=["*"]) def test_replace_media_links(self): """ Test that attachment details exported to Tableau contains @@ -313,6 +315,7 @@ def test_replace_media_links(self): _open_data = get_or_create_opendata(xform_w_attachments) uuid = _open_data[0].uuid request = self.factory.get("/", **self.extra) + request.META["HTTP_HOST"] = "example.com" response = self.view(request, uuid=uuid) self.assertEqual(response.status_code, 200) # cast generator response to list for easy manipulation diff --git a/onadata/apps/api/tests/viewsets/test_xform_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_viewset.py index 40ab829e5c..b028d4f3cf 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_viewset.py @@ -4320,6 +4320,7 @@ def test_csv_export__with_and_without_do_not_split_select_multiples(self): self.assertNotEqual(multiples_select_split, no_multiples_select_split) 
self.assertGreater(multiples_select_split, no_multiples_select_split) + @override_settings(ALLOWED_HOSTS=["*"]) def test_csv_export_with_and_without_removed_group_name(self): with HTTMock(enketo_mock): self._publish_xls_form_to_project() @@ -4344,6 +4345,7 @@ def test_csv_export_with_and_without_removed_group_name(self): data = {"remove_group_name": True} request = self.factory.get("/", data=data, **self.extra) + request.META["HTTP_HOST"] = "example.com" response = view(request, pk=self.xform.pk, format="csv") self.assertEqual(response.status_code, 200) @@ -4973,6 +4975,7 @@ def test_export_form_data_async_include_labels_only(self, async_result): headers = next(csv_reader) self.assertIn("Is ambulance available daily or weekly?", headers) + @override_settings(ALLOWED_HOSTS=["*"]) def test_csv_exports_w_images_link(self): with HTTMock(enketo_mock): xlsform_path = os.path.join( @@ -5022,6 +5025,7 @@ def test_csv_exports_w_images_link(self): data = {"include_images": True} # request for export again request = self.factory.get("/", data=data, **self.extra) + request.META["HTTP_HOST"] = "example.com" response = view(request, pk=self.xform.pk, format="csv") self.assertEqual(response.status_code, 200) diff --git a/onadata/apps/api/tools.py b/onadata/apps/api/tools.py index c147a80f13..ee34913fb3 100644 --- a/onadata/apps/api/tools.py +++ b/onadata/apps/api/tools.py @@ -168,7 +168,11 @@ def create_organization_object(org_name, creator, attrs=None): except IntegrityError as e: raise ValidationError(_(f"{org_name} already exists")) from e if email: - site = Site.objects.get(pk=settings.SITE_ID) + site = ( + attrs["host"] + if "host" in attrs + else Site.objects.get(pk=settings.SITE_ID).domain + ) registration_profile.send_activation_email(site) profile = OrganizationProfile( user=new_user, @@ -749,9 +753,9 @@ def update_role_by_meta_xform_perms(xform): role.add(user, xform) -def replace_attachment_name_with_url(data): +def replace_attachment_name_with_url(data, request): 
"""Replaces the attachment filename with a URL in ``data`` object.""" - site_url = Site.objects.get_current().domain + site_url = request.get_host() or Site.objects.get_current().domain for record in data: attachments: dict = record.json.get("_attachments") diff --git a/onadata/apps/api/viewsets/dataview_viewset.py b/onadata/apps/api/viewsets/dataview_viewset.py index f252266c0a..1ee101c2e3 100644 --- a/onadata/apps/api/viewsets/dataview_viewset.py +++ b/onadata/apps/api/viewsets/dataview_viewset.py @@ -221,6 +221,7 @@ def export_async(self, request, *args, **kwargs): dataview = self.get_object() xform = dataview.xform options = parse_request_export_options(params) + options["host"] = request.get_host() options.update( { diff --git a/onadata/apps/api/viewsets/v2/tableau_viewset.py b/onadata/apps/api/viewsets/v2/tableau_viewset.py index 74aeff03ba..5e99970d2d 100644 --- a/onadata/apps/api/viewsets/v2/tableau_viewset.py +++ b/onadata/apps/api/viewsets/v2/tableau_viewset.py @@ -224,7 +224,7 @@ def data(self, request, **kwargs): instances = self.paginate_queryset(instances) # Switch out media file names for url links in queryset - data = replace_attachment_name_with_url(instances) + data = replace_attachment_name_with_url(instances, request) data = process_tableau_data( TableauDataSerializer(data, many=True).data, xform ) diff --git a/onadata/apps/api/viewsets/xform_viewset.py b/onadata/apps/api/viewsets/xform_viewset.py index b7e9bc1a3a..46ac045c9e 100644 --- a/onadata/apps/api/viewsets/xform_viewset.py +++ b/onadata/apps/api/viewsets/xform_viewset.py @@ -872,6 +872,7 @@ def export_async(self, request, *args, **kwargs): meta = request.query_params.get("meta") data_id = request.query_params.get("data_id") options = parse_request_export_options(request.query_params) + options["host"] = request.get_host() options.update( { diff --git a/onadata/apps/logger/views.py b/onadata/apps/logger/views.py index 5a3e6aa844..cf073a54f0 100644 --- a/onadata/apps/logger/views.py +++ 
b/onadata/apps/logger/views.py @@ -91,7 +91,7 @@ def _html_submission_response(request, instance): data = {} data["username"] = instance.xform.user.username data["id_string"] = instance.xform.id_string - data["domain"] = Site.objects.get(id=settings.SITE_ID).domain + data["domain"] = request.get_host() or Site.objects.get(id=settings.SITE_ID).domain return render(request, "submission.html", data) diff --git a/onadata/apps/main/context_processors.py b/onadata/apps/main/context_processors.py index 96e115c8c2..94a3e90bad 100644 --- a/onadata/apps/main/context_processors.py +++ b/onadata/apps/main/context_processors.py @@ -22,9 +22,9 @@ def site_name(request): """Returns the SITE_NAME/""" site_id = getattr(settings, "SITE_ID", None) try: - site = Site.objects.get(pk=site_id) + request_host = request.get_host() if request else None + name = request_host or Site.objects.get(pk=site_id).name except Site.DoesNotExist: name = "example.org" - else: - name = site.name + return {"SITE_NAME": name} diff --git a/onadata/apps/viewer/tests/test_exports.py b/onadata/apps/viewer/tests/test_exports.py index ba3c8409d4..e79bae1f44 100644 --- a/onadata/apps/viewer/tests/test_exports.py +++ b/onadata/apps/viewer/tests/test_exports.py @@ -1155,7 +1155,7 @@ def test_dict_to_joined_export_works(self): survey_name = 'survey' indices = {survey_name: 0} output = dict_to_joined_export(data, 1, indices, survey_name, - self.xform.get_survey(), data) + self.xform.get_survey(), data, None) self.assertEqual(output[survey_name], expected_output[survey_name]) # 1st level self.assertEqual(len(output['children']), 3) @@ -1243,7 +1243,7 @@ def test_dict_to_joined_export_notes(self): survey_name = 'tutorial' indices = {survey_name: 0} data = dict_to_joined_export(submission, 1, indices, survey_name, - self.xform.get_survey(), submission) + self.xform.get_survey(), submission, None) expected_data = { 'tutorial': { '_id': 579828, diff --git a/onadata/apps/viewer/views.py b/onadata/apps/viewer/views.py 
index 53e8804394..47ceedbe40 100644 --- a/onadata/apps/viewer/views.py +++ b/onadata/apps/viewer/views.py @@ -313,7 +313,12 @@ def data_export(request, username, id_string, export_type): # noqa C901 audit = {"xform": xform.id_string, "export_type": export_type} - options = {"extension": extension, "username": username, "id_string": id_string} + options = { + "extension": extension, + "username": username, + "id_string": id_string, + "host": request.get_host(), + } if query: options["query"] = query @@ -400,7 +405,6 @@ def create_export(request, username, id_string, export_type): credential = None if export_type == Export.GOOGLE_SHEETS_EXPORT: - credential = _get_google_credential(request) if isinstance(credential, HttpResponseRedirect): return credential @@ -435,6 +439,7 @@ def create_export(request, username, id_string, export_type): "remove_group_name": str_to_bool(remove_group_name), "meta": meta.replace(",", "") if meta else None, "google_credentials": credential, + "host": request.get_host(), } try: @@ -510,6 +515,7 @@ def export_list(request, username, id_string, export_type): # noqa C901 "meta": export_meta, "token": export_token, "google_credentials": credential, + "host": request.get_host(), } if should_create_new_export(xform, export_type, options): diff --git a/onadata/libs/serializers/organization_serializer.py b/onadata/libs/serializers/organization_serializer.py index d151728083..0887c830a5 100644 --- a/onadata/libs/serializers/organization_serializer.py +++ b/onadata/libs/serializers/organization_serializer.py @@ -85,6 +85,7 @@ def create(self, validated_data): if "request" in self.context: creator = self.context["request"].user + validated_data["host"] = self.context["request"].get_host() validated_data["organization"] = org_name @@ -130,13 +131,15 @@ def _create_user_list(user_list): except UserProfile.DoesNotExist: profile = UserProfile.objects.create(user=u) - users_list.append({ - "user": u.username, - "role": get_role_in_org(u, obj), - 
"first_name": u.first_name, - "last_name": u.last_name, - "gravatar": profile.gravatar, - }) + users_list.append( + { + "user": u.username, + "role": get_role_in_org(u, obj), + "first_name": u.first_name, + "last_name": u.last_name, + "gravatar": profile.gravatar, + } + ) return users_list members = get_organization_members(obj) if obj else [] diff --git a/onadata/libs/serializers/user_profile_serializer.py b/onadata/libs/serializers/user_profile_serializer.py index 36ea893b9b..7a20cc6aa1 100644 --- a/onadata/libs/serializers/user_profile_serializer.py +++ b/onadata/libs/serializers/user_profile_serializer.py @@ -272,7 +272,7 @@ def create(self, validated_data): metadata = {} username = params.get("username") password = params.get("password1", "") - site = Site.objects.get(pk=settings.SITE_ID) + site = request.get_host() or Site.objects.get(pk=settings.SITE_ID).domain new_user = None try: diff --git a/onadata/libs/tests/utils/test_email.py b/onadata/libs/tests/utils/test_email.py index 36c8677738..12cc55c9e5 100644 --- a/onadata/libs/tests/utils/test_email.py +++ b/onadata/libs/tests/utils/test_email.py @@ -225,4 +225,3 @@ def test_url_not_configured(self): """settings.PROJECT_INVITATION_URL not set""" url = get_project_invitation_url(self.custom_request) self.assertEqual(url, "http://testserver/api/v1/profiles") -# Add test case for using a different host diff --git a/onadata/libs/tests/utils/test_export_builder.py b/onadata/libs/tests/utils/test_export_builder.py index 2119672e76..7282c21459 100644 --- a/onadata/libs/tests/utils/test_export_builder.py +++ b/onadata/libs/tests/utils/test_export_builder.py @@ -385,7 +385,7 @@ def test_zipped_csv_export_works(self): outputs = [] for d in self.data: outputs.append( - dict_to_joined_export(d, index, indices, survey_name, survey, d) + dict_to_joined_export(d, index, indices, survey_name, survey, d, None) ) index += 1 @@ -1272,7 +1272,9 @@ def test_export_with_image_attachments(self): export_builder = ExportBuilder() 
export_builder.set_survey(survey) with NamedTemporaryFile(suffix=".xlsx") as temp_xls_file: - export_builder.to_xlsx_export(temp_xls_file, xdata) + export_builder.to_xlsx_export( + temp_xls_file, xdata, options={"host": "example.com"} + ) temp_xls_file.seek(0) workbook = load_workbook(temp_xls_file) children_sheet = workbook["exp"] @@ -1884,7 +1886,7 @@ def test_type_conversion(self): survey_name = survey.name indices = {survey_name: 0} data = dict_to_joined_export( - submission_1, 1, indices, survey_name, survey, submission_1 + submission_1, 1, indices, survey_name, survey, submission_1, None ) new_row = export_builder.pre_process_row( data[survey_name], export_builder.sections[0] @@ -1896,7 +1898,7 @@ def test_type_conversion(self): # check missing values dont break and empty values return blank strings indices = {survey_name: 0} data = dict_to_joined_export( - submission_2, 1, indices, survey_name, survey, submission_2 + submission_2, 1, indices, survey_name, survey, submission_2, None ) new_row = export_builder.pre_process_row( data[survey_name], export_builder.sections[0] @@ -1962,7 +1964,7 @@ def test_to_sav_export(self): outputs = [] for d in self.data: outputs.append( - dict_to_joined_export(d, index, indices, survey_name, survey, d) + dict_to_joined_export(d, index, indices, survey_name, survey, d, None) ) index += 1 @@ -2015,7 +2017,7 @@ def test_to_sav_export_language(self): outputs = [] for d in self.data: outputs.append( - dict_to_joined_export(d, index, indices, survey_name, survey, d) + dict_to_joined_export(d, index, indices, survey_name, survey, d, None) ) index += 1 @@ -2509,7 +2511,9 @@ def test_to_sav_export_with_labels(self): outputs = [] for item in self.data: outputs.append( - dict_to_joined_export(item, index, indices, survey_name, survey, item) + dict_to_joined_export( + item, index, indices, survey_name, survey, item, None + ) ) index += 1 diff --git a/onadata/libs/tests/utils/test_export_tools.py 
b/onadata/libs/tests/utils/test_export_tools.py index 4390df7324..62e6dd993c 100644 --- a/onadata/libs/tests/utils/test_export_tools.py +++ b/onadata/libs/tests/utils/test_export_tools.py @@ -321,7 +321,13 @@ def test_get_value_or_attachment_uri(self): key = "photo" value = "123.jpg" val_or_url = get_value_or_attachment_uri( - key, value, row, self.xform, media_xpaths, attachment_list + key, + value, + row, + self.xform, + media_xpaths, + attachment_list, + host="example.com", ) self.assertTrue(val_or_url) @@ -332,7 +338,13 @@ def test_get_value_or_attachment_uri(self): # when include_images is False, you get the value media_xpaths = [] val_or_url = get_value_or_attachment_uri( - key, value, row, self.xform, media_xpaths, attachment_list + key, + value, + row, + self.xform, + media_xpaths, + attachment_list, + host="example.com", ) self.assertTrue(val_or_url) self.assertEqual(value, val_or_url) @@ -344,7 +356,13 @@ def test_get_value_or_attachment_uri(self): media_xpaths = ["photo"] val_or_url = get_value_or_attachment_uri( - key, value, row, self.xform, media_xpaths, attachment_list + key, + value, + row, + self.xform, + media_xpaths, + attachment_list, + host="example.com", ) self.assertTrue(val_or_url) self.assertEqual(value, val_or_url) @@ -381,7 +399,13 @@ def test_get_attachment_uri_for_filename_with_space(self): key = "photo" value = "1 2 3.jpg" val_or_url = get_value_or_attachment_uri( - key, value, row, self.xform, media_xpaths, attachment_list + key, + value, + row, + self.xform, + media_xpaths, + attachment_list, + host="example.com", ) self.assertTrue(val_or_url) @@ -713,7 +737,7 @@ def test_geojson_export_when_submission_deleted(self): } ], } - self.assertEqual(len(geojson['features']), 1) + self.assertEqual(len(geojson["features"]), 1) content = json.loads(content) self.assertEqual(content, geojson) diff --git a/onadata/libs/utils/api_export_tools.py b/onadata/libs/utils/api_export_tools.py index 72df74a44d..ee96000e35 100644 --- 
a/onadata/libs/utils/api_export_tools.py +++ b/onadata/libs/utils/api_export_tools.py @@ -152,6 +152,8 @@ def custom_response_handler( # noqa: C0901 dataview_pk = hasattr(dataview, "pk") and dataview.pk options["dataview_pk"] = dataview_pk + options["host"] = request.get_host() + if dataview: columns_with_hxl = get_columns_with_hxl(xform.survey.get("children")) @@ -249,6 +251,7 @@ def _generate_new_export( # noqa: C0901 "extension": extension, "username": xform.user.username, "id_string": xform.id_string, + "host": request.get_host(), } if query: options["query"] = query diff --git a/onadata/libs/utils/common_tools.py b/onadata/libs/utils/common_tools.py index e5a9e051a1..8eba194e95 100644 --- a/onadata/libs/utils/common_tools.py +++ b/onadata/libs/utils/common_tools.py @@ -237,19 +237,15 @@ def __ne__(self, other): return ComparatorClass -def current_site_url(path): +def current_site_url(path, host): """ Returns fully qualified URL (no trailing slash) for the current site. :param path :return: complete url """ - # pylint: disable=import-outside-toplevel - from django.contrib.sites.models import Site - - current_site = Site.objects.get_current() protocol = getattr(settings, "ONA_SITE_PROTOCOL", "http") port = getattr(settings, "ONA_SITE_PORT", "") - url = f"{protocol}://{current_site.domain}" + url = f"{protocol}://{host}" if port: url += f":{port}" if path: @@ -315,6 +311,7 @@ def get_value_or_attachment_uri( attachment_list=None, show_choice_labels=False, language=None, + host=None, ): """ Gets either the attachment value or the attachment url @@ -339,7 +336,7 @@ def get_value_or_attachment_uri( if a.get("name") == value ] if attachments: - value = current_site_url(attachments[0].get("download_url", "")) + value = current_site_url(attachments[0].get("download_url", ""), host) return value diff --git a/onadata/libs/utils/csv_builder.py b/onadata/libs/utils/csv_builder.py index 0f5182561f..773b8f341a 100644 --- a/onadata/libs/utils/csv_builder.py +++ 
b/onadata/libs/utils/csv_builder.py @@ -253,8 +253,8 @@ def __init__( show_choice_labels=True, include_reviews=False, language=None, + host=None, ): - self.username = username self.id_string = id_string self.filter_query = filter_query @@ -302,6 +302,7 @@ def __init__( self.index_tags = index_tags self.show_choice_labels = show_choice_labels self.language = language + self.host = host self._setup() @@ -464,7 +465,7 @@ def _tag_edit_string(cls, record): @classmethod def _split_gps_fields(cls, record, gps_fields): updated_gps_fields = {} - for (key, value) in iteritems(record): + for key, value in iteritems(record): if key in gps_fields and isinstance(value, str): gps_xpaths = DataDictionary.get_additional_geopoint_xpaths(key) gps_parts = {xpath: None for xpath in gps_xpaths} @@ -552,8 +553,8 @@ def __init__( show_choice_labels=False, include_reviews=False, language=None, + host=None, ): - super().__init__( username, id_string, @@ -576,6 +577,7 @@ def __init__( show_choice_labels, include_reviews, language, + host, ) self.ordered_columns = OrderedDict() @@ -600,6 +602,7 @@ def _reindex( index_tags=DEFAULT_INDEX_TAGS, show_choice_labels=False, language=None, + host=None, ): """ Flatten list columns by appending an index, otherwise return as is @@ -643,7 +646,7 @@ def get_ordered_repeat_value(xpath, repeat_value): # set within a group. 
_item = item - for (nested_key, nested_val) in iteritems(_item): + for nested_key, nested_val in iteritems(_item): # given the key "children/details" and nested_key/ # abbreviated xpath # "children/details/immunization/polio_1", @@ -677,6 +680,7 @@ def get_ordered_repeat_value(xpath, repeat_value): index_tags=index_tags, show_choice_labels=show_choice_labels, language=language, + host=host, ) ) else: @@ -698,6 +702,7 @@ def get_ordered_repeat_value(xpath, repeat_value): include_images, show_choice_labels=show_choice_labels, language=language, + host=host, ) else: record[key] = get_value_or_attachment_uri( @@ -708,6 +713,7 @@ def get_ordered_repeat_value(xpath, repeat_value): include_images, show_choice_labels=show_choice_labels, language=language, + host=host, ) else: # anything that's not a list will be in the top level dict so its @@ -724,6 +730,7 @@ def get_ordered_repeat_value(xpath, repeat_value): include_images, show_choice_labels=show_choice_labels, language=language, + host=host, ) return record @@ -763,7 +770,7 @@ def _update_ordered_columns_from_data(self, cursor): """ # add ordered columns for select multiples if self.split_select_multiples: - for (key, choices) in iteritems(self.select_multiples): + for key, choices in iteritems(self.select_multiples): # HACK to ensure choices are NOT duplicated if key in self.ordered_columns.keys(): self.ordered_columns[key] = remove_dups_from_list_maintain_order( @@ -783,7 +790,7 @@ def _update_ordered_columns_from_data(self, cursor): # add ordered columns for nested repeat data for record in cursor: # re index column repeats - for (key, value) in iteritems(record): + for key, value in iteritems(record): self._reindex( key, value, @@ -795,6 +802,7 @@ def _update_ordered_columns_from_data(self, cursor): index_tags=self.index_tags, show_choice_labels=self.show_choice_labels, language=self.language, + host=self.host, ) def _format_for_dataframe(self, cursor): @@ -818,7 +826,7 @@ def _format_for_dataframe(self, cursor): 
self._tag_edit_string(record) flat_dict = {} # re index repeats - for (key, value) in iteritems(record): + for key, value in iteritems(record): reindexed = self._reindex( key, value, @@ -830,6 +838,7 @@ def _format_for_dataframe(self, cursor): index_tags=self.index_tags, show_choice_labels=self.show_choice_labels, language=self.language, + host=self.host, ) flat_dict.update(reindexed) yield flat_dict diff --git a/onadata/libs/utils/export_builder.py b/onadata/libs/utils/export_builder.py index 705a079c81..9cad67094d 100644 --- a/onadata/libs/utils/export_builder.py +++ b/onadata/libs/utils/export_builder.py @@ -106,7 +106,9 @@ def encode_if_str(row, key, encode_dates=False, sav_writer=None): # pylint: disable=too-many-arguments,too-many-locals,too-many-branches -def dict_to_joined_export(data, index, indices, name, survey, row, media_xpaths=None): +def dict_to_joined_export( + data, index, indices, name, survey, row, host, media_xpaths=None +): """ Converts a dict into one or more tabular datasets :param data: current record which can be changed or updated @@ -129,7 +131,14 @@ def dict_to_joined_export(data, index, indices, name, survey, row, media_xpaths= indices[key] += 1 child_index = indices[key] new_output = dict_to_joined_export( - child, child_index, indices, key, survey, row, media_xpaths + child, + child_index, + indices, + key, + survey, + row, + host, + media_xpaths, ) item = { INDEX: child_index, @@ -163,6 +172,7 @@ def dict_to_joined_export(data, index, indices, name, survey, row, media_xpaths= data_dictionary, media_xpaths, row and row.get(ATTACHMENTS), + host=host, ) return output @@ -922,6 +932,8 @@ def write_row(row, csv_writer, fields): index = 1 indices = {} survey_name = self.survey.name + options = kwargs.get("options") + host = options.get("host") if options else None for i, row_data in enumerate(data, start=1): # decode mongo section names joined_export = dict_to_joined_export( @@ -931,6 +943,7 @@ def write_row(row, csv_writer, fields): 
survey_name, self.survey, row_data, + host, media_xpaths, ) output = decode_mongo_encoded_section_names(joined_export) @@ -1060,6 +1073,9 @@ def write_row(data, work_sheet, fields, work_sheet_titles): index = 1 indices = {} survey_name = self.survey.name + + options = kwargs.get("options") + host = options.get("host") if options else None for i, row_data in enumerate(data, start=1): joined_export = dict_to_joined_export( row_data, @@ -1068,6 +1084,7 @@ def write_row(data, work_sheet, fields, work_sheet_titles): survey_name, self.survey, row_data, + host, media_xpaths, ) output = decode_mongo_encoded_section_names(joined_export) @@ -1122,6 +1139,7 @@ def to_flat_csv_export( xform = kwargs.get("xform") options = kwargs.get("options") total_records = kwargs.get("total_records") + host = options.get("host") if options else None win_excel_utf8 = options.get("win_excel_utf8") if options else False index_tags = options.get(REPEAT_INDEX_TAGS, self.REPEAT_INDEX_TAGS) show_choice_labels = options.get("show_choice_labels", False) @@ -1149,6 +1167,7 @@ def to_flat_csv_export( show_choice_labels=show_choice_labels, include_reviews=self.INCLUDE_REVIEWS, language=language, + host=host, ) csv_builder.export_to(path, dataview=dataview) @@ -1394,6 +1413,9 @@ def write_row(row, sav_writer, fields): index = 1 indices = {} survey_name = self.survey.name + + options = kwargs.get("options") + host = options.get("host") if options else None for i, row_data in enumerate(data, start=1): # decode mongo section names joined_export = dict_to_joined_export( @@ -1403,6 +1425,7 @@ def write_row(row, sav_writer, fields): survey_name, self.survey, row_data, + host, media_xpaths, ) output = decode_mongo_encoded_section_names(joined_export) diff --git a/onadata/libs/utils/user_auth.py b/onadata/libs/utils/user_auth.py index 5e3d9c75cc..ec1f91ab34 100644 --- a/onadata/libs/utils/user_auth.py +++ b/onadata/libs/utils/user_auth.py @@ -34,7 +34,7 @@ class HttpResponseNotAuthorized(HttpResponse): 
status_code = 401 - def __init__(self): + def __init__(self, *args, **kwargs): HttpResponse.__init__(self) self["WWW-Authenticate"] = f'Basic realm="{Site.objects.get_current().name}"' From 3ae3e5c55b1e1d20784dbb76380462d149fd74c5 Mon Sep 17 00:00:00 2001 From: apiyo Date: Tue, 25 Jul 2023 14:09:18 +0300 Subject: [PATCH 041/270] Ensure PROJECT_INVITATION_URL setting works for multipe domains --- docs/projects.rst | 2 +- .../tests/viewsets/test_project_viewset.py | 39 +++++++++++++++++-- onadata/libs/tests/utils/test_email.py | 22 +++++++++-- onadata/libs/utils/email.py | 7 +++- 4 files changed, 61 insertions(+), 9 deletions(-) diff --git a/docs/projects.rst b/docs/projects.rst index 677f76ae1e..f25f20d941 100644 --- a/docs/projects.rst +++ b/docs/projects.rst @@ -650,7 +650,7 @@ adding the setting ``PROJECT_INVITATION_URL`` :: - PROJECT_INVITATION_URL = 'https://example.com/register' + PROJECT_INVITATION_URL = {'*': 'https://example.com/register'} Update a project invitation diff --git a/onadata/apps/api/tests/viewsets/test_project_viewset.py b/onadata/apps/api/tests/viewsets/test_project_viewset.py index 7bea96d600..0e00a3c0eb 100644 --- a/onadata/apps/api/tests/viewsets/test_project_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_project_viewset.py @@ -2841,7 +2841,13 @@ def test_only_admins_allowed(self, mock_send_mail): else: self.assertEqual(response.status_code, 403) - @override_settings(PROJECT_INVITATION_URL="https://example.com/register") + @override_settings( + PROJECT_INVITATION_URL={ + "*": "https://example.com/register", + "onadata.com": "https://onadata.com/register", + } + ) + @override_settings(ALLOWED_HOSTS=["*"]) def test_create_invitation(self, mock_send_mail): """Project invitation can be created""" post_data = { @@ -2882,6 +2888,31 @@ def test_create_invitation(self, mock_send_mail): response = self.view(request, pk=self.project.pk) self.assertEqual(response.status_code, 400) + # Project invitations are created for non-default host + 
request = self.factory.post( + "/", + data=json.dumps(post_data), + content_type="application/json", + **self.extra, + ) + request.META["HTTP_HOST"] = "onadata.com" + response = self.view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 200) + self.assertEqual(self.project.invitations.count(), 1) + invitation = self.project.invitations.first() + self.assertEqual( + response.data, + { + "id": invitation.pk, + "email": "janedoe@example.com", + "role": "editor", + "status": 1, + }, + ) + mock_send_mail.assert_called_once_with( + invitation.pk, "https://example.com/register" + ) + def test_email_required(self, mock_send_mail): """email is required""" # blank string @@ -3073,7 +3104,7 @@ def test_only_admins_allowed(self, mock_send_mail): else: self.assertEqual(response.status_code, 403) - @override_settings(PROJECT_INVITATION_URL="https://example.com/register") + @override_settings(PROJECT_INVITATION_URL={"*": "https://example.com/register"}) def test_update(self, mock_send_mail): """We can update an invitation""" payload = { @@ -3133,7 +3164,7 @@ def test_update_role_only(self, mock_send_mail): ) mock_send_mail.assert_not_called() - @override_settings(PROJECT_INVITATION_URL="https://example.com/register") + @override_settings(PROJECT_INVITATION_URL={"*": "https://example.com/register"}) def test_update_email_only(self, mock_send_mail): """We can update email only""" payload = { @@ -3351,7 +3382,7 @@ def test_only_admins_allowed(self, mock_send_mail): mock_send_mail.assert_not_called() - @override_settings(PROJECT_INVITATION_URL="https://example.com/register") + @override_settings(PROJECT_INVITATION_URL={"*": "https://example.com/register"}) def test_resend_invite(self, mock_send_mail): """Invitation is revoked""" invitation = self.project.invitations.create( diff --git a/onadata/libs/tests/utils/test_email.py b/onadata/libs/tests/utils/test_email.py index 12cc55c9e5..81a9e94e28 100644 --- a/onadata/libs/tests/utils/test_email.py +++ 
b/onadata/libs/tests/utils/test_email.py @@ -26,7 +26,7 @@ def setUp(self): @override_settings( VERIFICATION_URL={ - "stage-testserver": 'https://stage-testserver/email-verification-confirmation', + "stage-testserver": "https://stage-testserver/email-verification-confirmation", "*": None, } ) @@ -89,7 +89,10 @@ def test_get_verification_url(self): self.assertEqual( verification_url, - ("https://stage-testserver/email-verification-confirmation?%s" % string_query_params) + ( + "https://stage-testserver/email-verification-confirmation?%s" + % string_query_params + ), ) def _get_email_data(self, include_redirect_url=False): @@ -215,12 +218,25 @@ def setUp(self): self.custom_request = RequestFactory().get("/path", data={"name": "test"}) - @override_settings(PROJECT_INVITATION_URL="https://example.com/register") + @override_settings(PROJECT_INVITATION_URL={"*": "https://example.com/register"}) def test_url_configured(self): """settings.PROJECT_INVITATION_URL is set""" url = get_project_invitation_url(self.custom_request) self.assertEqual(url, "https://example.com/register") + @override_settings( + PROJECT_INVITATION_URL={ + "*": "https://example.com/register", + "new-domain.com": "https://new-domain.com/register", + } + ) + @override_settings(ALLOWED_HOSTS=["*"]) + def test_url_configured(self): + """settings.PROJECT_INVITATION_URL is set""" + self.custom_request.META["HTTP_HOST"] = "new-domain.com" + url = get_project_invitation_url(self.custom_request) + self.assertEqual(url, "https://new-domain.com/register") + def test_url_not_configured(self): """settings.PROJECT_INVITATION_URL not set""" url = get_project_invitation_url(self.custom_request) diff --git a/onadata/libs/utils/email.py b/onadata/libs/utils/email.py index 559d3c699d..0a4fb035bc 100644 --- a/onadata/libs/utils/email.py +++ b/onadata/libs/utils/email.py @@ -95,7 +95,12 @@ def send_generic_email(email, message_txt, subject): def get_project_invitation_url(request: HttpRequest): """Get project invitation 
url""" - url: str = getattr(settings, "PROJECT_INVITATION_URL", "") + invitation_url_setting: dict = getattr(settings, "PROJECT_INVITATION_URL", {}) + + site_domain = request.get_host() + url = ( + site_domain in invitation_url_setting and invitation_url_setting[site_domain] + ) or ("*" in invitation_url_setting and invitation_url_setting["*"]) if not url: url = reverse("userprofile-list", request=request) From 9e4b0ac663718721e9bc3e5daa939b8889bd93e7 Mon Sep 17 00:00:00 2001 From: apiyo Date: Tue, 25 Jul 2023 14:31:56 +0300 Subject: [PATCH 042/270] Refactor get_host_domain into a function --- .../api/tests/viewsets/test_project_viewset.py | 16 ++++++++++------ onadata/apps/api/tools.py | 8 +++++++- onadata/apps/logger/views.py | 4 ++-- .../libs/serializers/user_profile_serializer.py | 4 ++-- 4 files changed, 21 insertions(+), 11 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_project_viewset.py b/onadata/apps/api/tests/viewsets/test_project_viewset.py index 0e00a3c0eb..0557332d2b 100644 --- a/onadata/apps/api/tests/viewsets/test_project_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_project_viewset.py @@ -2889,6 +2889,10 @@ def test_create_invitation(self, mock_send_mail): self.assertEqual(response.status_code, 400) # Project invitations are created for non-default host + post_data = { + "email": "bobalice@onadata.com", + "role": "editor", + } request = self.factory.post( "/", data=json.dumps(post_data), @@ -2898,20 +2902,20 @@ def test_create_invitation(self, mock_send_mail): request.META["HTTP_HOST"] = "onadata.com" response = self.view(request, pk=self.project.pk) self.assertEqual(response.status_code, 200) - self.assertEqual(self.project.invitations.count(), 1) - invitation = self.project.invitations.first() + self.assertEqual(self.project.invitations.count(), 2) + invitation = self.project.invitations.last() self.assertEqual( response.data, { "id": invitation.pk, - "email": "janedoe@example.com", + "email": "bobalice@onadata.com", 
"role": "editor", "status": 1, }, ) - mock_send_mail.assert_called_once_with( - invitation.pk, "https://example.com/register" - ) + mock_send_mail.assert_called_with( + invitation.pk, "https://onadata.com/register" + ) def test_email_required(self, mock_send_mail): """email is required""" diff --git a/onadata/apps/api/tools.py b/onadata/apps/api/tools.py index ee34913fb3..64832a7a66 100644 --- a/onadata/apps/api/tools.py +++ b/onadata/apps/api/tools.py @@ -753,9 +753,15 @@ def update_role_by_meta_xform_perms(xform): role.add(user, xform) +def get_host_domain(request): + """Get host from reques or check the Site model""" + request_host = request and request.get_host() + return request_host or Site.objects.get_current().domain + + def replace_attachment_name_with_url(data, request): """Replaces the attachment filename with a URL in ``data`` object.""" - site_url = request.get_host() or Site.objects.get_current().domain + site_url = get_host_domain(request) for record in data: attachments: dict = record.json.get("_attachments") diff --git a/onadata/apps/logger/views.py b/onadata/apps/logger/views.py index cf073a54f0..d837be284e 100644 --- a/onadata/apps/logger/views.py +++ b/onadata/apps/logger/views.py @@ -12,7 +12,6 @@ from django.contrib import messages from django.contrib.auth.decorators import login_required from django.contrib.auth import get_user_model -from django.contrib.sites.models import Site from django.core.files import File from django.core.files.storage import get_storage_class from django.http import ( @@ -34,6 +33,7 @@ from onadata.apps.logger.models.attachment import Attachment from onadata.apps.logger.models.instance import Instance from onadata.apps.logger.models.xform import XForm +from onadata.apps.api.tools import get_host_domain from onadata.apps.main.models import MetaData, UserProfile from onadata.libs.exceptions import EnketoError from onadata.libs.utils.decorators import is_owner @@ -91,7 +91,7 @@ def _html_submission_response(request, 
instance): data = {} data["username"] = instance.xform.user.username data["id_string"] = instance.xform.id_string - data["domain"] = request.get_host() or Site.objects.get(id=settings.SITE_ID).domain + data["domain"] = get_host_domain(request) return render(request, "submission.html", data) diff --git a/onadata/libs/serializers/user_profile_serializer.py b/onadata/libs/serializers/user_profile_serializer.py index 7a20cc6aa1..b8660e9bae 100644 --- a/onadata/libs/serializers/user_profile_serializer.py +++ b/onadata/libs/serializers/user_profile_serializer.py @@ -8,7 +8,6 @@ from django.conf import settings from django.contrib.auth import get_user_model from django.contrib.auth.password_validation import validate_password -from django.contrib.sites.models import Site from django.core.cache import cache from django.core.exceptions import ValidationError from django.db import IntegrityError, transaction @@ -22,6 +21,7 @@ from rest_framework import serializers from onadata.apps.api.models.temp_token import TempToken +from onadata.apps.api.tools import get_host_domain from onadata.apps.api.tasks import ( send_verification_email, ) @@ -272,7 +272,7 @@ def create(self, validated_data): metadata = {} username = params.get("username") password = params.get("password1", "") - site = request.get_host() or Site.objects.get(pk=settings.SITE_ID).domain + site = get_host_domain(request) new_user = None try: From 81f07986b2722676af7387a1b33260a5a50dc473 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Wed, 26 Jul 2023 14:23:28 +0300 Subject: [PATCH 043/270] Enhance performance of open-data endpoint `api/v2/open-data//data` (#2456) * enhance performance of the open-data endpoint use raw SQL queries to improve the performance * update docstring * rename variables * add code comment * refactor code * ignore bandit rule Test for SQL injection we are not using string formatting on raw queries or quote placeholders in the SQL strings as per the Django guidelines * refactor code * 
refactor code * remove order by when fetching data for the open-data endpoint order by is hurting the performance of the SQL query when then number of instances is very large * remove string interpolation in SQL query string interpolation exposes the query to SQL injection --- .../tests/viewsets/test_tableau_viewset.py | 11 +++ .../apps/api/viewsets/v2/tableau_viewset.py | 93 +++++++++++++------ onadata/libs/pagination.py | 38 ++++++++ 3 files changed, 112 insertions(+), 30 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_tableau_viewset.py b/onadata/apps/api/tests/viewsets/test_tableau_viewset.py index f4cd9fa1f3..397f647a83 100644 --- a/onadata/apps/api/tests/viewsets/test_tableau_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_tableau_viewset.py @@ -362,3 +362,14 @@ def test_count_query_param(self): response = self.view(request, uuid=uuid) self.assertEqual(response.status_code, 200) self.assertEqual(response.data, {"count": 2}) + + def test_gt_id_query_param(self): + """gt_id query param works""" + self.view = TableauViewSet.as_view({"get": "data"}) + _open_data = get_or_create_opendata(self.xform) + uuid = _open_data[0].uuid + request = self.factory.get("/", data={"gt_id": 500}, **self.extra) + response = self.view(request, uuid=uuid) + self.assertEqual(response.status_code, 200) + row_data = streaming_data(response) + self.assertEqual(len(row_data), 0) diff --git a/onadata/apps/api/viewsets/v2/tableau_viewset.py b/onadata/apps/api/viewsets/v2/tableau_viewset.py index 5e99970d2d..c9c7f4d67d 100644 --- a/onadata/apps/api/viewsets/v2/tableau_viewset.py +++ b/onadata/apps/api/viewsets/v2/tableau_viewset.py @@ -24,6 +24,8 @@ PARENT_TABLE, REPEAT_SELECT_TYPE, ) +from onadata.libs.pagination import RawSQLQueryPageNumberPagination + DEFAULT_TABLE_NAME = "data" GPS_PARTS = ["latitude", "longitude", "altitude", "precision"] @@ -167,6 +169,17 @@ class TableauViewSet(OpenDataViewSet): TableauViewSet - the /api/v2/tableau API endpoin implementation. 
""" + pagination_class = RawSQLQueryPageNumberPagination + data_count = None + + def paginate_queryset(self, queryset): + """Returns a paginated queryset.""" + if self.paginator is None: + return None + return self.paginator.paginate_queryset( + queryset, self.request, view=self, count=self.data_count + ) + @action(methods=["GET"], detail=True) def data(self, request, **kwargs): # pylint: disable=attribute-defined-outside-init @@ -188,41 +201,61 @@ def data(self, request, **kwargs): return Response(status=status.HTTP_404_NOT_FOUND) xform = self.object.content_object - if xform.is_merged_dataset: - qs_kwargs = { - "xform_id__in": list( + + if should_paginate or count: + qs_kwargs = {} + + if xform.is_merged_dataset: + xform_pks = list( xform.mergedxform.xforms.values_list("pk", flat=True) ) - } - else: - qs_kwargs = {"xform_id": xform.pk} + qs_kwargs = {"xform__pk__in": xform_pks} + + else: + qs_kwargs = {"xform__pk": xform.pk} + + if gt_id: + qs_kwargs.update({"id__gt": gt_id}) + + self.data_count = ( + Instance.objects.filter(**qs_kwargs, deleted_at__isnull=True) + .only("pk") + .count() + ) + + if count: + return Response({"count": self.data_count}) + + sql_where = "" + sql_where_params = [] + + # Raw SQL queries are used to improve the performance for large querysets if gt_id: - qs_kwargs.update({"id__gt": gt_id}) - - # Filter out deleted submissions - instances = Instance.objects.filter( - **qs_kwargs, deleted_at__isnull=True - ).only("json") - # we prefer to use len(instances) instead of instances.count() as using - # len is less expensive as no db query is made. 
Read more - # https://docs.djangoproject.com/en/4.2/topics/db/optimization/ - num_instances = len(instances) - - if count: - return Response({"count": num_instances}) - - # there currently exists a peculiar intermittent bug where after ordering - # the queryset and the first item is accessed such as instances[0] or by - # slicing instances[0:1] (as in the the pagination implementation) the - # execution freezes and no result is returned. This causes the server to - # timeout. The workaround below only ensures we order and paginate - # the results only when the queryset returns more than 1 item - if num_instances > 1: - instances = instances.order_by("pk") - - if should_paginate and num_instances > 1: + sql_where += " AND id > %s" + sql_where_params.append(gt_id) + + sql = ( + "SELECT id, json from logger_instance" # nosec + " WHERE xform_id IN %s AND deleted_at IS NULL" + sql_where # noqa W503 + ) + xform_pks = [xform.id] + + if xform.is_merged_dataset: + xform_pks = list(xform.mergedxform.xforms.values_list("pk", flat=True)) + + sql_params = [tuple(xform_pks)] + sql_where_params + + if should_paginate: + offset, limit = self.paginator.get_offset_limit( + self.request, self.data_count + ) + sql += " LIMIT %s OFFSET %s" + instances = Instance.objects.raw(sql, sql_params + [limit, offset]) instances = self.paginate_queryset(instances) + else: + instances = Instance.objects.raw(sql, sql_params) + # Switch out media file names for url links in queryset data = replace_attachment_name_with_url(instances, request) data = process_tableau_data( diff --git a/onadata/libs/pagination.py b/onadata/libs/pagination.py index 10b6317409..24b5095611 100644 --- a/onadata/libs/pagination.py +++ b/onadata/libs/pagination.py @@ -2,6 +2,7 @@ """ Pagination classes. 
""" +from typing import Tuple from django.conf import settings from django.core.paginator import Paginator from django.db.models import QuerySet @@ -137,3 +138,40 @@ def paginate_queryset(self, queryset, request, view, count=None): self.request = request return list(self.page) + + +class RawSQLQueryPaginator(CountOverridablePaginator): + """Paginator class for raw SQL queries""" + + def page(self, number): + """Return page + + self.object_list is NOT sliced because self.object_list should + have been paginated via OFFSET and LIMIT before creating a + RawPaginator instance + """ + number = self.validate_number(number) + return self._get_page(self.object_list, number, self) + + +class RawSQLQueryPageNumberPagination(CountOverridablePageNumberPagination): + """PageNumberPagination class for raw SQL queries""" + + django_paginator_class = RawSQLQueryPaginator + + def get_offset_limit(self, request, count) -> Tuple[int, int]: + """Returns the offset and limit to be used in a raw SQL query""" + page_size = self.get_page_size(request) + # pass an empty object_list since we are not handling any pagination + # at this point, we are specifically interested in the count + paginator = self.django_paginator_class([], page_size, count_override=count) + page_number = paginator.validate_number( + self.get_page_number(request, paginator) + ) + offset = (page_number - 1) * paginator.per_page + limit = offset + paginator.per_page + + if limit + paginator.orphans >= paginator.count: + limit = paginator.count + + return (offset, limit) From 1813f937148bbb034abc10f34e38d6cca5cedecf Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Thu, 27 Jul 2023 11:43:36 +0300 Subject: [PATCH 044/270] Fix AttributeError: 'ExportBuilder' object has no attribute 'to_google_sheets' (#2458) * handle validation for missing google export implementation prevent application from crashing when google export is not enabled by performing validation * refactor test suite * fix failing tests * fix failing tests 
* revert changes made to onadata/libs/tests/utils/test_export_tools.py * add test case * refactor code --- .flake8 | 2 +- .../api/tests/viewsets/test_xform_viewset.py | 1144 +++++++++-------- onadata/apps/api/viewsets/xform_viewset.py | 56 +- onadata/libs/utils/api_export_tools.py | 10 +- 4 files changed, 643 insertions(+), 569 deletions(-) diff --git a/.flake8 b/.flake8 index 55fb4e173f..2edebf42be 100644 --- a/.flake8 +++ b/.flake8 @@ -1,5 +1,5 @@ [flake8] max-line-length = 88 select = C,E,F,W,B,B950 -extend-ignore = E203,E501 +extend-ignore = E203,E501,W503 per-file-ignores = __init__.py:F401 diff --git a/onadata/apps/api/tests/viewsets/test_xform_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_viewset.py index b028d4f3cf..f17b249b29 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_viewset.py @@ -34,7 +34,6 @@ from mock import Mock, patch from onadata.libs.utils.api_export_tools import get_existing_file_format from rest_framework import status -from rest_framework.viewsets import ModelViewSet from onadata.apps.api.tests.mocked_data import ( enketo_error500_mock, @@ -62,7 +61,6 @@ from onadata.apps.main.models import MetaData from onadata.apps.messaging.constants import FORM_UPDATED, XFORM from onadata.apps.viewer.models import Export -from onadata.apps.viewer.models.export import ExportTypeError from onadata.libs.permissions import ( ROLES_ORDERED, DataEntryMinorRole, @@ -114,7 +112,32 @@ def raise_bad_status_line(arg): raise BadStatusLine("RANDOM STATUS") -class TestXFormViewSet(TestAbstractViewSet): +class XFormViewSetBaseTestCase(TestAbstractViewSet): + def _make_submission_over_date_range(self, start, days=1): + self._publish_xls_form_to_project() + + start_time = start + curr_time = start_time + for survey in self.surveys: + _submission_time = curr_time + self._make_submission( + os.path.join( + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "transportation", + 
"instances", + survey, + survey + ".xml", + ), + forced_submission_time=_submission_time, + ) + curr_time += timedelta(days=days) + + +class TestXFormViewSet(XFormViewSetBaseTestCase): """Test XFormViewSet""" def setUp(self): @@ -3559,34 +3582,6 @@ def test_delete_xform_async(self, mock_get_status): self.assertEqual(response.status_code, 404) - @override_settings(CELERY_TASK_ALWAYS_EAGER=True) - @patch("onadata.libs.utils.api_export_tools.AsyncResult") - def test_export_form_data_async(self, async_result): - with HTTMock(enketo_mock): - self._publish_xls_form_to_project() - view = XFormViewSet.as_view( - { - "get": "export_async", - } - ) - formid = self.xform.pk - - for format in ["xlsx", "osm", "csv"]: - request = self.factory.get("/", data={"format": format}, **self.extra) - response = view(request, pk=formid) - self.assertIsNotNone(response.data) - self.assertEqual(response.status_code, 202) - self.assertTrue("job_uuid" in response.data) - task_id = response.data.get("job_uuid") - get_data = {"job_uuid": task_id} - request = self.factory.get("/", data=get_data, **self.extra) - response = view(request, pk=formid) - - self.assertTrue(async_result.called) - self.assertEqual(response.status_code, 202) - export = Export.objects.get(task_id=task_id) - self.assertTrue(export.is_successful) - def test_xform_retrieve_osm_format(self): with HTTMock(enketo_mock): self._publish_xls_form_to_project() @@ -3602,197 +3597,6 @@ def test_xform_retrieve_osm_format(self): response = view(request, pk=formid) self.assertEqual(response.status_code, 200) - @override_settings(CELERY_TASK_ALWAYS_EAGER=True) - @patch("onadata.libs.utils.api_export_tools.AsyncResult") - def test_export_zip_async(self, async_result): - with HTTMock(enketo_mock): - self._publish_xls_form_to_project() - self._make_submissions() - form_view = XFormViewSet.as_view( - { - "get": "retrieve", - } - ) - export_async_view = XFormViewSet.as_view( - { - "get": "export_async", - } - ) - formid = self.xform.pk - fmt = 
"zip" - - request = self.factory.get("/", data={"format": fmt}, **self.extra) - response = export_async_view(request, pk=formid) - self.assertIsNotNone(response.data) - self.assertEqual(response.status_code, 202) - self.assertTrue("job_uuid" in response.data) - task_id = response.data.get("job_uuid") - get_data = {"job_uuid": task_id} - request = self.factory.get("/", data=get_data, **self.extra) - response = export_async_view(request, pk=formid) - - self.assertTrue(async_result.called) - self.assertEqual(response.status_code, 202) - export = Export.objects.get(task_id=task_id) - self.assertTrue(export.is_successful) - - request = self.factory.get("/", **self.extra) - response = form_view(request, pk=formid, format=fmt) - self.assertTrue(response.status_code, 200) - headers = dict(response.items()) - content_disposition = headers["Content-Disposition"] - filename = filename_from_disposition(content_disposition) - basename, ext = os.path.splitext(filename) - self.assertEqual(ext, ".zip") - - @override_settings(CELERY_TASK_ALWAYS_EAGER=True) - @patch("onadata.libs.utils.api_export_tools.AsyncResult") - def test_export_async_connection_error(self, async_result): - with HTTMock(enketo_mock): - async_result.side_effect = ConnectionError( - "Error opening socket: a socket error occurred" - ) - self._publish_xls_form_to_project() - view = XFormViewSet.as_view( - { - "get": "export_async", - } - ) - formid = self.xform.pk - - format = "xlsx" - request = self.factory.get("/", data={"format": format}, **self.extra) - response = view(request, pk=formid) - self.assertIsNotNone(response.data) - self.assertEqual(response.status_code, 202) - self.assertTrue("job_uuid" in response.data) - task_id = response.data.get("job_uuid") - get_data = {"job_uuid": task_id} - request = self.factory.get("/", data=get_data, **self.extra) - response = view(request, pk=formid) - - self.assertTrue(async_result.called) - self.assertEqual(response.status_code, 503) - 
self.assertEqual(response.status_text.upper(), "SERVICE UNAVAILABLE") - self.assertEqual( - response.data["detail"], - "Service temporarily unavailable, try again later.", - ) - export = Export.objects.get(task_id=task_id) - self.assertTrue(export.is_successful) - - @override_settings(CELERY_TASK_ALWAYS_EAGER=True) - @patch("onadata.libs.utils.api_export_tools.AsyncResult") - def test_create_xls_report_async(self, async_result): - with HTTMock(enketo_mock): - self._publish_xls_form_to_project() - self._make_submissions() - - data_value = "template 1|http://xls_server" - self._add_form_metadata(self.xform, "external_export", data_value) - # pylint: disable=no-member - metadata = MetaData.objects.get( - object_id=self.xform.id, data_type="external_export" - ) - paths = [ - os.path.join( - self.main_directory, - "fixtures", - "transportation", - "instances_w_uuid", - s, - s + ".xml", - ) - for s in ["transport_2011-07-25_19-05-36"] - ] - - self._make_submission(paths[0]) - view = XFormViewSet.as_view( - { - "get": "export_async", - } - ) - formid = self.xform.pk - with HTTMock(external_mock): - # External export - request = self.factory.get( - "/", data={"format": "xlsx", "meta": metadata.pk}, **self.extra - ) - response = view(request, pk=formid) - - self.assertIsNotNone(response.data) - self.assertEqual(response.status_code, 202) - self.assertTrue("job_uuid" in response.data) - - data = response.data - get_data = {"job_uuid": data.get("job_uuid")} - - request = self.factory.get("/", data=get_data, **self.extra) - response = view(request, pk=formid, format="xlsx") - self.assertTrue(async_result.called) - self.assertEqual(response.status_code, 202) - - @override_settings(CELERY_TASK_ALWAYS_EAGER=True) - @patch("onadata.libs.utils.api_export_tools.AsyncResult") - def test_create_xls_report_async_with_data_id(self, async_result): - with HTTMock(enketo_mock): - self._publish_xls_form_to_project() - self._make_submissions() - - data_value = "template 1|http://xls_server" 
- self._add_form_metadata(self.xform, "external_export", data_value) - # pylint: disable=no-member - metadata = MetaData.objects.get( - object_id=self.xform.id, data_type="external_export" - ) - paths = [ - os.path.join( - self.main_directory, - "fixtures", - "transportation", - "instances_w_uuid", - s, - s + ".xml", - ) - for s in ["transport_2011-07-25_19-05-36"] - ] - - self._make_submission(paths[0]) - self.assertEqual(self.response.status_code, 201) - - view = XFormViewSet.as_view( - { - "get": "export_async", - } - ) - data = {"meta": metadata.pk, "data_id": self.xform.instances.all()[0].pk} - formid = self.xform.pk - request = self.factory.get("/", data=data, **self.extra) - with HTTMock(external_mock): - # External export - request = self.factory.get( - "/", - data={ - "format": "xlsx", - "meta": metadata.pk, - "data_id": self.xform.instances.all()[0].pk, - }, - **self.extra, - ) - response = view(request, pk=formid) - - self.assertIsNotNone(response.data) - self.assertEqual(response.status_code, 202) - self.assertTrue("job_uuid" in response.data) - - data = response.data - get_data = {"job_uuid": data.get("job_uuid")} - - request = self.factory.get("/", data=get_data, **self.extra) - response = view(request, pk=formid, format="xlsx") - self.assertTrue(async_result.called) - self.assertEqual(response.status_code, 202) - def test_check_async_publish_empty_uuid(self): view = XFormViewSet.as_view({"get": "create_async"}) @@ -4429,52 +4233,10 @@ def test_csv_export_no_new_generated(self): basename, ext = os.path.splitext(filename) self.assertEqual(ext, ".csv") - @override_settings(CELERY_TASK_ALWAYS_EAGER=True) - @patch("onadata.libs.utils.api_export_tools.AsyncResult") - def test_export_csv_data_async_with_remove_group_name(self, async_result): - with HTTMock(enketo_mock): - self._publish_xls_form_to_project() - - view = XFormViewSet.as_view( - { - "get": "export_async", - } - ) - formid = self.xform.pk - - request = self.factory.get( - "/", data={"format": 
"csv", "remove_group_name": True}, **self.extra - ) - response = view(request, pk=formid) - self.assertIsNotNone(response.data) - self.assertEqual(response.status_code, 202) - self.assertTrue("job_uuid" in response.data) - task_id = response.data.get("job_uuid") - - export_pk = Export.objects.all().order_by("pk").reverse()[0].pk - - # metaclaass for mocking results - job = type( - str("AsyncResultMock"), (), {"state": "SUCCESS", "result": export_pk} - ) - async_result.return_value = job - - get_data = {"job_uuid": task_id, "remove_group_name": True} - request = self.factory.get("/", data=get_data, **self.extra) - response = view(request, pk=formid) - - export = Export.objects.last() - self.assertIn(str(export.pk), response.data.get("export_url")) - - self.assertTrue(async_result.called) - self.assertEqual(response.status_code, 202) - export = Export.objects.get(task_id=task_id) - self.assertTrue(export.is_successful) - - def test_xform_linked_dataviews(self): - xlsform_path = os.path.join( - settings.PROJECT_ROOT, "libs", "tests", "utils", "fixtures", "tutorial.xlsx" - ) + def test_xform_linked_dataviews(self): + xlsform_path = os.path.join( + settings.PROJECT_ROOT, "libs", "tests", "utils", "fixtures", "tutorial.xlsx" + ) self._publish_xls_form_to_project(xlsform_path=xlsform_path) for x in range(1, 9): @@ -4617,29 +4379,6 @@ def test_multitple_enketo_urls(self): self.assertEqual(response.status_code, 200) self.assertIn("enketo_url", response.data) - def _make_submission_over_date_range(self, start, days=1): - self._publish_xls_form_to_project() - - start_time = start - curr_time = start_time - for survey in self.surveys: - _submission_time = curr_time - self._make_submission( - os.path.join( - settings.PROJECT_ROOT, - "apps", - "main", - "tests", - "fixtures", - "transportation", - "instances", - survey, - survey + ".xml", - ), - forced_submission_time=_submission_time, - ) - curr_time += timedelta(days=days) - def _validate_csv_export( self, response, 
test_file_path, field=None, test_data=None ): @@ -4706,56 +4445,6 @@ def test_csv_export_filtered_by_date(self): self.assertIn("query", export.options) self.assertEqual(export.options["query"], query_str) - @patch("onadata.libs.utils.api_export_tools.AsyncResult") - def test_export_form_data_async_with_filtered_date(self, async_result): - with HTTMock(enketo_mock): - start_date = datetime(2015, 12, 2, tzinfo=utc) - self._make_submission_over_date_range(start_date) - - first_datetime = start_date.strftime(MONGO_STRFTIME) - second_datetime = start_date + timedelta(days=1, hours=20) - query_str = ( - '{"_submission_time": {"$gte": "' - + first_datetime - + '", "$lte": "' - + second_datetime.strftime(MONGO_STRFTIME) - + '"}}' - ) - count = Export.objects.all().count() - - export_view = XFormViewSet.as_view( - { - "get": "export_async", - } - ) - formid = self.xform.pk - - for export_format in ["csv"]: - request = self.factory.get( - "/", - data={"format": export_format, "query": query_str}, - **self.extra, - ) - response = export_view(request, pk=formid) - self.assertIsNotNone(response.data) - self.assertEqual(response.status_code, 202) - self.assertTrue("job_uuid" in response.data) - self.assertEqual(count + 1, Export.objects.all().count()) - - task_id = response.data.get("job_uuid") - get_data = {"job_uuid": task_id} - request = self.factory.get("/", data=get_data, **self.extra) - response = export_view(request, pk=formid) - - self.assertTrue(async_result.called) - self.assertEqual(response.status_code, 202) - export = Export.objects.get(task_id=task_id) - self.assertTrue(export.is_successful) - - export = Export.objects.last() - self.assertIn("query", export.options) - self.assertEqual(export.options["query"], query_str) - def test_previous_export_with_date_filter_is_returned(self): with HTTMock(enketo_mock): start_date = datetime(2015, 12, 2, tzinfo=utc) @@ -4867,114 +4556,6 @@ def test_normal_export_after_export_with_date_filter(self): # should create a new export 
self.assertEqual(count + 1, Export.objects.all().count()) - @patch("onadata.libs.utils.api_export_tools.AsyncResult") - def test_export_form_data_async_include_labels(self, async_result): - with HTTMock(enketo_mock): - self._publish_xls_form_to_project() - self._make_submissions() - export_view = XFormViewSet.as_view( - { - "get": "export_async", - } - ) - form_view = XFormViewSet.as_view( - { - "get": "retrieve", - } - ) - formid = self.xform.pk - - for export_format in ["csv"]: - request = self.factory.get( - "/", - data={"format": export_format, "include_labels": "true"}, - **self.extra, - ) - response = export_view(request, pk=formid) - self.assertIsNotNone(response.data) - self.assertEqual(response.status_code, 202) - self.assertTrue("job_uuid" in response.data) - task_id = response.data.get("job_uuid") - get_data = {"job_uuid": task_id} - request = self.factory.get("/", data=get_data, **self.extra) - response = export_view(request, pk=formid) - - self.assertTrue(async_result.called) - self.assertEqual(response.status_code, 202) - export = Export.objects.get(task_id=task_id) - self.assertTrue(export.is_successful) - with default_storage.open(export.filepath, "r") as f: - csv_reader = csv.reader(f) - # jump over headers first - next(csv_reader) - labels = next(csv_reader) - self.assertIn("Is ambulance available daily or weekly?", labels) - - request = self.factory.get( - "/", data={"include_labels": "true"}, **self.extra - ) - response = form_view(request, pk=formid, format=export_format) - f = StringIO( - "".join([c.decode("utf-8") for c in response.streaming_content]) - ) - csv_reader = csv.reader(f) - # jump over headers first - next(csv_reader) - labels = next(csv_reader) - self.assertIn("Is ambulance available daily or weekly?", labels) - - @patch("onadata.libs.utils.api_export_tools.AsyncResult") - def test_export_form_data_async_include_labels_only(self, async_result): - with HTTMock(enketo_mock): - self._publish_xls_form_to_project() - 
self._make_submissions() - export_view = XFormViewSet.as_view( - { - "get": "export_async", - } - ) - form_view = XFormViewSet.as_view( - { - "get": "retrieve", - } - ) - formid = self.xform.pk - - for export_format in ["csv"]: - request = self.factory.get( - "/", - data={"format": export_format, "include_labels_only": "true"}, - **self.extra, - ) - response = export_view(request, pk=formid) - self.assertIsNotNone(response.data) - self.assertEqual(response.status_code, 202) - self.assertTrue("job_uuid" in response.data) - task_id = response.data.get("job_uuid") - get_data = {"job_uuid": task_id} - request = self.factory.get("/", data=get_data, **self.extra) - response = export_view(request, pk=formid) - - self.assertTrue(async_result.called) - self.assertEqual(response.status_code, 202) - export = Export.objects.get(task_id=task_id) - self.assertTrue(export.is_successful) - with default_storage.open(export.filepath, "r") as f: - csv_reader = csv.reader(f) - headers = next(csv_reader) - self.assertIn("Is ambulance available daily or weekly?", headers) - - request = self.factory.get( - "/", data={"include_labels_only": "true"}, **self.extra - ) - response = form_view(request, pk=formid, format=export_format) - f = StringIO( - "".join([c.decode("utf-8") for c in response.streaming_content]) - ) - csv_reader = csv.reader(f) - headers = next(csv_reader) - self.assertIn("Is ambulance available daily or weekly?", headers) - @override_settings(ALLOWED_HOSTS=["*"]) def test_csv_exports_w_images_link(self): with HTTMock(enketo_mock): @@ -5099,6 +4680,7 @@ def test_csv_export_with_and_without_labels_only(self): with self.assertRaises(KeyError): self._validate_csv_export(response, None, key, expected_data) + @override_settings(GOOGLE_EXPORT=True) def test_xform_gsheet_exports_disabled_sync_mode(self): xlsform_path = os.path.join( settings.PROJECT_ROOT, "libs", "tests", "utils", "fixtures", "tutorial.xlsx" @@ -5133,34 +4715,6 @@ def 
test_xform_gsheet_exports_disabled_sync_mode(self): self.assertEqual(response.status_code, 403) self.assertEqual(response.data, text_response) - @patch("onadata.libs.utils.api_export_tools._get_google_credential") - def test_xform_gsheet_exports_authorization_url(self, mock_google_creds): - redirect_url = "https://google.com/api/example/authorization_url" - mock_google_creds.return_value = HttpResponseRedirect(redirect_to=redirect_url) - - self._publish_xls_form_to_project() - self._make_submissions() - - view = XFormViewSet.as_view( - { - "get": "export_async", - } - ) - - data = {"format": "gsheets"} - request = self.factory.get("/", data=data, **self.extra) - response = view(request, pk=self.xform.pk) - - self.assertTrue(mock_google_creds.called) - - expected_response = { - "details": "Google authorization needed", - "url": redirect_url, - } - - self.assertEqual(response.status_code, 403) - self.assertEqual(response.data, expected_response) - @flaky def test_sav_zip_export_long_variable_length(self): self._publish_xls_form_to_project() @@ -5187,39 +4741,14 @@ def test_sav_zip_export_long_variable_length(self): response = view(request, pk=self.xform.pk, format="savzip") self.assertEqual(response.status_code, 200) - @override_settings(CELERY_TASK_ALWAYS_EAGER=True) - @patch("onadata.libs.utils.api_export_tools.AsyncResult") - def test_sav_zip_export_long_variable_length_async(self, async_result): + def test_xform_version_count(self): self._publish_xls_form_to_project() + + self._make_submissions() + view = XFormViewSet.as_view( { - "get": "export_async", - } - ) - formid = self.xform.pk - request = self.factory.get("/", data={"format": "savzip"}, **self.extra) - response = view(request, pk=formid) - self.assertIsNotNone(response.data) - self.assertEqual(response.status_code, 202) - self.assertTrue("job_uuid" in response.data) - task_id = response.data.get("job_uuid") - get_data = {"job_uuid": task_id} - request = self.factory.get("/", data=get_data, **self.extra) 
- response = view(request, pk=formid) - - self.assertTrue(async_result.called) - self.assertEqual(response.status_code, 202) - export = Export.objects.get(task_id=task_id) - self.assertTrue(export.is_successful) - - def test_xform_version_count(self): - self._publish_xls_form_to_project() - - self._make_submissions() - - view = XFormViewSet.as_view( - { - "get": "retrieve", + "get": "retrieve", } ) @@ -5441,42 +4970,6 @@ def test_created_by_field_on_cloned_forms(self): cloned_form = XForm.objects.last() self.assertEqual(cloned_form.created_by.username, "alice") - @override_settings(CELERY_TASK_ALWAYS_EAGER=False) - @patch("onadata.libs.utils.api_export_tools.AsyncResult") - def test_pending_export_async(self, async_result): - with HTTMock(enketo_mock): - self._publish_xls_form_to_project() - view = XFormViewSet.as_view( - { - "get": "export_async", - } - ) - formid = self.xform.pk - request = self.factory.get("/", data={"format": "csv"}, **self.extra) - response = view(request, pk=formid) - self.assertIsNotNone(response.data) - self.assertEqual(response.status_code, 202) - self.assertTrue("job_uuid" in response.data) - task_id = response.data.get("job_uuid") - - request = self.factory.get("/", data={"format": "csv"}, **self.extra) - response = view(request, pk=formid) - self.assertIsNotNone(response.data) - self.assertEqual(response.status_code, 202) - self.assertTrue("job_uuid" in response.data) - task_id_two = response.data.get("job_uuid") - - self.assertEqual(task_id, task_id_two) - - get_data = {"job_uuid": task_id_two} - request = self.factory.get("/", data=get_data, **self.extra) - response = view(request, pk=formid) - - self.assertTrue(async_result.called) - self.assertEqual(response.status_code, 202) - export = Export.objects.get(task_id=task_id) - self.assertTrue(export.is_pending) - def test_form_publishing_floip(self): with HTTMock(enketo_mock): xforms = XForm.objects.count() @@ -5591,6 +5084,571 @@ def test_csv_xls_import_errors(self): 
self.assertEqual(response.status_code, 400) self.assertEqual(response.data.get("error"), "csv_file not a csv file") + +class ExportAsyncTestCase(XFormViewSetBaseTestCase): + """Tests for exporting form data asynchronously""" + + def _google_credentials_mock(self): + """Returns a mock of a Google Credentials instance""" + + class GoogleCredentialsMock: + def to_json(self): + return { + "refresh_token": "refresh-token", + "token_uri": "https://oauth2.googleapis.com/token", + "client_id": "client-id", + "client_secret": "client-secret", + "scopes": ["https://www.googleapis.com/auth/drive.file"], + "expiry": datetime(2016, 8, 18, 12, 43, 30, 316792), + } + + return GoogleCredentialsMock() + + def setUp(self): + super().setUp() + + self.view = XFormViewSet.as_view({"get": "export_async"}) + + def test_authentication(self): + """Authentication is required""" + self._publish_xls_form_to_project() + request = self.factory.get("/") + response = self.view(request, pk=self.xform.pk) + self.assertEqual(response.status_code, 404) + + @override_settings(CELERY_TASK_ALWAYS_EAGER=True) + @patch("onadata.libs.utils.api_export_tools.AsyncResult") + def test_export_form_data_async(self, async_result): + with HTTMock(enketo_mock): + self._publish_xls_form_to_project() + view = XFormViewSet.as_view( + { + "get": "export_async", + } + ) + formid = self.xform.pk + + for format in ["xlsx", "osm", "csv"]: + request = self.factory.get("/", data={"format": format}, **self.extra) + response = view(request, pk=formid) + self.assertIsNotNone(response.data) + self.assertEqual(response.status_code, 202) + self.assertTrue("job_uuid" in response.data) + task_id = response.data.get("job_uuid") + get_data = {"job_uuid": task_id} + request = self.factory.get("/", data=get_data, **self.extra) + response = view(request, pk=formid) + + self.assertTrue(async_result.called) + self.assertEqual(response.status_code, 202) + export = Export.objects.get(task_id=task_id) + self.assertTrue(export.is_successful) + 
+ @override_settings(CELERY_TASK_ALWAYS_EAGER=True) + @patch("onadata.libs.utils.api_export_tools.AsyncResult") + def test_export_zip_async(self, async_result): + with HTTMock(enketo_mock): + self._publish_xls_form_to_project() + self._make_submissions() + form_view = XFormViewSet.as_view( + { + "get": "retrieve", + } + ) + export_async_view = XFormViewSet.as_view( + { + "get": "export_async", + } + ) + formid = self.xform.pk + fmt = "zip" + + request = self.factory.get("/", data={"format": fmt}, **self.extra) + response = export_async_view(request, pk=formid) + self.assertIsNotNone(response.data) + self.assertEqual(response.status_code, 202) + self.assertTrue("job_uuid" in response.data) + task_id = response.data.get("job_uuid") + get_data = {"job_uuid": task_id} + request = self.factory.get("/", data=get_data, **self.extra) + response = export_async_view(request, pk=formid) + + self.assertTrue(async_result.called) + self.assertEqual(response.status_code, 202) + export = Export.objects.get(task_id=task_id) + self.assertTrue(export.is_successful) + + request = self.factory.get("/", **self.extra) + response = form_view(request, pk=formid, format=fmt) + self.assertTrue(response.status_code, 200) + headers = dict(response.items()) + content_disposition = headers["Content-Disposition"] + filename = filename_from_disposition(content_disposition) + basename, ext = os.path.splitext(filename) + self.assertEqual(ext, ".zip") + + @override_settings(CELERY_TASK_ALWAYS_EAGER=True) + @patch("onadata.libs.utils.api_export_tools.AsyncResult") + def test_export_async_connection_error(self, async_result): + with HTTMock(enketo_mock): + async_result.side_effect = ConnectionError( + "Error opening socket: a socket error occurred" + ) + self._publish_xls_form_to_project() + view = XFormViewSet.as_view( + { + "get": "export_async", + } + ) + formid = self.xform.pk + format = "xlsx" + request = self.factory.get("/", data={"format": format}, **self.extra) + response = view(request, 
pk=formid) + self.assertIsNotNone(response.data) + self.assertEqual(response.status_code, 202) + self.assertTrue("job_uuid" in response.data) + task_id = response.data.get("job_uuid") + get_data = {"job_uuid": task_id} + request = self.factory.get("/", data=get_data, **self.extra) + response = view(request, pk=formid) + + self.assertTrue(async_result.called) + self.assertEqual(response.status_code, 503) + self.assertEqual(response.status_text.upper(), "SERVICE UNAVAILABLE") + self.assertEqual( + response.data["detail"], + "Service temporarily unavailable, try again later.", + ) + export = Export.objects.get(task_id=task_id) + self.assertTrue(export.is_successful) + + @override_settings(CELERY_TASK_ALWAYS_EAGER=True) + @patch("onadata.libs.utils.api_export_tools.AsyncResult") + def test_create_xls_report_async(self, async_result): + with HTTMock(enketo_mock): + self._publish_xls_form_to_project() + self._make_submissions() + + data_value = "template 1|http://xls_server" + self._add_form_metadata(self.xform, "external_export", data_value) + # pylint: disable=no-member + metadata = MetaData.objects.get( + object_id=self.xform.id, data_type="external_export" + ) + paths = [ + os.path.join( + self.main_directory, + "fixtures", + "transportation", + "instances_w_uuid", + s, + s + ".xml", + ) + for s in ["transport_2011-07-25_19-05-36"] + ] + + self._make_submission(paths[0]) + view = XFormViewSet.as_view( + { + "get": "export_async", + } + ) + formid = self.xform.pk + with HTTMock(external_mock): + # External export + request = self.factory.get( + "/", data={"format": "xlsx", "meta": metadata.pk}, **self.extra + ) + response = view(request, pk=formid) + + self.assertIsNotNone(response.data) + self.assertEqual(response.status_code, 202) + self.assertTrue("job_uuid" in response.data) + + data = response.data + get_data = {"job_uuid": data.get("job_uuid")} + + request = self.factory.get("/", data=get_data, **self.extra) + response = view(request, pk=formid, format="xlsx") + 
self.assertTrue(async_result.called) + self.assertEqual(response.status_code, 202) + + @override_settings(CELERY_TASK_ALWAYS_EAGER=True) + @patch("onadata.libs.utils.api_export_tools.AsyncResult") + def test_create_xls_report_async_with_data_id(self, async_result): + with HTTMock(enketo_mock): + self._publish_xls_form_to_project() + self._make_submissions() + + data_value = "template 1|http://xls_server" + self._add_form_metadata(self.xform, "external_export", data_value) + # pylint: disable=no-member + metadata = MetaData.objects.get( + object_id=self.xform.id, data_type="external_export" + ) + paths = [ + os.path.join( + self.main_directory, + "fixtures", + "transportation", + "instances_w_uuid", + s, + s + ".xml", + ) + for s in ["transport_2011-07-25_19-05-36"] + ] + + self._make_submission(paths[0]) + self.assertEqual(self.response.status_code, 201) + + view = XFormViewSet.as_view( + { + "get": "export_async", + } + ) + data = {"meta": metadata.pk, "data_id": self.xform.instances.all()[0].pk} + formid = self.xform.pk + request = self.factory.get("/", data=data, **self.extra) + with HTTMock(external_mock): + # External export + request = self.factory.get( + "/", + data={ + "format": "xlsx", + "meta": metadata.pk, + "data_id": self.xform.instances.all()[0].pk, + }, + **self.extra, + ) + response = view(request, pk=formid) + + self.assertIsNotNone(response.data) + self.assertEqual(response.status_code, 202) + self.assertTrue("job_uuid" in response.data) + + data = response.data + get_data = {"job_uuid": data.get("job_uuid")} + + request = self.factory.get("/", data=get_data, **self.extra) + response = view(request, pk=formid, format="xlsx") + self.assertTrue(async_result.called) + self.assertEqual(response.status_code, 202) + + @override_settings(CELERY_TASK_ALWAYS_EAGER=True) + @patch("onadata.libs.utils.api_export_tools.AsyncResult") + def test_export_csv_data_async_with_remove_group_name(self, async_result): + with HTTMock(enketo_mock): + 
self._publish_xls_form_to_project() + + view = XFormViewSet.as_view( + { + "get": "export_async", + } + ) + formid = self.xform.pk + + request = self.factory.get( + "/", data={"format": "csv", "remove_group_name": True}, **self.extra + ) + response = view(request, pk=formid) + self.assertIsNotNone(response.data) + self.assertEqual(response.status_code, 202) + self.assertTrue("job_uuid" in response.data) + task_id = response.data.get("job_uuid") + + export_pk = Export.objects.all().order_by("pk").reverse()[0].pk + + # metaclaass for mocking results + job = type( + str("AsyncResultMock"), (), {"state": "SUCCESS", "result": export_pk} + ) + async_result.return_value = job + + get_data = {"job_uuid": task_id, "remove_group_name": True} + request = self.factory.get("/", data=get_data, **self.extra) + response = view(request, pk=formid) + + export = Export.objects.last() + self.assertIn(str(export.pk), response.data.get("export_url")) + + self.assertTrue(async_result.called) + self.assertEqual(response.status_code, 202) + export = Export.objects.get(task_id=task_id) + self.assertTrue(export.is_successful) + + @patch("onadata.libs.utils.api_export_tools.AsyncResult") + def test_export_form_data_async_with_filtered_date(self, async_result): + with HTTMock(enketo_mock): + start_date = datetime(2015, 12, 2, tzinfo=utc) + self._make_submission_over_date_range(start_date) + + first_datetime = start_date.strftime(MONGO_STRFTIME) + second_datetime = start_date + timedelta(days=1, hours=20) + query_str = ( + '{"_submission_time": {"$gte": "' + + first_datetime + + '", "$lte": "' + + second_datetime.strftime(MONGO_STRFTIME) + + '"}}' + ) + count = Export.objects.all().count() + + export_view = XFormViewSet.as_view( + { + "get": "export_async", + } + ) + formid = self.xform.pk + + for export_format in ["csv"]: + request = self.factory.get( + "/", + data={"format": export_format, "query": query_str}, + **self.extra, + ) + response = export_view(request, pk=formid) + 
self.assertIsNotNone(response.data) + self.assertEqual(response.status_code, 202) + self.assertTrue("job_uuid" in response.data) + self.assertEqual(count + 1, Export.objects.all().count()) + + task_id = response.data.get("job_uuid") + get_data = {"job_uuid": task_id} + request = self.factory.get("/", data=get_data, **self.extra) + response = export_view(request, pk=formid) + + self.assertTrue(async_result.called) + self.assertEqual(response.status_code, 202) + export = Export.objects.get(task_id=task_id) + self.assertTrue(export.is_successful) + + export = Export.objects.last() + self.assertIn("query", export.options) + self.assertEqual(export.options["query"], query_str) + + @patch("onadata.libs.utils.api_export_tools.AsyncResult") + def test_export_form_data_async_include_labels(self, async_result): + with HTTMock(enketo_mock): + self._publish_xls_form_to_project() + self._make_submissions() + export_view = XFormViewSet.as_view( + { + "get": "export_async", + } + ) + form_view = XFormViewSet.as_view( + { + "get": "retrieve", + } + ) + formid = self.xform.pk + + for export_format in ["csv"]: + request = self.factory.get( + "/", + data={"format": export_format, "include_labels": "true"}, + **self.extra, + ) + response = export_view(request, pk=formid) + self.assertIsNotNone(response.data) + self.assertEqual(response.status_code, 202) + self.assertTrue("job_uuid" in response.data) + task_id = response.data.get("job_uuid") + get_data = {"job_uuid": task_id} + request = self.factory.get("/", data=get_data, **self.extra) + response = export_view(request, pk=formid) + + self.assertTrue(async_result.called) + self.assertEqual(response.status_code, 202) + export = Export.objects.get(task_id=task_id) + self.assertTrue(export.is_successful) + with default_storage.open(export.filepath, "r") as f: + csv_reader = csv.reader(f) + # jump over headers first + next(csv_reader) + labels = next(csv_reader) + self.assertIn("Is ambulance available daily or weekly?", labels) + + 
request = self.factory.get( + "/", data={"include_labels": "true"}, **self.extra + ) + response = form_view(request, pk=formid, format=export_format) + f = StringIO( + "".join([c.decode("utf-8") for c in response.streaming_content]) + ) + csv_reader = csv.reader(f) + # jump over headers first + next(csv_reader) + labels = next(csv_reader) + self.assertIn("Is ambulance available daily or weekly?", labels) + + @patch("onadata.libs.utils.api_export_tools.AsyncResult") + def test_export_form_data_async_include_labels_only(self, async_result): + with HTTMock(enketo_mock): + self._publish_xls_form_to_project() + self._make_submissions() + export_view = XFormViewSet.as_view( + { + "get": "export_async", + } + ) + form_view = XFormViewSet.as_view( + { + "get": "retrieve", + } + ) + formid = self.xform.pk + + for export_format in ["csv"]: + request = self.factory.get( + "/", + data={"format": export_format, "include_labels_only": "true"}, + **self.extra, + ) + response = export_view(request, pk=formid) + self.assertIsNotNone(response.data) + self.assertEqual(response.status_code, 202) + self.assertTrue("job_uuid" in response.data) + task_id = response.data.get("job_uuid") + get_data = {"job_uuid": task_id} + request = self.factory.get("/", data=get_data, **self.extra) + response = export_view(request, pk=formid) + + self.assertTrue(async_result.called) + self.assertEqual(response.status_code, 202) + export = Export.objects.get(task_id=task_id) + self.assertTrue(export.is_successful) + with default_storage.open(export.filepath, "r") as f: + csv_reader = csv.reader(f) + headers = next(csv_reader) + self.assertIn("Is ambulance available daily or weekly?", headers) + + request = self.factory.get( + "/", data={"include_labels_only": "true"}, **self.extra + ) + response = form_view(request, pk=formid, format=export_format) + f = StringIO( + "".join([c.decode("utf-8") for c in response.streaming_content]) + ) + csv_reader = csv.reader(f) + headers = next(csv_reader) + 
self.assertIn("Is ambulance available daily or weekly?", headers) + + @override_settings(GOOGLE_EXPORT=True) + @patch("onadata.libs.utils.api_export_tools._get_google_credential") + def test_xform_gsheet_exports_authorization_url(self, mock_google_creds): + redirect_url = "https://google.com/api/example/authorization_url" + mock_google_creds.return_value = HttpResponseRedirect(redirect_to=redirect_url) + + self._publish_xls_form_to_project() + self._make_submissions() + + view = XFormViewSet.as_view( + { + "get": "export_async", + } + ) + + data = {"format": "gsheets"} + request = self.factory.get("/", data=data, **self.extra) + response = view(request, pk=self.xform.pk) + + self.assertTrue(mock_google_creds.called) + + expected_response = { + "details": "Google authorization needed", + "url": redirect_url, + } + + self.assertEqual(response.status_code, 403) + self.assertEqual(response.data, expected_response) + + @override_settings(GOOGLE_EXPORT=False) + @patch("onadata.libs.utils.api_export_tools._get_google_credential") + def test_google_exports_setting_false(self, mock_google_creds): + """Google sheet export not allowed if setting.GOOGLE_EXPORT is false""" + mock_google_creds.return_value = self._google_credentials_mock() + self._publish_xls_form_to_project() + data = {"format": "gsheets"} + request = self.factory.get("/", data=data, **self.extra) + response = self.view(request, pk=self.xform.pk) + expected_response = {"details": "Export format not supported"} + self.assertEqual(response.status_code, 403) + self.assertEqual(response.data, expected_response) + + @patch("onadata.libs.utils.api_export_tools._get_google_credential") + def test_google_exports_setting_missing(self, mock_google_creds): + """Google sheet export not allowed if setting.GOOGLE_EXPORT is missing""" + mock_google_creds.return_value = self._google_credentials_mock() + self._publish_xls_form_to_project() + data = {"format": "gsheets"} + request = self.factory.get("/", data=data, **self.extra) 
+ response = self.view(request, pk=self.xform.pk) + expected_response = {"details": "Export format not supported"} + self.assertEqual(response.status_code, 403) + self.assertEqual(response.data, expected_response) + + @override_settings(CELERY_TASK_ALWAYS_EAGER=True) + @patch("onadata.libs.utils.api_export_tools.AsyncResult") + def test_sav_zip_export_long_variable_length_async(self, async_result): + self._publish_xls_form_to_project() + view = XFormViewSet.as_view( + { + "get": "export_async", + } + ) + formid = self.xform.pk + request = self.factory.get("/", data={"format": "savzip"}, **self.extra) + response = view(request, pk=formid) + self.assertIsNotNone(response.data) + self.assertEqual(response.status_code, 202) + self.assertTrue("job_uuid" in response.data) + task_id = response.data.get("job_uuid") + get_data = {"job_uuid": task_id} + request = self.factory.get("/", data=get_data, **self.extra) + response = view(request, pk=formid) + + self.assertTrue(async_result.called) + self.assertEqual(response.status_code, 202) + export = Export.objects.get(task_id=task_id) + self.assertTrue(export.is_successful) + + @override_settings(CELERY_TASK_ALWAYS_EAGER=False) + @patch("onadata.libs.utils.api_export_tools.AsyncResult") + def test_pending_export_async(self, async_result): + with HTTMock(enketo_mock): + self._publish_xls_form_to_project() + view = XFormViewSet.as_view( + { + "get": "export_async", + } + ) + formid = self.xform.pk + request = self.factory.get("/", data={"format": "csv"}, **self.extra) + response = view(request, pk=formid) + self.assertIsNotNone(response.data) + self.assertEqual(response.status_code, 202) + self.assertTrue("job_uuid" in response.data) + task_id = response.data.get("job_uuid") + + request = self.factory.get("/", data={"format": "csv"}, **self.extra) + response = view(request, pk=formid) + self.assertIsNotNone(response.data) + self.assertEqual(response.status_code, 202) + self.assertTrue("job_uuid" in response.data) + task_id_two = 
response.data.get("job_uuid") + + self.assertEqual(task_id, task_id_two) + + get_data = {"job_uuid": task_id_two} + request = self.factory.get("/", data=get_data, **self.extra) + response = view(request, pk=formid) + + self.assertTrue(async_result.called) + self.assertEqual(response.status_code, 202) + export = Export.objects.get(task_id=task_id) + self.assertTrue(export.is_pending) + def test_export_csvzip_form_data_async(self): with HTTMock(enketo_mock): xls_path = os.path.join( diff --git a/onadata/apps/api/viewsets/xform_viewset.py b/onadata/apps/api/viewsets/xform_viewset.py index 46ac045c9e..9ad28451dc 100644 --- a/onadata/apps/api/viewsets/xform_viewset.py +++ b/onadata/apps/api/viewsets/xform_viewset.py @@ -80,6 +80,7 @@ get_existing_file_format, process_async_export, response_for_format, + _get_export_type, ) from onadata.libs.utils.cache_tools import PROJ_OWNER_CACHE, safe_delete from onadata.libs.utils.common_tools import json_stream @@ -863,28 +864,24 @@ def versions(self, request, *args, **kwargs): @action(methods=["GET"], detail=True) def export_async(self, request, *args, **kwargs): """Returns the status of an async export.""" - job_uuid = request.query_params.get("job_uuid") - export_type = request.query_params.get("format") - query = request.query_params.get("query") xform = self.get_object() + export_type = request.query_params.get("format") - token = request.query_params.get("token") - meta = request.query_params.get("meta") - data_id = request.query_params.get("data_id") - options = parse_request_export_options(request.query_params) - options["host"] = request.get_host() - - options.update( - { - "meta": meta, - "token": token, - "data_id": data_id, - } - ) - if query: - options.update({"query": query}) + if export_type: + try: + _get_export_type(export_type) - if request.query_params.get("format") in ["csvzip", "savzip"]: + except exceptions.ParseError: + payload = {"details": _("Export format not supported")} + return Response( + data=payload, 
+ status=status.HTTP_403_FORBIDDEN, + content_type="application/json", + ) + + job_uuid = request.query_params.get("job_uuid") + + if export_type in ["csvzip", "savzip"]: # Overide renderer and mediatype because all response are # suppose to be in json # TODO: Avoid overiding the format query param for export type @@ -903,6 +900,23 @@ def export_async(self, request, *args, **kwargs): except NameError: resp = get_async_response(job_uuid, request, xform) else: + query = request.query_params.get("query") + token = request.query_params.get("token") + meta = request.query_params.get("meta") + data_id = request.query_params.get("data_id") + options = parse_request_export_options(request.query_params) + options["host"] = request.get_host() + options.update( + { + "meta": meta, + "token": token, + "data_id": data_id, + } + ) + + if query: + options.update({"query": query}) + resp = process_async_export(request, xform, export_type, options) if isinstance(resp, HttpResponseRedirect): @@ -917,7 +931,9 @@ def export_async(self, request, *args, **kwargs): self.etag_data = f"{timezone.now()}" return Response( - data=resp, status=status.HTTP_202_ACCEPTED, content_type="application/json" + data=resp, + status=status.HTTP_202_ACCEPTED, + content_type="application/json", ) def _get_streaming_response(self): diff --git a/onadata/libs/utils/api_export_tools.py b/onadata/libs/utils/api_export_tools.py index ee96000e35..d738c86b7e 100644 --- a/onadata/libs/utils/api_export_tools.py +++ b/onadata/libs/utils/api_export_tools.py @@ -114,14 +114,15 @@ def include_hxl_row(dv_columns, hxl_columns): def _get_export_type(export_type): - if export_type in list(EXPORT_EXT): - export_type = EXPORT_EXT[export_type] - else: + if export_type not in EXPORT_EXT or ( + export_type == Export.GOOGLE_SHEETS_EXPORT + and not getattr(settings, "GOOGLE_EXPORT", False) + ): raise exceptions.ParseError( _(f"'{export_type}' format not known or not implemented!") ) - return export_type + return 
EXPORT_EXT[export_type] # pylint: disable=too-many-arguments, too-many-locals, too-many-branches @@ -181,7 +182,6 @@ def custom_response_handler( # noqa: C0901 export = get_object_or_404(Export, id=export_id, xform=xform) else: if export_type == Export.GOOGLE_SHEETS_EXPORT: - return Response( data=json.dumps( {"details": _("Sheets export only supported in async mode")} From 560635f6588ac9743047a0dfc0abd7c381335175 Mon Sep 17 00:00:00 2001 From: apiyo Date: Fri, 28 Jul 2023 12:07:27 +0300 Subject: [PATCH 045/270] Tag release v3.11.0 --- CHANGES.rst | 19 +++++++++++++++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 21 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 7aafe57b90..f564441180 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,25 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.11.0(2023-07-28) +------------------- + +- Ensure onadata can work in a multi-domain setup + `PR #2450 ` + [@FrankApiyo] +- Fix AttributeError: 'ExportBuilder' object has no attribute 'to_google_sheets' + `PR #2458 ` + [@kelvin-muchiri] +- Enhance performance of open-data endpoint api/v2/open-data//data + `PR #2456 ` + [@kelvin-muchiri] +- Fix AttributeError: 'NoneType' object has no attribute 'strip' when exporting form data + `PR #2453 ` + [@kelvin-muchiri] +- Add ability to create, update project invitations + `PR #2430 ` + [@kelvin-muchiri] + v3.10.1(2023-07-20) ------------------- diff --git a/onadata/__init__.py b/onadata/__init__.py index 671e4cfd42..e0e6a6c193 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.10.1" +__version__ = "3.11.0" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index 02ce64bd06..483b96ab19 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.10.1 +version = 3.11.0 description = 
Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 9549f422528cc4bbd813873a7f9783a584d42a92 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 8 Mar 2023 10:57:16 +0300 Subject: [PATCH 046/270] Add indexes to instance model date fields Signed-off-by: Kipchirchir Sigei --- onadata/apps/logger/models/instance.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index 3bc59b1221..709530bb3a 100644 --- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -652,19 +652,19 @@ class Instance(models.Model, InstanceBaseClass): survey_type = models.ForeignKey("logger.SurveyType", on_delete=models.PROTECT) # shows when we first received this instance - date_created = models.DateTimeField(auto_now_add=True) + date_created = models.DateTimeField(auto_now_add=True, db_index=True) # this will end up representing "date last parsed" - date_modified = models.DateTimeField(auto_now=True) + date_modified = models.DateTimeField(auto_now=True, db_index=True) # this will end up representing "date instance was deleted" - deleted_at = models.DateTimeField(null=True, default=None) + deleted_at = models.DateTimeField(null=True, default=None, db_index=True) deleted_by = models.ForeignKey( User, related_name="deleted_instances", null=True, on_delete=models.SET_NULL ) # this will be edited when we need to create a new InstanceHistory object - last_edited = models.DateTimeField(null=True, default=None) + last_edited = models.DateTimeField(null=True, default=None, db_index=True) # ODK keeps track of three statuses for an instance: # incomplete, submitted, complete From fe4c26f276d6b434101f80fc67bd042c978dfcad Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 8 Mar 2023 10:58:32 +0300 Subject: [PATCH 047/270] Add migration for adding indexes to date fields on the instance model 
Signed-off-by: Kipchirchir Sigei --- .../migrations/0005_auto_20230308_0252.py | 23 +++++++++++++++++++ onadata/apps/logger/models/instance.py | 4 ++-- 2 files changed, 25 insertions(+), 2 deletions(-) create mode 100644 onadata/apps/logger/migrations/0005_auto_20230308_0252.py diff --git a/onadata/apps/logger/migrations/0005_auto_20230308_0252.py b/onadata/apps/logger/migrations/0005_auto_20230308_0252.py new file mode 100644 index 0000000000..23e17c9524 --- /dev/null +++ b/onadata/apps/logger/migrations/0005_auto_20230308_0252.py @@ -0,0 +1,23 @@ +# Generated by Django 3.2.16 on 2023-03-08 07:52 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('logger', '0004_update_instance_geoms'), + ] + + operations = [ + migrations.AlterField( + model_name='instance', + name='date_created', + field=models.DateTimeField(auto_now_add=True, db_index=True), + ), + migrations.AlterField( + model_name='instance', + name='date_modified', + field=models.DateTimeField(auto_now=True, db_index=True), + ), + ] diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index 709530bb3a..942e0c25f3 100644 --- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -658,13 +658,13 @@ class Instance(models.Model, InstanceBaseClass): date_modified = models.DateTimeField(auto_now=True, db_index=True) # this will end up representing "date instance was deleted" - deleted_at = models.DateTimeField(null=True, default=None, db_index=True) + deleted_at = models.DateTimeField(null=True, default=None) deleted_by = models.ForeignKey( User, related_name="deleted_instances", null=True, on_delete=models.SET_NULL ) # this will be edited when we need to create a new InstanceHistory object - last_edited = models.DateTimeField(null=True, default=None, db_index=True) + last_edited = models.DateTimeField(null=True, default=None) # ODK keeps track of three statuses for an instance: # 
incomplete, submitted, complete From d8d1febd7e46b4e770fac9b23f787a42deda3a76 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 9 Mar 2023 10:07:49 +0300 Subject: [PATCH 048/270] Apply indexes concurrently to prevent lock writes Signed-off-by: Kipchirchir Sigei --- .../migrations/0004_update_instance_geoms.py | 13 +++-- .../migrations/0005_auto_20230308_0252.py | 47 ++++++++++++++----- 2 files changed, 44 insertions(+), 16 deletions(-) diff --git a/onadata/apps/logger/migrations/0004_update_instance_geoms.py b/onadata/apps/logger/migrations/0004_update_instance_geoms.py index 59fb98c4db..6ef70c3fe6 100644 --- a/onadata/apps/logger/migrations/0004_update_instance_geoms.py +++ b/onadata/apps/logger/migrations/0004_update_instance_geoms.py @@ -2,16 +2,19 @@ from django.db import migrations from onadata.apps.logger.models.instance import Instance +from onadata.libs.utils.model_tools import queryset_iterator def update_instance_geoms(apps, schema_editor): """ Update instance geom field with valid geom values """ - for inst in Instance.objects.filter( - deleted_at__isnull=True, - xform__downloadable=True, - xform__deleted_at__isnull=True, + for inst in queryset_iterator( + Instance.objects.filter( + deleted_at__isnull=True, + xform__downloadable=True, + xform__deleted_at__isnull=True, + ) ): if inst.geom and inst.geom.empty: inst.geom = None @@ -21,7 +24,7 @@ def update_instance_geoms(apps, schema_editor): class Migration(migrations.Migration): dependencies = [ - ('logger', '0003_alter_instance_media_all_received'), + ("logger", "0003_alter_instance_media_all_received"), ] operations = [migrations.RunPython(update_instance_geoms)] diff --git a/onadata/apps/logger/migrations/0005_auto_20230308_0252.py b/onadata/apps/logger/migrations/0005_auto_20230308_0252.py index 23e17c9524..485a8bfa6f 100644 --- a/onadata/apps/logger/migrations/0005_auto_20230308_0252.py +++ b/onadata/apps/logger/migrations/0005_auto_20230308_0252.py @@ -4,20 +4,45 @@ class 
Migration(migrations.Migration): + atomic = False dependencies = [ - ('logger', '0004_update_instance_geoms'), + ("logger", "0004_update_instance_geoms"), ] operations = [ - migrations.AlterField( - model_name='instance', - name='date_created', - field=models.DateTimeField(auto_now_add=True, db_index=True), - ), - migrations.AlterField( - model_name='instance', - name='date_modified', - field=models.DateTimeField(auto_now=True, db_index=True), - ), + migrations.SeparateDatabaseAndState( + database_operations=[ + migrations.RunSQL( + sql=""" + CREATE INDEX "logger_instance_date_created_b2427770" + ON "logger_instance" ("date_created"); + """, + reverse_sql=""" + DROP INDEX "logger_instance_date_created_b2427770"; + """, + ), + migrations.RunSQL( + sql=""" + CREATE INDEX "logger_instance_date_modified_a32599fc" + ON "logger_instance" ("date_modified"); + """, + reverse_sql=""" + DROP INDEX "logger_instance_date_modified_a32599fc"; + """, + ), + ], + state_operations=[ + migrations.AlterField( + model_name="instance", + name="date_created", + field=models.DateTimeField(auto_now_add=True, db_index=True), + ), + migrations.AlterField( + model_name="instance", + name="date_modified", + field=models.DateTimeField(auto_now=True, db_index=True), + ), + ], + ) ] From fa6f2def3dc5ff11d7cc2e7a64acd08fbcb72b79 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Tue, 14 Mar 2023 16:47:37 +0300 Subject: [PATCH 049/270] cleanup Signed-off-by: Kipchirchir Sigei --- .../0005_add_date_fields_indexing.py | 54 +++++++++++++++++++ .../migrations/0005_auto_20230308_0252.py | 48 ----------------- onadata/apps/logger/models/instance.py | 9 +++- 3 files changed, 61 insertions(+), 50 deletions(-) create mode 100644 onadata/apps/logger/migrations/0005_add_date_fields_indexing.py delete mode 100644 onadata/apps/logger/migrations/0005_auto_20230308_0252.py diff --git a/onadata/apps/logger/migrations/0005_add_date_fields_indexing.py 
b/onadata/apps/logger/migrations/0005_add_date_fields_indexing.py new file mode 100644 index 0000000000..f5af866ac6 --- /dev/null +++ b/onadata/apps/logger/migrations/0005_add_date_fields_indexing.py @@ -0,0 +1,54 @@ +# Generated by Django 3.2.18 on 2023-03-14 12:22 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + atomic = False + + dependencies = [ + ('logger', '0004_update_instance_geoms'), + ] + + operations = [ + migrations.SeparateDatabaseAndState( + database_operations=[ + migrations.RunSQL( + sql='CREATE INDEX CONCURRENTLY "logger_inst_date_cr_42899d_idx" ON "logger_instance" ("date_created");', + reverse_sql='DROP INDEX "logger_inst_date_cr_42899d_idx";', + ), + migrations.RunSQL( + sql='CREATE INDEX CONCURRENTLY "logger_inst_date_mo_5a1bd3_idx" ON "logger_instance" ("date_modified");', + reverse_sql='DROP INDEX "logger_inst_date_mo_5a1bd3_idx";', + ), + migrations.RunSQL( + sql='CREATE INDEX CONCURRENTLY "logger_inst_deleted_da31a3_idx" ON "logger_instance" ("deleted_at");', + reverse_sql='DROP INDEX "logger_inst_deleted_da31a3_idx";', + ), + ], + state_operations=[ + migrations.AddIndex( + model_name="instance", + index=models.Index( + fields=["date_created"], name="logger_inst_date_cr_42899d_idx" + ), + ), + migrations.AddIndex( + model_name="instance", + index=models.Index( + fields=["date_modified"], name="logger_inst_date_mo_5a1bd3_idx" + ), + ), + migrations.AddIndex( + model_name="instance", + index=models.Index( + fields=["deleted_at"], name="logger_inst_deleted_da31a3_idx" + ), + ), + ], + ) + ] + + + + diff --git a/onadata/apps/logger/migrations/0005_auto_20230308_0252.py b/onadata/apps/logger/migrations/0005_auto_20230308_0252.py deleted file mode 100644 index 485a8bfa6f..0000000000 --- a/onadata/apps/logger/migrations/0005_auto_20230308_0252.py +++ /dev/null @@ -1,48 +0,0 @@ -# Generated by Django 3.2.16 on 2023-03-08 07:52 - -from django.db import migrations, models - - -class 
Migration(migrations.Migration): - atomic = False - - dependencies = [ - ("logger", "0004_update_instance_geoms"), - ] - - operations = [ - migrations.SeparateDatabaseAndState( - database_operations=[ - migrations.RunSQL( - sql=""" - CREATE INDEX "logger_instance_date_created_b2427770" - ON "logger_instance" ("date_created"); - """, - reverse_sql=""" - DROP INDEX "logger_instance_date_created_b2427770"; - """, - ), - migrations.RunSQL( - sql=""" - CREATE INDEX "logger_instance_date_modified_a32599fc" - ON "logger_instance" ("date_modified"); - """, - reverse_sql=""" - DROP INDEX "logger_instance_date_modified_a32599fc"; - """, - ), - ], - state_operations=[ - migrations.AlterField( - model_name="instance", - name="date_created", - field=models.DateTimeField(auto_now_add=True, db_index=True), - ), - migrations.AlterField( - model_name="instance", - name="date_modified", - field=models.DateTimeField(auto_now=True, db_index=True), - ), - ], - ) - ] diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index 942e0c25f3..5cb57c34fb 100644 --- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -652,10 +652,10 @@ class Instance(models.Model, InstanceBaseClass): survey_type = models.ForeignKey("logger.SurveyType", on_delete=models.PROTECT) # shows when we first received this instance - date_created = models.DateTimeField(auto_now_add=True, db_index=True) + date_created = models.DateTimeField(auto_now_add=True) # this will end up representing "date last parsed" - date_modified = models.DateTimeField(auto_now=True, db_index=True) + date_modified = models.DateTimeField(auto_now=True) # this will end up representing "date instance was deleted" deleted_at = models.DateTimeField(null=True, default=None) @@ -697,6 +697,11 @@ class Instance(models.Model, InstanceBaseClass): class Meta: app_label = "logger" unique_together = ("xform", "uuid") + indexes = [ + models.Index(fields=['date_created']), + 
models.Index(fields=['date_modified']), + models.Index(fields=['deleted_at']), + ] @classmethod def set_deleted_at(cls, instance_id, deleted_at=timezone.now(), user=None): From 0f372f83f524134d98ce96ca5e7133b1e92f1c15 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Mon, 7 Aug 2023 00:57:05 +0300 Subject: [PATCH 050/270] Apply indexing to json fields Signed-off-by: Kipchirchir Sigei --- .../0005_add_date_fields_indexing.py | 49 +++++++++++-------- 1 file changed, 28 insertions(+), 21 deletions(-) diff --git a/onadata/apps/logger/migrations/0005_add_date_fields_indexing.py b/onadata/apps/logger/migrations/0005_add_date_fields_indexing.py index f5af866ac6..b789dd441d 100644 --- a/onadata/apps/logger/migrations/0005_add_date_fields_indexing.py +++ b/onadata/apps/logger/migrations/0005_add_date_fields_indexing.py @@ -1,4 +1,4 @@ -# Generated by Django 3.2.18 on 2023-03-14 12:22 +# Generated by Django 3.2.20 on 2023-08-06 13:39 from django.db import migrations, models @@ -13,42 +13,49 @@ class Migration(migrations.Migration): operations = [ migrations.SeparateDatabaseAndState( database_operations=[ + # index the json date fields migrations.RunSQL( - sql='CREATE INDEX CONCURRENTLY "logger_inst_date_cr_42899d_idx" ON "logger_instance" ("date_created");', + sql='CREATE INDEX CONCURRENTLY "logger_inst_date_cr_json_42899d_idx" ON "logger_instance" ((json->>\'_date_created\')) WHERE (json->>\'_date_created\') IS NOT NULL;', + reverse_sql='DROP INDEX "logger_inst_date_cr_json_42899d_idx";', + ), + migrations.RunSQL( + sql='CREATE INDEX CONCURRENTLY "logger_inst_date_mo_json_5a1bd3_idx" ON "logger_instance" ((json->>\'_date_modified\')) WHERE (json->>\'_date_modified\') IS NOT NULL;', + reverse_sql='DROP INDEX "logger_inst_date_mo_json_5a1bd3_idx";', + ), + migrations.RunSQL( + sql='CREATE INDEX CONCURRENTLY "logger_inst_deleted_at_json_da31a3_idx" ON "logger_instance" ((json->>\'_deleted_at\')) WHERE (json->>\'_deleted_at\') IS NOT NULL;', + reverse_sql='DROP INDEX 
"logger_inst_deleted_at_json_da31a3_idx";', + ), + # index model date fields + migrations.RunSQL( + sql='CREATE INDEX "logger_inst_date_cr_42899d_idx" ON "logger_instance" ("date_created");', reverse_sql='DROP INDEX "logger_inst_date_cr_42899d_idx";', ), migrations.RunSQL( - sql='CREATE INDEX CONCURRENTLY "logger_inst_date_mo_5a1bd3_idx" ON "logger_instance" ("date_modified");', + sql='CREATE INDEX "logger_inst_date_mo_5a1bd3_idx" ON "logger_instance" ("date_modified");', reverse_sql='DROP INDEX "logger_inst_date_mo_5a1bd3_idx";', ), migrations.RunSQL( - sql='CREATE INDEX CONCURRENTLY "logger_inst_deleted_da31a3_idx" ON "logger_instance" ("deleted_at");', - reverse_sql='DROP INDEX "logger_inst_deleted_da31a3_idx";', + sql='CREATE INDEX "logger_inst_deleted_at_da31a3_idx" ON "logger_instance" ("deleted_at");', + reverse_sql='DROP INDEX "logger_inst_deleted_at_da31a3_idx";', ), ], state_operations=[ - migrations.AddIndex( + migrations.AlterField( model_name="instance", - index=models.Index( - fields=["date_created"], name="logger_inst_date_cr_42899d_idx" - ), + name="date_created", + field=models.DateTimeField(auto_now_add=True, db_index=True), ), - migrations.AddIndex( + migrations.AlterField( model_name="instance", - index=models.Index( - fields=["date_modified"], name="logger_inst_date_mo_5a1bd3_idx" - ), + name="date_modified", + field=models.DateTimeField(auto_now=True, db_index=True), ), - migrations.AddIndex( + migrations.AlterField( model_name="instance", - index=models.Index( - fields=["deleted_at"], name="logger_inst_deleted_da31a3_idx" - ), + name="deleted_at", + field=models.DateTimeField(auto_now=True, db_index=True), ), ], ) ] - - - - From 4ad42526cfdd5e727463c138dba304b3a16029a3 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Mon, 7 Aug 2023 11:14:52 +0300 Subject: [PATCH 051/270] Fix conflicting migrations Signed-off-by: Kipchirchir Sigei --- ...ng.py => 0008_add_date_fields_indexing.py} | 31 ++++++++++--------- 1 file changed, 17 
insertions(+), 14 deletions(-) rename onadata/apps/logger/migrations/{0005_add_date_fields_indexing.py => 0008_add_date_fields_indexing.py} (65%) diff --git a/onadata/apps/logger/migrations/0005_add_date_fields_indexing.py b/onadata/apps/logger/migrations/0008_add_date_fields_indexing.py similarity index 65% rename from onadata/apps/logger/migrations/0005_add_date_fields_indexing.py rename to onadata/apps/logger/migrations/0008_add_date_fields_indexing.py index b789dd441d..8c933ccc92 100644 --- a/onadata/apps/logger/migrations/0005_add_date_fields_indexing.py +++ b/onadata/apps/logger/migrations/0008_add_date_fields_indexing.py @@ -1,4 +1,4 @@ -# Generated by Django 3.2.20 on 2023-08-06 13:39 +# Generated by Django 3.2.20 on 2023-08-07 08:11 from django.db import migrations, models @@ -7,7 +7,7 @@ class Migration(migrations.Migration): atomic = False dependencies = [ - ('logger', '0004_update_instance_geoms'), + ('logger', '0007_alter_projectinvitation_unique_together'), ] operations = [ @@ -28,33 +28,36 @@ class Migration(migrations.Migration): ), # index model date fields migrations.RunSQL( - sql='CREATE INDEX "logger_inst_date_cr_42899d_idx" ON "logger_instance" ("date_created");', + sql='CREATE INDEX CONCURRENTLY "logger_inst_date_cr_42899d_idx" ON "logger_instance" ("date_created");', reverse_sql='DROP INDEX "logger_inst_date_cr_42899d_idx";', ), migrations.RunSQL( - sql='CREATE INDEX "logger_inst_date_mo_5a1bd3_idx" ON "logger_instance" ("date_modified");', + sql='CREATE INDEX CONCURRENTLY "logger_inst_date_mo_5a1bd3_idx" ON "logger_instance" ("date_modified");', reverse_sql='DROP INDEX "logger_inst_date_mo_5a1bd3_idx";', ), migrations.RunSQL( - sql='CREATE INDEX "logger_inst_deleted_at_da31a3_idx" ON "logger_instance" ("deleted_at");', + sql='CREATE INDEX CONCURRENTLY "logger_inst_deleted_at_da31a3_idx" ON "logger_instance" ("deleted_at");', reverse_sql='DROP INDEX "logger_inst_deleted_at_da31a3_idx";', ), ], state_operations=[ - migrations.AlterField( + 
migrations.AddIndex( model_name="instance", - name="date_created", - field=models.DateTimeField(auto_now_add=True, db_index=True), + index=models.Index( + fields=["date_created"], name="logger_inst_date_cr_42899d_idx" + ), ), - migrations.AlterField( + migrations.AddIndex( model_name="instance", - name="date_modified", - field=models.DateTimeField(auto_now=True, db_index=True), + index=models.Index( + fields=["date_modified"], name="logger_inst_date_mo_5a1bd3_idx" + ), ), - migrations.AlterField( + migrations.AddIndex( model_name="instance", - name="deleted_at", - field=models.DateTimeField(auto_now=True, db_index=True), + index=models.Index( + fields=["deleted_at"], name="logger_inst_deleted_at_da31a3_idx" + ), ), ], ) From aafd54521e6c5e2c1642eb1bc808526d95727c9b Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Mon, 7 Aug 2023 20:29:48 +0300 Subject: [PATCH 052/270] Tag release v3.12.0 Signed-off-by: Kipchirchir Sigei --- CHANGES.rst | 7 +++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index f564441180..918a1db58b 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,13 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.12.0(2023-08-07) +------------------- + +- Add indexes to instance model date fields + `PR #2393 ` + [@KipSigei] + v3.11.0(2023-07-28) ------------------- diff --git a/onadata/__init__.py b/onadata/__init__.py index e0e6a6c193..862703f407 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.11.0" +__version__ = "3.12.0" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index 483b96ab19..a7b805953c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.11.0 +version = 3.12.0 description = Collect Analyze and Share Data long_description = file: 
README.rst long_description_content_type = text/x-rst From 12a1cc12e0c177dd27f69d3a3e9bc99969adf532 Mon Sep 17 00:00:00 2001 From: apiyo Date: Tue, 8 Aug 2023 10:28:35 +0300 Subject: [PATCH 053/270] Fix off-by-one error in xls_to_csv function --- onadata/libs/utils/csv_import.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/onadata/libs/utils/csv_import.py b/onadata/libs/utils/csv_import.py index f75c3d5386..4ce7f19f4e 100644 --- a/onadata/libs/utils/csv_import.py +++ b/onadata/libs/utils/csv_import.py @@ -129,7 +129,7 @@ def dict_merge(part_a, part_b): if not isinstance(part_b, dict): return part_b result = deepcopy(part_a) - for (k, v) in iteritems(part_b): + for k, v in iteritems(part_b): if k in result and isinstance(result[k], dict): result[k] = dict_merge(result[k], v) else: @@ -628,16 +628,16 @@ def submission_xls_to_csv(xls_file): # noqa # convert excel dates(floats) to datetime for date_column_index in date_columns: try: - row_values[date_column_index] = ( - row_values[date_column_index].strftime("%Y-%m-%d").isoformat() - ) + row_values[date_column_index - 1] = row_values[ + date_column_index - 1 + ].isoformat() except (ValueError, TypeError): pass # convert excel boolean to true/false for boolean_column_index in boolean_columns: - row_values[boolean_column_index] = bool( - row_values[boolean_column_index] == EXCEL_TRUE + row_values[boolean_column_index - 1] = bool( + row_values[boolean_column_index - 1] == EXCEL_TRUE ) csv_writer.writerow(row_values) From 1daac1143362a0a9bc070abffb3b5cb6a0a8058c Mon Sep 17 00:00:00 2001 From: apiyo Date: Tue, 8 Aug 2023 10:29:13 +0300 Subject: [PATCH 054/270] Ensure csvs converted from xls have the correct file_name --- onadata/apps/api/viewsets/xform_viewset.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/onadata/apps/api/viewsets/xform_viewset.py b/onadata/apps/api/viewsets/xform_viewset.py index 9ad28451dc..4b904d3869 100644 --- 
a/onadata/apps/api/viewsets/xform_viewset.py +++ b/onadata/apps/api/viewsets/xform_viewset.py @@ -685,6 +685,7 @@ def data_import(self, request, *args, **kwargs): else: if xls_file and xls_file.name.split(".")[-1] in XLS_EXTENSIONS: + xls_file_name = xls_file.name csv_file = submission_xls_to_csv(xls_file) overwrite = request.query_params.get("overwrite") overwrite = ( @@ -705,8 +706,12 @@ def data_import(self, request, *args, **kwargs): ) else: csv_file.seek(0) + if hasattr(csv_file, "name"): + file_name = csv_file.name + else: + file_name = xls_file_name upload_to = os.path.join( - request.user.username, "csv_imports", csv_file.name + request.user.username, "csv_imports", file_name ) file_name = default_storage.save(upload_to, csv_file) task = submit_csv_async.delay( From 9b3ae09935ea59ed08913a187f2f93f9c965843e Mon Sep 17 00:00:00 2001 From: apiyo Date: Tue, 8 Aug 2023 10:38:25 +0300 Subject: [PATCH 055/270] Refactor: Fix error raised by linter --- onadata/apps/api/viewsets/xform_viewset.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/onadata/apps/api/viewsets/xform_viewset.py b/onadata/apps/api/viewsets/xform_viewset.py index 4b904d3869..21b9dcf718 100644 --- a/onadata/apps/api/viewsets/xform_viewset.py +++ b/onadata/apps/api/viewsets/xform_viewset.py @@ -685,7 +685,6 @@ def data_import(self, request, *args, **kwargs): else: if xls_file and xls_file.name.split(".")[-1] in XLS_EXTENSIONS: - xls_file_name = xls_file.name csv_file = submission_xls_to_csv(xls_file) overwrite = request.query_params.get("overwrite") overwrite = ( @@ -706,10 +705,7 @@ def data_import(self, request, *args, **kwargs): ) else: csv_file.seek(0) - if hasattr(csv_file, "name"): - file_name = csv_file.name - else: - file_name = xls_file_name + file_name = getattr(csv_file, "name", xls_file and xls_file.name) upload_to = os.path.join( request.user.username, "csv_imports", file_name ) From c7f9b6c1038984713902747d92061b4b0502229d Mon Sep 17 00:00:00 2001 From: apiyo 
Date: Tue, 8 Aug 2023 18:05:09 +0300 Subject: [PATCH 056/270] test: Add test case for xlsx import --- .../api/tests/viewsets/test_xform_viewset.py | 22 ++++++++++++++++++ .../tests/fixtures/double_image_form.xlsx | Bin 0 -> 6030 bytes .../double_image_field_form_data.xlsx | Bin 0 -> 5583 bytes 3 files changed, 22 insertions(+) create mode 100644 onadata/apps/main/tests/fixtures/double_image_form.xlsx create mode 100644 onadata/libs/tests/utils/fixtures/double_image_field_form_data.xlsx diff --git a/onadata/apps/api/tests/viewsets/test_xform_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_viewset.py index f17b249b29..ea0abb6648 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_viewset.py @@ -4987,6 +4987,28 @@ def test_form_publishing_floip(self): self.assertEqual(response.status_code, 201, response.data) self.assertEqual(xforms + 1, XForm.objects.count()) + def test_xlsx_import(self): + with HTTMock(enketo_mock): + xls_path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "double_image_form.xlsx", + ) + self._publish_xls_form_to_project(xlsform_path=xls_path) + view = XFormViewSet.as_view({"post": "data_import"}) + xls_import = fixtures_path("double_image_field_form_data.xlsx") + post_data = {"xls_file": xls_import} + request = self.factory.post("/", data=post_data, **self.extra) + response = view(request, pk=self.xform.id) + + self.assertEqual(response.status_code, 200) + self.assertEqual(response.get("Cache-Control"), None) + self.assertEqual(response.data.get("additions"), 3) + self.assertEqual(response.data.get("updates"), 0) + def test_xls_import(self): with HTTMock(enketo_mock): xls_path = os.path.join( diff --git a/onadata/apps/main/tests/fixtures/double_image_form.xlsx b/onadata/apps/main/tests/fixtures/double_image_form.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..4a363ee9791920eb1002adec23cb0716fe58d218 GIT binary 
patch literal 6030 zcmZ`-1ymE@8Xe8(Aqdh9qd{7v!BH|o8c|ZZTUw+9WYXOTNDQP~lp3XgG)Q-+fbgci z_k8l++s^i%v$OAR=idLT3spu#CjkHe*vOY8K(DdMK??-{Aj1FvZXrK0kae(kHMe&) z*79;RcQNAjw6pybhp_*^OC*2#BcgkXU8v%1aYS2AjCrKiIZD}m`rFZVUYI!k#;hnK z+FXQzBdYH_ccxvsD#fYFty8(^bN0)g_|BT%5m~NwstRgW%{(wjpjRx+KPJGb!8BX^ zJnGm$5TIx6u3HhoHVq6c4$@~hMKLUnllThz;*b_jH)=Q_h3DN&ncc+b7^k_|uF!$S zw){ckq)utd=r#6jJ@+{vrFOd+_E$E=BJPBT*I7%HsA}n@m>jqE#DGa>@k3fyn6~GS zzx2rE(x-Mq1}h)3XE&$F(L$N-&+Q&4s~9IJNFV5q6ots%f&>r9R=$8>x+kAj#>Y5{ zJ@-*iJ&pYsPYSWY^*nh?&H#xb1&k(m6kv77?c8#myPCNCx&qki==$DxdZ*oO3K-~!mJFP_xybNQu<6nhAEc77vW-VBR?L>rSmWpz~U$Kaja z@Yhm!QC3*dPFt&%h|PZSsEsI%6DmB;5(*Lw#6ajk8{*2!iI5!^Gn5gF-!Ts_b;aU` z3Zqg{PjieODWn_IJN7Kgt>(UJ5=S&iuNuNi20S>j9_HQ_eNw4Z3u=1j)Wa1k5Lx)z4w8t7R?-yo3hO)D2 z(w>1diykf@^jqDiri_5AgRhijYOm|5QbK|lJ@kO4m;7H?g@>L4{$}KTE`oax$%FzL z06>8}J#Be@lj7mvY-8f!VDs~0{vxOu)^FXxOY~yb7YBZ-Uo)Jb!LX`2=DVg6g*l$V|1GZvsF@|H%z7uRO8H^Oieu?`Z#%K z#wQdl-hL?dmwaB8fH52~uXwjbUiTIx_%r6a2h~R;DVgPgTh9T@86Eu&fkA ziWfT*iY*#++7Qws)wXxg`8MlbAFWlWXYRKZ6JsptHo4LDf0mO*L`9WTezr+r&adb@ zmL<^wHxmj^B~^0X(Z1sPmetJ1_x$vhJ{w+KjlYti>AjCSSee^e-}1I-w}*)~8X1_i zE=vU^_x4ETX2@-b({&K_^LjaZQX@~we0EOQr{#n!=UH~Q-;LHyUvNUkXSubXS72p^ zn4YXvEhjBY7(`8cwcNaGDzqrN<}9@K<(9&mtV3I~h7>^~uMeIkVuK0=n|j05tW6n7 z=UWz)`DD5~Ra4)2Kiej)$&hD;yy@_7LO)!)c3PMaf$xgdYZ5Yw)N4(F3{}6H&ARP+ z;m_3-hm7Qf^hN}I5@g;u6MG47_amPWa({;BURVs5L}$~P7%nI3FLI38yIeK%NBLXf zxSe2A9i+nVkhS6eD9ry$VGUI^WT9w2duGEd-K zXXKfV>WerYBC6)9s2`LJHZZV4^|M|TN$#_uB%839frd|OXSQKTb^AO+h)vEhQp#Yd zZ{^*Fm;>mwr8bfhItA!W#kjIpyo2x|h^4pI_MBQ6!%?UiZt`xQPzO`1Qg9Uqak9^2 zFjwj!Pb1gim{tP}IPCVe7Y!s)1&VP%M7>v7ka&TO$?s zobT9>{UW|`GsRP#DL9Rm<1k{IF)T;G-Ky%bF9KhUA%qJ)eCFj{a2@KI_2P;Qzqt+!-%Cj&S!XJM@TYY--PUx)Dr}Ot-E4uB=+|w z?q;OV=|Z@(4)B{8|59guDX)$ zw|j3p>-)?N|DFt;BjzHwqR?I%wGauCM!3M$3ucTq9~fW z=G=6=-SaJFyWpxlkHXhG_{ghVQ~KD7#PTy9D#eKJtq%C!^oN0NM&vZkVu<<_>FpQr^0HWUw4knJQ0 zu!R}tg^|=3QrxYfw{{Teaz=}4Yq64U9GDzec+Fwg53iZ|rKl>r5 zZF)kpfCf1|`c`65U;wVWHyiF^3Br?X zJjsk`luYnP`P@@)PXT`CAj&f&$d(Ml$7`|z7q)9HOWAAQ<1 
zhS`^8VZMNTQzhq`aXTTx5KT;TqZ%4;L?mXg} zWx5CA95&JIOz=ko<9vulCKc*_r-3X&MNa1stMD|lcSaQBJ;>1VFuHFU!X`C@ri06| zJdCCVb%Z&MS)JQEn~y%ccbv&3OyRjcU(+jA#^rL7;hh1OE?qE)hBukYQjU6Q3DM8p zwFrje>t3X=92GV{HP#Odp1SA#IetmKaJV&fddSv7WtQnmkgG~EiZt;gp*5>eM)*jc z)M_^VGCm{D2(%zVu8es-%zO9PeB12+CU$^mF8Y9nUrNDAecO^*=YsXw-MTL3japHk zlw9qL2Pq>bPdA?YpgETOTRHM`1+5C?EU!WNd!9!ohwC$QJM-V~zh;4LJ=K^gIr5f6 z4uzK5=v(*t%Th{(Ch|hIYf5I`zwHXY4==pq^Deo^-bHpB1O%!Rkulj*5kZUM>*9>7 zpy=`&#@~(~!1gUF`)eWr)uK!p`-gu1XVj}3)mVJ859A=dZ)%ne%k;vSBnY1o*OcB1 zzRFJs$@`!fa7PU?l|pP~Gol=0Jc4NKtWiNQn5PE9n@L_|}gVEKynKE-t>v0v`+ zx+f*+bJIYN#Jy7e;h8d>y&cqDuFbiL9rskyrrI{)#JwLJl&ZB6gVLbf9#3HUCl{Jz zQL~Q|_>4^$dtW(GZnGI6%7P@+OPsknPwUsv=m@#B1*EN0V4xF}u}}L@Xd19B72U=o zg3FlO&b;#V5r*ocsdKhZP!Mi+$*N?N_Z3OA_tnzUeASy!$xLx?*MmJ)w0Zor7h>~> zU5V?>RJEZ@wN~HnXUcaUZ=B8fT^@}YxA3CH6FlD^eKVJN-)sNa(*ODOIAHF!=i3e} z0T$vJhOV`Mt|zI|fIAU~>Uaz)93&#Bc_`zW>KL9j5y`+Y`bVoXq(cCew#_#eq(+2{ zeCi{B>8J%9#~7Ed^B z5h%)#^zQ8Q(PLKn#A#nPJn(I2rUkpklf&(Y$QSqp!PwblXBNBM8;%wx zt+LU@5H{>S6;5DD-M&}`8x^HsC*N?aFg!v>%TV}N0%BMPD-T6vJ*l+HY<({3VK(f+;EAHcv3tFLBYH`IA z;L`a;%SFp>OQf&IBd+o(J2L#QUso5>jGLSxH0qrMqj9mR{0{xZaRlcT$vnNh>G#}|e5k=IF?VQTDJ5lE-4$%o zn4O8gYJ2dA%}qM$ReQq+3Z2=9Nt%GMxD`(etJ&l)w*9G4d~8cB|3W<;IoZx93PF9l z7{PkF+~RWlA5g%iOXT=Y6gCZ|$KK^uUxX+eSnbm3))%Hp7n*RxH0;1iC(fdkc?~jM zS@AljoJ))E?V_H*f)FAYe*894Ng;|;2al|JZ6d$FZ(n)uYR!RP76u8?x~Hpv@yO5S zWP?d>&G>Y>kC4nXAe*wZP(FA0GE#rMP*z!iaiN6tUZlBKj2a0jRNe8_OIrp4-nH%9 z6Qoe!;Hz_6`Yf3l{{7;3W9{~J9RYKOk*F*#>vOr>IV0^mrC*_GtTxTx@QA}zq|nJHK}?79`x^Q@t}P(<$=^*Vg6m< zKb=HRFe6(t6gmBXkgeIw!4&H3;ON3*>fmhtbM#4vA$~5zNV^6nx0!2#+ZTH(u*Pgu zDjSSH1GIvL=@T+lM_;#8z7YYMjV`avin>@r1*eLHzGJNBe-`Y$8$>)bja?)xc&A-OmcxLqqjZoO1H`A-E~ z;bX)fHEEAI975$!eCc510XvH{@q-87%|#!mN=Jw@$AG~JOBpN7&0MW7W66H7W{Kf_ z7v9GnUv0TW@2CBnQ$tq2bblnLT*z^U=s!7q>gf2JQt8MkUZRlQAEXMWQBB!##AaN~ zQob_$I;KVW66Ri6NCuU{Z&yswriUYc3kjKF4SuBU3($Rof!O`@WY6jC;@(4s| zhabNzfMe1Ii@L7qaU%}id}Pcmhqpv(W{I$td0)*9i6sTbJa#WkI(&aloLhw{a-Dbe zR27GWg1dzVUyONGH9olJ89~p1VxrPOjI~YMycpIdCRH|UEXZ4i+@c006;hI+bDx5s 
zi0(@t+x)8rvmM5~R}G%j4cHQDIuAD*j~$MvJmx{< zJXI}R#df&B%}eoN0~L(pxa>ESmz%dQX*MHQeND$-B_5t^UM%PI&D9YlrohJJ$NZ7j z5DF>@+W+?~k&69#1O)y({)dBk6X2%b_B$K^2nY;8-h%&d-);upw59$8CL+`E|C>}d zacIF))9A0~h)1MBbLlpBEbN58)J3wg3PC literal 0 HcmV?d00001 diff --git a/onadata/libs/tests/utils/fixtures/double_image_field_form_data.xlsx b/onadata/libs/tests/utils/fixtures/double_image_field_form_data.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..5196d5dd562c34132045f1fdb9647b3c82d49a2a GIT binary patch literal 5583 zcmZ`-1yodP+a0>QhfYztq+7Zb5EwuN1{i86rBfuOQyL^hV(9J?Lpl5I^|&!L_|nzFWakZtP)I4UsP`U%a(qjY2?&cz2vL1C&~M z5!d$Kaw#ECn10*}OCk|~f-0&(+pNTyF3qmU${QfLkpO=+s;XvDHp%c7r|9*%)(v5- za;_U8Lxq?2`gXMo_)c#TGX}v=H>WS9lOp>Z-%}Z<4kRJwy8m!@pJTveAL%9+DgZ$8 zcXzFzZs1@3jwE*5_wwV1JprHR4651DaEyS9N(eBy9KYE(){8{bTRE`#{}4CN6l}nv zKDXRDE74gp^v(|p8hv^|J!sLE>9}8@8_=u$z#Q=4nF5#9Yf@ejBE_miga}=?#&_m8 zY>^FzD%FP?iTr4Ioy5k|_U!s{YKl!u^(F^5LMJrE6c%xm^h$3(;2MeQoNSarzhb68 z4ecJ-N8NszUKVKSjy1zBu2fq%;L81;V*^1*(Ize3vHA>eg+;A|zdcHM?DI;P*`3b& zM03}%&qQFEA&Vuo8xGaWmIj@pm9kgPJka@whGfUfk;(RE7x=}`t_jF}R*XDgA2#M?d+LNYa6h=Aj?5kMe6H?w=&m;7 zOA@0PM{nzd(7;ZW`^3Yk1iOh^Etyj2+*?23N-7(hV&2Dlyq23`5y?kh7N7a%rdk{M zzAQZ!WFwL1rzwFQbm10hn%A2^0KH=VLu91A^C8jL8eZhXQ>IHGARp`0cHP{9VJO5g zpIXJ*!rz?RPHs$J&jk8}scwx?=$v9$KL-2;h3v6BM{`8o(S>9G)$A7;@aUEVy1O>JrfuuyX5&XH|Y2R+{ zs}Hjt!cM@Yl|wJFhG9gs3my8BUTaQI_K9O>p*&9A8_1aT%;{`pLh-wpM>-Q{{);=A zc3HSFAe{*Z&xMKD$bpJ6FM^Iko$%hu$bqskj~xido$E@6!YxWW1?80;A5x0?HMt2U zfRW4dsgDS>RP(!+&ys01}%O%IIr#3M;=2Kv#bAPAaNVP&`Mcnm^-Xy3S|t{+Uk zA!?~75n&oXHF~#R7l@rP82nu>>nncKtoAb*NCtC)-~F?vNY^DMKmQZOt4_x^mxK z%`Hiq)>44Gv~lAc{$K??4Zq_oN#-U&XRF{hdU~2r#4b3xe}TKBEoIASm|A5vI=Y(k z%8j*k;nc|gR7b^`b-J{xmSs9wnN2Bokf&vpM66zoK#)upJ3B1hUY8EF5Jw<2KkU42 zpqylh%)hY%dvGx(;;amsA^kYtNNqOsoLCvSoaMgmKJNGft4MkL)SzwtO_~t}N*eW| z@H-Li{wc4NuEUBfDS9%}kz_nf`LHxL4$@Nb04-Ir8?K_+o2&`BgJeFoOZ9{}NG)rL z2gk^!5U^VrM*A$>4k6`pgK?21f=ib`OOI+xTdT|*3TZuHfU(79RP>N=DacsT+y}bV zQwBGmh$ZVCwdB;?2)d!{l;rM^nUQ6jq~D2(+c!x*PM)X+ZnUAVB!TLwW~ 
zjKgLInKd6TT+!5lHr>h)UzDHYv!Xp^=OJC3POHamBZKetVM%vq#B;m8UE9gz9PVCzW($R781t}F#%l!E%Cj|HMSnQB z*N3xd4PwU`)}H{2Mv>+y&^m8D`mUZby#{OMY?1=4Zo%?BWVlB!q@HZljo=9>Ho9qS zHzaq|bBL**RDX2QW*Ekow3SMvZ}7~c7x{5&_I#DKi2qTVViQoI1I?)=dtO2;Cq1DV zcj93@wD)4G-wU`b{u#a5KmD=O#?1KmkBx4adrtqtG6`F|HWQ#V66G#3NO; zN~S7SFF2-o)Um;()@LBrsU*}unnvBWvi{yx`%x3=^9CYR!RA)+QXjB@ZSm0}4?Yez zD8iU@S<7tq(?(`ir4P&V%QVk!i=%6oAr+!E$&Du`W{Q;orwc2W&PkH0npxicBq$zA zsH`b;n3Pn4tGauM@t9;NBgsP#4p2j^i|K;WFY(%s@SE>az$;|L1C_X6glU^2*h$Hw zs_-$GsoYlf_mk{BUrk?f2Rnu83tE4o`k)xuEiL4xIHS*gTsg~u_i}Dof20-I!C|4% z>*JiwbLZ!r`J{9mm9+Blo7YPWwc+lWK21GJJHL&~`6EUq`flh><#F1N8 z6jxUa%nIOp2N@mX=i7)ahAU+Ng+4`>i^t$XOT#{6YA#RZU+W?xbyvf{O%$ZM#k>5 z64cjR_%KgLhwnWAPm`Kt4u~D%6n@gr>PR#1RmQ_~ah5`-9;9l@(Z@ISyklSWNF3PQ zd2c7&2Hw0WwQf9)-{Q{vE@wq}JZq!8dN_)z=FZ)nEF9wsTkS`mvif;AN9oXmc2>E0 zT6zQK?w5vNyD`_)EhV)~a35uT(qZ&Ati&{%xt?=LgRd)yi1h~a+g#;D?|&*x6|p5N z;<-}2c=!39C;1A*Smx~qF$Rxd^^Zafk#*b>njNWreYK8b*LjHRN^by_xG zlBCLz_XnWNz1p7$qmNi-opX`iJ(RDTxE|-8XNVdcn#|CUInKa_4N44;X--)Ubeq?j z>U4Kp`0PLDe0X3vgKZey!ODjAY3YGOmkL=BRkh9eps>`)n!N(S@;4?_2=NBT18QFH zg~2sKMS^cV>2Kqc_*i6QCh~Y=h8`|l$yrhkUtFKQP+#mE!^c2TkBabip}JI6$P7tQb1e3kB{CzOxn zFWvT3;Xo`)W*3$IZf^|bxN`I$F%C*7PY`!dr~JI4Gm-CorX|K8U%;lYk2MnvD}un( zJul(z*sTB4Aji$i`{-TIA){CTv2yinc8^5im6#Vd2GH*p_^bDEAj1j5;QB;DvJj99i_z>MGWnRP z8yu;_+tqXnKb|lIu=uDwW?W6Ue_2|agZBs>%r4DeB#;+s98YRTLg}5Ft7~_`u;faf z$HZc^VLK}Ssk(_y0X2BeGj$HrM*VWY&x!^H(V5XQR6wPgL4PD0gl$2zCwSi0>(aRI z(>gl4)8?&p-LFe{vXImED9OoWaBggNT>8r<10&DlQ4U_gotsU$^w}KR^k-09kMe>8 z-@CZ2{l{j40P`~mTI7z1_A=J3aJ_70gBI*V@#exUg`-r1kD4zGbtOMl+X zp-bj!$V=w>Ml#9Tm1-Mpe5Qg`Fo<~)w5JsuYoPDaQb6qLKb^7{YNip(%E{Je4Jbjp zT+;7IuC;t^L>w9)np{rvC<&aMNzDkwMEXWU+^4q0YR?{o5-pcu6d=0N?-~1?N!uedFFgSd*))sRP|1gIpUeV?t<1(6tZnIU|yM#kB zI_mUe*XNexGDbYn@|>Y)+dC_f=qPh<7PPPZA_FTUYV037n&9fKJGsJ*KRSrWo!%*G z!>QofX6#vy8#*BRXGJ-AAy43g9M;ot0RVzO6~*1d+Y#*kYZzTHG=paHlV5XD?$fKy z=)MgKXI9Fn(@L2QZuY^;@p5S5CoZ)w(wh`Xpbf3~wpq1<=Bq^q4V=6Qyonh2ZUnHAiPmExFrX6l-FBNLqet0zH9YXP*a1VZR 
zHwaF%M!>N-ZYQyJ(34KU=ezV!rsq{)o#<@IsXZGPb%eQQ?wBLyYZt(^Lw%kG5m-~cj{`g{{~mG2A(!@ zRvl_{)n_ckxa%tWx{p?7+YjZdNKIy?Scoesr3#>T*@DI*tE*@EO6R-dxDRUBT0;yA zD7jWiJeWc;V=KHr(j7OjpNl7od)U4FjTYHRUiW3{#sSsiUNUw?015!Wj65jOpKScq-z}j~hhO;^qc^VJD@S~l;=*Y0a6F`FD_V&s zf|3haLNaZ(7My10>Ske{NyOae%M)MV+`S*?k2L+bBVvcrJek*vNgz}Z;4o%ejOe6- zZZXHHp~spc5QGWp$CPW+fYv(Z_(XS?rzTCMPVBrySC_}dJ?&!nN*8z$pruV+Ef;>2 zo%waQi;V%zr%f&;^CMez{xP0_IcLPP*2JOa`H#w$MBGLk2T#_dCNA1UYTF)|4hb5O8tab>^DCRkX>hr(*}X@*rK-i=O-{MpPvqzm7ds`Lb~sW*eg2o;Z=SMSe!eI%n^7L6+2wqQ|oZh@aSX zjo1MZf1BB6sgm_wbqe6BW{+LN{qBd$!v+8-I6fd>Py}q=K=A@N>i6g1n%>F); zOt0rhj%t9VZAs7KRawU@u6C8tz@lEC1R_2gUVEY2P3HBE(>HnNlTe)-wA5?Mn^x;U z>vflT_yKMVP%Bd?WRwqQ7 z7g?ojR>(=1botCKf1pvE17b+LZC^Js|wY3W^2(Z!3R|3%8-S%l2PT z3}kWpTM@qvyj=$W0`4J!$fN%~VQ+(PXZc@XP2|>u1pkkmzisDsBK>7YfD{1um$cH= Wz(8ty000j1D}yZHzP}_s!2bbK+^nMj literal 0 HcmV?d00001 From a1b02a5c3e070d6793c53a547af032500cc52092 Mon Sep 17 00:00:00 2001 From: apiyo Date: Wed, 9 Aug 2023 10:52:37 +0300 Subject: [PATCH 057/270] test: Test that dates are formatted correctly for xls imports --- onadata/apps/api/tests/viewsets/test_xform_viewset.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/onadata/apps/api/tests/viewsets/test_xform_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_viewset.py index ea0abb6648..e5824b44a3 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_viewset.py @@ -5004,6 +5004,15 @@ def test_xlsx_import(self): request = self.factory.post("/", data=post_data, **self.extra) response = view(request, pk=self.xform.id) + # check that date columns are formatted correctly + self.assertEqual( + self.xform.instances.values("json___submission_time")[::1], + [ + {"json___submission_time": "2023-02-03T10:27:41"}, + {"json___submission_time": "2023-02-03T10:27:42"}, + {"json___submission_time": "2023-03-13T08:42:57"}, + ], + ) self.assertEqual(response.status_code, 200) 
self.assertEqual(response.get("Cache-Control"), None) self.assertEqual(response.data.get("additions"), 3) From f1825db683f0612816536419f99c3fadb9fa4c1f Mon Sep 17 00:00:00 2001 From: apiyo Date: Wed, 9 Aug 2023 10:59:08 +0300 Subject: [PATCH 058/270] docs: Add documentation for xlsx import testcase --- onadata/apps/api/tests/viewsets/test_xform_viewset.py | 1 + 1 file changed, 1 insertion(+) diff --git a/onadata/apps/api/tests/viewsets/test_xform_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_viewset.py index e5824b44a3..b90e9f14f3 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_viewset.py @@ -4988,6 +4988,7 @@ def test_form_publishing_floip(self): self.assertEqual(xforms + 1, XForm.objects.count()) def test_xlsx_import(self): + """Ensure XLSX imports work as expected and dates are formatted correctly""" with HTTMock(enketo_mock): xls_path = os.path.join( settings.PROJECT_ROOT, From d462bb16bbb7841c962c0692c26f418c377795b4 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 9 Aug 2023 13:13:52 +0300 Subject: [PATCH 059/270] Update dependencies Signed-off-by: Kipchirchir Sigei --- requirements/azure.pip | 19 +-- requirements/base.pip | 150 +++++++++++--------- requirements/dev.pip | 309 ++++++++++++++++++++--------------------- requirements/s3.pip | 12 +- requirements/ses.pip | 14 +- 5 files changed, 261 insertions(+), 243 deletions(-) diff --git a/requirements/azure.pip b/requirements/azure.pip index 81bc6b2a44..2d6febf604 100644 --- a/requirements/azure.pip +++ b/requirements/azure.pip @@ -4,19 +4,19 @@ # # pip-compile --output-file=requirements/azure.pip requirements/azure.in # -asgiref==3.6.0 +asgiref==3.7.2 # via django -azure-core==1.26.3 +azure-core==1.29.0 # via azure-storage-blob -azure-storage-blob==12.15.0 +azure-storage-blob==12.17.0 # via django-storages -certifi==2022.12.7 +certifi==2023.7.22 # via requests cffi==1.15.1 # via cryptography -charset-normalizer==3.1.0 
+charset-normalizer==3.2.0 # via requests -cryptography==40.0.1 +cryptography==41.0.3 # via # -r requirements/azure.in # azure-storage-blob @@ -34,7 +34,7 @@ pycparser==2.21 # via cffi pytz==2023.3 # via django -requests==2.28.2 +requests==2.31.0 # via azure-core six==1.16.0 # via @@ -42,9 +42,10 @@ six==1.16.0 # isodate sqlparse==0.4.4 # via django -typing-extensions==4.5.0 +typing-extensions==4.7.1 # via + # asgiref # azure-core # azure-storage-blob -urllib3==1.26.15 +urllib3==2.0.4 # via requests diff --git a/requirements/base.pip b/requirements/base.pip index 485387f4aa..e64d03011d 100644 --- a/requirements/base.pip +++ b/requirements/base.pip @@ -26,45 +26,46 @@ analytics-python==1.4.post1 # via onadata appoptics-metrics==5.1.0 # via onadata -asgiref==3.6.0 +asgiref==3.7.2 # via django async-timeout==4.0.2 # via redis -attrs==22.2.0 +attrs==23.1.0 # via # jsonlines # jsonschema + # referencing babel==2.12.1 # via sphinx backoff==1.10.0 # via analytics-python -billiard==3.6.4.0 +billiard==4.1.0 # via celery -boto3==1.26.103 +boto3==1.28.22 # via tabulator -botocore==1.29.103 +botocore==1.31.22 # via # boto3 # s3transfer cached-property==1.5.2 # via tableschema -cachetools==5.3.0 +cachetools==5.3.1 # via google-auth -celery==5.2.7 +celery==5.3.1 # via onadata -certifi==2022.12.7 +certifi==2023.7.22 # via # requests # sentry-sdk cffi==1.15.1 # via cryptography -chardet==5.1.0 +chardet==5.2.0 # via # datapackage # tabulator -charset-normalizer==3.1.0 +charset-normalizer==3.2.0 # via requests -click==8.1.3 +click==8.1.6 # via # celery # click-didyoumean @@ -77,9 +78,9 @@ click-didyoumean==0.3.0 # via celery click-plugins==1.1.1 # via celery -click-repl==0.2.0 +click-repl==0.3.0 # via celery -cryptography==40.0.1 +cryptography==41.0.3 # via # jwcrypto # onadata @@ -91,7 +92,7 @@ defusedxml==0.7.1 # djangorestframework-xml # onadata # pyxform -deprecated==1.2.13 +deprecated==1.2.14 # via # jwcrypto # onadata @@ -118,11 +119,11 @@ django==3.2.20 # onadata 
django-activity-stream==1.4.2 # via onadata -django-cors-headers==3.14.0 +django-cors-headers==4.2.0 # via onadata django-csp==3.7 # via onadata -django-debug-toolbar==3.8.1 +django-debug-toolbar==4.1.0 # via onadata django-filter==21.1 # via onadata @@ -132,13 +133,13 @@ django-guardian==2.4.0 # onadata django-nose==1.4.7 # via onadata -django-oauth-toolkit==2.2.0 +django-oauth-toolkit==2.3.0 # via onadata django-ordered-model==3.7.4 # via onadata -django-query-builder==3.0.1 +django-query-builder==3.1.0 # via onadata -django-redis==5.2.0 +django-redis==5.3.0 # via onadata django-registration-redux==2.12 # via onadata @@ -146,7 +147,7 @@ django-render-block==0.9.2 # via django-templated-email django-reversion==5.0.4 # via onadata -django-taggit==3.1.0 +django-taggit==4.0.0 # via onadata django-templated-email==3.0.1 # via onadata @@ -170,11 +171,11 @@ djangorestframework-jsonp==1.0.2 # via onadata djangorestframework-xml==2.0.0 # via onadata -dnspython==2.3.0 +dnspython==2.4.1 # via pymongo docutils==0.19 # via sphinx -dpath==2.1.5 +dpath==2.1.6 # via onadata elaphe3==0.2.0 # via onadata @@ -186,7 +187,7 @@ future==0.18.3 # via python-json2xlsclient geojson==3.0.1 # via onadata -google-auth==2.17.1 +google-auth==2.22.0 # via # google-auth-oauthlib # onadata @@ -198,7 +199,7 @@ httplib2==0.22.0 # via onadata idna==3.4 # via requests -ijson==3.2.0.post0 +ijson==3.2.3 # via tabulator imagesize==1.4.1 # via sphinx @@ -216,23 +217,25 @@ jsonlines==3.1.0 # via tabulator jsonpickle==3.0.1 # via onadata -jsonpointer==2.3 +jsonpointer==2.4 # via datapackage -jsonschema==4.17.3 +jsonschema==4.19.0 # via # datapackage # tableschema -jwcrypto==1.4.2 +jsonschema-specifications==2023.7.1 + # via jsonschema +jwcrypto==1.5.0 # via django-oauth-toolkit -kombu==5.2.4 +kombu==5.3.1 # via celery linear-tsv==1.1.0 # via tabulator -lxml==4.9.2 +lxml==4.9.3 # via onadata -markdown==3.4.3 +markdown==3.4.4 # via onadata -markupsafe==2.1.2 +markupsafe==2.1.3 # via jinja2 
modilabs-python-utils==0.1.5 # via onadata @@ -240,7 +243,7 @@ monotonic==1.6 # via analytics-python nose==1.3.7 # via django-nose -numpy==1.24.2 +numpy==1.25.2 # via onadata oauthlib==3.2.2 # via @@ -251,44 +254,43 @@ openpyxl==3.0.9 # onadata # pyxform # tabulator -packaging==23.0 +packaging==23.1 # via sphinx paho-mqtt==1.6.1 # via onadata -pillow==9.4.0 +pillow==10.0.0 # via # elaphe3 # onadata -prompt-toolkit==3.0.38 +prompt-toolkit==3.0.39 # via click-repl -psycopg2-binary==2.9.5 +psycopg2-binary==2.9.7 # via onadata -pyasn1==0.4.8 +pyasn1==0.5.0 # via # pyasn1-modules # rsa -pyasn1-modules==0.2.8 +pyasn1-modules==0.3.0 # via google-auth pycparser==2.21 # via cffi -pygments==2.14.0 +pygments==2.16.1 # via sphinx -pyjwt[crypto]==2.6.0 +pyjwt[crypto]==2.8.0 # via # ona-oidc # onadata pylibmc==1.6.3 # via onadata -pymongo==4.3.3 +pymongo==4.4.1 # via onadata -pyparsing==3.0.9 +pyparsing==3.1.1 # via httplib2 -pyrsistent==0.19.3 - # via jsonschema python-dateutil==2.8.2 # via # analytics-python # botocore + # celery # fleming # onadata # tableschema @@ -296,21 +298,24 @@ python-memcached==1.59 # via onadata pytz==2023.3 # via - # celery # django # django-query-builder # djangorestframework # fleming # onadata -pyxform==1.12.0 +pyxform==1.12.1 # via # onadata # pyfloip recaptcha-client==1.0.6 # via onadata -redis==4.5.4 +redis==4.6.0 # via django-redis -requests==2.28.2 +referencing==0.30.2 + # via + # jsonschema + # jsonschema-specifications +requests==2.31.0 # via # analytics-python # datapackage @@ -326,21 +331,23 @@ requests-oauthlib==1.3.1 # via google-auth-oauthlib rfc3986==2.0.0 # via tableschema +rpds-py==0.9.2 + # via + # jsonschema + # referencing rsa==4.9 # via google-auth -s3transfer==0.6.0 +s3transfer==0.6.1 # via boto3 -sentry-sdk==1.18.0 +sentry-sdk==1.29.2 # via onadata -simplejson==3.18.4 +simplejson==3.19.1 # via onadata six==1.16.0 # via # analytics-python # appoptics-metrics - # click-repl # datapackage - # django-query-builder # 
djangorestframework-csv # google-auth # isodate @@ -351,21 +358,27 @@ six==1.16.0 # tabulator snowballstemmer==2.2.0 # via sphinx -sphinx==6.1.3 - # via onadata -sphinxcontrib-applehelp==1.0.4 +sphinx==6.2.1 + # via + # onadata + # sphinxcontrib-applehelp + # sphinxcontrib-devhelp + # sphinxcontrib-htmlhelp + # sphinxcontrib-qthelp + # sphinxcontrib-serializinghtml +sphinxcontrib-applehelp==1.0.5 # via sphinx -sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-devhelp==1.0.3 # via sphinx -sphinxcontrib-htmlhelp==2.0.1 +sphinxcontrib-htmlhelp==2.0.2 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-qthelp==1.0.4 # via sphinx -sphinxcontrib-serializinghtml==1.1.5 +sphinxcontrib-serializinghtml==1.1.6 # via sphinx -sqlalchemy==2.0.7 +sqlalchemy==2.0.19 # via tabulator sqlparse==0.4.4 # via @@ -377,9 +390,13 @@ tabulator==1.53.5 # via # datapackage # tableschema -typing-extensions==4.5.0 - # via sqlalchemy -ujson==5.7.0 +typing-extensions==4.7.1 + # via + # asgiref + # sqlalchemy +tzdata==2023.3 + # via celery +ujson==5.8.0 # via onadata unicodecsv==0.14.1 # via @@ -388,12 +405,13 @@ unicodecsv==0.14.1 # onadata # tableschema # tabulator -urllib3==1.26.15 +urllib3==1.26.16 # via # botocore + # google-auth # requests # sentry-sdk -uwsgi==2.0.21 +uwsgi==2.0.22 # via onadata vine==5.0.0 # via diff --git a/requirements/dev.pip b/requirements/dev.pip index 0cb7ca6faf..dba6ded00c 100644 --- a/requirements/dev.pip +++ b/requirements/dev.pip @@ -18,63 +18,60 @@ # via -r requirements/base.in -e git+https://github.com/onaio/savreaderwriter.git@fix-pep-440-issues#egg=savreaderwriter # via -r requirements/base.in -alabaster==0.7.12 +alabaster==0.7.13 # via sphinx amqp==5.1.1 # via kombu -analytics-python==1.4.0 +analytics-python==1.4.post1 # via onadata appoptics-metrics==5.1.0 # via onadata -asgiref==3.5.2 +asgiref==3.7.2 # via django -astroid==2.11.6 - # via - # pylint - # pylint-celery - # pylint-flask - # requirements-detector 
-asttokens==2.0.5 +astroid==2.15.6 + # via pylint +asttokens==2.2.1 # via stack-data async-timeout==4.0.2 # via redis -attrs==21.4.0 +attrs==23.1.0 # via # jsonlines # jsonschema -babel==2.10.3 + # referencing +babel==2.12.1 # via sphinx backcall==0.2.0 # via ipython backoff==1.10.0 # via analytics-python -billiard==3.6.4.0 +billiard==4.1.0 # via celery -boto3==1.24.21 +boto3==1.28.22 # via tabulator -botocore==1.27.21 +botocore==1.31.22 # via # boto3 # s3transfer cached-property==1.5.2 # via tableschema -cachetools==5.2.0 +cachetools==5.3.1 # via google-auth -celery==5.2.7 +celery==5.3.1 # via onadata -certifi==2022.6.15 +certifi==2023.7.22 # via # requests # sentry-sdk cffi==1.15.1 # via cryptography -chardet==5.0.0 +chardet==5.2.0 # via # datapackage # tabulator -charset-normalizer==2.1.0 +charset-normalizer==3.2.0 # via requests -click==8.1.3 +click==8.1.6 # via # celery # click-didyoumean @@ -87,9 +84,9 @@ click-didyoumean==0.3.0 # via celery click-plugins==1.1.1 # via celery -click-repl==0.2.0 +click-repl==0.3.0 # via celery -cryptography==39.0.1 +cryptography==41.0.3 # via # jwcrypto # onadata @@ -105,17 +102,17 @@ defusedxml==0.7.1 # djangorestframework-xml # onadata # pyxform -deprecated==1.2.13 +deprecated==1.2.14 # via # jwcrypto # onadata - # redis -dict2xml==1.7.1 +dict2xml==1.7.3 # via onadata -dill==0.3.5.1 +dill==0.3.7 # via pylint django==3.2.20 # via + # django-activity-stream # django-cors-headers # django-csp # django-debug-toolbar @@ -133,15 +130,15 @@ django==3.2.20 # djangorestframework-jsonapi # ona-oidc # onadata -django-activity-stream==1.4.1 +django-activity-stream==1.4.2 # via onadata -django-cors-headers==3.13.0 +django-cors-headers==4.2.0 # via onadata django-csp==3.7 # via onadata -django-debug-toolbar==3.5.0 +django-debug-toolbar==4.1.0 # via onadata -django-extensions==3.1.5 +django-extensions==3.2.3 # via -r requirements/dev.in django-filter==21.1 # via onadata @@ -151,25 +148,25 @@ django-guardian==2.4.0 # onadata 
django-nose==1.4.7 # via onadata -django-oauth-toolkit==2.1.0 +django-oauth-toolkit==2.3.0 # via onadata -django-ordered-model==3.6 +django-ordered-model==3.7.4 # via onadata -django-query-builder==2.0.1 +django-query-builder==3.1.0 # via onadata -django-redis==5.2.0 +django-redis==5.3.0 # via onadata -django-registration-redux==2.11 +django-registration-redux==2.12 # via onadata -django-render-block==0.9.1 +django-render-block==0.9.2 # via django-templated-email -django-reversion==5.0.1 +django-reversion==5.0.4 # via onadata -django-taggit==3.0.0 +django-taggit==4.0.0 # via onadata -django-templated-email==3.0.0 +django-templated-email==3.0.1 # via onadata -djangorestframework==3.13.1 +djangorestframework==3.14.0 # via # djangorestframework-csv # djangorestframework-gis @@ -183,23 +180,23 @@ djangorestframework-gis==1.0 # via onadata djangorestframework-guardian==0.3.0 # via onadata -djangorestframework-jsonapi==5.0.0 +djangorestframework-jsonapi==6.0.0 # via onadata djangorestframework-jsonp==1.0.2 # via onadata djangorestframework-xml==2.0.0 # via onadata -docutils==0.18.1 +dnspython==2.4.1 + # via pymongo +docutils==0.19 # via sphinx -dodgy==0.2.1 - # via prospector -dpath==2.0.6 +dpath==2.1.6 # via onadata elaphe3==0.2.0 # via onadata et-xmlfile==1.1.0 # via openpyxl -executing==0.8.3 +executing==1.2.0 # via stack-data flake8==4.0.1 # via @@ -213,39 +210,41 @@ fleming==0.7.0 # via django-query-builder future==0.18.3 # via python-json2xlsclient -geojson==2.5.0 +geojson==3.0.1 # via onadata -google-auth==2.9.0 +google-auth==2.22.0 # via # google-auth-oauthlib # onadata -google-auth-oauthlib==0.5.2 +google-auth-oauthlib==1.0.0 # via onadata -greenlet==1.1.2 +greenlet==2.0.2 # via sqlalchemy httmock==1.4.0 # via -r requirements/dev.in -httplib2==0.20.4 +httplib2==0.22.0 # via onadata -idna==3.3 +idna==3.4 # via requests -ijson==3.1.4 +ijson==3.2.3 # via tabulator -imagesize==1.3.0 +imagesize==1.4.1 # via sphinx +importlib-metadata==6.8.0 + # via yapf 
inflection==0.5.1 # via djangorestframework-jsonapi -ipdb==0.13.9 +ipdb==0.13.13 # via -r requirements/dev.in -ipython==8.4.0 +ipython==8.14.0 # via ipdb isodate==0.6.1 # via tableschema -isort==5.10.1 +isort==5.12.0 # via # -r requirements/dev.in # pylint -jedi==0.18.1 +jedi==0.19.0 # via ipython jinja2==3.1.2 # via sphinx @@ -253,38 +252,37 @@ jmespath==1.0.1 # via # boto3 # botocore -jsonlines==3.0.0 +jsonlines==3.1.0 # via tabulator -jsonpickle==2.2.0 +jsonpickle==3.0.1 # via onadata -jsonpointer==2.3 +jsonpointer==2.4 # via datapackage -jsonschema==4.6.1 +jsonschema==4.19.0 # via # datapackage # tableschema -jwcrypto==1.4.2 +jsonschema-specifications==2023.7.1 + # via jsonschema +jwcrypto==1.5.0 # via django-oauth-toolkit -kombu==5.2.4 +kombu==5.3.1 # via celery -lazy-object-proxy==1.7.1 +lazy-object-proxy==1.9.0 # via astroid linear-tsv==1.1.0 # via tabulator -lxml==4.9.1 +lxml==4.9.3 # via onadata -markdown==3.3.7 +markdown==3.4.4 # via onadata -markupsafe==2.1.1 +markupsafe==2.1.3 # via jinja2 -matplotlib-inline==0.1.3 +matplotlib-inline==0.1.6 # via ipython mccabe==0.6.1 - # via - # flake8 - # prospector - # pylint -mock==4.0.3 + # via pylint +mock==5.1.0 # via -r requirements/dev.in modilabs-python-utils==0.1.5 # via onadata @@ -292,9 +290,9 @@ monotonic==1.6 # via analytics-python nose==1.3.7 # via django-nose -numpy==1.23.0 +numpy==1.25.2 # via onadata -oauthlib==3.2.1 +oauthlib==3.2.2 # via # django-oauth-toolkit # requests-oauthlib @@ -303,10 +301,8 @@ openpyxl==3.0.9 # onadata # pyxform # tabulator -packaging==21.3 - # via - # redis - # sphinx +packaging==23.1 + # via sphinx paho-mqtt==1.6.1 # via onadata parso==0.8.3 @@ -317,29 +313,29 @@ pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pillow==9.3.0 +pillow==10.0.0 # via # elaphe3 # onadata -platformdirs==2.5.2 - # via pylint -prompt-toolkit==3.0.30 +platformdirs==3.10.0 + # via + # pylint + # yapf +prompt-toolkit==3.0.39 # via # click-repl # ipython -prospector==1.7.7 - # via -r 
requirements/dev.in -psycopg2-binary==2.9.3 +psycopg2-binary==2.9.7 # via onadata ptyprocess==0.7.0 # via pexpect pure-eval==0.2.2 # via stack-data -pyasn1==0.4.8 +pyasn1==0.5.0 # via # pyasn1-modules # rsa -pyasn1-modules==0.2.8 +pyasn1-modules==0.3.0 # via google-auth pycodestyle==2.8.0 # via @@ -347,23 +343,17 @@ pycodestyle==2.8.0 # prospector pycparser==2.21 # via cffi -pydocstyle==6.1.1 - # via prospector -pyflakes==2.4.0 - # via - # flake8 - # prospector -pygments==2.12.0 +pygments==2.16.1 # via # ipython # sphinx -pyjwt[crypto]==2.4.0 +pyjwt[crypto]==2.8.0 # via # ona-oidc # onadata -pylibmc==1.6.1 +pylibmc==1.6.3 # via onadata -pylint==2.14.4 +pylint==2.17.5 # via # -r requirements/dev.in # prospector @@ -374,35 +364,24 @@ pylint==2.14.4 pylint-celery==0.3 # via prospector pylint-django==2.5.3 - # via - # -r requirements/dev.in - # prospector -pylint-flask==0.6 - # via prospector -pylint-plugin-utils==0.7 - # via - # prospector - # pylint-celery - # pylint-django - # pylint-flask -pymongo==4.1.1 + # via -r requirements/dev.in +pylint-plugin-utils==0.8.2 + # via pylint-django +pymongo==4.4.1 # via onadata -pyparsing==3.0.9 - # via - # httplib2 - # packaging -pyrsistent==0.18.1 - # via jsonschema +pyparsing==3.1.1 + # via httplib2 python-dateutil==2.8.2 # via # analytics-python # botocore + # celery # fleming # onadata # tableschema python-memcached==1.59 # via onadata -pytz==2022.1 +pytz==2023.3 # via # babel # celery @@ -411,7 +390,7 @@ pytz==2022.1 # djangorestframework # fleming # onadata -pyxform==1.12.0 +pyxform==1.12.1 # via # onadata # pyfloip @@ -419,9 +398,13 @@ pyyaml==6.0 # via prospector recaptcha-client==1.0.6 # via onadata -redis==4.3.4 +redis==4.6.0 # via django-redis -requests==2.28.1 +referencing==0.30.2 + # via + # jsonschema + # jsonschema-specifications +requests==2.31.0 # via # analytics-python # datapackage @@ -435,7 +418,7 @@ requests==2.28.1 # sphinx # tableschema # tabulator -requests-mock==1.9.3 +requests-mock==1.11.0 # via -r 
requirements/dev.in requests-oauthlib==1.3.1 # via google-auth-oauthlib @@ -443,15 +426,17 @@ requirements-detector==0.7 # via prospector rfc3986==2.0.0 # via tableschema -rsa==4.8 +rpds-py==0.9.2 + # via + # jsonschema + # referencing +rsa==4.9 # via google-auth -s3transfer==0.6.0 +s3transfer==0.6.1 # via boto3 -sentry-sdk==1.6.0 +sentry-sdk==1.29.2 # via onadata -setoptconf-tmp==0.3.1 - # via prospector -simplejson==3.17.6 +simplejson==3.19.1 # via onadata six==1.16.0 # via @@ -471,30 +456,34 @@ six==1.16.0 # tableschema # tabulator snowballstemmer==2.2.0 + # via sphinx +sphinx==6.2.1 # via - # pydocstyle - # sphinx -sphinx==5.0.2 - # via onadata -sphinxcontrib-applehelp==1.0.2 + # onadata + # sphinxcontrib-applehelp + # sphinxcontrib-devhelp + # sphinxcontrib-htmlhelp + # sphinxcontrib-qthelp + # sphinxcontrib-serializinghtml +sphinxcontrib-applehelp==1.0.5 # via sphinx -sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-devhelp==1.0.3 # via sphinx -sphinxcontrib-htmlhelp==2.0.0 +sphinxcontrib-htmlhelp==2.0.2 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-qthelp==1.0.4 # via sphinx -sphinxcontrib-serializinghtml==1.1.5 +sphinxcontrib-serializinghtml==1.1.6 # via sphinx -sqlalchemy==1.4.39 +sqlalchemy==2.0.19 # via tabulator -sqlparse==0.4.2 +sqlparse==0.4.4 # via # django # django-debug-toolbar -stack-data==0.3.0 +stack-data==0.6.2 # via ipython tableschema==1.20.2 # via datapackage @@ -502,21 +491,27 @@ tabulator==1.53.5 # via # datapackage # tableschema -tblib==1.7.0 +tblib==2.0.0 # via -r requirements/dev.in -toml==0.10.2 +tomli==2.0.1 # via # ipdb - # prospector -tomli==2.0.1 - # via pylint -tomlkit==0.11.0 + # pylint + # yapf +tomlkit==0.12.1 # via pylint -traitlets==5.3.0 +traitlets==5.9.0 # via # ipython # matplotlib-inline -ujson==5.4.0 +typing-extensions==4.7.1 + # via + # asgiref + # astroid + # sqlalchemy +tzdata==2023.3 + # via celery +ujson==5.8.0 # via onadata unicodecsv==0.14.1 # via @@ -525,21 +520,22 
@@ unicodecsv==0.14.1 # onadata # tableschema # tabulator -urllib3==1.26.9 +urllib3==1.26.16 # via # botocore + # google-auth # requests # sentry-sdk -uwsgi==2.0.20 +uwsgi==2.0.22 # via onadata vine==5.0.0 # via # amqp # celery # kombu -wcwidth==0.2.5 +wcwidth==0.2.6 # via prompt-toolkit -wrapt==1.14.1 +wrapt==1.15.0 # via # astroid # deprecated @@ -551,8 +547,7 @@ xlwt==1.3.0 # via onadata xmltodict==0.13.0 # via onadata -yapf==0.32.0 +yapf==0.40.1 # via -r requirements/dev.in - -# The following packages are considered to be unsafe in a requirements file: -# setuptools +zipp==3.16.2 + # via importlib-metadata diff --git a/requirements/s3.pip b/requirements/s3.pip index a051f86a47..6f8abf5b4d 100644 --- a/requirements/s3.pip +++ b/requirements/s3.pip @@ -4,11 +4,11 @@ # # pip-compile --output-file=requirements/s3.pip requirements/s3.in # -asgiref==3.6.0 +asgiref==3.7.2 # via django -boto3==1.26.103 +boto3==1.28.22 # via -r requirements/s3.in -botocore==1.29.103 +botocore==1.31.22 # via # boto3 # s3transfer @@ -26,11 +26,13 @@ python-dateutil==2.8.2 # via botocore pytz==2023.3 # via django -s3transfer==0.6.0 +s3transfer==0.6.1 # via boto3 six==1.16.0 # via python-dateutil sqlparse==0.4.4 # via django -urllib3==1.26.15 +typing-extensions==4.7.1 + # via asgiref +urllib3==1.26.16 # via botocore diff --git a/requirements/ses.pip b/requirements/ses.pip index 51e19aa96b..740663a8d5 100644 --- a/requirements/ses.pip +++ b/requirements/ses.pip @@ -4,13 +4,13 @@ # # pip-compile --output-file=requirements/ses.pip requirements/ses.in # -asgiref==3.6.0 +asgiref==3.7.2 # via django boto==2.49.0 # via -r requirements/ses.in -boto3==1.26.103 +boto3==1.28.22 # via django-ses -botocore==1.29.103 +botocore==1.31.22 # via # boto3 # s3transfer @@ -18,7 +18,7 @@ django==3.2.20 # via # -r requirements/ses.in # django-ses -django-ses==3.3.0 +django-ses==3.5.0 # via -r requirements/ses.in jmespath==1.0.1 # via @@ -30,11 +30,13 @@ pytz==2023.3 # via # django # django-ses -s3transfer==0.6.0 
+s3transfer==0.6.1 # via boto3 six==1.16.0 # via python-dateutil sqlparse==0.4.4 # via django -urllib3==1.26.15 +typing-extensions==4.7.1 + # via asgiref +urllib3==1.26.16 # via botocore From b3eb11b18376b4292036f7172a26d12d8d349171 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 9 Aug 2023 13:50:20 +0300 Subject: [PATCH 060/270] Use requests allowed_methods; method_whitelist is deprecated Signed-off-by: Kipchirchir Sigei --- onadata/libs/utils/project_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/onadata/libs/utils/project_utils.py b/onadata/libs/utils/project_utils.py index 2876da158f..cb0f9dc0cd 100644 --- a/onadata/libs/utils/project_utils.py +++ b/onadata/libs/utils/project_utils.py @@ -238,7 +238,7 @@ def propagate_project_permissions( max_retries=Retry( total=5, backoff_factor=2, - method_whitelist=["GET", "POST", "DELETE"], + allowed_methods=["GET", "POST", "DELETE"], status_forcelist=[502, 503, 504], ) ), From 3226612f6c06706a0f537cae3cfbaa56307c39b6 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 9 Aug 2023 13:59:42 +0300 Subject: [PATCH 061/270] Build image using latest focal image Signed-off-by: Kipchirchir Sigei --- docker/onadata-uwsgi/Dockerfile.ubuntu | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/onadata-uwsgi/Dockerfile.ubuntu b/docker/onadata-uwsgi/Dockerfile.ubuntu index a8ccc0fe51..bcbcee5aba 100644 --- a/docker/onadata-uwsgi/Dockerfile.ubuntu +++ b/docker/onadata-uwsgi/Dockerfile.ubuntu @@ -13,7 +13,7 @@ RUN mkdir -m 0600 ~/.ssh && ssh-keyscan github.com >> ~/.ssh/known_hosts # hadolint ignore=DL3013 RUN --mount=type=ssh if [ -n "$optional_packages" ]; then pip install ${optional_packages} ; fi -FROM ubuntu:focal-20230412 +FROM ubuntu:focal-20230801 ARG release_version=v3.7.1 From 0690a261d417939b31e1d860efa4f98def64844c Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 9 Aug 2023 15:13:04 +0300 Subject: [PATCH 062/270] Image.ANTIALIAS has been replaced 
with Image.LANCZOS on pillow v10 Signed-off-by: Kipchirchir Sigei --- onadata/libs/utils/image_tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/onadata/libs/utils/image_tools.py b/onadata/libs/utils/image_tools.py index 0f64056692..c61d5c752e 100644 --- a/onadata/libs/utils/image_tools.py +++ b/onadata/libs/utils/image_tools.py @@ -144,7 +144,7 @@ def _save_thumbnails(image, path, size, suffix, extension): try: # Ensure conversion to float in operations - image.thumbnail(get_dimensions(image.size, float(size)), Image.ANTIALIAS) + image.thumbnail(get_dimensions(image.size, float(size)), Image.LANCZOS) except ZeroDivisionError: pass From c99f1165e71c22f9b659aa2ec67d4bf53289a95a Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 9 Aug 2023 15:13:20 +0300 Subject: [PATCH 063/270] Use mock v4.0.3 Signed-off-by: Kipchirchir Sigei --- requirements/dev.pip | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/dev.pip b/requirements/dev.pip index dba6ded00c..23ad4f12c6 100644 --- a/requirements/dev.pip +++ b/requirements/dev.pip @@ -282,7 +282,7 @@ matplotlib-inline==0.1.6 # via ipython mccabe==0.6.1 # via pylint -mock==5.1.0 +mock==4.0.3 # via -r requirements/dev.in modilabs-python-utils==0.1.5 # via onadata From 075c19358fee83aba245314127e7d54e1c2c061d Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Mon, 14 Aug 2023 10:46:51 +0300 Subject: [PATCH 064/270] Fix pagination on endpoint `/api/v2/open-data//data` returning duplicates (#2467) * fix api/v2/open-data//data results growing with increase page number With each increase in page number, the number of results returned increase by a 100 * order /api/v2/open-data//data results when pagination is applied For pagination to work without dupliacates, the results have to be ordered. 
Otherwise, the database will not guarantee a record encountered in a previous page will not be returned in a future page as the database does not order results by default and will return randomly * enhance test case * refactor code * fix flaky test --- .gitignore | 1 + .../tests/viewsets/test_tableau_viewset.py | 48 +++++++++--- .../apps/api/viewsets/v2/tableau_viewset.py | 24 ++---- onadata/libs/pagination.py | 7 +- onadata/libs/tests/test_pagination.py | 74 +++++++++++++------ 5 files changed, 98 insertions(+), 56 deletions(-) diff --git a/.gitignore b/.gitignore index 0bc817ab3e..79d20b8a53 100644 --- a/.gitignore +++ b/.gitignore @@ -19,6 +19,7 @@ build # media folder used by tests # TODO figure out a way to clean this up rather than ignore it. /onadata/test_media +/onadata/test_data_media /onadata/media /onadata/static diff --git a/onadata/apps/api/tests/viewsets/test_tableau_viewset.py b/onadata/apps/api/tests/viewsets/test_tableau_viewset.py index 397f647a83..8107b129d0 100644 --- a/onadata/apps/api/tests/viewsets/test_tableau_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_tableau_viewset.py @@ -328,27 +328,55 @@ def test_replace_media_links(self): def test_pagination(self): """Pagination works correctly""" self.view = TableauViewSet.as_view({"get": "data"}) - # test 1 submission _open_data = get_or_create_opendata(self.xform) uuid = _open_data[0].uuid + # Multiple submissions are ordered by primary key + # For pagination to work without duplicates, the results have to + # be ordered. 
Otherwise, the database will not guarantee a record + # encountered in a previous page will not be returned in a future page + # as the database does not order results by default and will return + # randomly + path = os.path.join(self.fixture_dir, "repeats_sub.xml") + # Create additional submissions to increase our chances of the results + # being random + for _ in range(200): + self._make_submission(path, forced_submission_time=self._submission_time) + + # Page 1 request = self.factory.get( "/", data={"page": 1, "page_size": 100}, **self.extra ) response = self.view(request, uuid=uuid) self.assertEqual(response.status_code, 200) row_data = streaming_data(response) - self.assertEqual(len(row_data), 1) + self.assertEqual(len(row_data), 100) + instances = self.xform.instances.all().order_by("pk") + self.assertEqual(len(instances), 201) - # multiple submissions are ordered by primary key - path = os.path.join(self.fixture_dir, "repeats_sub.xml") - self._make_submission(path, forced_submission_time=self._submission_time) + for index, instance in enumerate(instances[:100]): + self.assertEqual(row_data[index]["_id"], instance.pk) + + # Page 2 + request = self.factory.get( + "/", data={"page": 2, "page_size": 100}, **self.extra + ) response = self.view(request, uuid=uuid) self.assertEqual(response.status_code, 200) row_data = streaming_data(response) - self.assertEqual(len(row_data), 2) - instances = self.xform.instances.all().order_by("pk") - self.assertEqual(row_data[0]["_id"], instances[0].pk) - self.assertEqual(row_data[1]["_id"], instances[1].pk) + self.assertEqual(len(row_data), 100) + + for index, instance in enumerate(instances[100:101]): + self.assertEqual(row_data[index]["_id"], instance.pk) + + # Page 3 + request = self.factory.get( + "/", data={"page": 3, "page_size": 100}, **self.extra + ) + response = self.view(request, uuid=uuid) + self.assertEqual(response.status_code, 200) + row_data = streaming_data(response) + self.assertEqual(len(row_data), 1) + 
self.assertEqual(row_data[0]["_id"], instances.last().pk) def test_count_query_param(self): """count query param works""" @@ -368,7 +396,7 @@ def test_gt_id_query_param(self): self.view = TableauViewSet.as_view({"get": "data"}) _open_data = get_or_create_opendata(self.xform) uuid = _open_data[0].uuid - request = self.factory.get("/", data={"gt_id": 500}, **self.extra) + request = self.factory.get("/", data={"gt_id": 10000}, **self.extra) response = self.view(request, uuid=uuid) self.assertEqual(response.status_code, 200) row_data = streaming_data(response) diff --git a/onadata/apps/api/viewsets/v2/tableau_viewset.py b/onadata/apps/api/viewsets/v2/tableau_viewset.py index c9c7f4d67d..81ebeace2d 100644 --- a/onadata/apps/api/viewsets/v2/tableau_viewset.py +++ b/onadata/apps/api/viewsets/v2/tableau_viewset.py @@ -169,17 +169,6 @@ class TableauViewSet(OpenDataViewSet): TableauViewSet - the /api/v2/tableau API endpoin implementation. """ - pagination_class = RawSQLQueryPageNumberPagination - data_count = None - - def paginate_queryset(self, queryset): - """Returns a paginated queryset.""" - if self.paginator is None: - return None - return self.paginator.paginate_queryset( - queryset, self.request, view=self, count=self.data_count - ) - @action(methods=["GET"], detail=True) def data(self, request, **kwargs): # pylint: disable=attribute-defined-outside-init @@ -195,6 +184,7 @@ def data(self, request, **kwargs): query_param_keys = request.query_params should_paginate = any(k in query_param_keys for k in pagination_keys) data = [] + data_count = 0 if isinstance(self.object.content_object, XForm): if not self.object.active: @@ -217,14 +207,14 @@ def data(self, request, **kwargs): if gt_id: qs_kwargs.update({"id__gt": gt_id}) - self.data_count = ( + data_count = ( Instance.objects.filter(**qs_kwargs, deleted_at__isnull=True) .only("pk") .count() ) if count: - return Response({"count": self.data_count}) + return Response({"count": data_count}) sql_where = "" sql_where_params 
= [] @@ -246,12 +236,10 @@ def data(self, request, **kwargs): sql_params = [tuple(xform_pks)] + sql_where_params if should_paginate: - offset, limit = self.paginator.get_offset_limit( - self.request, self.data_count - ) - sql += " LIMIT %s OFFSET %s" + raw_paginator = RawSQLQueryPageNumberPagination() + offset, limit = raw_paginator.get_offset_limit(self.request, data_count) + sql += " ORDER BY id LIMIT %s OFFSET %s" instances = Instance.objects.raw(sql, sql_params + [limit, offset]) - instances = self.paginate_queryset(instances) else: instances = Instance.objects.raw(sql, sql_params) diff --git a/onadata/libs/pagination.py b/onadata/libs/pagination.py index 24b5095611..d103a09c19 100644 --- a/onadata/libs/pagination.py +++ b/onadata/libs/pagination.py @@ -159,7 +159,7 @@ class RawSQLQueryPageNumberPagination(CountOverridablePageNumberPagination): django_paginator_class = RawSQLQueryPaginator - def get_offset_limit(self, request, count) -> Tuple[int, int]: + def get_offset_limit(self, request, count: int) -> Tuple[int, int]: """Returns the offset and limit to be used in a raw SQL query""" page_size = self.get_page_size(request) # pass an empty object_list since we are not handling any pagination @@ -169,9 +169,6 @@ def get_offset_limit(self, request, count) -> Tuple[int, int]: self.get_page_number(request, paginator) ) offset = (page_number - 1) * paginator.per_page - limit = offset + paginator.per_page - - if limit + paginator.orphans >= paginator.count: - limit = paginator.count + limit = paginator.per_page return (offset, limit) diff --git a/onadata/libs/tests/test_pagination.py b/onadata/libs/tests/test_pagination.py index 9df7caf754..44d4556365 100644 --- a/onadata/libs/tests/test_pagination.py +++ b/onadata/libs/tests/test_pagination.py @@ -7,7 +7,10 @@ from onadata.apps.main.tests.test_base import TestBase from onadata.apps.logger.models import Instance -from onadata.libs.pagination import StandardPageNumberPagination +from onadata.libs.pagination import ( 
+ StandardPageNumberPagination, + RawSQLQueryPageNumberPagination, +) class TestPaginationModule(TestBase): @@ -17,42 +20,67 @@ class TestPaginationModule(TestBase): def test_generate_link_header_function(self): req = HttpRequest() - req.META['SERVER_NAME'] = 'testserver' - req.META['SERVER_PORT'] = '80' - req.META['QUERY_STRING'] = "page=1&page_size=1" + req.META["SERVER_NAME"] = "testserver" + req.META["SERVER_PORT"] = "80" + req.META["QUERY_STRING"] = "page=1&page_size=1" req.GET = {"page": 1, "page_size": 1} self._publish_transportation_form() self._make_submissions() qs = Instance.objects.filter(xform=self.xform) - out = StandardPageNumberPagination().generate_link_header( - Request(req), qs - ) + out = StandardPageNumberPagination().generate_link_header(Request(req), qs) expected_out = { - 'Link': '; rel="next",' - ' ; rel="last"' + "Link": '; rel="next",' + ' ; rel="last"' } self.assertEqual(out, expected_out) # First page link is created when not on the first page - req.META['QUERY_STRING'] = "page=2&page_size=1" + req.META["QUERY_STRING"] = "page=2&page_size=1" req.GET = {"page": 2, "page_size": 1} - out = StandardPageNumberPagination().generate_link_header( - Request(req), qs - ) + out = StandardPageNumberPagination().generate_link_header(Request(req), qs) expected_out = { - 'Link': '; rel="prev", ' - '; rel="next", ' - '; rel="last", ' - '; rel="first"'} + "Link": '; rel="prev", ' + '; rel="next", ' + '; rel="last", ' + '; rel="first"' + } self.assertEqual(out, expected_out) # Last page link is not created on last page - req.META['QUERY_STRING'] = "page=4&page_size=1" + req.META["QUERY_STRING"] = "page=4&page_size=1" req.GET = {"page": 4, "page_size": 1} - out = StandardPageNumberPagination().generate_link_header( - Request(req), qs - ) + out = StandardPageNumberPagination().generate_link_header(Request(req), qs) expected_out = { - 'Link': '; rel="prev", ' - '; rel="first"'} + "Link": '; rel="prev", ' + '; rel="first"' + } self.assertEqual(out, 
expected_out) + + +class RawSQLQueryPageNumberPaginationTestCase(TestBase): + """Tests for the RawSQLQueryPageNumberPagination class""" + + def setUp(self): + super().setUp() + + self.request = HttpRequest() + self.request.method = "GET" + self.paginator = RawSQLQueryPageNumberPagination() + + def test_offset_limit(self): + """Returns the correct values for offset and limit""" + # page 1 + self.request.GET = {"page": 1, "page_size": 100} + offset, limit = self.paginator.get_offset_limit(Request(self.request), 500) + self.assertEqual(offset, 0) + self.assertEqual(limit, 100) + # page 2 + self.request.GET = {"page": 2, "page_size": 100} + offset, limit = self.paginator.get_offset_limit(Request(self.request), 500) + self.assertEqual(offset, 100) + self.assertEqual(limit, 100) + # page 3 + self.request.GET = {"page": 3, "page_size": 100} + offset, limit = self.paginator.get_offset_limit(Request(self.request), 500) + self.assertEqual(offset, 200) + self.assertEqual(limit, 100) From 7b09d5802f87324d2bb674454ef0c2f3ab3654b5 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Mon, 14 Aug 2023 11:41:15 +0300 Subject: [PATCH 065/270] Tag release v3.12.1 (#2468) * tag release v3.12.1 * add change to v3.12.1 --- CHANGES.rst | 13 +++++++++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 918a1db58b..3796e66715 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,19 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.12.1(2023-08-14) +------------------- + +- Fix pagination on endpoint /api/v2/open-data//data returning duplicates + `PR #2467 ` + [@kelvin-muchiri] +- Update dependencies + `PR @2466 ` + [@KipSigei] +- Fix attribute error when uploading xls datasets + `PR #2465 ` + [@FrankApiyo] + v3.12.0(2023-08-07) ------------------- diff --git a/onadata/__init__.py b/onadata/__init__.py index 862703f407..df990dface 100644 --- a/onadata/__init__.py +++ 
b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.12.0" +__version__ = "3.12.1" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index a7b805953c..71298a0c23 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.12.0 +version = 3.12.1 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 88f3b003b05eec2f9ad2c725a737a40df65ca10a Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Thu, 17 Aug 2023 18:57:51 +0300 Subject: [PATCH 066/270] Solve intermittent bug where form permissions are not applied for new forms (#2470) * solve intermittent bug where form permissions are not applied Django post_save signal is run at the end of of the Model.save() method. It is not guaranteed that the transaction will have commited. This leads to situation where XForm.DoesNotExist exception is at times raised when a new form is created * refactor code do not call set_project_perms_to_xform asynchronously is the post_save signal when a new form is created * refactor code * remove unnecessary CELERY_TASK_ALWAYS_EAGER override in tests * fix failing tests * refactor test case --- .../test_organization_profile_viewset.py | 28 +++++++++---- .../tests/viewsets/test_project_viewset.py | 40 ++++++++++++------- onadata/apps/viewer/models/data_dictionary.py | 7 +++- 3 files changed, 52 insertions(+), 23 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_organization_profile_viewset.py b/onadata/apps/api/tests/viewsets/test_organization_profile_viewset.py index a011415b8a..3e9b9a4167 100644 --- a/onadata/apps/api/tests/viewsets/test_organization_profile_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_organization_profile_viewset.py @@ -7,6 +7,7 @@ from django.contrib.auth.models import User, timezone from django.core.cache import cache +from django.test.utils 
import override_settings from guardian.shortcuts import get_perms from mock import patch @@ -28,8 +29,12 @@ from onadata.apps.api.viewsets.user_profile_viewset import UserProfileViewSet from onadata.apps.logger.models.project import Project from onadata.apps.main.models import UserProfile -from onadata.libs.permissions import DataEntryRole, OwnerRole, EditorRole -from onadata.libs.utils.cache_tools import PROJ_OWNER_CACHE, PROJ_PERM_CACHE, PROJ_TEAM_USERS_CACHE +from onadata.libs.permissions import DataEntryRole, OwnerRole +from onadata.libs.utils.cache_tools import ( + PROJ_OWNER_CACHE, + PROJ_PERM_CACHE, + PROJ_TEAM_USERS_CACHE, +) # pylint: disable=too-many-public-methods @@ -796,12 +801,19 @@ def test_add_members_to_owner_role(self): self.assertNotIn(aboy, owner_team.user_set.all()) + @override_settings(CELERY_TASK_ALWAYS_EAGER=True) def test_org_members_added_to_projects(self): # create org self._org_create() view = OrganizationProfileViewSet.as_view( {"post": "members", "get": "retrieve", "put": "members"} ) + # create a proj + project_data = {"owner": self.company_data["user"]} + self._project_create(project_data) + + with self.captureOnCommitCallbacks(execute=True): + self._publish_xls_form_to_project() # create aboy self.profile_data["username"] = "aboy" @@ -809,16 +821,14 @@ def test_org_members_added_to_projects(self): data = {"username": "aboy", "role": "owner"} request = self.factory.post( - "/", data=json.dumps(data), content_type="application/json", **self.extra + "/", + data=json.dumps(data), + content_type="application/json", + **self.extra, ) response = view(request, user="denoinc") self.assertEqual(response.status_code, 201) - # create a proj - project_data = {"owner": self.company_data["user"]} - self._project_create(project_data) - self._publish_xls_form_to_project() - # create alice self.profile_data["username"] = "alice" alice = self._create_user_profile().user @@ -833,6 +843,8 @@ def test_org_members_added_to_projects(self): 
self.assertEqual(response.status_code, 201) # Assert that user added in org is added to teams in proj + aboy.refresh_from_db() + alice.refresh_from_db() self.assertTrue(OwnerRole.user_has_role(aboy, self.project)) self.assertTrue(OwnerRole.user_has_role(alice, self.project)) self.assertTrue(OwnerRole.user_has_role(aboy, self.xform)) diff --git a/onadata/apps/api/tests/viewsets/test_project_viewset.py b/onadata/apps/api/tests/viewsets/test_project_viewset.py index 0557332d2b..b90c0fbd69 100644 --- a/onadata/apps/api/tests/viewsets/test_project_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_project_viewset.py @@ -777,13 +777,18 @@ def test_project_manager_can_assign_form_to_project_no_perm(self): self.assertEqual(response.status_code, 403) # pylint: disable=invalid-name + @override_settings(CELERY_TASK_ALWAYS_EAGER=True) def test_project_users_get_readonly_role_on_add_form(self): self._project_create() alice_data = {"username": "alice", "email": "alice@localhost.com"} alice_profile = self._create_user_profile(alice_data) ReadOnlyRole.add(alice_profile.user, self.project) self.assertTrue(ReadOnlyRole.user_has_role(alice_profile.user, self.project)) - self._publish_xls_form_to_project() + + with self.captureOnCommitCallbacks(execute=True): + self._publish_xls_form_to_project() + + alice_profile.refresh_from_db() self.assertTrue(ReadOnlyRole.user_has_role(alice_profile.user, self.xform)) self.assertFalse(OwnerRole.user_has_role(alice_profile.user, self.xform)) @@ -1253,6 +1258,7 @@ def test_project_share_endpoint(self, mock_send_mail): role_class._remove_obj_permissions(alice_profile.user, self.project) # pylint: disable=invalid-name + @override_settings(CELERY_TASK_ALWAYS_EAGER=True) @patch("onadata.apps.api.viewsets.project_viewset.send_mail") def test_project_share_endpoint_form_published_later(self, mock_send_mail): # create project @@ -1280,7 +1286,10 @@ def test_project_share_endpoint_form_published_later(self, mock_send_mail): 
self.assertTrue(role_class.user_has_role(alice_profile.user, self.project)) # publish form after project sharing - self._publish_xls_form_to_project() + with self.captureOnCommitCallbacks(execute=True): + self._publish_xls_form_to_project() + + alice_profile.user.refresh_from_db() self.assertTrue(role_class.user_has_role(alice_profile.user, self.xform)) # Reset the mock called value to False mock_send_mail.called = False @@ -1673,6 +1682,7 @@ def test_projects_get_exception(self): error_msg = "Invalid value for project_id. It must be a positive integer." self.assertEqual(str(response.data["detail"]), error_msg) + @override_settings(CELERY_TASK_ALWAYS_EAGER=True) def test_publish_to_public_project(self): public_project = Project( name="demo", @@ -1682,12 +1692,14 @@ def test_publish_to_public_project(self): organization=self.user, ) public_project.save() - self.project = public_project - self._publish_xls_form_to_project(public=True) - self.assertEqual(self.xform.shared, True) - self.assertEqual(self.xform.shared_data, True) + with self.captureOnCommitCallbacks(execute=True): + self._publish_xls_form_to_project(public=True) + + self.xform.refresh_from_db() + self.assertTrue(self.xform.shared) + self.assertTrue(self.xform.shared_data) def test_public_form_private_project(self): self.project = Project( @@ -1738,6 +1750,7 @@ def test_public_form_private_project(self): self.assertFalse(self.xform.shared_data) self.assertFalse(self.project.shared) + @override_settings(CELERY_TASK_ALWAYS_EAGER=True) def test_publish_to_public_project_public_form(self): public_project = Project( name="demo", @@ -1747,9 +1760,7 @@ def test_publish_to_public_project_public_form(self): organization=self.user, ) public_project.save() - self.project = public_project - data = { "owner": f"http://testserver/api/v1/users/{self.project.organization.username}", "public": True, @@ -1763,10 +1774,13 @@ def test_publish_to_public_project_public_form(self): "title": "transportation_2011_07_25", 
"bamboo_dataset": "", } - self._publish_xls_form_to_project(publish_data=data, merge=False) - self.assertEqual(self.xform.shared, True) - self.assertEqual(self.xform.shared_data, True) + with self.captureOnCommitCallbacks(execute=True): + self._publish_xls_form_to_project(publish_data=data, merge=False) + + self.xform.refresh_from_db() + self.assertTrue(self.xform.shared) + self.assertTrue(self.xform.shared_data) def test_project_all_users_can_share_remove_themselves(self): self._publish_xls_form_to_project() @@ -2913,9 +2927,7 @@ def test_create_invitation(self, mock_send_mail): "status": 1, }, ) - mock_send_mail.assert_called_with( - invitation.pk, "https://onadata.com/register" - ) + mock_send_mail.assert_called_with(invitation.pk, "https://onadata.com/register") def test_email_required(self, mock_send_mail): """email is required""" diff --git a/onadata/apps/viewer/models/data_dictionary.py b/onadata/apps/viewer/models/data_dictionary.py index 856ea96868..d71486ccd3 100644 --- a/onadata/apps/viewer/models/data_dictionary.py +++ b/onadata/apps/viewer/models/data_dictionary.py @@ -7,6 +7,7 @@ from django.core.files.uploadedfile import InMemoryUploadedFile from django.db.models.signals import post_save, pre_save +from django.db import transaction from django.utils import timezone from django.utils.translation import gettext as _ @@ -214,7 +215,11 @@ def set_object_permissions(sender, instance=None, created=False, **kwargs): ) # noqa try: - set_project_perms_to_xform_async.delay(xform.pk, instance.project.pk) + transaction.on_commit( + lambda: set_project_perms_to_xform_async.delay( + xform.pk, instance.project.pk + ) + ) except OperationalError: # pylint: disable=import-outside-toplevel from onadata.libs.utils.project_utils import ( From 3fa23c529081fd1a103ec78d5fc66ca9f2377893 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Wed, 23 Aug 2023 11:24:25 +0300 Subject: [PATCH 067/270] Enhance performance when exporting data on endpoint `api/v1/data/.` (#2460) * 
remove default ordering by id when exporting data from Data ViewSet the default ordering by id is making queries for run extremely slow when exporting large amounts of data. to sort data by id in ascending order, the query parameter sort={"_id":1} will be used. For more info read https://github.com/onaio/onadata/blob/main/docs/data.rst#sort-submitted-data-of-a-specific-form-using-existing-fields * remove duplicate db queries on endpoint /api/v1/data/.csv * fix failing test * fix flaky test * fix flaky test * fix flaky test * remove futile test case Notes are excluded when exporting the csv. The column _notes is usually added in the CSV but its always overriden to be blank as per the implmentation in the CSVDataFrameBuilder class. So this test case is futile * use query param to sort exported data in XFormViewSet * disable pylint rule too-many-line for file * fix flaky tests fix flaky tests by ensuring queryset is always ordered * order records in test cases to avoid flaky results * optimize performance by removing redundant implementations * stop tracking onadata/test_data_media * disable pylint rules fixing the rules would require alot of refactor so disabling the rules will suffice for now * disable linting warning * address lint warning * address lint warnings * refactor code * address failing tests * address lint warnings * address linting error * address lint warning * fix failing test * refactor code * fix failing tests * refactor * suppress lint warning * get sort param from request * remove unused arg * update documentation * update documentation * sort paginated data by id * address linting error line too long * fix failing test * update test case --- docs/data.rst | 16 +- .../api/tests/viewsets/test_data_viewset.py | 212 ++- .../api/tests/viewsets/test_note_viewset.py | 36 - .../tests/viewsets/test_tableau_viewset.py | 2 +- .../api/tests/viewsets/test_xform_viewset.py | 8 +- onadata/apps/api/viewsets/data_viewset.py | 124 +- 
onadata/apps/api/viewsets/xform_viewset.py | 10 +- .../apps/logger/tests/models/test_instance.py | 12 +- .../apps/logger/tests/test_form_submission.py | 47 +- .../apps/main/tests/test_form_api_delete.py | 49 +- onadata/apps/main/views.py | 51 +- onadata/apps/viewer/models/parsed_instance.py | 243 +-- .../tests/fixtures/transportation_w_notes.csv | 5 - onadata/apps/viewer/tests/test_exports.py | 1486 ++++++++-------- .../apps/viewer/tests/test_parsed_instance.py | 129 +- onadata/libs/tests/utils/test_csv_builder.py | 1555 +++++++++-------- .../libs/tests/utils/test_export_builder.py | 13 +- onadata/libs/tests/utils/test_export_tools.py | 6 +- onadata/libs/utils/api_export_tools.py | 1 + onadata/libs/utils/csv_builder.py | 79 +- onadata/libs/utils/export_builder.py | 2 +- onadata/libs/utils/export_tools.py | 56 +- 22 files changed, 2323 insertions(+), 1819 deletions(-) delete mode 100644 onadata/apps/viewer/tests/fixtures/transportation_w_notes.csv diff --git a/docs/data.rst b/docs/data.rst index 851e351b57..48c8000602 100644 --- a/docs/data.rst +++ b/docs/data.rst @@ -372,7 +372,9 @@ Sample response with link header Sort submitted data of a specific form using existing fields ------------------------------------------------------------- -Provides a sorted list of json submitted data for a specific form by specifing the order in which the query returns matching data. Use the `sort` parameter to filter the list of submissions.The sort parameter has field and value pairs. +Provides a sorted list of json submitted data for a specific form by specifing the order in which the query returns matching data. + +No ordering is applied by default -- the data is returned in any arbitrary order. Use the `sort` parameter to filter the list of submissions. The sort parameter has field and value pairs. :: @@ -390,6 +392,18 @@ Descending sort query using the age field: {"age":-1} +Query sorted by id field in ascending. + +:: + + {"_id":1} + +Query sorted by id field in descending. 
+ +:: + + {"_id":-1} + Example of Ascending Sort ^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/onadata/apps/api/tests/viewsets/test_data_viewset.py b/onadata/apps/api/tests/viewsets/test_data_viewset.py index cb751b7888..429f2be2dd 100644 --- a/onadata/apps/api/tests/viewsets/test_data_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_data_viewset.py @@ -8,6 +8,8 @@ import json import logging import os +import csv +from io import StringIO from builtins import open from datetime import timedelta from tempfile import NamedTemporaryFile @@ -115,6 +117,7 @@ class TestDataViewSet(SerializeMixin, TestBase): """ Test /data API endpoint implementation. """ + lockfile = __file__ def setUp(self): @@ -269,6 +272,51 @@ def test_numeric_types_are_rendered_as_required(self): self.assertEqual(response.data[0].get("net_worth"), 100000.00) self.assertEqual(response.data[0].get("imei"), "351746052009472") + def test_fields_query_params(self): + """fields query params works""" + view = DataViewSet.as_view({"get": "list"}) + fixture_dir = os.path.join(self.this_directory, "fixtures", "csv_export") + form_path = os.path.join(fixture_dir, "tutorial_w_repeats.xlsx") + self._publish_xls_file_and_set_xform(form_path) + submission_path = os.path.join(fixture_dir, "repeats_sub.xml") + + for _ in range(102): + self._make_submission(submission_path) + + fields_query = {"fields": '["_id", "_status"]'} + request = self.factory.get("/", data=fields_query, **self.extra) + response = view(request, pk=self.xform.id) + self.assertEqual(response.status_code, 200) + self.assertEqual(list(response.data[0].keys()), ["_id", "_status"]) + + # With pagination + instances = self.xform.instances.all().order_by("pk") + # Page 1 + request = self.factory.get( + "/", + data={**fields_query, "page": 1, "page_size": 100}, + **self.extra, + ) + response = view(request, pk=self.xform.id) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 100) + + for index, instance in 
enumerate(instances[:100]): + self.assertEqual(response.data[index]["_id"], instance.pk) + + # Page 2 + request = self.factory.get( + "/", + data={**fields_query, "page": 2, "page_size": 100}, + **self.extra, + ) + response = view(request, pk=self.xform.id) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 2) + + for index, instance in enumerate(instances[100:101]): + self.assertEqual(response.data[index]["_id"], instance.pk) + def test_data_jsonp(self): self._make_submissions() view = DataViewSet.as_view({"get": "list"}) @@ -1118,11 +1166,11 @@ def test_data_with_query_parameter(self): second_datetime = start_time + timedelta(days=1, hours=20) query_str = ( - '{"_submission_time": {"$gte": "' + - first_datetime + - '", "$lte": "' + - second_datetime.strftime(MONGO_STRFTIME) + - '"}}' + '{"_submission_time": {"$gte": "' + + first_datetime + + '", "$lte": "' + + second_datetime.strftime(MONGO_STRFTIME) + + '"}}' ) request = self.factory.get("/?query=%s" % query_str, **self.extra) @@ -1679,7 +1727,6 @@ def test_post_save_signal_on_submission_deletion(self, mock, send_message_mock): @patch("onadata.apps.api.viewsets.data_viewset.send_message") def test_deletion_of_bulk_submissions(self, send_message_mock): - self._make_submissions() self.xform.refresh_from_db() formid = self.xform.pk @@ -1756,7 +1803,7 @@ def test_submissions_permanent_deletion(self, send_message_mock): self.xform.refresh_from_db() self.assertEqual(self.xform.num_of_submissions, 3) self.assertEqual(self.xform.instances.count(), 3) - + # Test project details updated successfully self.assertEqual( self.xform.project.date_modified.strftime("%Y-%m-%d %H:%M:%S"), @@ -1865,7 +1912,9 @@ def test_permanent_instance_delete_inactive_form(self, send_message_mock): dataid = self.xform.instances.filter(deleted_at=None).order_by("id")[0].pk - request = self.factory.delete("/", **self.extra, data={"permanent_delete": True}) + request = self.factory.delete( + "/", **self.extra, 
data={"permanent_delete": True} + ) response = view(request, pk=formid, dataid=dataid) self.assertEqual(response.status_code, 204) @@ -2330,16 +2379,17 @@ def test_instances_with_empty_geopoints(self, mock_signal): self.assertEqual(response.status_code, 200) self.assertEqual( json.dumps(response.data)[:94], - '{"type": "FeatureCollection", "features":' + - ' [{"type": "Feature", "geometry": null, "properties":') - self.assertEqual(len(response.data['features']), 1) - feature = dict(response.data['features'][0]) - self.assertEqual(feature['type'], 'Feature') - self.assertEqual(feature['geometry'], None) - self.assertTrue(isinstance(feature['properties'], dict)) + '{"type": "FeatureCollection", "features":' + + ' [{"type": "Feature", "geometry": null, "properties":', + ) + self.assertEqual(len(response.data["features"]), 1) + feature = dict(response.data["features"][0]) + self.assertEqual(feature["type"], "Feature") + self.assertEqual(feature["geometry"], None) + self.assertTrue(isinstance(feature["properties"], dict)) self.assertEqual(self.xform.instances.count(), 2) - self.assertEqual(self.xform.polygon_xpaths(), ['shape']) - self.assertEqual(self.xform.geotrace_xpaths(), ['path']) + self.assertEqual(self.xform.polygon_xpaths(), ["shape"]) + self.assertEqual(self.xform.geotrace_xpaths(), ["path"]) # check if instances_with_geopoints is True for the form self.xform.refresh_from_db() @@ -2347,7 +2397,6 @@ def test_instances_with_empty_geopoints(self, mock_signal): @patch("onadata.apps.viewer.signals._post_process_submissions") def test_instances_with_empty_geopoints_no_polygons(self, mock_signal): - # publish sample geo submissions self._publish_submit_geojson(has_empty_geoms=True, only_geopoints=True) @@ -3448,3 +3497,130 @@ def test_data_retrieve_instance_osm_format(self): ) response = view(request, pk=formid) self.assertEqual(len(response.data), 0) + + +@override_settings(MEDIA_ROOT=os.path.join(settings.PROJECT_ROOT, "test_data_media/")) +class 
ExportDataTestCase(SerializeMixin, TestBase): + """Tests exporting data""" + + lockfile = __file__ + + def setUp(self): + super().setUp() + self._create_user_and_login() + self._publish_transportation_form() + self.factory = RequestFactory() + self.extra = {"HTTP_AUTHORIZATION": "Token %s" % self.user.auth_token} + self.view = DataViewSet.as_view({"get": "list"}) + + def test_csv_export(self): + """Data is exported as CSV""" + self._make_submissions() + formid = self.xform.pk + request = self.factory.get("/", data={"format": "csv"}, **self.extra) + response = self.view(request, pk=formid) + self.assertEqual(response.status_code, 200) + csv_file_obj = StringIO( + "".join([c.decode("utf-8") for c in response.streaming_content]) + ) + csv_reader = csv.reader(csv_file_obj) + headers = next(csv_reader) + expected_headers = [ + "transport/available_transportation_types_to_referral_facility/ambulance", + "transport/available_transportation_types_to_referral_facility/bicycle", + "transport/available_transportation_types_to_referral_facility/boat_canoe", + "transport/available_transportation_types_to_referral_facility/bus", + "transport/available_transportation_types_to_referral_facility/donkey_mule_cart", + "transport/available_transportation_types_to_referral_facility/keke_pepe", + "transport/available_transportation_types_to_referral_facility/lorry", + "transport/available_transportation_types_to_referral_facility/motorbike", + "transport/available_transportation_types_to_referral_facility/taxi", + "transport/available_transportation_types_to_referral_facility/other", + "transport/available_transportation_types_to_referral_facility_other", + "transport/loop_over_transport_types_frequency/ambulance/frequency_to_referral_facility", + "transport/loop_over_transport_types_frequency/bicycle/frequency_to_referral_facility", + "transport/loop_over_transport_types_frequency/boat_canoe/frequency_to_referral_facility", + 
"transport/loop_over_transport_types_frequency/bus/frequency_to_referral_facility", + "transport/loop_over_transport_types_frequency/donkey_mule_cart/frequency_to_referral_facility", + "transport/loop_over_transport_types_frequency/keke_pepe/frequency_to_referral_facility", + "transport/loop_over_transport_types_frequency/lorry/frequency_to_referral_facility", + "transport/loop_over_transport_types_frequency/motorbike/frequency_to_referral_facility", + "transport/loop_over_transport_types_frequency/taxi/frequency_to_referral_facility", + "image1", + "meta/instanceID", + "_id", + "_uuid", + "_submission_time", + "_date_modified", + "_tags", + "_notes", + "_version", + "_duration", + "_submitted_by", + "_total_media", + "_media_count", + "_media_all_received", + ] + self.assertEqual(headers, expected_headers) + number_records = len(list(csv_reader)) + self.assertEqual(number_records, 4) + + def test_sort_query_param(self): + """sort query param works with exports""" + + self._make_submissions() + formid = self.xform.pk + # sort csv export data by id in descending order + request = self.factory.get( + "/", data={"format": "csv", "sort": '{"_id": -1}'}, **self.extra + ) + response = self.view(request, pk=formid) + self.assertEqual(response.status_code, 200) + csv_file_obj = StringIO( + "".join([c.decode("utf-8") for c in response.streaming_content]) + ) + csv_reader = csv.reader(csv_file_obj) + instances = Instance.objects.filter(xform_id=formid).order_by("-id") + self.assertEqual(instances.count(), 4) + headers = next(csv_reader) + expected_headers = [ + "transport/available_transportation_types_to_referral_facility/ambulance", + "transport/available_transportation_types_to_referral_facility/bicycle", + "transport/available_transportation_types_to_referral_facility/boat_canoe", + "transport/available_transportation_types_to_referral_facility/bus", + "transport/available_transportation_types_to_referral_facility/donkey_mule_cart", + 
"transport/available_transportation_types_to_referral_facility/keke_pepe", + "transport/available_transportation_types_to_referral_facility/lorry", + "transport/available_transportation_types_to_referral_facility/motorbike", + "transport/available_transportation_types_to_referral_facility/taxi", + "transport/available_transportation_types_to_referral_facility/other", + "transport/available_transportation_types_to_referral_facility_other", + "transport/loop_over_transport_types_frequency/ambulance/frequency_to_referral_facility", + "transport/loop_over_transport_types_frequency/bicycle/frequency_to_referral_facility", + "transport/loop_over_transport_types_frequency/boat_canoe/frequency_to_referral_facility", + "transport/loop_over_transport_types_frequency/bus/frequency_to_referral_facility", + "transport/loop_over_transport_types_frequency/donkey_mule_cart/frequency_to_referral_facility", + "transport/loop_over_transport_types_frequency/keke_pepe/frequency_to_referral_facility", + "transport/loop_over_transport_types_frequency/lorry/frequency_to_referral_facility", + "transport/loop_over_transport_types_frequency/motorbike/frequency_to_referral_facility", + "transport/loop_over_transport_types_frequency/taxi/frequency_to_referral_facility", + "image1", + "meta/instanceID", + "_id", + "_uuid", + "_submission_time", + "_date_modified", + "_tags", + "_notes", + "_version", + "_duration", + "_submitted_by", + "_total_media", + "_media_count", + "_media_all_received", + ] + self.assertEqual(headers, expected_headers) + # csv records should be ordered by id in descending order + for instance in instances: + row = next(csv_reader) + self.assertEqual(str(instance.id), row[22]) diff --git a/onadata/apps/api/tests/viewsets/test_note_viewset.py b/onadata/apps/api/tests/viewsets/test_note_viewset.py index 9b6cba82d1..f35b595847 100644 --- a/onadata/apps/api/tests/viewsets/test_note_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_note_viewset.py @@ -1,13 +1,7 @@ -import 
os -from datetime import datetime - -from django.conf import settings -from django.utils.timezone import make_aware from django.test import RequestFactory from guardian.shortcuts import assign_perm from onadata.apps.api.viewsets.note_viewset import NoteViewSet -from onadata.apps.api.viewsets.xform_viewset import XFormViewSet from onadata.apps.logger.models import Note from onadata.apps.main.tests.test_base import TestBase from onadata.libs.serializers.note_serializer import NoteSerializer @@ -186,36 +180,6 @@ def test_only_add_question_notes_to_existing_fields(self): instance = self.xform.instances.all()[0] self.assertEqual(len(instance.json["_notes"]), 0) - def test_csv_export_form_w_notes(self): - """ - Test CSV exports include notes for submissions that have notes. - """ - self._add_notes_to_data_point() - self._add_notes_to_data_point() - - time = make_aware(datetime(2016, 7, 1)) - for instance in self.xform.instances.all(): - instance.date_created = time - instance.save() - instance.parsed_instance.save() - - view = XFormViewSet.as_view({"get": "retrieve"}) - - request = self.factory.get("/", **self.extra) - response = view(request, pk=self.xform.pk, format="csv") - self.assertTrue(response.status_code, 200) - - test_file_path = os.path.join( - settings.PROJECT_ROOT, - "apps", - "viewer", - "tests", - "fixtures", - "transportation_w_notes.csv", - ) - - self._test_csv_response(response, test_file_path) - def test_attribute_error_bug(self): """NoteSerializer: Should not raise AttributeError exeption""" note = Note(note="Hello", instance=self._first_xform_instance) diff --git a/onadata/apps/api/tests/viewsets/test_tableau_viewset.py b/onadata/apps/api/tests/viewsets/test_tableau_viewset.py index 8107b129d0..483b6515c2 100644 --- a/onadata/apps/api/tests/viewsets/test_tableau_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_tableau_viewset.py @@ -396,7 +396,7 @@ def test_gt_id_query_param(self): self.view = TableauViewSet.as_view({"get": "data"}) _open_data = 
get_or_create_opendata(self.xform) uuid = _open_data[0].uuid - request = self.factory.get("/", data={"gt_id": 10000}, **self.extra) + request = self.factory.get("/", data={"gt_id": 100000}, **self.extra) response = self.view(request, uuid=uuid) self.assertEqual(response.status_code, 200) row_data = streaming_data(response) diff --git a/onadata/apps/api/tests/viewsets/test_xform_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_viewset.py index b90e9f14f3..3eed7a1cba 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_viewset.py @@ -3858,8 +3858,8 @@ def test_csv_export_with_win_excel_utf8(self): # check that response has property 'has_hxl_support' which is true self.assertEqual(response.status_code, 200) self.assertTrue(response.data.get("has_hxl_support")) - - data = {"win_excel_utf8": True} + # sort csv data in ascending order + data = {"win_excel_utf8": True, "sort": '{"_id": 1}'} request = self.factory.get("/", data=data, **self.extra) response = view(request, pk=self.xform.pk, format="csv") self.assertEqual(response.status_code, 200) @@ -3887,8 +3887,8 @@ def test_csv_export_with_win_excel_utf8(self): filename = filename_from_disposition(content_disposition) basename, ext = os.path.splitext(filename) self.assertEqual(ext, ".csv") - - data = {"win_excel_utf8": False} + # sort csv data in ascending order + data = {"win_excel_utf8": False, "sort": '{"_id": 1}'} request = self.factory.get("/", data=data, **self.extra) response = view(request, pk=self.xform.pk, format="csv") self.assertEqual(response.status_code, 200) diff --git a/onadata/apps/api/viewsets/data_viewset.py b/onadata/apps/api/viewsets/data_viewset.py index b13e512a7e..2448c70926 100644 --- a/onadata/apps/api/viewsets/data_viewset.py +++ b/onadata/apps/api/viewsets/data_viewset.py @@ -10,7 +10,6 @@ from django.conf import settings from django.core.exceptions import PermissionDenied from django.db.models import Q -from 
django.db.models.query import QuerySet from django.db.utils import DataError, OperationalError from django.http import Http404, StreamingHttpResponse from django.utils import timezone @@ -26,7 +25,9 @@ from rest_framework.viewsets import ModelViewSet from onadata.libs.serializers.geojson_serializer import GeoJsonSerializer -from onadata.libs.pagination import CountOverridablePageNumberPagination +from onadata.libs.pagination import ( + CountOverridablePageNumberPagination, +) from onadata.apps.api.permissions import ConnectViewsetPermissions, XFormPermissions from onadata.apps.api.tools import add_tags_to_instance, get_baseviewset_class @@ -41,6 +42,9 @@ get_sql_with_params, get_where_clause, query_data, + query_fields_data, + _get_sort_fields, + ParsedInstance, ) from onadata.libs import filters from onadata.libs.data import parse_int, strtobool @@ -74,6 +78,7 @@ settings, "SUBMISSION_RETRIEVAL_THRESHOLD", 10000 ) + # pylint: disable=invalid-name BaseViewset = get_baseviewset_class() @@ -162,9 +167,9 @@ def get_serializer_class(self): elif fmt == "xml": serializer_class = DataInstanceXMLSerializer elif ( - form_pk is not None and - dataid is None and - form_pk != self.public_data_endpoint + form_pk is not None + and dataid is None + and form_pk != self.public_data_endpoint ): if sort or fields: serializer_class = JsonDataSerializer @@ -412,7 +417,6 @@ def destroy(self, request, *args, **kwargs): ) if isinstance(self.object, Instance): - if request.user.has_perm(CAN_DELETE_SUBMISSION, self.object.xform): instance_id = self.object.pk if permanent_delete: @@ -644,7 +648,8 @@ def list(self, request, *args, **kwargs): if export_type == "geojson": # raise 404 if all instances dont have geoms if not xform.instances_with_geopoints and not ( - xform.polygon_xpaths() or xform.geotrace_xpaths()): + xform.polygon_xpaths() or xform.geotrace_xpaths() + ): raise Http404(_("Not Found")) # add pagination when fetching geojson features @@ -660,14 +665,18 @@ def 
set_object_list(self, query, fields, sort, start, limit, is_public_request): """ Set the submission instances queryset. """ + xform = None + try: enable_etag = True + if not is_public_request: xform = self.get_object() self.data_count = xform.num_of_submissions enable_etag = self.data_count < SUBMISSION_RETRIEVAL_THRESHOLD where, where_params = get_where_clause(query) + if where: # pylint: disable=attribute-defined-outside-init self.object_list = self.object_list.extra( @@ -675,29 +684,69 @@ def set_object_list(self, query, fields, sort, start, limit, is_public_request): ) if (start and limit or limit) and (not sort and not fields): - start = start if start is not None else 0 - limit = limit if start is None or start == 0 else start + limit + start_index = start if start is not None else 0 + end_index = limit if start is None or start == 0 else start + limit # pylint: disable=attribute-defined-outside-init self.object_list = filter_queryset_xform_meta_perms( self.get_object(), self.request.user, self.object_list ) # pylint: disable=attribute-defined-outside-init - self.object_list = self.object_list[start:limit] + self.object_list = self.object_list[start_index:end_index] elif (sort or limit or start or fields) and not is_public_request: try: query = filter_queryset_xform_meta_perms_sql( self.get_object(), self.request.user, query ) - # pylint: disable=attribute-defined-outside-init - self.object_list = query_data( - xform, - query=query, - sort=sort, - start_index=start, - limit=limit, - fields=fields, - json_only=not self.kwargs.get("format") == "xml", + # pylint: disable=protected-access + has_json_fields = sort and ParsedInstance._has_json_fields( + _get_sort_fields(sort) ) + should_query_json_fields = fields or has_json_fields + + if self._should_paginate(): + retrieval_threshold = getattr( + settings, "SUBMISSION_RETRIEVAL_THRESHOLD", 10000 + ) + query_param_keys = self.request.query_params + page = int( + query_param_keys.get(self.paginator.page_query_param, 
1) + ) + page_size = int( + query_param_keys.get( + self.paginator.page_size_query_param, + retrieval_threshold, + ) + ) + start = (page - 1) * page_size + limit = page_size + + if sort is None: + # Paginated data needs to be sorted. We order by + # id ascending if sort is empty + sort = '{"_id": 1}' + + if should_query_json_fields: + data = query_fields_data( + xform, + fields=fields, + query=query, + sort=sort, + start_index=start, + limit=limit, + ) + # pylint: disable=attribute-defined-outside-init + self.object_list = data + else: + data = query_data( + xform, + query=query, + sort=sort, + start_index=start, + limit=limit, + json_only=not self.kwargs.get("format") == "xml", + ) + # pylint: disable=attribute-defined-outside-init + self.object_list = data except NoRecordsPermission: # pylint: disable=attribute-defined-outside-init self.object_list = [] @@ -705,12 +754,10 @@ def set_object_list(self, query, fields, sort, start, limit, is_public_request): # ETags are Disabled for XForms with Submissions that surpass # the configured SUBMISSION_RETRIEVAL_THRESHOLD setting if enable_etag: - if isinstance(self.object_list, QuerySet): - setattr( - self, "etag_hash", (get_etag_hash_from_query(self.object_list)) - ) - else: - sql, params, records = get_sql_with_params( + sql = params = None + + if xform: + sql, params = get_sql_with_params( xform, query=query, sort=sort, @@ -718,11 +765,12 @@ def set_object_list(self, query, fields, sort, start, limit, is_public_request): limit=limit, fields=fields, ) - setattr( - self, - "etag_hash", - (get_etag_hash_from_query(records, sql, params)), - ) + + setattr( + self, + "etag_hash", + (get_etag_hash_from_query(sql, params)), + ) except ValueError as e: raise ParseError(str(e)) from e except DataError as e: @@ -736,17 +784,20 @@ def paginate_queryset(self, queryset): queryset, self.request, view=self, count=self.data_count ) - # pylint: disable=too-many-arguments,too-many-locals - def _get_data(self, query, fields, sort, start, 
limit, is_public_request): - self.set_object_list(query, fields, sort, start, limit, is_public_request) - - retrieval_threshold = getattr(settings, "SUBMISSION_RETRIEVAL_THRESHOLD", 10000) + def _should_paginate(self) -> bool: + """Check whether the request is a pagination request""" pagination_keys = [ self.paginator.page_query_param, self.paginator.page_size_query_param, ] query_param_keys = self.request.query_params - should_paginate = any(k in query_param_keys for k in pagination_keys) + return any(k in query_param_keys for k in pagination_keys) + + # pylint: disable=too-many-arguments,too-many-locals + def _get_data(self, query, fields, sort, start, limit, is_public_request): + self.set_object_list(query, fields, sort, start, limit, is_public_request) + should_paginate = self._should_paginate() + retrieval_threshold = getattr(settings, "SUBMISSION_RETRIEVAL_THRESHOLD", 10000) if not should_paginate and not is_public_request: # Paginate requests that try to retrieve data that surpasses @@ -758,6 +809,7 @@ def _get_data(self, query, fields, sort, start, limit, is_public_request): self.paginator.page_size = retrieval_threshold if not isinstance(self.object_list, types.GeneratorType) and should_paginate: + query_param_keys = self.request.query_params current_page = query_param_keys.get(self.paginator.page_query_param, 1) current_page_size = query_param_keys.get( self.paginator.page_size_query_param, retrieval_threshold diff --git a/onadata/apps/api/viewsets/xform_viewset.py b/onadata/apps/api/viewsets/xform_viewset.py index 21b9dcf718..6620fd2f91 100644 --- a/onadata/apps/api/viewsets/xform_viewset.py +++ b/onadata/apps/api/viewsets/xform_viewset.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +# pylint: disable=too-many-lines """ The /forms API endpoint. 
""" @@ -574,7 +575,14 @@ def retrieve(self, request, *args, **kwargs): # perform default viewset retrieve, no data export return super().retrieve(request, *args, **kwargs) - return custom_response_handler(request, xform, query, export_type, token, meta) + return custom_response_handler( + request, + xform, + query, + export_type, + token, + meta, + ) @action(methods=["POST"], detail=True) def share(self, request, *args, **kwargs): diff --git a/onadata/apps/logger/tests/models/test_instance.py b/onadata/apps/logger/tests/models/test_instance.py index 3716a1894d..cae12061fc 100644 --- a/onadata/apps/logger/tests/models/test_instance.py +++ b/onadata/apps/logger/tests/models/test_instance.py @@ -13,7 +13,11 @@ numeric_checker, ) from onadata.apps.main.tests.test_base import TestBase -from onadata.apps.viewer.models.parsed_instance import ParsedInstance, query_data +from onadata.apps.viewer.models.parsed_instance import ( + ParsedInstance, + query_data, + query_fields_data, +) from onadata.libs.serializers.submission_review_serializer import ( SubmissionReviewSerializer, ) @@ -190,7 +194,7 @@ def test_query_filter_by_integer(self): # with fields data = [ i.get("_id") - for i in query_data( + for i in query_fields_data( self.xform, query='{"_id": %s}' % (oldest), fields='["_id"]' ) ] @@ -200,7 +204,7 @@ def test_query_filter_by_integer(self): # mongo $gt data = [ i.get("_id") - for i in query_data( + for i in query_fields_data( self.xform, query='{"_id": {"$gt": %s}}' % (oldest), fields='["_id"]' ) ] @@ -231,7 +235,7 @@ def test_query_filter_by_datetime_field(self, mock_time): # mongo $gt data = [ i.get("_submission_time") - for i in query_data( + for i in query_fields_data( self.xform, query='{"_submission_time": {"$lt": "%s"}}' % (atime), fields='["_submission_time"]', diff --git a/onadata/apps/logger/tests/test_form_submission.py b/onadata/apps/logger/tests/test_form_submission.py index 52a7ee33fa..cfc06d6fe5 100644 --- 
a/onadata/apps/logger/tests/test_form_submission.py +++ b/onadata/apps/logger/tests/test_form_submission.py @@ -21,7 +21,7 @@ from onadata.apps.logger.models.project import Project from onadata.apps.logger.models.xform import XForm from onadata.apps.logger.xform_instance_parser import clean_and_parse_xml -from onadata.apps.viewer.models.parsed_instance import query_data +from onadata.apps.viewer.models.parsed_instance import query_data, query_count from onadata.apps.viewer.signals import process_submission from onadata.libs.utils.common_tags import GEOLOCATION, LAST_EDITED @@ -312,10 +312,8 @@ def test_edited_submission(self): ) num_instances_history = InstanceHistory.objects.count() num_instances = Instance.objects.count() - query_args = {"xform": self.xform, "query": "{}", "fields": "[]", "count": True} - - cursor = [r for r in query_data(**query_args)] - num_data_instances = cursor[0]["count"] + query_args = {"xform": self.xform} + num_data_instances = query_count(**query_args) # make first submission self._make_submission(xml_submission_file_path) self.assertEqual(self.response.status_code, 201) @@ -330,8 +328,8 @@ def test_edited_submission(self): # no new record in instances history self.assertEqual(InstanceHistory.objects.count(), num_instances_history) # check count of mongo instances after first submission - cursor = query_data(**query_args) - self.assertEqual(cursor[0]["count"], num_data_instances + 1) + count = query_count(**query_args) + self.assertEqual(count, num_data_instances + 1) # edited submission xml_edit_submission_file_path = os.path.join( os.path.dirname(os.path.abspath(__file__)), @@ -369,12 +367,10 @@ def test_edited_submission(self): # check that '_last_edited' key is not in the json self.assertIn(LAST_EDITED, edited_instance.json) - cursor = query_data(**query_args) - self.assertEqual(cursor[0]["count"], num_data_instances + 1) + count = query_count(**query_args) + self.assertEqual(count, num_data_instances + 1) # make sure we edited the 
mongo db record and NOT added a new row - query_args["count"] = False - cursor = query_data(**query_args) - record = cursor[0] + record = [item for item in query_data(**query_args)][0] with open(xml_edit_submission_file_path, "r") as f: xml_str = f.read() xml_str = clean_and_parse_xml(xml_str).toxml() @@ -390,8 +386,7 @@ def test_edited_submission(self): "tutorial_2012-06-27_11-27-53_w_uuid_edited_again.xml", ) self._make_submission(xml_edit_submission_file_path) - cursor = query_data(**query_args) - record = cursor[0] + record = [item for item in query_data(**query_args)][0] edited_instance = self.xform.instances.first() instance_history_2 = InstanceHistory.objects.last() self.assertEqual( @@ -457,7 +452,7 @@ def test_fail_submission_if_bad_id_string(self): self._make_submission(path=xml_submission_file_path) def test_edit_updated_geopoint_cache(self): - query_args = {"xform": self.xform, "query": "{}", "fields": "[]", "count": True} + query_args = {"xform": self.xform} xml_submission_file_path = os.path.join( os.path.dirname(os.path.abspath(__file__)), "..", @@ -470,8 +465,7 @@ def test_edit_updated_geopoint_cache(self): self._make_submission(xml_submission_file_path) self.assertEqual(self.response.status_code, 201) # query mongo for the _geopoint field - query_args["count"] = False - records = query_data(**query_args) + records = [item for item in query_data(**query_args)] self.assertEqual(len(records), 1) # submit the edited instance xml_submission_file_path = os.path.join( @@ -484,7 +478,7 @@ def test_edit_updated_geopoint_cache(self): ) self._make_submission(xml_submission_file_path) self.assertEqual(self.response.status_code, 201) - records = query_data(**query_args) + records = [item for item in query_data(**query_args)] self.assertEqual(len(records), 1) cached_geopoint = records[0][GEOLOCATION] # the cached geopoint should equal the gps field @@ -549,9 +543,8 @@ def test_edited_submission_require_auth(self): num_instances_history = 
InstanceHistory.objects.count() num_instances = Instance.objects.count() - query_args = {"xform": self.xform, "query": "{}", "fields": "[]", "count": True} - cursor = query_data(**query_args) - num_data_instances = cursor[0]["count"] + query_args = {"xform": self.xform} + num_data_instances = query_count(**query_args) # make first submission self._make_submission(xml_submission_file_path) @@ -560,8 +553,8 @@ def test_edited_submission_require_auth(self): # no new record in instances history self.assertEqual(InstanceHistory.objects.count(), num_instances_history) # check count of mongo instances after first submission - cursor = query_data(**query_args) - self.assertEqual(cursor[0]["count"], num_data_instances + 1) + count = query_count(**query_args) + self.assertEqual(count, num_data_instances + 1) # create a new user alice = self._create_user("alice", "alice") @@ -590,12 +583,10 @@ def test_edited_submission_require_auth(self): self.assertEqual(Instance.objects.count(), num_instances + 1) # should be a new record in instances history self.assertEqual(InstanceHistory.objects.count(), num_instances_history + 1) - cursor = query_data(**query_args) - self.assertEqual(cursor[0]["count"], num_data_instances + 1) + count = query_count(**query_args) + self.assertEqual(count, num_data_instances + 1) # make sure we edited the mongo db record and NOT added a new row - query_args["count"] = False - cursor = query_data(**query_args) - record = cursor[0] + record = [item for item in query_data(**query_args)][0] with open(xml_submission_file_path, "r") as f: xml_str = f.read() xml_str = clean_and_parse_xml(xml_str).toxml() diff --git a/onadata/apps/main/tests/test_form_api_delete.py b/onadata/apps/main/tests/test_form_api_delete.py index 8dba62e806..11dd306a3a 100644 --- a/onadata/apps/main/tests/test_form_api_delete.py +++ b/onadata/apps/main/tests/test_form_api_delete.py @@ -5,23 +5,25 @@ from onadata.apps.logger.models.instance import Instance from 
onadata.apps.main.tests.test_base import TestBase from onadata.apps.main.views import delete_data -from onadata.apps.viewer.models.parsed_instance import query_data +from onadata.apps.viewer.models.parsed_instance import query_data, query_fields_data class TestFormAPIDelete(TestBase): - def setUp(self): TestBase.setUp(self) self._create_user_and_login() self._publish_transportation_form_and_submit_instance() - self.delete_url = reverse(delete_data, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string - }) + self.delete_url = reverse( + delete_data, + kwargs={"username": self.user.username, "id_string": self.xform.id_string}, + ) self.data_args = { - 'xform': self.xform, - 'query': "{}", 'limit': 1, - 'sort': '-pk', 'fields': '["_id","_uuid"]'} + "xform": self.xform, + "query": "{}", + "limit": 1, + "sort": "-pk", + "fields": '["_id","_uuid"]', + } def _get_data(self): cursor = query_data(**self.data_args) @@ -33,21 +35,18 @@ def test_get_request_does_not_delete(self): count = Instance.objects.filter(deleted_at=None).count() response = self.anon.get(self.delete_url) self.assertEqual(response.status_code, 405) - self.assertEqual( - Instance.objects.filter(deleted_at=None).count(), count) + self.assertEqual(Instance.objects.filter(deleted_at=None).count(), count) def test_anon_user_cant_delete(self): # Only authenticated user are allowed to access the url count = Instance.objects.filter(deleted_at=None).count() - instance = Instance.objects.filter( - xform=self.xform).latest('date_created') + instance = Instance.objects.filter(xform=self.xform).latest("date_created") # delete - params = {'id': instance.id} + params = {"id": instance.id} response = self.anon.post(self.delete_url, params) self.assertEqual(response.status_code, 302) self.assertIn("accounts/login/?next=", response["Location"]) - self.assertEqual( - Instance.objects.filter(deleted_at=None).count(), count) + self.assertEqual(Instance.objects.filter(deleted_at=None).count(), count) 
def test_delete_shared(self): # Test if someone can delete data from a shared form @@ -55,32 +54,28 @@ def test_delete_shared(self): self.xform.save() self._create_user_and_login("jo") count = Instance.objects.filter(deleted_at=None).count() - instance = Instance.objects.filter( - xform=self.xform).latest('date_created') + instance = Instance.objects.filter(xform=self.xform).latest("date_created") # delete - params = {'id': instance.id} + params = {"id": instance.id} response = self.client.post(self.delete_url, params) self.assertEqual(response.status_code, 403) - self.assertEqual( - Instance.objects.filter(deleted_at=None).count(), count) + self.assertEqual(Instance.objects.filter(deleted_at=None).count(), count) def test_owner_can_delete(self): # Test if Form owner can delete # check record exist before delete and after delete count = Instance.objects.filter(deleted_at=None).count() - instance = Instance.objects.filter( - xform=self.xform).latest('date_created') + instance = Instance.objects.filter(xform=self.xform).latest("date_created") self.assertEqual(instance.deleted_at, None) # delete - params = {'id': instance.id} + params = {"id": instance.id} response = self.client.post(self.delete_url, params) self.assertEqual(response.status_code, 200) - self.assertEqual( - Instance.objects.filter(deleted_at=None).count(), count - 1) + self.assertEqual(Instance.objects.filter(deleted_at=None).count(), count - 1) instance = Instance.objects.get(id=instance.id) self.assertTrue(isinstance(instance.deleted_at, datetime)) self.assertNotEqual(instance.deleted_at, None) query = '{"_id": %s}' % instance.id self.data_args.update({"query": query}) - after = [r for r in query_data(**self.data_args)] + after = list(query_fields_data(**self.data_args)) self.assertEqual(len(after), count - 1) diff --git a/onadata/apps/main/views.py b/onadata/apps/main/views.py index 3c6812177b..cb7ef87ee3 100644 --- a/onadata/apps/main/views.py +++ b/onadata/apps/main/views.py @@ -60,7 +60,14 @@ 
from onadata.apps.sms_support.providers import providers_doc from onadata.apps.sms_support.tools import check_form_sms_compatibility, is_sms_related from onadata.apps.viewer.models.data_dictionary import DataDictionary, upload_to -from onadata.apps.viewer.models.parsed_instance import DATETIME_FORMAT, query_data +from onadata.apps.viewer.models.parsed_instance import ( + DATETIME_FORMAT, + query_data, + query_fields_data, + query_count, + ParsedInstance, + _get_sort_fields, +) from onadata.apps.viewer.views import attachment_url from onadata.libs.exceptions import EnketoError from onadata.libs.utils.decorators import is_owner @@ -569,7 +576,6 @@ def api(request, username=None, id_string=None): # noqa C901 return HttpResponseForbidden(_("Not shared.")) query = request.GET.get("query") - total_records = xform.num_of_submissions try: args = { @@ -590,15 +596,12 @@ def api(request, username=None, id_string=None): # noqa C901 start_index = (page - 1) * page_size args["start_index"] = start_index - args["limit"] = page_size - if query: - count_args = args.copy() - count_args["count"] = True - count_results = list(query_data(**count_args)) - if count_results: - total_records = count_results[0].get("count", total_records) + if args.get("sort") is None: + # Paginated data needs to be sorted. 
We order by id ascending if + # sort is empty + args["sort"] = '{"_id": 1}' if "start" in request.GET: args["start_index"] = int(request.GET.get("start")) @@ -606,20 +609,38 @@ def api(request, username=None, id_string=None): # noqa C901 if "limit" in request.GET: args["limit"] = int(request.GET.get("limit")) - if "count" in request.GET: - args["count"] = int(request.GET.get("count")) > 0 + if "count" in request.GET and int(request.GET.get("count")) > 0: + count = query_count(xform, query) + cursor = [{"count": count}] + + else: + has_json_fields = False + + if args.get("sort"): + sort_fields = _get_sort_fields(args.get("sort")) + # pylint: disable=protected-access + has_json_fields = ParsedInstance._has_json_fields(sort_fields) + + should_query_json_fields = bool(args.get("fields")) or has_json_fields + + if should_query_json_fields: + cursor = list(query_fields_data(**args)) + + else: + args.pop("fields") + # pylint: disable=unexpected-keyword-arg + cursor = list(query_data(**args)) - cursor = query_data(**args) except (ValueError, TypeError) as e: return HttpResponseBadRequest(conditional_escape(str(e))) if "callback" in request.GET and request.GET.get("callback") != "": callback = request.GET.get("callback") - response_text = json_util.dumps(list(cursor)) + response_text = json_util.dumps(cursor) response_text = f"{callback}({response_text})" response = HttpResponse(response_text) else: - response = JsonResponse(list(cursor), safe=False) + response = JsonResponse(cursor, safe=False) add_cors_headers(response) @@ -1452,8 +1473,6 @@ def stringify_unknowns(obj): query_args["start"] = int(request.GET.get("start")) if "limit" in request.GET: query_args["limit"] = int(request.GET.get("limit")) - if "count" in request.GET: - query_args["count"] = int(request.GET.get("count")) > 0 cursor = AuditLog.query_data(**query_args) except ValueError as e: return HttpResponseBadRequest(str(e)) diff --git a/onadata/apps/viewer/models/parsed_instance.py 
b/onadata/apps/viewer/models/parsed_instance.py index e91e058d62..5db709fab0 100644 --- a/onadata/apps/viewer/models/parsed_instance.py +++ b/onadata/apps/viewer/models/parsed_instance.py @@ -3,11 +3,9 @@ ParsedInstance model """ import datetime -import types from django.conf import settings from django.db import connection, models -from django.db.models.query import EmptyQuerySet from django.utils.translation import gettext as _ import six @@ -130,7 +128,7 @@ def parse_json(data): sql = "SELECT COUNT(*) FROM (" + sql + ") AS CQ" fields = ["count"] - cursor.execute(sql, [str(i) for i in sql_params]) + cursor.execute(sql, sql_params) if fields is None: for row in cursor.fetchall(): yield parse_json(row[0]) if row[0] else None @@ -141,11 +139,9 @@ def parse_json(data): ) -def get_etag_hash_from_query(queryset, sql=None, params=None): +def get_etag_hash_from_query(sql=None, params=None): """Returns md5 hash from the date_modified field or""" - if not isinstance(queryset, EmptyQuerySet): - if sql is None: - sql, params = queryset.query.sql_with_params() + if sql: from_index = sql.find("FROM ") sql = ( "SELECT md5(string_agg(date_modified::text, ''))" @@ -160,65 +156,76 @@ def get_etag_hash_from_query(queryset, sql=None, params=None): # pylint: disable=too-many-arguments -def _start_index_limit(records, sql, fields, params, sort, start_index, limit): +def _start_index_limit(sql, params, start_index, limit): if (start_index is not None and start_index < 0) or ( limit is not None and limit < 0 ): raise ValueError(_("Invalid start/limit params")) - if (start_index is not None or limit is not None) and not sql: - sql, params = records.query.sql_with_params() - params = list(params) start_index = 0 if limit is not None and start_index is None else start_index - # pylint: disable=protected-access - has_json_fields = ParsedInstance._has_json_fields(sort) - if start_index is not None and (has_json_fields or fields): + if start_index is not None: params += [start_index] - sql = 
f"{sql} OFFSET %s" - if limit is not None and (has_json_fields or fields): - sql = f"{sql} LIMIT %s" + sql += " OFFSET %s" + if limit is not None: + sql += " LIMIT %s" params += [limit] - if ( - start_index is not None - and limit is not None - and not fields - and not has_json_fields - ): - end_index = start_index + limit - records = records[start_index:end_index] - if start_index is not None and limit is None and not fields and not has_json_fields: - records = records[start_index:] - return records, sql, params + return sql, params + +def _get_sort_fields(sort): + sort = [] if sort is None else sort_from_mongo_sort_str(sort) + + return list(_parse_sort_fields(sort)) -def _get_instances(xform, start, end): - kwargs = {"deleted_at": None} + +def build_sql_where(xform, query, start=None, end=None): + """Build SQL WHERE clause""" + known_integers = [ + get_name_from_survey_element(e) + for e in xform.get_survey_elements_of_type("integer") + ] + where = [] + where_params = [] + + if query and isinstance(query, list): + for qry in query: + _where, _where_params = get_where_clause(qry, known_integers) + where += _where + where_params += _where_params + + else: + where, where_params = get_where_clause(query, known_integers) + + sql_where = "WHERE xform_id in %s AND deleted_at IS NULL" + + if where_params: + sql_where += " AND " + " AND ".join(where) if isinstance(start, datetime.datetime): - kwargs.update({"date_created__gte": start}) + sql_where += " AND date_created >= %s" + where_params += [start.isoformat()] if isinstance(end, datetime.datetime): - kwargs.update({"date_created__lte": end}) + sql_where += " AND date_created <= %s" + where_params += [end.isoformat()] + + xform_pks = [xform.pk] if xform.is_merged_dataset: - xforms = xform.mergedxform.xforms.filter(deleted_at__isnull=True).values_list( - "id", flat=True + merged_xform_ids = list( + xform.mergedxform.xforms.filter(deleted_at__isnull=True).values_list( + "id", flat=True + ) ) - xform_ids = list(xforms) or 
[xform.pk] - instances = Instance.objects.filter(xform_id__in=xform_ids) - else: - instances = xform.instances - - return instances.filter(**kwargs) + if merged_xform_ids: + xform_pks = list(merged_xform_ids) + params = [tuple(xform_pks)] + where_params -def _get_sort_fields(sort): - sort = ["id"] if sort is None else sort_from_mongo_sort_str(sort) - - return list(_parse_sort_fields(sort)) + return sql_where, params -# pylint: disable=too-many-locals +# pylint: disable=too-many-locals,too-many-statements,too-many-branches def get_sql_with_params( xform, query=None, @@ -228,23 +235,12 @@ def get_sql_with_params( end=None, start_index=None, limit=None, - count=None, json_only: bool = True, ): - """ - Returns the SQL and related parameters. - """ - records = _get_instances(xform, start, end) - params = [] + """Returns the SQL and related parameters""" sort = _get_sort_fields(sort) sql = "" - known_integers = [ - get_name_from_survey_element(e) - for e in xform.get_survey_elements_of_type("integer") - ] - where, where_params = get_where_clause(query, known_integers) - if fields and isinstance(fields, six.string_types): fields = json.loads(fields) @@ -252,32 +248,25 @@ def get_sql_with_params( field_list = ["json->%s" for _i in fields] sql = f"SELECT {','.join(field_list)} FROM logger_instance" - sql_where = "" - if where_params: - sql_where = " AND " + " AND ".join(where) - - sql += " WHERE xform_id = %s " + sql_where + " AND deleted_at IS NULL" - params = [xform.pk] + where_params else: if json_only: - records = records.values_list("json", flat=True) + # pylint: disable=protected-access + if sort and ParsedInstance._has_json_fields(sort): + sql = "SELECT json FROM logger_instance" - if query and isinstance(query, list): - for qry in query: - _where, _where_params = get_where_clause(qry, known_integers) - records = records.extra(where=_where, params=_where_params) + else: + sql = "SELECT id,json FROM logger_instance" else: - if where_params: - records = 
records.extra(where=where, params=where_params) + sql = "SELECT * FROM logger_instance" + + sql_where, params = build_sql_where(xform, query, start, end) + sql += f" {sql_where}" # apply sorting - if not count and sort: + if sort: # pylint: disable=protected-access if ParsedInstance._has_json_fields(sort): - if not fields: - # we have to do a sql query for json field order - sql, params = records.query.sql_with_params() params = list(params) + json_order_by_params( sort, none_json_fields=NONE_JSON_FIELDS ) @@ -287,54 +276,104 @@ def get_sql_with_params( sql = f"{sql} {_json_order_by}" else: if not fields: - records = records.order_by(*sort) + sql += " ORDER BY" + + for index, sort_field in enumerate(sort): + if sort_field.startswith("-"): + sort_field = sort_field.removeprefix("-") + # It's safe to use string interpolation since this + # is a column and not a value + sql += f" {sort_field} DESC" + else: + sql += f" {sort_field} ASC" + + if index != len(sort) - 1: + sql += "," - records, sql, params = _start_index_limit( - records, sql, fields, params, sort, start_index, limit + sql, params = _start_index_limit(sql, params, start_index, limit) + + return sql, params + + +def query_count( + xform, + query=None, + date_created_gte=None, + date_created_lte=None, +): + """Count number of instances matching query""" + sql_where, params = build_sql_where( + xform, + query, + date_created_gte, + date_created_lte, ) + sql = f"SELECT COUNT(id) FROM logger_instance {sql_where}" # nosec + + with connection.cursor() as cursor: + cursor.execute(sql, params) + (count,) = cursor.fetchone() - return sql, params, records + return count + + +def query_fields_data( + xform, + fields, + query=None, + sort=None, + start=None, + end=None, + start_index=None, + limit=None, +): + """Query the submissions table and return json fields data""" + sql, params = get_sql_with_params( + xform, + query=query, + fields=fields, + sort=sort, + start=start, + end=end, + start_index=start_index, + 
limit=limit, + ) + + if isinstance(fields, six.string_types): + fields = json.loads(fields) + + return _query_iterator(sql, fields, params) def query_data( xform, query=None, - fields=None, sort=None, start=None, end=None, start_index=None, limit=None, - count=None, json_only: bool = True, ): - """Query the submissions table and returns the results.""" - - sql, params, records = get_sql_with_params( + """Query the submissions table and returns the results""" + sql, params = get_sql_with_params( xform, - query, - fields, - sort, - start, - end, - start_index, - limit, - count, + query=query, + sort=sort, + start=start, + end=end, + start_index=start_index, + limit=limit, json_only=json_only, ) - if fields and isinstance(fields, six.string_types): - fields = json.loads(fields) - sort = _get_sort_fields(sort) - # pylint: disable=protected-access - if (ParsedInstance._has_json_fields(sort) or fields) and sql: - records = _query_iterator(sql, fields, params, count) - if count and isinstance(records, types.GeneratorType): - return list(records) - if count: - return [{"count": records.count()}] + instances = Instance.objects.raw(sql, params) - return records + for instance in instances.iterator(): + if json_only: + yield instance.json + else: + yield instance class ParsedInstance(models.Model): diff --git a/onadata/apps/viewer/tests/fixtures/transportation_w_notes.csv b/onadata/apps/viewer/tests/fixtures/transportation_w_notes.csv deleted file mode 100644 index d82d3b652a..0000000000 --- a/onadata/apps/viewer/tests/fixtures/transportation_w_notes.csv +++ /dev/null @@ -1,5 +0,0 @@ 
-transport/available_transportation_types_to_referral_facility/ambulance,transport/available_transportation_types_to_referral_facility/bicycle,transport/available_transportation_types_to_referral_facility/boat_canoe,transport/available_transportation_types_to_referral_facility/bus,transport/available_transportation_types_to_referral_facility/donkey_mule_cart,transport/available_transportation_types_to_referral_facility/keke_pepe,transport/available_transportation_types_to_referral_facility/lorry,transport/available_transportation_types_to_referral_facility/motorbike,transport/available_transportation_types_to_referral_facility/taxi,transport/available_transportation_types_to_referral_facility/other,transport/available_transportation_types_to_referral_facility_other,transport/loop_over_transport_types_frequency/ambulance/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/bicycle/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/boat_canoe/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/bus/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/donkey_mule_cart/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/keke_pepe/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/lorry/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/motorbike/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/taxi/frequency_to_referral_facility,meta/instanceID,_uuid,_submission_time,_tags,_notes,_version,_duration,_submitted_by,_total_media,_media_count,_media_all_received -False,False,False,False,False,False,False,False,False,False,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,uuid:5b2cc313-fc09-437e-8149-fcd32f695d41,5b2cc313-fc09-437e-8149-fcd32f695d41,2016-07-01T00:00:00,,,2014111,,bob,1,0,False 
-True,True,False,False,False,False,False,False,False,False,n/a,daily,weekly,n/a,n/a,n/a,n/a,n/a,n/a,n/a,uuid:f3d8dc65-91a6-4d0f-9e97-802128083390,f3d8dc65-91a6-4d0f-9e97-802128083390,2016-07-01T00:00:00,,,2014111,,bob,0,0,True -True,False,False,False,False,False,False,False,False,False,n/a,weekly,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,uuid:9c6f3468-cfda-46e8-84c1-75458e72805d,9c6f3468-cfda-46e8-84c1-75458e72805d,2016-07-01T00:00:00,,,2014111,,bob,0,0,True -False,False,False,False,False,False,False,False,True,True,camel,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,daily,uuid:9f0a1508-c3b7-4c99-be00-9b237c26bcbf,9f0a1508-c3b7-4c99-be00-9b237c26bcbf,2016-07-01T00:00:00,,,2014111,,bob,0,0,True diff --git a/onadata/apps/viewer/tests/test_exports.py b/onadata/apps/viewer/tests/test_exports.py index e79bae1f44..ce5393c463 100644 --- a/onadata/apps/viewer/tests/test_exports.py +++ b/onadata/apps/viewer/tests/test_exports.py @@ -20,40 +20,57 @@ from onadata.apps.main.tests.test_base import TestBase from onadata.apps.main.views import delete_data from onadata.apps.viewer.models.export import Export -from onadata.apps.viewer.models.parsed_instance import query_data +from onadata.apps.viewer.models.parsed_instance import query_data, query_count from onadata.apps.viewer.tasks import create_xlsx_export from onadata.apps.viewer.tests.export_helpers import viewer_fixture_path -from onadata.apps.viewer.views import delete_export, export_list, \ - create_export, export_progress, export_download +from onadata.apps.viewer.views import ( + delete_export, + export_list, + create_export, + export_progress, + export_download, +) from onadata.apps.viewer.xls_writer import XlsWriter from onadata.libs.utils.common_tools import get_response_content from onadata.libs.utils.export_builder import dict_to_joined_export -from onadata.libs.utils.export_tools import generate_export, \ - increment_index_in_filename, clean_keys_of_slashes +from onadata.libs.utils.export_tools import ( + generate_export, + 
increment_index_in_filename, + clean_keys_of_slashes, +) -AMBULANCE_KEY = 'transport/available_transportation_types_to_referral_fac'\ - 'ility/ambulance' -AMBULANCE_KEY_DOTS = 'transport.available_transportation_types_to_referra'\ - 'l_facility.ambulance' +AMBULANCE_KEY = ( + "transport/available_transportation_types_to_referral_fac" "ility/ambulance" +) +AMBULANCE_KEY_DOTS = ( + "transport.available_transportation_types_to_referra" "l_facility.ambulance" +) def _main_fixture_path(instance_name): - return os.path.join(settings.PROJECT_ROOT, 'apps', 'main', 'tests', - 'fixtures', 'transportation', 'instances_w_uuid', - instance_name, instance_name + '.xml') + return os.path.join( + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "transportation", + "instances_w_uuid", + instance_name, + instance_name + ".xml", + ) class TestExports(TestBase): - def setUp(self): super(TestExports, self).setUp() - self._submission_time = parse_datetime('2013-02-18 15:54:01Z') + self._submission_time = parse_datetime("2013-02-18 15:54:01Z") self.options = {"extension": "xlsx"} def test_unique_xls_sheet_name(self): xls_writer = XlsWriter() - xls_writer.add_sheet('section9_pit_latrine_with_slab_group') - xls_writer.add_sheet('section9_pit_latrine_without_slab_group') + xls_writer.add_sheet("section9_pit_latrine_with_slab_group") + xls_writer.add_sheet("section9_pit_latrine_without_slab_group") # create a set of sheet names keys sheet_names_set = set(xls_writer._sheets) self.assertEqual(len(sheet_names_set), 2) @@ -63,17 +80,26 @@ def test_csv_http_response(self): survey = self.surveys[0] self._make_submission( os.path.join( - self.this_directory, 'fixtures', 'transportation', - 'instances', survey, survey + '.xml'), - forced_submission_time=self._submission_time) - response = self.client.get(reverse( - 'csv_export', - kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string - })) + self.this_directory, + "fixtures", + "transportation", + 
"instances", + survey, + survey + ".xml", + ), + forced_submission_time=self._submission_time, + ) + response = self.client.get( + reverse( + "csv_export", + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + }, + ) + ) self.assertEqual(response.status_code, 200) - test_file_path = viewer_fixture_path('transportation.csv') + test_file_path = viewer_fixture_path("transportation.csv") self._test_csv_response(response, test_file_path) def test_csv_without_na_values(self): @@ -81,19 +107,28 @@ def test_csv_without_na_values(self): survey = self.surveys[0] self._make_submission( os.path.join( - self.this_directory, 'fixtures', 'transportation', - 'instances', survey, survey + '.xml'), - forced_submission_time=self._submission_time) + self.this_directory, + "fixtures", + "transportation", + "instances", + survey, + survey + ".xml", + ), + forced_submission_time=self._submission_time, + ) na_rep_restore = settings.NA_REP - settings.NA_REP = u'' - response = self.client.get(reverse( - 'csv_export', - kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string - })) + settings.NA_REP = "" + response = self.client.get( + reverse( + "csv_export", + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + }, + ) + ) self.assertEqual(response.status_code, 200) - test_file_path = viewer_fixture_path('transportation_without_na.csv') + test_file_path = viewer_fixture_path("transportation_without_na.csv") self._test_csv_response(response, test_file_path) settings.NA_REP = na_rep_restore @@ -105,22 +140,20 @@ def test_responses_for_empty_exports(self): self._publish_transportation_form() # test csv though xls uses the same view url = reverse( - 'csv_export', - kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string - } + "csv_export", + kwargs={"username": self.user.username, "id_string": self.xform.id_string}, ) self.response = self.client.get(url) 
self.assertEqual(self.response.status_code, 200) - self.assertIn('application/csv', self.response['content-type']) + self.assertIn("application/csv", self.response["content-type"]) # Unpack response streaming data - export_data = [i.decode( - 'utf-8').replace('\n', '').split( - ',') for i in self.response.streaming_content] + export_data = [ + i.decode("utf-8").replace("\n", "").split(",") + for i in self.response.streaming_content + ] xform_headers = self.xform.get_headers() # Remove review headers from xform headers - for x in ['_review_status', '_review_comment']: + for x in ["_review_status", "_review_comment"]: xform_headers.remove(x) # Test export data returned is xform headers list self.assertEqual(xform_headers, export_data[0]) @@ -130,49 +163,36 @@ def test_create_export(self): storage = get_storage_class()() # test xls - export = generate_export( - Export.XLSX_EXPORT, - self.xform, - None, - self.options) + export = generate_export(Export.XLSX_EXPORT, self.xform, None, self.options) self.assertTrue(storage.exists(export.filepath)) path, ext = os.path.splitext(export.filename) - self.assertEqual(ext, '.xlsx') + self.assertEqual(ext, ".xlsx") # test csv self.options["extension"] = "csv" - export = generate_export( - Export.CSV_EXPORT, - self.xform, - None, - self.options) + export = generate_export(Export.CSV_EXPORT, self.xform, None, self.options) self.assertTrue(storage.exists(export.filepath)) path, ext = os.path.splitext(export.filename) - self.assertEqual(ext, '.csv') + self.assertEqual(ext, ".csv") # test xls with existing export_id - existing_export = Export.objects.create(xform=self.xform, - export_type=Export.XLSX_EXPORT) + existing_export = Export.objects.create( + xform=self.xform, export_type=Export.XLSX_EXPORT + ) self.options["extension"] = "xlsx" self.options["export_id"] = existing_export.id export = generate_export( - Export.XLSX_EXPORT, - self.xform, - existing_export.id, - self.options) + Export.XLSX_EXPORT, self.xform, 
existing_export.id, self.options + ) self.assertEqual(existing_export.id, export.id) def test_delete_file_on_export_delete(self): self._publish_transportation_form() self._submit_transport_instance() - export = generate_export( - Export.XLSX_EXPORT, - self.xform, - None, - self.options) + export = generate_export(Export.XLSX_EXPORT, self.xform, None, self.options) storage = get_storage_class()() self.assertTrue(storage.exists(export.filepath)) # delete export object @@ -184,11 +204,7 @@ def test_graceful_exit_on_export_delete_if_file_doesnt_exist(self): self._submit_transport_instance() self.options["id_string"] = self.xform.id_string - export = generate_export( - Export.XLSX_EXPORT, - self.xform, - None, - self.options) + export = generate_export(Export.XLSX_EXPORT, self.xform, None, self.options) storage = get_storage_class()() # delete file storage.delete(export.filepath) @@ -198,12 +214,15 @@ def test_graceful_exit_on_export_delete_if_file_doesnt_exist(self): export.filedir = None export.save() # delete export record, which should try to delete file as well - delete_url = reverse(delete_export, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': 'xlsx' - }) - post_data = {'export_id': export.id} + delete_url = reverse( + delete_export, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": "xlsx", + }, + ) + post_data = {"export_id": export.id} response = self.client.post(delete_url, post_data) self.assertEqual(response.status_code, 302) @@ -213,18 +232,12 @@ def test_delete_oldest_export_on_limit(self): # create first export first_export = generate_export( - Export.XLSX_EXPORT, - self.xform, - None, - self.options) + Export.XLSX_EXPORT, self.xform, None, self.options + ) self.assertIsNotNone(first_export.pk) # create exports that exceed set limit for i in range(Export.MAX_EXPORTS): - generate_export( - Export.XLSX_EXPORT, - self.xform, - None, - self.options) + 
generate_export(Export.XLSX_EXPORT, self.xform, None, self.options) # first export should be deleted exports = Export.objects.filter(id=first_export.id) self.assertEqual(len(exports), 0) @@ -233,11 +246,14 @@ def test_create_export_url_with_unavailable_id_string(self): self._publish_transportation_form() self._submit_transport_instance() - create_export_url = reverse(create_export, kwargs={ - 'username': self.user.username, - 'id_string': 'random_id_string', - 'export_type': Export.XLSX_EXPORT - }) + create_export_url = reverse( + create_export, + kwargs={ + "username": self.user.username, + "id_string": "random_id_string", + "export_type": Export.XLSX_EXPORT, + }, + ) response = self.client.post(create_export_url) self.assertEqual(response.status_code, 404) @@ -247,27 +263,33 @@ def test_create_export_url(self): self._submit_transport_instance() num_exports = Export.objects.count() # create export - create_export_url = reverse(create_export, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': Export.XLSX_EXPORT - }) + create_export_url = reverse( + create_export, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": Export.XLSX_EXPORT, + }, + ) # anonymous user has to login first response = self.anon.post(create_export_url) self.assertEqual(response.status_code, 302) - self.assertIn("/accounts/login", response['location']) + self.assertIn("/accounts/login", response["location"]) response = self.client.post(create_export_url) self.assertEqual(response.status_code, 302) self.assertEqual(Export.objects.count(), num_exports + 1) # test with unavailable id_string - create_export_url = reverse(create_export, kwargs={ - 'username': self.user.username, - 'id_string': 'random_id_string', - 'export_type': Export.XLSX_EXPORT - }) + create_export_url = reverse( + create_export, + kwargs={ + "username": self.user.username, + "id_string": "random_id_string", + "export_type": 
Export.XLSX_EXPORT, + }, + ) response = self.client.post(create_export_url) self.assertEqual(response.status_code, 404) @@ -278,24 +300,23 @@ def test_delete_export_url(self): # create export self.options["id_string"] = self.xform.id_string - export = generate_export( - Export.XLSX_EXPORT, - self.xform, - None, - self.options) + export = generate_export(Export.XLSX_EXPORT, self.xform, None, self.options) exports = Export.objects.filter(id=export.id) self.assertEqual(len(exports), 1) - delete_url = reverse(delete_export, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': 'xlsx' - }) - post_data = {'export_id': export.id} + delete_url = reverse( + delete_export, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": "xlsx", + }, + ) + post_data = {"export_id": export.id} # anonymous user has to login first response = self.anon.post(delete_url, post_data) self.assertEqual(response.status_code, 302) - self.assertIn("/accounts/login", response['location']) + self.assertIn("/accounts/login", response["location"]) response = self.client.post(delete_url, post_data) self.assertEqual(response.status_code, 302) @@ -303,11 +324,14 @@ def test_delete_export_url(self): self.assertEqual(len(exports), 0) # test with unavailable id_string - delete_url = reverse(delete_export, kwargs={ - 'username': self.user.username, - 'id_string': 'random_id_string', - 'export_type': 'xlsx' - }) + delete_url = reverse( + delete_export, + kwargs={ + "username": self.user.username, + "id_string": "random_id_string", + "export_type": "xlsx", + }, + ) response = self.client.post(delete_url, post_data) self.assertEqual(response.status_code, 404) @@ -318,45 +342,52 @@ def test_export_progress_output(self): # create exports for i in range(2): - generate_export( - Export.XLSX_EXPORT, - self.xform, - None, - self.options) + generate_export(Export.XLSX_EXPORT, self.xform, None, self.options) 
self.assertEqual(Export.objects.count(), 2) - get_data = {'export_ids': [e.id for e in Export.objects.all()]} + get_data = {"export_ids": [e.id for e in Export.objects.all()]} # test with unavailable id_string - progress_url = reverse(export_progress, kwargs={ - 'username': self.user.username, - 'id_string': 'random_id_string', - 'export_type': 'xlsx' - }) + progress_url = reverse( + export_progress, + kwargs={ + "username": self.user.username, + "id_string": "random_id_string", + "export_type": "xlsx", + }, + ) response = self.client.get(progress_url, get_data) self.assertEqual(response.status_code, 404) # progress for multiple exports - progress_url = reverse(export_progress, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': 'xlsx' - }) + progress_url = reverse( + export_progress, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": "xlsx", + }, + ) response = self.client.get(progress_url, get_data) content = json.loads(response.content) self.assertEqual(len(content), 2) - self.assertEqual(sorted(['url', 'export_id', 'complete', 'filename']), - sorted(list(content[0]))) + self.assertEqual( + sorted(["url", "export_id", "complete", "filename"]), + sorted(list(content[0])), + ) def test_auto_export_if_none_exists(self): self._publish_transportation_form() self._submit_transport_instance() # get export list url num_exports = Export.objects.count() - export_list_url = reverse(export_list, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': Export.XLSX_EXPORT - }) + export_list_url = reverse( + export_list, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": Export.XLSX_EXPORT, + }, + ) self.client.get(export_list_url) self.assertEqual(Export.objects.count(), num_exports + 1) @@ -364,18 +395,24 @@ def test_dont_auto_export_if_exports_exist(self): self._publish_transportation_form() 
self._submit_transport_instance() # create export - create_export_url = reverse(create_export, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': Export.XLSX_EXPORT - }) + create_export_url = reverse( + create_export, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": Export.XLSX_EXPORT, + }, + ) self.client.post(create_export_url) num_exports = Export.objects.count() - export_list_url = reverse(export_list, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': Export.XLSX_EXPORT - }) + export_list_url = reverse( + export_list, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": Export.XLSX_EXPORT, + }, + ) self.client.get(export_list_url) self.assertEqual(Export.objects.count(), num_exports) @@ -384,84 +421,95 @@ def test_last_submission_time_on_export(self): self._submit_transport_instance() # create export - generate_export( - Export.XLSX_EXPORT, - self.xform, - None, - self.options) + generate_export(Export.XLSX_EXPORT, self.xform, None, self.options) num_exports = Export.objects.filter( - xform=self.xform, export_type=Export.XLSX_EXPORT).count() + xform=self.xform, export_type=Export.XLSX_EXPORT + ).count() # check that our function knows there are no more submissions self.assertFalse( - Export.exports_outdated(xform=self.xform, - export_type=Export.XLSX_EXPORT)) + Export.exports_outdated(xform=self.xform, export_type=Export.XLSX_EXPORT) + ) sleep(1) # force new last submission date on xform - last_submission = self.xform.instances.order_by('-date_created')[0] + last_submission = self.xform.instances.order_by("-date_created")[0] last_submission.date_created += datetime.timedelta(hours=1) last_submission.save() # check that our function knows data has changed self.assertTrue( - Export.exports_outdated(xform=self.xform, - export_type=Export.XLSX_EXPORT)) + 
Export.exports_outdated(xform=self.xform, export_type=Export.XLSX_EXPORT) + ) # check that requesting list url will generate a new export - export_list_url = reverse(export_list, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': Export.XLSX_EXPORT - }) + export_list_url = reverse( + export_list, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": Export.XLSX_EXPORT, + }, + ) self.client.get(export_list_url) self.assertEqual( - Export.objects.filter(xform=self.xform, - export_type=Export.XLSX_EXPORT).count(), - num_exports + 1) + Export.objects.filter( + xform=self.xform, export_type=Export.XLSX_EXPORT + ).count(), + num_exports + 1, + ) # make sure another export type causes auto-generation num_exports = Export.objects.filter( - xform=self.xform, export_type=Export.CSV_EXPORT).count() - export_list_url = reverse(export_list, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': Export.CSV_EXPORT - }) + xform=self.xform, export_type=Export.CSV_EXPORT + ).count() + export_list_url = reverse( + export_list, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": Export.CSV_EXPORT, + }, + ) self.client.get(export_list_url) self.assertEqual( - Export.objects.filter(xform=self.xform, - export_type=Export.CSV_EXPORT).count(), - num_exports + 1) + Export.objects.filter( + xform=self.xform, export_type=Export.CSV_EXPORT + ).count(), + num_exports + 1, + ) def test_last_submission_time_empty(self): self._publish_transportation_form() self._submit_transport_instance() # create export - export = generate_export( - Export.XLSX_EXPORT, - self.xform, - None, - self.options) + export = generate_export(Export.XLSX_EXPORT, self.xform, None, self.options) # set time of last submission to None export.time_of_last_submission = None export.save() - self.assertTrue(Export.exports_outdated(xform=self.xform, - 
export_type=Export.XLSX_EXPORT)) + self.assertTrue( + Export.exports_outdated(xform=self.xform, export_type=Export.XLSX_EXPORT) + ) def test_invalid_export_type(self): self._publish_transportation_form() self._submit_transport_instance() - export_list_url = reverse(export_list, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': 'invalid' - }) + export_list_url = reverse( + export_list, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": "invalid", + }, + ) response = self.client.get(export_list_url) self.assertEqual(response.status_code, 400) # test create url - create_export_url = reverse(create_export, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': 'invalid' - }) + create_export_url = reverse( + create_export, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": "invalid", + }, + ) response = self.client.post(create_export_url) self.assertEqual(response.status_code, 400) @@ -482,7 +530,7 @@ class FakeDate(datetime.datetime): def now(cls, tz=None): return cls(2010, 1, 1) - @patch('onadata.libs.utils.export_tools.datetime', FakeDate) + @patch("onadata.libs.utils.export_tools.datetime", FakeDate) def test_duplicate_export_filename_is_renamed(self): self._publish_transportation_form() self._submit_transport_instance() @@ -491,19 +539,20 @@ def test_duplicate_export_filename_is_renamed(self): # create an export object in the db basename = "%s_%s" % ( self.xform.id_string, - target.strftime("%Y_%m_%d_%H_%M_%S_%f")) + target.strftime("%Y_%m_%d_%H_%M_%S_%f"), + ) filename = basename + ".csv" self.options["extension"] = Export.CSV_EXPORT - Export.objects.create(xform=self.xform, export_type=Export.CSV_EXPORT, - filename=filename, options=self.options) + Export.objects.create( + xform=self.xform, + export_type=Export.CSV_EXPORT, + filename=filename, + options=self.options, + ) # 2nd 
export - export_2 = generate_export( - Export.CSV_EXPORT, - self.xform, - None, - self.options) + export_2 = generate_export(Export.CSV_EXPORT, self.xform, None, self.options) new_filename = increment_index_in_filename(filename) self.assertEqual(new_filename, export_2.filename) @@ -513,44 +562,45 @@ def test_export_download_url(self): self._submit_transport_instance() self.options["extension"] = Export.CSV_EXPORT - export = generate_export( - Export.CSV_EXPORT, - self.xform, - None, - self.options) + export = generate_export(Export.CSV_EXPORT, self.xform, None, self.options) # test with unavailable id_string - csv_export_url = reverse(export_download, kwargs={ - "username": self.user.username, - "id_string": 'random_id_string', - "export_type": Export.CSV_EXPORT, - "filename": export.filename - }) + csv_export_url = reverse( + export_download, + kwargs={ + "username": self.user.username, + "id_string": "random_id_string", + "export_type": Export.CSV_EXPORT, + "filename": export.filename, + }, + ) response = self.client.get(csv_export_url) self.assertEqual(response.status_code, 404) - csv_export_url = reverse(export_download, kwargs={ - "username": self.user.username, - "id_string": self.xform.id_string, - "export_type": Export.CSV_EXPORT, - "filename": export.filename - }) + csv_export_url = reverse( + export_download, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": Export.CSV_EXPORT, + "filename": export.filename, + }, + ) response = self.client.get(csv_export_url) self.assertEqual(response.status_code, 200) # test xls self.options["extension"] = "xlsx" - export = generate_export( - Export.XLSX_EXPORT, - self.xform, - None, - self.options) - xlsx_export_url = reverse(export_download, kwargs={ - "username": self.user.username, - "id_string": self.xform.id_string, - "export_type": Export.XLSX_EXPORT, - "filename": export.filename - }) + export = generate_export(Export.XLSX_EXPORT, self.xform, None, self.options) + 
xlsx_export_url = reverse( + export_download, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": Export.XLSX_EXPORT, + "filename": export.filename, + }, + ) response = self.client.get(xlsx_export_url) self.assertEqual(response.status_code, 200) @@ -563,17 +613,16 @@ def test_404_on_export_io_error(self): self._submit_transport_instance() self.options["extension"] = Export.CSV_EXPORT - export = generate_export( - Export.CSV_EXPORT, - self.xform, - None, - self.options) - export_url = reverse(export_download, kwargs={ - "username": self.user.username, - "id_string": self.xform.id_string, - "export_type": Export.CSV_EXPORT, - "filename": export.filename - }) + export = generate_export(Export.CSV_EXPORT, self.xform, None, self.options) + export_url = reverse( + export_download, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": Export.CSV_EXPORT, + "filename": export.filename, + }, + ) # delete the export export.delete() # access the export @@ -584,33 +633,32 @@ def test_query_data_with_invalid_args(self): self._publish_transportation_form() with self.assertRaises(TypeError): query = "select '{0}' from '{1}'" % 2, "two" - query_data(self.xform, query, None, '{}', count=True) + query_data(self.xform, query, None, "{}", count=True) def test_deleted_submission_not_in_export(self): self._publish_transportation_form() - initial_count = query_data( - self.xform, '{}', None, '{}', - count=True)[0]['count'] + initial_count = query_count(self.xform) self._submit_transport_instance(0) self._submit_transport_instance(1) - count = query_data( - self.xform, '{}', None, '{}', count=True)[0]['count'] + count = query_count(self.xform) self.assertEqual(count, initial_count + 2) # get id of second submission - instance_id = Instance.objects.filter( - xform=self.xform).order_by('id').reverse()[0].id + instance_id = ( + Instance.objects.filter(xform=self.xform).order_by("id").reverse()[0].id + 
) delete_url = reverse( - delete_data, kwargs={"username": self.user.username, - "id_string": self.xform.id_string}) - params = {'id': instance_id} + delete_data, + kwargs={"username": self.user.username, "id_string": self.xform.id_string}, + ) + params = {"id": instance_id} self.client.post(delete_url, params) - count = query_data( - self.xform, '{}', '[]', '{}', count=True)[0]['count'] + count = query_count(self.xform) self.assertEqual(count, initial_count + 1) # create the export csv_export_url = reverse( - 'csv_export', kwargs={"username": self.user.username, - "id_string": self.xform.id_string}) + "csv_export", + kwargs={"username": self.user.username, "id_string": self.xform.id_string}, + ) response = self.client.get(csv_export_url) self.assertEqual(response.status_code, 200) f = StringIO(get_response_content(response)) @@ -622,25 +670,23 @@ def test_deleted_submission_not_in_export(self): def test_edited_submissions_in_exports(self): self._publish_transportation_form() - initial_count = query_data( - self.xform, '{}', None, '{}', count=True)[0]['count'] - instance_name = 'transport_2011-07-25_19-05-36' + initial_count = query_count(self.xform) + instance_name = "transport_2011-07-25_19-05-36" path = _main_fixture_path(instance_name) self._make_submission(path) - count = query_data( - self.xform, '{}', '[]', count=True)[0]['count'] + count = query_count(self.xform) self.assertEqual(count, initial_count + 1) # make edited submission - simulating what enketo would return - instance_name = 'transport_2011-07-25_19-05-36-edited' + instance_name = "transport_2011-07-25_19-05-36-edited" path = _main_fixture_path(instance_name) self._make_submission(path) - count = query_data( - self.xform, '{}', '[]', count=True)[0]['count'] + count = query_count(self.xform) self.assertEqual(count, initial_count + 1) # create the export csv_export_url = reverse( - 'csv_export', kwargs={"username": self.user.username, - "id_string": self.xform.id_string}) + "csv_export", + 
kwargs={"username": self.user.username, "id_string": self.xform.id_string}, + ) response = self.client.get(csv_export_url) self.assertEqual(response.status_code, 200) f = StringIO(get_response_content(response)) @@ -650,8 +696,10 @@ def test_edited_submissions_in_exports(self): num_rows = len(data) # number of rows == initial_count + 1 self.assertEqual(num_rows, initial_count + 1) - key = 'transport/loop_over_transport_types_frequency/ambulance/'\ - 'frequency_to_referral_facility' + key = ( + "transport/loop_over_transport_types_frequency/ambulance/" + "frequency_to_referral_facility" + ) self.assertEqual(data[initial_count][key], "monthly") def test_export_ids_dont_have_comma_separation(self): @@ -662,16 +710,21 @@ def test_export_ids_dont_have_comma_separation(self): self._publish_transportation_form() self._submit_transport_instance() # create an in-complete export - export = Export.objects.create(id=1234, xform=self.xform, - export_type=Export.XLSX_EXPORT, - options=self.options) + export = Export.objects.create( + id=1234, + xform=self.xform, + export_type=Export.XLSX_EXPORT, + options=self.options, + ) self.assertEqual(export.pk, 1234) export_list_url = reverse( - export_list, kwargs={ + export_list, + kwargs={ "username": self.user.username, "id_string": self.xform.id_string, - "export_type": Export.XLSX_EXPORT - }) + "export_type": Export.XLSX_EXPORT, + }, + ) response = self.client.get(export_list_url) self.assertContains(response, '#delete-1234"') self.assertNotContains(response, '#delete-1,234"') @@ -684,15 +737,17 @@ def test_export_progress_updates(self): """ self._publish_transportation_form() # generate an export that fails because of the NoRecordsFound exception - export = Export.objects.create(xform=self.xform, - export_type=Export.XLSX_EXPORT) + export = Export.objects.create(xform=self.xform, export_type=Export.XLSX_EXPORT) # check that progress url says pending - progress_url = reverse(export_progress, kwargs={ - 'username': self.user.username, 
- 'id_string': self.xform.id_string, - 'export_type': 'xlsx' - }) - params = {'export_ids': [export.id]} + progress_url = reverse( + export_progress, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": "xlsx", + }, + ) + params = {"export_ids": [export.id]} response = self.client.get(progress_url, params) status = json.loads(response.content)[0] self.assertEqual(status["complete"], False) @@ -701,12 +756,15 @@ def test_export_progress_updates(self): export.internal_status = Export.FAILED export.save() # check that progress url says failed - progress_url = reverse(export_progress, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': 'xlsx' - }) - params = {'export_ids': [export.id]} + progress_url = reverse( + export_progress, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": "xlsx", + }, + ) + params = {"export_ids": [export.id]} response = self.client.get(progress_url, params) status = json.loads(response.content)[0] self.assertEqual(status["complete"], True) @@ -716,11 +774,9 @@ def test_export_progress_updates(self): self._submit_transport_instance() create_xlsx_export( - self.user.username, - self.xform.id_string, - export.id, - **self.options) - params = {'export_ids': [export.id]} + self.user.username, self.xform.id_string, export.id, **self.options + ) + params = {"export_ids": [export.id]} response = self.client.get(progress_url, params) status = json.loads(response.content)[0] self.assertEqual(status["complete"], True) @@ -735,23 +791,26 @@ def test_direct_export_returns_newest_export_if_not_updated_since(self): self.assertEqual(self.response.status_code, 201) initial_num_csv_exports = Export.objects.filter( - xform=self.xform, export_type=Export.CSV_EXPORT).count() + xform=self.xform, export_type=Export.CSV_EXPORT + ).count() initial_num_xlsx_exports = Export.objects.filter( - xform=self.xform, 
export_type=Export.XLSX_EXPORT).count() + xform=self.xform, export_type=Export.XLSX_EXPORT + ).count() # request a direct csv export - csv_export_url = reverse('csv_export', kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string - }) - xlsx_export_url = reverse('xlsx_export', kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string - }) + csv_export_url = reverse( + "csv_export", + kwargs={"username": self.user.username, "id_string": self.xform.id_string}, + ) + xlsx_export_url = reverse( + "xlsx_export", + kwargs={"username": self.user.username, "id_string": self.xform.id_string}, + ) response = self.client.get(csv_export_url) self.assertEqual(response.status_code, 200) # we should have initial_num_exports + 1 exports num_csv_exports = Export.objects.filter( - xform=self.xform, export_type=Export.CSV_EXPORT).count() + xform=self.xform, export_type=Export.CSV_EXPORT + ).count() self.assertEqual(num_csv_exports, initial_num_csv_exports + 1) # request another export without changing the data @@ -760,7 +819,8 @@ def test_direct_export_returns_newest_export_if_not_updated_since(self): # we should still only have a single export object num_csv_exports = Export.objects.filter( - xform=self.xform, export_type=Export.CSV_EXPORT).count() + xform=self.xform, export_type=Export.CSV_EXPORT + ).count() self.assertEqual(num_csv_exports, initial_num_csv_exports + 1) # this should not affect a direct XLS export @@ -768,43 +828,46 @@ def test_direct_export_returns_newest_export_if_not_updated_since(self): response = self.client.get(xlsx_export_url) self.assertEqual(response.status_code, 200) num_xlsx_exports = Export.objects.filter( - xform=self.xform, export_type=Export.XLSX_EXPORT).count() + xform=self.xform, export_type=Export.XLSX_EXPORT + ).count() self.assertEqual(num_xlsx_exports, initial_num_xlsx_exports + 1) # make sure xls doesnt re-generate if data hasn't changed response = self.client.get(xlsx_export_url) 
self.assertEqual(response.status_code, 200) num_xlsx_exports = Export.objects.filter( - xform=self.xform, export_type=Export.XLSX_EXPORT).count() + xform=self.xform, export_type=Export.XLSX_EXPORT + ).count() self.assertEqual(num_xlsx_exports, initial_num_xlsx_exports + 1) sleep(1) # check that data edits cause a re-generation - self._submit_transport_instance_w_uuid( - "transport_2011-07-25_19-05-36-edited") + self._submit_transport_instance_w_uuid("transport_2011-07-25_19-05-36-edited") self.assertEqual(self.response.status_code, 201) self.client.get(csv_export_url) self.assertEqual(response.status_code, 200) # we should have an extra export now that the data has been updated num_csv_exports = Export.objects.filter( - xform=self.xform, export_type=Export.CSV_EXPORT).count() + xform=self.xform, export_type=Export.CSV_EXPORT + ).count() self.assertEqual(num_csv_exports, initial_num_csv_exports + 2) sleep(1) # and when we delete - delete_url = reverse(delete_data, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string - }) - instance = Instance.objects.filter().order_by('-pk')[0] - response = self.client.post(delete_url, {'id': instance.id}) + delete_url = reverse( + delete_data, + kwargs={"username": self.user.username, "id_string": self.xform.id_string}, + ) + instance = Instance.objects.filter().order_by("-pk")[0] + response = self.client.post(delete_url, {"id": instance.id}) self.assertEqual(response.status_code, 200) response = self.client.get(csv_export_url) self.assertEqual(response.status_code, 200) # we should have an extra export now that the data # has been updated by the delete num_csv_exports = Export.objects.filter( - xform=self.xform, export_type=Export.CSV_EXPORT).count() + xform=self.xform, export_type=Export.CSV_EXPORT + ).count() self.assertEqual(num_csv_exports, initial_num_csv_exports + 3) def test_exports_outdated_doesnt_consider_failed_exports(self): @@ -812,24 +875,26 @@ def 
test_exports_outdated_doesnt_consider_failed_exports(self): self._submit_transport_instance() # create a bad export export = Export.objects.create( - xform=self.xform, export_type=Export.XLSX_EXPORT, - internal_status=Export.FAILED) - self.assertTrue( - Export.exports_outdated(self.xform, export.export_type)) + xform=self.xform, + export_type=Export.XLSX_EXPORT, + internal_status=Export.FAILED, + ) + self.assertTrue(Export.exports_outdated(self.xform, export.export_type)) def test_exports_outdated_considers_pending_exports(self): self._publish_transportation_form() self._submit_transport_instance() # create a pending export export = Export.objects.create( - xform=self.xform, export_type=Export.XLSX_EXPORT, - internal_status=Export.PENDING) - self.assertFalse( - Export.exports_outdated(self.xform, export.export_type)) + xform=self.xform, + export_type=Export.XLSX_EXPORT, + internal_status=Export.PENDING, + ) + self.assertFalse(Export.exports_outdated(self.xform, export.export_type)) def _get_csv_data(self, filepath): storage = get_storage_class()() - csv_file = storage.open(filepath, mode='r') + csv_file = storage.open(filepath, mode="r") reader = csv.DictReader(csv_file) data = next(reader) csv_file.close() @@ -851,48 +916,57 @@ def test_column_header_delimiter_export_option(self): # survey 1 has ambulance and bicycle as values for # transport/available_transportation_types_to_referral_facility self._submit_transport_instance(survey_at=1) - create_csv_export_url = reverse(create_export, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': 'csv' - }) + create_csv_export_url = reverse( + create_export, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": "csv", + }, + ) default_params = {} custom_params = { - 'options[group_delimiter]': '.', + "options[group_delimiter]": ".", } # test csv with default group delimiter response = self.client.post(create_csv_export_url, 
default_params) self.assertEqual(response.status_code, 302) - export = Export.objects.filter( - xform=self.xform, export_type='csv').latest('created_on') + export = Export.objects.filter(xform=self.xform, export_type="csv").latest( + "created_on" + ) self.assertTrue(bool(export.filepath)) data = self._get_csv_data(export.filepath) self.assertTrue(AMBULANCE_KEY in data) - self.assertEqual(data[AMBULANCE_KEY], 'True') + self.assertEqual(data[AMBULANCE_KEY], "True") sleep(1) # test csv with dot delimiter response = self.client.post(create_csv_export_url, custom_params) self.assertEqual(response.status_code, 302) - export = Export.objects.filter( - xform=self.xform, export_type='csv').latest('created_on') + export = Export.objects.filter(xform=self.xform, export_type="csv").latest( + "created_on" + ) self.assertTrue(bool(export.filepath)) data = self._get_csv_data(export.filepath) self.assertTrue(AMBULANCE_KEY_DOTS in data) - self.assertEqual(data[AMBULANCE_KEY_DOTS], 'True') + self.assertEqual(data[AMBULANCE_KEY_DOTS], "True") # test xls with default group delimiter - create_csv_export_url = reverse(create_export, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': 'xlsx' - }) + create_csv_export_url = reverse( + create_export, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": "xlsx", + }, + ) response = self.client.post(create_csv_export_url, default_params) self.assertEqual(response.status_code, 302) - export = Export.objects.filter( - xform=self.xform, export_type='xlsx').latest('created_on') + export = Export.objects.filter(xform=self.xform, export_type="xlsx").latest( + "created_on" + ) self.assertTrue(bool(export.filepath)) data = self._get_xls_data(export.full_filepath) self.assertTrue(AMBULANCE_KEY in data) @@ -903,8 +977,9 @@ def test_column_header_delimiter_export_option(self): # test xls with dot delimiter response = self.client.post(create_csv_export_url, 
custom_params) self.assertEqual(response.status_code, 302) - export = Export.objects.filter( - xform=self.xform, export_type='xlsx').latest('created_on') + export = Export.objects.filter(xform=self.xform, export_type="xlsx").latest( + "created_on" + ) self.assertTrue(bool(export.filepath)) data = self._get_xls_data(export.full_filepath) self.assertTrue(AMBULANCE_KEY_DOTS in data) @@ -914,48 +989,52 @@ def test_column_header_delimiter_export_option(self): def test_split_select_multiple_export_option(self): self._publish_transportation_form() self._submit_transport_instance(survey_at=1) - create_csv_export_url = reverse(create_export, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': 'csv' - }) + create_csv_export_url = reverse( + create_export, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": "csv", + }, + ) default_params = {} - custom_params = { - 'options[dont_split_select_multiples]': 'yes' - } + custom_params = {"options[dont_split_select_multiples]": "yes"} # test csv with default split select multiples response = self.client.post(create_csv_export_url, default_params) self.assertEqual(response.status_code, 302) - export = Export.objects.filter( - xform=self.xform, export_type='csv').latest('created_on') + export = Export.objects.filter(xform=self.xform, export_type="csv").latest( + "created_on" + ) self.assertTrue(bool(export.filepath)) data = self._get_csv_data(export.filepath) # we should have transport/available_transportation_types_to_referral_f # acility/ambulance as a separate column self.assertTrue(AMBULANCE_KEY in data) - self.assertEqual(data[AMBULANCE_KEY], 'True') + self.assertEqual(data[AMBULANCE_KEY], "True") sleep(1) # test csv with default split select multiples, binary select multiples settings.BINARY_SELECT_MULTIPLES = True response = self.client.post(create_csv_export_url, default_params) self.assertEqual(response.status_code, 302) - export = 
Export.objects.filter( - xform=self.xform, export_type='csv').latest('created_on') + export = Export.objects.filter(xform=self.xform, export_type="csv").latest( + "created_on" + ) self.assertTrue(bool(export.filepath)) data = self._get_csv_data(export.filepath) # we should have transport/available_transportation_types_to_referral_f # acility/ambulance as a separate column self.assertTrue(AMBULANCE_KEY in data) - self.assertEqual(data[AMBULANCE_KEY], '1') + self.assertEqual(data[AMBULANCE_KEY], "1") settings.BINARY_SELECT_MULTIPLES = False sleep(1) # test csv without default split select multiples response = self.client.post(create_csv_export_url, custom_params) self.assertEqual(response.status_code, 302) - export = Export.objects.filter( - xform=self.xform, export_type='csv').latest('created_on') + export = Export.objects.filter(xform=self.xform, export_type="csv").latest( + "created_on" + ) self.assertTrue(bool(export.filepath)) data = self._get_csv_data(export.filepath) # transport/available_transportation_types_to_referral_facility/ambulan @@ -964,24 +1043,31 @@ def test_split_select_multiple_export_option(self): # transport/available_transportation_types_to_referral_facility should # be a column self.assertTrue( - 'transport/available_transportation_types_to_referral_facility' in - data) + "transport/available_transportation_types_to_referral_facility" in data + ) # check that ambulance is one the values within the transport/available # _transportation_types_to_referral_facility column - self.assertTrue("ambulance" in data[ - 'transport/available_transportation_types_to_referral_facility' - ].split(" ")) - - create_xlsx_export_url = reverse(create_export, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': 'xlsx' - }) + self.assertTrue( + "ambulance" + in data[ + "transport/available_transportation_types_to_referral_facility" + ].split(" ") + ) + + create_xlsx_export_url = reverse( + create_export, + kwargs={ + 
"username": self.user.username, + "id_string": self.xform.id_string, + "export_type": "xlsx", + }, + ) # test xls with default split select multiples response = self.client.post(create_xlsx_export_url, default_params) self.assertEqual(response.status_code, 302) - export = Export.objects.filter( - xform=self.xform, export_type='xlsx').latest('created_on') + export = Export.objects.filter(xform=self.xform, export_type="xlsx").latest( + "created_on" + ) self.assertTrue(bool(export.filepath)) data = self._get_xls_data(export.full_filepath) # we should have transport/available_transportation_types_to_referral_f @@ -992,8 +1078,9 @@ def test_split_select_multiple_export_option(self): # test xls without default split select multiples response = self.client.post(create_xlsx_export_url, custom_params) self.assertEqual(response.status_code, 302) - export = Export.objects.filter( - xform=self.xform, export_type='xlsx').latest('created_on') + export = Export.objects.filter(xform=self.xform, export_type="xlsx").latest( + "created_on" + ) self.assertTrue(bool(export.filepath)) data = self._get_xls_data(export.full_filepath) # transport/available_transportation_types_to_referral_facility/ambulan @@ -1002,189 +1089,183 @@ def test_split_select_multiple_export_option(self): # transport/available_transportation_types_to_referral_facility should # be a column self.assertTrue( - 'transport/available_transportation_types_to_referral_facility' - in data) + "transport/available_transportation_types_to_referral_facility" in data + ) # check that ambulance is one the values within the transport/available # _transportation_types_to_referral_facility column - self.assertTrue("ambulance" in data[ - 'transport/available_transportation_types_to_referral_facility' - ].split(" ")) + self.assertTrue( + "ambulance" + in data[ + "transport/available_transportation_types_to_referral_facility" + ].split(" ") + ) def test_dict_to_joined_export_works(self): self._publish_transportation_form() - data =\ 
- { - 'name': 'Abe', - 'age': '35', - '_geolocation': [None, None], - 'attachments': ['abcd.jpg', 'efgh.jpg'], - 'children': - [ - { - 'children/name': 'Mike', - 'children/age': '5', - 'children/cartoons': - [ - { - 'children/cartoons/name': 'Tom & Jerry', - 'children/cartoons/why': 'Tom is silly', - }, - { - 'children/cartoons/name': 'Flinstones', - 'children/cartoons/why': - u"I like bamb bam\u0107", - } - ] - }, - { - 'children/name': 'John', - 'children/age': '2', - 'children/cartoons': [] - }, - { - 'children/name': 'Imora', - 'children/age': '3', - 'children/cartoons': - [ - { - 'children/cartoons/name': 'Shrek', - 'children/cartoons/why': 'He\'s so funny' - }, - { - 'children/cartoons/name': 'Dexter\'s Lab', - 'children/cartoons/why': 'He thinks hes smart', - 'children/cartoons/characters': - [ - { - 'children/cartoons/characters/name': - 'Dee Dee', - 'children/cartoons/characters/good_or_' - 'evil': 'good' - }, - { - 'children/cartoons/characters/name': - 'Dexter', - 'children/cartoons/characters/good_or_' - 'evil': 'evil' - }, - ] - } - ] - } - ] - } - expected_output =\ - { - 'survey': { - 'name': 'Abe', - 'age': '35' + data = { + "name": "Abe", + "age": "35", + "_geolocation": [None, None], + "attachments": ["abcd.jpg", "efgh.jpg"], + "children": [ + { + "children/name": "Mike", + "children/age": "5", + "children/cartoons": [ + { + "children/cartoons/name": "Tom & Jerry", + "children/cartoons/why": "Tom is silly", + }, + { + "children/cartoons/name": "Flinstones", + "children/cartoons/why": "I like bamb bam\u0107", + }, + ], }, - 'children': - [ - { - 'children/name': 'Mike', - 'children/age': '5', - '_index': 1, - '_parent_table_name': 'survey', - '_parent_index': 1 - }, - { - 'children/name': 'John', - 'children/age': '2', - '_index': 2, - '_parent_table_name': 'survey', - '_parent_index': 1 - }, - { - 'children/name': 'Imora', - 'children/age': '3', - '_index': 3, - '_parent_table_name': 'survey', - '_parent_index': 1 - }, - ], - 'children/cartoons': 
- [ - { - 'children/cartoons/name': 'Tom & Jerry', - 'children/cartoons/why': 'Tom is silly', - '_index': 1, - '_parent_table_name': 'children', - '_parent_index': 1 - }, - { - 'children/cartoons/name': 'Flinstones', - 'children/cartoons/why': u"I like bamb bam\u0107", - '_index': 2, - '_parent_table_name': 'children', - '_parent_index': 1 - }, - { - 'children/cartoons/name': 'Shrek', - 'children/cartoons/why': 'He\'s so funny', - '_index': 3, - '_parent_table_name': 'children', - '_parent_index': 3 - }, - { - 'children/cartoons/name': 'Dexter\'s Lab', - 'children/cartoons/why': 'He thinks hes smart', - '_index': 4, - '_parent_table_name': 'children', - '_parent_index': 3 - } - ], - 'children/cartoons/characters': - [ - { - 'children/cartoons/characters/name': 'Dee Dee', - 'children/cartoons/characters/good_or_evil': 'good', - '_index': 1, - '_parent_table_name': 'children/cartoons', - '_parent_index': 4 - }, - { - 'children/cartoons/characters/name': 'Dexter', - 'children/cartoons/characters/good_or_evil': 'evil', - '_index': 2, - '_parent_table_name': 'children/cartoons', - '_parent_index': 4 - } - ] - } - survey_name = 'survey' + {"children/name": "John", "children/age": "2", "children/cartoons": []}, + { + "children/name": "Imora", + "children/age": "3", + "children/cartoons": [ + { + "children/cartoons/name": "Shrek", + "children/cartoons/why": "He's so funny", + }, + { + "children/cartoons/name": "Dexter's Lab", + "children/cartoons/why": "He thinks hes smart", + "children/cartoons/characters": [ + { + "children/cartoons/characters/name": "Dee Dee", + "children/cartoons/characters/good_or_" + "evil": "good", + }, + { + "children/cartoons/characters/name": "Dexter", + "children/cartoons/characters/good_or_" + "evil": "evil", + }, + ], + }, + ], + }, + ], + } + expected_output = { + "survey": {"name": "Abe", "age": "35"}, + "children": [ + { + "children/name": "Mike", + "children/age": "5", + "_index": 1, + "_parent_table_name": "survey", + "_parent_index": 1, 
+ }, + { + "children/name": "John", + "children/age": "2", + "_index": 2, + "_parent_table_name": "survey", + "_parent_index": 1, + }, + { + "children/name": "Imora", + "children/age": "3", + "_index": 3, + "_parent_table_name": "survey", + "_parent_index": 1, + }, + ], + "children/cartoons": [ + { + "children/cartoons/name": "Tom & Jerry", + "children/cartoons/why": "Tom is silly", + "_index": 1, + "_parent_table_name": "children", + "_parent_index": 1, + }, + { + "children/cartoons/name": "Flinstones", + "children/cartoons/why": "I like bamb bam\u0107", + "_index": 2, + "_parent_table_name": "children", + "_parent_index": 1, + }, + { + "children/cartoons/name": "Shrek", + "children/cartoons/why": "He's so funny", + "_index": 3, + "_parent_table_name": "children", + "_parent_index": 3, + }, + { + "children/cartoons/name": "Dexter's Lab", + "children/cartoons/why": "He thinks hes smart", + "_index": 4, + "_parent_table_name": "children", + "_parent_index": 3, + }, + ], + "children/cartoons/characters": [ + { + "children/cartoons/characters/name": "Dee Dee", + "children/cartoons/characters/good_or_evil": "good", + "_index": 1, + "_parent_table_name": "children/cartoons", + "_parent_index": 4, + }, + { + "children/cartoons/characters/name": "Dexter", + "children/cartoons/characters/good_or_evil": "evil", + "_index": 2, + "_parent_table_name": "children/cartoons", + "_parent_index": 4, + }, + ], + } + survey_name = "survey" indices = {survey_name: 0} - output = dict_to_joined_export(data, 1, indices, survey_name, - self.xform.get_survey(), data, None) + output = dict_to_joined_export( + data, 1, indices, survey_name, self.xform.get_survey(), data, None + ) self.assertEqual(output[survey_name], expected_output[survey_name]) # 1st level - self.assertEqual(len(output['children']), 3) - for child in enumerate(['Mike', 'John', 'Imora']): + self.assertEqual(len(output["children"]), 3) + for child in enumerate(["Mike", "John", "Imora"]): index = child[0] name = child[1] 
self.assertEqual( - [x for x in output['children'] - if x['children/name'] == name][0], - expected_output['children'][index]) + [x for x in output["children"] if x["children/name"] == name][0], + expected_output["children"][index], + ) # 2nd level - self.assertEqual(len(output['children/cartoons']), 4) + self.assertEqual(len(output["children/cartoons"]), 4) for cartoon in enumerate( - ['Tom & Jerry', 'Flinstones', 'Shrek', 'Dexter\'s Lab']): + ["Tom & Jerry", "Flinstones", "Shrek", "Dexter's Lab"] + ): index = cartoon[0] name = cartoon[1] self.assertEqual( - [x for x in output['children/cartoons'] - if x['children/cartoons/name'] == name][0], - expected_output['children/cartoons'][index]) + [ + x + for x in output["children/cartoons"] + if x["children/cartoons/name"] == name + ][0], + expected_output["children/cartoons"][index], + ) # 3rd level - self.assertEqual(len(output['children/cartoons/characters']), 2) - for characters in enumerate(['Dee Dee', 'Dexter']): + self.assertEqual(len(output["children/cartoons/characters"]), 2) + for characters in enumerate(["Dee Dee", "Dexter"]): index = characters[0] name = characters[1] self.assertEqual( - [x for x in output['children/cartoons/characters'] - if x['children/cartoons/characters/name'] == name][0], - expected_output['children/cartoons/characters'][index]) + [ + x + for x in output["children/cartoons/characters"] + if x["children/cartoons/characters/name"] == name + ][0], + expected_output["children/cartoons/characters"][index], + ) def test_generate_csv_zip_export(self): # publish xls form @@ -1195,15 +1276,11 @@ def test_generate_csv_zip_export(self): self.options["split_select_multiples"] = True self.options["id_string"] = self.xform.id_string - export = generate_export( - Export.CSV_ZIP_EXPORT, - self.xform, - None, - self.options) + export = generate_export(Export.CSV_ZIP_EXPORT, self.xform, None, self.options) storage = get_storage_class()() self.assertTrue(storage.exists(export.filepath)) path, ext = 
os.path.splitext(export.filename) - self.assertEqual(ext, '.zip') + self.assertEqual(ext, ".zip") def test_dict_to_joined_export_notes(self): self._publish_transportation_form() @@ -1220,43 +1297,50 @@ def test_dict_to_joined_export_notes(self): "note": "Note 1", "date_created": "2013-07-03T08:26:10", "id": 356, - "date_modified": "2013-07-03T08:26:10" + "date_modified": "2013-07-03T08:26:10", }, { "note": "Note 2", "date_created": "2013-07-03T08:34:40", "id": 357, - "date_modified": "2013-07-03T08:34:40" + "date_modified": "2013-07-03T08:34:40", }, { "note": "Note 3", "date_created": "2013-07-03T08:56:14", "id": 361, - "date_modified": "2013-07-03T08:56:14" - } + "date_modified": "2013-07-03T08:56:14", + }, ], "meta/instanceID": "uuid:5b4752eb-e13c-483e-87cb-e67ca6bb61e5", "formhub/uuid": "633ec390e024411ba5ce634db7807e62", "amount": "", } - survey_name = 'tutorial' + survey_name = "tutorial" indices = {survey_name: 0} - data = dict_to_joined_export(submission, 1, indices, survey_name, - self.xform.get_survey(), submission, None) + data = dict_to_joined_export( + submission, + 1, + indices, + survey_name, + self.xform.get_survey(), + submission, + None, + ) expected_data = { - 'tutorial': { - '_id': 579828, - '_submission_time': '2013-07-03T08:26:10', - '_uuid': '5b4752eb-e13c-483e-87cb-e67ca6bb61e5', - '_bamboo_dataset_id': '', - 'amount': '', - '_xform_id_string': 'test_data_types', - '_userform_id': 'larryweya_test_data_types', - '_status': 'submitted_via_web', - '_notes': 'Note 1\nNote 2\nNote 3', - 'meta/instanceID': 'uuid:5b4752eb-e13c-483e-87cb-e67ca6bb61e5', - 'formhub/uuid': '633ec390e024411ba5ce634db7807e62' + "tutorial": { + "_id": 579828, + "_submission_time": "2013-07-03T08:26:10", + "_uuid": "5b4752eb-e13c-483e-87cb-e67ca6bb61e5", + "_bamboo_dataset_id": "", + "amount": "", + "_xform_id_string": "test_data_types", + "_userform_id": "larryweya_test_data_types", + "_status": "submitted_via_web", + "_notes": "Note 1\nNote 2\nNote 3", + 
"meta/instanceID": "uuid:5b4752eb-e13c-483e-87cb-e67ca6bb61e5", + "formhub/uuid": "633ec390e024411ba5ce634db7807e62", } } self.assertEqual(sorted(data), sorted(expected_data)) @@ -1271,7 +1355,8 @@ def test_create_xlsx_export_non_existent_id(self): id_string = self.options.get("id_string") result = create_xlsx_export( - username, id_string, non_existent_id, **self.options) + username, id_string, non_existent_id, **self.options + ) self.assertEqual(result, None) @@ -1280,44 +1365,49 @@ def test_create_external_export_url(self): self._submit_transport_instance() num_exports = Export.objects.count() - server = 'http://localhost:8080/xls/23fa4c38c0054748a984ffd89021a295' - data_value = 'template 1 |{0}'.format(server) + server = "http://localhost:8080/xls/23fa4c38c0054748a984ffd89021a295" + data_value = "template 1 |{0}".format(server) meta = MetaData.external_export(self.xform, data_value) custom_params = { - 'meta': meta.id, + "meta": meta.id, } # create export - create_export_url = reverse(create_export, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': Export.EXTERNAL_EXPORT - }) + create_export_url = reverse( + create_export, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": Export.EXTERNAL_EXPORT, + }, + ) response = self.client.post(create_export_url, custom_params) self.assertEqual(response.status_code, 302) self.assertEqual(Export.objects.count(), num_exports + 1) - @patch('onadata.apps.viewer.tasks.get_object_or_404') - def test_create_external_export_url_with_non_existing_export_id( - self, mock_404): - mock_404.side_effect = Http404('No Export matches the given query.') + @patch("onadata.apps.viewer.tasks.get_object_or_404") + def test_create_external_export_url_with_non_existing_export_id(self, mock_404): + mock_404.side_effect = Http404("No Export matches the given query.") self._publish_transportation_form() self._submit_transport_instance() - server = 
'http://localhost:8080/xls/23fa4c38c0054748a984ffd89021a295' - data_value = 'template 1 |{0}'.format(server) + server = "http://localhost:8080/xls/23fa4c38c0054748a984ffd89021a295" + data_value = "template 1 |{0}".format(server) meta = MetaData.external_export(self.xform, data_value) custom_params = { - 'meta': meta.id, + "meta": meta.id, } # create export - create_export_url = reverse(create_export, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': Export.EXTERNAL_EXPORT - }) + create_export_url = reverse( + create_export, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": Export.EXTERNAL_EXPORT, + }, + ) response = self.client.post(create_export_url, custom_params) self.assertEqual(response.status_code, 404) @@ -1328,141 +1418,121 @@ def test_create_external_export_without_template(self): num_exports = Export.objects.count() # create export - create_export_url = reverse(create_export, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': Export.EXTERNAL_EXPORT - }) + create_export_url = reverse( + create_export, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": Export.EXTERNAL_EXPORT, + }, + ) response = self.client.post(create_export_url) self.assertEqual(response.status_code, 403) - self.assertEqual(response.content, b'No XLS Template set.') + self.assertEqual(response.content, b"No XLS Template set.") self.assertEqual(Export.objects.count(), num_exports) def test_all_keys_cleaned_of_slashes(self): - data =\ - { - 'name': 'Abe', - 'age': '35', - '_geolocation': [None, None], - 'attachments': ['abcd.jpg', 'efgh.jpg'], - 'section1/location': True, - 'children': - [ - { - 'children/name': 'Mike', - 'children/age': '5', - 'children/cartoons': - [ - { - 'children/cartoons/name': 'Tom & Jerry', - 'children/cartoons/why': 'Tom is silly', - }, - { - 'children/cartoons/name': 
'Flinstones', - 'children/cartoons/why': - u"I like bamb bam\u0107", - } - ] - }, - { - 'children/name': 'John', - 'children/age': '2', - 'children/cartoons': [] - }, - { - 'children/name': 'Imora', - 'children/age': '3', - 'children/cartoons': - [ - { - 'children/cartoons/name': 'Shrek', - 'children/cartoons/why': 'He\'s so funny' - }, - { - 'children/cartoons/name': 'Dexter\'s Lab', - 'children/cartoons/why': 'He thinks hes smart', - 'children/cartoons/characters': - [ - { - 'children/cartoons/characters/name': - 'Dee Dee', - 'children/cartoons/characters/good_or_' - 'evil': 'good' - }, - { - 'children/cartoons/characters/name': - 'Dexter', - 'children/cartoons/characters/good_or_' - 'evil': 'evil' - }, - ] - } - ] - } - ] - } - - expected_data = { - 'name': 'Abe', - 'age': '35', - '_geolocation': [None, None], - 'attachments': ['abcd.jpg', 'efgh.jpg'], - 'section1_location': True, - 'children': - [ + data = { + "name": "Abe", + "age": "35", + "_geolocation": [None, None], + "attachments": ["abcd.jpg", "efgh.jpg"], + "section1/location": True, + "children": [ { - 'children_name': 'Mike', - 'children_age': '5', - 'children_cartoons': - [ + "children/name": "Mike", + "children/age": "5", + "children/cartoons": [ { - 'children_cartoons_name': 'Tom & Jerry', - 'children_cartoons_why': 'Tom is silly', + "children/cartoons/name": "Tom & Jerry", + "children/cartoons/why": "Tom is silly", }, { - 'children_cartoons_name': 'Flinstones', - 'children_cartoons_why': - u"I like bamb bam\u0107", - } - ] + "children/cartoons/name": "Flinstones", + "children/cartoons/why": "I like bamb bam\u0107", + }, + ], }, + {"children/name": "John", "children/age": "2", "children/cartoons": []}, { - 'children_name': 'John', - 'children_age': '2', - 'children_cartoons': [] + "children/name": "Imora", + "children/age": "3", + "children/cartoons": [ + { + "children/cartoons/name": "Shrek", + "children/cartoons/why": "He's so funny", + }, + { + "children/cartoons/name": "Dexter's Lab", + 
"children/cartoons/why": "He thinks hes smart", + "children/cartoons/characters": [ + { + "children/cartoons/characters/name": "Dee Dee", + "children/cartoons/characters/good_or_" + "evil": "good", + }, + { + "children/cartoons/characters/name": "Dexter", + "children/cartoons/characters/good_or_" + "evil": "evil", + }, + ], + }, + ], }, + ], + } + + expected_data = { + "name": "Abe", + "age": "35", + "_geolocation": [None, None], + "attachments": ["abcd.jpg", "efgh.jpg"], + "section1_location": True, + "children": [ { - 'children_name': 'Imora', - 'children_age': '3', - 'children_cartoons': - [ + "children_name": "Mike", + "children_age": "5", + "children_cartoons": [ { - 'children_cartoons_name': 'Shrek', - 'children_cartoons_why': 'He\'s so funny' + "children_cartoons_name": "Tom & Jerry", + "children_cartoons_why": "Tom is silly", }, { - 'children_cartoons_name': 'Dexter\'s Lab', - 'children_cartoons_why': 'He thinks hes smart', - 'children_cartoons_characters': - [ + "children_cartoons_name": "Flinstones", + "children_cartoons_why": "I like bamb bam\u0107", + }, + ], + }, + {"children_name": "John", "children_age": "2", "children_cartoons": []}, + { + "children_name": "Imora", + "children_age": "3", + "children_cartoons": [ + { + "children_cartoons_name": "Shrek", + "children_cartoons_why": "He's so funny", + }, + { + "children_cartoons_name": "Dexter's Lab", + "children_cartoons_why": "He thinks hes smart", + "children_cartoons_characters": [ { - 'children_cartoons_characters_name': - 'Dee Dee', - 'children_cartoons_characters_good_or_' - 'evil': 'good' + "children_cartoons_characters_name": "Dee Dee", + "children_cartoons_characters_good_or_" + "evil": "good", }, { - 'children_cartoons_characters_name': - 'Dexter', - 'children_cartoons_characters_good_or_' - 'evil': 'evil' + "children_cartoons_characters_name": "Dexter", + "children_cartoons_characters_good_or_" + "evil": "evil", }, - ] - } - ] - } - ] + ], + }, + ], + }, + ], } result_data = 
clean_keys_of_slashes(data) diff --git a/onadata/apps/viewer/tests/test_parsed_instance.py b/onadata/apps/viewer/tests/test_parsed_instance.py index c3484e38f8..a3942df52d 100644 --- a/onadata/apps/viewer/tests/test_parsed_instance.py +++ b/onadata/apps/viewer/tests/test_parsed_instance.py @@ -5,7 +5,9 @@ from onadata.apps.main.models.user_profile import UserProfile from onadata.apps.main.tests.test_base import TestBase from onadata.apps.viewer.models.parsed_instance import ( - get_where_clause, get_sql_with_params, _parse_sort_fields + get_where_clause, + get_sql_with_params, + _parse_sort_fields, ) from onadata.apps.viewer.parsed_instance_tools import _parse_where @@ -33,10 +35,7 @@ def test_parse_where_clause_simple_query(self): def test_parse_where_with_date_value(self): query = { - "created_at": { - "$gte": datetime(2022, 1, 1), - "$lte": datetime(2022, 12, 31) - } + "created_at": {"$gte": datetime(2022, 1, 1), "$lte": datetime(2022, 12, 31)} } known_integers = [] known_decimals = [] @@ -76,23 +75,22 @@ def test_parse_where_with_null_value(self): def test_get_where_clause_with_json_query(self): query = '{"name": "bla"}' where, where_params = get_where_clause(query) - self.assertEqual(where, [u"json->>%s = %s"]) + self.assertEqual(where, ["json->>%s = %s"]) self.assertEqual(where_params, ["name", "bla"]) def test_get_where_clause_with_string_query(self): - query = 'bla' + query = "bla" where, where_params = get_where_clause(query) - self.assertEqual(where, [u"json::text ~* cast(%s as text)"]) + self.assertEqual(where, ["json::text ~* cast(%s as text)"]) self.assertEqual(where_params, ["bla"]) def test_get_where_clause_with_integer(self): - query = '11' + query = "11" where, where_params = get_where_clause(query) - self.assertEqual(where, [u"json::text ~* cast(%s as text)"]) + self.assertEqual(where, ["json::text ~* cast(%s as text)"]) self.assertEqual(where_params, [11]) def test_retrieve_records_based_on_form_verion(self): - self._create_user_and_login() 
self._publish_transportation_form() initial_version = self.xform.version @@ -104,33 +102,39 @@ def test_retrieve_records_based_on_form_verion(self): # the instances below have a different form vresion transport_instances_with_different_version = [ - 'transport_2011-07-25_19-05-51', - 'transport_2011-07-25_19-05-52' + "transport_2011-07-25_19-05-51", + "transport_2011-07-25_19-05-52", ] for a in transport_instances_with_different_version: - self._make_submission(os.path.join( - self.this_directory, 'fixtures', - 'transportation', 'instances', a, a + '.xml')) - - instances = Instance.objects.filter( - xform__id_string=self.xform.id_string - ) + self._make_submission( + os.path.join( + self.this_directory, + "fixtures", + "transportation", + "instances", + a, + a + ".xml", + ) + ) + + instances = Instance.objects.filter(xform__id_string=self.xform.id_string) instances_count = instances.count() self.assertEqual(instances_count, 4) # retrieve based on updated form version - sql, params, records = get_sql_with_params( + sql, params = get_sql_with_params( xform=self.xform, query='{"_version": "20170517"}' ) - - self.assertEqual(2, records.count()) + instances = Instance.objects.raw(sql, params) + self.assertEqual(2, len([instance for instance in instances])) # retrived record based on initial form version - sql, params, records = get_sql_with_params( + sql, params = get_sql_with_params( xform=self.xform, query='{"_version": "%s"}' % initial_version ) - self.assertEqual(2, records.count()) + instances = Instance.objects.raw(sql, params) + self.assertEqual(2, len([instance for instance in instances])) def test_retrieve_records_using_list_of_queries(self): self._create_user_and_login() @@ -142,38 +146,48 @@ def test_retrieve_records_using_list_of_queries(self): self._submit_transport_instance(survey_at=a) transport_instances_with_different_version = [ - 'transport_2011-07-25_19-05-51', - 'transport_2011-07-25_19-05-52' + "transport_2011-07-25_19-05-51", + 
"transport_2011-07-25_19-05-52", ] # make submissions for a in transport_instances_with_different_version: - self._make_submission(os.path.join( - self.this_directory, 'fixtures', - 'transportation', 'instances', a, a + '.xml')) + self._make_submission( + os.path.join( + self.this_directory, + "fixtures", + "transportation", + "instances", + a, + a + ".xml", + ) + ) instances = Instance.objects.filter( xform__id_string=self.xform.id_string - ).order_by('id') + ).order_by("id") instances_count = instances.count() self.assertEqual(instances_count, 4) # bob accesses all records - sql, params, records = get_sql_with_params( - xform=self.xform, query={'_submitted_by': 'bob'} + + sql, params = get_sql_with_params( + xform=self.xform, query={"_submitted_by": "bob"} ) - self.assertEqual(4, records.count()) + instances = Instance.objects.raw(sql, params) + self.assertEqual(4, len([instance for instance in instances])) # only three records with name ambulance - sql, params, records = get_sql_with_params( - xform=self.xform, query=[{'_submitted_by': 'bob'}, 'ambulance'] + sql, params = get_sql_with_params( + xform=self.xform, query=[{"_submitted_by": "bob"}, "ambulance"] ) - self.assertEqual(3, records.count()) + ambulance_instances = Instance.objects.raw(sql, params) + self.assertEqual(3, len([instance for instance in ambulance_instances])) # create user alice - user_alice = self._create_user('alice', 'alice') + user_alice = self._create_user("alice", "alice") # create user profile and set require_auth to false for tests - profile, created = UserProfile.objects.get_or_create(user=user_alice) + profile, _ = UserProfile.objects.get_or_create(user=user_alice) profile.require_auth = False profile.save() @@ -184,32 +198,35 @@ def test_retrieve_records_using_list_of_queries(self): self.xform.save() # bob accesses only two record - sql, params, records = get_sql_with_params( - xform=self.xform, query={'_submitted_by': 'bob'} + sql, params = get_sql_with_params( + xform=self.xform, 
query={"_submitted_by": "bob"} ) - self.assertEqual(2, records.count()) + instances = Instance.objects.raw(sql, params) + self.assertEqual(2, len([instance for instance in instances])) # both remaining records have ambulance - sql, params, records = get_sql_with_params( - xform=self.xform, query=[{'_submitted_by': 'bob'}, 'ambulance'] + sql, params = get_sql_with_params( + xform=self.xform, query=[{"_submitted_by": "bob"}, "ambulance"] ) instances_debug = list( - self.xform.instances.filter(user__username='bob').values_list( - 'json', flat=True)) - all_instances_debug = list( - instances.values_list('json', flat=True) + self.xform.instances.filter(user__username="bob").values_list( + "json", flat=True + ) ) + instances = Instance.objects.raw(sql, params) + all_instances_debug = [instance.json for instance in instances] self.assertEqual( - 2, records.count(), - 'Fields do not have ambulance. ' - f'Fields submitted by bob are {instances_debug}.' - f'All instances {all_instances_debug}') + 2, + len(all_instances_debug), + "Fields do not have ambulance. " + f"Fields submitted by bob are {instances_debug}." 
+ f"All instances {all_instances_debug}", + ) def test_parse_sort_fields_function(self): """ Test that the _parse_sort_fields function works as intended """ - fields = ['name', '_submission_time', '-_date_modified'] - expected_return = ['name', 'date_created', '-date_modified'] - self.assertEqual( - [i for i in _parse_sort_fields(fields)], expected_return) + fields = ["name", "_submission_time", "-_date_modified"] + expected_return = ["name", "date_created", "-date_modified"] + self.assertEqual([i for i in _parse_sort_fields(fields)], expected_return) diff --git a/onadata/libs/tests/utils/test_csv_builder.py b/onadata/libs/tests/utils/test_csv_builder.py index 7d487a2946..f9b2b631f7 100644 --- a/onadata/libs/tests/utils/test_csv_builder.py +++ b/onadata/libs/tests/utils/test_csv_builder.py @@ -3,22 +3,24 @@ Test CSVDataFrameBuilder """ import csv -from onadata.libs.exceptions import NoRecordsFoundError import os from tempfile import NamedTemporaryFile from builtins import chr, open from django.test.utils import override_settings from django.utils.dateparse import parse_datetime -from mock import patch from onadata.apps.logger.models.xform import XForm from onadata.apps.logger.xform_instance_parser import xform_instance_to_dict from onadata.apps.main.tests.test_base import TestBase from onadata.libs.utils.common_tags import NA_REP from onadata.libs.utils.csv_builder import ( - AbstractDataFrameBuilder, CSVDataFrameBuilder, get_prefix_from_xpath, - remove_dups_from_list_maintain_order, write_to_csv) + AbstractDataFrameBuilder, + CSVDataFrameBuilder, + get_prefix_from_xpath, + remove_dups_from_list_maintain_order, + write_to_csv, +) def xls_filepath_from_fixture_name(fixture_name): @@ -26,8 +28,11 @@ def xls_filepath_from_fixture_name(fixture_name): Return an xls file path at tests/fixtures/[fixture]/fixture.xls """ return os.path.join( - os.path.dirname(os.path.abspath(__file__)), "fixtures", fixture_name, - fixture_name + ".xlsx") + 
os.path.dirname(os.path.abspath(__file__)), + "fixtures", + fixture_name, + fixture_name + ".xlsx", + ) # pylint: disable=invalid-name @@ -36,8 +41,12 @@ def xml_inst_filepath_from_fixture_name(fixture_name, instance_name): Returns the path to a fixture given fixture_name and instance_name. """ return os.path.join( - os.path.dirname(os.path.abspath(__file__)), "fixtures", fixture_name, - "instances", fixture_name + "_" + instance_name + ".xml") + os.path.dirname(os.path.abspath(__file__)), + "fixtures", + fixture_name, + "instances", + fixture_name + "_" + instance_name + ".xml", + ) class TestCSVDataFrameBuilder(TestBase): @@ -47,7 +56,7 @@ class TestCSVDataFrameBuilder(TestBase): def setUp(self): self._create_user_and_login() - self._submission_time = parse_datetime('2013-02-18 15:54:01Z') + self._submission_time = parse_datetime("2013-02-18 15:54:01Z") def _publish_xls_fixture_set_xform(self, fixture): """ @@ -60,38 +69,41 @@ def _publish_xls_fixture_set_xform(self, fixture): # pylint: disable=attribute-defined-outside-init self.xform = XForm.objects.all().reverse()[0] - def _submit_fixture_instance(self, fixture, instance, - submission_time=None): + def _submit_fixture_instance(self, fixture, instance, submission_time=None): """ Submit an instance at tests/fixtures/[fixture]/instances/[fixture]_[instance].xml """ xml_submission_file_path = xml_inst_filepath_from_fixture_name( - fixture, instance) + fixture, instance + ) self._make_submission( - xml_submission_file_path, forced_submission_time=submission_time) + xml_submission_file_path, forced_submission_time=submission_time + ) self.assertEqual(self.response.status_code, 201) def _publish_single_level_repeat_form(self): self._publish_xls_fixture_set_xform("new_repeats") # pylint: disable=attribute-defined-outside-init - self.survey_name = u"new_repeats" + self.survey_name = "new_repeats" def _publish_nested_repeats_form(self): self._publish_xls_fixture_set_xform("nested_repeats") # pylint: 
disable=attribute-defined-outside-init - self.survey_name = u"nested_repeats" + self.survey_name = "nested_repeats" def _publish_grouped_gps_form(self): self._publish_xls_fixture_set_xform("grouped_gps") # pylint: disable=attribute-defined-outside-init - self.survey_name = u"grouped_gps" + self.survey_name = "grouped_gps" def _csv_data_for_dataframe(self): csv_df_builder = CSVDataFrameBuilder( - self.user.username, self.xform.id_string, include_images=False) - # pylint: disable=protected-access - cursor = csv_df_builder._query_data() + self.user.username, self.xform.id_string, include_images=False + ) + cursor = ( + self.xform.instances.all().order_by("id").values_list("json", flat=True) + ) return [d for d in csv_df_builder._format_for_dataframe(cursor)] def test_csv_dataframe_export_to(self): @@ -100,17 +112,26 @@ def test_csv_dataframe_export_to(self): """ self._publish_nested_repeats_form() self._submit_fixture_instance( - "nested_repeats", "01", submission_time=self._submission_time) + "nested_repeats", "01", submission_time=self._submission_time + ) self._submit_fixture_instance( - "nested_repeats", "02", submission_time=self._submission_time) + "nested_repeats", "02", submission_time=self._submission_time + ) csv_df_builder = CSVDataFrameBuilder( - self.user.username, self.xform.id_string, include_images=False) + self.user.username, self.xform.id_string, include_images=False + ) temp_file = NamedTemporaryFile(suffix=".csv", delete=False) - csv_df_builder.export_to(temp_file.name) + cursor = ( + self.xform.instances.all().order_by("id").values_list("json", flat=True) + ) + csv_df_builder.export_to(temp_file.name, cursor) csv_fixture_path = os.path.join( - os.path.dirname(os.path.abspath(__file__)), "fixtures", - "nested_repeats", "nested_repeats.csv") + os.path.dirname(os.path.abspath(__file__)), + "fixtures", + "nested_repeats", + "nested_repeats.csv", + ) temp_file.close() with open(temp_file.name) as csv_file: self._test_csv_files(csv_file, 
csv_fixture_path) @@ -125,24 +146,27 @@ def test_csv_columns_for_gps_within_groups(self): self._submit_fixture_instance("grouped_gps", "01") data = self._csv_data_for_dataframe() columns = list(data[0]) - expected_columns = [ - u'gps_group/gps', - u'gps_group/_gps_latitude', - u'gps_group/_gps_longitude', - u'gps_group/_gps_altitude', - u'gps_group/_gps_precision', - u'web_browsers/firefox', - u'web_browsers/chrome', - u'web_browsers/ie', - u'web_browsers/safari', - u'_xform_id', - ] + AbstractDataFrameBuilder.ADDITIONAL_COLUMNS +\ - AbstractDataFrameBuilder.IGNORED_COLUMNS + expected_columns = ( + [ + "gps_group/gps", + "gps_group/_gps_latitude", + "gps_group/_gps_longitude", + "gps_group/_gps_altitude", + "gps_group/_gps_precision", + "web_browsers/firefox", + "web_browsers/chrome", + "web_browsers/ie", + "web_browsers/safari", + "_xform_id", + ] + + AbstractDataFrameBuilder.ADDITIONAL_COLUMNS + + AbstractDataFrameBuilder.IGNORED_COLUMNS + ) try: - expected_columns.remove(u'_deleted_at') - expected_columns.remove(u'_review_status') - expected_columns.remove(u'_review_comment') - expected_columns.remove(u'_review_date') + expected_columns.remove("_deleted_at") + expected_columns.remove("_review_status") + expected_columns.remove("_review_comment") + expected_columns.remove("_review_date") except ValueError: pass self.maxDiff = None @@ -164,23 +188,23 @@ def test_format_mongo_data_for_csv(self): if key in data_0: data_0.pop(key) expected_data_0 = { - u'gps': u'-1.2627557 36.7926442 0.0 30.0', - u'_gps_latitude': u'-1.2627557', - u'_gps_longitude': u'36.7926442', - u'_gps_altitude': u'0.0', - u'_gps_precision': u'30.0', - u'kids/has_kids': u'1', - u'info/age': 80, - u'kids/kids_details[1]/kids_name': u'Abel', - u'kids/kids_details[1]/kids_age': 50, - u'kids/kids_details[2]/kids_name': u'Cain', - u'kids/kids_details[2]/kids_age': 76, - u'web_browsers/chrome': True, - u'web_browsers/ie': True, - u'web_browsers/safari': False, - u'web_browsers/firefox': False, - 
u'info/name': u'Adam', - u'_xform_id': self.xform.pk, + "gps": "-1.2627557 36.7926442 0.0 30.0", + "_gps_latitude": "-1.2627557", + "_gps_longitude": "36.7926442", + "_gps_altitude": "0.0", + "_gps_precision": "30.0", + "kids/has_kids": "1", + "info/age": 80, + "kids/kids_details[1]/kids_name": "Abel", + "kids/kids_details[1]/kids_age": 50, + "kids/kids_details[2]/kids_name": "Cain", + "kids/kids_details[2]/kids_age": 76, + "web_browsers/chrome": True, + "web_browsers/ie": True, + "web_browsers/safari": False, + "web_browsers/firefox": False, + "info/name": "Adam", + "_xform_id": self.xform.pk, } self.assertEqual(expected_data_0, data_0) @@ -191,42 +215,43 @@ def test_split_select_multiples(self): self._publish_nested_repeats_form() self._submit_fixture_instance("nested_repeats", "01") csv_df_builder = CSVDataFrameBuilder( - self.user.username, self.xform.id_string, include_images=False) + self.user.username, self.xform.id_string, include_images=False + ) # pylint: disable=protected-access - cursor = [k for k in csv_df_builder._query_data()] + cursor = ( + self.xform.instances.all().order_by("id").values_list("json", flat=True) + ) record = cursor[0] - select_multiples = \ - CSVDataFrameBuilder._collect_select_multiples(self.xform) - result = CSVDataFrameBuilder._split_select_multiples( - record, select_multiples) + select_multiples = CSVDataFrameBuilder._collect_select_multiples(self.xform) + result = CSVDataFrameBuilder._split_select_multiples(record, select_multiples) expected_result = { - u'web_browsers/ie': True, - u'web_browsers/safari': True, - u'web_browsers/firefox': False, - u'web_browsers/chrome': False + "web_browsers/ie": True, + "web_browsers/safari": True, + "web_browsers/firefox": False, + "web_browsers/chrome": False, } # build a new dictionary only composed of the keys we want to use in # the comparison - result = dict([(key, result[key]) for key in list(result) - if key in list(expected_result)]) + result = dict( + [(key, result[key]) for key in 
list(result) if key in list(expected_result)] + ) self.assertEqual(expected_result, result) csv_df_builder = CSVDataFrameBuilder( - self.user.username, - self.xform.id_string, - binary_select_multiples=True) + self.user.username, self.xform.id_string, binary_select_multiples=True + ) # pylint: disable=protected-access - result = csv_df_builder._split_select_multiples( - record, select_multiples) + result = csv_df_builder._split_select_multiples(record, select_multiples) expected_result = { - u'web_browsers/ie': 1, - u'web_browsers/safari': 1, - u'web_browsers/firefox': 0, - u'web_browsers/chrome': 0 + "web_browsers/ie": 1, + "web_browsers/safari": 1, + "web_browsers/firefox": 0, + "web_browsers/chrome": 0, } # build a new dictionary only composed of the keys we want to use in # the comparison - result = dict([(key, result[key]) for key in list(result) - if key in list(expected_result)]) + result = dict( + [(key, result[key]) for key in list(result) if key in list(expected_result)] + ) self.assertEqual(expected_result, result) def test_split_select_multiples_values(self): @@ -235,25 +260,25 @@ def test_split_select_multiples_values(self): """ self._publish_nested_repeats_form() self._submit_fixture_instance("nested_repeats", "01") - csv_df_builder = CSVDataFrameBuilder( - self.user.username, self.xform.id_string, include_images=False) - # pylint: disable=protected-access - cursor = [k for k in csv_df_builder._query_data()] + cursor = ( + self.xform.instances.all().order_by("id").values_list("json", flat=True) + ) record = cursor[0] - select_multiples = \ - CSVDataFrameBuilder._collect_select_multiples(self.xform) + select_multiples = CSVDataFrameBuilder._collect_select_multiples(self.xform) result = CSVDataFrameBuilder._split_select_multiples( - record, select_multiples, value_select_multiples=True) + record, select_multiples, value_select_multiples=True + ) expected_result = { - u'web_browsers/ie': u'ie', - u'web_browsers/safari': u'safari', - 
u'web_browsers/firefox': None, - u'web_browsers/chrome': None + "web_browsers/ie": "ie", + "web_browsers/safari": "safari", + "web_browsers/firefox": None, + "web_browsers/chrome": None, } # build a new dictionary only composed of the keys we want to use in # the comparison - result = dict([(key, result[key]) for key in list(result) - if key in list(expected_result)]) + result = dict( + [(key, result[key]) for key in list(result) if key in list(expected_result)] + ) self.assertEqual(expected_result, result) # pylint: disable=invalid-name @@ -263,44 +288,43 @@ def test_split_select_multiples_within_repeats(self): """ self.maxDiff = None record = { - 'name': 'Tom', - 'age': 23, - 'browser_use': [{ - 'browser_use/year': '2010', - 'browser_use/browsers': 'firefox safari' - }, { - 'browser_use/year': '2011', - 'browser_use/browsers': 'firefox chrome' - }] + "name": "Tom", + "age": 23, + "browser_use": [ + {"browser_use/year": "2010", "browser_use/browsers": "firefox safari"}, + {"browser_use/year": "2011", "browser_use/browsers": "firefox chrome"}, + ], } # yapf: disable expected_result = { - 'name': 'Tom', - 'age': 23, - 'browser_use': [{ - 'browser_use/year': '2010', - 'browser_use/browsers/firefox': True, - 'browser_use/browsers/safari': True, - 'browser_use/browsers/ie': False, - 'browser_use/browsers/chrome': False - }, { - 'browser_use/year': '2011', - 'browser_use/browsers/firefox': True, - 'browser_use/browsers/safari': False, - 'browser_use/browsers/ie': False, - 'browser_use/browsers/chrome': True - }] + "name": "Tom", + "age": 23, + "browser_use": [ + { + "browser_use/year": "2010", + "browser_use/browsers/firefox": True, + "browser_use/browsers/safari": True, + "browser_use/browsers/ie": False, + "browser_use/browsers/chrome": False, + }, + { + "browser_use/year": "2011", + "browser_use/browsers/firefox": True, + "browser_use/browsers/safari": False, + "browser_use/browsers/ie": False, + "browser_use/browsers/chrome": True, + }, + ], } # yapf: disable 
select_multiples = { - 'browser_use/browsers': [ - ('browser_use/browsers/firefox', 'firefox', 'Firefox'), - ('browser_use/browsers/safari', 'safari', 'Safari'), - ('browser_use/browsers/ie', 'ie', 'Internet Explorer'), - ('browser_use/browsers/chrome', 'chrome', 'Google Chrome') + "browser_use/browsers": [ + ("browser_use/browsers/firefox", "firefox", "Firefox"), + ("browser_use/browsers/safari", "safari", "Safari"), + ("browser_use/browsers/ie", "ie", "Internet Explorer"), + ("browser_use/browsers/chrome", "chrome", "Google Chrome"), ] } # pylint: disable=protected-access - result = CSVDataFrameBuilder._split_select_multiples( - record, select_multiples) + result = CSVDataFrameBuilder._split_select_multiples(record, select_multiples) self.assertEqual(expected_result, result) def test_split_gps_fields(self): @@ -308,14 +332,14 @@ def test_split_gps_fields(self): Test GPS fields data are split into latitude, longitude, altitude, and precision segments. """ - record = {'gps': '5 6 7 8'} - gps_fields = ['gps'] + record = {"gps": "5 6 7 8"} + gps_fields = ["gps"] expected_result = { - 'gps': '5 6 7 8', - '_gps_latitude': '5', - '_gps_longitude': '6', - '_gps_altitude': '7', - '_gps_precision': '8', + "gps": "5 6 7 8", + "_gps_latitude": "5", + "_gps_longitude": "6", + "_gps_altitude": "7", + "_gps_precision": "8", } # pylint: disable=protected-access AbstractDataFrameBuilder._split_gps_fields(record, gps_fields) @@ -327,27 +351,26 @@ def test_split_gps_fields_within_repeats(self): Test GPS fields data is split within repeats. 
""" record = { - 'a_repeat': [{ - 'a_repeat/gps': '1 2 3 4' - }, { - 'a_repeat/gps': '5 6 7 8' - }] + "a_repeat": [{"a_repeat/gps": "1 2 3 4"}, {"a_repeat/gps": "5 6 7 8"}] } - gps_fields = ['a_repeat/gps'] + gps_fields = ["a_repeat/gps"] expected_result = { - 'a_repeat': [{ - 'a_repeat/gps': '1 2 3 4', - 'a_repeat/_gps_latitude': '1', - 'a_repeat/_gps_longitude': '2', - 'a_repeat/_gps_altitude': '3', - 'a_repeat/_gps_precision': '4', - }, { - 'a_repeat/gps': '5 6 7 8', - 'a_repeat/_gps_latitude': '5', - 'a_repeat/_gps_longitude': '6', - 'a_repeat/_gps_altitude': '7', - 'a_repeat/_gps_precision': '8', - }] + "a_repeat": [ + { + "a_repeat/gps": "1 2 3 4", + "a_repeat/_gps_latitude": "1", + "a_repeat/_gps_longitude": "2", + "a_repeat/_gps_altitude": "3", + "a_repeat/_gps_precision": "4", + }, + { + "a_repeat/gps": "5 6 7 8", + "a_repeat/_gps_latitude": "5", + "a_repeat/_gps_longitude": "6", + "a_repeat/_gps_altitude": "7", + "a_repeat/_gps_precision": "8", + }, + ] } # pylint: disable=protected-access AbstractDataFrameBuilder._split_gps_fields(record, gps_fields) @@ -386,24 +409,22 @@ def test_repeat_child_name_matches_repeat(self): # publish form so we have a dd to pass to xform inst. 
parser self._publish_xls_fixture_set_xform(fixture) submission_path = os.path.join( - os.path.dirname(os.path.abspath(__file__)), "fixtures", fixture, - fixture + ".xml") + os.path.dirname(os.path.abspath(__file__)), + "fixtures", + fixture, + fixture + ".xml", + ) # get submission xml str with open(submission_path, "r") as f: xml_str = f.read() xform_instance_dict = xform_instance_to_dict(xml_str, self.xform) expected_dict = { - u'test_item_name_matches_repeat': { - u'formhub': { - u'uuid': u'c911d71ce1ac48478e5f8bac99addc4e' - }, - u'gps': [{ - u'info': u'Yo', - u'gps': u'-1.2625149 36.7924478 0.0 30.0' - }, { - u'info': u'What', - u'gps': u'-1.2625072 36.7924328 0.0 30.0' - }] + "test_item_name_matches_repeat": { + "formhub": {"uuid": "c911d71ce1ac48478e5f8bac99addc4e"}, + "gps": [ + {"info": "Yo", "gps": "-1.2625149 36.7924478 0.0 30.0"}, + {"info": "What", "gps": "-1.2625072 36.7924328 0.0 30.0"}, + ], } } self.assertEqual(xform_instance_dict, expected_dict) @@ -424,10 +445,10 @@ def test_get_prefix_from_xpath(self): """ xpath = "parent/child/grandhild" prefix = get_prefix_from_xpath(xpath) - self.assertEqual(prefix, 'parent/child/') + self.assertEqual(prefix, "parent/child/") xpath = "parent/child" prefix = get_prefix_from_xpath(xpath) - self.assertEqual(prefix, 'parent/') + self.assertEqual(prefix, "parent/") xpath = "parent" prefix = get_prefix_from_xpath(xpath) self.assertTrue(prefix is None) @@ -444,13 +465,20 @@ def test_csv_export(self): for _ in range(2): self._submit_fixture_instance("new_repeats", "01") csv_df_builder = CSVDataFrameBuilder( - self.user.username, self.xform.id_string, include_images=False) + self.user.username, self.xform.id_string, include_images=False + ) # pylint: disable=protected-access - record_count = csv_df_builder._query_data(count=True) + record_count = self.xform.instances.count() self.assertEqual(record_count, 7) temp_file = NamedTemporaryFile(suffix=".csv", delete=False) - csv_df_builder.export_to(temp_file.name) - 
csv_file = open(temp_file.name, 'r') + cursor = ( + self.xform.instances.all() + .order_by("id") + .order_by("id") + .values_list("json", flat=True) + ) + csv_df_builder.export_to(temp_file.name, cursor) + csv_file = open(temp_file.name, "r") csv_reader = csv.reader(csv_file) header = next(csv_reader) self.assertEqual(len(header), 17 + len(csv_df_builder.extra_columns)) @@ -478,17 +506,21 @@ def test_windows_excel_compatible_csv_export(self): self.xform.id_string, remove_group_name=True, include_images=False, - win_excel_utf8=True) + win_excel_utf8=True, + ) # pylint: disable=protected-access - record_count = csv_df_builder._query_data(count=True) + record_count = self.xform.instances.count() self.assertEqual(record_count, 7) temp_file = NamedTemporaryFile(suffix=".csv", delete=False) - csv_df_builder.export_to(temp_file.name) - csv_file = open(temp_file.name, 'r') + cursor = ( + self.xform.instances.all().order_by("id").values_list("json", flat=True) + ) + csv_df_builder.export_to(temp_file.name, cursor) + csv_file = open(temp_file.name, "r") csv_reader = csv.reader(csv_file) header = next(csv_reader) self.assertEqual(len(header), 17 + len(csv_df_builder.extra_columns)) - self.assertEqual(b'\xef\xbb\xbfname', header[0].encode('utf-8')) + self.assertEqual(b"\xef\xbb\xbfname", header[0].encode("utf-8")) # close and delete file csv_file.close() os.unlink(temp_file.name) @@ -503,46 +535,51 @@ def test_csv_column_indices_in_groups_within_repeats(self): data_0 = self._csv_data_for_dataframe()[0] # remove dynamic fields ignore_list = [ - '_uuid', 'meta/instanceID', 'formhub/uuid', '_submission_time', - '_id', '_bamboo_dataset_id', '_date_modified' + "_uuid", + "meta/instanceID", + "formhub/uuid", + "_submission_time", + "_id", + "_bamboo_dataset_id", + "_date_modified", ] for item in ignore_list: data_0.pop(item) expected_data_0 = { - u'_xform_id_string': u'groups_in_repeats', - u'_xform_id': self.xform.pk, - u'_status': u'submitted_via_web', - u'_tags': u'', - 
u'_notes': u'', - u'_version': self.xform.version, - u"_submitted_by": u'bob', - u'name': u'Abe', - u'age': 88, - u'has_children': u'1', - u'children[1]/childs_info/name': u'Cain', - u'children[2]/childs_info/name': u'Abel', - u'children[1]/childs_info/age': 56, - u'children[2]/childs_info/age': 48, - u'children[1]/immunization/immunization_received/polio_1': True, - u'children[1]/immunization/immunization_received/polio_2': False, - u'children[2]/immunization/immunization_received/polio_1': True, - u'children[2]/immunization/immunization_received/polio_2': True, - u'web_browsers/chrome': True, - u'web_browsers/firefox': False, - u'web_browsers/ie': False, - u'web_browsers/safari': False, - u'gps': u'-1.2626156 36.7923571 0.0 30.0', - u'_geolocation': [-1.2626156, 36.7923571], - u'_duration': '', - u'_edited': False, - u'_gps_latitude': u'-1.2626156', - u'_gps_longitude': u'36.7923571', - u'_gps_altitude': u'0.0', - u'_gps_precision': u'30.0', - u'_attachments': [], - u'_total_media': 0, - u'_media_count': 0, - u'_media_all_received': True + "_xform_id_string": "groups_in_repeats", + "_xform_id": self.xform.pk, + "_status": "submitted_via_web", + "_tags": "", + "_notes": "", + "_version": self.xform.version, + "_submitted_by": "bob", + "name": "Abe", + "age": 88, + "has_children": "1", + "children[1]/childs_info/name": "Cain", + "children[2]/childs_info/name": "Abel", + "children[1]/childs_info/age": 56, + "children[2]/childs_info/age": 48, + "children[1]/immunization/immunization_received/polio_1": True, + "children[1]/immunization/immunization_received/polio_2": False, + "children[2]/immunization/immunization_received/polio_1": True, + "children[2]/immunization/immunization_received/polio_2": True, + "web_browsers/chrome": True, + "web_browsers/firefox": False, + "web_browsers/ie": False, + "web_browsers/safari": False, + "gps": "-1.2626156 36.7923571 0.0 30.0", + "_geolocation": [-1.2626156, 36.7923571], + "_duration": "", + "_edited": False, + "_gps_latitude": 
"-1.2626156", + "_gps_longitude": "36.7923571", + "_gps_altitude": "0.0", + "_gps_precision": "30.0", + "_attachments": [], + "_total_media": 0, + "_media_count": 0, + "_media_all_received": True, } self.maxDiff = None self.assertEqual(data_0, expected_data_0) @@ -563,25 +600,53 @@ def test_csv_export_remove_group_name(self): self.xform.id_string, remove_group_name=True, include_images=False, - include_reviews=True) + include_reviews=True, + ) # pylint: disable=protected-access - record_count = csv_df_builder._query_data(count=True) + record_count = self.xform.instances.count() self.assertEqual(record_count, 7) temp_file = NamedTemporaryFile(suffix=".csv", delete=False) - csv_df_builder.export_to(temp_file.name) - csv_file = open(temp_file.name, 'r') + cursor = ( + self.xform.instances.all().order_by("id").values_list("json", flat=True) + ) + csv_df_builder.export_to(temp_file.name, cursor) + csv_file = open(temp_file.name, "r") csv_reader = csv.reader(csv_file) header = next(csv_reader) self.assertEqual(len(header), 17 + len(csv_df_builder.extra_columns)) expected_header = [ - 'name', 'age', 'has_kids', 'kids_name', 'kids_age', 'kids_name', - 'kids_age', 'gps', '_gps_latitude', '_gps_longitude', - '_gps_altitude', '_gps_precision', 'web_browsers/firefox', - 'web_browsers/chrome', 'web_browsers/ie', 'web_browsers/safari', - 'instanceID', '_id', '_uuid', '_submission_time', '_date_modified', - '_tags', '_notes', '_version', '_duration', '_submitted_by', - '_total_media', '_media_count', '_media_all_received', - '_review_status', '_review_comment', '_review_date' + "name", + "age", + "has_kids", + "kids_name", + "kids_age", + "kids_name", + "kids_age", + "gps", + "_gps_latitude", + "_gps_longitude", + "_gps_altitude", + "_gps_precision", + "web_browsers/firefox", + "web_browsers/chrome", + "web_browsers/ie", + "web_browsers/safari", + "instanceID", + "_id", + "_uuid", + "_submission_time", + "_date_modified", + "_tags", + "_notes", + "_version", + "_duration", + 
"_submitted_by", + "_total_media", + "_media_count", + "_media_all_received", + "_review_status", + "_review_comment", + "_review_date", ] self.assertEqual(expected_header, header) rows = [] @@ -604,42 +669,46 @@ def test_remove_group_name_for_gps_within_groups(self): self.xform.id_string, remove_group_name=True, include_images=False, - include_reviews=True) + include_reviews=True, + ) # pylint: disable=protected-access - record_count = csv_df_builder._query_data(count=True) + record_count = self.xform.instances.count() self.assertEqual(record_count, 1) temp_file = NamedTemporaryFile(suffix=".csv", delete=False) - csv_df_builder.export_to(temp_file.name) - csv_file = open(temp_file.name, 'r') + cursor = ( + self.xform.instances.all().order_by("id").values_list("json", flat=True) + ) + csv_df_builder.export_to(temp_file.name, cursor) + csv_file = open(temp_file.name, "r") csv_reader = csv.reader(csv_file) header = next(csv_reader) self.assertEqual(len(header), 10 + len(csv_df_builder.extra_columns)) expected_header = [ - 'gps', - '_gps_latitude', - '_gps_longitude', - '_gps_altitude', - '_gps_precision', - 'web_browsers/firefox', - 'web_browsers/chrome', - 'web_browsers/ie', - 'web_browsers/safari', - 'instanceID', - '_id', - '_uuid', - '_submission_time', - '_date_modified', - '_tags', - '_notes', - '_version', - '_duration', - '_submitted_by', - '_total_media', - '_media_count', - '_media_all_received', - '_review_status', - '_review_comment', - '_review_date' + "gps", + "_gps_latitude", + "_gps_longitude", + "_gps_altitude", + "_gps_precision", + "web_browsers/firefox", + "web_browsers/chrome", + "web_browsers/ie", + "web_browsers/safari", + "instanceID", + "_id", + "_uuid", + "_submission_time", + "_date_modified", + "_tags", + "_notes", + "_version", + "_duration", + "_submitted_by", + "_total_media", + "_media_count", + "_media_all_received", + "_review_status", + "_review_comment", + "_review_date", ] self.assertEqual(expected_header, header) rows = [] @@ 
-666,40 +735,90 @@ def test_csv_export_with_labels(self): self.xform.id_string, remove_group_name=True, include_labels=True, - include_reviews=True) + include_reviews=True, + ) # pylint: disable=protected-access - record_count = csv_df_builder._query_data(count=True) + record_count = self.xform.instances.count() self.assertEqual(record_count, 7) temp_file = NamedTemporaryFile(suffix=".csv", delete=False) - csv_df_builder.export_to(temp_file.name) - csv_file = open(temp_file.name, 'r') + cursor = ( + self.xform.instances.all().order_by("id").values_list("json", flat=True) + ) + csv_df_builder.export_to(temp_file.name, cursor) + csv_file = open(temp_file.name, "r") csv_reader = csv.reader(csv_file) header = next(csv_reader) self.assertEqual(len(header), 17 + len(csv_df_builder.extra_columns)) expected_header = [ - 'name', 'age', 'has_kids', 'kids_name', 'kids_age', 'kids_name', - 'kids_age', 'gps', '_gps_latitude', '_gps_longitude', - '_gps_altitude', '_gps_precision', 'web_browsers/firefox', - 'web_browsers/chrome', 'web_browsers/ie', 'web_browsers/safari', - 'instanceID', '_id', '_uuid', '_submission_time', '_date_modified', - '_tags', '_notes', '_version', '_duration', '_submitted_by', - '_total_media', '_media_count', '_media_all_received', - '_review_status', '_review_comment', '_review_date' + "name", + "age", + "has_kids", + "kids_name", + "kids_age", + "kids_name", + "kids_age", + "gps", + "_gps_latitude", + "_gps_longitude", + "_gps_altitude", + "_gps_precision", + "web_browsers/firefox", + "web_browsers/chrome", + "web_browsers/ie", + "web_browsers/safari", + "instanceID", + "_id", + "_uuid", + "_submission_time", + "_date_modified", + "_tags", + "_notes", + "_version", + "_duration", + "_submitted_by", + "_total_media", + "_media_count", + "_media_all_received", + "_review_status", + "_review_comment", + "_review_date", ] self.assertEqual(expected_header, header) labels = next(csv_reader) self.assertEqual(len(labels), 17 + 
len(csv_df_builder.extra_columns)) expected_labels = [ - 'Name', 'age', 'Do you have kids?', 'Kids Name', 'Kids Age', - 'Kids Name', 'Kids Age', '5. Record your GPS coordinates.', - '_gps_latitude', '_gps_longitude', '_gps_altitude', - '_gps_precision', 'web_browsers/Mozilla Firefox', - 'web_browsers/Google Chrome', 'web_browsers/Internet Explorer', - 'web_browsers/Safari', 'instanceID', '_id', '_uuid', - '_submission_time', '_date_modified', '_tags', '_notes', - '_version', '_duration', '_submitted_by', '_total_media', - '_media_count', '_media_all_received', '_review_status', - '_review_comment', '_review_date' + "Name", + "age", + "Do you have kids?", + "Kids Name", + "Kids Age", + "Kids Name", + "Kids Age", + "5. Record your GPS coordinates.", + "_gps_latitude", + "_gps_longitude", + "_gps_altitude", + "_gps_precision", + "web_browsers/Mozilla Firefox", + "web_browsers/Google Chrome", + "web_browsers/Internet Explorer", + "web_browsers/Safari", + "instanceID", + "_id", + "_uuid", + "_submission_time", + "_date_modified", + "_tags", + "_notes", + "_version", + "_duration", + "_submitted_by", + "_total_media", + "_media_count", + "_media_all_received", + "_review_status", + "_review_comment", + "_review_date", ] self.assertEqual(expected_labels, labels) rows = [] @@ -727,27 +846,53 @@ def test_csv_export_with_labels_only(self): self.xform.id_string, remove_group_name=True, include_labels_only=True, - include_reviews=True) + include_reviews=True, + ) # pylint: disable=protected-access - record_count = csv_df_builder._query_data(count=True) + record_count = self.xform.instances.count() self.assertEqual(record_count, 7) temp_file = NamedTemporaryFile(suffix=".csv", delete=False) - csv_df_builder.export_to(temp_file.name) - csv_file = open(temp_file.name, 'r') + cursor = ( + self.xform.instances.all().order_by("id").values_list("json", flat=True) + ) + csv_df_builder.export_to(temp_file.name, cursor) + csv_file = open(temp_file.name, "r") csv_reader = 
csv.reader(csv_file) labels = next(csv_reader) self.assertEqual(len(labels), 17 + len(csv_df_builder.extra_columns)) expected_labels = [ - 'Name', 'age', 'Do you have kids?', 'Kids Name', 'Kids Age', - 'Kids Name', 'Kids Age', '5. Record your GPS coordinates.', - '_gps_latitude', '_gps_longitude', '_gps_altitude', - '_gps_precision', 'web_browsers/Mozilla Firefox', - 'web_browsers/Google Chrome', 'web_browsers/Internet Explorer', - 'web_browsers/Safari', 'instanceID', '_id', '_uuid', - '_submission_time', '_date_modified', '_tags', '_notes', - '_version', '_duration', '_submitted_by', '_total_media', - '_media_count', '_media_all_received', '_review_status', - '_review_comment', '_review_date' + "Name", + "age", + "Do you have kids?", + "Kids Name", + "Kids Age", + "Kids Name", + "Kids Age", + "5. Record your GPS coordinates.", + "_gps_latitude", + "_gps_longitude", + "_gps_altitude", + "_gps_precision", + "web_browsers/Mozilla Firefox", + "web_browsers/Google Chrome", + "web_browsers/Internet Explorer", + "web_browsers/Safari", + "instanceID", + "_id", + "_uuid", + "_submission_time", + "_date_modified", + "_tags", + "_notes", + "_version", + "_duration", + "_submitted_by", + "_total_media", + "_media_count", + "_media_all_received", + "_review_status", + "_review_comment", + "_review_date", ] self.assertEqual(expected_labels, labels) rows = [] @@ -759,8 +904,7 @@ def test_csv_export_with_labels_only(self): csv_file.close() os.unlink(temp_file.name) - @patch.object(CSVDataFrameBuilder, '_query_data') - def test_no_split_select_multiples(self, mock_query_data): + def test_no_split_select_multiples(self): """ Test select multiples are not split within repeats. 
""" @@ -781,39 +925,45 @@ def test_no_split_select_multiples(self, mock_query_data): | | browsers | ie | Internet Explorer | | | browsers | safari | Safari | """ - xform = self._publish_markdown(md_xform, self.user, id_string='b') - data = [{ - 'name': 'Tom', - 'age': 23, - 'browser_use': [{ - 'browser_use/year': '2010', - 'browser_use/browsers': 'firefox safari' - }, { - 'browser_use/year': '2011', - 'browser_use/browsers': 'firefox chrome' - }] - }] # yapf: disable - mock_query_data.return_value = data + xform = self._publish_markdown(md_xform, self.user, id_string="b") + cursor = [ + { + "name": "Tom", + "age": 23, + "browser_use": [ + { + "browser_use/year": "2010", + "browser_use/browsers": "firefox safari", + }, + { + "browser_use/year": "2011", + "browser_use/browsers": "firefox chrome", + }, + ], + } + ] # yapf: disable + csv_df_builder = CSVDataFrameBuilder( self.user.username, xform.id_string, split_select_multiples=False, - include_images=False) - # pylint: disable=protected-access - cursor = [row for row in csv_df_builder._query_data()] + include_images=False, + ) result = [k for k in csv_df_builder._format_for_dataframe(cursor)] - expected_result = [{ - 'name': 'Tom', - 'age': 23, - 'browser_use[1]/year': '2010', - 'browser_use[1]/browsers': 'firefox safari', - 'browser_use[2]/year': '2011', - 'browser_use[2]/browsers': 'firefox chrome' - }] + expected_result = [ + { + "name": "Tom", + "age": 23, + "browser_use[1]/year": "2010", + "browser_use[1]/browsers": "firefox safari", + "browser_use[2]/year": "2011", + "browser_use[2]/browsers": "firefox chrome", + } + ] self.maxDiff = None self.assertEqual(expected_result, result) - @override_settings(EXTRA_COLUMNS=['_xform_id']) + @override_settings(EXTRA_COLUMNS=["_xform_id"]) def test_csv_export_extra_columns(self): """ Test CSV export EXTRA_COLUMNS @@ -830,22 +980,51 @@ def test_csv_export_extra_columns(self): self.xform.id_string, remove_group_name=True, include_labels=True, - include_reviews=True) + 
include_reviews=True, + ) temp_file = NamedTemporaryFile(suffix=".csv", delete=False) - csv_df_builder.export_to(temp_file.name) - csv_file = open(temp_file.name, 'r') + cursor = ( + self.xform.instances.all().order_by("id").values_list("json", flat=True) + ) + csv_df_builder.export_to(temp_file.name, cursor) + csv_file = open(temp_file.name, "r") csv_reader = csv.reader(csv_file) header = next(csv_reader) self.assertEqual(len(header), 17 + len(csv_df_builder.extra_columns)) expected_header = [ - 'name', 'age', 'has_kids', 'kids_name', 'kids_age', 'kids_name', - 'kids_age', 'gps', '_gps_latitude', '_gps_longitude', - '_gps_altitude', '_gps_precision', 'web_browsers/firefox', - 'web_browsers/chrome', 'web_browsers/ie', 'web_browsers/safari', - 'instanceID', '_id', '_uuid', '_submission_time', '_date_modified', - '_tags', '_notes', '_version', '_duration', '_submitted_by', - '_total_media', '_media_count', '_media_all_received', '_xform_id', - '_review_status', '_review_comment', '_review_date' + "name", + "age", + "has_kids", + "kids_name", + "kids_age", + "kids_name", + "kids_age", + "gps", + "_gps_latitude", + "_gps_longitude", + "_gps_altitude", + "_gps_precision", + "web_browsers/firefox", + "web_browsers/chrome", + "web_browsers/ie", + "web_browsers/safari", + "instanceID", + "_id", + "_uuid", + "_submission_time", + "_date_modified", + "_tags", + "_notes", + "_version", + "_duration", + "_submitted_by", + "_total_media", + "_media_count", + "_media_all_received", + "_xform_id", + "_review_status", + "_review_comment", + "_review_date", ] self.assertEqual(expected_header, header) # close and delete file @@ -857,10 +1036,14 @@ def test_csv_export_extra_columns(self): self.xform.id_string, remove_group_name=True, include_labels=True, - include_reviews=True) + include_reviews=True, + ) temp_file = NamedTemporaryFile(suffix=".csv", delete=False) - csv_df_builder.export_to(temp_file.name) - csv_file = open(temp_file.name, 'r') + cursor = ( + 
self.xform.instances.all().order_by("id").values_list("json", flat=True) + ) + csv_df_builder.export_to(temp_file.name, cursor) + csv_file = open(temp_file.name, "r") csv_reader = csv.reader(csv_file) header = next(csv_reader) self.assertEqual(len(header), 17 + len(csv_df_builder.extra_columns)) @@ -878,59 +1061,68 @@ def test_index_tag_replacement(self): self.xform.get_keys() csv_df_builder = CSVDataFrameBuilder( - self.user.username, self.xform.id_string, include_images=False, - index_tags=('_', '_')) - cursor = csv_df_builder._query_data() + self.user.username, + self.xform.id_string, + include_images=False, + index_tags=("_", "_"), + ) + cursor = ( + self.xform.instances.all().order_by("id").values_list("json", flat=True) + ) result = [d for d in csv_df_builder._format_for_dataframe(cursor)][0] # remove dynamic fields ignore_list = [ - '_uuid', 'meta/instanceID', 'formhub/uuid', '_submission_time', - '_id', '_bamboo_dataset_id', '_date_modified' + "_uuid", + "meta/instanceID", + "formhub/uuid", + "_submission_time", + "_id", + "_bamboo_dataset_id", + "_date_modified", ] for item in ignore_list: result.pop(item) expected_result = { - u'_xform_id_string': u'groups_in_repeats', - u'_xform_id': self.xform.pk, - u'_status': u'submitted_via_web', - u'_tags': u'', - u'_notes': u'', - u'_version': self.xform.version, - u"_submitted_by": u'bob', - u'name': u'Abe', - u'age': 88, - u'has_children': u'1', - u'children_1_/childs_info/name': u'Cain', - u'children_2_/childs_info/name': u'Abel', - u'children_1_/childs_info/age': 56, - u'children_2_/childs_info/age': 48, - u'children_1_/immunization/immunization_received/polio_1': True, - u'children_1_/immunization/immunization_received/polio_2': False, - u'children_2_/immunization/immunization_received/polio_1': True, - u'children_2_/immunization/immunization_received/polio_2': True, - u'web_browsers/chrome': True, - u'web_browsers/firefox': False, - u'web_browsers/ie': False, - u'web_browsers/safari': False, - u'gps': 
u'-1.2626156 36.7923571 0.0 30.0', - u'_geolocation': [-1.2626156, 36.7923571], - u'_duration': '', - u'_edited': False, - u'_gps_latitude': u'-1.2626156', - u'_gps_longitude': u'36.7923571', - u'_gps_altitude': u'0.0', - u'_gps_precision': u'30.0', - u'_attachments': [], - u'_total_media': 0, - u'_media_count': 0, - u'_media_all_received': True + "_xform_id_string": "groups_in_repeats", + "_xform_id": self.xform.pk, + "_status": "submitted_via_web", + "_tags": "", + "_notes": "", + "_version": self.xform.version, + "_submitted_by": "bob", + "name": "Abe", + "age": 88, + "has_children": "1", + "children_1_/childs_info/name": "Cain", + "children_2_/childs_info/name": "Abel", + "children_1_/childs_info/age": 56, + "children_2_/childs_info/age": 48, + "children_1_/immunization/immunization_received/polio_1": True, + "children_1_/immunization/immunization_received/polio_2": False, + "children_2_/immunization/immunization_received/polio_1": True, + "children_2_/immunization/immunization_received/polio_2": True, + "web_browsers/chrome": True, + "web_browsers/firefox": False, + "web_browsers/ie": False, + "web_browsers/safari": False, + "gps": "-1.2626156 36.7923571 0.0 30.0", + "_geolocation": [-1.2626156, 36.7923571], + "_duration": "", + "_edited": False, + "_gps_latitude": "-1.2626156", + "_gps_longitude": "36.7923571", + "_gps_altitude": "0.0", + "_gps_precision": "30.0", + "_attachments": [], + "_total_media": 0, + "_media_count": 0, + "_media_all_received": True, } self.maxDiff = None self.assertEqual(expected_result, result) - @patch.object(CSVDataFrameBuilder, '_query_data') - def test_show_choice_labels_multi_language(self, mock_query_data): + def test_show_choice_labels_multi_language(self): """ Test show_choice_labels=true for select one questions - multi language form. 
@@ -947,31 +1139,22 @@ def test_show_choice_labels_multi_language(self, mock_query_data): | | fruits | 2 | Orange | Orange | | | fruits | 3 | Apple | Pomme | """ - xform = self._publish_markdown(md_xform, self.user, id_string='b') - data = [{ - 'name': 'Maria', - 'age': 25, - 'fruit': '1' - }] # yapf: disable - mock_query_data.return_value = data + xform = self._publish_markdown(md_xform, self.user, id_string="b") + cursor = [{"name": "Maria", "age": 25, "fruit": "1"}] # yapf: disable csv_df_builder = CSVDataFrameBuilder( self.user.username, xform.id_string, split_select_multiples=False, - include_images=False, show_choice_labels=True, language='French') - # pylint: disable=protected-access - cursor = [row for row in csv_df_builder._query_data()] + include_images=False, + show_choice_labels=True, + language="French", + ) result = [k for k in csv_df_builder._format_for_dataframe(cursor)] - expected_result = [{ - 'name': 'Maria', - 'age': 25, - 'fruit': 'Mangue' - }] + expected_result = [{"name": "Maria", "age": 25, "fruit": "Mangue"}] self.maxDiff = None self.assertEqual(expected_result, result) - @patch.object(CSVDataFrameBuilder, '_query_data') - def test_show_choice_labels_multi_language_1(self, mock_query_data): + def test_show_choice_labels_multi_language_1(self): """ Test show_choice_labels=true for select one questions - multi language form selected language. 
@@ -988,31 +1171,23 @@ def test_show_choice_labels_multi_language_1(self, mock_query_data): | | fruits | 2 | Orange | Orange | | | fruits | 3 | Apple | Pomme | """ - xform = self._publish_markdown(md_xform, self.user, id_string='b') - data = [{ - 'name': 'Maria', - 'age': 25, - 'fruit': '1' - }] # yapf: disable - mock_query_data.return_value = data + xform = self._publish_markdown(md_xform, self.user, id_string="b") + cursor = [{"name": "Maria", "age": 25, "fruit": "1"}] # yapf: disable csv_df_builder = CSVDataFrameBuilder( self.user.username, xform.id_string, split_select_multiples=False, - include_images=False, show_choice_labels=True, language='English') + include_images=False, + show_choice_labels=True, + language="English", + ) # pylint: disable=protected-access - cursor = [row for row in csv_df_builder._query_data()] result = [k for k in csv_df_builder._format_for_dataframe(cursor)] - expected_result = [{ - 'name': 'Maria', - 'age': 25, - 'fruit': 'Mango' - }] + expected_result = [{"name": "Maria", "age": 25, "fruit": "Mango"}] self.maxDiff = None self.assertEqual(expected_result, result) - @patch.object(CSVDataFrameBuilder, '_query_data') - def test_show_choice_labels(self, mock_query_data): + def test_show_choice_labels(self): """ Test show_choice_labels=true for select one questions. 
""" @@ -1028,31 +1203,21 @@ def test_show_choice_labels(self, mock_query_data): | | fruits | 2 | Orange | | | fruits | 3 | Apple | """ - xform = self._publish_markdown(md_xform, self.user, id_string='b') - data = [{ - 'name': 'Maria', - 'age': 25, - 'fruit': '1' - }] # yapf: disable - mock_query_data.return_value = data + xform = self._publish_markdown(md_xform, self.user, id_string="b") + cursor = [{"name": "Maria", "age": 25, "fruit": "1"}] # yapf: disable csv_df_builder = CSVDataFrameBuilder( self.user.username, xform.id_string, split_select_multiples=False, - include_images=False, show_choice_labels=True) - # pylint: disable=protected-access - cursor = [row for row in csv_df_builder._query_data()] + include_images=False, + show_choice_labels=True, + ) result = [k for k in csv_df_builder._format_for_dataframe(cursor)] - expected_result = [{ - 'name': 'Maria', - 'age': 25, - 'fruit': 'Mango' - }] + expected_result = [{"name": "Maria", "age": 25, "fruit": "Mango"}] self.maxDiff = None self.assertEqual(expected_result, result) - @patch.object(CSVDataFrameBuilder, '_query_data') - def test_show_choice_labels_select_multiple(self, mock_query_data): + def test_show_choice_labels_select_multiple(self): """ Test show_choice_labels=true for select multiple questions. 
""" @@ -1068,32 +1233,21 @@ def test_show_choice_labels_select_multiple(self, mock_query_data): | | fruits | 2 | Orange | | | fruits | 3 | Apple | """ - xform = self._publish_markdown(md_xform, self.user, id_string='b') - data = [{ - 'name': 'Maria', - 'age': 25, - 'fruit': '1 2' - }] # yapf: disable - mock_query_data.return_value = data + xform = self._publish_markdown(md_xform, self.user, id_string="b") + cursor = [{"name": "Maria", "age": 25, "fruit": "1 2"}] # yapf: disable csv_df_builder = CSVDataFrameBuilder( self.user.username, xform.id_string, split_select_multiples=False, - include_images=False, show_choice_labels=True) - # pylint: disable=protected-access - cursor = [row for row in csv_df_builder._query_data()] + include_images=False, + show_choice_labels=True, + ) result = [k for k in csv_df_builder._format_for_dataframe(cursor)] - expected_result = [{ - 'name': 'Maria', - 'age': 25, - 'fruit': 'Mango Orange' - }] + expected_result = [{"name": "Maria", "age": 25, "fruit": "Mango Orange"}] self.maxDiff = None self.assertEqual(expected_result, result) - @patch.object(CSVDataFrameBuilder, '_query_data') - def test_show_choice_labels_select_multiple_language(self, - mock_query_data): + def test_show_choice_labels_select_multiple_language(self): """ Test show_choice_labels=true for select multiple questions - multi language form. 
@@ -1110,31 +1264,22 @@ def test_show_choice_labels_select_multiple_language(self, | | fruits | 2 | Orange | Orange | | | fruits | 3 | Apple | Pomme | """ - xform = self._publish_markdown(md_xform, self.user, id_string='b') - data = [{ - 'name': 'Maria', - 'age': 25, - 'fruit': '1 2' - }] # yapf: disable - mock_query_data.return_value = data + xform = self._publish_markdown(md_xform, self.user, id_string="b") + cursor = [{"name": "Maria", "age": 25, "fruit": "1 2"}] # yapf: disable csv_df_builder = CSVDataFrameBuilder( self.user.username, xform.id_string, split_select_multiples=False, - include_images=False, show_choice_labels=True, language='Fr') - # pylint: disable=protected-access - cursor = [row for row in csv_df_builder._query_data()] + include_images=False, + show_choice_labels=True, + language="Fr", + ) result = [k for k in csv_df_builder._format_for_dataframe(cursor)] - expected_result = [{ - 'name': 'Maria', - 'age': 25, - 'fruit': 'Mangue Orange' - }] + expected_result = [{"name": "Maria", "age": 25, "fruit": "Mangue Orange"}] self.maxDiff = None self.assertEqual(expected_result, result) - @patch.object(CSVDataFrameBuilder, '_query_data') - def test_show_choice_labels_select_multiple_1(self, mock_query_data): + def test_show_choice_labels_select_multiple_1(self): """ Test show_choice_labels=true, split_select_multiples=true and value_select_multiples=true for select multiple questions. 
@@ -1151,35 +1296,30 @@ def test_show_choice_labels_select_multiple_1(self, mock_query_data): | | fruits | 2 | Orange | | | fruits | 3 | Apple | """ - xform = self._publish_markdown(md_xform, self.user, id_string='b') - data = [{ - 'name': 'Maria', - 'age': 25, - 'fruit': '1 2' - }] # yapf: disable - mock_query_data.return_value = data - + xform = self._publish_markdown(md_xform, self.user, id_string="b") + cursor = [{"name": "Maria", "age": 25, "fruit": "1 2"}] # yapf: disable # Split Select multiples, value_select_multiples is True csv_df_builder = CSVDataFrameBuilder( self.user.username, xform.id_string, - split_select_multiples=True, value_select_multiples=True, - include_images=False, show_choice_labels=True) - # pylint: disable=protected-access - cursor = [row for row in csv_df_builder._query_data()] + split_select_multiples=True, + value_select_multiples=True, + include_images=False, + show_choice_labels=True, + ) result = [k for k in csv_df_builder._format_for_dataframe(cursor)] - expected_result = [{ - 'name': 'Maria', - 'age': 25, - 'fruit/Mango': 'Mango', - 'fruit/Orange': 'Orange', - 'fruit/Apple': None - }] + expected_result = [ + { + "name": "Maria", + "age": 25, + "fruit/Mango": "Mango", + "fruit/Orange": "Orange", + "fruit/Apple": None, + } + ] self.assertEqual(expected_result, result) - @patch.object(CSVDataFrameBuilder, '_query_data') - def test_show_choice_labels_select_multiple_1_language(self, - mock_query_data): + def test_show_choice_labels_select_multiple_1_language(self): """ Test show_choice_labels=true, split_select_multiples=true and value_select_multiples=true for select multiple questions - multi @@ -1197,34 +1337,31 @@ def test_show_choice_labels_select_multiple_1_language(self, | | fruits | 2 | Orange | Orange | | | fruits | 3 | Apple | Pomme | """ - xform = self._publish_markdown(md_xform, self.user, id_string='b') - data = [{ - 'name': 'Maria', - 'age': 25, - 'fruit': '1 2' - }] # yapf: disable - mock_query_data.return_value = data 
- + xform = self._publish_markdown(md_xform, self.user, id_string="b") + cursor = [{"name": "Maria", "age": 25, "fruit": "1 2"}] # yapf: disable # Split Select multiples, value_select_multiples is True csv_df_builder = CSVDataFrameBuilder( self.user.username, xform.id_string, - split_select_multiples=True, value_select_multiples=True, - include_images=False, show_choice_labels=True, language='Fr') - # pylint: disable=protected-access - cursor = [row for row in csv_df_builder._query_data()] + split_select_multiples=True, + value_select_multiples=True, + include_images=False, + show_choice_labels=True, + language="Fr", + ) result = [k for k in csv_df_builder._format_for_dataframe(cursor)] - expected_result = [{ - 'name': 'Maria', - 'age': 25, - 'fruit/Mangue': 'Mangue', - 'fruit/Orange': 'Orange', - 'fruit/Pomme': None - }] + expected_result = [ + { + "name": "Maria", + "age": 25, + "fruit/Mangue": "Mangue", + "fruit/Orange": "Orange", + "fruit/Pomme": None, + } + ] self.assertEqual(expected_result, result) - @patch.object(CSVDataFrameBuilder, '_query_data') - def test_show_choice_labels_select_multiple_2(self, mock_query_data): + def test_show_choice_labels_select_multiple_2(self): """ Test show_choice_labels=true, split_select_multiples=true, binary_select_multiples=true for select multiple questions. 
@@ -1241,30 +1378,27 @@ def test_show_choice_labels_select_multiple_2(self, mock_query_data): | | fruits | 2 | Orange | | | fruits | 3 | Apple | """ - xform = self._publish_markdown(md_xform, self.user, id_string='b') - data = [{ - 'name': 'Maria', - 'age': 25, - 'fruit': '1 2' - }] # yapf: disable - mock_query_data.return_value = data - + xform = self._publish_markdown(md_xform, self.user, id_string="b") + cursor = [{"name": "Maria", "age": 25, "fruit": "1 2"}] # yapf: disable # Split Select multiples, binary_select_multiples is True csv_df_builder_1 = CSVDataFrameBuilder( self.user.username, xform.id_string, - split_select_multiples=True, binary_select_multiples=True, - include_images=False, show_choice_labels=True) - # pylint: disable=protected-access - cursor = [row for row in csv_df_builder_1._query_data()] + split_select_multiples=True, + binary_select_multiples=True, + include_images=False, + show_choice_labels=True, + ) result = [k for k in csv_df_builder_1._format_for_dataframe(cursor)] - expected_result = [{ - 'name': 'Maria', - 'age': 25, - 'fruit/Mango': 1, - 'fruit/Orange': 1, - 'fruit/Apple': 0 - }] + expected_result = [ + { + "name": "Maria", + "age": 25, + "fruit/Mango": 1, + "fruit/Orange": 1, + "fruit/Apple": 0, + } + ] self.assertEqual(expected_result, result) def test_export_data_for_xforms_without_submissions(self): @@ -1280,21 +1414,44 @@ def test_export_data_for_xforms_without_submissions(self): self.assertEqual(self.xform.instances.count(), 0) # Generate csv export for form csv_df_builder = CSVDataFrameBuilder( - self.user.username, self.xform.id_string, include_images=False) + self.user.username, self.xform.id_string, include_images=False + ) temp_file = NamedTemporaryFile(suffix=".csv", delete=False) - csv_df_builder.export_to(temp_file.name) - csv_file = open(temp_file.name, 'r') + cursor = ( + self.xform.instances.all().order_by("id").values_list("json", flat=True) + ) + csv_df_builder.export_to(temp_file.name, cursor) + csv_file = 
open(temp_file.name, "r") csv_reader = csv.reader(csv_file) header = next(csv_reader) expected_header = [ - 'info/name', 'info/age', 'kids/has_kids', 'gps', '_gps_latitude', - '_gps_longitude', '_gps_altitude', '_gps_precision', - 'web_browsers/firefox', 'web_browsers/chrome', 'web_browsers/ie', - 'web_browsers/safari', 'meta/instanceID', '_id', '_uuid', - '_submission_time', '_date_modified', '_tags', '_notes', - '_version', '_duration', '_submitted_by', '_total_media', - '_media_count', '_media_all_received'] + "info/name", + "info/age", + "kids/has_kids", + "gps", + "_gps_latitude", + "_gps_longitude", + "_gps_altitude", + "_gps_precision", + "web_browsers/firefox", + "web_browsers/chrome", + "web_browsers/ie", + "web_browsers/safari", + "meta/instanceID", + "_id", + "_uuid", + "_submission_time", + "_date_modified", + "_tags", + "_notes", + "_version", + "_duration", + "_submitted_by", + "_total_media", + "_media_count", + "_media_all_received", + ] # Test form headers are present on exported csv file. 
self.assertEqual(header, expected_header) @@ -1313,21 +1470,44 @@ def test_export_data_for_xforms_with_newer_submissions(self): self.assertEqual(self.xform.instances.count(), 0) # Generate csv export for form csv_df_builder = CSVDataFrameBuilder( - self.user.username, self.xform.id_string, include_images=False) + self.user.username, self.xform.id_string, include_images=False + ) temp_file = NamedTemporaryFile(suffix=".csv", delete=False) - csv_df_builder.export_to(temp_file.name) - csv_file = open(temp_file.name, 'r') + cursor = ( + self.xform.instances.all().order_by("id").values_list("json", flat=True) + ) + csv_df_builder.export_to(temp_file.name, cursor) + csv_file = open(temp_file.name, "r") csv_reader = csv.reader(csv_file) header = next(csv_reader) expected_header = [ - 'info/name', 'info/age', 'kids/has_kids', 'gps', '_gps_latitude', - '_gps_longitude', '_gps_altitude', '_gps_precision', - 'web_browsers/firefox', 'web_browsers/chrome', 'web_browsers/ie', - 'web_browsers/safari', 'meta/instanceID', '_id', '_uuid', - '_submission_time', '_date_modified', '_tags', '_notes', - '_version', '_duration', '_submitted_by', '_total_media', - '_media_count', '_media_all_received'] + "info/name", + "info/age", + "kids/has_kids", + "gps", + "_gps_latitude", + "_gps_longitude", + "_gps_altitude", + "_gps_precision", + "web_browsers/firefox", + "web_browsers/chrome", + "web_browsers/ie", + "web_browsers/safari", + "meta/instanceID", + "_id", + "_uuid", + "_submission_time", + "_date_modified", + "_tags", + "_notes", + "_version", + "_duration", + "_submitted_by", + "_total_media", + "_media_count", + "_media_all_received", + ] # Test form headers are present on exported csv file. 
self.assertEqual(header, expected_header) @@ -1336,23 +1516,47 @@ def test_export_data_for_xforms_with_newer_submissions(self): self._submit_fixture_instance("new_repeats", "01") self._submit_fixture_instance("new_repeats", "02") # pylint: disable=protected-access - record_count = csv_df_builder._query_data(count=True) + record_count = self.xform.instances.count() self.assertEqual(record_count, 5) temp_file = NamedTemporaryFile(suffix=".csv", delete=False) - csv_df_builder.export_to(temp_file.name) - csv_file = open(temp_file.name, 'r') + cursor = ( + self.xform.instances.all().order_by("id").values_list("json", flat=True) + ) + csv_df_builder.export_to(temp_file.name, cursor) + csv_file = open(temp_file.name, "r") csv_reader = csv.reader(csv_file) newer_header = next(csv_reader) expected_headers = [ - 'info/name', 'info/age', 'kids/has_kids', - 'kids/kids_details[1]/kids_name', 'kids/kids_details[1]/kids_age', - 'kids/kids_details[2]/kids_name', 'kids/kids_details[2]/kids_age', - 'gps', '_gps_latitude', '_gps_longitude', '_gps_altitude', - '_gps_precision', 'web_browsers/firefox', 'web_browsers/chrome', - 'web_browsers/ie', 'web_browsers/safari', 'meta/instanceID', '_id', - '_uuid', '_submission_time', '_date_modified', '_tags', '_notes', - '_version', '_duration', '_submitted_by', '_total_media', - '_media_count', '_media_all_received'] + "info/name", + "info/age", + "kids/has_kids", + "kids/kids_details[1]/kids_name", + "kids/kids_details[1]/kids_age", + "kids/kids_details[2]/kids_name", + "kids/kids_details[2]/kids_age", + "gps", + "_gps_latitude", + "_gps_longitude", + "_gps_altitude", + "_gps_precision", + "web_browsers/firefox", + "web_browsers/chrome", + "web_browsers/ie", + "web_browsers/safari", + "meta/instanceID", + "_id", + "_uuid", + "_submission_time", + "_date_modified", + "_tags", + "_notes", + "_version", + "_duration", + "_submitted_by", + "_total_media", + "_media_count", + "_media_all_received", + ] # Test export headers are recreated with 
repeat data. self.assertEqual(newer_header, expected_headers) @@ -1367,29 +1571,7 @@ def test_export_data_for_xforms_with_newer_submissions(self): # close and delete file csv_file.close() - def test_export_raises_NoRecordsFound_for_form_without_instances(self): - """ - Test exporting records for forms without submissions raises - NorecordsFound exception. - """ - fixture = "new_repeats" - # publish form so we have a dd - self._publish_xls_fixture_set_xform(fixture) - - # Confirm form has not submissions so far - self.assertEqual(self.xform.instances.count(), 0) - # Generate csv export for form - csv_df_builder_1 = CSVDataFrameBuilder( - self.user.username, - self.xform.id_string, - split_select_multiples=True, binary_select_multiples=False, - include_images=False, show_choice_labels=True) - # Fetch form data throws NoRecordsFound exeption - with self.assertRaises(NoRecordsFoundError): - csv_df_builder_1._query_data() - - @patch.object(CSVDataFrameBuilder, '_query_data') - def test_show_choice_labels_select_multiple_3(self, mock_query_data): + def test_show_choice_labels_select_multiple_3(self): """ Test show_choice_labels=true, split_select_multiples=true, binary_select_multiples=false for select multiple questions. 
@@ -1406,34 +1588,30 @@ def test_show_choice_labels_select_multiple_3(self, mock_query_data): | | fruits | 2 | Orange | | | fruits | 3 | Apple | """ - xform = self._publish_markdown(md_xform, self.user, id_string='b') - data = [{ - 'name': 'Maria', - 'age': 25, - 'fruit': '1 2' - }] # yapf: disable - mock_query_data.return_value = data - + xform = self._publish_markdown(md_xform, self.user, id_string="b") + cursor = [{"name": "Maria", "age": 25, "fruit": "1 2"}] # yapf: disable # Split Select multiples, binary_select_multiples is True csv_df_builder_1 = CSVDataFrameBuilder( self.user.username, xform.id_string, - split_select_multiples=True, binary_select_multiples=False, - include_images=False, show_choice_labels=True) - # pylint: disable=protected-access - cursor = [row for row in csv_df_builder_1._query_data()] + split_select_multiples=True, + binary_select_multiples=False, + include_images=False, + show_choice_labels=True, + ) result = [k for k in csv_df_builder_1._format_for_dataframe(cursor)] - expected_result = [{ - 'name': 'Maria', - 'age': 25, - 'fruit/Mango': True, - 'fruit/Orange': True, - 'fruit/Apple': 0 - }] + expected_result = [ + { + "name": "Maria", + "age": 25, + "fruit/Mango": True, + "fruit/Orange": True, + "fruit/Apple": 0, + } + ] self.assertEqual(expected_result, result) - @patch.object(CSVDataFrameBuilder, '_query_data') - def test_multiple_repeats_column_order(self, mock_query_data): + def test_multiple_repeats_column_order(self): """Test the order of the columns in a multiple repeats form export""" md_xform = """ | survey | @@ -1460,79 +1638,102 @@ def test_multiple_repeats_column_order(self, mock_query_data): | | food | Salad | Salad | | | | | food | Sandwich | Sandwich | | | """ # noqa: E501 - self.xform = self._publish_markdown(md_xform, self.user, id_string='b') - - data = [{ - 'food': 'Orange', - 'no_food': 2, - 'food_repeat': [{ - 'food_repeat/food_group': 'Banana' - }, { - 'food_repeat/food_group': 'Lasgna' - }] - }, { - 'food': 
'Apple', - 'no_food_2': 2, - 'food_repeat_2': [{ - 'food_repeat_2/food_group_2': 'Cake' - }, { - 'food_repeat_2/food_group_2': 'Salad' - }] - }] - - mock_query_data.return_value = data + self.xform = self._publish_markdown(md_xform, self.user, id_string="b") + cursor = [ + { + "food": "Orange", + "no_food": 2, + "food_repeat": [ + {"food_repeat/food_group": "Banana"}, + {"food_repeat/food_group": "Lasgna"}, + ], + }, + { + "food": "Apple", + "no_food_2": 2, + "food_repeat_2": [ + {"food_repeat_2/food_group_2": "Cake"}, + {"food_repeat_2/food_group_2": "Salad"}, + ], + }, + ] expected_header = [ - 'food/Apple', 'food/Orange', 'food/Banana', 'food/Pizza', - 'food/Lasgna', 'food/Cake', 'food/Chocolate', 'food/Salad', - 'food/Sandwich', 'no_food', 'food_repeat_count', - 'food_repeat[1]/food_group/Apple', - 'food_repeat[1]/food_group/Orange', - 'food_repeat[1]/food_group/Banana', - 'food_repeat[1]/food_group/Pizza', - 'food_repeat[1]/food_group/Lasgna', - 'food_repeat[1]/food_group/Cake', - 'food_repeat[1]/food_group/Chocolate', - 'food_repeat[1]/food_group/Salad', - 'food_repeat[1]/food_group/Sandwich', - 'food_repeat[2]/food_group/Apple', - 'food_repeat[2]/food_group/Orange', - 'food_repeat[2]/food_group/Banana', - 'food_repeat[2]/food_group/Pizza', - 'food_repeat[2]/food_group/Lasgna', - 'food_repeat[2]/food_group/Cake', - 'food_repeat[2]/food_group/Chocolate', - 'food_repeat[2]/food_group/Salad', - 'food_repeat[2]/food_group/Sandwich', 'no_food_2', - 'food_repeat_2_count', 'food_repeat_2[1]/food_group_2/Apple', - 'food_repeat_2[1]/food_group_2/Orange', - 'food_repeat_2[1]/food_group_2/Banana', - 'food_repeat_2[1]/food_group_2/Pizza', - 'food_repeat_2[1]/food_group_2/Lasgna', - 'food_repeat_2[1]/food_group_2/Cake', - 'food_repeat_2[1]/food_group_2/Chocolate', - 'food_repeat_2[1]/food_group_2/Salad', - 'food_repeat_2[1]/food_group_2/Sandwich', - 'food_repeat_2[2]/food_group_2/Apple', - 'food_repeat_2[2]/food_group_2/Orange', - 'food_repeat_2[2]/food_group_2/Banana', - 
'food_repeat_2[2]/food_group_2/Pizza', - 'food_repeat_2[2]/food_group_2/Lasgna', - 'food_repeat_2[2]/food_group_2/Cake', - 'food_repeat_2[2]/food_group_2/Chocolate', - 'food_repeat_2[2]/food_group_2/Salad', - 'food_repeat_2[2]/food_group_2/Sandwich', 'gps', - '_gps_latitude', '_gps_longitude', '_gps_altitude', - '_gps_precision', 'meta/instanceID', '_id', '_uuid', - '_submission_time', '_date_modified', '_tags', - '_notes', '_version', '_duration', '_submitted_by', - '_total_media', '_media_count', '_media_all_received'] + "food/Apple", + "food/Orange", + "food/Banana", + "food/Pizza", + "food/Lasgna", + "food/Cake", + "food/Chocolate", + "food/Salad", + "food/Sandwich", + "no_food", + "food_repeat_count", + "food_repeat[1]/food_group/Apple", + "food_repeat[1]/food_group/Orange", + "food_repeat[1]/food_group/Banana", + "food_repeat[1]/food_group/Pizza", + "food_repeat[1]/food_group/Lasgna", + "food_repeat[1]/food_group/Cake", + "food_repeat[1]/food_group/Chocolate", + "food_repeat[1]/food_group/Salad", + "food_repeat[1]/food_group/Sandwich", + "food_repeat[2]/food_group/Apple", + "food_repeat[2]/food_group/Orange", + "food_repeat[2]/food_group/Banana", + "food_repeat[2]/food_group/Pizza", + "food_repeat[2]/food_group/Lasgna", + "food_repeat[2]/food_group/Cake", + "food_repeat[2]/food_group/Chocolate", + "food_repeat[2]/food_group/Salad", + "food_repeat[2]/food_group/Sandwich", + "no_food_2", + "food_repeat_2_count", + "food_repeat_2[1]/food_group_2/Apple", + "food_repeat_2[1]/food_group_2/Orange", + "food_repeat_2[1]/food_group_2/Banana", + "food_repeat_2[1]/food_group_2/Pizza", + "food_repeat_2[1]/food_group_2/Lasgna", + "food_repeat_2[1]/food_group_2/Cake", + "food_repeat_2[1]/food_group_2/Chocolate", + "food_repeat_2[1]/food_group_2/Salad", + "food_repeat_2[1]/food_group_2/Sandwich", + "food_repeat_2[2]/food_group_2/Apple", + "food_repeat_2[2]/food_group_2/Orange", + "food_repeat_2[2]/food_group_2/Banana", + "food_repeat_2[2]/food_group_2/Pizza", + 
"food_repeat_2[2]/food_group_2/Lasgna", + "food_repeat_2[2]/food_group_2/Cake", + "food_repeat_2[2]/food_group_2/Chocolate", + "food_repeat_2[2]/food_group_2/Salad", + "food_repeat_2[2]/food_group_2/Sandwich", + "gps", + "_gps_latitude", + "_gps_longitude", + "_gps_altitude", + "_gps_precision", + "meta/instanceID", + "_id", + "_uuid", + "_submission_time", + "_date_modified", + "_tags", + "_notes", + "_version", + "_duration", + "_submitted_by", + "_total_media", + "_media_count", + "_media_all_received", + ] csv_df_builder = CSVDataFrameBuilder( - self.user.username, self.xform.id_string, include_images=False) + self.user.username, self.xform.id_string, include_images=False + ) temp_file = NamedTemporaryFile(suffix=".csv", delete=False) - csv_df_builder.export_to(temp_file.name) - csv_file = open(temp_file.name, 'r') + csv_df_builder.export_to(temp_file.name, cursor) + csv_file = open(temp_file.name, "r") csv_reader = csv.reader(csv_file) header = next(csv_reader) @@ -1540,8 +1741,7 @@ def test_multiple_repeats_column_order(self, mock_query_data): csv_file.close() - @patch.object(CSVDataFrameBuilder, "_query_data") - def test_split_select_multiples_with_randomize(self, mock_query_data): + def test_split_select_multiples_with_randomize(self): """ Test select multiples choices are split with the randomize option true. 
""" @@ -1563,7 +1763,7 @@ def test_split_select_multiples_with_randomize(self, mock_query_data): | | browsers | safari | Safari | """ # noqa: E501 xform = self._publish_markdown(md_xform, self.user, id_string="b") - data = [ + cursor = [ { "name": "Tom", "age": 23, @@ -1579,42 +1779,29 @@ def test_split_select_multiples_with_randomize(self, mock_query_data): ], } ] # yapf: disable - mock_query_data.return_value = data csv_df_builder = CSVDataFrameBuilder( self.user.username, xform.id_string, split_select_multiples=True, include_images=False, ) - # pylint: disable=protected-access - cursor = [k for k in csv_df_builder._query_data()] record = cursor[0] select_multiples = CSVDataFrameBuilder._collect_select_multiples(xform) - result = CSVDataFrameBuilder._split_select_multiples( - record, select_multiples) + result = CSVDataFrameBuilder._split_select_multiples(record, select_multiples) # build a new dictionary only composed of the keys we want to use in # the comparison result = dict( - [ - (key, result[key]) - for key in list(result) - if key in list(data[0]) - ] + [(key, result[key]) for key in list(result) if key in list(cursor[0])] ) - self.assertEqual(data[0], result) + self.assertEqual(cursor[0], result) csv_df_builder = CSVDataFrameBuilder( self.user.username, xform.id_string, binary_select_multiples=True ) # pylint: disable=protected-access - result = csv_df_builder._split_select_multiples( - record, select_multiples) + result = csv_df_builder._split_select_multiples(record, select_multiples) # build a new dictionary only composed of the keys we want to use in # the comparison result = dict( - [ - (key, result[key]) - for key in list(result) - if key in list(data[0]) - ] + [(key, result[key]) for key in list(result) if key in list(cursor[0])] ) - self.assertEqual(data[0], result) + self.assertEqual(cursor[0], result) diff --git a/onadata/libs/tests/utils/test_export_builder.py b/onadata/libs/tests/utils/test_export_builder.py index 7282c21459..c158b62268 
100644 --- a/onadata/libs/tests/utils/test_export_builder.py +++ b/onadata/libs/tests/utils/test_export_builder.py @@ -1267,14 +1267,14 @@ def test_export_with_image_attachments(self): media_files=[media_file], ) - xdata = query_data(self.xform) + xdata = [datum for datum in query_data(self.xform)] survey = self.md_to_pyxform_survey(md, {"name": "exp"}) export_builder = ExportBuilder() export_builder.set_survey(survey) with NamedTemporaryFile(suffix=".xlsx") as temp_xls_file: export_builder.to_xlsx_export( temp_xls_file, xdata, options={"host": "example.com"} - ) + ) temp_xls_file.seek(0) workbook = load_workbook(temp_xls_file) children_sheet = workbook["exp"] @@ -2155,7 +2155,7 @@ def test_gps_xlsx_export_remove_group_name(self): self.assertEqual(self.xform.instances.count(), 0) self._make_submission(_logger_fixture_path("gps_data.xml")) self.assertEqual(self.xform.instances.count(), 1) - records = self.xform.instances.all() + records = self.xform.instances.all().order_by("id") inst_json = records.first().json with NamedTemporaryFile(suffix=".xlsx") as temp_xls_file: export_builder.to_xlsx_export(temp_xls_file.name, [inst_json]) @@ -2512,7 +2512,7 @@ def test_to_sav_export_with_labels(self): for item in self.data: outputs.append( dict_to_joined_export( - item, index, indices, survey_name, survey, item, None + item, index, indices, survey_name, survey, item, None ) ) index += 1 @@ -3479,11 +3479,12 @@ def test_no_group_name_gps_data(self): self._make_submission(_logger_fixture_path("gps_data.xml")) self.assertEqual(self.xform.instances.count(), 1) csv_export = NamedTemporaryFile(suffix=".csv") - records = self.xform.instances.all() + records = self.xform.instances.all().order_by("id") inst_json = records.first().json + csv_data = records.values_list("json", flat=True).iterator() export_builder.to_flat_csv_export( csv_export.name, - records, + csv_data, self.xform.user.username, self.xform.id_string, "", diff --git a/onadata/libs/tests/utils/test_export_tools.py 
b/onadata/libs/tests/utils/test_export_tools.py index 62e6dd993c..c3138bc13b 100644 --- a/onadata/libs/tests/utils/test_export_tools.py +++ b/onadata/libs/tests/utils/test_export_tools.py @@ -27,7 +27,7 @@ from onadata.apps.logger.models import Attachment, Instance, XForm from onadata.apps.main.tests.test_base import TestBase from onadata.apps.viewer.models.export import Export -from onadata.apps.viewer.models.parsed_instance import query_data +from onadata.apps.viewer.models.parsed_instance import query_fields_data from onadata.libs.serializers.merged_xform_serializer import MergedXFormSerializer from onadata.libs.serializers.xform_serializer import XFormSerializer from onadata.libs.utils.export_builder import ( @@ -961,7 +961,9 @@ def test_generate_filtered_attachments_zip_export(self): "query": '{"_submission_time": {"$lte": "2019-01-13T00:00:00"}}', } filter_query = options.get("query") - instance_ids = query_data(self.xform, fields='["_id"]', query=filter_query) + instance_ids = query_fields_data( + self.xform, fields='["_id"]', query=filter_query + ) export = generate_attachments_zip_export( Export.ZIP_EXPORT, self.user.username, self.xform.id_string, None, options diff --git a/onadata/libs/utils/api_export_tools.py b/onadata/libs/utils/api_export_tools.py index d738c86b7e..a8bad4bcac 100644 --- a/onadata/libs/utils/api_export_tools.py +++ b/onadata/libs/utils/api_export_tools.py @@ -252,6 +252,7 @@ def _generate_new_export( # noqa: C0901 "username": xform.user.username, "id_string": xform.id_string, "host": request.get_host(), + "sort": request.query_params.get('sort') } if query: options["query"] = query diff --git a/onadata/libs/utils/csv_builder.py b/onadata/libs/utils/csv_builder.py index 773b8f341a..bf497f8a2f 100644 --- a/onadata/libs/utils/csv_builder.py +++ b/onadata/libs/utils/csv_builder.py @@ -3,10 +3,9 @@ CSV export utility functions. 
""" from collections import OrderedDict -from itertools import chain +from itertools import chain, tee from django.conf import settings -from django.db.models.query import QuerySet from django.utils.translation import gettext as _ import unicodecsv as csv @@ -17,8 +16,6 @@ from onadata.apps.logger.models import OsmData from onadata.apps.logger.models.xform import XForm, question_types_to_exclude from onadata.apps.viewer.models.data_dictionary import DataDictionary -from onadata.apps.viewer.models.parsed_instance import ParsedInstance, query_data -from onadata.libs.exceptions import NoRecordsFoundError from onadata.libs.utils.common_tags import ( ATTACHMENTS, BAMBOO_DATASET_ID, @@ -481,49 +478,6 @@ def _split_gps_fields(cls, record, gps_fields): cls._split_gps_fields(list_item, gps_fields) record.update(updated_gps_fields) - # pylint: disable=too-many-arguments - def _query_data( - self, - query="{}", - start=0, - limit=ParsedInstance.DEFAULT_LIMIT, - fields="[]", - count=False, - ): - # query_data takes params as json strings - # so we dumps the fields dictionary - count_args = { - "xform": self.xform, - "query": query, - "start": self.start, - "end": self.end, - "fields": "[]", - "sort": "{}", - "count": True, - } - count_object = list(query_data(**count_args)) - record_count = count_object[0]["count"] - if record_count < 1: - raise NoRecordsFoundError("No records found for your query") - # if count was requested, return the count - if count: - return record_count - - query_args = { - "xform": self.xform, - "query": query, - "fields": fields, - "start": self.start, - "end": self.end, - "sort": "id", - "start_index": start, - "limit": limit, - "count": False, - } - cursor = query_data(**query_args) - - return cursor - # pylint: disable=too-few-public-methods class CSVDataFrameBuilder(AbstractDataFrameBuilder): @@ -843,22 +797,17 @@ def _format_for_dataframe(self, cursor): flat_dict.update(reindexed) yield flat_dict - def export_to(self, path, dataview=None): + def 
export_to(self, path, cursor, dataview=None): """Export a CSV formated to the given ``path``.""" self.ordered_columns = OrderedDict() self._build_ordered_columns(self.data_dictionary.survey, self.ordered_columns) + # creator copy of iterator cursor + cursor, ordered_col_cursor = tee(cursor) + self._update_ordered_columns_from_data(ordered_col_cursor) + # Unpack xform columns and data + data = self._format_for_dataframe(cursor) if dataview: - cursor = dataview.query_data( - dataview, all_data=True, filter_query=self.filter_query - ) - if isinstance(cursor, QuerySet): - cursor = cursor.iterator() - - self._update_ordered_columns_from_data(cursor) - - data = self._format_for_dataframe(cursor) - columns = list( chain.from_iterable( [ @@ -869,20 +818,6 @@ def export_to(self, path, dataview=None): ) ) else: - try: - cursor = self._query_data(self.filter_query) - except NoRecordsFoundError: - # Set cursor object to an an empty queryset - cursor = self.xform.instances.none() - - self._update_ordered_columns_from_data(cursor) - - if isinstance(cursor, QuerySet): - cursor = cursor.iterator() - - # Unpack xform columns and data - data = self._format_for_dataframe(cursor) - columns = list( chain.from_iterable( [ diff --git a/onadata/libs/utils/export_builder.py b/onadata/libs/utils/export_builder.py index 9cad67094d..12519f6857 100644 --- a/onadata/libs/utils/export_builder.py +++ b/onadata/libs/utils/export_builder.py @@ -1170,7 +1170,7 @@ def to_flat_csv_export( host=host, ) - csv_builder.export_to(path, dataview=dataview) + csv_builder.export_to(path, data, dataview=dataview) def get_default_language(self, languages): """Return the default languange of the XForm.""" diff --git a/onadata/libs/utils/export_tools.py b/onadata/libs/utils/export_tools.py index 6f30bf906f..13f57bf78d 100644 --- a/onadata/libs/utils/export_tools.py +++ b/onadata/libs/utils/export_tools.py @@ -40,7 +40,11 @@ from onadata.apps.logger.models.data_view import DataView from 
onadata.apps.main.models.meta_data import MetaData from onadata.apps.viewer.models.export import Export, get_export_options_query_kwargs -from onadata.apps.viewer.models.parsed_instance import query_data +from onadata.apps.viewer.models.parsed_instance import ( + query_data, + query_count, + query_fields_data, +) from onadata.libs.exceptions import J2XException, NoRecordsFoundError from onadata.libs.serializers.geojson_serializer import GeoJsonSerializer from onadata.libs.utils.common_tags import DATAVIEW_EXPORT, GROUPNAME_REMOVED_FLAG @@ -54,6 +58,7 @@ from onadata.libs.utils.osm import get_combined_osm from onadata.libs.utils.viewer_tools import create_attachments_zipfile, image_urls + DEFAULT_GROUP_DELIMITER = "/" DEFAULT_INDEX_TAGS = ("[", "]") SUPPORTED_INDEX_TAGS = ("[", "]", "(", ")", "{", "}", ".", "_") @@ -133,7 +138,7 @@ def generate_export(export_type, xform, export_id=None, options=None): # noqa C filter_query = options.get("query") remove_group_name = options.get("remove_group_name", False) start = options.get("start") - + sort = options.get("sort") export_type_func_map = { Export.XLSX_EXPORT: "to_xlsx_export", Export.CSV_EXPORT: "to_flat_csv_export", @@ -151,16 +156,34 @@ def generate_export(export_type, xform, export_id=None, options=None): # noqa C if options.get("dataview_pk"): dataview = DataView.objects.get(pk=options.get("dataview_pk")) records = dataview.query_data( - dataview, all_data=True, filter_query=filter_query + dataview, + all_data=True, + filter_query=filter_query, + sort=sort, ) - total_records = dataview.query_data(dataview, count=True)[0].get("count") + total_records = dataview.query_data( + dataview, + count=True, + sort=sort, + )[ + 0 + ].get("count") else: - records = query_data(xform, query=filter_query, start=start, end=end) + records = query_data( + xform, + query=filter_query, + start=start, + end=end, + sort=sort, + ) if filter_query: - total_records = query_data( - xform, query=filter_query, start=start, end=end, 
count=True - )[0].get("count") + total_records = query_count( + xform, + query=filter_query, + date_created_gte=start, + date_created_lte=end, + ) else: total_records = xform.num_of_submissions @@ -423,6 +446,7 @@ def generate_attachments_zip_export( """ export_type = options.get("extension", export_type) filter_query = options.get("query") + sort = options.get("sort") if xform is None: xform = XForm.objects.get(user__username=username, id_string=id_string) @@ -433,13 +457,18 @@ def generate_attachments_zip_export( instance_id__in=[ rec.get("_id") for rec in dataview.query_data( - dataview, all_data=True, filter_query=filter_query + dataview, + all_data=True, + filter_query=filter_query, + sort=sort, ) ], instance__deleted_at__isnull=True, ) else: - instance_ids = query_data(xform, fields='["_id"]', query=filter_query) + instance_ids = query_fields_data( + xform, fields=["_id"], query=filter_query, sort=sort + ) attachments = Attachment.objects.filter(instance__deleted_at__isnull=True) if xform.is_merged_dataset: attachments = attachments.filter( @@ -815,6 +844,7 @@ def generate_external_export( # noqa C901 filter_query = options.get("query") meta = options.get("meta") token = options.get("token") + sort = options.get("sort") if xform is None: xform = XForm.objects.get( @@ -839,7 +869,11 @@ def generate_external_export( # noqa C901 instances = [inst[0].json if inst else {}] else: - instances = query_data(xform, query=filter_query) + instances = query_data( + xform, + query=filter_query, + sort=sort, + ) records = _get_records(instances) From e5bdec91cb47179172b515bbcb91701262ff3377 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Thu, 24 Aug 2023 10:27:08 +0300 Subject: [PATCH 068/270] tag release v3.12.2 (#2471) --- CHANGES.rst | 9 +++++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 3796e66715..9cdcfbc341 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,15 @@ 
Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.12.2(2033-08-24) +------------------- +- Solve intermittent bug where form permissions are not applied for new forms + `PR #2470 ` + [@kelvin-muchiri] +- Enhance performance when exporting data on endpoint api/v1/data/. + `PR #2460 ` + [@kelvin-muchiri] + v3.12.1(2023-08-14) ------------------- diff --git a/onadata/__init__.py b/onadata/__init__.py index df990dface..19363dbe20 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.12.1" +__version__ = "3.12.2" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index 71298a0c23..7901eb7800 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.12.1 +version = 3.12.2 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From cc2e0496cb83444d830f3b9a97a24ad550287b81 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 17 Aug 2023 09:58:49 +0300 Subject: [PATCH 069/270] Execute post submission async tasks after transaction commit Signed-off-by: Kipchirchir Sigei --- onadata/apps/logger/models/instance.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index 5cb57c34fb..840d5e6aaf 100644 --- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -829,9 +829,19 @@ def post_save_submission(sender, instance=None, created=False, **kwargs): _update_xform_submission_count_delete(instance) if ASYNC_POST_SUBMISSION_PROCESSING_ENABLED: - update_xform_submission_count_async.apply_async(args=[instance.pk, created]) - save_full_json_async.apply_async(args=[instance.pk, created]) - update_project_date_modified_async.apply_async(args=[instance.pk, created]) 
+ transaction.on_commit( + lambda: update_xform_submission_count_async.apply_async( + args=[instance.pk, created] + ) + ) + transaction.on_commit( + lambda: save_full_json_async.apply_async(args=[instance.pk, created]) + ) + transaction.on_commit( + lambda: update_project_date_modified_async.apply_async( + args=[instance.pk, created] + ) + ) else: update_xform_submission_count(instance.pk, created) From 097c96b63e34a5d98a8daae483e920a9163a3ba0 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Mon, 4 Sep 2023 09:08:49 +0300 Subject: [PATCH 070/270] Add tests Signed-off-by: Kipchirchir Sigei --- .../apps/logger/tests/test_form_submission.py | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/onadata/apps/logger/tests/test_form_submission.py b/onadata/apps/logger/tests/test_form_submission.py index cfc06d6fe5..4ab3d651dc 100644 --- a/onadata/apps/logger/tests/test_form_submission.py +++ b/onadata/apps/logger/tests/test_form_submission.py @@ -10,6 +10,7 @@ from django.http import UnreadablePostError from django_digest.test import DigestAuth from django_digest.test import Client as DigestClient +from django.test.utils import override_settings from guardian.shortcuts import assign_perm from mock import patch, Mock, ANY from nose import SkipTest @@ -633,3 +634,30 @@ def test_form_submission_with_infinity_values(self): self._make_submission(path=xml_submission_file_path) self.assertEqual(400, self.response.status_code) self.assertIn("invalid input syntax for type json", str(self.response.message)) + + @override_settings(CELERY_TASK_ALWAYS_EAGER=True) + @override_settings(ASYNC_POST_SUBMISSION_PROCESSING_ENABLED=True) + @patch("onadata.apps.viewer.signals._post_process_submissions") + def test_post_save_submission_count_update(self, mock): + """Test that submission count is updated asyncronously""" + # initial count should be 0 + self.assertEqual(0, self.xform.instances.count()) + # publish submission + xml_submission_file_path = os.path.join( + 
os.path.dirname(os.path.abspath(__file__)), + "../fixtures/tutorial/instances/tutorial_2012-06-27_11-27-53.xml", + ) + + self._make_submission(xml_submission_file_path) + self.assertEqual(self.response.status_code, 201) + + # test that post_save signal is executed + instance = self.xform.instances.first() + self.assertTrue(mock.called) + mock.assert_called_once_with(instance) + self.assertEqual(mock.call_count, 1) + self.xform.refresh_from_db() + + # test submission count + self.assertEqual(1, self.xform.instances.count()) + self.assertEqual(1, self.xform.num_of_submissions) From 33714c10a6c946d0f4ae70e6dc40d0351fdd0614 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Fri, 14 Jul 2023 15:19:16 +0300 Subject: [PATCH 071/270] Add project and form level submission and download list filtering Signed-off-by: Kipchirchir Sigei --- .../apps/api/viewsets/briefcase_viewset.py | 34 +++++++++++++++++-- onadata/apps/main/urls.py | 20 +++++++++++ 2 files changed, 52 insertions(+), 2 deletions(-) diff --git a/onadata/apps/api/viewsets/briefcase_viewset.py b/onadata/apps/api/viewsets/briefcase_viewset.py index 7056d80163..67b1efa65c 100644 --- a/onadata/apps/api/viewsets/briefcase_viewset.py +++ b/onadata/apps/api/viewsets/briefcase_viewset.py @@ -98,6 +98,19 @@ def get_object(self, queryset=None): id_string = _extract_id_string(form_id) uuid = _extract_uuid(form_id) username = self.kwargs.get("username") + form_pk = self.kwargs.get("xform_pk") + project_pk = self.kwargs.get("project_pk") + + if not username: + if form_pk: + queryset = self.queryset.filter(pk=form_pk) + if queryset.first(): + username = queryset.first().user.username + elif project_pk: + queryset = queryset.filter(project__pk=project_pk) + if queryset.first(): + username = queryset.first().user.username + obj = get_object_or_404( Instance, @@ -115,18 +128,35 @@ def filter_queryset(self, queryset): Filters an XForm submission instances using ODK Aggregate query parameters. 
""" username = self.kwargs.get("username") - if username is None and self.request.user.is_anonymous: + form_pk = self.kwargs.get("xform_pk") + project_pk = self.kwargs.get("project_pk") + + if (not username or not form_pk or not project_pk) and self.request.user.is_anonymous: # raises a permission denied exception, forces authentication self.permission_denied(self.request) if username is not None and self.request.user.is_anonymous: - profile = get_object_or_404(UserProfile, user__username__iexact=username) + profile = None + if username: + profile = get_object_or_404(UserProfile, user__username__iexact=username) + elif form_pk: + queryset = queryset.filter(pk=form_pk) + if queryset.first(): + profile = queryset.first().user.profile + elif project_pk: + queryset = queryset.filter(project__pk=project_pk) + if queryset.first(): + profile = queryset.first().user.profile if profile.require_auth: # raises a permission denied exception, forces authentication self.permission_denied(self.request) else: queryset = queryset.filter(user=profile.user) + elif form_pk: + queryset = queryset.filter(pk=form_pk) + elif project_pk: + queryset = queryset.filter(project__pk=project_pk) else: queryset = super().filter_queryset(queryset) diff --git a/onadata/apps/main/urls.py b/onadata/apps/main/urls.py index 0c622491df..57ecdbfa9e 100644 --- a/onadata/apps/main/urls.py +++ b/onadata/apps/main/urls.py @@ -201,11 +201,31 @@ BriefcaseViewset.as_view({"get": "list", "head": "list"}), name="view-submission-list", ), + re_path( + r"^forms/(?P\w+)/view/submissionList$", + BriefcaseViewset.as_view({"get": "list", "head": "list"}), + name="view-submission-list", + ), + re_path( + r"^projects/(?P\d+)/view/submissionList$", + BriefcaseViewset.as_view({"get": "list", "head": "list"}), + name="view-submission-list", + ), re_path( r"^(?P\w+)/view/downloadSubmission$", BriefcaseViewset.as_view({"get": "retrieve", "head": "retrieve"}), name="view-download-submission", ), + re_path( + 
r"^forms/(?P\w+)/view/downloadSubmission$", + BriefcaseViewset.as_view({"get": "retrieve", "head": "retrieve"}), + name="view-download-submission", + ), + re_path( + r"^projects/(?P\d+)/view/downloadSubmission$", + BriefcaseViewset.as_view({"get": "retrieve", "head": "retrieve"}), + name="view-download-submission", + ), re_path( r"^(?P\w+)/formUpload$", BriefcaseViewset.as_view({"post": "create", "head": "create"}), From f98e91085e5483178252c3ac110ae2d9cd4b713b Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 19 Jul 2023 16:20:18 +0300 Subject: [PATCH 072/270] WIP tests Signed-off-by: Kipchirchir Sigei --- .../tests/viewsets/test_briefcase_viewset.py | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py b/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py index bfb984a611..979731e738 100644 --- a/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py @@ -130,6 +130,35 @@ def test_view_submission_list(self): '{{resumptionCursor}}', '%s' % last_index) self.assertContains(response, expected_submission_list) + def test_view_submission_list_w_xformid(self): + view = BriefcaseViewset.as_view({'get': 'list'}) + self._publish_xml_form() + self._make_submissions() + request = self.factory.get( + self._submission_list_url, + data={'formId': self.xform.id_string}) + response = view(request, xform_pk=self.xform.pk) + self.assertEqual(response.status_code, 401) + auth = DigestAuth(self.login_username, self.login_password) + request.META.update(auth(request.META, response)) + response = view(request, xform_pk=self.xform.pk) + self.assertEqual(response.status_code, 200) + import ipdb; ipdb.set_trace() + submission_list_path = os.path.join( + self.main_directory, 'fixtures', 'transportation', + 'view', 'submissionList.xml') + instances = ordered_instances(self.xform) + + self.assertEqual(instances.count(), NUM_INSTANCES) + 
+ last_index = instances[instances.count() - 1].pk + with codecs.open(submission_list_path, 'rb', encoding='utf-8') as f: + expected_submission_list = f.read() + expected_submission_list = \ + expected_submission_list.replace( + '{{resumptionCursor}}', '%s' % last_index) + self.assertContains(response, expected_submission_list) + def test_view_submission_list_w_soft_deleted_submission(self): view = BriefcaseViewset.as_view({'get': 'list'}) self._publish_xml_form() From 404829b4db755c51cd433b4cb5cf17d0f52d3e35 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Mon, 4 Sep 2023 11:53:50 +0300 Subject: [PATCH 073/270] Add more tests Signed-off-by: Kipchirchir Sigei --- .../tests/viewsets/test_briefcase_viewset.py | 107 +++++++++++++++++- .../apps/api/viewsets/briefcase_viewset.py | 13 ++- 2 files changed, 114 insertions(+), 6 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py b/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py index 979731e738..fb95b669f3 100644 --- a/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py @@ -134,6 +134,9 @@ def test_view_submission_list_w_xformid(self): view = BriefcaseViewset.as_view({'get': 'list'}) self._publish_xml_form() self._make_submissions() + self._submission_list_url = reverse( + 'view-submission-list', + kwargs={'xform_pk': self.xform.pk}) request = self.factory.get( self._submission_list_url, data={'formId': self.xform.id_string}) @@ -143,7 +146,37 @@ def test_view_submission_list_w_xformid(self): request.META.update(auth(request.META, response)) response = view(request, xform_pk=self.xform.pk) self.assertEqual(response.status_code, 200) - import ipdb; ipdb.set_trace() + submission_list_path = os.path.join( + self.main_directory, 'fixtures', 'transportation', + 'view', 'submissionList.xml') + instances = ordered_instances(self.xform) + + self.assertEqual(instances.count(), NUM_INSTANCES) + + last_index = 
instances[instances.count() - 1].pk + with codecs.open(submission_list_path, 'rb', encoding='utf-8') as f: + expected_submission_list = f.read() + expected_submission_list = \ + expected_submission_list.replace( + '{{resumptionCursor}}', '%s' % last_index) + self.assertContains(response, expected_submission_list) + + def test_view_submission_list_w_projectid(self): + view = BriefcaseViewset.as_view({'get': 'list'}) + self._publish_xml_form() + self._make_submissions() + self._submission_list_url = reverse( + 'view-submission-list', + kwargs={'project_pk': self.xform.project.pk}) + request = self.factory.get( + self._submission_list_url, + data={'formId': self.xform.id_string}) + response = view(request, project_pk=self.xform.project.pk) + self.assertEqual(response.status_code, 401) + auth = DigestAuth(self.login_username, self.login_password) + request.META.update(auth(request.META, response)) + response = view(request, project_pk=self.xform.project.pk) + self.assertEqual(response.status_code, 200) submission_list_path = os.path.join( self.main_directory, 'fixtures', 'transportation', 'view', 'submissionList.xml') @@ -336,6 +369,78 @@ def test_view_downloadSubmission(self): self.assertContains(response, instanceId, status_code=200) self.assertMultiLineEqual(response.content.decode('utf-8'), text) + def test_view_downloadSubmission_w_xformid(self): + view = BriefcaseViewset.as_view({'get': 'retrieve'}) + self._publish_xml_form() + self.maxDiff = None + self._submit_transport_instance_w_attachment() + instanceId = u'5b2cc313-fc09-437e-8149-fcd32f695d41' + instance = Instance.objects.get(uuid=instanceId) + formId = u'%(formId)s[@version=null and @uiVersion=null]/' \ + u'%(formId)s[@key=uuid:%(instanceId)s]' % { + 'formId': self.xform.id_string, + 'instanceId': instanceId} + params = {'formId': formId} + auth = DigestAuth(self.login_username, self.login_password) + self._download_submission_url = reverse( + 'view-download-submission', + kwargs={'xform_pk': 
self.xform.pk}) + request = self.factory.get( + self._download_submission_url, data=params) + response = view(request, xform_pk=self.xform.pk) + self.assertEqual(response.status_code, 401) + request.META.update(auth(request.META, response)) + response = view(request, xform_pk=self.xform.pk) + text = "uuid:%s" % instanceId + download_submission_path = os.path.join( + self.main_directory, 'fixtures', 'transportation', + 'view', 'downloadSubmission.xml') + with codecs.open(download_submission_path, encoding='utf-8') as f: + text = f.read() + for var in ((u'{{submissionDate}}', + instance.date_created.isoformat()), + (u'{{form_id}}', str(self.xform.id)), + (u'{{media_id}}', str(self.attachment.id))): + text = text.replace(*var) + self.assertContains(response, instanceId, status_code=200) + self.assertMultiLineEqual(response.content.decode('utf-8'), text) + + def test_view_downloadSubmission_w_projectid(self): + view = BriefcaseViewset.as_view({'get': 'retrieve'}) + self._publish_xml_form() + self.maxDiff = None + self._submit_transport_instance_w_attachment() + instanceId = u'5b2cc313-fc09-437e-8149-fcd32f695d41' + instance = Instance.objects.get(uuid=instanceId) + formId = u'%(formId)s[@version=null and @uiVersion=null]/' \ + u'%(formId)s[@key=uuid:%(instanceId)s]' % { + 'formId': self.xform.id_string, + 'instanceId': instanceId} + params = {'formId': formId} + auth = DigestAuth(self.login_username, self.login_password) + self._download_submission_url = reverse( + 'view-download-submission', + kwargs={'project_pk': self.xform.project.pk}) + request = self.factory.get( + self._download_submission_url, data=params) + response = view(request, project_pk=self.xform.project.pk) + self.assertEqual(response.status_code, 401) + request.META.update(auth(request.META, response)) + response = view(request, project_pk=self.xform.project.pk) + text = "uuid:%s" % instanceId + download_submission_path = os.path.join( + self.main_directory, 'fixtures', 'transportation', + 'view', 
'downloadSubmission.xml') + with codecs.open(download_submission_path, encoding='utf-8') as f: + text = f.read() + for var in ((u'{{submissionDate}}', + instance.date_created.isoformat()), + (u'{{form_id}}', str(self.xform.id)), + (u'{{media_id}}', str(self.attachment.id))): + text = text.replace(*var) + self.assertContains(response, instanceId, status_code=200) + self.assertMultiLineEqual(response.content.decode('utf-8'), text) + def test_view_downloadSubmission_OtherUser(self): view = BriefcaseViewset.as_view({'get': 'retrieve'}) self._publish_xml_form() diff --git a/onadata/apps/api/viewsets/briefcase_viewset.py b/onadata/apps/api/viewsets/briefcase_viewset.py index 67b1efa65c..7e0648a7a5 100644 --- a/onadata/apps/api/viewsets/briefcase_viewset.py +++ b/onadata/apps/api/viewsets/briefcase_viewset.py @@ -107,11 +107,10 @@ def get_object(self, queryset=None): if queryset.first(): username = queryset.first().user.username elif project_pk: - queryset = queryset.filter(project__pk=project_pk) + queryset = self.queryset.filter(project__pk=project_pk) if queryset.first(): username = queryset.first().user.username - obj = get_object_or_404( Instance, xform__user__username__iexact=username, @@ -122,7 +121,7 @@ def get_object(self, queryset=None): return obj - # pylint: disable=too-many-branches + # pylint: disable=too-many-branches,too-many-statements def filter_queryset(self, queryset): """ Filters an XForm submission instances using ODK Aggregate query parameters. 
@@ -131,14 +130,18 @@ def filter_queryset(self, queryset): form_pk = self.kwargs.get("xform_pk") project_pk = self.kwargs.get("project_pk") - if (not username or not form_pk or not project_pk) and self.request.user.is_anonymous: + if ( + not username or not form_pk or not project_pk + ) and self.request.user.is_anonymous: # raises a permission denied exception, forces authentication self.permission_denied(self.request) if username is not None and self.request.user.is_anonymous: profile = None if username: - profile = get_object_or_404(UserProfile, user__username__iexact=username) + profile = get_object_or_404( + UserProfile, user__username__iexact=username + ) elif form_pk: queryset = queryset.filter(pk=form_pk) if queryset.first(): From 7b8d0d75a9f653382588135f0dfe057d3f55d5cb Mon Sep 17 00:00:00 2001 From: Kipchirchir Cheroigin Date: Tue, 5 Sep 2023 09:54:27 +0300 Subject: [PATCH 074/270] Tag release v3.13.0 (#2473) Signed-off-by: Kipchirchir Sigei --- CHANGES.rst | 11 ++++++++++- onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 9cdcfbc341..f7a281774e 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,7 +3,16 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` -v3.12.2(2033-08-24) +v3.13.0(2023-09-04) +------------------- +- Add project and form level odk submission-list and download endpoints + `PR #2451 ` + [@KipSigei] +- Fix async submission count discrepancy + `PR #2469 ` + [@KipSigei] + +v3.12.2(2023-08-24) ------------------- - Solve intermittent bug where form permissions are not applied for new forms `PR #2470 ` diff --git a/onadata/__init__.py b/onadata/__init__.py index 19363dbe20..27689c27b6 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.12.2" +__version__ = "3.13.0" # This will make sure the app is always imported when diff --git 
a/setup.cfg b/setup.cfg index 7901eb7800..10c59827f0 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.12.2 +version = 3.13.0 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 170c8ca07a40dc2bf681d461699813dbc3c5dc05 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Wed, 13 Sep 2023 16:33:09 +0300 Subject: [PATCH 075/270] Revert to have data exports default sorting by id (#2474) * revert to have data exports default sorting by id sort export data by id by default * add test case * remove assertions in test case when fields are provided in the query no ordering is applied * fix fetching data on select columns not sorting sorting when fetching data is not applied when fetching data on select columns that are not json fields * refactor code * fix lint error Unexpected keyword argument 'json_only' in function call * refactor code get rid of ctypes.ArgumentError: error raised when working with GIS fields with Manager.raw() by selecting only the columns we need * fix failing tests --- docs/data.rst | 16 +--------- .../api/tests/viewsets/test_data_viewset.py | 11 +++---- .../tests/viewsets/test_tableau_viewset.py | 3 +- .../api/tests/viewsets/test_xform_viewset.py | 4 +-- onadata/apps/api/viewsets/data_viewset.py | 5 --- onadata/apps/main/views.py | 5 --- onadata/apps/viewer/models/parsed_instance.py | 31 +++++++++---------- 7 files changed, 24 insertions(+), 51 deletions(-) diff --git a/docs/data.rst b/docs/data.rst index 48c8000602..851e351b57 100644 --- a/docs/data.rst +++ b/docs/data.rst @@ -372,9 +372,7 @@ Sample response with link header Sort submitted data of a specific form using existing fields ------------------------------------------------------------- -Provides a sorted list of json submitted data for a specific form by specifing the order in which the query returns matching data. 
- -No ordering is applied by default -- the data is returned in any arbitrary order. Use the `sort` parameter to filter the list of submissions. The sort parameter has field and value pairs. +Provides a sorted list of json submitted data for a specific form by specifing the order in which the query returns matching data. Use the `sort` parameter to filter the list of submissions.The sort parameter has field and value pairs. :: @@ -392,18 +390,6 @@ Descending sort query using the age field: {"age":-1} -Query sorted by id field in ascending. - -:: - - {"_id":1} - -Query sorted by id field in descending. - -:: - - {"_id":-1} - Example of Ascending Sort ^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/onadata/apps/api/tests/viewsets/test_data_viewset.py b/onadata/apps/api/tests/viewsets/test_data_viewset.py index 429f2be2dd..a3e66b5e39 100644 --- a/onadata/apps/api/tests/viewsets/test_data_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_data_viewset.py @@ -3565,22 +3565,19 @@ def test_csv_export(self): number_records = len(list(csv_reader)) self.assertEqual(number_records, 4) - def test_sort_query_param(self): - """sort query param works with exports""" - + def test_default_ordering(self): + """Export data is sorted by id by default""" self._make_submissions() formid = self.xform.pk # sort csv export data by id in descending order - request = self.factory.get( - "/", data={"format": "csv", "sort": '{"_id": -1}'}, **self.extra - ) + request = self.factory.get("/", data={"format": "csv"}, **self.extra) response = self.view(request, pk=formid) self.assertEqual(response.status_code, 200) csv_file_obj = StringIO( "".join([c.decode("utf-8") for c in response.streaming_content]) ) csv_reader = csv.reader(csv_file_obj) - instances = Instance.objects.filter(xform_id=formid).order_by("-id") + instances = Instance.objects.filter(xform_id=formid).order_by("id") self.assertEqual(instances.count(), 4) headers = next(csv_reader) expected_headers = [ diff --git 
a/onadata/apps/api/tests/viewsets/test_tableau_viewset.py b/onadata/apps/api/tests/viewsets/test_tableau_viewset.py index 483b6515c2..31ac833754 100644 --- a/onadata/apps/api/tests/viewsets/test_tableau_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_tableau_viewset.py @@ -3,6 +3,7 @@ OpenData tests. """ import os +import sys import json from re import search from django.test import RequestFactory @@ -396,7 +397,7 @@ def test_gt_id_query_param(self): self.view = TableauViewSet.as_view({"get": "data"}) _open_data = get_or_create_opendata(self.xform) uuid = _open_data[0].uuid - request = self.factory.get("/", data={"gt_id": 100000}, **self.extra) + request = self.factory.get("/", data={"gt_id": sys.maxsize}, **self.extra) response = self.view(request, uuid=uuid) self.assertEqual(response.status_code, 200) row_data = streaming_data(response) diff --git a/onadata/apps/api/tests/viewsets/test_xform_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_viewset.py index 3eed7a1cba..4c36f6cb38 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_viewset.py @@ -3859,7 +3859,7 @@ def test_csv_export_with_win_excel_utf8(self): self.assertEqual(response.status_code, 200) self.assertTrue(response.data.get("has_hxl_support")) # sort csv data in ascending order - data = {"win_excel_utf8": True, "sort": '{"_id": 1}'} + data = {"win_excel_utf8": True} request = self.factory.get("/", data=data, **self.extra) response = view(request, pk=self.xform.pk, format="csv") self.assertEqual(response.status_code, 200) @@ -3888,7 +3888,7 @@ def test_csv_export_with_win_excel_utf8(self): basename, ext = os.path.splitext(filename) self.assertEqual(ext, ".csv") # sort csv data in ascending order - data = {"win_excel_utf8": False, "sort": '{"_id": 1}'} + data = {"win_excel_utf8": False} request = self.factory.get("/", data=data, **self.extra) response = view(request, pk=self.xform.pk, format="csv") 
self.assertEqual(response.status_code, 200) diff --git a/onadata/apps/api/viewsets/data_viewset.py b/onadata/apps/api/viewsets/data_viewset.py index 2448c70926..8ed0d70765 100644 --- a/onadata/apps/api/viewsets/data_viewset.py +++ b/onadata/apps/api/viewsets/data_viewset.py @@ -720,11 +720,6 @@ def set_object_list(self, query, fields, sort, start, limit, is_public_request): start = (page - 1) * page_size limit = page_size - if sort is None: - # Paginated data needs to be sorted. We order by - # id ascending if sort is empty - sort = '{"_id": 1}' - if should_query_json_fields: data = query_fields_data( xform, diff --git a/onadata/apps/main/views.py b/onadata/apps/main/views.py index cb7ef87ee3..02b1c1531f 100644 --- a/onadata/apps/main/views.py +++ b/onadata/apps/main/views.py @@ -598,11 +598,6 @@ def api(request, username=None, id_string=None): # noqa C901 args["start_index"] = start_index args["limit"] = page_size - if args.get("sort") is None: - # Paginated data needs to be sorted. We order by id ascending if - # sort is empty - args["sort"] = '{"_id": 1}' - if "start" in request.GET: args["start_index"] = int(request.GET.get("start")) diff --git a/onadata/apps/viewer/models/parsed_instance.py b/onadata/apps/viewer/models/parsed_instance.py index 5db709fab0..f50a218e77 100644 --- a/onadata/apps/viewer/models/parsed_instance.py +++ b/onadata/apps/viewer/models/parsed_instance.py @@ -174,7 +174,7 @@ def _start_index_limit(sql, params, start_index, limit): def _get_sort_fields(sort): - sort = [] if sort is None else sort_from_mongo_sort_str(sort) + sort = ["id"] if sort is None else sort_from_mongo_sort_str(sort) return list(_parse_sort_fields(sort)) @@ -258,7 +258,7 @@ def get_sql_with_params( sql = "SELECT id,json FROM logger_instance" else: - sql = "SELECT * FROM logger_instance" + sql = "SELECT id,json,xml FROM logger_instance" sql_where, params = build_sql_where(xform, query, start, end) sql += f" {sql_where}" @@ -275,20 +275,19 @@ def get_sql_with_params( ) 
sql = f"{sql} {_json_order_by}" else: - if not fields: - sql += " ORDER BY" - - for index, sort_field in enumerate(sort): - if sort_field.startswith("-"): - sort_field = sort_field.removeprefix("-") - # It's safe to use string interpolation since this - # is a column and not a value - sql += f" {sort_field} DESC" - else: - sql += f" {sort_field} ASC" - - if index != len(sort) - 1: - sql += "," + sql += " ORDER BY" + + for index, sort_field in enumerate(sort): + if sort_field.startswith("-"): + sort_field = sort_field.removeprefix("-") + # It's safe to use string interpolation since this + # is a column and not a value + sql += f" {sort_field} DESC" + else: + sql += f" {sort_field} ASC" + + if index != len(sort) - 1: + sql += "," sql, params = _start_index_limit(sql, params, start_index, limit) From 2f773593a1be0b0e5800722fc43069559f9a6ba8 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Wed, 13 Sep 2023 17:19:03 +0300 Subject: [PATCH 076/270] tag release candidate v3.13.1 (#2476) --- CHANGES.rst | 6 ++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index f7a281774e..0da14d42ca 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,12 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.13.1(2023-09-13) +------------------- +- Revert to have data exports default sorting by id + `PR #2474 ` + [@kelvin-muchiri] + v3.13.0(2023-09-04) ------------------- - Add project and form level odk submission-list and download endpoints diff --git a/onadata/__init__.py b/onadata/__init__.py index 27689c27b6..64f9e5359a 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.13.0" +__version__ = "3.13.1" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index 10c59827f0..745996bffb 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 
+1,6 @@ [metadata] name = onadata -version = 3.13.0 +version = 3.13.1 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From d1ecff15f52af9167c7a15cb98d5653c63387ba4 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Fri, 22 Sep 2023 15:15:35 +0300 Subject: [PATCH 077/270] Ensure sas token is appended to azure blob url Signed-off-by: Kipchirchir Sigei --- onadata/apps/viewer/views.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/onadata/apps/viewer/views.py b/onadata/apps/viewer/views.py index 47ceedbe40..ff686bdbe7 100644 --- a/onadata/apps/viewer/views.py +++ b/onadata/apps/viewer/views.py @@ -56,7 +56,7 @@ str_to_bool, ) from onadata.libs.utils.google import create_flow -from onadata.libs.utils.image_tools import image_url +from onadata.libs.utils.image_tools import image_url, generate_media_download_url from onadata.libs.utils.log import Actions, audit_log from onadata.libs.utils.logger_tools import ( generate_content_disposition_header, @@ -894,7 +894,7 @@ def attachment_url(request, size="medium"): return response if not attachment.mimetype.startswith("image"): - return redirect(attachment.media_file.url) + return generate_media_download_url(attachment) media_url = image_url(attachment, size) if media_url: return redirect(media_url) From 468c0c5fcfa00fb9e19456617689b83c1a3166a0 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Fri, 22 Sep 2023 21:34:00 +0300 Subject: [PATCH 078/270] Add tests Signed-off-by: Kipchirchir Sigei --- .../apps/viewer/tests/test_attachment_url.py | 55 +++++++++++++++++++ 1 file changed, 55 insertions(+) diff --git a/onadata/apps/viewer/tests/test_attachment_url.py b/onadata/apps/viewer/tests/test_attachment_url.py index 166a72568d..3a7a8f31c6 100644 --- a/onadata/apps/viewer/tests/test_attachment_url.py +++ b/onadata/apps/viewer/tests/test_attachment_url.py @@ -1,9 +1,14 @@ import os from django.conf import settings +from 
django.contrib.auth import authenticate +from django.http import HttpResponseRedirect from django.urls import reverse +from mock import patch +from rest_framework.test import APIRequestFactory from onadata.apps.logger.models import Attachment +from onadata.apps.logger.views import submission from onadata.apps.main.tests.test_base import TestBase from onadata.apps.viewer.views import attachment_url @@ -18,6 +23,8 @@ def setUp(self): self._submit_transport_instance_w_attachment() self.url = reverse( attachment_url, kwargs={'size': 'original'}) + self._submission_url = reverse( + 'submissions', kwargs={'username': self.user.username}) def test_attachment_url(self): self.assertEqual( @@ -61,6 +68,54 @@ def test_attachment_url_w_media_id_no_redirect(self): 'no_redirect': 'true'}) self.assertEqual(response.status_code, 200) # no redirects to amazon + @patch("onadata.apps.viewer.views.generate_media_download_url") + def test_attachment_url_has_azure_sas_token(self, mock_media_url): + """Test attachment url has azure sas token""" + self._publish_xls_file( + os.path.join( + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "transportation", + "transportation_encrypted.xlsx", + ) + ) + files = {} + for filename in ["submission.xml", "submission.xml.enc"]: + files[filename] = os.path.join( + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "transportation", + "instances_encrypted", + filename, + ) + with open(files["submission.xml.enc"], "rb") as encryped_file: + with open(files["submission.xml"], "rb") as f: + post_data = { + "xml_submission_file": f, + "submission.xml.enc": encryped_file, + } + self.factory = APIRequestFactory() + request = self.factory.post(self._submission_url, post_data) + request.user = authenticate(username="bob", password="bob") + response = submission(request, username=self.user.username) + self.assertEqual(response.status_code, 201) + + # get submission enc attachment + attachment = 
Attachment.objects.all()[1] + sas_token = "se=ab736fba7261" # nosec + expected_url = f"http://testserver/{attachment.media_file.name}?{sas_token}" + mock_media_url.return_value = HttpResponseRedirect(redirect_to=expected_url) + response = self.client.get(self.url, {"media_file": attachment.media_file.name}) + self.assertEqual(response.status_code, 302) + self.assertEqual(response.url, expected_url) + self.assertIn(f"?{sas_token}", str(response.url)) + def tearDown(self): path = os.path.join(settings.MEDIA_ROOT, self.user.username) for root, dirs, files in os.walk(path, topdown=False): From 4f8e829ef9c22306b4f115b47c96af04b29d1830 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 13 Sep 2023 15:30:01 +0300 Subject: [PATCH 079/270] Fix attribute error when handling geometry in repeat groups Signed-off-by: Kipchirchir Sigei --- .../libs/serializers/geojson_serializer.py | 33 ++++++++++--------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/onadata/libs/serializers/geojson_serializer.py b/onadata/libs/serializers/geojson_serializer.py index 676f29331d..35adec674d 100644 --- a/onadata/libs/serializers/geojson_serializer.py +++ b/onadata/libs/serializers/geojson_serializer.py @@ -66,23 +66,24 @@ def geometry_from_string(points, simple_style): that adheres to the simplestyle-spec """ - points = points.split(";") - pnt_list = [tuple(map(float, reversed(point.split()[:2]))) for point in points] - - if len(pnt_list) == 1: - geometry = ( - geojson.Point(pnt_list[0]) - if str_to_bool(simple_style) - else geojson.GeometryCollection([geojson.Point(pnt_list[0])]) - ) - elif is_polygon(pnt_list): - # First and last point are same -> Polygon - geometry = geojson.Polygon([pnt_list]) - else: - # First and last point not same -> LineString - geometry = geojson.LineString(pnt_list) + if isinstance(points, str): + points = points.split(";") + pnt_list = [tuple(map(float, reversed(point.split()[:2]))) for point in points] + + if len(pnt_list) == 1: + geometry = 
( + geojson.Point(pnt_list[0]) + if str_to_bool(simple_style) + else geojson.GeometryCollection([geojson.Point(pnt_list[0])]) + ) + elif is_polygon(pnt_list): + # First and last point are same -> Polygon + geometry = geojson.Polygon([pnt_list]) + else: + # First and last point not same -> LineString + geometry = geojson.LineString(pnt_list) - return geometry + return geometry class GeometryField(serializers.GeometryField): From 2562b9494795741f2c51467ff3b88b5618cfa2be Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Sat, 16 Sep 2023 19:58:39 +0300 Subject: [PATCH 080/270] Add support for geo shapes and geotraces Signed-off-by: Kipchirchir Sigei --- .../libs/serializers/geojson_serializer.py | 51 +++++++++++-------- 1 file changed, 29 insertions(+), 22 deletions(-) diff --git a/onadata/libs/serializers/geojson_serializer.py b/onadata/libs/serializers/geojson_serializer.py index 35adec674d..a2608225a4 100644 --- a/onadata/libs/serializers/geojson_serializer.py +++ b/onadata/libs/serializers/geojson_serializer.py @@ -9,6 +9,7 @@ from onadata.apps.logger.models.instance import Instance from onadata.libs.utils.common_tools import str_to_bool +from onadata.libs.utils.dict_tools import get_values_matching_key def create_feature(instance, geo_field, fields): @@ -65,25 +66,23 @@ def geometry_from_string(points, simple_style): `simple_style` param allows building geojson that adheres to the simplestyle-spec """ + points = points.split(";") + pnt_list = [tuple(map(float, reversed(point.split()[:2]))) for point in points] + + if len(pnt_list) == 1: + geometry = ( + geojson.Point(pnt_list[0]) + if str_to_bool(simple_style) + else geojson.GeometryCollection([geojson.Point(pnt_list[0])]) + ) + elif is_polygon(pnt_list): + # First and last point are same -> Polygon + geometry = geojson.Polygon([pnt_list]) + else: + # First and last point not same -> LineString + geometry = geojson.LineString(pnt_list) - if isinstance(points, str): - points = points.split(";") - pnt_list = 
[tuple(map(float, reversed(point.split()[:2]))) for point in points] - - if len(pnt_list) == 1: - geometry = ( - geojson.Point(pnt_list[0]) - if str_to_bool(simple_style) - else geojson.GeometryCollection([geojson.Point(pnt_list[0])]) - ) - elif is_polygon(pnt_list): - # First and last point are same -> Polygon - geometry = geojson.Polygon([pnt_list]) - else: - # First and last point not same -> LineString - geometry = geojson.LineString(pnt_list) - - return geometry + return geometry class GeometryField(serializers.GeometryField): @@ -127,17 +126,25 @@ def to_representation(self, instance): geo_field = request.query_params.get("geo_field") simple_style = request.query_params.get("simple_style") title = request.query_params.get("title") + + if fields: + for field in fields.split(","): + ret["properties"][field] = instance.json.get(field) + if geo_field: + xform = instance.xform + geotrace_xpaths = xform.geotrace_xpaths() + polygon_xpaths = xform.polygon_xpaths() if "properties" in ret: if title: ret["properties"]["title"] = instance.json.get(title) - if fields: - for field in fields.split(","): - ret["properties"][field] = instance.json.get(field) points = instance.json.get(geo_field) + if geo_field in geotrace_xpaths or geo_field in polygon_xpaths: + value = get_values_matching_key(instance.json, geo_field) + points = next(value) geometry = ( geometry_from_string(points, simple_style) - if points + if points and isinstance(points, str) else None ) From 65a64195496e0e39e307fb279c0942310fe81b05 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Sat, 16 Sep 2023 19:59:00 +0300 Subject: [PATCH 081/270] Add tests Signed-off-by: Kipchirchir Sigei --- .../api/tests/viewsets/test_data_viewset.py | 109 ++++++++++++++++++ .../tests/fixtures/geolocation/Geoshapes.csv | 3 + .../tests/fixtures/geolocation/Geoshapes.xlsx | Bin 0 -> 7909 bytes .../tests/fixtures/geolocation/Geotraces.csv | 3 + .../tests/fixtures/geolocation/Geotraces.xlsx | Bin 0 -> 97603 bytes 
onadata/apps/main/tests/test_base.py | 64 ++++++++++ 6 files changed, 179 insertions(+) create mode 100644 onadata/apps/main/tests/fixtures/geolocation/Geoshapes.csv create mode 100644 onadata/apps/main/tests/fixtures/geolocation/Geoshapes.xlsx create mode 100644 onadata/apps/main/tests/fixtures/geolocation/Geotraces.csv create mode 100644 onadata/apps/main/tests/fixtures/geolocation/Geotraces.xlsx diff --git a/onadata/apps/api/tests/viewsets/test_data_viewset.py b/onadata/apps/api/tests/viewsets/test_data_viewset.py index a3e66b5e39..30ff050735 100644 --- a/onadata/apps/api/tests/viewsets/test_data_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_data_viewset.py @@ -2328,6 +2328,115 @@ def test_geojson_format(self): self.assertEqual(response.status_code, 200) self.assertEqual(response.data, data) + def test_geotraces_in_repeats(self): + # publish sample geotrace submissions + self._publish_submit_geotraces_in_repeats() + view = DataViewSet.as_view({"get": "list"}) + request = self.factory.get("/", **self.extra) + response = view(request, pk=self.xform.pk, format="geojson") + self.assertEqual(response.status_code, 200) + # get geojson from geo_field + data_get = {"geo_field": "segment/blueline"} + request = self.factory.get("/", data=data_get, **self.extra) + response = view(request, pk=self.xform.pk, format="geojson") + instances = self.xform.instances.all().order_by("id") + self.assertEqual(response.status_code, 200) + self.assertEqual(self.xform.instances.count(), 2) + self.assertEqual(len(response.data["features"]), 2) + self.assertEqual(self.xform.geotrace_xpaths(), ["segment/blueline"]) + # test LineString geojson format + data = { + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": { + "type": "LineString", + "coordinates": [ + [36.790503, -1.283987], + [36.77264, -1.268026], + [36.79411, -1.266191], + [36.790757, -1.283009], + ], + }, + "properties": {"id": instances[0].pk, "xform": self.xform.pk}, + }, + { + "type": 
"Feature", + "geometry": { + "type": "LineString", + "coordinates": [ + [36.809057, -1.269392], + [36.803303, -1.271966], + [36.805943, -1.268118], + [36.808822, -1.269405], + ], + }, + "properties": {"id": instances[1].pk, "xform": self.xform.pk}, + }, + ], + } + self.assertEqual(response.data, data) + + def test_geoshapes_in_repeats(self): + # publish sample geoshape submissions + self._publish_submit_geoshapes_in_repeats() + view = DataViewSet.as_view({"get": "list"}) + request = self.factory.get("/", **self.extra) + response = view(request, pk=self.xform.pk, format="geojson") + self.assertEqual(response.status_code, 200) + # get geojson from specific field + data_get = {"geo_field": "segment/blueline"} + request = self.factory.get("/", data=data_get, **self.extra) + response = view(request, pk=self.xform.pk, format="geojson") + instances = self.xform.instances.all().order_by("id") + self.assertEqual(response.status_code, 200) + self.assertEqual(self.xform.instances.count(), 2) + self.assertEqual(len(response.data["features"]), 2) + self.assertEqual(self.xform.polygon_xpaths(), ["segment/blueline"]) + # test Polygon geojson format + data = { + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [36.788843, -1.297323], + [36.799246, -1.292646], + [36.797564, -1.299639], + [36.789099, -1.297537], + [36.794943, -1.296379], + [36.797134, -1.299167], + [36.788843, -1.297323], + ] + ], + }, + "properties": {"id": instances[0].pk, "xform": self.xform.pk}, + }, + { + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [36.79198, -1.29728], + [36.785793, -1.298009], + [36.789744, -1.29961], + [36.790625, -1.300146], + [36.792107, -1.300897], + [36.79198, -1.29728], + ] + ], + }, + "properties": {"id": instances[1].pk, "xform": self.xform.pk}, + }, + ], + } + self.assertEqual(response.data, data) + def test_instances_with_geopoints(self): # publish sample geo 
submissions self._publish_submit_geojson() diff --git a/onadata/apps/main/tests/fixtures/geolocation/Geoshapes.csv b/onadata/apps/main/tests/fixtures/geolocation/Geoshapes.csv new file mode 100644 index 0000000000..0b8fda26e8 --- /dev/null +++ b/onadata/apps/main/tests/fixtures/geolocation/Geoshapes.csv @@ -0,0 +1,3 @@ +today,start,end,deviceid,segment[1]/point_position,segment[1]/blueline,segment[2]/point_position,segment[2]/blueline,meta/instanceID,_id,_uuid,_submission_time,_date_modified,_tags,_notes,_version,_duration,_submitted_by,_total_media,_media_count,_media_all_received,_xform_id +2023-09-15,2023-09-15T12:53:19.451+03:00,2023-09-15T12:55:10.386+03:00,enketo.ona.io:qrggmjketRepjz8x,1,-1.297323 36.788843 0 0;-1.292646 36.799246 0 0;-1.299639 36.797564 0 0;-1.297537 36.789099 2 3;-1.296379 36.794943 0 0;-1.299167 36.797134 0 0;-1.297323 36.788843 0 0,n/a,n/a,uuid:cb2f1dae-47f2-4919-9fea-5ede2fda841c,122094074,cb2f1dae-47f2-4919-9fea-5ede2fda841c,2023-09-15T09:55:11,2023-09-15T09:55:11,,,202309150951,111.0,kipsigei,0,0,True,764655 +2023-09-15,2023-09-15T12:55:10.436+03:00,2023-09-15T12:55:37.212+03:00,enketo.ona.io:qrggmjketRepjz8x,1,-1.29728 36.79198 0 0;-1.298009 36.785793 0 0;-1.29961 36.789744 0 0;-1.300146 36.790625 0 0;-1.300897 36.792107 0 0;-1.29728 36.79198 0 0,2,-1.297391 36.789659 0 0;-1.298163 36.78296 0 0;-1.299751 36.7882 0 0;-1.297391 36.789659 0 0,uuid:6ac3e33c-d064-45e0-8597-880227e36435,122094094,6ac3e33c-d064-45e0-8597-880227e36435,2023-09-15T09:55:37,2023-09-15T09:55:37,,,202309150951,27.0,kipsigei,0,0,True,764655 diff --git a/onadata/apps/main/tests/fixtures/geolocation/Geoshapes.xlsx b/onadata/apps/main/tests/fixtures/geolocation/Geoshapes.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..1f82673804d5a7f08bd2858d0b07b9b9de156c6f GIT binary patch literal 7909 zcmbVRWmH_-lExi^YX^7NAi)WQ;1Dcm2s93j6I_Brkj5c61b5c}!QHK)(ICMkK+u7E zXC`@>_wJ9WUVUV(U3K=ZQ|GJNU#ZB$BjCfJprF7QDRF4Q{3hrR*M<&2Yex?D$73nD 
zL+L9gM$n;081n_+k~m7%=Vk@jW-4Ctc3NAq+R&V?4=3j@@Nh9cR(*}M_x8Damj_j_ zWrcFm%NvAhya|PCr8oA>){kFhqisH zC_fInWxQtsBO)H`ps(S|ePO4ielxgl{r7+Xh3Ky+Y8{lWimRdavEi@*0h-c?taK%Tw}4;g5k&5 zBPT;GE{$N;tqC|(wzFgTlc#F5(t2{6CV-9%kM*_<%mA69Trr@GmldkiQ$!e8k(Vz%Tr#2x9$a-~OMJA%s%W8Xy zX81}LP7@v3e9oeDWJEAYwdAhxP=dbG=(B)V z_){=8GMr?vt=>8Ie__9HSoVFP1P)H$s+C&}pkS+84kyU_d`jUGN6j@xQt^Y@(UL~; zeEpU;Nv7I@6J_SkeC4`6wj+4_J$DFbJ&B&eCfn^M)^k3aeVfSnhK-c81AYU-(fsT(+w6CKdl`4Sm zwQT73e)%NcsFV5CG%$X;t)ysQy{M*$nL(hds4~WWA@_^74MqTG+oXc2Yuvmfxvz)U zfrzLcMBC`x-B4u$UfXH==r8m^T&F28;qyV&27qP0gzl27 zO-~8+WVj{oVJ4!;J}pPr#Cv0C)k-D7Zhem+sm<& zGYle~sCi2$QF_MOLvplqgw}<&pEj=!REjM83*80-3L#$k&g^4UD4-E!^-o}h>b^mCiT%B#|58c6}6J$BcT-EKMGZ=JV zDj5zZu_P1HW$)uB&>^yimb`XPjyEY){CrwQb8K+eO~aDts;sTapxI{0 zf^zNIo0#j!k~|O@B=z%(XG90A3uZDe&#Pa?h&zcALQMQVbp$}0=D&!yl;8Rmjrx)s zCON!!M%I*;2Tal*VZu(u6Iy1G_X$C(+tfkBy3^`itR7x+Y~oq}B^c{VZU&<>4>KRL zbBvmSVYM613Nwq8>>6m(F$kb#vh;Mf8K90juhfX{X7hzsi4*&*}Bmp*I@`-}^* z%xnN@kDHQbnhTBD66Fm!=0qhKzUFm5PLb7Zgc26MT#Ssj76sl&DXUil1hIIUTo_kaJrYRUo1bg0L?r+kZmt=ls_2 zRICyA1KO>r(@5}>xz@*t>n!Q(fgfkvvbV_FL3rs8ob=>z zy$8wux5u|k+vB1hyeK?~Tnn}rxovk6ZhJ{Do}Q=Ezrye4$~8~XA<2!lwcoo|;SEWZ z5qER?+-rd;X44=^UIxFNEUHeWqP!{>;ErZhD&n$q7DIcUkNJ&D)PuahC1P}QoDbOh zBF4K2o8KJaYdWo;a1-G^A8D_zQ03ix#uHw-Y{pl6jBfqw)f2kq1tO#~Pi29|#UB;sF*&CwhPbaLeQ>%{elO|3c*aH;^d z*K&F0`>5fi=&H1V%Y(|`DW$5y1%9LrE`kVcDi3zUFW?5^db>}-m4>(iLQeHpH316B z_k+9as#qZ#TuR7A&~bIdJgUgGGq=pUOVQhnA~~fGylA+evi+`t9DWQQmEWP$e>q{3 zR5wDJ@XWMj8WweJc59GsraWB1$g?*2(9fQ^mmutV8j(qf`(z(nmSv&jO4L=TqkXAx z*M$VNUc>%q->{q&M<|0L2RG0;T5q4AHDiz?Rz(!E5~x@kmXEX6KwP-LlVAcO3P#v^ zFZG-X8lTl)ebxl+7j|9EK&V6SEB87NB^jC9NMED<45F5Co+Ik{f&;{3z>ndJOkz)^ z?8hM+^fx;T*^W|(m^{2BgD+Y=s90vBAV1GZ%8MQ>!>%DIF#IvvR`#ZB$x%Rg>@IX% z9@?OJ!seY^#dz^5T1y5|fX2Qi>~)ul+&SGPzc<_hP{#j(2XwFt}o%H=kH) zvT9$E3X5bT1ma6G*xraJCMEXPFX1U!qT?(eTnC&GCy6OA-?U4T7ch-lc3Q7RToY%} zu2yTrM>`S?`56}uJnw1l<&@Pfb5gWg6btF48mMkUgB}@d}}t)Z?P0@hskvR;P8 z&Gts@^um5CZJ9C>4=O^S`uJps*U(HkNv&ml{oSo<8CAz2Bf(n8qTLR+1Us7sVdQIJ 
zDA6(FPoKOw8`Big)P$ZUS13M3$!^$ai<>^dsJi+ymVPy6`pHp&Y>3}?7wgGMz}GUM zi3ZQh7J6-P2^PhBvu|n=K(3W+Y|U+7+;d-)f>17G>APlzyI?PRclNt)Ckzq^{LHM2 zTh@2Lx6-IK>x_~X#k#AGL@j7+uJ+7#qdR0AR0bG+TeX z6Ri3vZ0w4yIlB+n%=$Vu>}ip1fn5uE^3C0OuJNyg@?Pe5=af2IU$5(*%O$oxON43^ z^|I+V*&r4>1Gd)n-SgzjyYsTVTEz5=&tXfwGz@aw&)iQfAYO}+y9OO*wb$D=kf>Fs z5|*hFgMG0pd-#c{dA_pPB}?Q9NFAmAmx1q7#UipGNWjpVvu;aoSgfV^2cwJ?qQH;mkK*nKw!PqRgYPHTM5dg+Tw35;zG-PJy1$0E7*MvFonf~mSL1?ctAyIO!@e%HO+%@$5uH26fke%q=H{sh3&8){BqnPxCA z%v#Pg(nY{BnjNdLWFw6XUY>6{_}*li6lr8m=wbSS%ZB3nOzV`_XA_B_KI0n$PsbrL zFUfIZwlBv!-)xrJ)Cg=l89F@z3?~kCg(@h6g+|_;ho=W?0#8E%1i~AI*5oZ>;tOScal2D7 z2j9!(bM;4+SPA7BvAxBD2rRP=e$2`fx3h25btLUt!l##Iq`7 zn-eF5I4n~w_Rqcz5zZ%h6Jo7}TlO@B7Q+I?kf25lbAiX3fIMLvrtsOZlcZilJR!${ z;c7f@tYoMjax&xYSXuR_fY@r`!96~|cv;c?B3Bqa2?w%)0?`HHTPMqoFLE6D3V1)? zyq!w~a7n){mDH`Gzw!Xi{>ZsR3o;c{$TzyR7wX2R<%Jx)0*~H2IbNdXBA;1BrbC`4 z@hHL$rt7LVmgTdXxh&7U^mBmIWOJ=OP8AqJyhe}|2RajZ{s!E-A46SK77+RP@PuO=_J*RLs7 zo?N$GP`_v#!G@1nWBfk!Wp0$CRAvO(3I92Ze5)naSD6q7WA28SdD_&#n0m?0l_}Lr za=6{}2J?g1gGI0Jh4=eGiwya<-0e&D)qL{0Tou!`xS5^GVa{Bqy&akSp+Wtb8mQ+R zcwAvpW34ceKhXE2jSHuVls1v87%*;Z1oX*Pn)f<+uAI=o%-2?3zydYXYs)d~{2IpH zhD9U-eB(~ggw6=Ah*aE>a^tPmmHD$i+5NX`s+^=GUeE;YLZ(=7A@_aedb$yk>9U2fohgnQ!fTQf+078exgPT9Z$n2LX zQx^n+MjX-&kZIt4km<9Kb$g87OnwD$!IR|;$!&Kniee~{pn?vT1fT!lJ!PS)eSxnt6_mBN1K%r#UIgj=fDFo0 z${EQt8kV+5HySw8@uvTtoo$zwJVwf=oS>jSwHMiH`iWVd#W&a@{ll6bN1-}H(M(I{ z<@%la@{ao3J#rEi$}}#Fr`bI9>+B>75Ps_`vd0oR%AI?B?_s}={J?_!XYPme?-KcM zmh5*1=r6{s6+CMBz?cOedZFc9uhEhjM+mgWNtEke!1SoDjdeW(pXYuC!LWPoBCenKt@JY;`~uDA1s5c9h(@MMaDb0dZyUu^+iZ_y zuD(!|VTYt>p79CacPqv0+{kA;msj9pP|W!r&bQf>{P=I8QQtoe{SX_R+A^_a7a|6s zHZx?k00)1vLYBFUWhnLyu3;ZNWcRINXW?P}RsYD2;s5O+M{^?wpvh||2Mb%XzsQYv zO-0bu14VYqQUbsR%vq~SFq%wCNqelSgl3cHgo>nUJM_Hq$=zV+Sl*^EcByZN4WNJd z{Ln3ZYdGSyp%+Ml2*vm3%KvMlF-FP4y##*a{Uv-OG~ z2Rs2IZkcweI@pgW!rF4X2x%lNXXeUWh%VHn0rfOJD6OouunYlH=`Dn~fDuXrS179n zqRtkKdn3i=*OfHZZqCXom|P&y_==NDbc2S;Wryy^eP~mFM3*eIom$dC8R?CGv`Igw zK(!7Fyz88veB{KJ+NsTW<+ 
z#4C()_c(dH(I*>((^QAggN3qGqu@_ml*UQt_3!g^af(n+hiVJby>pMAx1-wiarg~i z_4#37iS}(c2A5hOe(>mHuf-}DouG2}RDzhqR^%$%8F74YysPvg*Q8e-v9z}+aZ9(& z2-k6OxaV)$$cWrzc#Nvvpr#u9hlr{|`KRv==OL<0K*lN#AUj76V~_*zF-3c+>4W?^ zANaCzN-y%bHN6^Gg}UziWLDMcsQNO!O0u}mrP=r1hyjhmd;3sFrO{hU;pcII^~g3a z=C?nWZI094VlrNJzHSf|HHQAw|0l|3q}d9Gb2;^2KUK zd9wFkNPq&q4RMBrf!X{=B*6atof+BLJ-)KEXhl%xV^86lK%=Wxkx4gPg|x?2Pd>-R`U6Js440D;-`Axj4U7P1$OzakGmA4pl;G z9)BjQOzD>>=86=3!Sz_4aoANaIJ2a$iI>pU+b=TTFseavN8YxYa>aWG?{msxl_x}z zDs*YQnIZm+BIDb)L=eK>HBEI##LOzTL{zslPDf7n)I47sLGFlc-`b=omehQj_Esmx zY%6Yqm0o^Z#@pk)*kQY3C1XlcK0i55D`)IR@`{zZp9_I)QBda@|NJS|=xd z;ObI7$~;6JIcB0JT;RF-MKrVy))CaF81Cgk<9`qtOd+Z?DRL`Sf<2F8*~r1d1jB}j z#lQx-%@cUl(ZBMRJRtKM5^J-|tR4+wNIz^iE2WiSKE7q%+{g-MqM$K5jWkFt-Cm71 za-=hPZw)q9NPSv$J`nFGwWr|EnLzhnG+gm4{7^w;6=!ab2r3FusWovp#%dn>7V_^Hb;oi0oP7Q`pfu ze68vdVExl7#T$!ZT1xC4-@OXFl34z!k7;ZEBj}8%pUjtLn2ThtEB#)UV$$biJaTDO z_+}iXDNJYO2{cHA=aLrSaPbr6GH|*l5EHoi9*?4tMu^W3rG)5~ra=p1iXlI!!BODI zxORTQjZHZLWYHED#XWJuW(snq(|LTwYsrvgS?~%LwO&*fTe?RyC zFqwZ^`MvP_S1ZR@|JKT1+rvMt{9YVA7Ky)v=HYvOn9F}C8UM8Ndpdk9rGAO@!|kIx z{w%EiwD3DPKc>xJl1lh*lIWl6zhl%Rg8vfb2k-oE4*qXE|5N*SBzVNJUlL66oAxhM l`_sVh0s5~73?JP3Z{R~k9`PZTVPMc7emxKQ2o>;n_g}tflx+Y2 literal 0 HcmV?d00001 diff --git a/onadata/apps/main/tests/fixtures/geolocation/Geotraces.csv b/onadata/apps/main/tests/fixtures/geolocation/Geotraces.csv new file mode 100644 index 0000000000..095422ab9f --- /dev/null +++ b/onadata/apps/main/tests/fixtures/geolocation/Geotraces.csv @@ -0,0 +1,3 @@ +today,start,end,deviceid,segment[1]/point_position,segment[1]/blueline,segment[2]/point_position,segment[2]/blueline,meta/instanceID,_id,_uuid,_submission_time,_date_modified,_tags,_notes,_version,_duration,_submitted_by,_total_media,_media_count,_media_all_received,_xform_id +2023-09-15,2023-09-15T12:39:32.676+03:00,2023-09-15T12:39:52.828+03:00,enketo.ona.io:qrggmjketRepjz8x,1,-1.283987 36.790503 0 0;-1.268026 36.77264 0 0;-1.266191 36.79411 0 0;-1.283009 36.790757 2 
2,n/a,n/a,uuid:51558e36-de1c-49c1-bf66-199060b2655b,122093541,51558e36-de1c-49c1-bf66-199060b2655b,2023-09-15T09:39:53,2023-09-15T09:39:53,,,202309150939,20.0,kipsigei,0,0,True,764649 +2023-09-15,2023-09-15T12:39:52.858+03:00,2023-09-15T12:40:31.290+03:00,enketo.ona.io:qrggmjketRepjz8x,1,-1.269392 36.809057 0 0;-1.271966 36.803303 0 0;-1.268118 36.805943 0 0;-1.269405 36.808822 0 0,2,-1.25616 36.865203 0 0;-1.251569 36.86919 0 0;-1.255495 36.870234 0 0;-1.256203 36.8654 0 0,uuid:39c3ee3f-bab3-4fca-81ce-011c6f26eb98,122093563,39c3ee3f-bab3-4fca-81ce-011c6f26eb98,2023-09-15T09:40:32,2023-09-15T09:40:32,,,202309150939,39.0,kipsigei,0,0,True,764649 diff --git a/onadata/apps/main/tests/fixtures/geolocation/Geotraces.xlsx b/onadata/apps/main/tests/fixtures/geolocation/Geotraces.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..56c9d5bcd2e40a0e17721f50d8e5cda77fa312b2 GIT binary patch literal 97603 zcmeEubyQYc_b!405`qedbSfPJf^>s)gLH_bq;x7Bf)XM~E6qzt2}nze2oloL(%p5} zegWlt-??M_?%%(A#yOrb&f06PIiLB=XU?_XJ&uAj(q$Y3WCT>Z5{E|(Ax zaF7uYZX%!}YKd4|Ju|j?rl;a!V{EU(%^c>e$Y^}iT_uPUup^(@#` z>hl7k)oNb^)3UQsi7JS+sD7iobrxp0F|>|>b6kD1C^K8bSJ0d}$k1){-p!g({YUDp zh=>FxY+VnBC@#I@dAi7cGikD{@4e)!sNQTlynBU+I6lWqveuv1y|1ckeHDE;mGt4u zXJ1W&d0vfCTJ22SHIuhe`RlQ9FBVIOu=!26|9k|t#K6cVLG^eDp`j=|2DT6sh}CdrOZ4fQVcf#>)3^?ICjh?-e{ea zBcG(o@1ZqxJAXD>K*5hoXvn+oK*}4Y&rT(R`&*}XQHP0sZ@S)cVwi#Nnw^z}VMw>; z?aE#cUjY*F+jTA{76w{51_N zS>j8Xi>t#Wfx_^)UxBxgXJ0Eb)b8p06c<+>S;x5i9Ai<~Xl#0j)$X$Hu%qccHJ{N2 zMx*qd8c>%fCrAhi|ESm!RTj!kKtmZ&km#Ud_3Vr-?U|XNKmUKF{x2@i3!&#m$jUdc zVEC^{9tO1R^p3p37M6ArlB^_G@$`N;j8*dTJr&_d&0QjF6#^e539l;8!?vLj-k0+& z-lA>~5jSQSo}^C>e$x^;|F z{KuMTA$BK|I>FVOX+-{be6i0pn`9r28t&#@>J?UAg83JeaVF14cE0oKf0I6kDG>NT zX0baKx5eJjWFXJI#gcq$k3dz~^nqD{VWJ&3g^S)}%i0amcQrSU+!^KKTa+l+Z|vUd zlxU_--ty8cW?yShaQVV{wIQcvwe>{^Y3As^7Rdo3^ST`pXmFs*VF6^EEt#FH?JNwf ztu3IImaD2`{hkZQL(l94X(Hi8oqP5i4V9>tnWL%$Hyd-b__eo&xGWa$s8l&lJn|D> 
zrFZZ@;~}KJQ_37*@7`IZ?K$WGD@hT)H`;0WY)SvWY@!fhn@?wz~4VK_-eOX|t?w7p$3s{kaLoSZ?x~2|7aphf)bW#^4 z;ruXtCr{)Rk@~!3!gPD*$JD7d(HqYp zt$vDz)mwAV(lv+7gH*&CabQmzF_f#*^6|cKO;K7H^oP3(l#b-QPq6k??TQP9RUlEy zuPWSDjM`5LyKMATOH522M`u;{ITluhXRkf7TbGT^CkN>hlGtPgjbF^)(I{!zKEB}; z<>;dsjIC-}u914>bs~0Y^4PPr%%eS}epx>E;kSif0&gRuGz-xqc8a%8#@bLEDc)f? zzxTazk{^>qepRmpSqU*H{~E3vrb*ZfQV-gDo0AV1(FE3B3j+VADaE(v6e`uB}NwC(0=4%I`Nb-Wsa^XuUkO)`+RGEvy}|I+=D?* z@v`;CTdw9y)kW14S_QH^V?pv89+`PKYF*}Voru@<6?UXJ8&KeqwC{(Glbpw@W z+z!{5Rg)>$W=mMDJzwDG#*O}A*&u=x(%)+OGqEJ<0lkxkReD@Mf2pRKlu1X%dR2Ka z%ZJ~gU4BNRegcZ`zkAr|)gu~2P`EvyLdYG_J8+ZpCdcX zfuuLB_*D6HdDuo+8Aowfg$m_s?I7GyocR9b{+sQ))&h|t6@slyDYty64m||EXB>@Z ztbDnJY@VR{ikcR+I4wtWh-R0oil5hqtgo~?p(s;d@P&x2SD9CXoc*C_`W97cF!dGx zft3#*y7lvEZAqTS_b_e78QmtBb8zXyY~z(JyLZ^@ZJ16Q7X88|_dc1NPx!YJi^>Ws zX?NRclkhkB8ZFkhzCFf%5Ip~#jQ*`ARdef|pf6Jb_0Wd`{s8E*_TVD?tw%#okda2DBJ4;!<0e zJ=ouN;ESb{H|u`$0gti0D~R@)$JX)4I@#3wLDqCWvSK<39$2LqHcf>QregJP4Rm=r za>CSc*f4T-WN4J5vNDPye!7r|bBJrbux>rJY9lTEwUZcdTrhQpt+mKo(~(Cc;<4w6sSm|ggGX4mH`MQP#7m`!rg@N| zcy#1Xn+V}yyWXkl^SX|F_xWS_VT{$=xK%JJUcRDK6OBW04OjnI$JWNgm+{k=e4UwINFS`=hgO( z3e0w$yS9{H-n-af&H8%(mTdq0Bj#E<+NgLU-iUkM5V|kkC^T6 z+CI(i^4XM9d+h-e2x&lRD&u)izjo6!e{D9E7gt8{6MphoIOdyR0{qo6i%;dhep%QR zuhmLd`7mrmUcPHUAp5$^Kq^1rNmtbi&qI7Q%-lr8gd&{4u!!9a7w(3QfoU8TzcfK2 z8rE&ww~dte$;0=^xW(t2ZE>2MeMvG0l*q0{Sz40Md3$q_rKboa+?jhD_7xdb6s5Ik z2RGNaSMb@7t4Hj_oD-Jbf<$E3581VUV_`_Uy%SnreD%7>Z-`%k>^9BD+EVQ|*d1HV z?zQyhZ~1p@Q&WrNqxfUTg4&nD6P#oxoSlnJ3fS@=4ikH49eJrdKhoWWQysARaJdi=}#Ll&Pf@m8>oo*opV<9Ri7Oz-b+Bvp*$mtnnB zmshm;sn?N7`sAn9>J0|=8jF1gjing9fTMNumz0Dl7E~lgmG;Eb_K7-ojZs3sDpQBu zw~8I6f2xzEe|7oOX4G?f)r1>)qC3K---6n6N3^m$RBO$0-wXK1*SEmzw|k$H_G+3i z&M!9gF>GlQ+=+6Wk)dIe>HI7?N-LLjRqkG$;?4MCd)*gl37xlJ5D7g`7F8HP&kxAH zOTqEFDWZdAUPg{S=w^V`?|81SETNNE5?B9h5+y>lyL{!v4IyJ@h6ysIn`R8xZD0M6 zn)p%}dn@2JvTg3#+}mGNH;Fjv9`Zo}-Vm!n~H6_W6$D#t1@6;UPWro6|Zb`)1$M@aU*Ud!Tn4Uc?@gsAYPUu=IHw;aO$t9vGj ztPM6OI4d^MJ6Z*p^1OJDvp)M#I3p^DUkM2Ls)ZWHt!sir7+kM8Sxgh8A}S|3|NO0y 
zA*#Ari@;4YYTJ(-CYg_#)!D9fH$EY)3Gwljm2y|Q%ws$qS&v`RV?MzrM|D>Tn>I-0 z2s6+xT(Ki_?zJ=ifI0CL@?*I$`(wRd8_XmE_2UYI%ulaUHqsbJv9{gD4V4ndeSh

(m@k(S*11c!f%k5&~t| zLgSoWL$2~TogBp+j!}(G4oJ&SK1N>bsdH8JxjsJ`5&a{7k|NTsDp!_cBU4mO!I+lrJb^)>VfQfYM`1+;@6YO>C~u#dJnahK)ME(SDu(i1O^|qF{n4S zg5P`m5cd$XDjwY;mS^d{j7?8acQ7c3Q7@D$`s48@R1<|>FO)KCq=$JkL)*Xe1L%&e z6lm#6t%tEZ4)08rIu;mH2)tO_UzcpRCuaz#@U1&C>$Ds3zNG%)aZOEf?oYEoiANO# znrs4LlyP4TL+{sDlq|}I?v8T9jz53i6xPAkh`scji=s#V1~>ZX``QQ$v@eQ5V|=(R z?t)}tl2ghAvZkne!!v^ZCuli;3Bh*bH>3_64D{#C!nSWGP<9V>x>~*wraq_qUm*TWPP$% zb3p2KeB3OsW-UL)JQS1Ub$G+TUMbE&t@$Wp6FO>*R0}tHz7Y)1VOtcSX36 zPog7_omtV|-SpehNdYtn}s2E|lkj&lneJijTm5&r14S_Oe}h zu&eYs&;G83FR=#`SP74$bZF5NHkuD!Nnv$}t9tq+jQ^OxK*Cmc$OC8aILyE4Sy)X# zQ_u6AKilZRwlT+N9ik#ecL)gB;P$xs>%-*a{rLDAk91zbZkwBqtY5{|EG!G({!|tn z($k|W7nkRyGw&4y+gl8W6_>3Em+*b70BiKUJ-W)y)*=?wmwm_XyV(=Zk!WW0FLgCe zjH)rIE4vwtS7E~D=uSvHhZZ|#N1LSkwtsKfTGRLEW>rI)xD@AO#jo-s>I$xxv(rocBEQ@ zlYo%vFn0mT>p=*==0=I-&k8-B%^xwPW4un1olCuC1uK+T2knnG3s?5q?pu!J69*rJ zxk=3K-!D_2tn{=CM)?c^u5KKgag#7@9CggIX4_tU*IPcKg-YV~uCuPFr>rk#3VSR7 z=4L57GldH4B&KICQ=cEAG|SKEd`9(jLAp9Fp3s%Z#C#+GCSkg?ebT1te$51j_~Fog z#L9#5vT}!T*xWUnFH76UccSSJhjknFth-CAzuQ>%R0Oo`l}QX+tw_?TpOja<*mzwG zV}6E9T(X>Ecf54HY{+&(dg05;w@Bq-fk_c?Qu;Nm9fJk1P%h6fN5c8JC}E-w+OsD` z==n}-=Vx4A1KeCUv9&wZxZIo0-L$739qQu-3nW?_s$9j}c{4UXi6+9D05Y z*7jiKhr>$X&{F)GG>=?4pZs9g<%BhpEbHdIxT?(p$oUE<-@fIs4|d_r4AZ|M-@CQ@ zG99p@^)}ca1Lv)k%A1)R+}uq)(%7nxo?49r)bLF>x|z67lhk0Wtfg#m=y;6+{7gNh z)k7$)%A5qUh9_gj`S^>gr?0L+uKnnp01h0<((UXTJBMSztOuMh&jQ_hmmBSNNV}}( zC$gYb?Fjz!&$xUY+sZ0RV5I-lh(&*a18FIVZDqn}iQ4j7hVv??nbw$TswV`nHE{pS zl4D-I#WaamFp30lf7(KQ7jhGJ3jNNm19a#Q<@>smGkhE-3%HykJa5~cdJ>P$h%9VI zRrP;zk?$DrXq4T#mTs4!7vQx2otv+Vnn4COGvjf%KxtLsbUkNyv46aSzgSM*g^1T` zw7ElK_}Pj+-Oho7a}BV4Qzv4GRCy!OGXc|h_*wotRja*))i73x~uD~- zpTpmUgJ<>9ClE?gkWgViqL@3N=x3$v;f`O^C*=12&MmvweI5;P`rN?pZX#Wx){cYS z{?jPW9`1N%cI_G&=gM6F07tW|lfSW6*dgR5ui5TZB##6l~3m2YPYM9d71w{q+D|`)|DJM76XehsO0RXVjeIJd$-dQ|MH2c=Q6R(0)iFRlivu54UC>C zIRgbQq$m5R9aT<@iCQQ&ZY&}wHgER(u4c?k=Paa^gJ2Tlin=g5Mr!`-^Fh-1ZGPW! 
z^hf@jIo|z|xhKppq)$9=m4|B(eEZBgMlH|tQCGYZJLL1AQNZJB%=1IlMs$nxnXEa9 z7<AM1HcuM>u@QN=h;?umonOMLTqUudh5>f*8<3Yqc zZ-?6q8hwJ}Ct1vsI*EfR=)M>VcFB95?y8#F6ITp8bqe82^EVALx}A*}Wp|Vp95<_Q z>Cj%AI*Fd1bsOc2MI*ZQ1R=ICTbFcN7<^&gXP3d?xv9yGWviro(xiLWJbb^)Fz?l+ zQVtS#*#;w%@jL!R?D{7B3$-&Jai)L8)(Ufs%bm~5v7b|et;?b!6+6gL;#qv^rHJr^ z!dEK1GZrk*26X)Uf<_mKIpOK#c55xd^&(&JmM`1dLYf(hB**9E{#ELbddpic04g}t zj)91yBTEOYPZPKGe1zU3U9+dyOtjfu6J5x{5Wp?vC1md&qK7!q&B9FJ1@=Be9x+4I4tK~piXhuemZGd z<7PO3)!|aPO=975&R<+1%2hdbp5QxlZabADU^SsntI-T}m`f-bVq+%0r1G23o6Zir zxRhiImd@(&S?yA~oAP&S+;DJO00FPs@%+>Un<7s+N20<)vQ>tAhra9h^#$>p-9F7% z+_jRC!sQe-cd(%D*Eh!~zzh@(DopswXv##wVSVk62r;bPo`jgTOH}VbBYQ-N*skne znDpk-rp5MMSc|f)_K%x9gmpe*H$B)sNWDhm4b}Wb?ik`y>|fCN-mqm=@v^KSDhWf} zsA(0JVzA0DgC0bzLandw9o#)gaBJ@K6*Ax8)nM<6(a6Ths%^-;LSk6I=$0WY27 zwXs;PBgk-P+C{@wMV;>_wpX))8+^o+kHwdCqA75l@k?q&7V$PrJ4}-#8Zjp9@d8jn z?(9p+U0lO70&A>t^1KJsd87<+bILrbe;uOy)HXV(Esv`egcTTnT$j2RgolQkCV7QK zoN*PqvXS@2*5Z1mv%I^{)T(}$j2?5g(g#GW_jt~MBZee(N!!l8)Hha0zW(B&_6fl5 zkas~;h^FYZ%DPsr^@TFG{%4h^47HbQ*2-Z8cE_d=4SzO8G1xA0g(OUXPi)8YNSnUf z(}q)LUW;TA&tyMc<;?AdwG0E*ubw9~dq3nLKVzTF-s|?!H6?>WhFB zM6BkdM2g9M#r2F?&Ga1rHJb-RXgJ#peN&@(tX%N5^2fzdO6EUwNE%&TaBx zR9q3-5tJxkdZ7?*y2N>b5euwF8oR%0J)+F1ZBT&k4$k)=0bw&@xwVTpw*{%?;HJ&w z!cpToX3wP^nr4lek!Ri_#sZGf+qmP=?Mv7dR;F=B1&=aTi0<_NDEP7r(>u{Vaomlv zzLDyZS`tHy>x}9ZX8||gRt9h*;zD_KX0|Iof-9q>$S^?n45; z86T#vtsWI|>9r(rpMqUeJ|-Y_ys5do_a?w5vDmPlN~)vS=0zF=r@w0Q0WVn84&Up` zNAHpeVfyvXcA2zz`)s-7!|8JGUShy%6kA79u#&yn_&!t=U^8yk=_=tqv|1e22w9Hz zJGJZptK_Y(?<@0MR2tI}@HI^|JMK{=Cl>qkQ+q*yZac%kVUa()`*5!0?KYGQGqYFl zR6+GPzWbw~$uUWyH7PMiXDkuo&^(0(k&w20>v>RFl@NpS{i>`Fz|r=)Jm&uZ%XRE5 zlb#@xQkn(EH`1PHmNhEDk<1>4FXoMTcvYj$KD4NoN$rVO{Y?rg2O_82XE-@kYgb%Q zy|`z?EemM;KY4zKLks;fLv z<;!0QKqRW)GlWCdRMJ-o$GiWq+6&c7ivFf7bT(XNnv=HGEal6c(f|jp?o=L=i?T6V z`KD>@?!Q)rlkCV$d#9>j;G61?qm5~$v32Z3yOcd;Me}cTtEU{9oL|#{6Zo!Iy9k%O zCNmtAa{}iHjZH(fk-AyN3ur=}ap&SzscraHrrTa_e7LDDz_7#>lKO?2(`*cT^n1lG zwA{)g4HKx6YLa9BL8lw}mx5**tTk}Z{1#RY04nJJaH}Ep;1qBVuTCI=H#-gsHF1vp 
zAdSSpb%si7XlxcL>h--H$TPo*i=H)fpPcTl_38vwNmTMu+YktmKfGqTkt%qF-D{T) z=hCVx$I!Zlsa|^~GKZNb5%s31EXs=@2th2;b-4nCtZl~s2vUPHioG)U4xMTeRlBrD zA-W;uIRZ3fHKQC4a_I}%CL}_!;x^Es@vg`GL4f(s0~FDrQjNgbhq8#l{%Mw+cUo$r z^+FQ;S^W9KBLw)^t}nZw%C{biTs(62^vK+LK0x{Y%{xfnZvBacf*fvC<_R!5Bd@-` zaM7qr5Zq`3F=VuS+As9(h2)8_h;4X0i)Y4Y#o6)vx|X5d%3eKU%0F-+p2u3xTzqXp zv`Aw1zlC4z`rLK!=`TvW;_zCd#Vwl+!R{bDO?}d7>R%OfK`5z{H9tK8groGLlj?W` z?=qlPQ0IO_lEp?p3D_)O-sOsX(dYPZxX+XK;Xc2H7s5FiMg8FTMl;yCRYN=7KhkiR zz}q4yBl4#o;B5LHesB!%6+c$FfFZUlqi1{2B=zSw*2_)p={2HmMb^ zSLazXR0aTVWSD$v^*GVLPV`u_;6@M3B=X8KclfIbkN#T&KjP{w~m zamn0AW#Lt&80l)WO$`652oVokqHOFsA;mcYkIlAe0 z5ve;{Z0;yEbNQu#`iQof?18pmxd#!u`Gdbq@}*Z1(nlEM<(DYpr`cW_ie^2?LHLq+ zg)B@Ob3hP@jQZXF?IwF9Us+1z)Mttur*;do$SF+`K1?YKh`3mJqgp?FV1UYteHRag z1RVdemPJmFDjnl-fSy(<)0*k&zcGxUI1_>F2+&k+{4Np-+{W*Hb(fAZ@>o7nr^-HMk2Csjlo6-Xs%Z=~Sz=B%p9r%nx8dI) zy^Ov8>og>z?`NyxWrDsaX;Z9J!l-0ZD#UQNkjXiuKZs>$wy_F-^|5aQj<-I}2I~FE zA0J{tHY-gQ7Kkce<--sO1uqSC47sB4CbnI67@8na27ymhg`m^}Nz@P)*Ffx!D9d!I z*XCs^0mF=T*{r5#8`CODC`8$)lgK~6a6`S4{D=-7k0CrBdWShYDArK_K=ez#@!9VI zH?o?DG5{x)tCV7`o9s}pY2Ql zJF%o%EhI}W?0j*0uCt^FG`LEoefkQ$rpn3hXQw%LInY`j6NkDftUgI8BWmM#T~WMdXZ zWc?)yUek@MP0e@T{`YDWv`S;#lLdOfr9ee=K@8vz5gBeq@FE%tKnO=EpDQSTsnEl=2hBcurk|Yn|oYn0bXEECV<7(wm4Phx#n+P$3OGG@s&A zUz!Kdx)8wkY}~h1i?A7^u9{_e`z6r#BH}fJ77P}qxfNAWYq6`SYbMRlOtr&JwL9F% zdG&_8U^Ssd;ZjHn*f4#zp$@Wyg9BMQf?GOY0B_VAAxjU=odTBl-zbIRcC%u{(2Jrs zii4ZYAY(7NQ;2rnPZ;pG-Jv`8)oXHBX-~6ZrArW~MBqUr2I`mvUrlm45&c%uD_V5= ztmcVe1_ zQcV31D}>v?^Ec7M&@@6dYbQQwlN0FEmTTW#MPnsEL@(EtxWWLTteuCASrDb27wP85 zj``eAJMaBJ?F7{bXZqJh)L@jH!FwJ%tp)@yHvmQK)3Ekw93Bj5EDRq^@PQL}Xh|w* zk|kQ%BFlK?+J##15FZW`=67w_WUPtiVDZ=a=b3<#< zHT1e|HP7ppw{FFWG>Psmye20tn_La3X&&dXQf@Tg!m63NFOnOzyYOCPqyO$Cv1gcm z?QR9>H7~f2bbh9*{mkWUuP7iN;~MVzfCq>Kxev!zYrra*{ z+%x`WdqC3uV_7{zt?YmmkO z@Ai%H)pe9_oJ+vQ*43_GtI~PQm-&Ml!t-8=hX9nsS}7PPem#VO%##m94$!mnARl{3 zcSviaEMrs$nL_7e6;l-2yo$MYTbCu42g(qB-i2Z z!QYR>clU(L3nV>Jox33bz`EJ82ZN(A`<}X|LFI2F*6Rnw3fOeQ#LlJOFBAZoxvUhR 
zBDPYv!o!Nk$}H)U+2V+;u`bO01`WssjVLIypR`l=@a4PxOvk%Y-w*a8XY~P%8Girs z7f?7p?m1;tO6((Vf5r29l7foAgO!_HMWaJcpFCjhte%E&AQ2sU_A#?Sv!+(Kf$3Zo zGU2L_dGjF$dYAC)EO79b$W5rqo#-G!JIzxm8%%ow-hy_1s#cnQ5N|aTyt5yH;#UB2 zL6P`j7E+!^3rwqiGfr9lLj>Z6o=s#wA=jjl>jk>FT3>!q z%V710+rwHks?ZW#GII<+R@_yjtwdY+NeCAkb=8Yvb2Mt_Cos^Dcel31ziI*aP-xBpEi%>2Ywui(WwIsAtd zwyJ5M0aP!j6ohKK!#WHK$RG0Bru;M@F=Ix)7`H2;drlMvcg-B-9?LOCl?EBFi6u$t z;%i`l+=Fn^K|^eJ_?QRnHtfEz`%KzD*H7#WV>#2_JeeIQS~k?kNd3T>-A&|c4b zVnhMt#p@scm3tvEVcR4jx+8dEcK#QzVt9WwUU+R}^KM zk|`zqr}?ZvXW+$8F))-cyP{VW{`%8R&hk`L5LNvzLjS?kK*-~{-^_ott2cOWdT(i% z{#P@mGB>dO*A01n3}6~~r1@Wn%xP+9>uvsL=L76C0wDUOGQyb)r3zvv)E5#yz@Ym2 z^bbIo@EEpUlB~#SdS+U$mxAcuN}oor=2$KTI;?HE#@7SSU@h<(w(`$L|8`XnesH@^ z{30W@Ce8XEqJj1ugsefwY~ZA#l!U2RBAlHdgydLj&sa#A7?!F5&UFK%9Kr(`;nQwL<-*K_T2L$GaOT z9fTL$QhZtk*YToTd)?`XNB8vdKF|#v^2BPBQct29q$JCnhX&>&@Jur_i`?m_0J$>$ z<4N;$woD$pB7tF03+xwb!PpKQ2&Sjjl1w5P(X%}(XdKc;>rkO{E!PP;E#!OU1+HA3 z-xN*L;WHYfG^%&h!8|<89jmQzOIyD+XZF!~_y4;R($?kLZeI?Mt^+DkSEUU| zcRG6iS+TgD>OcYYkADdvyD7Eo6{O72P3s~V3kC}>iq>D}>gifT{nn2e&d1_EykW`> zV7VBoCkD=!D=@k(j7abMt_M4 zCN0fYIvRzS%q=wmyt~(9z2FUL^RJhxDH@y0@z^j~mT&!$lDqx6hn<=sE@tBhOU>-=#O^3_WX2h(vYDoHe&BqS|3q7>Ukbe7a*45b~kGH zvZZBy5xaOKabKfS+qDLYR)5eFIV7m6bN=4x+HkS}@R8dwh9 zh~mq&$>-mlDD~mL5pZ+`adseD>lNaII92}M$8Zsws>HZ{5S95y{>Qa5+lB=IK-*Z% znA#I}DwyXt((Lchl9~O}1J@h3|HIy$CJ;kM;Zv?4Fin8N{@n1;ZW{qql!k{Wq+pN! 
zFpz&?ELoaK-7l0CZ(PRyE1@fN^F7VcfXS%>BpFZ}lU!D!ivM%PG){6TjQZGFAo+1z z%hxnblI!ohq55YRq#N)(=NFzfemGd%111`jkK#qy|ISN)H-V#``{_>E{9*fI(djVM zSrGBDR@naU4KdoI4=&f+x7C#JF81P_kDaIb#NQy*hKOF(GJf9f|7XYCGl-?>X!|p1 zN}US4M=5GoMK?|vxE@d2*^?A2t+AN`7XUT*&!`a{vHs_U2T<3fzx$*fx?EK8gIu_Z zFS6x+_!^^KUCQ-``P+}+U17iksQ9602smrpZtn>1?tx77yu64`kp8xXfZX-~h3JC# z%ounP08N$fcC!7brVMIBa4%o)IsKiM{>BXQVfFYJ4Be-*-=v-K_JC2@gtSSUWw5#bO!QQSK@zfNfXSpmM#Q%{5Lbvo&pFsVOD+A28X1{hh= zK3teZqX$fM;5`Lm6n_v6ZRa`4^hKQ&@Sbv-Gmo6#=Q;bwp67n)pU!R#vF-4cG|(Fk zFz9K~87{n3xOb2n0`*AqELML|S5o7H|Zo=>ig^-b4+YSGflN1wHbzZ0tEqW<-7QJ^HVIC9|c zj{e^^orZZn3Ex5|=u!_|vVnfX-)XXf zB_IoM<$MliIH@+}KiAe_sqF#HHvR*Ju~@I`_ezzN66wUCvWkgVpR!`bW8(PD2#sS& z23tmn{;6lgfm*HF$Id?b>t9Le07prwf4mL6m?5K7sqNcu`0_At6BdiYBW_D4wnH~_ zZ6@kz+UVN)G~~OAaB}B>5h@CdjbL~MPSHJwM(k5TLF+n8i%~jA%MFcBJ@A*PpdKDR z(ze^s&VzRart+|M*yALay5(E1RtN38HhQ;qYX)~qF1_awU_3YacJ{XKEC?7~e&_`^ zspTk`Kx1#?*8(+5JqQWI>2EcTc@Z9^Z$5VN_c$@B`-VP^ipl`L%xe6M14ctR@uE&I z?11O<&CgmBeos4IC?~hgtX01jSLzo7F`(WS-#=bdBD{hM#(IMoY2eM7fSh95s=v&* z`K{)x!P%GpRS*Vb8IAjGs%Mu>xW0kj2T84eqxTg&f}0Oc)f-%;|7e(&pXqyTG2qIg zUKac{2B?I>s2LzgMXEq(1K2<}_*o-&`bGf0c4n{fVU*rM;c4%?Af?~(!Gtf|l(<3f zm)#Qu{oX!h_4-Wzz`b^njRBd#>igaml?&0I%ZxNKc*k)E4-6%zFI?#kavxE%R1DK! 
zU(9K5ymI7~zEw7MvYm8Nd9pUU;&r%@pH$83QRBY1*K9G+eWG!ikoX>;)OO`_hX~MY z2leE>=}{xwQ4MTzBSOE(JkS`NeNJx&Q1{i0aSLyBF%kk9!;uT7_SsWuKc7;dw%uJZ z?8fko5S`+&<$Ig%-m^vqz50NW@V57ERwyYNmz6&fyJ?N!YaU#Ej# z6+Sf_s|Q~dF@PBlTuc7z;0kVd{nS5<4xY7Bg><}yzp`-xuvEBMc25}G&>@uG+Z9y1)_N zq4Ki?b>WyL?wV;Aj>^5>HIB;IldFIJ88}oP?1(^DA=ft3MRRXFq^dbw%CvD7XCx|| zMD(NaD&JU8y-N8)fR$u-r&GD8KS;BcDm#=Z{Z3-;kDZS>QtNGW3T)=j zp_DxFx=|7lpmJSTMwZt(qti?scQaZw`AH8VN3#gp>tuf^%n}KGvsAT@Z|TNEvI@uF z>6(5Nnf5lu&AeSA*iraam?|?xNo1)II_Y^f0!#Xx8F9Uq=h<%yauGS+<7Ga$rV@CW zL&ZFkZ<||@{4 z`>J_nkHUgS1OC0P<%77lGREc;OFpxQX0zE8ZD$95U4tKpW!`c8p}Nnr*il~gYsozh zJlRbl2E0++SzSYPbAPm+cv^k(8$AkHa~9!84|}AHkZ^UHdVG^I@^cEA@{`4_rykHL z*f$}cLIB&rnF%H@Lh+;<2+g`G&^h)p`>-fiSPw#*?LIbz5MwB2+|ff182f#NIujY$ zU67PHs<{2QBRsw9}~iT2~@4KIuy$N&j)}nT`#j}1>`3cq7UBOlC8gO z4#;)tE)n6g&hOnn1+P@Ia%}0cDYL00KgksAxJ`)qIvKACxl_kHZ5EjhD-48kA``x; zVy>^VvnqueTa2L+@Q~bj)N!$|P z;K+t$1r-Y2M}(iIZ~>btIe41y19L-n% z;wg_FDw@AD_3XHaurU!u$*W{1p8E<{T}5RBOJnma0L0v14}}dO8i!+w(xy8G$5r>a zeROL4Hn3qq*9sgeKi*s8-F9W$^}tOlJu4m~BtXFWpUIC*D7Q)=phj3Z9Os8*2KLcnK~@ajExzzd5Ju&#wf*(jsBgFW>I5KDNy!P)OuD3k=J%I;eNo~@_pU6ucS zdRH^3o+L9%GO4GJE^ePi9x1E?U*#Nd7}Uqc;S+vkzmNUEOY35;M}Lw{T^kL4&BBg8K;i#v)=LITk&#wtNwdc*LoWU6v z$9ezL$3u7XZ#-fYBsN;6mcKFuWkWITfO5biX)F{f%ZJK`@6|!v>IV8tOwlaH9Knvv zvkK&}wzQ$XwC^GN)&>;lp|BCsb)ipZT6oIZX5^~!VS1YH8Fys3Kv{SKPbF!F&vYjg z)0c+Xa&JL(ZfHc`YI)A~+hp7I&8(|8zx8Oa=MVS)WgAsyiIGqF9tJzS+U>}Jsp^OJ zC-HhW|H+NP!n)xjx+N*{&4W0Wad^z)Dip^y?H95SBPRT6u|m8`|KWSaOem)4@;JkC zigxe?0`!Gp&Xnx~?wP&5e=(AS~%Gi%k{bekNb zWWDDC!gJqL=LwL2PD~X;${mjBSu;=BsKGe42|EXP4?C5agW=>VLn^qIe18oUS`)Kp zg|?#kMUa3x@0*wcy;~=NJa$Rj;{e}9Uohfs_5I+ue2Lfk5q4`$f0|im`kz$h~zta z%dX3+6cji}ZT=*fo02Y9NH_JAM%A}byy85VR{Xk z!_?+Y_CDbeu@au!R$1Tszx>pf1??H1kOnTK%4PYgiG=zbmbyQp4?ux^=TnIuJmuu6 z8F992+gZW=+%feIKI7?8Lm$T)9v5p@FqOg1SYxn!*^o+WOhhJ9f zo)H&plcLIObPDD!iWtJRoZI1;EU%Mjb=R4!PESOof=lV2XDXb0+J5RQ)5f~%-1Own z?^(+OU)3fCq#<$`Stf&4271Gj-~w}-ITG_wOn#~KYVl6+)^#89zQ`#pwOpI}M$g30 zAApgm_ncRT8v0{Nh+=vLvldxq>} 
z9{6Zps?kGs5l@*%EWA)?5@V8&{c^x4-_tt8CUL!+sj+Xc)FGWSjDKx$p+_)MLt2Tp zI#mr?G>j=MEn;s@8E93?IVpkXqqOx(jcIH>w+RE?J7e)kPk!1|n0%j@d~>1If0mEg zA2iE(T9dByusokuAW3CK!!&m6#aC;e@*s87it1EY zBf;*>4^q-nOUZ{Oxl?wI(wZL{RluKdjRIX)9^KnIRpr3Hb~g6|sLRRn=QpmJuO+08 z2F^=SZLJ`KO7M;Uwkd{P>gTwd!vRXlgp2W!%|r9cZ>FS2>i^$w%+UVRv-o{+=DF zRb1Njj~$rN3{YAd5sWf4&&YJi$lPGO$A1DF##G6p1yaZF)pZO7z}YfZrFA}I0VJ$F zm)dAbIu&r;+Mqt>l1B;}^!{Ju-7_*dU9k*cJ0_U`hpbFC7@qe^Sfc}@2j^w@KnHhi zJR$W?5RXnjG|sYvHjFV^u%G-^ai-CJMU)<+@G9*u1{;hxw)5WetV* zw{4(#%>Jx1$I42=J8O9xG#U0Hu8tHYiyI+b9NEOl;e*tTN3n718|*PT6#DNtllXPO z_c4E`wpcOvGl*JH|N9OO)O<4@l#*_BgVpTPRbUvv6iqC|CRcdqi=QUg`H--DPy_w}2qo{pwf$)e41ly+ztNnOGGVg(H*yqv$4psC%T zX^VqAE@^#U&Us9W3o8!%7IPkYCuZCp%-LUeud-C{;7#Abu1{b!80B0kXURcxy?`XR z1I#Gh1kwM}Qd)fl&0CYz(9nE3#-=`rchPz3RYk}5?K^De@ro4s_5q{`m%a?0vT`CJ zRr~J%U9go{rDs;h4^N8VXAZBDPPa?|H0ujU@y|P!)=Ee+J>$=ena|2ph=FAd|EP(O5BnD#r^MFZDd!qw*u&tt6u5WA`l1`9kIGbRlKkYh;H^; zmZ;BrZQJ&+qU#|+p!Xm(;pceu^o>EK`z%#0)?}`3@A8TTqUn0{F%7rTI3h6xFZVDF z{90O0Y&J#THp09!&bMLOHKf(q?@QMM{+79qa9yD?mvERgMS7rilVbK*e5}JcXVXe0 z<<4i}j>kyYmrA7Xeh_8#!Yc=P&x zrl_&XZHs#PvpAmkREn1Lgv4L29ntAnndmN@!79))Ct?m>jb`_tw zac8D5=Eu54T=P4G-l&;6MRg+#PQ_OaHV9n;{K=h6u6HA z2xx(0GnF<1_NsnGW1Gy_=E8sNENN)9A*bJZ&@cT+3NY=6gY-*;kdbMx*DEoN_*1T69yc+@$cT>;UF9NK?RObzyvqXv|QMC-ux&O9YB! z$5v$KjWml?f~R$fz#PC)P4;bhHsa8pMZDDC!$%H~|9VJLGH3`OdPQ~)bBD1{7v5r4 ztOF7andcB4A--y+(EYbnXSoLPfn0au#KIwZ3O}BV@auUZIS7Yg6yc@%0)1aAF5zI; z{M!RcTAG}vT>LldX^wI^M;dGc-W8!`non5)11wqGAaHs@c$azw{C~95tuvOSS?asK zwy#=z(5N<0dk$Yu$Z${Iz^|}DC1yJ&zX`eP4!1jr#|2vFpQ=^S_o6JT86(Y$F`38c zPFa^(tN_B6;}Dp$B#qG#Don2BqH&Bv5P|>Al5%Ap;c!(5@8F9qmkvOA*Q*l057Ru; zEUnXi_J@{M--+x*zTNIoeWmYVMFOII8_hDwXJLg;K ztE3AxulyrP(DogfE+yu5LR{>A0^EOFFS9D>2|0Y? 
z`ugE!X@8_fk27!1XmtNS0qk^P*rKS3)qQ-UYAG5sbIn=JkKnP-qdyP=fHGoZ0Fg7j zKZPmG-y5aB$fS8OD~RgSOYv$8InU?BJl``-R(*V~2-z5bsx?`6Dt)F86RHGYpqb$7 z>@4y`(&MfBIcK#3B^wBrqp@|?t-Y6Mf$zX0;cdu}uNR&Gbg(Kh&bsv=M%Xq3Rn%JI zUw~fjJ^<1;3$ze`>DfsyLGyLr1c-v_g3WYo|Cn{}i(ai7RSOkJ+yLAqpk@dmeJpPY+8_uV-6upB-;yI_$j2In0f$4hd-AGU zn615ZlENO%i+QqDwNSY(STlI5X8jQ#h?Mp#grTMS9vq-_%5}GoN{%(X75(B?Aq_uI zI>)n9EEYZkP8}ha{Rf;M>rS)I$?-O8rysG!T^_T-pg!Lm){vxH?IBi*28;c$)51#=f#S zi*kMwS8<#wyfcX2C%dko+=;^{o>oxh&)=J*OH*W0eseY_L|e}cE;5I?%!GZ(bc za0P7DnmYT2jllTN@PcSTrb09~ANf@XO8ftC_9oC+u3y-AgGe1iG@M9uGDM+MArvV` zW|<4236UXVWh!)}iIXW)%9JT&mZ?{R5K`u$SB8Xm&9B+F@8@}6L+AXy_5c2BS)J9o zpXVO-zV@}Rz3=Bl%iWP7cBCs<$k+77$)q#mDO#NIiMs=C{L>>V5^k7IP`Tj_(JhVJ zB8&AGKV9_9=i3V8hh?Fu%i^5QqyDEMzwGZ{khqRhHAZkk$luZszEu{0a-z}hBuFrB=}zuM)sx`7XL3!( zR{1j8Mhwk~sn2>Q-bO@$d+D|?lQ(S`oigW>O#(71*?U1HncWY|-2X(slW1EK#l(0tE@91&Ft5vVzLM)YY=2W5`^{gK*KqUe7` zXsa^@2%o$0kGfW6H)hjFEcGJ7v}!|RGV9(t81AmgQPhHz18*-5_nqwm4Of4V335ti zIAf=EAOi|(P9{YKH17J)~{{AUg-hjuRrXQROkd#ZM<(if%ZW(zh)QS0m%IoO_ zAlzC|2w9umQ$++I&Kuh?k%cASi--m&Mi_H#E{;^h@re>6ZpZO4bkGZso23PxCv_?b zkEY80r32V9u9r?iLWOdW(FeCF29Jsg>L86elgvbtP}ISrc!~kfI`z7Oq>x5;M7WT-&KabK@pW5BPvxhh8B`ZZ56kpNvD}n9&9D<( zUPwq?oEl`pppHHz(+BbCMje2_S$aDQ0qQ7XJ@4MXOw@S z`KFJ2V5HNkM6}Oi-1y+T4@{f2V-6%5dtP4fE!*3ue0Wuj?(2zA#-gG_{RWX^r_JTu z9-Ca;T1lee*G?l8ER=R_xx&kFs_cNV^8-5f*N#=mb7lvJSIzkE)!MF|Rzu5|Y7Bps zJ=APCO5jWr>|QIdD@C0Bu7Ea-s&gfnHY$r9II}6c;pPzC6{Xd;^zN{;$KJWCZ6=c7 zp>pj~X2{J|f0RswGSA<%ygYu>97cJ(%C*h3ErjG?-CZ~Dq!r3}NgM0m$ox>dTUuT# zi=-G>L+iiL1Y4I~wdtI5<$Cwm*<)8WZP`RN^W5${UL*|>u`X9F2ACC~QRiS&qp@SH^n9KI)>&+SI^vOLk5mRDB)$#|2Zc zAOC7uFBq)h_<1ty%wJEdI^0a(#CsqBu7khBDed^rCGbw2Gk>H$mVRW$;(9=`2{>og z9S*8M&-KR@A)fma#?s9nCDvqZ)m@muUOqiXitpQdS$~sgFiX(gN1V0ttv#ZlO}e>% zB<1l+HG1jFQQzGs!Xq93DEB2HypXNEhH5?7aif9a@9RqDxkQxcwG0fl3^7G-5Jh2p z#e}ls*c5=BqMW+J>%Ca2POP#_Bi$U(2y&5bJ1d~%_Bj0_%iNFPZ-hiGFyw;9;&%6C z!ID9_RhHa0YRdzEHAL*IF1m3RG112vVB4+P@H4h&;xO*@4ZUQ%j{an!#kk zS1<1m%Bvss@TsHa8+}Dbb2n}IUD~|6oaz9ofqBpQRvxjIWq5DbKUeJyTaNXbesr@} 
znRjo=ca4&GmI>Ix(+!<@6H<+@kqu7)xB~#(jtcZnbH%=~^`J{K_}(VyT^}r5@X^WW zJDVVNN}GSRZNzvCUoH2`X|m1}>ae_Gd(D6L_`ogSP-sk8a7W4V&Fx zShgoLjN}PN$Pj4s62K3hALpfiMM-Uk`iZM5$K*5XMlDwyJ=;Pt_g(hb+fCY=kyWk7 zUD%hK70rHL|3=3LEk0@aOl`&eKo3(3;AdVQ2*9(NC4$+pd=HhKMm%NpV`%^cj~A*b z9-+Q^NAK?Ai)m=q3Ab&kRX7bI()Iu zpYybdfiNSYlv(!?JYX7AEa;sYgQopC!9tHLY*jtla&d^=W4}1QdTQ zV#(8bI=g>e`JU!}p5ON!eWFE>8(p|T|96-R*r}Fi96jY;xbA{P zK-t~2vl=CHX8MZsak5jl0^bL++!tsxn>Vs1JZ8C+{>PQz02^ORexAo$yXhRUi7Bt@ zQC>Zo9UE}W^_@uDJB?07dT`dEg~$S1t3@gh`YB8gCenYfm*;n6lj3?7WnUmt2xhs$ zv{I9LmiYR~_yA)q0#Y~!nsNAB&?~9HhGXtWMQf}94?A^_-%8X|y2-F6TPbj)p~bF6 zBM|*>*U>-X1CBv9ZRFzlm}q>^0&xhoonz{Wiwwqm@1;PJTBS2``A<#x*MsxKgyGmvGG z-f@CDZLa0^Snkqh4txaO1U|sP1&peG=qHngKg-knYcm??L@&fnQ+=xkU!LqMP@C3r zA9=gUCtKJ~@l>Ky?DXrlP|Y3;uS_pz$YWcrnCvj^+(qzJ4j+HAK@xW1YTr zT7id33=PXB(X1dp*V84mUxwe;M-5bjvpivc@~z5(UUAEnKY0A^q$Th@9B~dxBiI+b z%rXlzQiOlyW9Yu+NVysC887^k`s>4B7o;;QCwN);KPH$t@Z78CZ__LO0?{hh)`iK_ zbN_KgfTxSR@0@4vwF?$C6PHx!LF+a0V&TWie@oU&CDv>93D1@N9M)F^n~Og3f6M!} zy=C?qzr*CaDb0C$+&)lu2vlxFuYTr3l{%@DX z&&^0Yr-3)+=`$dAV9N{f?919~trc!S$A2d~6=I+-LiDsDXRdxAi?Qd;0WF_^G_BO| zqg9Qw`wVWG&FEpM(kP)aOI|=)ZECoYHGpAKkAHP?_DU*GFcvSaF!l^_E!?Z7(}L3m z#z^SBv~c1jcrX6@we;phal?cqju!|T&HqjGag(mYku#Rs9H|rvMY)5cV(Kg{`^TR> zMdZkfS(S-9VwPo&G1iv+d^8^81xI;p9tQM_C3`G&$dMuHG}hE^x32TtDLXEF3bGRX zSRgMz`W=Dn5JU6h;Mc_UNAIxdZ+W&?8;=~w4I0(9ZiR>CWiqu{CC&sQ-iNj+&ugpI z*-AAd1J3kKUjmgCAH3g^ax7DO%e37aP1^`<0xkcu#7_!!>N}IMtfx{jHExng_*nGn zQ<-^^kbP{q8I>zy6T4@HP(IN9TUAFtaK*|Ewr;^_s%G?-s6BWODG|u)rv>z@NCzPP zwKycsSY`U;`pt6NR<;DX=eq3t%)dT^0*(DcR%m8PfyiA$3mb? 
z3&}!he%!DmyEYFB;B1<;-S73YqWrais#s2DMRi(@n_28d8JSGi`lx(?^Z7#=`q|x0)W7J@_ z#D=LB40!q~*gKHmSQRnu5!q|V_{U-Set4UbvSl2Enf7`@UNhb@;$pS7Ml{E6p%qr= zQquY&mT;q6kvjf(u$#-HMu&tjK{*^vu$ShgG{IP!FNFob1?t$COb5yWF*J3BN0NU- znD|n-Lw0|2Um;Q%m`b2537N%S*qW=^t(Ns9WJ^C56QnSf57KSApM66ZNFI@+0)3+M z_0!w0DDmU3B{Jv&1~iU~xcYBMiLv}Em?kGEt+p`SNH{K^t2wbZEE%gn5phw>bBLwC zH>KCG%9rGs8Cn!bu`q6(>;RVdu3Rfk53{5=QY9ox3Z=ylcAujh5uJiJ7j9uN&tzc- zoWDIV&-g)uJR-+adbHWt#3-;1kr1wKJ^BoH>U2aTf!}6@>=bT>xN7V8*P*ajahzL; ze$I#n1d=@0Gj@Yy!-IxE)1|fSwCQ>Y2bv+aFNq}F_!@_K6!xr4wq+(9PDG-Cw*X0o zQ*9M*%8fz8XxX;X@qhf8lzwAv(hGqk*!4#}Y(r~WG{^wQT<)7@N#*A0SVOMeN}Kv7 ztZy&eQ%aW63#Sn-5>asW{db_BDeEDAh$d6tM8ASt6;x_&*n*y zNsG>igN+#DDjchOFh|g~Xej>cr4=Yg_{Wsw@|8fII$vK~qtjLa?(t(ubPmyS)il7P z6bmUVY7mN6I00d01UnYyyN^gTE#HPrZM0=78A)D@cQ8x8&aN#i2pL3_4X#_F>;nn? zhE=4pPoLCwga9XXk4@M*2t#(n1t2QSvT;BBk0pg0-Ksw?Ob{2~AL*V;o$Vpc&q&W7 zEjGQJ3A!rk2=JRTR+Hcv0E1E8#%v;KopNK4h6kdsT<%(DB(W*Eo+Boe#MA0HX&If} z7}@fX_2eh7f&omwgBqsDL;d8Bn6}4UQAd3n6QnKutS8-n2M)%WWdgJLay~lP9Hg-( za0i)ZwaJ9@(W^AerHfpPgW$J`M`;*=pyc=}*fXA~8+6ZclsyoddE_AGMoeA>D`aYu z)3J`kpiV4CAa4s0YZn_P`vda;m<5m(;|DLbzIb1+W8?RPeRf;+jM|9@a_5h({Quc=4mvQH9<~ z-3@K*s{sqJT3ZE$)wWww;**yPvsO>(8yQRjE>T*Dr#(gm46^zZP{46C2r|t&_I`$% zB?B_AFiS!0KhbB%_<_wI!uuJLJ}3f{s!!@q*nbo4)|@L7dnli&OEp*hkv1N6-|S}x z8g=SfM*5|Tx&6!__+vL@e>J|oF6^`t*QRs%DLkz^Vw${TYe2qFak=UldUqLOVNytd z`V*7KDS(sn`U><-7Y;V-!Sn~t;C$|r1>zr2>uJx!(*=oKusO*&BpD|_Sw+g^53>UU z{6M2-p{0?lpeDxUq5#iDwcsktKX{G^_Sm%)1U-f32zzVK&K_e-v=6)rvXN}o(KtTQ zct3mVKwY7-4xAd2Ay!koD-UmhF_3ljQLckZ^9q!~^MauFQ9oT~S9q}>Fj9@>6N7Yvu%-ndTMgq6%LH zJAsY^1zo6|Xy~zeZ|+*#L@G!Yw}{#?%ahChyE!1o(IICq<~SQGi@g##XCn{4x*mC` z*tB~Pl$@KMD%mp0`3wSd$A$I>PN6J{ff zQFEoU6EHuu^|9O{GBnXIP+MfBmGO95cuf3%!wzPw`RK1osLS`?+T-6YMam1`fdc-7ialwDml0J?fmp41*bs%0eGe(!hk`zp@RId)PtHhOq9}ud(=l zGd0RD$Ul9CyxLG`tQ7l{bD%noqE`PG;VBve{Z^%>-m0y>d2>0-)D@^KtDm5_lgHq? zlr85rw#@O-E;s5jWgW;O)K&p^Ay}SXB7jApn`Gj(*AfGusC;tsmvfj4{;J4-F&PN? 
z1mjF-ZB;Sc-BY|$T%-LSXr|N-9d0dV-1TA)!4+;tcElh>fixJ1FqlF_at!}uKDmT{{r;%Oe1P<9L{d3R@hvw6>avAgbmA(xq z-hx`;K`>?zX;A*%y?Hom0P5u_Uf? z;s*B01wP1-s6fG{#6))(i0*FdgQ96ro{c~U6VK&=Sm{%M58g(-Tu|%ix&TYM_wiJX z??s0m^&ntUGQOE;V5?JQS3$c*6$&Jc2CJsv?@}DQh@?I4Nj0>i3(M6jR{jcz57y8g zoPnnK=>ha6>P;*sJ5`=i>jw;01OIKgrZ5TlbMw;FhS0LK=EQu-dIP#MTW*59M?zRn z!E=wZW~1@S_h_;y#J#N<>ft`Mrj`>Taho7(nZG(bW{S{8c8PkRM$$QEqFN6Sw4@bg^Ge-)Gb~7-`Mk z{eJo!Ewn;@-8OSgo}kY8uc(+puCugJ_z~g?ccoaJk8EL>*5=x2c8F!B#^lN2sG^IH zZ)Rd`g2e|Y3D&7kX;6|2#Q)I+BGxQfEo<-h9;;~K)X$5$=2;S##1R5TpWT&GC_QjB zU3+#vr6ppg$VCy;8YSmI_1E53L5Y?pF^%3S>1EcIbN67&6#ekhf7?zV`BOonC0cE_;99` zR*jxxwC!wqxj_bW90O}G&D%0w2=`-qE$F>_4dv7U1zOuJq5J04tzHKu%zAw-VfFd% zEdizRf)l|spq$OtsuxxbgK&08Xb=wo+QN>}5HK{}?OIh~S$-kG8NNXpQ`}%TapU^> z7be!+E@Z14F5g#nZ2+UcyMoYvVl(loh1pwS?t>aR>r3g}?hGzK_!=iFKZu z=i+$qcDfZRKPHw+>)Hh^{1$E*fxk%R*%9Zw{V$FbbP0k7!pg{!aV;w?D1gq1oCUrA zuVMGgAgW>rfn%8zZ4R3Q0bT>*bKbPEojC@k8DLh*EW z(z6d4q5i~;d85S?-w`(n2}xJuOQFjtBU^__s5M|>Mrn;YB_g*6{fEWIl;`xLVjb!g z7P`h=PF`R|qZ!2wNi-`sadF}7>8G~={YS29ab|LD*%4zd_^q|Qg6A7~-`ovN7YJKB zXilE84d)qt#rFt_-!r?Plk%pGq1*S2l~Tp=1p({fMvBa)M{BF+=x*4#J#B^PSC1fO1%K z25N9h+5}o4A>4<^w*8%ieMf-Z4=+<$Qt`rhMH10)G7{dw!8jNiN0Is|A@*RmoAVug zN?bJ!0#`Jam%CKMfZfhYR<{6L_VCoMoq(m1+aKBJQ5ve8wZR6Fd$IcLRODV>?dO=; zfMrp)+U2HMKh}UpKogf&p_a5*XB6ISC8KHa#?q6+8hFDM>;KW+8L@Yc_|?r6{jgTH zWmfN#8gwk4ECer$V@yCWF?UzqVzWG&A-p^f<`PBOd>jJA+A8QocQtrK0=HK#^uIg? 
z`djDF>b#<0n;e>hY(7UtA3tp1VBRDCwWz-BEQlmMrN@iSU`3DxgWYdXCA44|O!JGN z_z)BQ&uL##bm%?SgCBcD;cZ3w@edi0Aom`YSHo@wps$OiLJV+7>|gb&|CxN>T3 zseoKc7r3J#k3shOw*)!%7c3u*uE#M1Ego+>-c;k{Do;|r{latt?zon+E!8l8IcT=V zdpy`G%uBrhNDAKD+cpDxYcygpmiS6{`QFHZGY^oqS}w0}-W}6fez8pnI^3u%bxux; zHNm3L!wMgnsFOwt=S)^e5=>ekOYKR$T-Ecd_9Eai96TjbS+XdRzWeN6re>H}3Ps?W zk}85$%lfgz4R+_#e}GVAWyYR)dOqOS%}kp9x11!y^1ob%N7 z_(hq$KwbB>ve)Ud(H8|pS1;n%n>Bd@<37IS+`REqtCow67+4x$~CDs~E6nZuUeQeWT`-w4&~(8|QIP zY6-v<3-(#m@+Q#QFhA<25L;)Jp%sTx*YwfV*JoVmI;KyzgV9h&mXoWwZ{q~$ld-qz zZEdk_eV&;pzfbJ!mN;K{dI0gB2F*y~AC@PC=9CL8}UiM;IYZwXkhgjZ9OqQLT&99SZ(#RISJeth4B+-B4H z<8z^ldzdXXqb5$D!rGkLoE`WL@%MTbx!o_H9KPbxbCk|bvf?X!Mb;2{Bf5*H79kpZcBH_ zeZVxS7s&KVRBD&xFnyDcTJrv<8-@$xyj%8O`KhyYyIYyG^tE%BpPH1I1qtV?25;n( z9QOI`mcU!`wCv}|p%?o^FZTa&@~J?i)j$oMHP6R%yD>D&MS87WMGbuZ^Iq0Gsu%mm znuh}`+@s8i-}CPJM@-%b>qh%vf_fIuZF;F z9~HN{$Zd7Ww1|+E`&$oQ7sxAT!H}A?3YX@9LpJiOJ z7j$^+*kcr@nRD#u#_zCqMUGLGqXBhf9rfFjLKa(non0&v-eevz<>YsG{ev}~HPSC4 zS}m4QdwllLj4SIr(`3Dd_hsj*_xyZgnN1&5T}lStKG*P0ba4yi$KI4O?POkM+e?{dZ!vJN>?XD&fKo0?|Mx_`u8W+p_lj&3KD9c2|SB5>1)jil2G1jFRIG4pAdTj zCI|3nCY;bdAzdlup4rnM9Bcc-Xhh9=`!OcF4ZH#lzn>Y}er8C$#r`>?_NKW*n%Jp) zs@{LQ6?en?OBv-@bn!o0oqF%iPvl;gm1I8)wiTQX?sd3trM;&E&Ua1O+!3~r#i+d< zriiu%)7JmEQn2>5A<^3&EN6r5FEKjOBvN{swN0;Y|3FQ#a%!}7YUye4O~chF+&deX zzBXh%Y$;{WsL855rx)WMqB{BG^XhxQXMh7?oj?h#z_BmN4~k&4IVR)k>4wC)9qssBLxjpFCKM|8Zy6+VcP1)jV zej7g9ATh`%gxszSZs!}Tyn)%@ZVarO7#C?`(vhwuIyZjra+qA*#DVeIp5i&O`|OMw z-$tp#*e7iszK$<6y%WRTLPI`EPKOSR`n}XvJAIDcK6R9>$;Y?jZ5Q&zS}psbSA)G6 z04I6@fCr?egI0%Brn7Rs^#PP4Z=I5^to?x?d|QJ3`KtZJSv}N)oX}R6uG|a;kjbic z?Os=>6De0F<*2#`!WWzk2nQP~_2m|~6lf_%xN51LzGa?7Qj0nRo-C|<6fvpQcdD*d z?Vtz-!sk`*FFQmNwAYv`9(FoI%B=GXK`V69rW@n1$h-wWI#{x^5N=z>j z;OAt1JskQpcCakQFXtm3yb1eVS^`;3Ef?*1)fN>4p|kbwr+YZIR7+)si1zhzqH zwb9p>2UeCZ-eejq9EhlwXfNMrGP6%|n4nh*ef^Q&~%ONW~MwjPjPAZx}FV97Gwd&%f5BmBZOA5Uj8;e#3aS^K#tKi&)0Qut9d6 zr;5SXISV%Pecv`Ts!1_@iCgG)7o|&$M#YasuV}?i6%5@vQ+8nb{=P}G1__6F#h=eZ 
zLUQ*7o-fsT;c(O3^O7w9m%yr%0gM{Uc&dir-1~n1MBnd$HBw-MpR+9G^sc@8m%N@H zX<|MG>wkF^@xWFj_9|UF#BcNJNs|yUs_Lqj}kx zVN2-D@o}&Cgq}7hbxO^w_f-XyBR~NPw=`2+vULD4#YB1|xtQtllFPKI?IfsrHkwPMiW=O;bS#{zp;i#rj({7Q#4_=rJwezUt*aqaNa(`MQ3vwc5 ztdKfJ#3rptctd4%@5FX$(Ab!Erb!Hsgw!rXkhd=)Q@fR72GPMQJ8VlBd#EWLGELds zA?LiaH%1d0exQ`mJ%35!MW*Fk!0N{XK!|h}9PlSfIp3*E*r6p}jvo|Sa?xL8iuYnA zAuQ9}S7T}-ot3!o{AcSUcHMOD+?cJa(z!#k#!JU6c4c^t8D6$)*01J&D^^}xmt(?VQt3Ra)Na2<%Fp~ffv?#^?F?NE zGjUITd;Xkkcb@kElR#x6c{11@Epjk*agpOL(I5yi7&8Ws&hwmICH1YJNoC)pto{&x z|IN)E?3XqWLmeIml7@wEeWAbBbcHgtcj&ZRFr@PObM-}~Or$kKXNt?Nx;$xk>!Fsp zE;5}_WNJYn(misUPyU)tNJai z)O6h|hj{uSrzSGH4%ukkRn-eKriK}7q}&eEA9H_GzBv85dCK&+d^uIue6sdEv}jQe zWYH`*&<1$Rg{8BJm@Kw9=t(tqd#AVOs)Fg3*u`!63^_qeI@X`lDJf<0c8+o*c z@N>pHbb~wPv@iyzy-}j+eP>-H{J>GSQ}^;W9QBFKDydBy(a$9gbBWorz+7j%&oFUH z?@;Ay*Y{GUnD!KF2PwUx;ybA!x%;4HPv%5PztpwML?X$va8Nk6xcaRJQ6Oi z4$=Wn>&wb6Gy5$%9#6k2uw5YiDISp3_}cXRTFyE1Hi_Dp$$apkB;cz9p=k2Pgzvk@ zr-yv9F@W57W8S=#C`??Vv9my_v!m0=D|lj#urVRk)cxD}^MnR>6AKJ(2nlCh!KFIy zON(g+eLS`aEgC0b7#YWe<4`|?E@vz2y3e$ij6|poEu##YIJnH(eHYlt zlRVQFHT^Lk)}Mj(!&P7BoIKvd=i(d#Fa{4OU>>NF_$V+n`ORKpb?S`lK3$4;ROW?P zC*SwAPihDVT9PIsi{9MTNVVb+nDAA3H}CVZ=!@K?p&ZhEeixfTiEv#_uZ zPCPwy%b_;A{b0Qeim=T^u^A7CT|aH$yjD_g_4#RfD%(b>nyoJHCHCB&H0jTE?)#mn zl{3*U;&i%Dsp7+V_tL-KAs0?d3T=+2vlc$#GRmPto(xL9Fw5;QK35E)Q6 z@+G6K|3n-4OPt(OR6R&`Uf~q9mAN{fc&Ohr>?E1%6 z7xe*(3rh7OvqXj1fb; z<;bwwWj~`ip|x8udz4GGicX!uK;O(uG*c-co=pUZIdo{ZV1zW(=U;!-%3&tww_4FD zIg5_sX76;ZeUl#~6!nKfGQbd0mpOWL$ZH^aGI_u9Nr|y;luCQpyw!y=@y-H^`e)f? 
zV)BJjEzj@5o3HUGv1#ct#Q2==W4h7yAnzaXw zd^;vXFD(7753TyD+Pr6cyv~Y2_vI}nHxmo?$y!nI*dMo*%7rEeixe(M=DeEQxGf8y z7(ulBI?bUYu+`-(pB@VDl^aiTkJ13#Bu8f>TvL`e<$yy)iwyfZT4%rpTF+s&q?`(f zE=#4TCUlZ+cED5rxjFF5J|Nj#%aII>)?rYl&(1pMBmmkW!VD#yH9#p=!&S)V7u=%SmXIWFEKKo=peu{F-3ZuptM%s-3qZ)8J z1reVEBqf(Nr=$Ioy#xqZibslxrsoMK>uZQ4&6X~$zb8{nIiwN5h+wL?K61;fYk?g- z9p9XVjn)+dR|>|y6Y{fFiV`esmB*PnlNxiUf0))IQd{Y^>S$>Ppu6SALs<*J$)FKA zS8uarV^Iq03zU17o8?>RwO~2-A zE$KZaS1ymX7%8Kn+SL;3KxF5s$Xi@7a;#*Xebn;TP_#_kK+#fRf}-VxaM-s5Px~lp z8pfx_AurR%Dy4l9aZ6MdHCUFc#OD&nP_B2S{ou=6YL@<*cawKh`3m;?t?CBu_vQYR z?8oWhjU>u?HubAXx$%~^0&kM6=M72+G)wd91|ktl0BH^Va|~>8xIf-9Z<>NBls6Ge z=&^bSSN>wfkCH+t&g$rd2WBesGI@)}s}WoYXHG7{U8#QZ`zTKTFp5;bqRP|SGRy@5>xS#Nv%%BkceGNh6h3n>f;=dK*(s)|{i_LP z#DB$=Oy77NNl4^nyJs~@pqi%Cz^cMYwn4WzzZUe3l#1bhurq0HwBQ%}VQLQJvGG1r z4j3G|JhGG#kiG)wbB%jAhkojLj>UP~Zx%Hfbm4h>WTtnAB1)P|>2}wAEe($5OSFB;`ypQ4@ z3tYj%^!%@;HI`D~Y+VG%Vy3~g#QzALnVi3q{zo!uNKy(z+xj<3{B9J!$_)8$!lQfr zjZq|U<7=VKofb7SN6pQjgtwI6sw^f-Yi%-MWfD;+dfnZWa=rtID%l_^ejxZynILwgy%hzP({+_23kWM@K-s9I6Nbz4=3;n(Rb?32n@ z+BHXkzlITi@$NK#0*QN@i@+za)kDd4&0DbZa%SRWNU-TwlJmU^%~J6O6oE$|PYYkG z0EL{&{+C5X-Sf9^Dj}Z%t6Yn}4uo$s^mhiB%_Vc~R~wyC$TYez$I_^c9&iR-DTj?- z%=fZ{--aJVCJeps@ebt!1g$d-MChtvJe;J3-kf}2t%dRd7{hPcgPj3_ubnrkIj9(= zfBdu@PH0iF*V5DdCYaSvIM3`g%{o0XeSB^kiTOd=>HD$vdHKCV z56Sc8T2thgDu4QAC5))31ZtXlTND0H-wG*jD@yOjY!Y`2sc??w&JJ!NhTe)#0P9Ka zTZJ;|+RK2|hf>%Bw6;@idy7QYs%v($$2c}d?tj_dPx~1(0EcV4mUw&EZ(Sq#x-J&9 zQE}yB@&jpZsH9+s0ZxAPZNu~O)x?Wj!NkH2{e#RR+C?(v-fHU+ZnBUP`HcpR@5w(h z>Q)@7Km3Avlo5ZQbw*@gbd**&%4%XVC-G$c7GiUrH-6T^$M8V#U6f*vqCHlM8MwO3 zKkd-BW5O31X9aGu+KjmA8rmfLB_h+;3X)W3OG8{aaabA|gVZ7kMZllAHO1CBS#zux zT#x)+{+@Qg#fwaGYFbBZV<*pu54|8tUMpR=pStn&P`0C$tmV;dH{*n99$d50&)daE z^E}$OBKTUvMw{LzNoQZcoPL+d$$19r9iRVj-DTG5*>bbJYiDp+fdGMr+<5Ltft&UF zh5POb4L65{tvKs!^Oz68V`J#CWKJhF!c5*+;n1GH8zu+llAJDY|28#g=9B`{O+-q} z`vw#-AEGU>JZ2;6=j@?Go1WBUtN6hEYX02o?&HkXMdm~^-{P|}qp4w{T!)oD93}A3 zH`UpS$7cJ^YtspB^Jo*kVU!}%xvQ0yhL36LTgb(-6OJN=stK?YzFjbG&+@sLw8~vF 
zmo|Z6y{f>-S_03OraCwASXzQ3ONZ?)dA?l;v@bVVt@ZGKFni3VOwLi{oN9thT7*}w z!TQe%@&g-Xc5}n@rn~Y+^IR?8&2lVs?lu2?Gbc@p!uqUn0PQrbjL5|*%ZTb>Zcv|ZP=N^!@F$xG+lYm*s)|;6m?#k{3=yO zT0S|UNny~aGFjXwRFrsC+J%I@zg2za_@^Dd=>3SUC+(sB@eZRLd~auhuY!Rd`H!V` zKHX7p@&UNe@sYEKg=!RSm6O3HV!nSo8m=ue@T6{SMmatEYEjYyJ4H)QXD1f3{ydpe z-C#UB%y@Qg3(D!Dc11}cc8UxK+7xW;fHM5TW4_}DCzL4+61$~?iwphZV@El9-a-Z= zmKHxYhz4TmT70Hxkn*CH5rU~>n#tblXJL z&tKHt{UddQmxJcbgBi$e*8!%{p8^WVl9V*zHikKhl?Os0H!S+DFkTyDYK;X9{?nk94)zcQtIpMQ%%(mhHhIx7`%yoyrZKg!i%n4Nr*;; zdR)n}0oU3d<;gWN*Xo#P%63iXNDmaK8@BQ+e`JnG5aCm=_#s*C>x4(zbzl@N%$gK9 zzOQjII=av*FPmuCcdM{hCzVuo5+Y8 zDG0f`EE-ZE=8e%p{hZYJ>H#oxxxzKS0*Zkajvpx}9Nh>&?6G@9D&F25aobMHL%?+51`{LC4U!!2h`Y%pdPr#1+V5Lv> zcAihm89dC7MYKo|t{A!c+V<+q>k6ely2yq?mv@Ejktkm;&jhi_Y)itukYn@at4iHNS5oYaQ)=0bgQ4cF4Pk)j%uJXAmH z>g}N;?Zc^Ip3MM=HLg|8o*v!_$C>-e)=Ypo}?H*jU5BxCFOG*q`ITh-=??!ql>ABL`_45JmbJ?@;; zWhf*it!6vJ*q?WJF!Ekq$@Ig6-{&^^pZ$eNm>LjA{WH!H{BcH1?A3DIQdy|^4Dss6 z0(Xb_isk$Wf$zRK6*x+dv;X~xE&Z;y=JG9Ic52w}saB{OAHH1-q;K=N!2SBjk?KN zN{TP9aAqK!+lVWEu&pFwwA$^LY|x}&MLc&@w`|*bKw`K=vCF+Kl7-)jj0G8&9wqGa zoF}BmoYEG}L1D0#YX9&zT^kXA3?vu&meaM3{*2-L+oIy&g>)1BuhE7r=ZSS&B|#Yh zcn{w|$590=YK&e1{|bTZ?uc5v$ym0$FB%fK&>B5{Isx zA+|QvYk-(D{9BDYEqb`kpSrsINm~##$l3`qFAfzREci*rJM5ZVzOi}f&dqV?JXYdt zFm_@IM7gWTR!#gB?MrZeMAt-Nh+Q_8y;_eKyF6(p6J_uNS0I{fc~!^bconG`SSZ2O zC1CT;ciV)gRX}GfN)6m`^p?-C>@{+?PYhX$icid1J^p3eO?6i;^gsT&Jj}w_M zr;2@zK4b3vbeKMS6YR)}^IfU9YYOK({E_=id&ezs0N_tdeSMK{_oy>m%81LXO5tm4 zx8eNNjV6m5G6NXhY476BseV`5{BTBczQ=-j$N7RvB7G^0Fv3XR#&Evg7+Jp6~s!*{M+&8+mnVnDE%*mDOXTzd@ud-a(jYQqx z=Y;BaNblQSm2X#JFRu+r|0j%7NH_B zbP4~`v(fi_CpYQyv_Y#=E0!*%A4+!r*dp?kUZ?tAb$Z3#K7FX#2k8W3bDXyB5kaw; zS^X9AQu%|EFPpEHissVxaFAzC4#R1mIkxhPtj~7NF+gB~iRK;W)PFj*Rv&O+5NgYJ z_@p6W;MoPzFrt~ZcjD}8pYDZ%2~3(>d}zbDaocr_r`D z@H8)tXqgr{JdnFEReMzwmqCH@eeRVr%&3gh*uZbDxHBVju($KCt9xQS3?!NE&O&w zRn>bVdZ#ixHt~R;m&xZ_T;gw|CFv^@tigNG&cRDoO2sMH$`q_IJf2bFRjW^E!n&H9R|fB7*D7NkRElC~AI*6u-&2CBOUr3>B_Q|-e< zI3$Rgam7|_(2P@*HzO2&Js|nDL+vM7f3_h~N42+pYb#S$J5&p2UOY^lIdKpF-e6Ox 
zHJ{6uQhA|b=XW`d6_n{n4PKGqT zeje1!UndnHI;5gB&|s96^&hDWw5A%V*ZU^E*&HdqX3P zCh!(2Wqw0V3fklPKWrF2e|r}#&G5x!S_eeK^zl;)>mlG3j=OQ?^6oNEn0qQXHc&%6 zekzYS=8a!7RK_Qu38lIdl9mY5GeP@Y+TNj#CgPi@%CQ%C`&VO=RhSWPSI9+;f1uTo za7WKv+H7MX)8?eplLU47NWQq=T@1X+`+AOfTEi1VFB0yAUY0gn@9I2}^HNJ_RWEnU z1a$XBODyNs6f}RBJXAZtIvn~SK4il4?@u|V?tDnsUO`QW^i7}!`Pf0%ihgm64{=m{ zP;EpO%P?~*gpLy`^Oa{*^miYw%v=~cKh(O9d_2menThQ<@k!eq5I_p@sOz%pTej+{ z4Q0*BGtW$#`61(_*dKcQb&30s9QA9R=Qtu(0gCln8PTZx6Z7=MPsQiIK7$e97q|c4 zqin~$PyV?ex%jKUi`z>ltY$i10`x^8z{*U{LtaI5ec0korggwwrMnyJV-LS1 zv9k&p@o9bCVGxFF$&n&sd#9qoXSPSF6C_fY^3`C}u%)&qiSC{PsHbvpfR5prHAU}1 z8*j0xLh6rUMl;na16s4{BD}sg=mQx5qgH0xfrzin^c4=y8|}ynnK)6IiM6VZd-e+c z(novJ-n@&R;TgHu6Lw8-*2gO4p~0Zg%RSKQl0>bp6D(}U={&laVGo~-aRAyvHw9W@ zdzcjJF?ankOz8q*)z0D(#@jlsMy665ZCLxMdlz!5%n1PaqZ6}zs<1zONE*GA;u0Um zd|o7yCP@#!T4;IQcj}3Mcv$y*o1h5p(WdHO8g-IoPEU)X458ejJV5QqEQxZKMGHpf z0%fP>P)hvEsKgJwyznjA$}-N?WxN0HW6^L&65&PrMgJFFj6Qw=?4eA zLocJ7sQ+PQD2#Cn72(CtVlaS-^Ue%LD%4_FYSE?CnTQ8bZKa4UY_`5odfOMEg}Pr* z@Oc@Ajpo8>dKxU2jJqcSo$To@doFp#VJQ0#7ehgh2$*^YfQ9E;h-6)9KEl-avK|t3 zStK+Hrp4*2QXB9N3#FxyuY1rJV$*3>jhOsb!pjSFP!r7mS@b6s3EbAIEDzU>d0{6S#dQ&Hpy3BhTM{P%A3G&1pN0 zHjyjdr1>S}!-E4C4vv2h?|LcvW&Ytp2-o4aUyC#TX~y7~4EY48_at%Vpu+k0eGrgy zJI68JMD-8wX0Qcps2e#xt40Kf5yIYHbp*paDtl&8*>hMGDsEer9j@LMc-XHb-ga(a zU+BlhaR4U8AzAHwhno9Qal6Lm@Xq{Ug?_|+hrck=KtaFp$3~iO+S1H4d`;5L;$@yd z@Kj{hIC1TX=rTBF#I-(k8U&|oMB3cUX9eGOZwvQVG&IsH_Db(;dp&Wo1_J6y5m0b)vbZl=vMB99y2p$1qifE8seWcbVwl*lPO zAe6Nhwa8E-PS4!^Sl{&i$!fu5fvs+BhtSt-y9gnUuf;Z&9W8>KB=Th*7{G~ZIfrX4 zYzF*fD>IQoBF_Bj4+(%C-DL4#p5)s#^^MIyk{ePvQ>ZqP?DjEcNtZk4RAXj9{>uf+ggLlleqnvcyNGke9$#Xo9Dobb0=&UF zw1?OGFsLKj>Ba0Y2vD91@0u|>W61PmrW+4rZtSUROHZk0ua&tD*ISRF-zH{NBlOFG zbX#scwSQ#y4jQDduHK8QA+NZ>=*(22hE1;`{8-Ajn$;c;*CU5naIRhV> zwB^QbZRO1Tr)Y)8JGMfv>Mxsta9P1zX|vs~?#mtiRxmg6(p~J%&Bpzc+zn0}-Iqga zclVACZ1yB6Tpb-i-|RZdM}2SMJmd%SLN{R*>LwW6RFrRZnK@%JxNsEq^+Ybf?~~m0 z{Eubzf{(t9bVV)1erJ##ntNK+_uS?O^9&mkp*Zu)T}RGP{;48WB|Zu_RrK_hF{J7g 
zdjUw9lz7Z(cMCeZ-mN%5(3&v?c3$fN7Xoulfe(Y4C>mWb_X7KRg)=tA(D0X!qMST` zso!xNJGlcxit8il(H&zoflhZJI`yI)Jrp*6Lsz0%o)~$y7vhi->Q^@onuh$5%gar1 zO2*!ZB}zh0(L}8pibb($V@@Fa_xWM-ey{r|ZVDeD#IGaMp`$IoghNOFc(_d56hcrO zJNU1o%4gwTo_Zpj4D;|74O)4I_nQ00A$T2K$Ub{8Hn&2pAuO1r!0kHOThHcsZT14x z1QkC8@e8A^tuGnEWrcF3p>qL3=c}VE;0-QWmOc%@{Ia-wZV|k`?4ylA0y(i?EkYv81dAo;)d^!Mhl8x(0 zujnZK$+Z9dmwz$*pQrxWGDk5KvY;1J5v5}0OE#xTn!1E~hd{k2a{-r&2;o1dRK)s2^M0=;n=_g;!e|0M|g4`Y)CX5>DW9iO<$f)!U~0Plokj zZD!Ku{}C7%*fM1OO&5Nu;Qv&y|LXRN^Zsqil4a%m|NNKB#5!885G}>lhGMo}$L!)P z8ttsRRu0}vX~``RIro-uK@=DTas0v+^E8yH9AK>PWBQQYe08gnl`XLp?AL1kXR7f; zfHaAQHDY$BGt4{eRgKNr#baG@+-6RQf|PEMlL%0g@x4q|z4 zEPE1y?CI#uXBV0p-_O;>cZ=D)ZEmzFT$%?D{72dYc|)**B#~dNNCnx&69bepV38&% zXE-ydI6P|ka$=~(lPFOQ08N<#Y0pYJc3uUeFczM)ejFeu@$hRmG?y$R2T`6z4vw+p zBv>KSSHsLR9067;n7xN)8`f3`U`)BE9(cLD&M_!s`$wXYi1UVx+svfIXVhJm*z_UN(b~9W6oeRsdt(ibURno#M`w-Awg@igq&$9dVOs+6wf33_omx!@R)O;lkcc3@y2xhiz4g$Uq z_;usZBGz*ZxLR@o1alO?7c)=WC~?%p1wv9Oi#U=U2(spVCzk7PqHS0XicSNXAAr{Q zJ`2r4a)Jbkmk$XNR1VH(9BNyH9t0>jUQ3#~LUh^pFj`%AR;wC&Fn2X9S+uC`!JLXa z*da{J!8Xff$Z9c~$Ej0BH(5=yr42NpWBdl1JG4%K$ApzBs41404DZ$!=Ydw4lCQjU z-*tN^Mg1h^Hq&CP<4kt;0$f6vOUVu}+37C6Z{u9@`{Jpt8pu~*vcuG|LSgodUjLC5 zy?doxD{LMwFN+BpJHl;?Auq;*p|SI`xogH0aCZoX5!3AEEO(*N46;n1arBFwxx`_A zNIblU;$kUk0fQgR5wBHv;I-JuJxJu@Z!=|W#CpI+&{tQmv4SoVIs>(*5vfN9IlKJy zs4`zE4$QFTTpxslmm=6I;PwWK7y_3HqJ+ z3-Skh5C75k^1F*6LO^OPkYhmVsez{bv_Il7W`E-QYB9diLG`IG!ydKWQML|8zKzBy z=}~L<)eBT(L-P4K-9gAj_H8^gi$)156z~k7=+Kn=spved$P}jj$6o^!-4;&hg5zOZ z1HXUU8ipzix|bDGyzlw~yIlKnw0Rswo4TNv(e0agH2yEm^t+1^@8+R1&C***Jjj%O zXu!Pybf!`MYT>ouB+%6EWY!Dw@&?;?f7M0OX|p_DV~1Bg3(w1*b9{3%SN zL#ZnDw@U^Cd0ur+c%cGR>N0-<-VTn{T;FlGp0nW}-vlm*a1&(peaVYJ>`?O=BVjI%|tOm#1* z+%lGb+wqC9@u9*FpUe9LuLaMXwu;(%K)lSQMo{#d{rU%@?2?O(oc#fAxJuVA61pcH zuX&_ZgVV!FH?8p)#=;mXjiwN9!$94fO1Z5yokc$})_<6$-- zk@6CAjLcY5jBlZRip1bE!{&mS?N$fgg!@q|^AxdK&G(Nc`0L+u5f-)boDh*8Xxe)q zG*3gp9|wyZcq4sQrH0+|0UXo7L+^`g&_N}mvOXNPjiu)VbF_7XRdG8Z@;G<4mD~Ie 
z-dbwSwBu!?sqI4Q&IyI~nM%rIsYALaOO2i=^p>JU+%44UIOPygBeBKJuu!Sd>36S) z{LReOp;X`$f6G%q80L-qssChN!nZme89uOyDft?sxMZcv zK+=Iw>4d_@;$;EF$oL!&bT;RK`pKr1qn{&z8Qnq$bkgYl49Voy+BcgyJ38njR1 zr2MD9OF6zNs$Y5RADPrXmr|KNhVQ_lGb9EEI;$c=?C477a4=c($1(=An3O)3Gmqn4 z>F|C9#X2)atW*99y>XP0M}YH+wi1|(G)c=uuN1Ye84riF?Pp;p)whg^5`&_&QdD{# zbF)SH%ep|3=V+d;cjr+bFNtj|OYa-x%hm_u8^w&|uCz!IOKVLR6 z&K^r97K_yNx2@@oK-;uftxhHlwf*4X2)04y<@dw1Fq(51X)p`S(h_3~A$Up-KI3xQ=U&s&An%n;Un4P?-U(Ax?MUh0GU7aJeQWWq zou-8bhi^`%#x{Rb0gd$nh;hF-h|@i*L|$ULwzE_wF}It~u)t4e%loPZX}7%|bA+&Q zy~$MD*M6T`<&U*SBQCwA4OjSsW)QG6}DpHTB*Jl&zoGEY%;lwT6TTI>vd>%grfl~+PLXNvNuq`@%Ir%W)-9j0 zj0ff2U<|YS`px6UC5mLiET#p*=zs>H82^Ia?N)-}!aWV?_bT;iZRT(%)B?BG1XaR$ zDw!1=nBW-(Z@ecEP}=*v)3F0e?qDyh$=M4`^P`q^lN(E<^tVm9$sCiDM&!?S10(>p z&G2&%&3eH$15eG|gM?#AwoYBbI@QXXHM4yxxXaYBKwA6j6JGmbRx95cGRa_di+DJ5 zyF-=4VC=LWm_YVb02Ct%=fa~#WiNP~%6vwvHz;yBjY$>tvDO5i`npgmQ{2AD>?=iO z4z@PC#eRsBx^Y+MQ6O1p@5wWZcC>R7ilf__!nc&RRgB?Wz+%p7X%!)-#(*dI@_>JO zWpMQ758M#7*Q7ihcp4EJiwAHz15GZL_iohqF>EQAcJ$Z1y~g&ubEv&%-e{`Io2Nzo zcQKSI<5VV4yxO1B4Nf#}gT)q)Iowdq#TcSG#tALVs_s4}cDaHe^np#0#0(AR_lE`T zi0TNv>(vHvU>dKEg;;Y^JF-NB%Z{TAxAzpGu-p#Ao5;9KzPCH5BhzH+US8xKF+&XT z8j;fXP7+SG3<1;?I^MPHig2Z-aAALjh=?5#c!?dG%V$ddQ)z%1C-oc-(Dm2B`wlc= zARrSqP7*5#;B!n43b|81_U~Ykx$qGnaSo?@vN^tRA?}x*_cq1ME53Hd`-gOyN`HD5 z5F5cU;jhDVI8Rw&-mFq?zkjYO2ZyWp-VbR{qFLP~-QZ8xL=!CD6EdQ zp>k4ZHw)$RpPi%@W3&MAoRiZB(_UXR%Kuirf&Nq*A@UaU{Pik}JTon4oSqVd1K_v3 z@>#-R$f#`*VkF$4%PvqSE+0OY-YOM_ zjtSHiT_;GdK$HcErmugm^>50&vWT_GRvH5 zdtBLKbJH_bnsW8|o&sUlxl8g5yw%iQ1X=vVGvroHLGI`r*0cm*1%=VeWipc{2KbXP zB`n#90!|VuxJ{+`UByG|m^Y*(g2#0S&EY{>ey8dnYhvT*t8b-yKRDZn1r#JLbF6f@ znSUkAl;r>iN@bQ@ZFP7Z4dB3&-AA1umzYB^8BuIGQQ(CMClh7?^R25 z_6;2S;}v=$zm>QV)0DGN@620d5&xkTj@t}ahIid%DlJFMI+mFU;ruTlZ(;&xI(qdW z=#h-C1q!W!5M=pyF~&qZ&B=zmg~fr|y#@EZjrR^o^c^3|6`?slu?7O)Gg^X(=LjT7 zR?~;;r$V&kc>>Hg9(pQHQ}kU0!d38OyCAwUb&iY4(P&$|k1!sixSoAvan5@!mf*BW zkhU9R_S-dRLS2KW>8WaGnO$mJ+yx{hXZaTZd;KvP`W`pB*@uI7AlCx+%h#qv#H`c- 
zglARqH6C}XG2x7f(11Sv{)!Wmm8cseZ1*qT2&Ay+v)BTk$g;6zv@qlB=^**78d#!< zA$ovJs#aBv+X}YHxQa(3hs5y<1Cl2G#0YGC$ik4mTiCn7`XO%_&42DtayfxmMTWxk z*N`%Vs9>=s$you&7$F=|RU@N-2S~r387H=!1mGYEsA=Ih)d^gI0rtNu*>2NPqnlo- z^y1E!0z=4l&YUwZK|_LFvQ0dLN@5x@y0lG`heOdO>oqqZTG9mGEz&6emm1uG;91o( zEO@SbK_>-F)JQ)=Z*{;;9RgU5bi~j6o_c1lfv-y@vfslq(j35CpP~ zjnS4&Ws!hYzbO&ss)+nL9GY-saAzpH`a{Wv#UQs82D@ABkB4}UVI*`H#!n2FAxMx} z!BSrcG%QHz(6Vw}q6M2uqa^ES-@9X(_J6pj9Hf4E{(7ws-~Z%SgS=cgcz-w~88#(j zL64;xc(>5hOp@X1&vTxHvT1{Sjh<-?@m~T9fr%#MYZbPymA9*E5$Q&Cw8Vf-6e=;=iaSYj^PG0b9PCsTg z`O8g^SFkk&p!%2Kk23v;m|Er)hCmH0v!2o-J*QT7c~EDw(oWE0`8YDKSiww0oc{3n zh*evev)q=Ef$__Kbey$+qU-?-aJ0Rh*8H`B&8Jv% zjf5fhw<~!&Y*n-A0~&3O9f=ujfi%i4sVWeIyIXuhMHvczf)kK9{%CI0y@|9Ew;T&6 zGCvZSTi{`;AAGDv)50x?z)+TefOQ$`mmh-*u97EIgoO9YfY&;hX;u6)g~K)gmCZgA z2;PEk(V7;t!NxX*%p(#3fSS=L?9Gz*Bba&q0eihTQIrsL(B>%fi0wr|hlbxIDd$2_ z0QMO4$$pDBzK8}tVRuY1^7y`a3P`AyqpwQ&o6L;k_#zErYIa+oG*#r0qhMCxtB}T8 zFhNQePKcL6x>gBZsP6@vLHuQRww(tRji&jX#Y0&~+}}E8-Cx+`*=iCfB46U0v#@z7 zbyd#O#V@6vWpSG}W}QE%tS}vYNOtyCu1M0;Ruhlulj1lYnu@XK>>&X&ru&$^F6VlS zAAd*_c<$JzM#6O1o_M>7P7U)EAV*E-Ek)@(|2EoF`&$o&(F9f>k!yfASP#q(m`+sN zJlJI{E$^P?7&>KpTWs}RR!@}L9MRWwa5iI`92*gtf4!Z?Yp@o-*AxhyQ(Kh0zX`!t z*%8Q-VpuV+b_86~1jDHY2Ty(brIV!<)?$o5*MS(^=F?3RmGjO?te0Kum6U^rbb9@r zp|1DJ1#%?)2?mI-!&y<0(uO*+yj5$PP2S>jM-L7T+s~K={o11{G=0hFpUCn%?Bn+r z_%f*$aQC1ZgS2e>% z*jd2i*lyrOf&L{E9Aub=T#&X9Zs;y3R_yIs)VNNjb!KWCL;`Ulh%Ka)VqMV=r_i}1 z!s&=SK~0=yb9P07g-}dPb%)yo2BEY4$k78!ESV5XSnM4;2$wq(DVHNPbl%LTmt4fp z)7LZ%@O&>Hv)cc?eCot+;T(F2a6sbJcA6Tf0vSseJ4w{3vP6|7v5uY5B#EQ&*%V}vJE{~!w{E^}0EO{-Um4<2exBmS0ZGeR}iOYNXAq5=ymv!Kr4 zHp>d}&li+@Am4^n#5u=uHLhDS{U1vJ;6g@#9Yyj@WPRiD6+t#8Ii$|ugh_`L|C8?k zcN?RDz~V+iWZ|N}IE~nHMA~VL>V0M@(h%rFkVlf+-wug|tJq`PjCb$s=`>ds6d3*R zprfda-ss?IWWQA6pZ_akA;qEh^*$dF9ON`#GW=+k64;^H9QYqjK>%(T=Jo|Ty>Fy- z&Jd049lJn}QS;!ZTnBMcDYpa35;+Y1C8Ov};Bt=ckK`Ij++gy_RnHUxP7*O5jv>Ft z8z^a=w~XpCmfmZH5awdQVsTv8@B1{7f-}+zY<91*|6Ia0#f!pdaI(@2Og||@VeSzfir>Rf 
z_*O^1uzI>xF=-d7yqc9|0K`$a3yea|cK{t}HB&hnb1O3d{f*o9kYSyogsAlGkAM`53ZmTL6%3ImFXa^LwC7cl8(wH-y5Ye7!O6~2ppIaCu0-6 zN!fxe?e9t`6v)%diFG7O^$@`A{nHG(#+Hp4R43mH%2|KHm+o+0K|<=J1bLSs>%jjg zGT>}u`c;y(+y0i_lWbXpf+RBC6U|e0Ltek3j*NL=2q<`BdvQW@dJE8 z7h@*8hGopC$eO8DJQY;C4CF3TUQhpv)BTz6!KXya^f&DV>>F;9`Q2l5WojX7*KbTe z1}OgNPh0^5IAG`pn#H#`(&-E=b!jeITC^|wT>&9j3&>9Z#-mC!ywt9BuCCGK4`U?6 zFLFozl7OzVm(}rbh|fB7{D}%AgRO^$HjDO8z$Fbon{HM+ z_{V8r&JODVlJqoh=*V9XY~>qzfBLnr@PUn#K(BOvWse1@9sNf`#DDo;>%hDGD-%p4 z`Z1KUlNZ1mNPlP;a*IC!6s1Cb%RN)Gly)I@(e!#MH6wk?Wp1Og-XB^HIj+UiG{<8% zNJsp=(1ti^TyWhr;HcQ zyA=pSjYIbY6=VxcFk{j4~CIHaYg4pz7JnC ztfeX8_Dg$)v!3d#EQG;{^aqJm%f`eu8SVIc9R*osRMZRbYfvW}xTQ?$J1r~@n>k(- zNUt~5gxtxQ?|OCLTd@4iaN`M|U*@)`=`0lUl&4ZUF9Iy)t*%k+!(pv^sBe61f{~QuK@Z3bm;d8Y#b_6pkaz}7DF{xts8hJMB}lhzJ;d{Dhan)H_Ywp=YI>4 zM7zUN-i1%BXlS5Jz5>+}UAs+(@+h6a$G^Aghnb9LhDLzs*tCQ!k352E>+vJg(Gj<~|#Bfg-qUJctS4Vb!Z(qdU)H4D$Lzt?nzO1w4V85=fa z0z_c_IE=_+qydP(KODON-Q9Z$-_or5IaOw{mMVGZ<6`!=vDB$cB@-9FwmWD*6JGN1 zuQ`E{*!9_Z^Ylu=CY}%zbw0L*PJa!l$INBhAc#4x=4XA_=iC|I+`?AaAGo52ZM7ohN4fKE2QaMDOL1RIHDN77l(}w zd}5u#Vk|#VYzifyFq7WAW|?FtS6DHPF84`_+aFh6q=n9D=NL}yYdcg{8qh=y*28@Y zkWuqfNum+Lc@-`!v&4urwL>RGB?&}!S$O&uE|3(6Z-^vz6q$z$LlLjEy~#yPtqOYL z$HK<#gpY)c149hzxX6<~?Kgw-x`dwp42Sj5@vpf9;8#PnjFCQ@wDQh4pYAU&mcnCKxP1%}mZF6|pt8AXV zRcqxT?6K*mXc@!?TD7XEZns!8TyWR_!Mq|2AOJ6Ps65jBA#x{8FdbzGgXTTp8Oi?i%t4ntFlHKnYy$ghmt~bC zS&=)tbwgzaX)0=3AV6|3Z_KmRFs#coo8Ln`6f4MYuSR`PG=H^dIkQz2F;s&*%kK@f-IU1a(v$uTO^^I-%S1}D0{;)5_26KV>?ZTQs29s>N`ZzJ zumK^^z`|?+E6fsUjc(3d*8>21$}71fB}Xzw7)MzAx;${U#;Hllm97snX-*aBg_CAF z7&lDaM#iM~jtormQmJ-EXT5#rY%Ny+1Nk&9=@?elU@7c>HM9)A%tf`pk-BIZb?Pfb z-ZlZCsJ}tmgP`xI6sOd5`E=EQd4U(dxlpoMh z_bkb}FG>l-fGXU9o}0@xjIl85So1reA(j*r~%*X{-)87ExNf{BWvZ>@`CG zR5X==BGHwzV|K>|kaO*}iTkCcFi*H**6?3!8VS7b-+;}z6vKr7Lxmqx+e{hv24M^Y z#~-yF^JESh%TrWbPS_|C0Y(P_&rswE-X%=(F&21cxy9IH(*6%;PzxVx5MtJYS}#wK z{ljSFf8nzKWyqiE4-=H1Vd@9XSatKi1@DE%8YgGG0)I~4#lXnSdqYm_iJIik-PC@`{MLJuV;8N=du6+Ujew0j{2xq6xU>k9^~CwLFpP3|>migm3Q 
zJk>ym+$)|C<1^f8_!QOGAy=fE{&?Z(h(6TUnW?SI&DY$i@(LaB-Uzg8!)U3Ly$ZDS z7v4^M5UVOJ_mFNnPC)k^b5W(o!R&oi5WG$FQ17X~)1oe&Y|dLZusE7q$acPEO^#n( z$-MZM)JheF^<_TX@V$h?IsB|Pmry;Fo!+Gn$xddd zm$FvP6|Hy8Y^p!#A9b=_(BI~lA$jP$SZtQ{kZ%xVJC8ix$l7G@p{;*CsyY40Ah&xH zJ^X-A+U@8;gLI=?DAkdQSf}wwF)w-BMyV^l!(s)_$`4HTu;#G|J5Dw>UNhF2%4`c9 zkvaEC^*VF%Ro>*y8>PZ;!w>Z+7FDXJ8BMd%OO)b>)d za5$x~TCHw8buTZ-|3_Z(e281nD?uT9Ut^{ zgOkpGr;_A2e_qjgNsc)egKt}|Xxe`iYBkqH zzg1L)zc0O~B}(I)$*7;6-IKb}_G%&i0%j>a%{)EcZ@kB*x!2B3pEzb0I`H@{!p9Toh-kSScgKHR( zrs7X$(M@;NjBI=m6`R;-sx@kP^2Q7eB`AB@rO~nV)8U={QbMt7(1B05O`oi1Nx7;G z2R3<;h;glXpQ|HtOR!eVDj?YFz@v2WPwRPN6KY48nF&pH*&!emeU=$x`G-05sy*ka z5O2HktJREHyv!Hw!!lmvW%kJL&pjccv6{S(VT|*$uHmo1mu6n;ytus|@0R+6#nMGt z2Hjv8R*bna9n6_kpmo2l&myl>Q~wKA;I?tn5pe#Gj6)i!w20=_jTn z*Ish1Hj%qAW32;XtHS5HU(fAsxO9y7yuEqo=4&eojmkey}pNVTR*=-F(Jq)ri5_5H2RHBXzZ4)5Q6qesPsXl2NfSKihal|;Ai zWo4gp=HvN2pLg89F{AiQ73|88CFELowa5U;AdFZ-_)25eC~G-%7Z2pamX;kNS*IVV z>agAqvsELVfM!oLP0+RIZ=L`kbr0 znr$q_NyJzm+plilbGkIp{^5chrivCZgDG^yuN;mT7RO!y-hl}dfH9g>>#9~GL15<0M}+x!D4#$%~ZE<;}u=ljX~|kbJ)Pnav0IS=q8H(XHm7 zx4TXjUi#OBDb&N!7CXsq>?BY0mrnvKXjY7=xm=cR&Nx*%e<2|xyN%7N?t@{Ly-W7H zg{E$B0n0~j#mA6rGYS!N3=xw)9*v!bTk>I!8rAn2nqO?6DF?P|xENqNd0kR|G;D39 z-s>aHGeka(7+PRXoy3kqst*~nBpwNzxA?NUdDQjuTbe0H(|2EbL_(7JDM;u{sMG_6 zy`yO{-RFZ?f-$w?H83^)X&uIrAHiYUVx(d%dwlZxvmF>TY%E-ls9 z&ET$rEOj~X1=NAHcy5l_Y2*YzybQpUEjsVttlt{35y11f0#9r;zU)37rXvV}zn7U5 z-zP*4%mgPc&C&5~^CBa{W_)N*)D`7d9>@{ELhE3mNxP68Rep#o4=NEN^vEDYXiXUE zhJL-FEsGZwdg;s5lo(QL#&8UPo#>{+YWDBk+XfbP$_dnQu4L%IO_D}$%-ZQ3OD7bx z48WjD2ZGS4?MSDN4hY-UI;QFJc*cB`4geaSR|LZItV*Msm~kJH?HCUAPYuY!Nv*5epW@~grwmV)@2XS&WqkWhwi6N-`QPoql= zATI&Dhm%aq!%6&y1T4vfO7+w6F-7Fjs}e`;=Ign#H2dS8b&G^d3Z@PgOL7!okOPdh z0@S+B($2PdxZ18erBgf{7HMKLAOpd$M!fmSGyOig-3SO^30+tsV3>I)ie?BXnn|J) z^6#z=|5j>STN|4b`J%+6*n$vA)es`{&)D_wB+RAt_zZs+F(@g_mVArW5P~(5hvtcQ zHdgj6d%ZGK3_YbuEHm>%ECWl}h}@MagpyY3uUg=DU4DW<3wByZBo+W^g^f~CmQVQn z2~BC1^CZCyKg#o;cv8nY!?T9Wv{nFVxo0qD@l&=pK(@9WCH=q5>Ts->Uj_z?2!K0< 
zK`fGbtP13%Y$Vs+#z66s2uZk&h$T9E(TVBNKWPOimT0E4t>j02?V}?=~4r}4EIU4v8 zRIpRE+0LcfW&)0Kb$>R;#9eO_{QlNrOHM@$m}$xlCfs8n3`GtpPuZvWiB0bM9C~Y8ce^u`;Yo!e$yJbb)|nB3hy;tb_v23U$W>>T)wOt z-fObnSw12O^?*0pAWFyo*9NDxASz$p;FK1K!`XY^3RC>_FX5+`^6O0Ld4$B6w&sWM5ds+IY-ozli`I3$7Jy<+C7mLi^ zz>o0&pOmdC_O>|m$DQp2iNt0o4*pUL_I@$DNyUKC5fcv#Zk1+eBX1PXWq?3>=%VJC zxMwRz$1*}Bc+d}G78kxsf0rIN+fpIb%uL5TJn+;p&J2vw78F?(0=%-*fMtob05HfJ zR;h{pusm#7-_jAP6+YTe?~mD(r+;PN%b_^UESZ@8_v@qR1qJ{-2lFYTN*@?ZY)5gG zOE}L^-u)d_UhsQl;~goXJYcwM1MTBn8LyC|pMZc2`W*u2YXa>7L3)c!m>x=x4%uU{ zn~!axq}1fH(6Zz47O6Qyo6-6TqKe_%=jl)4tXD`OmZTVjSYK>LAu`5rjM$9Z=fVfTu6S3+ zPTTV<9K@5{_1?X7QLXwTxuMANicHWem=8JAFK2e+VUEhNfS15n)#lN242{(n)a`E? z(DRrOBFeLQ^OKFS&C+suqRMI__bvid{){?!8>kHs0`@rbbR=06*;uZ`l77&t6#ZCZSuq>^vv(+!=bd;3B{!T(z}hh zFu-wpsao9Wx&M<}7Qw`eZ=SkJABTNN7j??z(JCf6He+Jyp5Bg*VxwjXXx8aNq8~O3 z)iYge4I^grokOTf*)C|}$YuF}4;y(XK{b7vJzuT}I?&D;SoROKdFE!gC*f(Vb3I6J zkV>F<8qF_vTE59Q+0osKN|~!ynA-u@?cNNxe_q*Vcj7c>7ro^Pmq}uXqMT!sg0JXI zJJDbcsVDTxU>r>&7t|<=>%)}qI*CT*@!GN(-)_?esG+M%{2Hv0@fV9mU0b!=t^CP& z8tP9wU~p!|-3KAKnh0}j*p&ViA(JJhK8D^2!E4LqtY^M?%sa)4(o3Q6Y8uitpd-t| zDd}c_E)JvKluWD`13VUo8h_jEul3n?@kd8RnodnsEkzXUp6pGz(lwa*V2dS{W5JgD4$3e-U?~4Ux!Y~Ml zT{pN3Z3H;-OK2V~2{3@jw-=O-Ke{%z^+>H892^@C9QVh#hOuS3q&}MhpZ|ijQfPVp zENFBmY8<=SI@HU++{&4NC1IiY_?uzf$G*VzJRW&$vQJ-<3f-9Vg<6|v4Jg1JI@2&0 z;jh02|D>o238P*70)_(Agk`0c5wnU~EG>FXPoxJ?_n z2TY*57}jkJn~;VFBcx09Mna?=Qz)TdAvR0iL|pynCi;gl8b;0I`U{|%T*tFo&sto3 zObU(l_23N9T_0eF#&sA`TniVL;9lp)QVV*=-+2*)@!}=iTipr+7JWF}4(awvj_f4I z{g&?In_ORD*MZitpkeN`%CpL^KsJ52jvdla{t|_&h?>q-@)hO^^R;V*mhvil_yf?W zGw|fl_@s!ki=0RK3#4ua=vs9cBf7S1yhG5h0Pf*B22zmR#)jpm(G zIDj5sy_Cm~hbd+He{9+8VbbZ6Z@RaG*&Jy^O;9IJ-HmREDgmyM7sP0e*ArFT9!IJ> zSIb8XXLL9PJzwucIEYtr7WVNi7aWZ62)pS>i~rg)!d%4hId~IU7G_NRCt{8EJr8bH zBH;aljh_b>8o@2_h&y(C9`=O>?^OGxT!ob=_VVO7Sg`x}>y8}$cRKjJyGgW6v3$f` zJ3b|crRnyul#AvB^jQOmITrI_d8f_2hxY|GYR*TCF6s~c@KSlYxIghp0idi1(N;HeA5q{k<#1EFc7U6m!kAf zb?420HY#7?Ycw^5ubyBmM7I=SG)mg=Mag@F8C#iebUQf~c2@)7xV 
z>D38Z(Ab4#`o`Z=s#wl$&VfFRia)EG>>P8b!}l)+w8$FZ_mq^VoW9G~47CLiMF&VU z5Q@lsPJ1T)(^O9IA&KF(VE=|}Hj8yt?I%BRoLrp=>Wl6=q7y9AhuM49hE}uqx6wM) zNYQM5Zc;iig>HZq$#+3>K#Zm)B9HkCA^!Fk!})U9MYxX*s&$^-v$jk=hmys3;8z@g zA;4ewqC+X+fI!W~Qh7h_iBu-D2i>zf(8m~%lkuY{uDV1E+=|VCHLms{Zk1Z1nK{&w zg>y<-A{9}L>&6iRZO+q(<(n_Kqf76IPJ+|(zQg;(i##tzDKAWRoPBd;&tmpDo;wD@ zTVd9h!9ugznt7g0A*OGt@_S~l)?)D0h9&-6n$_*X6D z)fXNjDlbo@diGIvdk+`VFU|FySS@t94b`BSnD|wx?7Fy%&ImQB|69XNljIFNVO*_B zSS2rqctS&SD{4&+Kd$&jSg=usOc_4)*xIi8dB@{?PmW$G!LC-Gl8e)3q3%+IyEjeT zt}3ytYEJ*0NqG@XLNO+$>*`ca8gexhwz6$P(S^^S9L_i!ngA2|FzO3`s>?!sjYEU= zqU6cK9YdE>J7WtUM-w9B_N?b?fd=hPD%ZThkxMOEzeLgIwq#bw^hA$+~A+BZ68UH zW$CGh$jr_Z!e(SrOEh%>4hYxdo zrt!g?MBnvJsshw_%O4W7ez@v)It^#oCOQ^bl2i(TkvrS8f9eC+u`VE~aJ&xV57@z- zw1ds~*PrI0d<&U}fn|*HGiX>-Ur*jXPA6Q7xc`Se&{18@+uV6Vi_1=Zhecscpc-O47+}r)#*u5oC z!3GEq?LAdZftBFKSnO?Mu=}#ha=4YUMy66;vk_xKb`!m_%xrQsHksLh8^eIGpG3hd z66E*2eR9po@0HN-x*ulO#EZ`23;?%@?}N)TOhvAU(Nx17)kf@o#j>d28Zzd0(6hXB zpPiSZ=5S}jv%W5yiK^?|TwLWs%fptQ=c(C+(tHCURo{~1mD)NT(yY^T_N%cSa;|(s z+_~TFVn5Yj-X6GG&cIJ-Tc+VLnwbM_%cRsI9EgU?xNCFT(8wHi#2!%^!b^JVrAetD z?P zHu*bAo)?fiwxfGgVBZff_W!5}^0kw~eAK{f68-@UTyqzU2T+GGP4PKN z^1M6QKm`)B#Z-`uLG<__2iH|~%JK1w#xXr?I{ z;of@fm8wm8YM6a{{A5FOd!R$YVR+-bR;=&}#>uz3dQ(LE=(HUk^8w4LYTnBi-O$tiCvs=jEOtuFh#RwgP3&GJ{ZI7lbue+3tLSvZwm z&R-|D@Vg8jx5K%EpP^iJ{cc}9jZXa2^>&gw-u2Z}Z4LoS9>n;q@^nX(#4`fVi0;u# zYbN{ZUCuQVB5$u(;|k-P@Z2a9|I`|SibHTM;v)RLQzi>jRF?f|nb}EI_iq3N-#;NJ z*n#*}|7nyEr`9YL!Jg@`?n>jVmhfc&5?wB2$k&lnb-A`0#ibmPUE6z@Vc^wZ#ihM- z^$?|^parcTWWaU8JI#?tzdD6JTFWu&ILtIK+|Y&LLoat2)AhODahZ1VmR$7gq|Dmh z37Lbc3iBV1bwN=olA7-2Bz=2?)8!?WbEzL{>sjBuuct~19#XO-R(_^ozwYr2T;6y1 zVV|`=DJQ(xxH1PPFIlFj82*44i1_cPuqadC5=}_^q}SKFNZK?G#QEwb@t}CAkH_Qj0DkJw9PI(udGD9+cUOB3^Q7VEZv+46` zm^HVdo3z>mUq+yoRmQ>%P#j;`ihg6Zs>gH`GVGHoIz7(hsb;Sy=%n{_CvY?6{l<MoEK>5H zI0#cAzg!$*#4XQVIoeipaqsJSg(oXpONU^BDU*~Bsbbf4($W{IUP=u-8Gm2zQVn;c z*x}PEnpLNMMa!@UxH#l|RITQMZjIZYT?ySJ3$od{)MJ?fz?}K=72H)AKj>7L?(@5u zok)h0yhLmKTjlR{@!SKZjnZ70?a)fJ7R$zetQC*Fx;cgZ&GF_FjSxBhBPWae_$l;d 
z5r_)8ztwZRG90(zJ5t><*Bks}WD9pW*-YRJ%$|u`zBc7kT9UU7eudYtzy3P; z*d%-Pn1N>xJ)QzQ1!_aM^c$N|@cmA+o@(OtP>RiP1%L+)7Dte^)huusS|#ZjoVn(2Z!#DqT(m5U6v2eK*kopI8@o zh6NnnqC^4}!@SV7LUP`9)?Hg#K7!fa@hx_jjvS7Vo~NCv1yP=Lr=!T>Fg(FY>ozo% zdI8!Q^y)-tL;<$R%s->k> z=s(da<;L~wn-(^Iob_G1;g(@>^0R!xbCYsJ>9rvCYF%AR(s#cgog>6yl28P`;(5g7JdGw*x9ne zHue4)bNyPyJ7ULcc-zn3-m~xC`n_cnn$L!Y`l{0}?;Pu;`4WC2tJeCvn92C9re3$t zEsl;GGeZ1y&mG;ZdHn9x%~u#T4@eci@ww@&5MI(4%vbe#_G{QWatjTtd2;~7rVOO_A)#<-W7 zB9Bw7?&6)6Et9D8$y#4{{etnrx=>ZAi@3x?bN9A1=H1*h>m^MXpipV?U-qa*Op1lE+rnz-y_St=_8HDcRMrxlD9m)e#17K%kT2Hv1PBV z9PMl0Tzc_AuXzWr#bo>s=~X&iTTW(dHjX+R>$UgOE;e=?;f7VXSjW|)R~}{gFnnn@ zNFu&|YOJ2~(!iZ2eC=JsmQ3$gDWBHyHD=CI5<<4+*gn6ojaDE|SU~jEhoj!K?;GEX zahsVmI5*30{~UKa`_zhvJ?@==Q3VDEwQND^8mqMGkCG zI=5si$bBUAx+l--L&-BGVv|>A%{*!GH*efET)%U6_3@0WZ#E4ie~`|K&D?+dV?}a^ zyP@@+=F}X)1P`{b6QNdNT@9SeBzD*W$DxikFMAN%=U(d*lQ{6stY$##VEymnM{50s8 zIgZ;~8$YX?Sg&(w_o~;X>uor%s_vY25~2N6`MyT|4!-F zwej#oMDWr+Yi@d+)dqb*0_W0xO4A+l^gj{7?x`YOE<3}$SlO;sp>jEOrm=;JhrhD<8fVs4K0{two#@s@KAq&Sy`SV^*-N&d)?^e@ z@Q^Wf-p6n(_VQO&g**4RtY$Ji-1j<)HR!?s?)b}hJGGg(#Ti}JD}zv zRNuMDw@-R~Zsh$#Yh^-NjDn^{X!7-)1FMrL^L`xR4IMp{wJk+If$kmG-XvO`yo|Pw zN4D3rUOy6Nym~~j^S!gfC{BF**xZ&0d#gvI0=Dkj6&dcc0!7JXY-4T1>*FIkf_>DO zr8gO*Q|xcg7si>sE7G_z_`*N?LZHls$KU;jz6LQro%Fin%T>J$5y?p0D4yVmfgzubJ%wnV>)5qq7&-z?X zFP*H@qmq7)zWLxPy&R21cMI7m$vu0-c$LO@Wsro<%GsYW5)P>*H%pAS`%8N~2 ztbWgy0_yuuQ$Ji2z0=J7s8BAKD{xEUP8R1TpDlc$$}!uxJg2ptE}yowKfuBDgZBAJ zN888@rPlpVWoaMnZdQBqZr7`K?RoLC5AO9S53+F7ja~}aFV5O#TQRkpiOE5KX4gXZ z))c`l<*M9AJ1I4e7zZsKNK?3Ir(SM8+M8oo!A|Y0X&b7uCgObIME`Ne}<_B`I$w82>>)04OK%oEzE zAId5U>YO5-nvVt#NUi$!A7z|;TT#IVr84UHRJYw?E&27kDyf-F?lW;;Og+m>d-B|L ztkj*wFs_Hm`woOWzi`o2eFxN>F6S31B+v5&Rg16-21wd24)>0^WJ@}Z_srz!#HS_g zIb}?>OGRa{+Cyh)VPR{NPWIw(--t+W70*a+e)jxKv3K^;2!0~2tzolZmi_2xFL7(L z{Y1Bt2TNW3P8OeG4%c_XZ(FYnal2i*LpL|FTG~HO4t!-BLmN&k|!^1ZN6dH+RX|Cz6WlxO; z1x`k0xZeumj@mrr9h<$vfA>@u-erPWCVSn6cDgy%3;BmUPnq>pq`7T7_*C-N zH}}&L=5-35pYk2J={Sx(wQ`oA)c=Pq31OkLYy==+T4!R*x% 
zmUkJCUSzUuwQ!1ayILoa{4MFL>p^k+Smg7%^ld5@k3Md*O_4HJFS~TGxI3*}jq~Dj zmLDgh`977LoAA51`l{y}qZ(c_Z)(1)QzOrA#OaxsI^Eu~+m*37WQO@@X&XP;+!wtT-kwT7Oz&6YvC3pe>3OcDw&>NH6XWKnFlGu+NT znKJp6;f#@!#HIVx>o#m{bb7FP@%&N$D9!7G&t86ed5C_^rsJxa8;Kq61*NX>Jo8qc zS*=!^Q?B*U8ku{Bf4Tc=_t85EG*fK+MmG8Z4IgfLMw{1IpEK94o?e=JbFETaD)wrL z(Tg$7j}wy9?u_I>YU=HgvpM0*&s=Dti)1CgDD47=(4`#9gLM<`J6DS5-R8)_X*L~5RlW@UJ!7wwNlrv-0@|Mi*m^!H0Ofj!y z=E_V!1tWT9f_N9@jZQ~TD_6^^9?=q7;ux3aScXN0AoK$o?wPbN&C$-x@MR|X4x-2-{N9Q--T zmChA}?9+)UJ{PgaQ`iimx05wvA@;gC$!UN6C~d%Zq}FLuunhYURDzWX5!k4|W3LJ^~*W%Q@fTMs@Ddv!K^9Pbbb za=%(93p8!rtU}FNc`5!)Brcx6UgZkatJS`P_1bAzz~Q;?A%=z9T(MKQVuxyVYS)Ia zA~p3Z)~o>SR>T3J_ug%i`?5k~-GggK`0RZ6tHPx{H zU4ETiJZ(&!|5JFUHMH#yIZ*ucEq{em-dNE8z0Zh|D$tT@CQjT+e6vLbBHZI@besV< zZT0how*^UJ3v^-2szMdn$nSr4d-PE5E7l@D3?=gvX{WE*8TOb`=g^yZzISV6Twx6= z|EMZt5l8k3i27N6an~0M?MHC_ibq%nD559x>^<8~yhO9;okBW-x>JPXr>UGqiletN zO49~IS}jB~8F8VJW^V>L>l(^1xyVPLIf&<`OT)ResL7GY>OE!b=GFc$Hu?*OC)o=i^pJJhJ+IU`#fh;5SSAEs9G>GhXZ(O2leScLbQu1cfq>EI>dYlaStxU4 zz^#yrGm#8aiK!@bpJd2rn`@GRv)n)oR?;!$i3xTREvSs3=u&o6nxT3_x>nJ%ZpOKA zw4Q|DShW`hdmTS$>ri)b26JW|hP+r+QthGcxt8NjmhV1(;b>fD3JJT;-tw}z2 z1>_s7wHJRj0?{Fg9o1sbzDZR4+X5~%1nu^cVIi0vv-T3 zogV6^U9>HsJ~uzjrv3QdG@TYP`eZ}fN;odc#m6gqqQ%XtkIW<4TmlCdP>=3?l0uv^ zy(ds6-%R&w5{;F49WZ2J>Zagk4IS3V9RlWP#Y$8Y$9V)$tw)t<5@~Cs23k8OU)rpG z^tB7Nw1K(|^^RE%vR0#dVJC_lLCx)UKJc^E%zOc+e?u=*x2qiHH=wkL5}{HsE$_QC zUDEK%%Ys)m$ECYj>>SBX0TLYE9`Tl2y3O5!-dA|}$k{10Jw|Ord})IV75bZXtWv=m z>L64l+9j^P<6X^;Se};N^}LvF>*Eayt>40R@U6cN)MzN5&Y-&GwJ70kGJnqPvJ{!% z%aE&*v8N!JX;d%aoxss2hqkMU3Sv1=Ra7PF5eT$Y?F5_kN5hw80bBN&zo6UzvQh(Y2p~T@kv)m=%FEJk||^?cE)3x_e`~4QA^} zXef4Y9O8z80nH5uyzK@|kmZXqds}V3<oeo>j&}KNM%Cm>_C<{{CCk;Ln=#7zC}q zfk$26y?J!U)y(Nx!pX`uqGm#U0`DJ>C!jG#p1n8#PAsgUKxT%&^YFyhhMsN+Qpd`T z;K4PoGe>CM?JVB&d@S7bd+|pKZ0Tm!(WHiShNK3`z5PjS~91HSu{*$w)-PJiSsQgNFgk+x=AI%o=wv}cT{rS-6rU%+vj4*^P8K4I~ zh}@&|S+KC~0X)PYdtl=6W>20tT5+h~H9Nb#2e)c^Y(tKtZsMSU*m`PE+DU!9v9iyb z(M9yMa4lEc!ei}qyb!-01{+~l(0!PJN#X{x0jzcM|YR!p+CI+EdCyT8@qeFefw6q``elS*Oh)B 
zzrTC)R-cp|e%_n>^w-_q@6DI`#FO7!{MpS&JQ{+uIgA`&ao-`&-vo`Pwb`b@}g&iz+!fJUqC8!&AvlJN(yJIdFBl zB;1xi&mG)W*H1v7-QM-vTjuG(?Hj(|pN<}1 zzU}Rw-ELkR{5`(yDJ#3(=D4AmAGLrtQ}g;gTYP-`-E(yIxAwU=fl~grn@V7sOZOgs zk8R$CpBwkR?~%aISLll5t!&@NhQ43k4_9cb?LB+@{5JQAv3BjO{CqrjcP7R^o(=*x zb1v=Q4P868AXxYt(+9;sUcB=^53Rx@aBFv8&bP0kRw{G(7 z_Ti&4y**pFc_0Plg~|5t+;puj{yyJZ-$P?R8Sz`$W8pn?=VpEWtE*o7SId6iKY`JY zpP=CX#P)Y>UWsmZFt@dC?E$g~`z!;uZTxJ*A_zyg4!q-`zu%`0brLKeB&c}F2W>d6G%*3ds2)Hi_@{seR*%~j(Z_GS%LWN`0e38 z;e9*Xe7ra&p>~F?U*E^?U&pr}J6}KR$Q%KBGkbPs1&>fIyj_Z_; zVK^8`zGDdB^Wt!~uQ$Ld2QT(r-uFfPl6vg*^i8J9rO<1wvzMc-Yo=S%dgd4EUUNG3 zof&4Td9UFmA4~YEd6HjFT6D)(ifz;B-}2)5KMu9FtaIs>Op;eheu``_T)LO{wkvdF zp1)7gE#p;9DUtCvwQcx?StZF-eG=)CskNI6M1AM++Pm7pwR!6X}kj7Sifut*01IY-@$*9SIv~7T@`FJ6CeN zp4m@abI{yt_$QcXot$IF%)Yt)ohD$;I5o`G)5=`C>mGDKg6OA_s>7T-4;S zfRT_SJTXbt5tb{?%zQgj5B+wHugCzRT7qSB$(wQG7RsxiNjx_5xn40?!pDhbZ{ z>z2g^&3=ld)Sg2o$C8`>nKRx+A#SOREyrSo9XI4Mt+8iS{1^B^0ATeXN>`avzMjKY z@SO{qFSo{wv-;M)S4>9N{EWIUh&a#GXeEe?9v9&${nVT{ToIC8gp6d!PA8dRx>C-6_X*7X5+NE zW!>Rsi1Z$QW}6sRIZjPL@aSvP?~~;@rq~3jht`uO3k5zG!c@d;$XNGDM~9- zCsJry>2eR&kCv%W7Iad^{90w6p)^b2N9cGLA+*ElrQ51}mPyqvjq}2gTov=fC8>D> z3#l%208vshpyw+Xl)+tTs)OKBNM6Y+6E)Kz-RLJiYHZLcLqEO#a*S)s3Njxvb2KQH zTKBM68F_>?6j8!n(lEY~5$$0u<%ap1e#zo}j0?0M9jGylHU+l2PBzFLybuqn|K2E~ zo6^EYBu$Sog>k~vB{m^6tR+o@wY`oBQ=imyW`3oyZS)y9yy5|6`yP1I-0#KXeFfhE ztRc>w@+!!1DE;YY4a#jvMRuOre8^NkoUL(lPYoMD-T(>?zCDA=-cQ|WXaIr#VDQ&WS>3mA&qNw5#RCXKgl(Wtc_Z&8SLWWQOfzq83Cm)=aXutz45 zW|A!(4=q}GQkAprk#e(vJG_*+HBRc94O#^TRYIwh^#kd3V8pqqAL*$+ zfK0blHdE*LNTs%8q|sS5RXzWQo-5NyI$)jZ;dFKI7*-7vbCdGSFysAstaUd0fKy-_ zpA#edlB^-YvGdX&`<=u7T3>$sVC01)5M=sj3^-a8SpPT=^3grD9yWwwe$7%y9pOoYci-IExlUALCw z!L&}T!8NK-^>%Tpu8(hJN>_t|_m zL{Ql;0AIk73il+af-;3a5mFWcnq&cHH8oM4dnEGm9 z>I5LN$(nyOkjQ(k+RY5c>8U#?C@wdEZ_c{-d+rGgISEF;$eo&=8rwaiwv)|49z&Xf zIvAZmVcaF#CQBB!p@Sv;q!fx8r;P1XY7^bIy2NgwF1D4U1he!IdKCuMeR7j2mt)8Z zH7)xGY{434rgQ}wAX^QQH6qr4J~q{O)8a!+h7v(2wiEdkU)3#2iPF4rvYbl3Wt|u{ 
zfT4=+Bq1%p5L7y{FLXnc9E@hebPbQf)@6@?;dDFV2%6A+2L;kqv9bg*a!2jMfqrj8?>iCx*xX9q)8XJ9@e&D~QmD0y@Ddm0 zKqykH7KJT`cnRZ}uKsapB6L9{BA07r32V(E7gCs5F`$4=1^{9wKQox8gPWR^xJbd} zeZ)|Z4K;K?Q_rk)ai;dKPiF8K6(@b51Yp-8`?qVjv|B&-#|UY13CBe2WP~&04h=J? zrS;z+P_xtwt2|pXPoy?jz)#0ff&D@}#a;^o7kPmX@#xgt&w_N%d zZ_KA4y>ZvW?<^hAl>$qu#gdOY|ADwT12qc_3$6j3SNq3ki|=gPZ~V5GJ=?3_Wwl`D z3zv}$_(`aQrZI@ONwfy2lCpRGL@Ir~(yCcxpYVWqN)INPNS z6UJn2m$|S6!#D_Myw4gezj6w@Q@`m%_7(8hK!YSfT*vkkB7HfQ;fQ$x5OCE-gR`nr zQ>-LMNNA=pgc)B0{DX9hvT-15r;N={eu#Q1L}+8K9-&H1l62fx z^5F*Rgp4pQ^^2s^m6`P9doMk1;Jpz5wFCL8 z(^4;H!TWU;|F3-@z?ch85i%Q=?^{!M{#MMIiaCOaCzC|Eln|tp1k~7pMAK^NeHD5$ zjN%uZ&T}MQT_k+7lM>6Bch?|_G84zCO+02TtQThIlzwd>w>XH z9;tIMrncj}4}o2}@&Dc_Y@d3J930N9w$e0DWHllewn&on%-b!XQ$OFV<2kl44rR0*U!h`WB`OxmFw;7E|(S2m>X12vB@PRm9_S``C%b^ zCJl>C=!I7<$zAe*Y)}G4x9XpK$0*MzSN+Z5#GO-|Cd{0Pv(0{llX>Pi6)TI|vj_fu zwqR@wv&fkgu}2rTt7ggi*L|e~D>7w>D9guy1TF}Hg_RV6hC@Wvv7@aDOuQg2y7w-K zBB3@Vv}Q=u>P-))t7heVoi(5tG!oEikF09Kp{?ye44h#$9(sg|~6CKDdZ95@0 zywMcES2CG(lnBWie4qKUlg39(QzX~3dacoZ+zjnla>t_~XZE4?3K~4niULD-Kfsf@ zAtZe_P!z|%+QxpDYYLNPahza1zS`AnT3#37uw|Kpf#%2XNnJ}^WDE@ab6&Q;(Dk#Q z?9S)*W%|LnT^{{&+cw!BXKFn6Ecc1Hab3hnggdaEP2UgCaLftHXEv5KFcR?&jZUUV z_8}T@A#lVkH|;$%pWzS~%yRw=p2mh)>FC`XdW(L?Xl2wA)wx7F&w`&Cdtrnn7p z;0$msasH_GVC<;LH^beqPpX|`*rDhBgs7CD0!t8@Yh{zw1s82sH$FWzW*9l+EZB3c zL5nEnm@Mg(K0vlUCQaav3>IS(6_#qm$!Qocp(&6$Stm5@MneFYp?u|vz`4qJs8|kh z&Qgm}V|%zjfh0G9R6?V@NG(K#8a>n~{vgq_^C5HHiqeAb_TE`!v8P(ISnJv!StE@U zB+y-iYFPzq|F8rL9}m#-LQhu^$Sg6GN;R?3Lxf(^xj20;@DLb=&GEPEHHRUz14UsC zJrhm&oY7_-+BH_OYCey$k0AbUwTQqD3^p39eEqyiO^zavO{Wgb`S3m$_=R0EcE3lu z=i)vM3P(g83yYF-mp;SLpwbxDa^3yFoH|1p5XL`1D3*NUC>Unkxe9d9Qcu(}j8YIb zpBpIR&6}F7l%KhJMs;hSobksLu2 zVlMQf85s-)861yu{4Tj07vXmEbS&a8R22fXHH1doEf{m%Bu$V{DN>%)GHM-I+6>l5 zSt_E8Q{i@Cm_wIZ`Nn!A#>NuATyAh_+)y7%BUKHm+btatMt+oWhnFR2f7k)o_eD6=)o`N8&Y%1X5aA&O!*66eL2d7NA} z#OIQfRt0&mF#Yv-3Rnfo$C;G~fQ(LHp>%)dNpDvBS2d-gBEl7+T{UJRC6F={zsTzH zFOyddL?8gvgoxXO+$44yLBRfL;LKUWi)#n4w}~|8L`&r&O)!;^AS~6*q&4b2uGQn6A(Nk~;a1-lAhJ&^b((^k? 
zUBC(7RCS#ZEBKINK=>mKZu0| z9GHa2P2s$7gg{zr;wRNc@uhD22SNH!X553}0E*gK>?)5Wf!R`PlSgk9H z>yAE8MOh&9suV&Bf-S<#|sjJ4ka+iB!H8Nk2juHU0SE>*LrdD@>Zl$%Vu88{+un zD=KM9n{(=9+PnVSxoMJp2&sCa33QvepnI|vE;t< zp74uXq9iCEMr>Q4W){2;CA~a!s2b$yrXR$Cxtz@r+S@sD=yJ)fTMFlKQbLiPs)hRh zo|j`X5V-K=0Pc>$Du3yjhoe0_lTdHSCPf(>)3wA1y01-4GmsGCgT@8!VpYy7ADuhA zWo|c8cj!7M#==97{karFbg^gxg@9+K?2XVX?1wSv)_$I<+Eb$ysQ z#8Obk2m?oZOT)R{qz3%C0C~5Dbn98aDW~uRu@2R*QAA&S)e$gmH=(jj~9C z^J8jk!`f4ccy1D*2H^Z^CBe-!`st^7X~wubOjI>kD1uStdj--2mSUfn+IitcU~}`X zxGR2aLsFLAY)!8XPO{jsFX!}&VkRleWm_eVaif*-{-h=KZ;Qd*4yx@67o;OfMbX{@ z%&wXK4h;99e6VjVUgC^+EsdiPNyNk{L{a8}3W5Qk=sKV?>GEf>+)`#|WbUPT)hcDz zw}eT`6zR8N(q}{%iM`eN{W=FEz1Y5WXlSt%$dNvRf;Y6G>EwXlVJ_YTE1MI zW66Jptlpn2gGa+HZCHVRx7a2~Nb)Gh{DbX$5~uyc995#$m4=AFlT(}%e7Jq{sNoHz z!2>=ACbw3KJ(ry#0+<*K5bUJXpg@XLs2~m3Q8X+iHHD;L$N?>#0`f{6mUCKkA4B5C)Hf+M{reU;96YH3a&m$11$4J(bEHA=l$4}v zS(&gzBZ6)*>h|={J2(KgDo$7kefJA0!fU`HWik0hG6ZG2S7f3&Sz&1vDFHy7O~~WS zBWuCoxu6}Y81DN_n0q`bTfj#Y9B{_+y|@SZCEFTu1U)%6C?n8vB|4>) z@<7#v3;vf&!o_f6HFDet`!SmP6t2!zCz#NIy_`V_7Rgtv^qrm0-@Y`SBEupB^p&BtkSz+Hpduku**aSd;7l!pGiUlsQFzF%Ev6{LaEXk(VkGIh7!2(YNs;UK$2-~(E2!65 zWn~pBNGGFKYq1l5YRvC0eGCCer#m;)Ilkjt1%LZD=1vy9L! 
z>~C*HtJL}!a<7<4d7zAnk}BflSTTN&Hu{0 z0Q!ZX)a&bKQfbidPb1|CG_|78Z6+Li6)TcrW&@NmjScmMY+*=YLs;!&V1?6yFA5TX z01#u=G8yNRYGA68FdP?^I2rk$9OwUYiI3{1z@4DTN)j3I%4k}{Fjc0h!DMtEvScmf z(*=;VjtNkxK1|?77*Y22PJ*5rHvn+fDQV-Z&`1R_xs;%;Ouyhg(#fcE$)}^pcVtj; z5adTkoT&mSm(oQw6OCrq*wj^1y7?p`y&%*1BJ%`4#YCbanKFB7)l4d1{)xgy`Y z?Q0OSq6!uhBWfIU1eWOy`*)^S>9$}L{~;o6Ay5lBd?WLVZhAZ-My1*c9SZ`1^kpcT zVuMZpbt@Ir?HsNw1jmd`0-?@n1%jF_G^qGbVshKw{^Y}LrM!aY@*o?&b& zPBIOCFp7c$lt>pkH^&>Lt0<`HzS5M(tKc>oN8ij8Stue$FTfXi9^d|5y<FSF&68^if^H+|vi?uW5uvHgj!d9d5}u!nmcUX#(8M z6p5%KTbm234#4h#l&-M?lM5jb)HXIUh5F?Ez{Rqv9Jdu17(^~e3Ch7-kg$b{7!@eB z0As*W^M;GrI5uYw6tE}bb1o=)6RA_k2R94Vu$2d@x@_7ph2Bu<2xE&pitu^Z;nrJ_7so;Qk zVR?T1yc+FKCe!;XAp;8Nv9^!=cxm zEg>FLl)V+5fI2wdbb3~i@~)aK*)}Trs7wON3Be~ploh0uJq3qlk4d6GBHPpt1tA*M zV)ExCu^H9?UL7wgXpu!u+bAWbaQhs0?KEMo&~XjoOSYIoZ|q>mVT>ND{^gEBko%G zMOOxO8uECF_*$*%lW-$3|I(b$;CYYWE*aui_|L{P`JJqgzgC(mAr6|n{aVXfX# zv82Qok^}a!*3JNUyst|=ptk!#g9@9YKdpiD>X-E>%4e4Gcru-G%LxvWk>y*Ik3JyQfmq&(}CY2rq#*EXko@`_0 zCLuB@lz>!>3;a{NTZbXIl2Q_7%5%WbUs(p}hfZ7n1xoQR2a4B<7(%S^ZuLv0+;E+} z93KKpDP-p^+DxVn-vOK!X%Kf}K*mt5#+F#+JHiD%=_C}Jh*Vo1W+fl^VrU~`oyADh z>sk@j3C=YVP!=W6gC&8O;3HhA{r6QQ)jpJt5%qM3VU~Fbu_g^*-KeJK z9%>NyDmnNjBZinncLpVs8kBtX^P1ovqEL{PDC`Y{qP*jdDBFI7I%D@o*iFrt@=T6Z zLp`R&MAE1y9}$IF)8L@SiH(&Yrd`p-t6<9eF7vp$C_K@mz6R&ERR>q;M52u)@t>N{ zqZdc(px?YoVib#q{#EP^_-Y8KuA8!5!R0BU^)lCQU#9o~YPyGB=uj(08di%nv{DbrFl+lxG8Gf;VeM*S_Y{@$2&(66#rO^xA%Kwq zN(`Jbg&5N^@}pRb^}><;m>87D;bLX?^<1U5+bV5Hc|N#*y;nILuHLRj^gK#?u!m*b z8BV$ZKkEKW$`1I$aku8x3khWz`o(791J!S+x$eZOL`C&&+%Hq0NGT6aR;{l4+d8ib ztHO>U4UhHcya)$EBrt*WAVI~I*(}5bn6~GvyHalr zEl$dztfmyGtLCCv>oXA{puXpy%jY(AB?)-<)6MT94lw{yM=x<{J(^ z5cAWErcB)XD>}I4z%Ug<@FO_f;NB^*%8pUNPSemon5^Xcuu*i|sIO3}DhY1%ZyiH| zl}!-yprZRJ?G{U@fTTCJ4n)pS;?E4@G4k+hr&!wyv!G}i(YCsd^k2aet}53A-OGc> zZse9h2p30L7q;Q)4>jE zBtBPn;Xxjlf-VXHT>44$BdLA-;Cz%2Y$9MkQ-n#7d%lRFiu82laBz9y38l1f%Oh)K zHwvbPQu_LpII!~XN7x1&jI)r)Fok=%RBaa4*|0*1z)Q?K(~h1x@IT-O$A}AWs5OhN z4<-zBZtiD#dLgbHNs3xaxcVegnumCP!t}2p(ad>03B1vTq0tC!)H%F5+91 
zbApLXOTsbi=EcqU&^aDTQ9hv`i!osfg_kY=G(#+TDr`#*$=9L;X2ua%PfDff_>>Z- zXs$V44p(V4!i8*lUL)w%6S^8wxQ8c-C8_-&-dhWq;3D64Sh`wgNQ@5vv2+q`&o``}9AvCdsJ<1G#51JJ8~9o1Rt>dJwq&Y`NKx7(8e zEC|qEour{8Oj-)~4O2S=a8~+NhY6M~mE%xE58HbBRq7)}^h-sFw?Q7rDu$XggbQMV z3Y^`w6qKXqhnl68n*=HnvgyHxPXPn7*65o6KX-1H5b%_y90)ROi!Q`e`Ie9$7;`HK zDdtscSceM=6gpSTB{6Sjmv96u{9k7aeiD2hTZ0)zVEo!0?fI3sC7KfuQtQk=G~ecW z0r5fOJ5^@g#Yb!&@?2EYhG)VH%HpYxq!-?2kDeMKE1eQ#n^C@`XrxIfV1JF4?{9y{ z%{plxJusR_I#s_Dwvc`BQ%H=2Lt|z+~2k+8F3-AAAL~_88>{LYq;Q0i4qia_YrIDf@lp*4hY=9 zC1#jo)K0~(ZA__t4KoF|AJ2yvf>f6uGtU(nJy1RtV5fKHMY#`dvskdw0RWx4RQekL zGrWwW)7&e#W##AQz!s7;uH!aG;lUta%|Z@VMQg;*TRe-)=sb0`kza%O-R*ZM5CGG; zFFo+FlDoYM24H04ibP%k^!BaU?$E>)?+G5T#5#x!W_dPs`iG}^_vtd(9X;mP2X;gV zm360iIpId*7}&Co6)}vhZSU06xnGq(c#W4)C52|VFg~|2~U>*WQw3usKQhk z7HwFQhEsdBn*!6@P66RfI;WF-aJ=&nRw#Z4D5xCb4XSESWwgzSgw>cv}nNe$Kv zEGYOhH#I=#o=J`Z0=*?MEW85ylIowVdq77?2;Zsf)cOqT#uPXM?9ZcP8{rHh-qm!{ z+&aY!|G^K!MxGpsDi{j-%q-hS#x-sRY1}Eun3#=c8QbJ)AAxi#t9cFV> zgx5`;m;+1N0DviR@}cA+Q-+sBrZ$Ny_WUOh+^Lfen{o0J@+y=@Z>rXTb{ceFS@$mo$cbA6kvajs;ZuWM5J$tYI9bN|8W5;&cKJSn?vZKDx5mnF7BIoA$`=+UM}wzRHi6e!aPDFh76%yuX%X z|Hhx$w|T2QzaHFEC@;@kHIXh(x-Q?mPrLoR-oL+Jt^m_kr`u0=qUM{Z(?|X0db?@@0 zBh|KtrkH>KlSR?U-__aw_x|DKW-EN>=JjRg>FaBUc5jMvl9bQi$LI6n_rE->|I=K; zZObG*@K0te^v`-i{NHlp|3A~VzLTlV|FLfWm(7#zKkJbAfv7dJDBs@AIHvt4Ck}HbY}sEtmDpx*i8RZdK%vLR=oSt4-kY?2 zf{h%P+bEEe5V_dk)8|_AJN$J zHC0x&kNV&(^m46tG{?T`!Cbsbnm^ZZzW6+L#eK69k8>fCoCyYeMG~CjqBoBLMuJ!F zuX)zKa3E#2(^F5ckpFS<{?7;?57QtI{y&KT(|`On|Gx-W4KW~y+=c!PUiT*94p)dG z)t4e%Bl`vr)!Qn1m9l6j{rEax4-twRGTvp-_18pyi4HX;g08g_APi*@$)fSeUcv-|&=1QJ8KLV*7g008L!CJFw_zyDt$s7Vx*A7X$By_5JGPT`(pCjn7d z+6Ax(r(=+#xY;S&vK)Dc(&ir@9b&LVLWe`UmA~h$SNmp+aF;M4&f-)^MG3IA?uirQ zv%{yiBPBRuWG*bi@CZP%_Hr_Ei5W!OEiy}vBqPYQn3{CkQnor^Avppvp38T-fJXOJ zkZkMj2GCPeg`MlqY~nkNl07@#eUYYQ10PBMI}_)7r~N1Xh6N~%ncQU(5BeCLA`}*> z*cPt3;e@;>%_qp=Cz>kZ~jhUztO|7!mK)uTl*-Z`)L&!g3?3;;m>-@lQ=Ke@h(sgts) zi_3p>`}F@~%}8ftX?L9=8^5K35++}Us-PCZDxtbo%W`9~d2vnV$dpVKDSMdGB#=t( 
z$h6AQcEVv%rn=F1g%k;EGzAu!-=BsL_-{I0q_6vZp7+tj*!o9CTtb2z4Y~XE)mz@z z_LCzoZIxoC3|G*%`Px#(ZgQ}f|qJn8~@K$jqIvd663N@e$9F(4bxGpxyTB&V{^~&V;M()G3Q<2+$ zZh9Ol^%`%aqOt7eoUfn8OCLQ_LWXdejOSpH{&|f9Xqrvi;jGwFh52K!4d+V#4g3~t zG|^mk5-;Re+rhK5es{Ps9dx-@pH0%FU;RR#PM75 z=l$~Rj!XadLHJ`#dt}c4>nEESdhPD;n8pKpt_(j|=yEY5Y&;T>$=ExIkoPZZLE`0W z(yc_yjX1+YxabaqKdfyztI_KR}Zx9<@kftYh=w=f3Z&sgZBQ8D^5AR z(vLGbZ*9ElvZ%l#(yPQX_bZCJp_-Cw`FmcGu+@d}uN2wynOP%LL1eu1CC-^SA73Lj z8=fi6+`kkv#i>=cQc>E=$%6H zCkR&B5#(*VwWl}hnlqgn>B1h1|4z;W(#DMftKP|xJxruIhb_L;!+E=m%6XSp+Xbt+ zG@3o3GTl_u?sx7NJ(|=8<;}J5*XI^f4Kw-LI%ZT>M3zOd9?xI3ECWrPo8I`Pa|qkVH37~W%*!|lf2uA%+6(-B=kzAyowQLYz0{e~9f=;h zoH1FNdG+%OsnoSJ-G8B^^uM#~reDkIMl)}Sd1J%j?(XV}vejufN@aZBnv7JETIQCy z?3yls|E!i!TmB&xn}hrLYWdI}tg^}|AGcq9LXtWn|6&SBPf%Z$wM~2hn5li%=Eo1giw9&32`hixLtIK2?+mTbbNmr{69w9thdI$h zsgNeaowKn{40cGpSz?dD)jm>gcR$S5KZ|#LIu~jbXg}Y4mA-wd6u4h#Hb(53k6+AfZvtKbYYwW_msc-)c=a>J2bIG?bkBKknW|x?>=*n-T0GfLB z`d_fY+_LWwQ~rWJD0)luG2$vh|5u0WMO_{K;asFTsqr$VyzP;WxNWbHYTWJrz!}zE z^O5lOdV$))24sPu)&nrLt4;DoYm9@Z#BP7YZcsf9xx;bH`#`BfM7I5o^Hua%$u*{18FwP}CAPsdGl$bc+$a#Ll_{`0TuVs)y0QF!Gx0 z1)l2Q{qk5UCx!yKE72ZwT3a+Qf5->33cFzaPdg<=Ull{=Wy=z|e1Z z`%|j-Y@=rXA@f4Q?09K_(&aw;$iYFqu~f>s^kxynTKO|Z>c)UC2mBt0v2Tp&1;V4Y zM{nq!$ck;k+^Y`%~)R4t>8qXXZ}iuoeIn3tE?TFH%0$q|4X z$FlnBa$}o=iRLcUnKp}jbm$7BK1f9tp+5B#4mBru3bT}_+FbS+24W9hHM1?61xbT- z;+pNah)cu@EJ1tZjEt^0P<$VjdVV5b);g`jjaMAKx24ziibeKVRjJdyMIis_$PN{j zx}lolYdJ@Lq0p7hv5iLf?lDywA`4<5pYHtgq&m6T(5UmtPW7(lzcC87S4o>BqfH%Y zh*iXszBNsuJcpluBuDl+IhMw_GFG=NF8iy{C$Uyt;8$FcZB>W8fx0+0%Xm8^vL*E^ zU>*dIgLd1D_y?}V&q?sO=Kqjgc6oC0Q*U(EAo|e-%k+i%T<-4}ExPH>T5VnXzHhO##J)x>)pbS=If>M8LCNLiP@}=l-PpGw|J_fY4uGHE4w;c0^RdDb9LTbJEpvKTA(0} z&?G%$wX|8ka@yJvH!!_)&O3gp{t~VBNk{LdSC(q*b>X>3LC74TiZGwU7c@kcrjr== z8bi1iMw@z6>2`AY=(ffe{ni!Na<=r<9q@A*9uj?VS2-DGXWw4O&)g{Otf|!aC$6G+ zxPN$HK(5Gd=(hFSX1x3{W6S4*F4S%uC#>1YfJ$a4smPWy=i1isJauKm+IKfEosj-7 zr{&KXAA2lx#}N*E3TvVn7Q6;ZYXz~dEud9d+1aYWxZhj?qe_{XAPem8Ct9S-`%S=O 
zGY6~_(*ulK!(~-V{8We+xT9-+(${}RVj^^m5)>U6(6WF-Z@GNa#>uMP@hQGYhf>ft{2Kz=c>$xipM!wsc%ve6wFW#FZ(q4fzLc`X_ zyU_{s;#!0a-=Q8t4&5q#L+rd;$s+ET#ZyvA2c15})=UJ?DvnC!7IsW-%v7a)dbF6{ zovc)Xq>0@c5k@Po2W$K~wSW%O*p43dB(qzp5CJvmUOu=yKP=IoiTv4P25}mrzl|T8^~u zi5%Bl;^BdJ5PBZzshqtv%I9X>g|_Z(RStNGP0%ASr5p6ufP>MYm!623vN5aWJD82J z$)4(G`JD@bo!JXWbRd>DYcY;uv!?Z1#^TxeEuM{f#ei;@l?RZ>W+6aC*XbkmE7rCt=`pY-GP7-57bG$&VO!bi&aR0Zw{oKaMWzBZ&) zag|Er#(-=hw0k3$ z*fz4dsyM6Zu->;RYC4=KeL>XhaCyRo)iyl{n9%zd2K|1lCx-l3E);8Es z1@EtcvXl-m6kL+Cp|!qW17#KHVCa_x9vcaPv=E?>|0wss;a_gZyThHY?h60BJ)58Z V23YJ6hzRi7+6RFsT?29m Date: Sun, 24 Sep 2023 21:59:29 +0300 Subject: [PATCH 082/270] Update tests to use xlsforms as markdown Signed-off-by: Kipchirchir Sigei --- .../api/tests/viewsets/test_data_viewset.py | 26 +++++++++- .../tests/fixtures/geolocation/Geoshapes.xlsx | Bin 7909 -> 0 bytes .../tests/fixtures/geolocation/Geotraces.xlsx | Bin 97603 -> 0 bytes onadata/apps/main/tests/test_base.py | 49 +----------------- 4 files changed, 26 insertions(+), 49 deletions(-) delete mode 100644 onadata/apps/main/tests/fixtures/geolocation/Geoshapes.xlsx delete mode 100644 onadata/apps/main/tests/fixtures/geolocation/Geotraces.xlsx diff --git a/onadata/apps/api/tests/viewsets/test_data_viewset.py b/onadata/apps/api/tests/viewsets/test_data_viewset.py index 30ff050735..67ed2f53a6 100644 --- a/onadata/apps/api/tests/viewsets/test_data_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_data_viewset.py @@ -2330,7 +2330,18 @@ def test_geojson_format(self): def test_geotraces_in_repeats(self): # publish sample geotrace submissions - self._publish_submit_geotraces_in_repeats() + md = """ + | survey | + | | type | name | label | required | calculation | + | | begin repeat | segment | Waterway trace | | | + | | calculate | point_position | | | position(..)| + | | geotrace | blueline | GPS Coordinates | yes | | + | | end repeat | + """ + self.xform = self._publish_markdown( + md, self.user, self.project, id_string="geotraces") + # publish 
submissions + self._publish_submit_geoms_in_repeats("Geotraces") view = DataViewSet.as_view({"get": "list"}) request = self.factory.get("/", **self.extra) response = view(request, pk=self.xform.pk, format="geojson") @@ -2380,7 +2391,18 @@ def test_geotraces_in_repeats(self): def test_geoshapes_in_repeats(self): # publish sample geoshape submissions - self._publish_submit_geoshapes_in_repeats() + md = """ + | survey | + | | type | name | label | required | calculation | + | | begin repeat | segment | Waterway trace | | | + | | calculate | point_position | | | position(..)| + | | geoshape | blueline | GPS Coordinates | yes | | + | | end repeat | + """ + self.xform = self._publish_markdown( + md, self.user, self.project, id_string="geoshapes") + # publish submissions + self._publish_submit_geoms_in_repeats("Geoshapes") view = DataViewSet.as_view({"get": "list"}) request = self.factory.get("/", **self.extra) response = view(request, pk=self.xform.pk, format="geojson") diff --git a/onadata/apps/main/tests/fixtures/geolocation/Geoshapes.xlsx b/onadata/apps/main/tests/fixtures/geolocation/Geoshapes.xlsx deleted file mode 100644 index 1f82673804d5a7f08bd2858d0b07b9b9de156c6f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 7909 zcmbVRWmH_-lExi^YX^7NAi)WQ;1Dcm2s93j6I_Brkj5c61b5c}!QHK)(ICMkK+u7E zXC`@>_wJ9WUVUV(U3K=ZQ|GJNU#ZB$BjCfJprF7QDRF4Q{3hrR*M<&2Yex?D$73nD zL+L9gM$n;081n_+k~m7%=Vk@jW-4Ctc3NAq+R&V?4=3j@@Nh9cR(*}M_x8Damj_j_ zWrcFm%NvAhya|PCr8oA>){kFhqisH zC_fInWxQtsBO)H`ps(S|ePO4ielxgl{r7+Xh3Ky+Y8{lWimRdavEi@*0h-c?taK%Tw}4;g5k&5 zBPT;GE{$N;tqC|(wzFgTlc#F5(t2{6CV-9%kM*_<%mA69Trr@GmldkiQ$!e8k(Vz%Tr#2x9$a-~OMJA%s%W8Xy zX81}LP7@v3e9oeDWJEAYwdAhxP=dbG=(B)V z_){=8GMr?vt=>8Ie__9HSoVFP1P)H$s+C&}pkS+84kyU_d`jUGN6j@xQt^Y@(UL~; zeEpU;Nv7I@6J_SkeC4`6wj+4_J$DFbJ&B&eCfn^M)^k3aeVfSnhK-c81AYU-(fsT(+w6CKdl`4Sm zwQT73e)%NcsFV5CG%$X;t)ysQy{M*$nL(hds4~WWA@_^74MqTG+oXc2Yuvmfxvz)U zfrzLcMBC`x-B4u$UfXH==r8m^T&F28;qyV&27qP0gzl27 
zO-~8+WVj{oVJ4!;J}pPr#Cv0C)k-D7Zhem+sm<& zGYle~sCi2$QF_MOLvplqgw}<&pEj=!REjM83*80-3L#$k&g^4UD4-E!^-o}h>b^mCiT%B#|58c6}6J$BcT-EKMGZ=JV zDj5zZu_P1HW$)uB&>^yimb`XPjyEY){CrwQb8K+eO~aDts;sTapxI{0 zf^zNIo0#j!k~|O@B=z%(XG90A3uZDe&#Pa?h&zcALQMQVbp$}0=D&!yl;8Rmjrx)s zCON!!M%I*;2Tal*VZu(u6Iy1G_X$C(+tfkBy3^`itR7x+Y~oq}B^c{VZU&<>4>KRL zbBvmSVYM613Nwq8>>6m(F$kb#vh;Mf8K90juhfX{X7hzsi4*&*}Bmp*I@`-}^* z%xnN@kDHQbnhTBD66Fm!=0qhKzUFm5PLb7Zgc26MT#Ssj76sl&DXUil1hIIUTo_kaJrYRUo1bg0L?r+kZmt=ls_2 zRICyA1KO>r(@5}>xz@*t>n!Q(fgfkvvbV_FL3rs8ob=>z zy$8wux5u|k+vB1hyeK?~Tnn}rxovk6ZhJ{Do}Q=Ezrye4$~8~XA<2!lwcoo|;SEWZ z5qER?+-rd;X44=^UIxFNEUHeWqP!{>;ErZhD&n$q7DIcUkNJ&D)PuahC1P}QoDbOh zBF4K2o8KJaYdWo;a1-G^A8D_zQ03ix#uHw-Y{pl6jBfqw)f2kq1tO#~Pi29|#UB;sF*&CwhPbaLeQ>%{elO|3c*aH;^d z*K&F0`>5fi=&H1V%Y(|`DW$5y1%9LrE`kVcDi3zUFW?5^db>}-m4>(iLQeHpH316B z_k+9as#qZ#TuR7A&~bIdJgUgGGq=pUOVQhnA~~fGylA+evi+`t9DWQQmEWP$e>q{3 zR5wDJ@XWMj8WweJc59GsraWB1$g?*2(9fQ^mmutV8j(qf`(z(nmSv&jO4L=TqkXAx z*M$VNUc>%q->{q&M<|0L2RG0;T5q4AHDiz?Rz(!E5~x@kmXEX6KwP-LlVAcO3P#v^ zFZG-X8lTl)ebxl+7j|9EK&V6SEB87NB^jC9NMED<45F5Co+Ik{f&;{3z>ndJOkz)^ z?8hM+^fx;T*^W|(m^{2BgD+Y=s90vBAV1GZ%8MQ>!>%DIF#IvvR`#ZB$x%Rg>@IX% z9@?OJ!seY^#dz^5T1y5|fX2Qi>~)ul+&SGPzc<_hP{#j(2XwFt}o%H=kH) zvT9$E3X5bT1ma6G*xraJCMEXPFX1U!qT?(eTnC&GCy6OA-?U4T7ch-lc3Q7RToY%} zu2yTrM>`S?`56}uJnw1l<&@Pfb5gWg6btF48mMkUgB}@d}}t)Z?P0@hskvR;P8 z&Gts@^um5CZJ9C>4=O^S`uJps*U(HkNv&ml{oSo<8CAz2Bf(n8qTLR+1Us7sVdQIJ zDA6(FPoKOw8`Big)P$ZUS13M3$!^$ai<>^dsJi+ymVPy6`pHp&Y>3}?7wgGMz}GUM zi3ZQh7J6-P2^PhBvu|n=K(3W+Y|U+7+;d-)f>17G>APlzyI?PRclNt)Ckzq^{LHM2 zTh@2Lx6-IK>x_~X#k#AGL@j7+uJ+7#qdR0AR0bG+TeX z6Ri3vZ0w4yIlB+n%=$Vu>}ip1fn5uE^3C0OuJNyg@?Pe5=af2IU$5(*%O$oxON43^ z^|I+V*&r4>1Gd)n-SgzjyYsTVTEz5=&tXfwGz@aw&)iQfAYO}+y9OO*wb$D=kf>Fs z5|*hFgMG0pd-#c{dA_pPB}?Q9NFAmAmx1q7#UipGNWjpVvu;aoSgfV^2cwJ?qQH;mkK*nKw!PqRgYPHTM5dg+Tw35;zG-PJy1$0E7*MvFonf~mSL1?ctAyIO!@e%HO+%@$5uH26fke%q=H{sh3&8){BqnPxCA z%v#Pg(nY{BnjNdLWFw6XUY>6{_}*li6lr8m=wbSS%ZB3nOzV`_XA_B_KI0n$PsbrL 
zFUfIZwlBv!-)xrJ)Cg=l89F@z3?~kCg(@h6g+|_;ho=W?0#8E%1i~AI*5oZ>;tOScal2D7 z2j9!(bM;4+SPA7BvAxBD2rRP=e$2`fx3h25btLUt!l##Iq`7 zn-eF5I4n~w_Rqcz5zZ%h6Jo7}TlO@B7Q+I?kf25lbAiX3fIMLvrtsOZlcZilJR!${ z;c7f@tYoMjax&xYSXuR_fY@r`!96~|cv;c?B3Bqa2?w%)0?`HHTPMqoFLE6D3V1)? zyq!w~a7n){mDH`Gzw!Xi{>ZsR3o;c{$TzyR7wX2R<%Jx)0*~H2IbNdXBA;1BrbC`4 z@hHL$rt7LVmgTdXxh&7U^mBmIWOJ=OP8AqJyhe}|2RajZ{s!E-A46SK77+RP@PuO=_J*RLs7 zo?N$GP`_v#!G@1nWBfk!Wp0$CRAvO(3I92Ze5)naSD6q7WA28SdD_&#n0m?0l_}Lr za=6{}2J?g1gGI0Jh4=eGiwya<-0e&D)qL{0Tou!`xS5^GVa{Bqy&akSp+Wtb8mQ+R zcwAvpW34ceKhXE2jSHuVls1v87%*;Z1oX*Pn)f<+uAI=o%-2?3zydYXYs)d~{2IpH zhD9U-eB(~ggw6=Ah*aE>a^tPmmHD$i+5NX`s+^=GUeE;YLZ(=7A@_aedb$yk>9U2fohgnQ!fTQf+078exgPT9Z$n2LX zQx^n+MjX-&kZIt4km<9Kb$g87OnwD$!IR|;$!&Kniee~{pn?vT1fT!lJ!PS)eSxnt6_mBN1K%r#UIgj=fDFo0 z${EQt8kV+5HySw8@uvTtoo$zwJVwf=oS>jSwHMiH`iWVd#W&a@{ll6bN1-}H(M(I{ z<@%la@{ao3J#rEi$}}#Fr`bI9>+B>75Ps_`vd0oR%AI?B?_s}={J?_!XYPme?-KcM zmh5*1=r6{s6+CMBz?cOedZFc9uhEhjM+mgWNtEke!1SoDjdeW(pXYuC!LWPoBCenKt@JY;`~uDA1s5c9h(@MMaDb0dZyUu^+iZ_y zuD(!|VTYt>p79CacPqv0+{kA;msj9pP|W!r&bQf>{P=I8QQtoe{SX_R+A^_a7a|6s zHZx?k00)1vLYBFUWhnLyu3;ZNWcRINXW?P}RsYD2;s5O+M{^?wpvh||2Mb%XzsQYv zO-0bu14VYqQUbsR%vq~SFq%wCNqelSgl3cHgo>nUJM_Hq$=zV+Sl*^EcByZN4WNJd z{Ln3ZYdGSyp%+Ml2*vm3%KvMlF-FP4y##*a{Uv-OG~ z2Rs2IZkcweI@pgW!rF4X2x%lNXXeUWh%VHn0rfOJD6OouunYlH=`Dn~fDuXrS179n zqRtkKdn3i=*OfHZZqCXom|P&y_==NDbc2S;Wryy^eP~mFM3*eIom$dC8R?CGv`Igw zK(!7Fyz88veB{KJ+NsTW<+ z#4C()_c(dH(I*>((^QAggN3qGqu@_ml*UQt_3!g^af(n+hiVJby>pMAx1-wiarg~i z_4#37iS}(c2A5hOe(>mHuf-}DouG2}RDzhqR^%$%8F74YysPvg*Q8e-v9z}+aZ9(& z2-k6OxaV)$$cWrzc#Nvvpr#u9hlr{|`KRv==OL<0K*lN#AUj76V~_*zF-3c+>4W?^ zANaCzN-y%bHN6^Gg}UziWLDMcsQNO!O0u}mrP=r1hyjhmd;3sFrO{hU;pcII^~g3a z=C?nWZI094VlrNJzHSf|HHQAw|0l|3q}d9Gb2;^2KUK zd9wFkNPq&q4RMBrf!X{=B*6atof+BLJ-)KEXhl%xV^86lK%=Wxkx4gPg|x?2Pd>-R`U6Js440D;-`Axj4U7P1$OzakGmA4pl;G z9)BjQOzD>>=86=3!Sz_4aoANaIJ2a$iI>pU+b=TTFseavN8YxYa>aWG?{msxl_x}z zDs*YQnIZm+BIDb)L=eK>HBEI##LOzTL{zslPDf7n)I47sLGFlc-`b=omehQj_Esmx 
zY%6Yqm0o^Z#@pk)*kQY3C1XlcK0i55D`)IR@`{zZp9_I)QBda@|NJS|=xd z;ObI7$~;6JIcB0JT;RF-MKrVy))CaF81Cgk<9`qtOd+Z?DRL`Sf<2F8*~r1d1jB}j z#lQx-%@cUl(ZBMRJRtKM5^J-|tR4+wNIz^iE2WiSKE7q%+{g-MqM$K5jWkFt-Cm71 za-=hPZw)q9NPSv$J`nFGwWr|EnLzhnG+gm4{7^w;6=!ab2r3FusWovp#%dn>7V_^Hb;oi0oP7Q`pfu ze68vdVExl7#T$!ZT1xC4-@OXFl34z!k7;ZEBj}8%pUjtLn2ThtEB#)UV$$biJaTDO z_+}iXDNJYO2{cHA=aLrSaPbr6GH|*l5EHoi9*?4tMu^W3rG)5~ra=p1iXlI!!BODI zxORTQjZHZLWYHED#XWJuW(snq(|LTwYsrvgS?~%LwO&*fTe?RyC zFqwZ^`MvP_S1ZR@|JKT1+rvMt{9YVA7Ky)v=HYvOn9F}C8UM8Ndpdk9rGAO@!|kIx z{w%EiwD3DPKc>xJl1lh*lIWl6zhl%Rg8vfb2k-oE4*qXE|5N*SBzVNJUlL66oAxhM l`_sVh0s5~73?JP3Z{R~k9`PZTVPMc7emxKQ2o>;n_g}tflx+Y2 diff --git a/onadata/apps/main/tests/fixtures/geolocation/Geotraces.xlsx b/onadata/apps/main/tests/fixtures/geolocation/Geotraces.xlsx deleted file mode 100644 index 56c9d5bcd2e40a0e17721f50d8e5cda77fa312b2..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 97603 zcmeEubyQYc_b!405`qedbSfPJf^>s)gLH_bq;x7Bf)XM~E6qzt2}nze2oloL(%p5} zegWlt-??M_?%%(A#yOrb&f06PIiLB=XU?_XJ&uAj(q$Y3WCT>Z5{E|(Ax zaF7uYZX%!}YKd4|Ju|j?rl;a!V{EU(%^c>e$Y^}iT_uPUup^(@#` z>hl7k)oNb^)3UQsi7JS+sD7iobrxp0F|>|>b6kD1C^K8bSJ0d}$k1){-p!g({YUDp zh=>FxY+VnBC@#I@dAi7cGikD{@4e)!sNQTlynBU+I6lWqveuv1y|1ckeHDE;mGt4u zXJ1W&d0vfCTJ22SHIuhe`RlQ9FBVIOu=!26|9k|t#K6cVLG^eDp`j=|2DT6sh}CdrOZ4fQVcf#>)3^?ICjh?-e{ea zBcG(o@1ZqxJAXD>K*5hoXvn+oK*}4Y&rT(R`&*}XQHP0sZ@S)cVwi#Nnw^z}VMw>; z?aE#cUjY*F+jTA{76w{51_N zS>j8Xi>t#Wfx_^)UxBxgXJ0Eb)b8p06c<+>S;x5i9Ai<~Xl#0j)$X$Hu%qccHJ{N2 zMx*qd8c>%fCrAhi|ESm!RTj!kKtmZ&km#Ud_3Vr-?U|XNKmUKF{x2@i3!&#m$jUdc zVEC^{9tO1R^p3p37M6ArlB^_G@$`N;j8*dTJr&_d&0QjF6#^e539l;8!?vLj-k0+& z-lA>~5jSQSo}^C>e$x^;|F z{KuMTA$BK|I>FVOX+-{be6i0pn`9r28t&#@>J?UAg83JeaVF14cE0oKf0I6kDG>NT zX0baKx5eJjWFXJI#gcq$k3dz~^nqD{VWJ&3g^S)}%i0amcQrSU+!^KKTa+l+Z|vUd zlxU_--ty8cW?yShaQVV{wIQcvwe>{^Y3As^7Rdo3^ST`pXmFs*VF6^EEt#FH?JNwf ztu3IImaD2`{hkZQL(l94X(Hi8oqP5i4V9>tnWL%$Hyd-b__eo&xGWa$s8l&lJn|D> 
zrFZZ@;~}KJQ_37*@7`IZ?K$WGD@hT)H`;0WY)SvWY@!fhn@?wz~4VK_-eOX|t?w7p$3s{kaLoSZ?x~2|7aphf)bW#^4 z;ruXtCr{)Rk@~!3!gPD*$JD7d(HqYp zt$vDz)mwAV(lv+7gH*&CabQmzF_f#*^6|cKO;K7H^oP3(l#b-QPq6k??TQP9RUlEy zuPWSDjM`5LyKMATOH522M`u;{ITluhXRkf7TbGT^CkN>hlGtPgjbF^)(I{!zKEB}; z<>;dsjIC-}u914>bs~0Y^4PPr%%eS}epx>E;kSif0&gRuGz-xqc8a%8#@bLEDc)f? zzxTazk{^>qepRmpSqU*H{~E3vrb*ZfQV-gDo0AV1(FE3B3j+VADaE(v6e`uB}NwC(0=4%I`Nb-Wsa^XuUkO)`+RGEvy}|I+=D?* z@v`;CTdw9y)kW14S_QH^V?pv89+`PKYF*}Voru@<6?UXJ8&KeqwC{(Glbpw@W z+z!{5Rg)>$W=mMDJzwDG#*O}A*&u=x(%)+OGqEJ<0lkxkReD@Mf2pRKlu1X%dR2Ka z%ZJ~gU4BNRegcZ`zkAr|)gu~2P`EvyLdYG_J8+ZpCdcX zfuuLB_*D6HdDuo+8Aowfg$m_s?I7GyocR9b{+sQ))&h|t6@slyDYty64m||EXB>@Z ztbDnJY@VR{ikcR+I4wtWh-R0oil5hqtgo~?p(s;d@P&x2SD9CXoc*C_`W97cF!dGx zft3#*y7lvEZAqTS_b_e78QmtBb8zXyY~z(JyLZ^@ZJ16Q7X88|_dc1NPx!YJi^>Ws zX?NRclkhkB8ZFkhzCFf%5Ip~#jQ*`ARdef|pf6Jb_0Wd`{s8E*_TVD?tw%#okda2DBJ4;!<0e zJ=ouN;ESb{H|u`$0gti0D~R@)$JX)4I@#3wLDqCWvSK<39$2LqHcf>QregJP4Rm=r za>CSc*f4T-WN4J5vNDPye!7r|bBJrbux>rJY9lTEwUZcdTrhQpt+mKo(~(Cc;<4w6sSm|ggGX4mH`MQP#7m`!rg@N| zcy#1Xn+V}yyWXkl^SX|F_xWS_VT{$=xK%JJUcRDK6OBW04OjnI$JWNgm+{k=e4UwINFS`=hgO( z3e0w$yS9{H-n-af&H8%(mTdq0Bj#E<+NgLU-iUkM5V|kkC^T6 z+CI(i^4XM9d+h-e2x&lRD&u)izjo6!e{D9E7gt8{6MphoIOdyR0{qo6i%;dhep%QR zuhmLd`7mrmUcPHUAp5$^Kq^1rNmtbi&qI7Q%-lr8gd&{4u!!9a7w(3QfoU8TzcfK2 z8rE&ww~dte$;0=^xW(t2ZE>2MeMvG0l*q0{Sz40Md3$q_rKboa+?jhD_7xdb6s5Ik z2RGNaSMb@7t4Hj_oD-Jbf<$E3581VUV_`_Uy%SnreD%7>Z-`%k>^9BD+EVQ|*d1HV z?zQyhZ~1p@Q&WrNqxfUTg4&nD6P#oxoSlnJ3fS@=4ikH49eJrdKhoWWQysARaJdi=}#Ll&Pf@m8>oo*opV<9Ri7Oz-b+Bvp*$mtnnB zmshm;sn?N7`sAn9>J0|=8jF1gjing9fTMNumz0Dl7E~lgmG;Eb_K7-ojZs3sDpQBu zw~8I6f2xzEe|7oOX4G?f)r1>)qC3K---6n6N3^m$RBO$0-wXK1*SEmzw|k$H_G+3i z&M!9gF>GlQ+=+6Wk)dIe>HI7?N-LLjRqkG$;?4MCd)*gl37xlJ5D7g`7F8HP&kxAH zOTqEFDWZdAUPg{S=w^V`?|81SETNNE5?B9h5+y>lyL{!v4IyJ@h6ysIn`R8xZD0M6 zn)p%}dn@2JvTg3#+}mGNH;Fjv9`Zo}-Vm!n~H6_W6$D#t1@6;UPWro6|Zb`)1$M@aU*Ud!Tn4Uc?@gsAYPUu=IHw;aO$t9vGj ztPM6OI4d^MJ6Z*p^1OJDvp)M#I3p^DUkM2Ls)ZWHt!sir7+kM8Sxgh8A}S|3|NO0y 
zA*#Ari@;4YYTJ(-CYg_#)!D9fH$EY)3Gwljm2y|Q%ws$qS&v`RV?MzrM|D>Tn>I-0 z2s6+xT(Ki_?zJ=ifI0CL@?*I$`(wRd8_XmE_2UYI%ulaUHqsbJv9{gD4V4ndeSh

(m@k(S*11c!f%k5&~t| zLgSoWL$2~TogBp+j!}(G4oJ&SK1N>bsdH8JxjsJ`5&a{7k|NTsDp!_cBU4mO!I+lrJb^)>VfQfYM`1+;@6YO>C~u#dJnahK)ME(SDu(i1O^|qF{n4S zg5P`m5cd$XDjwY;mS^d{j7?8acQ7c3Q7@D$`s48@R1<|>FO)KCq=$JkL)*Xe1L%&e z6lm#6t%tEZ4)08rIu;mH2)tO_UzcpRCuaz#@U1&C>$Ds3zNG%)aZOEf?oYEoiANO# znrs4LlyP4TL+{sDlq|}I?v8T9jz53i6xPAkh`scji=s#V1~>ZX``QQ$v@eQ5V|=(R z?t)}tl2ghAvZkne!!v^ZCuli;3Bh*bH>3_64D{#C!nSWGP<9V>x>~*wraq_qUm*TWPP$% zb3p2KeB3OsW-UL)JQS1Ub$G+TUMbE&t@$Wp6FO>*R0}tHz7Y)1VOtcSX36 zPog7_omtV|-SpehNdYtn}s2E|lkj&lneJijTm5&r14S_Oe}h zu&eYs&;G83FR=#`SP74$bZF5NHkuD!Nnv$}t9tq+jQ^OxK*Cmc$OC8aILyE4Sy)X# zQ_u6AKilZRwlT+N9ik#ecL)gB;P$xs>%-*a{rLDAk91zbZkwBqtY5{|EG!G({!|tn z($k|W7nkRyGw&4y+gl8W6_>3Em+*b70BiKUJ-W)y)*=?wmwm_XyV(=Zk!WW0FLgCe zjH)rIE4vwtS7E~D=uSvHhZZ|#N1LSkwtsKfTGRLEW>rI)xD@AO#jo-s>I$xxv(rocBEQ@ zlYo%vFn0mT>p=*==0=I-&k8-B%^xwPW4un1olCuC1uK+T2knnG3s?5q?pu!J69*rJ zxk=3K-!D_2tn{=CM)?c^u5KKgag#7@9CggIX4_tU*IPcKg-YV~uCuPFr>rk#3VSR7 z=4L57GldH4B&KICQ=cEAG|SKEd`9(jLAp9Fp3s%Z#C#+GCSkg?ebT1te$51j_~Fog z#L9#5vT}!T*xWUnFH76UccSSJhjknFth-CAzuQ>%R0Oo`l}QX+tw_?TpOja<*mzwG zV}6E9T(X>Ecf54HY{+&(dg05;w@Bq-fk_c?Qu;Nm9fJk1P%h6fN5c8JC}E-w+OsD` z==n}-=Vx4A1KeCUv9&wZxZIo0-L$739qQu-3nW?_s$9j}c{4UXi6+9D05Y z*7jiKhr>$X&{F)GG>=?4pZs9g<%BhpEbHdIxT?(p$oUE<-@fIs4|d_r4AZ|M-@CQ@ zG99p@^)}ca1Lv)k%A1)R+}uq)(%7nxo?49r)bLF>x|z67lhk0Wtfg#m=y;6+{7gNh z)k7$)%A5qUh9_gj`S^>gr?0L+uKnnp01h0<((UXTJBMSztOuMh&jQ_hmmBSNNV}}( zC$gYb?Fjz!&$xUY+sZ0RV5I-lh(&*a18FIVZDqn}iQ4j7hVv??nbw$TswV`nHE{pS zl4D-I#WaamFp30lf7(KQ7jhGJ3jNNm19a#Q<@>smGkhE-3%HykJa5~cdJ>P$h%9VI zRrP;zk?$DrXq4T#mTs4!7vQx2otv+Vnn4COGvjf%KxtLsbUkNyv46aSzgSM*g^1T` zw7ElK_}Pj+-Oho7a}BV4Qzv4GRCy!OGXc|h_*wotRja*))i73x~uD~- zpTpmUgJ<>9ClE?gkWgViqL@3N=x3$v;f`O^C*=12&MmvweI5;P`rN?pZX#Wx){cYS z{?jPW9`1N%cI_G&=gM6F07tW|lfSW6*dgR5ui5TZB##6l~3m2YPYM9d71w{q+D|`)|DJM76XehsO0RXVjeIJd$-dQ|MH2c=Q6R(0)iFRlivu54UC>C zIRgbQq$m5R9aT<@iCQQ&ZY&}wHgER(u4c?k=Paa^gJ2Tlin=g5Mr!`-^Fh-1ZGPW! 
z^hf@jIo|z|xhKppq)$9=m4|B(eEZBgMlH|tQCGYZJLL1AQNZJB%=1IlMs$nxnXEa9 z7<AM1HcuM>u@QN=h;?umonOMLTqUudh5>f*8<3Yqc zZ-?6q8hwJ}Ct1vsI*EfR=)M>VcFB95?y8#F6ITp8bqe82^EVALx}A*}Wp|Vp95<_Q z>Cj%AI*Fd1bsOc2MI*ZQ1R=ICTbFcN7<^&gXP3d?xv9yGWviro(xiLWJbb^)Fz?l+ zQVtS#*#;w%@jL!R?D{7B3$-&Jai)L8)(Ufs%bm~5v7b|et;?b!6+6gL;#qv^rHJr^ z!dEK1GZrk*26X)Uf<_mKIpOK#c55xd^&(&JmM`1dLYf(hB**9E{#ELbddpic04g}t zj)91yBTEOYPZPKGe1zU3U9+dyOtjfu6J5x{5Wp?vC1md&qK7!q&B9FJ1@=Be9x+4I4tK~piXhuemZGd z<7PO3)!|aPO=975&R<+1%2hdbp5QxlZabADU^SsntI-T}m`f-bVq+%0r1G23o6Zir zxRhiImd@(&S?yA~oAP&S+;DJO00FPs@%+>Un<7s+N20<)vQ>tAhra9h^#$>p-9F7% z+_jRC!sQe-cd(%D*Eh!~zzh@(DopswXv##wVSVk62r;bPo`jgTOH}VbBYQ-N*skne znDpk-rp5MMSc|f)_K%x9gmpe*H$B)sNWDhm4b}Wb?ik`y>|fCN-mqm=@v^KSDhWf} zsA(0JVzA0DgC0bzLandw9o#)gaBJ@K6*Ax8)nM<6(a6Ths%^-;LSk6I=$0WY27 zwXs;PBgk-P+C{@wMV;>_wpX))8+^o+kHwdCqA75l@k?q&7V$PrJ4}-#8Zjp9@d8jn z?(9p+U0lO70&A>t^1KJsd87<+bILrbe;uOy)HXV(Esv`egcTTnT$j2RgolQkCV7QK zoN*PqvXS@2*5Z1mv%I^{)T(}$j2?5g(g#GW_jt~MBZee(N!!l8)Hha0zW(B&_6fl5 zkas~;h^FYZ%DPsr^@TFG{%4h^47HbQ*2-Z8cE_d=4SzO8G1xA0g(OUXPi)8YNSnUf z(}q)LUW;TA&tyMc<;?AdwG0E*ubw9~dq3nLKVzTF-s|?!H6?>WhFB zM6BkdM2g9M#r2F?&Ga1rHJb-RXgJ#peN&@(tX%N5^2fzdO6EUwNE%&TaBx zR9q3-5tJxkdZ7?*y2N>b5euwF8oR%0J)+F1ZBT&k4$k)=0bw&@xwVTpw*{%?;HJ&w z!cpToX3wP^nr4lek!Ri_#sZGf+qmP=?Mv7dR;F=B1&=aTi0<_NDEP7r(>u{Vaomlv zzLDyZS`tHy>x}9ZX8||gRt9h*;zD_KX0|Iof-9q>$S^?n45; z86T#vtsWI|>9r(rpMqUeJ|-Y_ys5do_a?w5vDmPlN~)vS=0zF=r@w0Q0WVn84&Up` zNAHpeVfyvXcA2zz`)s-7!|8JGUShy%6kA79u#&yn_&!t=U^8yk=_=tqv|1e22w9Hz zJGJZptK_Y(?<@0MR2tI}@HI^|JMK{=Cl>qkQ+q*yZac%kVUa()`*5!0?KYGQGqYFl zR6+GPzWbw~$uUWyH7PMiXDkuo&^(0(k&w20>v>RFl@NpS{i>`Fz|r=)Jm&uZ%XRE5 zlb#@xQkn(EH`1PHmNhEDk<1>4FXoMTcvYj$KD4NoN$rVO{Y?rg2O_82XE-@kYgb%Q zy|`z?EemM;KY4zKLks;fLv z<;!0QKqRW)GlWCdRMJ-o$GiWq+6&c7ivFf7bT(XNnv=HGEal6c(f|jp?o=L=i?T6V z`KD>@?!Q)rlkCV$d#9>j;G61?qm5~$v32Z3yOcd;Me}cTtEU{9oL|#{6Zo!Iy9k%O zCNmtAa{}iHjZH(fk-AyN3ur=}ap&SzscraHrrTa_e7LDDz_7#>lKO?2(`*cT^n1lG zwA{)g4HKx6YLa9BL8lw}mx5**tTk}Z{1#RY04nJJaH}Ep;1qBVuTCI=H#-gsHF1vp 
zAdSSpb%si7XlxcL>h--H$TPo*i=H)fpPcTl_38vwNmTMu+YktmKfGqTkt%qF-D{T) z=hCVx$I!Zlsa|^~GKZNb5%s31EXs=@2th2;b-4nCtZl~s2vUPHioG)U4xMTeRlBrD zA-W;uIRZ3fHKQC4a_I}%CL}_!;x^Es@vg`GL4f(s0~FDrQjNgbhq8#l{%Mw+cUo$r z^+FQ;S^W9KBLw)^t}nZw%C{biTs(62^vK+LK0x{Y%{xfnZvBacf*fvC<_R!5Bd@-` zaM7qr5Zq`3F=VuS+As9(h2)8_h;4X0i)Y4Y#o6)vx|X5d%3eKU%0F-+p2u3xTzqXp zv`Aw1zlC4z`rLK!=`TvW;_zCd#Vwl+!R{bDO?}d7>R%OfK`5z{H9tK8groGLlj?W` z?=qlPQ0IO_lEp?p3D_)O-sOsX(dYPZxX+XK;Xc2H7s5FiMg8FTMl;yCRYN=7KhkiR zz}q4yBl4#o;B5LHesB!%6+c$FfFZUlqi1{2B=zSw*2_)p={2HmMb^ zSLazXR0aTVWSD$v^*GVLPV`u_;6@M3B=X8KclfIbkN#T&KjP{w~m zamn0AW#Lt&80l)WO$`652oVokqHOFsA;mcYkIlAe0 z5ve;{Z0;yEbNQu#`iQof?18pmxd#!u`Gdbq@}*Z1(nlEM<(DYpr`cW_ie^2?LHLq+ zg)B@Ob3hP@jQZXF?IwF9Us+1z)Mttur*;do$SF+`K1?YKh`3mJqgp?FV1UYteHRag z1RVdemPJmFDjnl-fSy(<)0*k&zcGxUI1_>F2+&k+{4Np-+{W*Hb(fAZ@>o7nr^-HMk2Csjlo6-Xs%Z=~Sz=B%p9r%nx8dI) zy^Ov8>og>z?`NyxWrDsaX;Z9J!l-0ZD#UQNkjXiuKZs>$wy_F-^|5aQj<-I}2I~FE zA0J{tHY-gQ7Kkce<--sO1uqSC47sB4CbnI67@8na27ymhg`m^}Nz@P)*Ffx!D9d!I z*XCs^0mF=T*{r5#8`CODC`8$)lgK~6a6`S4{D=-7k0CrBdWShYDArK_K=ez#@!9VI zH?o?DG5{x)tCV7`o9s}pY2Ql zJF%o%EhI}W?0j*0uCt^FG`LEoefkQ$rpn3hXQw%LInY`j6NkDftUgI8BWmM#T~WMdXZ zWc?)yUek@MP0e@T{`YDWv`S;#lLdOfr9ee=K@8vz5gBeq@FE%tKnO=EpDQSTsnEl=2hBcurk|Yn|oYn0bXEECV<7(wm4Phx#n+P$3OGG@s&A zUz!Kdx)8wkY}~h1i?A7^u9{_e`z6r#BH}fJ77P}qxfNAWYq6`SYbMRlOtr&JwL9F% zdG&_8U^Ssd;ZjHn*f4#zp$@Wyg9BMQf?GOY0B_VAAxjU=odTBl-zbIRcC%u{(2Jrs zii4ZYAY(7NQ;2rnPZ;pG-Jv`8)oXHBX-~6ZrArW~MBqUr2I`mvUrlm45&c%uD_V5= ztmcVe1_ zQcV31D}>v?^Ec7M&@@6dYbQQwlN0FEmTTW#MPnsEL@(EtxWWLTteuCASrDb27wP85 zj``eAJMaBJ?F7{bXZqJh)L@jH!FwJ%tp)@yHvmQK)3Ekw93Bj5EDRq^@PQL}Xh|w* zk|kQ%BFlK?+J##15FZW`=67w_WUPtiVDZ=a=b3<#< zHT1e|HP7ppw{FFWG>Psmye20tn_La3X&&dXQf@Tg!m63NFOnOzyYOCPqyO$Cv1gcm z?QR9>H7~f2bbh9*{mkWUuP7iN;~MVzfCq>Kxev!zYrra*{ z+%x`WdqC3uV_7{zt?YmmkO z@Ai%H)pe9_oJ+vQ*43_GtI~PQm-&Ml!t-8=hX9nsS}7PPem#VO%##m94$!mnARl{3 zcSviaEMrs$nL_7e6;l-2yo$MYTbCu42g(qB-i2Z z!QYR>clU(L3nV>Jox33bz`EJ82ZN(A`<}X|LFI2F*6Rnw3fOeQ#LlJOFBAZoxvUhR 
zBDPYv!o!Nk$}H)U+2V+;u`bO01`WssjVLIypR`l=@a4PxOvk%Y-w*a8XY~P%8Girs z7f?7p?m1;tO6((Vf5r29l7foAgO!_HMWaJcpFCjhte%E&AQ2sU_A#?Sv!+(Kf$3Zo zGU2L_dGjF$dYAC)EO79b$W5rqo#-G!JIzxm8%%ow-hy_1s#cnQ5N|aTyt5yH;#UB2 zL6P`j7E+!^3rwqiGfr9lLj>Z6o=s#wA=jjl>jk>FT3>!q z%V710+rwHks?ZW#GII<+R@_yjtwdY+NeCAkb=8Yvb2Mt_Cos^Dcel31ziI*aP-xBpEi%>2Ywui(WwIsAtd zwyJ5M0aP!j6ohKK!#WHK$RG0Bru;M@F=Ix)7`H2;drlMvcg-B-9?LOCl?EBFi6u$t z;%i`l+=Fn^K|^eJ_?QRnHtfEz`%KzD*H7#WV>#2_JeeIQS~k?kNd3T>-A&|c4b zVnhMt#p@scm3tvEVcR4jx+8dEcK#QzVt9WwUU+R}^KM zk|`zqr}?ZvXW+$8F))-cyP{VW{`%8R&hk`L5LNvzLjS?kK*-~{-^_ott2cOWdT(i% z{#P@mGB>dO*A01n3}6~~r1@Wn%xP+9>uvsL=L76C0wDUOGQyb)r3zvv)E5#yz@Ym2 z^bbIo@EEpUlB~#SdS+U$mxAcuN}oor=2$KTI;?HE#@7SSU@h<(w(`$L|8`XnesH@^ z{30W@Ce8XEqJj1ugsefwY~ZA#l!U2RBAlHdgydLj&sa#A7?!F5&UFK%9Kr(`;nQwL<-*K_T2L$GaOT z9fTL$QhZtk*YToTd)?`XNB8vdKF|#v^2BPBQct29q$JCnhX&>&@Jur_i`?m_0J$>$ z<4N;$woD$pB7tF03+xwb!PpKQ2&Sjjl1w5P(X%}(XdKc;>rkO{E!PP;E#!OU1+HA3 z-xN*L;WHYfG^%&h!8|<89jmQzOIyD+XZF!~_y4;R($?kLZeI?Mt^+DkSEUU| zcRG6iS+TgD>OcYYkADdvyD7Eo6{O72P3s~V3kC}>iq>D}>gifT{nn2e&d1_EykW`> zV7VBoCkD=!D=@k(j7abMt_M4 zCN0fYIvRzS%q=wmyt~(9z2FUL^RJhxDH@y0@z^j~mT&!$lDqx6hn<=sE@tBhOU>-=#O^3_WX2h(vYDoHe&BqS|3q7>Ukbe7a*45b~kGH zvZZBy5xaOKabKfS+qDLYR)5eFIV7m6bN=4x+HkS}@R8dwh9 zh~mq&$>-mlDD~mL5pZ+`adseD>lNaII92}M$8Zsws>HZ{5S95y{>Qa5+lB=IK-*Z% znA#I}DwyXt((Lchl9~O}1J@h3|HIy$CJ;kM;Zv?4Fin8N{@n1;ZW{qql!k{Wq+pN! 
zFpz&?ELoaK-7l0CZ(PRyE1@fN^F7VcfXS%>BpFZ}lU!D!ivM%PG){6TjQZGFAo+1z z%hxnblI!ohq55YRq#N)(=NFzfemGd%111`jkK#qy|ISN)H-V#``{_>E{9*fI(djVM zSrGBDR@naU4KdoI4=&f+x7C#JF81P_kDaIb#NQy*hKOF(GJf9f|7XYCGl-?>X!|p1 zN}US4M=5GoMK?|vxE@d2*^?A2t+AN`7XUT*&!`a{vHs_U2T<3fzx$*fx?EK8gIu_Z zFS6x+_!^^KUCQ-``P+}+U17iksQ9602smrpZtn>1?tx77yu64`kp8xXfZX-~h3JC# z%ounP08N$fcC!7brVMIBa4%o)IsKiM{>BXQVfFYJ4Be-*-=v-K_JC2@gtSSUWw5#bO!QQSK@zfNfXSpmM#Q%{5Lbvo&pFsVOD+A28X1{hh= zK3teZqX$fM;5`Lm6n_v6ZRa`4^hKQ&@Sbv-Gmo6#=Q;bwp67n)pU!R#vF-4cG|(Fk zFz9K~87{n3xOb2n0`*AqELML|S5o7H|Zo=>ig^-b4+YSGflN1wHbzZ0tEqW<-7QJ^HVIC9|c zj{e^^orZZn3Ex5|=u!_|vVnfX-)XXf zB_IoM<$MliIH@+}KiAe_sqF#HHvR*Ju~@I`_ezzN66wUCvWkgVpR!`bW8(PD2#sS& z23tmn{;6lgfm*HF$Id?b>t9Le07prwf4mL6m?5K7sqNcu`0_At6BdiYBW_D4wnH~_ zZ6@kz+UVN)G~~OAaB}B>5h@CdjbL~MPSHJwM(k5TLF+n8i%~jA%MFcBJ@A*PpdKDR z(ze^s&VzRart+|M*yALay5(E1RtN38HhQ;qYX)~qF1_awU_3YacJ{XKEC?7~e&_`^ zspTk`Kx1#?*8(+5JqQWI>2EcTc@Z9^Z$5VN_c$@B`-VP^ipl`L%xe6M14ctR@uE&I z?11O<&CgmBeos4IC?~hgtX01jSLzo7F`(WS-#=bdBD{hM#(IMoY2eM7fSh95s=v&* z`K{)x!P%GpRS*Vb8IAjGs%Mu>xW0kj2T84eqxTg&f}0Oc)f-%;|7e(&pXqyTG2qIg zUKac{2B?I>s2LzgMXEq(1K2<}_*o-&`bGf0c4n{fVU*rM;c4%?Af?~(!Gtf|l(<3f zm)#Qu{oX!h_4-Wzz`b^njRBd#>igaml?&0I%ZxNKc*k)E4-6%zFI?#kavxE%R1DK! 
zU(9K5ymI7~zEw7MvYm8Nd9pUU;&r%@pH$83QRBY1*K9G+eWG!ikoX>;)OO`_hX~MY z2leE>=}{xwQ4MTzBSOE(JkS`NeNJx&Q1{i0aSLyBF%kk9!;uT7_SsWuKc7;dw%uJZ z?8fko5S`+&<$Ig%-m^vqz50NW@V57ERwyYNmz6&fyJ?N!YaU#Ej# z6+Sf_s|Q~dF@PBlTuc7z;0kVd{nS5<4xY7Bg><}yzp`-xuvEBMc25}G&>@uG+Z9y1)_N zq4Ki?b>WyL?wV;Aj>^5>HIB;IldFIJ88}oP?1(^DA=ft3MRRXFq^dbw%CvD7XCx|| zMD(NaD&JU8y-N8)fR$u-r&GD8KS;BcDm#=Z{Z3-;kDZS>QtNGW3T)=j zp_DxFx=|7lpmJSTMwZt(qti?scQaZw`AH8VN3#gp>tuf^%n}KGvsAT@Z|TNEvI@uF z>6(5Nnf5lu&AeSA*iraam?|?xNo1)II_Y^f0!#Xx8F9Uq=h<%yauGS+<7Ga$rV@CW zL&ZFkZ<||@{4 z`>J_nkHUgS1OC0P<%77lGREc;OFpxQX0zE8ZD$95U4tKpW!`c8p}Nnr*il~gYsozh zJlRbl2E0++SzSYPbAPm+cv^k(8$AkHa~9!84|}AHkZ^UHdVG^I@^cEA@{`4_rykHL z*f$}cLIB&rnF%H@Lh+;<2+g`G&^h)p`>-fiSPw#*?LIbz5MwB2+|ff182f#NIujY$ zU67PHs<{2QBRsw9}~iT2~@4KIuy$N&j)}nT`#j}1>`3cq7UBOlC8gO z4#;)tE)n6g&hOnn1+P@Ia%}0cDYL00KgksAxJ`)qIvKACxl_kHZ5EjhD-48kA``x; zVy>^VvnqueTa2L+@Q~bj)N!$|P z;K+t$1r-Y2M}(iIZ~>btIe41y19L-n% z;wg_FDw@AD_3XHaurU!u$*W{1p8E<{T}5RBOJnma0L0v14}}dO8i!+w(xy8G$5r>a zeROL4Hn3qq*9sgeKi*s8-F9W$^}tOlJu4m~BtXFWpUIC*D7Q)=phj3Z9Os8*2KLcnK~@ajExzzd5Ju&#wf*(jsBgFW>I5KDNy!P)OuD3k=J%I;eNo~@_pU6ucS zdRH^3o+L9%GO4GJE^ePi9x1E?U*#Nd7}Uqc;S+vkzmNUEOY35;M}Lw{T^kL4&BBg8K;i#v)=LITk&#wtNwdc*LoWU6v z$9ezL$3u7XZ#-fYBsN;6mcKFuWkWITfO5biX)F{f%ZJK`@6|!v>IV8tOwlaH9Knvv zvkK&}wzQ$XwC^GN)&>;lp|BCsb)ipZT6oIZX5^~!VS1YH8Fys3Kv{SKPbF!F&vYjg z)0c+Xa&JL(ZfHc`YI)A~+hp7I&8(|8zx8Oa=MVS)WgAsyiIGqF9tJzS+U>}Jsp^OJ zC-HhW|H+NP!n)xjx+N*{&4W0Wad^z)Dip^y?H95SBPRT6u|m8`|KWSaOem)4@;JkC zigxe?0`!Gp&Xnx~?wP&5e=(AS~%Gi%k{bekNb zWWDDC!gJqL=LwL2PD~X;${mjBSu;=BsKGe42|EXP4?C5agW=>VLn^qIe18oUS`)Kp zg|?#kMUa3x@0*wcy;~=NJa$Rj;{e}9Uohfs_5I+ue2Lfk5q4`$f0|im`kz$h~zta z%dX3+6cji}ZT=*fo02Y9NH_JAM%A}byy85VR{Xk z!_?+Y_CDbeu@au!R$1Tszx>pf1??H1kOnTK%4PYgiG=zbmbyQp4?ux^=TnIuJmuu6 z8F992+gZW=+%feIKI7?8Lm$T)9v5p@FqOg1SYxn!*^o+WOhhJ9f zo)H&plcLIObPDD!iWtJRoZI1;EU%Mjb=R4!PESOof=lV2XDXb0+J5RQ)5f~%-1Own z?^(+OU)3fCq#<$`Stf&4271Gj-~w}-ITG_wOn#~KYVl6+)^#89zQ`#pwOpI}M$g30 zAApgm_ncRT8v0{Nh+=vLvldxq>} 
z9{6Zps?kGs5l@*%EWA)?5@V8&{c^x4-_tt8CUL!+sj+Xc)FGWSjDKx$p+_)MLt2Tp zI#mr?G>j=MEn;s@8E93?IVpkXqqOx(jcIH>w+RE?J7e)kPk!1|n0%j@d~>1If0mEg zA2iE(T9dByusokuAW3CK!!&m6#aC;e@*s87it1EY zBf;*>4^q-nOUZ{Oxl?wI(wZL{RluKdjRIX)9^KnIRpr3Hb~g6|sLRRn=QpmJuO+08 z2F^=SZLJ`KO7M;Uwkd{P>gTwd!vRXlgp2W!%|r9cZ>FS2>i^$w%+UVRv-o{+=DF zRb1Njj~$rN3{YAd5sWf4&&YJi$lPGO$A1DF##G6p1yaZF)pZO7z}YfZrFA}I0VJ$F zm)dAbIu&r;+Mqt>l1B;}^!{Ju-7_*dU9k*cJ0_U`hpbFC7@qe^Sfc}@2j^w@KnHhi zJR$W?5RXnjG|sYvHjFV^u%G-^ai-CJMU)<+@G9*u1{;hxw)5WetV* zw{4(#%>Jx1$I42=J8O9xG#U0Hu8tHYiyI+b9NEOl;e*tTN3n718|*PT6#DNtllXPO z_c4E`wpcOvGl*JH|N9OO)O<4@l#*_BgVpTPRbUvv6iqC|CRcdqi=QUg`H--DPy_w}2qo{pwf$)e41ly+ztNnOGGVg(H*yqv$4psC%T zX^VqAE@^#U&Us9W3o8!%7IPkYCuZCp%-LUeud-C{;7#Abu1{b!80B0kXURcxy?`XR z1I#Gh1kwM}Qd)fl&0CYz(9nE3#-=`rchPz3RYk}5?K^De@ro4s_5q{`m%a?0vT`CJ zRr~J%U9go{rDs;h4^N8VXAZBDPPa?|H0ujU@y|P!)=Ee+J>$=ena|2ph=FAd|EP(O5BnD#r^MFZDd!qw*u&tt6u5WA`l1`9kIGbRlKkYh;H^; zmZ;BrZQJ&+qU#|+p!Xm(;pceu^o>EK`z%#0)?}`3@A8TTqUn0{F%7rTI3h6xFZVDF z{90O0Y&J#THp09!&bMLOHKf(q?@QMM{+79qa9yD?mvERgMS7rilVbK*e5}JcXVXe0 z<<4i}j>kyYmrA7Xeh_8#!Yc=P&x zrl_&XZHs#PvpAmkREn1Lgv4L29ntAnndmN@!79))Ct?m>jb`_tw zac8D5=Eu54T=P4G-l&;6MRg+#PQ_OaHV9n;{K=h6u6HA z2xx(0GnF<1_NsnGW1Gy_=E8sNENN)9A*bJZ&@cT+3NY=6gY-*;kdbMx*DEoN_*1T69yc+@$cT>;UF9NK?RObzyvqXv|QMC-ux&O9YB! z$5v$KjWml?f~R$fz#PC)P4;bhHsa8pMZDDC!$%H~|9VJLGH3`OdPQ~)bBD1{7v5r4 ztOF7andcB4A--y+(EYbnXSoLPfn0au#KIwZ3O}BV@auUZIS7Yg6yc@%0)1aAF5zI; z{M!RcTAG}vT>LldX^wI^M;dGc-W8!`non5)11wqGAaHs@c$azw{C~95tuvOSS?asK zwy#=z(5N<0dk$Yu$Z${Iz^|}DC1yJ&zX`eP4!1jr#|2vFpQ=^S_o6JT86(Y$F`38c zPFa^(tN_B6;}Dp$B#qG#Don2BqH&Bv5P|>Al5%Ap;c!(5@8F9qmkvOA*Q*l057Ru; zEUnXi_J@{M--+x*zTNIoeWmYVMFOII8_hDwXJLg;K ztE3AxulyrP(DogfE+yu5LR{>A0^EOFFS9D>2|0Y? 
z`ugE!X@8_fk27!1XmtNS0qk^P*rKS3)qQ-UYAG5sbIn=JkKnP-qdyP=fHGoZ0Fg7j zKZPmG-y5aB$fS8OD~RgSOYv$8InU?BJl``-R(*V~2-z5bsx?`6Dt)F86RHGYpqb$7 z>@4y`(&MfBIcK#3B^wBrqp@|?t-Y6Mf$zX0;cdu}uNR&Gbg(Kh&bsv=M%Xq3Rn%JI zUw~fjJ^<1;3$ze`>DfsyLGyLr1c-v_g3WYo|Cn{}i(ai7RSOkJ+yLAqpk@dmeJpPY+8_uV-6upB-;yI_$j2In0f$4hd-AGU zn615ZlENO%i+QqDwNSY(STlI5X8jQ#h?Mp#grTMS9vq-_%5}GoN{%(X75(B?Aq_uI zI>)n9EEYZkP8}ha{Rf;M>rS)I$?-O8rysG!T^_T-pg!Lm){vxH?IBi*28;c$)51#=f#S zi*kMwS8<#wyfcX2C%dko+=;^{o>oxh&)=J*OH*W0eseY_L|e}cE;5I?%!GZ(bc za0P7DnmYT2jllTN@PcSTrb09~ANf@XO8ftC_9oC+u3y-AgGe1iG@M9uGDM+MArvV` zW|<4236UXVWh!)}iIXW)%9JT&mZ?{R5K`u$SB8Xm&9B+F@8@}6L+AXy_5c2BS)J9o zpXVO-zV@}Rz3=Bl%iWP7cBCs<$k+77$)q#mDO#NIiMs=C{L>>V5^k7IP`Tj_(JhVJ zB8&AGKV9_9=i3V8hh?Fu%i^5QqyDEMzwGZ{khqRhHAZkk$luZszEu{0a-z}hBuFrB=}zuM)sx`7XL3!( zR{1j8Mhwk~sn2>Q-bO@$d+D|?lQ(S`oigW>O#(71*?U1HncWY|-2X(slW1EK#l(0tE@91&Ft5vVzLM)YY=2W5`^{gK*KqUe7` zXsa^@2%o$0kGfW6H)hjFEcGJ7v}!|RGV9(t81AmgQPhHz18*-5_nqwm4Of4V335ti zIAf=EAOi|(P9{YKH17J)~{{AUg-hjuRrXQROkd#ZM<(if%ZW(zh)QS0m%IoO_ zAlzC|2w9umQ$++I&Kuh?k%cASi--m&Mi_H#E{;^h@re>6ZpZO4bkGZso23PxCv_?b zkEY80r32V9u9r?iLWOdW(FeCF29Jsg>L86elgvbtP}ISrc!~kfI`z7Oq>x5;M7WT-&KabK@pW5BPvxhh8B`ZZ56kpNvD}n9&9D<( zUPwq?oEl`pppHHz(+BbCMje2_S$aDQ0qQ7XJ@4MXOw@S z`KFJ2V5HNkM6}Oi-1y+T4@{f2V-6%5dtP4fE!*3ue0Wuj?(2zA#-gG_{RWX^r_JTu z9-Ca;T1lee*G?l8ER=R_xx&kFs_cNV^8-5f*N#=mb7lvJSIzkE)!MF|Rzu5|Y7Bps zJ=APCO5jWr>|QIdD@C0Bu7Ea-s&gfnHY$r9II}6c;pPzC6{Xd;^zN{;$KJWCZ6=c7 zp>pj~X2{J|f0RswGSA<%ygYu>97cJ(%C*h3ErjG?-CZ~Dq!r3}NgM0m$ox>dTUuT# zi=-G>L+iiL1Y4I~wdtI5<$Cwm*<)8WZP`RN^W5${UL*|>u`X9F2ACC~QRiS&qp@SH^n9KI)>&+SI^vOLk5mRDB)$#|2Zc zAOC7uFBq)h_<1ty%wJEdI^0a(#CsqBu7khBDed^rCGbw2Gk>H$mVRW$;(9=`2{>og z9S*8M&-KR@A)fma#?s9nCDvqZ)m@muUOqiXitpQdS$~sgFiX(gN1V0ttv#ZlO}e>% zB<1l+HG1jFQQzGs!Xq93DEB2HypXNEhH5?7aif9a@9RqDxkQxcwG0fl3^7G-5Jh2p z#e}ls*c5=BqMW+J>%Ca2POP#_Bi$U(2y&5bJ1d~%_Bj0_%iNFPZ-hiGFyw;9;&%6C z!ID9_RhHa0YRdzEHAL*IF1m3RG112vVB4+P@H4h&;xO*@4ZUQ%j{an!#kk zS1<1m%Bvss@TsHa8+}Dbb2n}IUD~|6oaz9ofqBpQRvxjIWq5DbKUeJyTaNXbesr@} 
znRjo=ca4&GmI>Ix(+!<@6H<+@kqu7)xB~#(jtcZnbH%=~^`J{K_}(VyT^}r5@X^WW zJDVVNN}GSRZNzvCUoH2`X|m1}>ae_Gd(D6L_`ogSP-sk8a7W4V&Fx zShgoLjN}PN$Pj4s62K3hALpfiMM-Uk`iZM5$K*5XMlDwyJ=;Pt_g(hb+fCY=kyWk7 zUD%hK70rHL|3=3LEk0@aOl`&eKo3(3;AdVQ2*9(NC4$+pd=HhKMm%NpV`%^cj~A*b z9-+Q^NAK?Ai)m=q3Ab&kRX7bI()Iu zpYybdfiNSYlv(!?JYX7AEa;sYgQopC!9tHLY*jtla&d^=W4}1QdTQ zV#(8bI=g>e`JU!}p5ON!eWFE>8(p|T|96-R*r}Fi96jY;xbA{P zK-t~2vl=CHX8MZsak5jl0^bL++!tsxn>Vs1JZ8C+{>PQz02^ORexAo$yXhRUi7Bt@ zQC>Zo9UE}W^_@uDJB?07dT`dEg~$S1t3@gh`YB8gCenYfm*;n6lj3?7WnUmt2xhs$ zv{I9LmiYR~_yA)q0#Y~!nsNAB&?~9HhGXtWMQf}94?A^_-%8X|y2-F6TPbj)p~bF6 zBM|*>*U>-X1CBv9ZRFzlm}q>^0&xhoonz{Wiwwqm@1;PJTBS2``A<#x*MsxKgyGmvGG z-f@CDZLa0^Snkqh4txaO1U|sP1&peG=qHngKg-knYcm??L@&fnQ+=xkU!LqMP@C3r zA9=gUCtKJ~@l>Ky?DXrlP|Y3;uS_pz$YWcrnCvj^+(qzJ4j+HAK@xW1YTr zT7id33=PXB(X1dp*V84mUxwe;M-5bjvpivc@~z5(UUAEnKY0A^q$Th@9B~dxBiI+b z%rXlzQiOlyW9Yu+NVysC887^k`s>4B7o;;QCwN);KPH$t@Z78CZ__LO0?{hh)`iK_ zbN_KgfTxSR@0@4vwF?$C6PHx!LF+a0V&TWie@oU&CDv>93D1@N9M)F^n~Og3f6M!} zy=C?qzr*CaDb0C$+&)lu2vlxFuYTr3l{%@DX z&&^0Yr-3)+=`$dAV9N{f?919~trc!S$A2d~6=I+-LiDsDXRdxAi?Qd;0WF_^G_BO| zqg9Qw`wVWG&FEpM(kP)aOI|=)ZECoYHGpAKkAHP?_DU*GFcvSaF!l^_E!?Z7(}L3m z#z^SBv~c1jcrX6@we;phal?cqju!|T&HqjGag(mYku#Rs9H|rvMY)5cV(Kg{`^TR> zMdZkfS(S-9VwPo&G1iv+d^8^81xI;p9tQM_C3`G&$dMuHG}hE^x32TtDLXEF3bGRX zSRgMz`W=Dn5JU6h;Mc_UNAIxdZ+W&?8;=~w4I0(9ZiR>CWiqu{CC&sQ-iNj+&ugpI z*-AAd1J3kKUjmgCAH3g^ax7DO%e37aP1^`<0xkcu#7_!!>N}IMtfx{jHExng_*nGn zQ<-^^kbP{q8I>zy6T4@HP(IN9TUAFtaK*|Ewr;^_s%G?-s6BWODG|u)rv>z@NCzPP zwKycsSY`U;`pt6NR<;DX=eq3t%)dT^0*(DcR%m8PfyiA$3mb? 
z3&}!he%!DmyEYFB;B1<;-S73YqWrais#s2DMRi(@n_28d8JSGi`lx(?^Z7#=`q|x0)W7J@_ z#D=LB40!q~*gKHmSQRnu5!q|V_{U-Set4UbvSl2Enf7`@UNhb@;$pS7Ml{E6p%qr= zQquY&mT;q6kvjf(u$#-HMu&tjK{*^vu$ShgG{IP!FNFob1?t$COb5yWF*J3BN0NU- znD|n-Lw0|2Um;Q%m`b2537N%S*qW=^t(Ns9WJ^C56QnSf57KSApM66ZNFI@+0)3+M z_0!w0DDmU3B{Jv&1~iU~xcYBMiLv}Em?kGEt+p`SNH{K^t2wbZEE%gn5phw>bBLwC zH>KCG%9rGs8Cn!bu`q6(>;RVdu3Rfk53{5=QY9ox3Z=ylcAujh5uJiJ7j9uN&tzc- zoWDIV&-g)uJR-+adbHWt#3-;1kr1wKJ^BoH>U2aTf!}6@>=bT>xN7V8*P*ajahzL; ze$I#n1d=@0Gj@Yy!-IxE)1|fSwCQ>Y2bv+aFNq}F_!@_K6!xr4wq+(9PDG-Cw*X0o zQ*9M*%8fz8XxX;X@qhf8lzwAv(hGqk*!4#}Y(r~WG{^wQT<)7@N#*A0SVOMeN}Kv7 ztZy&eQ%aW63#Sn-5>asW{db_BDeEDAh$d6tM8ASt6;x_&*n*y zNsG>igN+#DDjchOFh|g~Xej>cr4=Yg_{Wsw@|8fII$vK~qtjLa?(t(ubPmyS)il7P z6bmUVY7mN6I00d01UnYyyN^gTE#HPrZM0=78A)D@cQ8x8&aN#i2pL3_4X#_F>;nn? zhE=4pPoLCwga9XXk4@M*2t#(n1t2QSvT;BBk0pg0-Ksw?Ob{2~AL*V;o$Vpc&q&W7 zEjGQJ3A!rk2=JRTR+Hcv0E1E8#%v;KopNK4h6kdsT<%(DB(W*Eo+Boe#MA0HX&If} z7}@fX_2eh7f&omwgBqsDL;d8Bn6}4UQAd3n6QnKutS8-n2M)%WWdgJLay~lP9Hg-( za0i)ZwaJ9@(W^AerHfpPgW$J`M`;*=pyc=}*fXA~8+6ZclsyoddE_AGMoeA>D`aYu z)3J`kpiV4CAa4s0YZn_P`vda;m<5m(;|DLbzIb1+W8?RPeRf;+jM|9@a_5h({Quc=4mvQH9<~ z-3@K*s{sqJT3ZE$)wWww;**yPvsO>(8yQRjE>T*Dr#(gm46^zZP{46C2r|t&_I`$% zB?B_AFiS!0KhbB%_<_wI!uuJLJ}3f{s!!@q*nbo4)|@L7dnli&OEp*hkv1N6-|S}x z8g=SfM*5|Tx&6!__+vL@e>J|oF6^`t*QRs%DLkz^Vw${TYe2qFak=UldUqLOVNytd z`V*7KDS(sn`U><-7Y;V-!Sn~t;C$|r1>zr2>uJx!(*=oKusO*&BpD|_Sw+g^53>UU z{6M2-p{0?lpeDxUq5#iDwcsktKX{G^_Sm%)1U-f32zzVK&K_e-v=6)rvXN}o(KtTQ zct3mVKwY7-4xAd2Ay!koD-UmhF_3ljQLckZ^9q!~^MauFQ9oT~S9q}>Fj9@>6N7Yvu%-ndTMgq6%LH zJAsY^1zo6|Xy~zeZ|+*#L@G!Yw}{#?%ahChyE!1o(IICq<~SQGi@g##XCn{4x*mC` z*tB~Pl$@KMD%mp0`3wSd$A$I>PN6J{ff zQFEoU6EHuu^|9O{GBnXIP+MfBmGO95cuf3%!wzPw`RK1osLS`?+T-6YMam1`fdc-7ialwDml0J?fmp41*bs%0eGe(!hk`zp@RId)PtHhOq9}ud(=l zGd0RD$Ul9CyxLG`tQ7l{bD%noqE`PG;VBve{Z^%>-m0y>d2>0-)D@^KtDm5_lgHq? zlr85rw#@O-E;s5jWgW;O)K&p^Ay}SXB7jApn`Gj(*AfGusC;tsmvfj4{;J4-F&PN? 
z1mjF-ZB;Sc-BY|$T%-LSXr|N-9d0dV-1TA)!4+;tcElh>fixJ1FqlF_at!}uKDmT{{r;%Oe1P<9L{d3R@hvw6>avAgbmA(xq z-hx`;K`>?zX;A*%y?Hom0P5u_Uf? z;s*B01wP1-s6fG{#6))(i0*FdgQ96ro{c~U6VK&=Sm{%M58g(-Tu|%ix&TYM_wiJX z??s0m^&ntUGQOE;V5?JQS3$c*6$&Jc2CJsv?@}DQh@?I4Nj0>i3(M6jR{jcz57y8g zoPnnK=>ha6>P;*sJ5`=i>jw;01OIKgrZ5TlbMw;FhS0LK=EQu-dIP#MTW*59M?zRn z!E=wZW~1@S_h_;y#J#N<>ft`Mrj`>Taho7(nZG(bW{S{8c8PkRM$$QEqFN6Sw4@bg^Ge-)Gb~7-`Mk z{eJo!Ewn;@-8OSgo}kY8uc(+puCugJ_z~g?ccoaJk8EL>*5=x2c8F!B#^lN2sG^IH zZ)Rd`g2e|Y3D&7kX;6|2#Q)I+BGxQfEo<-h9;;~K)X$5$=2;S##1R5TpWT&GC_QjB zU3+#vr6ppg$VCy;8YSmI_1E53L5Y?pF^%3S>1EcIbN67&6#ekhf7?zV`BOonC0cE_;99` zR*jxxwC!wqxj_bW90O}G&D%0w2=`-qE$F>_4dv7U1zOuJq5J04tzHKu%zAw-VfFd% zEdizRf)l|spq$OtsuxxbgK&08Xb=wo+QN>}5HK{}?OIh~S$-kG8NNXpQ`}%TapU^> z7be!+E@Z14F5g#nZ2+UcyMoYvVl(loh1pwS?t>aR>r3g}?hGzK_!=iFKZu z=i+$qcDfZRKPHw+>)Hh^{1$E*fxk%R*%9Zw{V$FbbP0k7!pg{!aV;w?D1gq1oCUrA zuVMGgAgW>rfn%8zZ4R3Q0bT>*bKbPEojC@k8DLh*EW z(z6d4q5i~;d85S?-w`(n2}xJuOQFjtBU^__s5M|>Mrn;YB_g*6{fEWIl;`xLVjb!g z7P`h=PF`R|qZ!2wNi-`sadF}7>8G~={YS29ab|LD*%4zd_^q|Qg6A7~-`ovN7YJKB zXilE84d)qt#rFt_-!r?Plk%pGq1*S2l~Tp=1p({fMvBa)M{BF+=x*4#J#B^PSC1fO1%K z25N9h+5}o4A>4<^w*8%ieMf-Z4=+<$Qt`rhMH10)G7{dw!8jNiN0Is|A@*RmoAVug zN?bJ!0#`Jam%CKMfZfhYR<{6L_VCoMoq(m1+aKBJQ5ve8wZR6Fd$IcLRODV>?dO=; zfMrp)+U2HMKh}UpKogf&p_a5*XB6ISC8KHa#?q6+8hFDM>;KW+8L@Yc_|?r6{jgTH zWmfN#8gwk4ECer$V@yCWF?UzqVzWG&A-p^f<`PBOd>jJA+A8QocQtrK0=HK#^uIg? 
z`djDF>b#<0n;e>hY(7UtA3tp1VBRDCwWz-BEQlmMrN@iSU`3DxgWYdXCA44|O!JGN z_z)BQ&uL##bm%?SgCBcD;cZ3w@edi0Aom`YSHo@wps$OiLJV+7>|gb&|CxN>T3 zseoKc7r3J#k3shOw*)!%7c3u*uE#M1Ego+>-c;k{Do;|r{latt?zon+E!8l8IcT=V zdpy`G%uBrhNDAKD+cpDxYcygpmiS6{`QFHZGY^oqS}w0}-W}6fez8pnI^3u%bxux; zHNm3L!wMgnsFOwt=S)^e5=>ekOYKR$T-Ecd_9Eai96TjbS+XdRzWeN6re>H}3Ps?W zk}85$%lfgz4R+_#e}GVAWyYR)dOqOS%}kp9x11!y^1ob%N7 z_(hq$KwbB>ve)Ud(H8|pS1;n%n>Bd@<37IS+`REqtCow67+4x$~CDs~E6nZuUeQeWT`-w4&~(8|QIP zY6-v<3-(#m@+Q#QFhA<25L;)Jp%sTx*YwfV*JoVmI;KyzgV9h&mXoWwZ{q~$ld-qz zZEdk_eV&;pzfbJ!mN;K{dI0gB2F*y~AC@PC=9CL8}UiM;IYZwXkhgjZ9OqQLT&99SZ(#RISJeth4B+-B4H z<8z^ldzdXXqb5$D!rGkLoE`WL@%MTbx!o_H9KPbxbCk|bvf?X!Mb;2{Bf5*H79kpZcBH_ zeZVxS7s&KVRBD&xFnyDcTJrv<8-@$xyj%8O`KhyYyIYyG^tE%BpPH1I1qtV?25;n( z9QOI`mcU!`wCv}|p%?o^FZTa&@~J?i)j$oMHP6R%yD>D&MS87WMGbuZ^Iq0Gsu%mm znuh}`+@s8i-}CPJM@-%b>qh%vf_fIuZF;F z9~HN{$Zd7Ww1|+E`&$oQ7sxAT!H}A?3YX@9LpJiOJ z7j$^+*kcr@nRD#u#_zCqMUGLGqXBhf9rfFjLKa(non0&v-eevz<>YsG{ev}~HPSC4 zS}m4QdwllLj4SIr(`3Dd_hsj*_xyZgnN1&5T}lStKG*P0ba4yi$KI4O?POkM+e?{dZ!vJN>?XD&fKo0?|Mx_`u8W+p_lj&3KD9c2|SB5>1)jil2G1jFRIG4pAdTj zCI|3nCY;bdAzdlup4rnM9Bcc-Xhh9=`!OcF4ZH#lzn>Y}er8C$#r`>?_NKW*n%Jp) zs@{LQ6?en?OBv-@bn!o0oqF%iPvl;gm1I8)wiTQX?sd3trM;&E&Ua1O+!3~r#i+d< zriiu%)7JmEQn2>5A<^3&EN6r5FEKjOBvN{swN0;Y|3FQ#a%!}7YUye4O~chF+&deX zzBXh%Y$;{WsL855rx)WMqB{BG^XhxQXMh7?oj?h#z_BmN4~k&4IVR)k>4wC)9qssBLxjpFCKM|8Zy6+VcP1)jV zej7g9ATh`%gxszSZs!}Tyn)%@ZVarO7#C?`(vhwuIyZjra+qA*#DVeIp5i&O`|OMw z-$tp#*e7iszK$<6y%WRTLPI`EPKOSR`n}XvJAIDcK6R9>$;Y?jZ5Q&zS}psbSA)G6 z04I6@fCr?egI0%Brn7Rs^#PP4Z=I5^to?x?d|QJ3`KtZJSv}N)oX}R6uG|a;kjbic z?Os=>6De0F<*2#`!WWzk2nQP~_2m|~6lf_%xN51LzGa?7Qj0nRo-C|<6fvpQcdD*d z?Vtz-!sk`*FFQmNwAYv`9(FoI%B=GXK`V69rW@n1$h-wWI#{x^5N=z>j z;OAt1JskQpcCakQFXtm3yb1eVS^`;3Ef?*1)fN>4p|kbwr+YZIR7+)si1zhzqH zwb9p>2UeCZ-eejq9EhlwXfNMrGP6%|n4nh*ef^Q&~%ONW~MwjPjPAZx}FV97Gwd&%f5BmBZOA5Uj8;e#3aS^K#tKi&)0Qut9d6 zr;5SXISV%Pecv`Ts!1_@iCgG)7o|&$M#YasuV}?i6%5@vQ+8nb{=P}G1__6F#h=eZ 
zLUQ*7o-fsT;c(O3^O7w9m%yr%0gM{Uc&dir-1~n1MBnd$HBw-MpR+9G^sc@8m%N@H zX<|MG>wkF^@xWFj_9|UF#BcNJNs|yUs_Lqj}kx zVN2-D@o}&Cgq}7hbxO^w_f-XyBR~NPw=`2+vULD4#YB1|xtQtllFPKI?IfsrHkwPMiW=O;bS#{zp;i#rj({7Q#4_=rJwezUt*aqaNa(`MQ3vwc5 ztdKfJ#3rptctd4%@5FX$(Ab!Erb!Hsgw!rXkhd=)Q@fR72GPMQJ8VlBd#EWLGELds zA?LiaH%1d0exQ`mJ%35!MW*Fk!0N{XK!|h}9PlSfIp3*E*r6p}jvo|Sa?xL8iuYnA zAuQ9}S7T}-ot3!o{AcSUcHMOD+?cJa(z!#k#!JU6c4c^t8D6$)*01J&D^^}xmt(?VQt3Ra)Na2<%Fp~ffv?#^?F?NE zGjUITd;Xkkcb@kElR#x6c{11@Epjk*agpOL(I5yi7&8Ws&hwmICH1YJNoC)pto{&x z|IN)E?3XqWLmeIml7@wEeWAbBbcHgtcj&ZRFr@PObM-}~Or$kKXNt?Nx;$xk>!Fsp zE;5}_WNJYn(misUPyU)tNJai z)O6h|hj{uSrzSGH4%ukkRn-eKriK}7q}&eEA9H_GzBv85dCK&+d^uIue6sdEv}jQe zWYH`*&<1$Rg{8BJm@Kw9=t(tqd#AVOs)Fg3*u`!63^_qeI@X`lDJf<0c8+o*c z@N>pHbb~wPv@iyzy-}j+eP>-H{J>GSQ}^;W9QBFKDydBy(a$9gbBWorz+7j%&oFUH z?@;Ay*Y{GUnD!KF2PwUx;ybA!x%;4HPv%5PztpwML?X$va8Nk6xcaRJQ6Oi z4$=Wn>&wb6Gy5$%9#6k2uw5YiDISp3_}cXRTFyE1Hi_Dp$$apkB;cz9p=k2Pgzvk@ zr-yv9F@W57W8S=#C`??Vv9my_v!m0=D|lj#urVRk)cxD}^MnR>6AKJ(2nlCh!KFIy zON(g+eLS`aEgC0b7#YWe<4`|?E@vz2y3e$ij6|poEu##YIJnH(eHYlt zlRVQFHT^Lk)}Mj(!&P7BoIKvd=i(d#Fa{4OU>>NF_$V+n`ORKpb?S`lK3$4;ROW?P zC*SwAPihDVT9PIsi{9MTNVVb+nDAA3H}CVZ=!@K?p&ZhEeixfTiEv#_uZ zPCPwy%b_;A{b0Qeim=T^u^A7CT|aH$yjD_g_4#RfD%(b>nyoJHCHCB&H0jTE?)#mn zl{3*U;&i%Dsp7+V_tL-KAs0?d3T=+2vlc$#GRmPto(xL9Fw5;QK35E)Q6 z@+G6K|3n-4OPt(OR6R&`Uf~q9mAN{fc&Ohr>?E1%6 z7xe*(3rh7OvqXj1fb; z<;bwwWj~`ip|x8udz4GGicX!uK;O(uG*c-co=pUZIdo{ZV1zW(=U;!-%3&tww_4FD zIg5_sX76;ZeUl#~6!nKfGQbd0mpOWL$ZH^aGI_u9Nr|y;luCQpyw!y=@y-H^`e)f? 
zV)BJjEzj@5o3HUGv1#ct#Q2==W4h7yAnzaXw zd^;vXFD(7753TyD+Pr6cyv~Y2_vI}nHxmo?$y!nI*dMo*%7rEeixe(M=DeEQxGf8y z7(ulBI?bUYu+`-(pB@VDl^aiTkJ13#Bu8f>TvL`e<$yy)iwyfZT4%rpTF+s&q?`(f zE=#4TCUlZ+cED5rxjFF5J|Nj#%aII>)?rYl&(1pMBmmkW!VD#yH9#p=!&S)V7u=%SmXIWFEKKo=peu{F-3ZuptM%s-3qZ)8J z1reVEBqf(Nr=$Ioy#xqZibslxrsoMK>uZQ4&6X~$zb8{nIiwN5h+wL?K61;fYk?g- z9p9XVjn)+dR|>|y6Y{fFiV`esmB*PnlNxiUf0))IQd{Y^>S$>Ppu6SALs<*J$)FKA zS8uarV^Iq03zU17o8?>RwO~2-A zE$KZaS1ymX7%8Kn+SL;3KxF5s$Xi@7a;#*Xebn;TP_#_kK+#fRf}-VxaM-s5Px~lp z8pfx_AurR%Dy4l9aZ6MdHCUFc#OD&nP_B2S{ou=6YL@<*cawKh`3m;?t?CBu_vQYR z?8oWhjU>u?HubAXx$%~^0&kM6=M72+G)wd91|ktl0BH^Va|~>8xIf-9Z<>NBls6Ge z=&^bSSN>wfkCH+t&g$rd2WBesGI@)}s}WoYXHG7{U8#QZ`zTKTFp5;bqRP|SGRy@5>xS#Nv%%BkceGNh6h3n>f;=dK*(s)|{i_LP z#DB$=Oy77NNl4^nyJs~@pqi%Cz^cMYwn4WzzZUe3l#1bhurq0HwBQ%}VQLQJvGG1r z4j3G|JhGG#kiG)wbB%jAhkojLj>UP~Zx%Hfbm4h>WTtnAB1)P|>2}wAEe($5OSFB;`ypQ4@ z3tYj%^!%@;HI`D~Y+VG%Vy3~g#QzALnVi3q{zo!uNKy(z+xj<3{B9J!$_)8$!lQfr zjZq|U<7=VKofb7SN6pQjgtwI6sw^f-Yi%-MWfD;+dfnZWa=rtID%l_^ejxZynILwgy%hzP({+_23kWM@K-s9I6Nbz4=3;n(Rb?32n@ z+BHXkzlITi@$NK#0*QN@i@+za)kDd4&0DbZa%SRWNU-TwlJmU^%~J6O6oE$|PYYkG z0EL{&{+C5X-Sf9^Dj}Z%t6Yn}4uo$s^mhiB%_Vc~R~wyC$TYez$I_^c9&iR-DTj?- z%=fZ{--aJVCJeps@ebt!1g$d-MChtvJe;J3-kf}2t%dRd7{hPcgPj3_ubnrkIj9(= zfBdu@PH0iF*V5DdCYaSvIM3`g%{o0XeSB^kiTOd=>HD$vdHKCV z56Sc8T2thgDu4QAC5))31ZtXlTND0H-wG*jD@yOjY!Y`2sc??w&JJ!NhTe)#0P9Ka zTZJ;|+RK2|hf>%Bw6;@idy7QYs%v($$2c}d?tj_dPx~1(0EcV4mUw&EZ(Sq#x-J&9 zQE}yB@&jpZsH9+s0ZxAPZNu~O)x?Wj!NkH2{e#RR+C?(v-fHU+ZnBUP`HcpR@5w(h z>Q)@7Km3Avlo5ZQbw*@gbd**&%4%XVC-G$c7GiUrH-6T^$M8V#U6f*vqCHlM8MwO3 zKkd-BW5O31X9aGu+KjmA8rmfLB_h+;3X)W3OG8{aaabA|gVZ7kMZllAHO1CBS#zux zT#x)+{+@Qg#fwaGYFbBZV<*pu54|8tUMpR=pStn&P`0C$tmV;dH{*n99$d50&)daE z^E}$OBKTUvMw{LzNoQZcoPL+d$$19r9iRVj-DTG5*>bbJYiDp+fdGMr+<5Ltft&UF zh5POb4L65{tvKs!^Oz68V`J#CWKJhF!c5*+;n1GH8zu+llAJDY|28#g=9B`{O+-q} z`vw#-AEGU>JZ2;6=j@?Go1WBUtN6hEYX02o?&HkXMdm~^-{P|}qp4w{T!)oD93}A3 zH`UpS$7cJ^YtspB^Jo*kVU!}%xvQ0yhL36LTgb(-6OJN=stK?YzFjbG&+@sLw8~vF 
zmo|Z6y{f>-S_03OraCwASXzQ3ONZ?)dA?l;v@bVVt@ZGKFni3VOwLi{oN9thT7*}w z!TQe%@&g-Xc5}n@rn~Y+^IR?8&2lVs?lu2?Gbc@p!uqUn0PQrbjL5|*%ZTb>Zcv|ZP=N^!@F$xG+lYm*s)|;6m?#k{3=yO zT0S|UNny~aGFjXwRFrsC+J%I@zg2za_@^Dd=>3SUC+(sB@eZRLd~auhuY!Rd`H!V` zKHX7p@&UNe@sYEKg=!RSm6O3HV!nSo8m=ue@T6{SMmatEYEjYyJ4H)QXD1f3{ydpe z-C#UB%y@Qg3(D!Dc11}cc8UxK+7xW;fHM5TW4_}DCzL4+61$~?iwphZV@El9-a-Z= zmKHxYhz4TmT70Hxkn*CH5rU~>n#tblXJL z&tKHt{UddQmxJcbgBi$e*8!%{p8^WVl9V*zHikKhl?Os0H!S+DFkTyDYK;X9{?nk94)zcQtIpMQ%%(mhHhIx7`%yoyrZKg!i%n4Nr*;; zdR)n}0oU3d<;gWN*Xo#P%63iXNDmaK8@BQ+e`JnG5aCm=_#s*C>x4(zbzl@N%$gK9 zzOQjII=av*FPmuCcdM{hCzVuo5+Y8 zDG0f`EE-ZE=8e%p{hZYJ>H#oxxxzKS0*Zkajvpx}9Nh>&?6G@9D&F25aobMHL%?+51`{LC4U!!2h`Y%pdPr#1+V5Lv> zcAihm89dC7MYKo|t{A!c+V<+q>k6ely2yq?mv@Ejktkm;&jhi_Y)itukYn@at4iHNS5oYaQ)=0bgQ4cF4Pk)j%uJXAmH z>g}N;?Zc^Ip3MM=HLg|8o*v!_$C>-e)=Ypo}?H*jU5BxCFOG*q`ITh-=??!ql>ABL`_45JmbJ?@;; zWhf*it!6vJ*q?WJF!Ekq$@Ig6-{&^^pZ$eNm>LjA{WH!H{BcH1?A3DIQdy|^4Dss6 z0(Xb_isk$Wf$zRK6*x+dv;X~xE&Z;y=JG9Ic52w}saB{OAHH1-q;K=N!2SBjk?KN zN{TP9aAqK!+lVWEu&pFwwA$^LY|x}&MLc&@w`|*bKw`K=vCF+Kl7-)jj0G8&9wqGa zoF}BmoYEG}L1D0#YX9&zT^kXA3?vu&meaM3{*2-L+oIy&g>)1BuhE7r=ZSS&B|#Yh zcn{w|$590=YK&e1{|bTZ?uc5v$ym0$FB%fK&>B5{Isx zA+|QvYk-(D{9BDYEqb`kpSrsINm~##$l3`qFAfzREci*rJM5ZVzOi}f&dqV?JXYdt zFm_@IM7gWTR!#gB?MrZeMAt-Nh+Q_8y;_eKyF6(p6J_uNS0I{fc~!^bconG`SSZ2O zC1CT;ciV)gRX}GfN)6m`^p?-C>@{+?PYhX$icid1J^p3eO?6i;^gsT&Jj}w_M zr;2@zK4b3vbeKMS6YR)}^IfU9YYOK({E_=id&ezs0N_tdeSMK{_oy>m%81LXO5tm4 zx8eNNjV6m5G6NXhY476BseV`5{BTBczQ=-j$N7RvB7G^0Fv3XR#&Evg7+Jp6~s!*{M+&8+mnVnDE%*mDOXTzd@ud-a(jYQqx z=Y;BaNblQSm2X#JFRu+r|0j%7NH_B zbP4~`v(fi_CpYQyv_Y#=E0!*%A4+!r*dp?kUZ?tAb$Z3#K7FX#2k8W3bDXyB5kaw; zS^X9AQu%|EFPpEHissVxaFAzC4#R1mIkxhPtj~7NF+gB~iRK;W)PFj*Rv&O+5NgYJ z_@p6W;MoPzFrt~ZcjD}8pYDZ%2~3(>d}zbDaocr_r`D z@H8)tXqgr{JdnFEReMzwmqCH@eeRVr%&3gh*uZbDxHBVju($KCt9xQS3?!NE&O&w zRn>bVdZ#ixHt~R;m&xZ_T;gw|CFv^@tigNG&cRDoO2sMH$`q_IJf2bFRjW^E!n&H9R|fB7*D7NkRElC~AI*6u-&2CBOUr3>B_Q|-e< zI3$Rgam7|_(2P@*HzO2&Js|nDL+vM7f3_h~N42+pYb#S$J5&p2UOY^lIdKpF-e6Ox 
zHJ{6uQhA|b=XW`d6_n{n4PKGqT zeje1!UndnHI;5gB&|s96^&hDWw5A%V*ZU^E*&HdqX3P zCh!(2Wqw0V3fklPKWrF2e|r}#&G5x!S_eeK^zl;)>mlG3j=OQ?^6oNEn0qQXHc&%6 zekzYS=8a!7RK_Qu38lIdl9mY5GeP@Y+TNj#CgPi@%CQ%C`&VO=RhSWPSI9+;f1uTo za7WKv+H7MX)8?eplLU47NWQq=T@1X+`+AOfTEi1VFB0yAUY0gn@9I2}^HNJ_RWEnU z1a$XBODyNs6f}RBJXAZtIvn~SK4il4?@u|V?tDnsUO`QW^i7}!`Pf0%ihgm64{=m{ zP;EpO%P?~*gpLy`^Oa{*^miYw%v=~cKh(O9d_2menThQ<@k!eq5I_p@sOz%pTej+{ z4Q0*BGtW$#`61(_*dKcQb&30s9QA9R=Qtu(0gCln8PTZx6Z7=MPsQiIK7$e97q|c4 zqin~$PyV?ex%jKUi`z>ltY$i10`x^8z{*U{LtaI5ec0korggwwrMnyJV-LS1 zv9k&p@o9bCVGxFF$&n&sd#9qoXSPSF6C_fY^3`C}u%)&qiSC{PsHbvpfR5prHAU}1 z8*j0xLh6rUMl;na16s4{BD}sg=mQx5qgH0xfrzin^c4=y8|}ynnK)6IiM6VZd-e+c z(novJ-n@&R;TgHu6Lw8-*2gO4p~0Zg%RSKQl0>bp6D(}U={&laVGo~-aRAyvHw9W@ zdzcjJF?ankOz8q*)z0D(#@jlsMy665ZCLxMdlz!5%n1PaqZ6}zs<1zONE*GA;u0Um zd|o7yCP@#!T4;IQcj}3Mcv$y*o1h5p(WdHO8g-IoPEU)X458ejJV5QqEQxZKMGHpf z0%fP>P)hvEsKgJwyznjA$}-N?WxN0HW6^L&65&PrMgJFFj6Qw=?4eA zLocJ7sQ+PQD2#Cn72(CtVlaS-^Ue%LD%4_FYSE?CnTQ8bZKa4UY_`5odfOMEg}Pr* z@Oc@Ajpo8>dKxU2jJqcSo$To@doFp#VJQ0#7ehgh2$*^YfQ9E;h-6)9KEl-avK|t3 zStK+Hrp4*2QXB9N3#FxyuY1rJV$*3>jhOsb!pjSFP!r7mS@b6s3EbAIEDzU>d0{6S#dQ&Hpy3BhTM{P%A3G&1pN0 zHjyjdr1>S}!-E4C4vv2h?|LcvW&Ytp2-o4aUyC#TX~y7~4EY48_at%Vpu+k0eGrgy zJI68JMD-8wX0Qcps2e#xt40Kf5yIYHbp*paDtl&8*>hMGDsEer9j@LMc-XHb-ga(a zU+BlhaR4U8AzAHwhno9Qal6Lm@Xq{Ug?_|+hrck=KtaFp$3~iO+S1H4d`;5L;$@yd z@Kj{hIC1TX=rTBF#I-(k8U&|oMB3cUX9eGOZwvQVG&IsH_Db(;dp&Wo1_J6y5m0b)vbZl=vMB99y2p$1qifE8seWcbVwl*lPO zAe6Nhwa8E-PS4!^Sl{&i$!fu5fvs+BhtSt-y9gnUuf;Z&9W8>KB=Th*7{G~ZIfrX4 zYzF*fD>IQoBF_Bj4+(%C-DL4#p5)s#^^MIyk{ePvQ>ZqP?DjEcNtZk4RAXj9{>uf+ggLlleqnvcyNGke9$#Xo9Dobb0=&UF zw1?OGFsLKj>Ba0Y2vD91@0u|>W61PmrW+4rZtSUROHZk0ua&tD*ISRF-zH{NBlOFG zbX#scwSQ#y4jQDduHK8QA+NZ>=*(22hE1;`{8-Ajn$;c;*CU5naIRhV> zwB^QbZRO1Tr)Y)8JGMfv>Mxsta9P1zX|vs~?#mtiRxmg6(p~J%&Bpzc+zn0}-Iqga zclVACZ1yB6Tpb-i-|RZdM}2SMJmd%SLN{R*>LwW6RFrRZnK@%JxNsEq^+Ybf?~~m0 z{Eubzf{(t9bVV)1erJ##ntNK+_uS?O^9&mkp*Zu)T}RGP{;48WB|Zu_RrK_hF{J7g 
zdjUw9lz7Z(cMCeZ-mN%5(3&v?c3$fN7Xoulfe(Y4C>mWb_X7KRg)=tA(D0X!qMST` zso!xNJGlcxit8il(H&zoflhZJI`yI)Jrp*6Lsz0%o)~$y7vhi->Q^@onuh$5%gar1 zO2*!ZB}zh0(L}8pibb($V@@Fa_xWM-ey{r|ZVDeD#IGaMp`$IoghNOFc(_d56hcrO zJNU1o%4gwTo_Zpj4D;|74O)4I_nQ00A$T2K$Ub{8Hn&2pAuO1r!0kHOThHcsZT14x z1QkC8@e8A^tuGnEWrcF3p>qL3=c}VE;0-QWmOc%@{Ia-wZV|k`?4ylA0y(i?EkYv81dAo;)d^!Mhl8x(0 zujnZK$+Z9dmwz$*pQrxWGDk5KvY;1J5v5}0OE#xTn!1E~hd{k2a{-r&2;o1dRK)s2^M0=;n=_g;!e|0M|g4`Y)CX5>DW9iO<$f)!U~0Plokj zZD!Ku{}C7%*fM1OO&5Nu;Qv&y|LXRN^Zsqil4a%m|NNKB#5!885G}>lhGMo}$L!)P z8ttsRRu0}vX~``RIro-uK@=DTas0v+^E8yH9AK>PWBQQYe08gnl`XLp?AL1kXR7f; zfHaAQHDY$BGt4{eRgKNr#baG@+-6RQf|PEMlL%0g@x4q|z4 zEPE1y?CI#uXBV0p-_O;>cZ=D)ZEmzFT$%?D{72dYc|)**B#~dNNCnx&69bepV38&% zXE-ydI6P|ka$=~(lPFOQ08N<#Y0pYJc3uUeFczM)ejFeu@$hRmG?y$R2T`6z4vw+p zBv>KSSHsLR9067;n7xN)8`f3`U`)BE9(cLD&M_!s`$wXYi1UVx+svfIXVhJm*z_UN(b~9W6oeRsdt(ibURno#M`w-Awg@igq&$9dVOs+6wf33_omx!@R)O;lkcc3@y2xhiz4g$Uq z_;usZBGz*ZxLR@o1alO?7c)=WC~?%p1wv9Oi#U=U2(spVCzk7PqHS0XicSNXAAr{Q zJ`2r4a)Jbkmk$XNR1VH(9BNyH9t0>jUQ3#~LUh^pFj`%AR;wC&Fn2X9S+uC`!JLXa z*da{J!8Xff$Z9c~$Ej0BH(5=yr42NpWBdl1JG4%K$ApzBs41404DZ$!=Ydw4lCQjU z-*tN^Mg1h^Hq&CP<4kt;0$f6vOUVu}+37C6Z{u9@`{Jpt8pu~*vcuG|LSgodUjLC5 zy?doxD{LMwFN+BpJHl;?Auq;*p|SI`xogH0aCZoX5!3AEEO(*N46;n1arBFwxx`_A zNIblU;$kUk0fQgR5wBHv;I-JuJxJu@Z!=|W#CpI+&{tQmv4SoVIs>(*5vfN9IlKJy zs4`zE4$QFTTpxslmm=6I;PwWK7y_3HqJ+ z3-Skh5C75k^1F*6LO^OPkYhmVsez{bv_Il7W`E-QYB9diLG`IG!ydKWQML|8zKzBy z=}~L<)eBT(L-P4K-9gAj_H8^gi$)156z~k7=+Kn=spved$P}jj$6o^!-4;&hg5zOZ z1HXUU8ipzix|bDGyzlw~yIlKnw0Rswo4TNv(e0agH2yEm^t+1^@8+R1&C***Jjj%O zXu!Pybf!`MYT>ouB+%6EWY!Dw@&?;?f7M0OX|p_DV~1Bg3(w1*b9{3%SN zL#ZnDw@U^Cd0ur+c%cGR>N0-<-VTn{T;FlGp0nW}-vlm*a1&(peaVYJ>`?O=BVjI%|tOm#1* z+%lGb+wqC9@u9*FpUe9LuLaMXwu;(%K)lSQMo{#d{rU%@?2?O(oc#fAxJuVA61pcH zuX&_ZgVV!FH?8p)#=;mXjiwN9!$94fO1Z5yokc$})_<6$-- zk@6CAjLcY5jBlZRip1bE!{&mS?N$fgg!@q|^AxdK&G(Nc`0L+u5f-)boDh*8Xxe)q zG*3gp9|wyZcq4sQrH0+|0UXo7L+^`g&_N}mvOXNPjiu)VbF_7XRdG8Z@;G<4mD~Ie 
z-dbwSwBu!?sqI4Q&IyI~nM%rIsYALaOO2i=^p>JU+%44UIOPygBeBKJuu!Sd>36S) z{LReOp;X`$f6G%q80L-qssChN!nZme89uOyDft?sxMZcv zK+=Iw>4d_@;$;EF$oL!&bT;RK`pKr1qn{&z8Qnq$bkgYl49Voy+BcgyJ38njR1 zr2MD9OF6zNs$Y5RADPrXmr|KNhVQ_lGb9EEI;$c=?C477a4=c($1(=An3O)3Gmqn4 z>F|C9#X2)atW*99y>XP0M}YH+wi1|(G)c=uuN1Ye84riF?Pp;p)whg^5`&_&QdD{# zbF)SH%ep|3=V+d;cjr+bFNtj|OYa-x%hm_u8^w&|uCz!IOKVLR6 z&K^r97K_yNx2@@oK-;uftxhHlwf*4X2)04y<@dw1Fq(51X)p`S(h_3~A$Up-KI3xQ=U&s&An%n;Un4P?-U(Ax?MUh0GU7aJeQWWq zou-8bhi^`%#x{Rb0gd$nh;hF-h|@i*L|$ULwzE_wF}It~u)t4e%loPZX}7%|bA+&Q zy~$MD*M6T`<&U*SBQCwA4OjSsW)QG6}DpHTB*Jl&zoGEY%;lwT6TTI>vd>%grfl~+PLXNvNuq`@%Ir%W)-9j0 zj0ff2U<|YS`px6UC5mLiET#p*=zs>H82^Ia?N)-}!aWV?_bT;iZRT(%)B?BG1XaR$ zDw!1=nBW-(Z@ecEP}=*v)3F0e?qDyh$=M4`^P`q^lN(E<^tVm9$sCiDM&!?S10(>p z&G2&%&3eH$15eG|gM?#AwoYBbI@QXXHM4yxxXaYBKwA6j6JGmbRx95cGRa_di+DJ5 zyF-=4VC=LWm_YVb02Ct%=fa~#WiNP~%6vwvHz;yBjY$>tvDO5i`npgmQ{2AD>?=iO z4z@PC#eRsBx^Y+MQ6O1p@5wWZcC>R7ilf__!nc&RRgB?Wz+%p7X%!)-#(*dI@_>JO zWpMQ758M#7*Q7ihcp4EJiwAHz15GZL_iohqF>EQAcJ$Z1y~g&ubEv&%-e{`Io2Nzo zcQKSI<5VV4yxO1B4Nf#}gT)q)Iowdq#TcSG#tALVs_s4}cDaHe^np#0#0(AR_lE`T zi0TNv>(vHvU>dKEg;;Y^JF-NB%Z{TAxAzpGu-p#Ao5;9KzPCH5BhzH+US8xKF+&XT z8j;fXP7+SG3<1;?I^MPHig2Z-aAALjh=?5#c!?dG%V$ddQ)z%1C-oc-(Dm2B`wlc= zARrSqP7*5#;B!n43b|81_U~Ykx$qGnaSo?@vN^tRA?}x*_cq1ME53Hd`-gOyN`HD5 z5F5cU;jhDVI8Rw&-mFq?zkjYO2ZyWp-VbR{qFLP~-QZ8xL=!CD6EdQ zp>k4ZHw)$RpPi%@W3&MAoRiZB(_UXR%Kuirf&Nq*A@UaU{Pik}JTon4oSqVd1K_v3 z@>#-R$f#`*VkF$4%PvqSE+0OY-YOM_ zjtSHiT_;GdK$HcErmugm^>50&vWT_GRvH5 zdtBLKbJH_bnsW8|o&sUlxl8g5yw%iQ1X=vVGvroHLGI`r*0cm*1%=VeWipc{2KbXP zB`n#90!|VuxJ{+`UByG|m^Y*(g2#0S&EY{>ey8dnYhvT*t8b-yKRDZn1r#JLbF6f@ znSUkAl;r>iN@bQ@ZFP7Z4dB3&-AA1umzYB^8BuIGQQ(CMClh7?^R25 z_6;2S;}v=$zm>QV)0DGN@620d5&xkTj@t}ahIid%DlJFMI+mFU;ruTlZ(;&xI(qdW z=#h-C1q!W!5M=pyF~&qZ&B=zmg~fr|y#@EZjrR^o^c^3|6`?slu?7O)Gg^X(=LjT7 zR?~;;r$V&kc>>Hg9(pQHQ}kU0!d38OyCAwUb&iY4(P&$|k1!sixSoAvan5@!mf*BW zkhU9R_S-dRLS2KW>8WaGnO$mJ+yx{hXZaTZd;KvP`W`pB*@uI7AlCx+%h#qv#H`c- 
zglARqH6C}XG2x7f(11Sv{)!Wmm8cseZ1*qT2&Ay+v)BTk$g;6zv@qlB=^**78d#!< zA$ovJs#aBv+X}YHxQa(3hs5y<1Cl2G#0YGC$ik4mTiCn7`XO%_&42DtayfxmMTWxk z*N`%Vs9>=s$you&7$F=|RU@N-2S~r387H=!1mGYEsA=Ih)d^gI0rtNu*>2NPqnlo- z^y1E!0z=4l&YUwZK|_LFvQ0dLN@5x@y0lG`heOdO>oqqZTG9mGEz&6emm1uG;91o( zEO@SbK_>-F)JQ)=Z*{;;9RgU5bi~j6o_c1lfv-y@vfslq(j35CpP~ zjnS4&Ws!hYzbO&ss)+nL9GY-saAzpH`a{Wv#UQs82D@ABkB4}UVI*`H#!n2FAxMx} z!BSrcG%QHz(6Vw}q6M2uqa^ES-@9X(_J6pj9Hf4E{(7ws-~Z%SgS=cgcz-w~88#(j zL64;xc(>5hOp@X1&vTxHvT1{Sjh<-?@m~T9fr%#MYZbPymA9*E5$Q&Cw8Vf-6e=;=iaSYj^PG0b9PCsTg z`O8g^SFkk&p!%2Kk23v;m|Er)hCmH0v!2o-J*QT7c~EDw(oWE0`8YDKSiww0oc{3n zh*evev)q=Ef$__Kbey$+qU-?-aJ0Rh*8H`B&8Jv% zjf5fhw<~!&Y*n-A0~&3O9f=ujfi%i4sVWeIyIXuhMHvczf)kK9{%CI0y@|9Ew;T&6 zGCvZSTi{`;AAGDv)50x?z)+TefOQ$`mmh-*u97EIgoO9YfY&;hX;u6)g~K)gmCZgA z2;PEk(V7;t!NxX*%p(#3fSS=L?9Gz*Bba&q0eihTQIrsL(B>%fi0wr|hlbxIDd$2_ z0QMO4$$pDBzK8}tVRuY1^7y`a3P`AyqpwQ&o6L;k_#zErYIa+oG*#r0qhMCxtB}T8 zFhNQePKcL6x>gBZsP6@vLHuQRww(tRji&jX#Y0&~+}}E8-Cx+`*=iCfB46U0v#@z7 zbyd#O#V@6vWpSG}W}QE%tS}vYNOtyCu1M0;Ruhlulj1lYnu@XK>>&X&ru&$^F6VlS zAAd*_c<$JzM#6O1o_M>7P7U)EAV*E-Ek)@(|2EoF`&$o&(F9f>k!yfASP#q(m`+sN zJlJI{E$^P?7&>KpTWs}RR!@}L9MRWwa5iI`92*gtf4!Z?Yp@o-*AxhyQ(Kh0zX`!t z*%8Q-VpuV+b_86~1jDHY2Ty(brIV!<)?$o5*MS(^=F?3RmGjO?te0Kum6U^rbb9@r zp|1DJ1#%?)2?mI-!&y<0(uO*+yj5$PP2S>jM-L7T+s~K={o11{G=0hFpUCn%?Bn+r z_%f*$aQC1ZgS2e>% z*jd2i*lyrOf&L{E9Aub=T#&X9Zs;y3R_yIs)VNNjb!KWCL;`Ulh%Ka)VqMV=r_i}1 z!s&=SK~0=yb9P07g-}dPb%)yo2BEY4$k78!ESV5XSnM4;2$wq(DVHNPbl%LTmt4fp z)7LZ%@O&>Hv)cc?eCot+;T(F2a6sbJcA6Tf0vSseJ4w{3vP6|7v5uY5B#EQ&*%V}vJE{~!w{E^}0EO{-Um4<2exBmS0ZGeR}iOYNXAq5=ymv!Kr4 zHp>d}&li+@Am4^n#5u=uHLhDS{U1vJ;6g@#9Yyj@WPRiD6+t#8Ii$|ugh_`L|C8?k zcN?RDz~V+iWZ|N}IE~nHMA~VL>V0M@(h%rFkVlf+-wug|tJq`PjCb$s=`>ds6d3*R zprfda-ss?IWWQA6pZ_akA;qEh^*$dF9ON`#GW=+k64;^H9QYqjK>%(T=Jo|Ty>Fy- z&Jd049lJn}QS;!ZTnBMcDYpa35;+Y1C8Ov};Bt=ckK`Ij++gy_RnHUxP7*O5jv>Ft z8z^a=w~XpCmfmZH5awdQVsTv8@B1{7f-}+zY<91*|6Ia0#f!pdaI(@2Og||@VeSzfir>Rf 
z_*O^1uzI>xF=-d7yqc9|0K`$a3yea|cK{t}HB&hnb1O3d{f*o9kYSyogsAlGkAM`53ZmTL6%3ImFXa^LwC7cl8(wH-y5Ye7!O6~2ppIaCu0-6 zN!fxe?e9t`6v)%diFG7O^$@`A{nHG(#+Hp4R43mH%2|KHm+o+0K|<=J1bLSs>%jjg zGT>}u`c;y(+y0i_lWbXpf+RBC6U|e0Ltek3j*NL=2q<`BdvQW@dJE8 z7h@*8hGopC$eO8DJQY;C4CF3TUQhpv)BTz6!KXya^f&DV>>F;9`Q2l5WojX7*KbTe z1}OgNPh0^5IAG`pn#H#`(&-E=b!jeITC^|wT>&9j3&>9Z#-mC!ywt9BuCCGK4`U?6 zFLFozl7OzVm(}rbh|fB7{D}%AgRO^$HjDO8z$Fbon{HM+ z_{V8r&JODVlJqoh=*V9XY~>qzfBLnr@PUn#K(BOvWse1@9sNf`#DDo;>%hDGD-%p4 z`Z1KUlNZ1mNPlP;a*IC!6s1Cb%RN)Gly)I@(e!#MH6wk?Wp1Og-XB^HIj+UiG{<8% zNJsp=(1ti^TyWhr;HcQ zyA=pSjYIbY6=VxcFk{j4~CIHaYg4pz7JnC ztfeX8_Dg$)v!3d#EQG;{^aqJm%f`eu8SVIc9R*osRMZRbYfvW}xTQ?$J1r~@n>k(- zNUt~5gxtxQ?|OCLTd@4iaN`M|U*@)`=`0lUl&4ZUF9Iy)t*%k+!(pv^sBe61f{~QuK@Z3bm;d8Y#b_6pkaz}7DF{xts8hJMB}lhzJ;d{Dhan)H_Ywp=YI>4 zM7zUN-i1%BXlS5Jz5>+}UAs+(@+h6a$G^Aghnb9LhDLzs*tCQ!k352E>+vJg(Gj<~|#Bfg-qUJctS4Vb!Z(qdU)H4D$Lzt?nzO1w4V85=fa z0z_c_IE=_+qydP(KODON-Q9Z$-_or5IaOw{mMVGZ<6`!=vDB$cB@-9FwmWD*6JGN1 zuQ`E{*!9_Z^Ylu=CY}%zbw0L*PJa!l$INBhAc#4x=4XA_=iC|I+`?AaAGo52ZM7ohN4fKE2QaMDOL1RIHDN77l(}w zd}5u#Vk|#VYzifyFq7WAW|?FtS6DHPF84`_+aFh6q=n9D=NL}yYdcg{8qh=y*28@Y zkWuqfNum+Lc@-`!v&4urwL>RGB?&}!S$O&uE|3(6Z-^vz6q$z$LlLjEy~#yPtqOYL z$HK<#gpY)c149hzxX6<~?Kgw-x`dwp42Sj5@vpf9;8#PnjFCQ@wDQh4pYAU&mcnCKxP1%}mZF6|pt8AXV zRcqxT?6K*mXc@!?TD7XEZns!8TyWR_!Mq|2AOJ6Ps65jBA#x{8FdbzGgXTTp8Oi?i%t4ntFlHKnYy$ghmt~bC zS&=)tbwgzaX)0=3AV6|3Z_KmRFs#coo8Ln`6f4MYuSR`PG=H^dIkQz2F;s&*%kK@f-IU1a(v$uTO^^I-%S1}D0{;)5_26KV>?ZTQs29s>N`ZzJ zumK^^z`|?+E6fsUjc(3d*8>21$}71fB}Xzw7)MzAx;${U#;Hllm97snX-*aBg_CAF z7&lDaM#iM~jtormQmJ-EXT5#rY%Ny+1Nk&9=@?elU@7c>HM9)A%tf`pk-BIZb?Pfb z-ZlZCsJ}tmgP`xI6sOd5`E=EQd4U(dxlpoMh z_bkb}FG>l-fGXU9o}0@xjIl85So1reA(j*r~%*X{-)87ExNf{BWvZ>@`CG zR5X==BGHwzV|K>|kaO*}iTkCcFi*H**6?3!8VS7b-+;}z6vKr7Lxmqx+e{hv24M^Y z#~-yF^JESh%TrWbPS_|C0Y(P_&rswE-X%=(F&21cxy9IH(*6%;PzxVx5MtJYS}#wK z{ljSFf8nzKWyqiE4-=H1Vd@9XSatKi1@DE%8YgGG0)I~4#lXnSdqYm_iJIik-PC@`{MLJuV;8N=du6+Ujew0j{2xq6xU>k9^~CwLFpP3|>migm3Q 
zJk>ym+$)|C<1^f8_!QOGAy=fE{&?Z(h(6TUnW?SI&DY$i@(LaB-Uzg8!)U3Ly$ZDS z7v4^M5UVOJ_mFNnPC)k^b5W(o!R&oi5WG$FQ17X~)1oe&Y|dLZusE7q$acPEO^#n( z$-MZM)JheF^<_TX@V$h?IsB|Pmry;Fo!+Gn$xddd zm$FvP6|Hy8Y^p!#A9b=_(BI~lA$jP$SZtQ{kZ%xVJC8ix$l7G@p{;*CsyY40Ah&xH zJ^X-A+U@8;gLI=?DAkdQSf}wwF)w-BMyV^l!(s)_$`4HTu;#G|J5Dw>UNhF2%4`c9 zkvaEC^*VF%Ro>*y8>PZ;!w>Z+7FDXJ8BMd%OO)b>)d za5$x~TCHw8buTZ-|3_Z(e281nD?uT9Ut^{ zgOkpGr;_A2e_qjgNsc)egKt}|Xxe`iYBkqH zzg1L)zc0O~B}(I)$*7;6-IKb}_G%&i0%j>a%{)EcZ@kB*x!2B3pEzb0I`H@{!p9Toh-kSScgKHR( zrs7X$(M@;NjBI=m6`R;-sx@kP^2Q7eB`AB@rO~nV)8U={QbMt7(1B05O`oi1Nx7;G z2R3<;h;glXpQ|HtOR!eVDj?YFz@v2WPwRPN6KY48nF&pH*&!emeU=$x`G-05sy*ka z5O2HktJREHyv!Hw!!lmvW%kJL&pjccv6{S(VT|*$uHmo1mu6n;ytus|@0R+6#nMGt z2Hjv8R*bna9n6_kpmo2l&myl>Q~wKA;I?tn5pe#Gj6)i!w20=_jTn z*Ish1Hj%qAW32;XtHS5HU(fAsxO9y7yuEqo=4&eojmkey}pNVTR*=-F(Jq)ri5_5H2RHBXzZ4)5Q6qesPsXl2NfSKihal|;Ai zWo4gp=HvN2pLg89F{AiQ73|88CFELowa5U;AdFZ-_)25eC~G-%7Z2pamX;kNS*IVV z>agAqvsELVfM!oLP0+RIZ=L`kbr0 znr$q_NyJzm+plilbGkIp{^5chrivCZgDG^yuN;mT7RO!y-hl}dfH9g>>#9~GL15<0M}+x!D4#$%~ZE<;}u=ljX~|kbJ)Pnav0IS=q8H(XHm7 zx4TXjUi#OBDb&N!7CXsq>?BY0mrnvKXjY7=xm=cR&Nx*%e<2|xyN%7N?t@{Ly-W7H zg{E$B0n0~j#mA6rGYS!N3=xw)9*v!bTk>I!8rAn2nqO?6DF?P|xENqNd0kR|G;D39 z-s>aHGeka(7+PRXoy3kqst*~nBpwNzxA?NUdDQjuTbe0H(|2EbL_(7JDM;u{sMG_6 zy`yO{-RFZ?f-$w?H83^)X&uIrAHiYUVx(d%dwlZxvmF>TY%E-ls9 z&ET$rEOj~X1=NAHcy5l_Y2*YzybQpUEjsVttlt{35y11f0#9r;zU)37rXvV}zn7U5 z-zP*4%mgPc&C&5~^CBa{W_)N*)D`7d9>@{ELhE3mNxP68Rep#o4=NEN^vEDYXiXUE zhJL-FEsGZwdg;s5lo(QL#&8UPo#>{+YWDBk+XfbP$_dnQu4L%IO_D}$%-ZQ3OD7bx z48WjD2ZGS4?MSDN4hY-UI;QFJc*cB`4geaSR|LZItV*Msm~kJH?HCUAPYuY!Nv*5epW@~grwmV)@2XS&WqkWhwi6N-`QPoql= zATI&Dhm%aq!%6&y1T4vfO7+w6F-7Fjs}e`;=Ign#H2dS8b&G^d3Z@PgOL7!okOPdh z0@S+B($2PdxZ18erBgf{7HMKLAOpd$M!fmSGyOig-3SO^30+tsV3>I)ie?BXnn|J) z^6#z=|5j>STN|4b`J%+6*n$vA)es`{&)D_wB+RAt_zZs+F(@g_mVArW5P~(5hvtcQ zHdgj6d%ZGK3_YbuEHm>%ECWl}h}@MagpyY3uUg=DU4DW<3wByZBo+W^g^f~CmQVQn z2~BC1^CZCyKg#o;cv8nY!?T9Wv{nFVxo0qD@l&=pK(@9WCH=q5>Ts->Uj_z?2!K0< 
zK`fGbtP13%Y$Vs+#z66s2uZk&h$T9E(TVBNKWPOimT0E4t>j02?V}?=~4r}4EIU4v8 zRIpRE+0LcfW&)0Kb$>R;#9eO_{QlNrOHM@$m}$xlCfs8n3`GtpPuZvWiB0bM9C~Y8ce^u`;Yo!e$yJbb)|nB3hy;tb_v23U$W>>T)wOt z-fObnSw12O^?*0pAWFyo*9NDxASz$p;FK1K!`XY^3RC>_FX5+`^6O0Ld4$B6w&sWM5ds+IY-ozli`I3$7Jy<+C7mLi^ zz>o0&pOmdC_O>|m$DQp2iNt0o4*pUL_I@$DNyUKC5fcv#Zk1+eBX1PXWq?3>=%VJC zxMwRz$1*}Bc+d}G78kxsf0rIN+fpIb%uL5TJn+;p&J2vw78F?(0=%-*fMtob05HfJ zR;h{pusm#7-_jAP6+YTe?~mD(r+;PN%b_^UESZ@8_v@qR1qJ{-2lFYTN*@?ZY)5gG zOE}L^-u)d_UhsQl;~goXJYcwM1MTBn8LyC|pMZc2`W*u2YXa>7L3)c!m>x=x4%uU{ zn~!axq}1fH(6Zz47O6Qyo6-6TqKe_%=jl)4tXD`OmZTVjSYK>LAu`5rjM$9Z=fVfTu6S3+ zPTTV<9K@5{_1?X7QLXwTxuMANicHWem=8JAFK2e+VUEhNfS15n)#lN242{(n)a`E? z(DRrOBFeLQ^OKFS&C+suqRMI__bvid{){?!8>kHs0`@rbbR=06*;uZ`l77&t6#ZCZSuq>^vv(+!=bd;3B{!T(z}hh zFu-wpsao9Wx&M<}7Qw`eZ=SkJABTNN7j??z(JCf6He+Jyp5Bg*VxwjXXx8aNq8~O3 z)iYge4I^grokOTf*)C|}$YuF}4;y(XK{b7vJzuT}I?&D;SoROKdFE!gC*f(Vb3I6J zkV>F<8qF_vTE59Q+0osKN|~!ynA-u@?cNNxe_q*Vcj7c>7ro^Pmq}uXqMT!sg0JXI zJJDbcsVDTxU>r>&7t|<=>%)}qI*CT*@!GN(-)_?esG+M%{2Hv0@fV9mU0b!=t^CP& z8tP9wU~p!|-3KAKnh0}j*p&ViA(JJhK8D^2!E4LqtY^M?%sa)4(o3Q6Y8uitpd-t| zDd}c_E)JvKluWD`13VUo8h_jEul3n?@kd8RnodnsEkzXUp6pGz(lwa*V2dS{W5JgD4$3e-U?~4Ux!Y~Ml zT{pN3Z3H;-OK2V~2{3@jw-=O-Ke{%z^+>H892^@C9QVh#hOuS3q&}MhpZ|ijQfPVp zENFBmY8<=SI@HU++{&4NC1IiY_?uzf$G*VzJRW&$vQJ-<3f-9Vg<6|v4Jg1JI@2&0 z;jh02|D>o238P*70)_(Agk`0c5wnU~EG>FXPoxJ?_n z2TY*57}jkJn~;VFBcx09Mna?=Qz)TdAvR0iL|pynCi;gl8b;0I`U{|%T*tFo&sto3 zObU(l_23N9T_0eF#&sA`TniVL;9lp)QVV*=-+2*)@!}=iTipr+7JWF}4(awvj_f4I z{g&?In_ORD*MZitpkeN`%CpL^KsJ52jvdla{t|_&h?>q-@)hO^^R;V*mhvil_yf?W zGw|fl_@s!ki=0RK3#4ua=vs9cBf7S1yhG5h0Pf*B22zmR#)jpm(G zIDj5sy_Cm~hbd+He{9+8VbbZ6Z@RaG*&Jy^O;9IJ-HmREDgmyM7sP0e*ArFT9!IJ> zSIb8XXLL9PJzwucIEYtr7WVNi7aWZ62)pS>i~rg)!d%4hId~IU7G_NRCt{8EJr8bH zBH;aljh_b>8o@2_h&y(C9`=O>?^OGxT!ob=_VVO7Sg`x}>y8}$cRKjJyGgW6v3$f` zJ3b|crRnyul#AvB^jQOmITrI_d8f_2hxY|GYR*TCF6s~c@KSlYxIghp0idi1(N;HeA5q{k<#1EFc7U6m!kAf zb?420HY#7?Ycw^5ubyBmM7I=SG)mg=Mag@F8C#iebUQf~c2@)7xV 
z>D38Z(Ab4#`o`Z=s#wl$&VfFRia)EG>>P8b!}l)+w8$FZ_mq^VoW9G~47CLiMF&VU z5Q@lsPJ1T)(^O9IA&KF(VE=|}Hj8yt?I%BRoLrp=>Wl6=q7y9AhuM49hE}uqx6wM) zNYQM5Zc;iig>HZq$#+3>K#Zm)B9HkCA^!Fk!})U9MYxX*s&$^-v$jk=hmys3;8z@g zA;4ewqC+X+fI!W~Qh7h_iBu-D2i>zf(8m~%lkuY{uDV1E+=|VCHLms{Zk1Z1nK{&w zg>y<-A{9}L>&6iRZO+q(<(n_Kqf76IPJ+|(zQg;(i##tzDKAWRoPBd;&tmpDo;wD@ zTVd9h!9ugznt7g0A*OGt@_S~l)?)D0h9&-6n$_*X6D z)fXNjDlbo@diGIvdk+`VFU|FySS@t94b`BSnD|wx?7Fy%&ImQB|69XNljIFNVO*_B zSS2rqctS&SD{4&+Kd$&jSg=usOc_4)*xIi8dB@{?PmW$G!LC-Gl8e)3q3%+IyEjeT zt}3ytYEJ*0NqG@XLNO+$>*`ca8gexhwz6$P(S^^S9L_i!ngA2|FzO3`s>?!sjYEU= zqU6cK9YdE>J7WtUM-w9B_N?b?fd=hPD%ZThkxMOEzeLgIwq#bw^hA$+~A+BZ68UH zW$CGh$jr_Z!e(SrOEh%>4hYxdo zrt!g?MBnvJsshw_%O4W7ez@v)It^#oCOQ^bl2i(TkvrS8f9eC+u`VE~aJ&xV57@z- zw1ds~*PrI0d<&U}fn|*HGiX>-Ur*jXPA6Q7xc`Se&{18@+uV6Vi_1=Zhecscpc-O47+}r)#*u5oC z!3GEq?LAdZftBFKSnO?Mu=}#ha=4YUMy66;vk_xKb`!m_%xrQsHksLh8^eIGpG3hd z66E*2eR9po@0HN-x*ulO#EZ`23;?%@?}N)TOhvAU(Nx17)kf@o#j>d28Zzd0(6hXB zpPiSZ=5S}jv%W5yiK^?|TwLWs%fptQ=c(C+(tHCURo{~1mD)NT(yY^T_N%cSa;|(s z+_~TFVn5Yj-X6GG&cIJ-Tc+VLnwbM_%cRsI9EgU?xNCFT(8wHi#2!%^!b^JVrAetD z?P zHu*bAo)?fiwxfGgVBZff_W!5}^0kw~eAK{f68-@UTyqzU2T+GGP4PKN z^1M6QKm`)B#Z-`uLG<__2iH|~%JK1w#xXr?I{ z;of@fm8wm8YM6a{{A5FOd!R$YVR+-bR;=&}#>uz3dQ(LE=(HUk^8w4LYTnBi-O$tiCvs=jEOtuFh#RwgP3&GJ{ZI7lbue+3tLSvZwm z&R-|D@Vg8jx5K%EpP^iJ{cc}9jZXa2^>&gw-u2Z}Z4LoS9>n;q@^nX(#4`fVi0;u# zYbN{ZUCuQVB5$u(;|k-P@Z2a9|I`|SibHTM;v)RLQzi>jRF?f|nb}EI_iq3N-#;NJ z*n#*}|7nyEr`9YL!Jg@`?n>jVmhfc&5?wB2$k&lnb-A`0#ibmPUE6z@Vc^wZ#ihM- z^$?|^parcTWWaU8JI#?tzdD6JTFWu&ILtIK+|Y&LLoat2)AhODahZ1VmR$7gq|Dmh z37Lbc3iBV1bwN=olA7-2Bz=2?)8!?WbEzL{>sjBuuct~19#XO-R(_^ozwYr2T;6y1 zVV|`=DJQ(xxH1PPFIlFj82*44i1_cPuqadC5=}_^q}SKFNZK?G#QEwb@t}CAkH_Qj0DkJw9PI(udGD9+cUOB3^Q7VEZv+46` zm^HVdo3z>mUq+yoRmQ>%P#j;`ihg6Zs>gH`GVGHoIz7(hsb;Sy=%n{_CvY?6{l<MoEK>5H zI0#cAzg!$*#4XQVIoeipaqsJSg(oXpONU^BDU*~Bsbbf4($W{IUP=u-8Gm2zQVn;c z*x}PEnpLNMMa!@UxH#l|RITQMZjIZYT?ySJ3$od{)MJ?fz?}K=72H)AKj>7L?(@5u zok)h0yhLmKTjlR{@!SKZjnZ70?a)fJ7R$zetQC*Fx;cgZ&GF_FjSxBhBPWae_$l;d 
z5r_)8ztwZRG90(zJ5t><*Bks}WD9pW*-YRJ%$|u`zBc7kT9UU7eudYtzy3P; z*d%-Pn1N>xJ)QzQ1!_aM^c$N|@cmA+o@(OtP>RiP1%L+)7Dte^)huusS|#ZjoVn(2Z!#DqT(m5U6v2eK*kopI8@o zh6NnnqC^4}!@SV7LUP`9)?Hg#K7!fa@hx_jjvS7Vo~NCv1yP=Lr=!T>Fg(FY>ozo% zdI8!Q^y)-tL;<$R%s->k> z=s(da<;L~wn-(^Iob_G1;g(@>^0R!xbCYsJ>9rvCYF%AR(s#cgog>6yl28P`;(5g7JdGw*x9ne zHue4)bNyPyJ7ULcc-zn3-m~xC`n_cnn$L!Y`l{0}?;Pu;`4WC2tJeCvn92C9re3$t zEsl;GGeZ1y&mG;ZdHn9x%~u#T4@eci@ww@&5MI(4%vbe#_G{QWatjTtd2;~7rVOO_A)#<-W7 zB9Bw7?&6)6Et9D8$y#4{{etnrx=>ZAi@3x?bN9A1=H1*h>m^MXpipV?U-qa*Op1lE+rnz-y_St=_8HDcRMrxlD9m)e#17K%kT2Hv1PBV z9PMl0Tzc_AuXzWr#bo>s=~X&iTTW(dHjX+R>$UgOE;e=?;f7VXSjW|)R~}{gFnnn@ zNFu&|YOJ2~(!iZ2eC=JsmQ3$gDWBHyHD=CI5<<4+*gn6ojaDE|SU~jEhoj!K?;GEX zahsVmI5*30{~UKa`_zhvJ?@==Q3VDEwQND^8mqMGkCG zI=5si$bBUAx+l--L&-BGVv|>A%{*!GH*efET)%U6_3@0WZ#E4ie~`|K&D?+dV?}a^ zyP@@+=F}X)1P`{b6QNdNT@9SeBzD*W$DxikFMAN%=U(d*lQ{6stY$##VEymnM{50s8 zIgZ;~8$YX?Sg&(w_o~;X>uor%s_vY25~2N6`MyT|4!-F zwej#oMDWr+Yi@d+)dqb*0_W0xO4A+l^gj{7?x`YOE<3}$SlO;sp>jEOrm=;JhrhD<8fVs4K0{two#@s@KAq&Sy`SV^*-N&d)?^e@ z@Q^Wf-p6n(_VQO&g**4RtY$Ji-1j<)HR!?s?)b}hJGGg(#Ti}JD}zv zRNuMDw@-R~Zsh$#Yh^-NjDn^{X!7-)1FMrL^L`xR4IMp{wJk+If$kmG-XvO`yo|Pw zN4D3rUOy6Nym~~j^S!gfC{BF**xZ&0d#gvI0=Dkj6&dcc0!7JXY-4T1>*FIkf_>DO zr8gO*Q|xcg7si>sE7G_z_`*N?LZHls$KU;jz6LQro%Fin%T>J$5y?p0D4yVmfgzubJ%wnV>)5qq7&-z?X zFP*H@qmq7)zWLxPy&R21cMI7m$vu0-c$LO@Wsro<%GsYW5)P>*H%pAS`%8N~2 ztbWgy0_yuuQ$Ji2z0=J7s8BAKD{xEUP8R1TpDlc$$}!uxJg2ptE}yowKfuBDgZBAJ zN888@rPlpVWoaMnZdQBqZr7`K?RoLC5AO9S53+F7ja~}aFV5O#TQRkpiOE5KX4gXZ z))c`l<*M9AJ1I4e7zZsKNK?3Ir(SM8+M8oo!A|Y0X&b7uCgObIME`Ne}<_B`I$w82>>)04OK%oEzE zAId5U>YO5-nvVt#NUi$!A7z|;TT#IVr84UHRJYw?E&27kDyf-F?lW;;Og+m>d-B|L ztkj*wFs_Hm`woOWzi`o2eFxN>F6S31B+v5&Rg16-21wd24)>0^WJ@}Z_srz!#HS_g zIb}?>OGRa{+Cyh)VPR{NPWIw(--t+W70*a+e)jxKv3K^;2!0~2tzolZmi_2xFL7(L z{Y1Bt2TNW3P8OeG4%c_XZ(FYnal2i*LpL|FTG~HO4t!-BLmN&k|!^1ZN6dH+RX|Cz6WlxO; z1x`k0xZeumj@mrr9h<$vfA>@u-erPWCVSn6cDgy%3;BmUPnq>pq`7T7_*C-N zH}}&L=5-35pYk2J={Sx(wQ`oA)c=Pq31OkLYy==+T4!R*x% 
zmUkJCUSzUuwQ!1ayILoa{4MFL>p^k+Smg7%^ld5@k3Md*O_4HJFS~TGxI3*}jq~Dj zmLDgh`977LoAA51`l{y}qZ(c_Z)(1)QzOrA#OaxsI^Eu~+m*37WQO@@X&XP;+!wtT-kwT7Oz&6YvC3pe>3OcDw&>NH6XWKnFlGu+NT znKJp6;f#@!#HIVx>o#m{bb7FP@%&N$D9!7G&t86ed5C_^rsJxa8;Kq61*NX>Jo8qc zS*=!^Q?B*U8ku{Bf4Tc=_t85EG*fK+MmG8Z4IgfLMw{1IpEK94o?e=JbFETaD)wrL z(Tg$7j}wy9?u_I>YU=HgvpM0*&s=Dti)1CgDD47=(4`#9gLM<`J6DS5-R8)_X*L~5RlW@UJ!7wwNlrv-0@|Mi*m^!H0Ofj!y z=E_V!1tWT9f_N9@jZQ~TD_6^^9?=q7;ux3aScXN0AoK$o?wPbN&C$-x@MR|X4x-2-{N9Q--T zmChA}?9+)UJ{PgaQ`iimx05wvA@;gC$!UN6C~d%Zq}FLuunhYURDzWX5!k4|W3LJ^~*W%Q@fTMs@Ddv!K^9Pbbb za=%(93p8!rtU}FNc`5!)Brcx6UgZkatJS`P_1bAzz~Q;?A%=z9T(MKQVuxyVYS)Ia zA~p3Z)~o>SR>T3J_ug%i`?5k~-GggK`0RZ6tHPx{H zU4ETiJZ(&!|5JFUHMH#yIZ*ucEq{em-dNE8z0Zh|D$tT@CQjT+e6vLbBHZI@besV< zZT0how*^UJ3v^-2szMdn$nSr4d-PE5E7l@D3?=gvX{WE*8TOb`=g^yZzISV6Twx6= z|EMZt5l8k3i27N6an~0M?MHC_ibq%nD559x>^<8~yhO9;okBW-x>JPXr>UGqiletN zO49~IS}jB~8F8VJW^V>L>l(^1xyVPLIf&<`OT)ResL7GY>OE!b=GFc$Hu?*OC)o=i^pJJhJ+IU`#fh;5SSAEs9G>GhXZ(O2leScLbQu1cfq>EI>dYlaStxU4 zz^#yrGm#8aiK!@bpJd2rn`@GRv)n)oR?;!$i3xTREvSs3=u&o6nxT3_x>nJ%ZpOKA zw4Q|DShW`hdmTS$>ri)b26JW|hP+r+QthGcxt8NjmhV1(;b>fD3JJT;-tw}z2 z1>_s7wHJRj0?{Fg9o1sbzDZR4+X5~%1nu^cVIi0vv-T3 zogV6^U9>HsJ~uzjrv3QdG@TYP`eZ}fN;odc#m6gqqQ%XtkIW<4TmlCdP>=3?l0uv^ zy(ds6-%R&w5{;F49WZ2J>Zagk4IS3V9RlWP#Y$8Y$9V)$tw)t<5@~Cs23k8OU)rpG z^tB7Nw1K(|^^RE%vR0#dVJC_lLCx)UKJc^E%zOc+e?u=*x2qiHH=wkL5}{HsE$_QC zUDEK%%Ys)m$ECYj>>SBX0TLYE9`Tl2y3O5!-dA|}$k{10Jw|Ord})IV75bZXtWv=m z>L64l+9j^P<6X^;Se};N^}LvF>*Eayt>40R@U6cN)MzN5&Y-&GwJ70kGJnqPvJ{!% z%aE&*v8N!JX;d%aoxss2hqkMU3Sv1=Ra7PF5eT$Y?F5_kN5hw80bBN&zo6UzvQh(Y2p~T@kv)m=%FEJk||^?cE)3x_e`~4QA^} zXef4Y9O8z80nH5uyzK@|kmZXqds}V3<oeo>j&}KNM%Cm>_C<{{CCk;Ln=#7zC}q zfk$26y?J!U)y(Nx!pX`uqGm#U0`DJ>C!jG#p1n8#PAsgUKxT%&^YFyhhMsN+Qpd`T z;K4PoGe>CM?JVB&d@S7bd+|pKZ0Tm!(WHiShNK3`z5PjS~91HSu{*$w)-PJiSsQgNFgk+x=AI%o=wv}cT{rS-6rU%+vj4*^P8K4I~ zh}@&|S+KC~0X)PYdtl=6W>20tT5+h~H9Nb#2e)c^Y(tKtZsMSU*m`PE+DU!9v9iyb z(M9yMa4lEc!ei}qyb!-01{+~l(0!PJN#X{x0jzcM|YR!p+CI+EdCyT8@qeFefw6q``elS*Oh)B 
zzrTC)R-cp|e%_n>^w-_q@6DI`#FO7!{MpS&JQ{+uIgA`&ao-`&-vo`Pwb`b@}g&iz+!fJUqC8!&AvlJN(yJIdFBl zB;1xi&mG)W*H1v7-QM-vTjuG(?Hj(|pN<}1 zzU}Rw-ELkR{5`(yDJ#3(=D4AmAGLrtQ}g;gTYP-`-E(yIxAwU=fl~grn@V7sOZOgs zk8R$CpBwkR?~%aISLll5t!&@NhQ43k4_9cb?LB+@{5JQAv3BjO{CqrjcP7R^o(=*x zb1v=Q4P868AXxYt(+9;sUcB=^53Rx@aBFv8&bP0kRw{G(7 z_Ti&4y**pFc_0Plg~|5t+;puj{yyJZ-$P?R8Sz`$W8pn?=VpEWtE*o7SId6iKY`JY zpP=CX#P)Y>UWsmZFt@dC?E$g~`z!;uZTxJ*A_zyg4!q-`zu%`0brLKeB&c}F2W>d6G%*3ds2)Hi_@{seR*%~j(Z_GS%LWN`0e38 z;e9*Xe7ra&p>~F?U*E^?U&pr}J6}KR$Q%KBGkbPs1&>fIyj_Z_; zVK^8`zGDdB^Wt!~uQ$Ld2QT(r-uFfPl6vg*^i8J9rO<1wvzMc-Yo=S%dgd4EUUNG3 zof&4Td9UFmA4~YEd6HjFT6D)(ifz;B-}2)5KMu9FtaIs>Op;eheu``_T)LO{wkvdF zp1)7gE#p;9DUtCvwQcx?StZF-eG=)CskNI6M1AM++Pm7pwR!6X}kj7Sifut*01IY-@$*9SIv~7T@`FJ6CeN zp4m@abI{yt_$QcXot$IF%)Yt)ohD$;I5o`G)5=`C>mGDKg6OA_s>7T-4;S zfRT_SJTXbt5tb{?%zQgj5B+wHugCzRT7qSB$(wQG7RsxiNjx_5xn40?!pDhbZ{ z>z2g^&3=ld)Sg2o$C8`>nKRx+A#SOREyrSo9XI4Mt+8iS{1^B^0ATeXN>`avzMjKY z@SO{qFSo{wv-;M)S4>9N{EWIUh&a#GXeEe?9v9&${nVT{ToIC8gp6d!PA8dRx>C-6_X*7X5+NE zW!>Rsi1Z$QW}6sRIZjPL@aSvP?~~;@rq~3jht`uO3k5zG!c@d;$XNGDM~9- zCsJry>2eR&kCv%W7Iad^{90w6p)^b2N9cGLA+*ElrQ51}mPyqvjq}2gTov=fC8>D> z3#l%208vshpyw+Xl)+tTs)OKBNM6Y+6E)Kz-RLJiYHZLcLqEO#a*S)s3Njxvb2KQH zTKBM68F_>?6j8!n(lEY~5$$0u<%ap1e#zo}j0?0M9jGylHU+l2PBzFLybuqn|K2E~ zo6^EYBu$Sog>k~vB{m^6tR+o@wY`oBQ=imyW`3oyZS)y9yy5|6`yP1I-0#KXeFfhE ztRc>w@+!!1DE;YY4a#jvMRuOre8^NkoUL(lPYoMD-T(>?zCDA=-cQ|WXaIr#VDQ&WS>3mA&qNw5#RCXKgl(Wtc_Z&8SLWWQOfzq83Cm)=aXutz45 zW|A!(4=q}GQkAprk#e(vJG_*+HBRc94O#^TRYIwh^#kd3V8pqqAL*$+ zfK0blHdE*LNTs%8q|sS5RXzWQo-5NyI$)jZ;dFKI7*-7vbCdGSFysAstaUd0fKy-_ zpA#edlB^-YvGdX&`<=u7T3>$sVC01)5M=sj3^-a8SpPT=^3grD9yWwwe$7%y9pOoYci-IExlUALCw z!L&}T!8NK-^>%Tpu8(hJN>_t|_m zL{Ql;0AIk73il+af-;3a5mFWcnq&cHH8oM4dnEGm9 z>I5LN$(nyOkjQ(k+RY5c>8U#?C@wdEZ_c{-d+rGgISEF;$eo&=8rwaiwv)|49z&Xf zIvAZmVcaF#CQBB!p@Sv;q!fx8r;P1XY7^bIy2NgwF1D4U1he!IdKCuMeR7j2mt)8Z zH7)xGY{434rgQ}wAX^QQH6qr4J~q{O)8a!+h7v(2wiEdkU)3#2iPF4rvYbl3Wt|u{ 
zfT4=+Bq1%p5L7y{FLXnc9E@hebPbQf)@6@?;dDFV2%6A+2L;kqv9bg*a!2jMfqrj8?>iCx*xX9q)8XJ9@e&D~QmD0y@Ddm0 zKqykH7KJT`cnRZ}uKsapB6L9{BA07r32V(E7gCs5F`$4=1^{9wKQox8gPWR^xJbd} zeZ)|Z4K;K?Q_rk)ai;dKPiF8K6(@b51Yp-8`?qVjv|B&-#|UY13CBe2WP~&04h=J? zrS;z+P_xtwt2|pXPoy?jz)#0ff&D@}#a;^o7kPmX@#xgt&w_N%d zZ_KA4y>ZvW?<^hAl>$qu#gdOY|ADwT12qc_3$6j3SNq3ki|=gPZ~V5GJ=?3_Wwl`D z3zv}$_(`aQrZI@ONwfy2lCpRGL@Ir~(yCcxpYVWqN)INPNS z6UJn2m$|S6!#D_Myw4gezj6w@Q@`m%_7(8hK!YSfT*vkkB7HfQ;fQ$x5OCE-gR`nr zQ>-LMNNA=pgc)B0{DX9hvT-15r;N={eu#Q1L}+8K9-&H1l62fx z^5F*Rgp4pQ^^2s^m6`P9doMk1;Jpz5wFCL8 z(^4;H!TWU;|F3-@z?ch85i%Q=?^{!M{#MMIiaCOaCzC|Eln|tp1k~7pMAK^NeHD5$ zjN%uZ&T}MQT_k+7lM>6Bch?|_G84zCO+02TtQThIlzwd>w>XH z9;tIMrncj}4}o2}@&Dc_Y@d3J930N9w$e0DWHllewn&on%-b!XQ$OFV<2kl44rR0*U!h`WB`OxmFw;7E|(S2m>X12vB@PRm9_S``C%b^ zCJl>C=!I7<$zAe*Y)}G4x9XpK$0*MzSN+Z5#GO-|Cd{0Pv(0{llX>Pi6)TI|vj_fu zwqR@wv&fkgu}2rTt7ggi*L|e~D>7w>D9guy1TF}Hg_RV6hC@Wvv7@aDOuQg2y7w-K zBB3@Vv}Q=u>P-))t7heVoi(5tG!oEikF09Kp{?ye44h#$9(sg|~6CKDdZ95@0 zywMcES2CG(lnBWie4qKUlg39(QzX~3dacoZ+zjnla>t_~XZE4?3K~4niULD-Kfsf@ zAtZe_P!z|%+QxpDYYLNPahza1zS`AnT3#37uw|Kpf#%2XNnJ}^WDE@ab6&Q;(Dk#Q z?9S)*W%|LnT^{{&+cw!BXKFn6Ecc1Hab3hnggdaEP2UgCaLftHXEv5KFcR?&jZUUV z_8}T@A#lVkH|;$%pWzS~%yRw=p2mh)>FC`XdW(L?Xl2wA)wx7F&w`&Cdtrnn7p z;0$msasH_GVC<;LH^beqPpX|`*rDhBgs7CD0!t8@Yh{zw1s82sH$FWzW*9l+EZB3c zL5nEnm@Mg(K0vlUCQaav3>IS(6_#qm$!Qocp(&6$Stm5@MneFYp?u|vz`4qJs8|kh z&Qgm}V|%zjfh0G9R6?V@NG(K#8a>n~{vgq_^C5HHiqeAb_TE`!v8P(ISnJv!StE@U zB+y-iYFPzq|F8rL9}m#-LQhu^$Sg6GN;R?3Lxf(^xj20;@DLb=&GEPEHHRUz14UsC zJrhm&oY7_-+BH_OYCey$k0AbUwTQqD3^p39eEqyiO^zavO{Wgb`S3m$_=R0EcE3lu z=i)vM3P(g83yYF-mp;SLpwbxDa^3yFoH|1p5XL`1D3*NUC>Unkxe9d9Qcu(}j8YIb zpBpIR&6}F7l%KhJMs;hSobksLu2 zVlMQf85s-)861yu{4Tj07vXmEbS&a8R22fXHH1doEf{m%Bu$V{DN>%)GHM-I+6>l5 zSt_E8Q{i@Cm_wIZ`Nn!A#>NuATyAh_+)y7%BUKHm+btatMt+oWhnFR2f7k)o_eD6=)o`N8&Y%1X5aA&O!*66eL2d7NA} z#OIQfRt0&mF#Yv-3Rnfo$C;G~fQ(LHp>%)dNpDvBS2d-gBEl7+T{UJRC6F={zsTzH zFOyddL?8gvgoxXO+$44yLBRfL;LKUWi)#n4w}~|8L`&r&O)!;^AS~6*q&4b2uGQn6A(Nk~;a1-lAhJ&^b((^k? 
zUBC(7RCS#ZEBKINK=>mKZu0| z9GHa2P2s$7gg{zr;wRNc@uhD22SNH!X553}0E*gK>?)5Wf!R`PlSgk9H z>yAE8MOh&9suV&Bf-S<#|sjJ4ka+iB!H8Nk2juHU0SE>*LrdD@>Zl$%Vu88{+un zD=KM9n{(=9+PnVSxoMJp2&sCa33QvepnI|vE;t< zp74uXq9iCEMr>Q4W){2;CA~a!s2b$yrXR$Cxtz@r+S@sD=yJ)fTMFlKQbLiPs)hRh zo|j`X5V-K=0Pc>$Du3yjhoe0_lTdHSCPf(>)3wA1y01-4GmsGCgT@8!VpYy7ADuhA zWo|c8cj!7M#==97{karFbg^gxg@9+K?2XVX?1wSv)_$I<+Eb$ysQ z#8Obk2m?oZOT)R{qz3%C0C~5Dbn98aDW~uRu@2R*QAA&S)e$gmH=(jj~9C z^J8jk!`f4ccy1D*2H^Z^CBe-!`st^7X~wubOjI>kD1uStdj--2mSUfn+IitcU~}`X zxGR2aLsFLAY)!8XPO{jsFX!}&VkRleWm_eVaif*-{-h=KZ;Qd*4yx@67o;OfMbX{@ z%&wXK4h;99e6VjVUgC^+EsdiPNyNk{L{a8}3W5Qk=sKV?>GEf>+)`#|WbUPT)hcDz zw}eT`6zR8N(q}{%iM`eN{W=FEz1Y5WXlSt%$dNvRf;Y6G>EwXlVJ_YTE1MI zW66Jptlpn2gGa+HZCHVRx7a2~Nb)Gh{DbX$5~uyc995#$m4=AFlT(}%e7Jq{sNoHz z!2>=ACbw3KJ(ry#0+<*K5bUJXpg@XLs2~m3Q8X+iHHD;L$N?>#0`f{6mUCKkA4B5C)Hf+M{reU;96YH3a&m$11$4J(bEHA=l$4}v zS(&gzBZ6)*>h|={J2(KgDo$7kefJA0!fU`HWik0hG6ZG2S7f3&Sz&1vDFHy7O~~WS zBWuCoxu6}Y81DN_n0q`bTfj#Y9B{_+y|@SZCEFTu1U)%6C?n8vB|4>) z@<7#v3;vf&!o_f6HFDet`!SmP6t2!zCz#NIy_`V_7Rgtv^qrm0-@Y`SBEupB^p&BtkSz+Hpduku**aSd;7l!pGiUlsQFzF%Ev6{LaEXk(VkGIh7!2(YNs;UK$2-~(E2!65 zWn~pBNGGFKYq1l5YRvC0eGCCer#m;)Ilkjt1%LZD=1vy9L! 
z>~C*HtJL}!a<7<4d7zAnk}BflSTTN&Hu{0 z0Q!ZX)a&bKQfbidPb1|CG_|78Z6+Li6)TcrW&@NmjScmMY+*=YLs;!&V1?6yFA5TX z01#u=G8yNRYGA68FdP?^I2rk$9OwUYiI3{1z@4DTN)j3I%4k}{Fjc0h!DMtEvScmf z(*=;VjtNkxK1|?77*Y22PJ*5rHvn+fDQV-Z&`1R_xs;%;Ouyhg(#fcE$)}^pcVtj; z5adTkoT&mSm(oQw6OCrq*wj^1y7?p`y&%*1BJ%`4#YCbanKFB7)l4d1{)xgy`Y z?Q0OSq6!uhBWfIU1eWOy`*)^S>9$}L{~;o6Ay5lBd?WLVZhAZ-My1*c9SZ`1^kpcT zVuMZpbt@Ir?HsNw1jmd`0-?@n1%jF_G^qGbVshKw{^Y}LrM!aY@*o?&b& zPBIOCFp7c$lt>pkH^&>Lt0<`HzS5M(tKc>oN8ij8Stue$FTfXi9^d|5y<FSF&68^if^H+|vi?uW5uvHgj!d9d5}u!nmcUX#(8M z6p5%KTbm234#4h#l&-M?lM5jb)HXIUh5F?Ez{Rqv9Jdu17(^~e3Ch7-kg$b{7!@eB z0As*W^M;GrI5uYw6tE}bb1o=)6RA_k2R94Vu$2d@x@_7ph2Bu<2xE&pitu^Z;nrJ_7so;Qk zVR?T1yc+FKCe!;XAp;8Nv9^!=cxm zEg>FLl)V+5fI2wdbb3~i@~)aK*)}Trs7wON3Be~ploh0uJq3qlk4d6GBHPpt1tA*M zV)ExCu^H9?UL7wgXpu!u+bAWbaQhs0?KEMo&~XjoOSYIoZ|q>mVT>ND{^gEBko%G zMOOxO8uECF_*$*%lW-$3|I(b$;CYYWE*aui_|L{P`JJqgzgC(mAr6|n{aVXfX# zv82Qok^}a!*3JNUyst|=ptk!#g9@9YKdpiD>X-E>%4e4Gcru-G%LxvWk>y*Ik3JyQfmq&(}CY2rq#*EXko@`_0 zCLuB@lz>!>3;a{NTZbXIl2Q_7%5%WbUs(p}hfZ7n1xoQR2a4B<7(%S^ZuLv0+;E+} z93KKpDP-p^+DxVn-vOK!X%Kf}K*mt5#+F#+JHiD%=_C}Jh*Vo1W+fl^VrU~`oyADh z>sk@j3C=YVP!=W6gC&8O;3HhA{r6QQ)jpJt5%qM3VU~Fbu_g^*-KeJK z9%>NyDmnNjBZinncLpVs8kBtX^P1ovqEL{PDC`Y{qP*jdDBFI7I%D@o*iFrt@=T6Z zLp`R&MAE1y9}$IF)8L@SiH(&Yrd`p-t6<9eF7vp$C_K@mz6R&ERR>q;M52u)@t>N{ zqZdc(px?YoVib#q{#EP^_-Y8KuA8!5!R0BU^)lCQU#9o~YPyGB=uj(08di%nv{DbrFl+lxG8Gf;VeM*S_Y{@$2&(66#rO^xA%Kwq zN(`Jbg&5N^@}pRb^}><;m>87D;bLX?^<1U5+bV5Hc|N#*y;nILuHLRj^gK#?u!m*b z8BV$ZKkEKW$`1I$aku8x3khWz`o(791J!S+x$eZOL`C&&+%Hq0NGT6aR;{l4+d8ib ztHO>U4UhHcya)$EBrt*WAVI~I*(}5bn6~GvyHalr zEl$dztfmyGtLCCv>oXA{puXpy%jY(AB?)-<)6MT94lw{yM=x<{J(^ z5cAWErcB)XD>}I4z%Ug<@FO_f;NB^*%8pUNPSemon5^Xcuu*i|sIO3}DhY1%ZyiH| zl}!-yprZRJ?G{U@fTTCJ4n)pS;?E4@G4k+hr&!wyv!G}i(YCsd^k2aet}53A-OGc> zZse9h2p30L7q;Q)4>jE zBtBPn;Xxjlf-VXHT>44$BdLA-;Cz%2Y$9MkQ-n#7d%lRFiu82laBz9y38l1f%Oh)K zHwvbPQu_LpII!~XN7x1&jI)r)Fok=%RBaa4*|0*1z)Q?K(~h1x@IT-O$A}AWs5OhN z4<-zBZtiD#dLgbHNs3xaxcVegnumCP!t}2p(ad>03B1vTq0tC!)H%F5+91 
zbApLXOTsbi=EcqU&^aDTQ9hv`i!osfg_kY=G(#+TDr`#*$=9L;X2ua%PfDff_>>Z- zXs$V44p(V4!i8*lUL)w%6S^8wxQ8c-C8_-&-dhWq;3D64Sh`wgNQ@5vv2+q`&o``}9AvCdsJ<1G#51JJ8~9o1Rt>dJwq&Y`NKx7(8e zEC|qEour{8Oj-)~4O2S=a8~+NhY6M~mE%xE58HbBRq7)}^h-sFw?Q7rDu$XggbQMV z3Y^`w6qKXqhnl68n*=HnvgyHxPXPn7*65o6KX-1H5b%_y90)ROi!Q`e`Ie9$7;`HK zDdtscSceM=6gpSTB{6Sjmv96u{9k7aeiD2hTZ0)zVEo!0?fI3sC7KfuQtQk=G~ecW z0r5fOJ5^@g#Yb!&@?2EYhG)VH%HpYxq!-?2kDeMKE1eQ#n^C@`XrxIfV1JF4?{9y{ z%{plxJusR_I#s_Dwvc`BQ%H=2Lt|z+~2k+8F3-AAAL~_88>{LYq;Q0i4qia_YrIDf@lp*4hY=9 zC1#jo)K0~(ZA__t4KoF|AJ2yvf>f6uGtU(nJy1RtV5fKHMY#`dvskdw0RWx4RQekL zGrWwW)7&e#W##AQz!s7;uH!aG;lUta%|Z@VMQg;*TRe-)=sb0`kza%O-R*ZM5CGG; zFFo+FlDoYM24H04ibP%k^!BaU?$E>)?+G5T#5#x!W_dPs`iG}^_vtd(9X;mP2X;gV zm360iIpId*7}&Co6)}vhZSU06xnGq(c#W4)C52|VFg~|2~U>*WQw3usKQhk z7HwFQhEsdBn*!6@P66RfI;WF-aJ=&nRw#Z4D5xCb4XSESWwgzSgw>cv}nNe$Kv zEGYOhH#I=#o=J`Z0=*?MEW85ylIowVdq77?2;Zsf)cOqT#uPXM?9ZcP8{rHh-qm!{ z+&aY!|G^K!MxGpsDi{j-%q-hS#x-sRY1}Eun3#=c8QbJ)AAxi#t9cFV> zgx5`;m;+1N0DviR@}cA+Q-+sBrZ$Ny_WUOh+^Lfen{o0J@+y=@Z>rXTb{ceFS@$mo$cbA6kvajs;ZuWM5J$tYI9bN|8W5;&cKJSn?vZKDx5mnF7BIoA$`=+UM}wzRHi6e!aPDFh76%yuX%X z|Hhx$w|T2QzaHFEC@;@kHIXh(x-Q?mPrLoR-oL+Jt^m_kr`u0=qUM{Z(?|X0db?@@0 zBh|KtrkH>KlSR?U-__aw_x|DKW-EN>=JjRg>FaBUc5jMvl9bQi$LI6n_rE->|I=K; zZObG*@K0te^v`-i{NHlp|3A~VzLTlV|FLfWm(7#zKkJbAfv7dJDBs@AIHvt4Ck}HbY}sEtmDpx*i8RZdK%vLR=oSt4-kY?2 zf{h%P+bEEe5V_dk)8|_AJN$J zHC0x&kNV&(^m46tG{?T`!Cbsbnm^ZZzW6+L#eK69k8>fCoCyYeMG~CjqBoBLMuJ!F zuX)zKa3E#2(^F5ckpFS<{?7;?57QtI{y&KT(|`On|Gx-W4KW~y+=c!PUiT*94p)dG z)t4e%Bl`vr)!Qn1m9l6j{rEax4-twRGTvp-_18pyi4HX;g08g_APi*@$)fSeUcv-|&=1QJ8KLV*7g008L!CJFw_zyDt$s7Vx*A7X$By_5JGPT`(pCjn7d z+6Ax(r(=+#xY;S&vK)Dc(&ir@9b&LVLWe`UmA~h$SNmp+aF;M4&f-)^MG3IA?uirQ zv%{yiBPBRuWG*bi@CZP%_Hr_Ei5W!OEiy}vBqPYQn3{CkQnor^Avppvp38T-fJXOJ zkZkMj2GCPeg`MlqY~nkNl07@#eUYYQ10PBMI}_)7r~N1Xh6N~%ncQU(5BeCLA`}*> z*cPt3;e@;>%_qp=Cz>kZ~jhUztO|7!mK)uTl*-Z`)L&!g3?3;;m>-@lQ=Ke@h(sgts) zi_3p>`}F@~%}8ftX?L9=8^5K35++}Us-PCZDxtbo%W`9~d2vnV$dpVKDSMdGB#=t( 
z$h6AQcEVv%rn=F1g%k;EGzAu!-=BsL_-{I0q_6vZp7+tj*!o9CTtb2z4Y~XE)mz@z z_LCzoZIxoC3|G*%`Px#(ZgQ}f|qJn8~@K$jqIvd663N@e$9F(4bxGpxyTB&V{^~&V;M()G3Q<2+$ zZh9Ol^%`%aqOt7eoUfn8OCLQ_LWXdejOSpH{&|f9Xqrvi;jGwFh52K!4d+V#4g3~t zG|^mk5-;Re+rhK5es{Ps9dx-@pH0%FU;RR#PM75 z=l$~Rj!XadLHJ`#dt}c4>nEESdhPD;n8pKpt_(j|=yEY5Y&;T>$=ExIkoPZZLE`0W z(yc_yjX1+YxabaqKdfyztI_KR}Zx9<@kftYh=w=f3Z&sgZBQ8D^5AR z(vLGbZ*9ElvZ%l#(yPQX_bZCJp_-Cw`FmcGu+@d}uN2wynOP%LL1eu1CC-^SA73Lj z8=fi6+`kkv#i>=cQc>E=$%6H zCkR&B5#(*VwWl}hnlqgn>B1h1|4z;W(#DMftKP|xJxruIhb_L;!+E=m%6XSp+Xbt+ zG@3o3GTl_u?sx7NJ(|=8<;}J5*XI^f4Kw-LI%ZT>M3zOd9?xI3ECWrPo8I`Pa|qkVH37~W%*!|lf2uA%+6(-B=kzAyowQLYz0{e~9f=;h zoH1FNdG+%OsnoSJ-G8B^^uM#~reDkIMl)}Sd1J%j?(XV}vejufN@aZBnv7JETIQCy z?3yls|E!i!TmB&xn}hrLYWdI}tg^}|AGcq9LXtWn|6&SBPf%Z$wM~2hn5li%=Eo1giw9&32`hixLtIK2?+mTbbNmr{69w9thdI$h zsgNeaowKn{40cGpSz?dD)jm>gcR$S5KZ|#LIu~jbXg}Y4mA-wd6u4h#Hb(53k6+AfZvtKbYYwW_msc-)c=a>J2bIG?bkBKknW|x?>=*n-T0GfLB z`d_fY+_LWwQ~rWJD0)luG2$vh|5u0WMO_{K;asFTsqr$VyzP;WxNWbHYTWJrz!}zE z^O5lOdV$))24sPu)&nrLt4;DoYm9@Z#BP7YZcsf9xx;bH`#`BfM7I5o^Hua%$u*{18FwP}CAPsdGl$bc+$a#Ll_{`0TuVs)y0QF!Gx0 z1)l2Q{qk5UCx!yKE72ZwT3a+Qf5->33cFzaPdg<=Ull{=Wy=z|e1Z z`%|j-Y@=rXA@f4Q?09K_(&aw;$iYFqu~f>s^kxynTKO|Z>c)UC2mBt0v2Tp&1;V4Y zM{nq!$ck;k+^Y`%~)R4t>8qXXZ}iuoeIn3tE?TFH%0$q|4X z$FlnBa$}o=iRLcUnKp}jbm$7BK1f9tp+5B#4mBru3bT}_+FbS+24W9hHM1?61xbT- z;+pNah)cu@EJ1tZjEt^0P<$VjdVV5b);g`jjaMAKx24ziibeKVRjJdyMIis_$PN{j zx}lolYdJ@Lq0p7hv5iLf?lDywA`4<5pYHtgq&m6T(5UmtPW7(lzcC87S4o>BqfH%Y zh*iXszBNsuJcpluBuDl+IhMw_GFG=NF8iy{C$Uyt;8$FcZB>W8fx0+0%Xm8^vL*E^ zU>*dIgLd1D_y?}V&q?sO=Kqjgc6oC0Q*U(EAo|e-%k+i%T<-4}ExPH>T5VnXzHhO##J)x>)pbS=If>M8LCNLiP@}=l-PpGw|J_fY4uGHE4w;c0^RdDb9LTbJEpvKTA(0} z&?G%$wX|8ka@yJvH!!_)&O3gp{t~VBNk{LdSC(q*b>X>3LC74TiZGwU7c@kcrjr== z8bi1iMw@z6>2`AY=(ffe{ni!Na<=r<9q@A*9uj?VS2-DGXWw4O&)g{Otf|!aC$6G+ zxPN$HK(5Gd=(hFSX1x3{W6S4*F4S%uC#>1YfJ$a4smPWy=i1isJauKm+IKfEosj-7 zr{&KXAA2lx#}N*E3TvVn7Q6;ZYXz~dEud9d+1aYWxZhj?qe_{XAPem8Ct9S-`%S=O 
zGY6~_(*ulK!(~-V{8We+xT9-+(${}RVj^^m5)>U6(6WF-Z@GNa#>uMP@hQGYhf>ft{2Kz=c>$xipM!wsc%ve6wFW#FZ(q4fzLc`X_ zyU_{s;#!0a-=Q8t4&5q#L+rd;$s+ET#ZyvA2c15})=UJ?DvnC!7IsW-%v7a)dbF6{ zovc)Xq>0@c5k@Po2W$K~wSW%O*p43dB(qzp5CJvmUOu=yKP=IoiTv4P25}mrzl|T8^~u zi5%Bl;^BdJ5PBZzshqtv%I9X>g|_Z(RStNGP0%ASr5p6ufP>MYm!623vN5aWJD82J z$)4(G`JD@bo!JXWbRd>DYcY;uv!?Z1#^TxeEuM{f#ei;@l?RZ>W+6aC*XbkmE7rCt=`pY-GP7-57bG$&VO!bi&aR0Zw{oKaMWzBZ&) zag|Er#(-=hw0k3$ z*fz4dsyM6Zu->;RYC4=KeL>XhaCyRo)iyl{n9%zd2K|1lCx-l3E);8Es z1@EtcvXl-m6kL+Cp|!qW17#KHVCa_x9vcaPv=E?>|0wss;a_gZyThHY?h60BJ)58Z V23YJ6hzRi7+6RFsT?29m Date: Thu, 28 Sep 2023 16:58:26 +0300 Subject: [PATCH 083/270] Data endpoint enhancements (#2477) * add endpoint /api/v1/forms//regenerate-submission-metadata add endpoint force json update for all submissions under current form * add async task to regenerate json for form instances refactor the /api/v1/forms//regenerate-submission-metadata endpoint to only trigger async task if task has failed or does not exist in cache * update method docstring * update method docstring * update docstring * update docstring * update doc strings * update code comment * update doc string * update comment * refactor code * update doc string * update doc string * update docstring * update doc string * update test case * update documentation * format documentation * fix pagination link header missing in /api/v1/data/ when sorting when sort query parameter is applied with pagination, the pagination link header was missing * update documentation add documentation for endpoint /api/v1/forms/{pk}/regenerate-submission-metadata * update documentation * fix wrongly formatted link in docs * fix linting error fix error pycodestyle: E501 / line too long (90 > 88 characters) (col 89) * fix failing tests * fix linting error fix unused argument self * re-revert 0 float value converted to int accidentially during save * readd decimal precision accidentally removed during save * fix failing tests * fix failing tests * fix failing 
tests * fix failing test * fix failing tests * address lint error address unused-argument, wrong-import-order * fix bug argument after * must be an iterable * fix Instance model json _date_modified, _submission_time out of sync Instance model date_modified and date_created fiels were out of sync with their aliases in the json field * fix typo * fix failing test case * fix failing tests * refactor code and fix failing tests * remove redundant method call * refactor code * address lint error address invalid-name, missing-function-docstring * update docstring * refactor code * refactor code * fix failing tests * fix typo and refactor code * remove json getter in model Instance there is already a json field present that already has the result the getter is recalculating * readd getter method removed * use helper method to read async task state * refactor code * do not set json when regenerating json asynchronously json is set in the post_save signal so setting it explicitly is unnecessary * refactor code * update comment * fix bug generator object has no attribute count bug appears when pagination is used with sort and query query paramaters * convert endpoint /api/v1/forms//regenerate-submission-metadata into command convert endpoint into Django custom command * handle edge case in regenerate_form_instance_json async task ensure we do not regenerate instance json if instance json has already been regenerated * address lint error consider-using-f-string / Formatting a regular string which could be a f-string * address lint error address raise-missing-from, consider-using-f-string update docstring * address lint errors * address lint error * address lint errors * fix incorrect lint error suppression * suppress lint error --- docs/data.rst | 7 +- docs/forms.rst | 2 +- onadata/apps/api/tasks.py | 37 +++ onadata/apps/api/tests/fixtures/osm/osm.csv | 2 +- onadata/apps/api/tests/test_tasks.py | 73 ++++- .../api/tests/viewsets/test_data_viewset.py | 130 +++++--- 
.../tests/viewsets/test_dataview_viewset.py | 291 +++++++++--------- .../api/tests/viewsets/test_stats_viewset.py | 9 +- .../api/tests/viewsets/test_xform_viewset.py | 55 ++-- onadata/apps/api/viewsets/data_viewset.py | 20 +- onadata/apps/api/viewsets/xform_viewset.py | 5 +- .../commands/recover_deleted_attachments.py | 2 +- .../commands/regenerate_instance_json.py | 78 +++++ .../management/commands/tests/__init__.py | 0 .../tests/test_regenerate_instance_json.py | 122 ++++++++ .../migrations/0001_pre-django-3-upgrade.py | 10 +- .../migrations/0009_auto_20230914_0927.py | 17 + .../migrations/0010_auto_20230921_0346.py | 27 ++ .../migrations/0062_auto_20210202_0248.py | 3 +- onadata/apps/logger/models/instance.py | 108 +++---- onadata/apps/logger/models/xform.py | 6 +- .../apps/logger/tests/models/test_instance.py | 65 ++-- .../main/tests/fixtures/csv_export/export.csv | 2 +- .../csv_export/tutorial_w_repeats.csv | 2 +- .../tutorial_w_repeats_truncate_titles.csv | 2 +- .../transportation/transportation.csv | 10 +- .../userone/userone_with_dot_name_fields.csv | 2 +- onadata/apps/main/tests/test_process.py | 15 +- onadata/apps/viewer/tasks.py | 1 - .../viewer/tests/fixtures/transportation.csv | 2 +- .../fixtures/transportation_without_na.csv | 2 +- onadata/libs/renderers/renderers.py | 17 +- onadata/libs/tests/data/test_tools.py | 8 +- .../nested_repeats/nested_repeats.csv | 4 +- onadata/libs/tests/utils/test_logger_tools.py | 6 + onadata/libs/utils/cache_tools.py | 4 + 36 files changed, 780 insertions(+), 366 deletions(-) create mode 100644 onadata/apps/logger/management/commands/regenerate_instance_json.py create mode 100644 onadata/apps/logger/management/commands/tests/__init__.py create mode 100644 onadata/apps/logger/management/commands/tests/test_regenerate_instance_json.py create mode 100644 onadata/apps/logger/migrations/0009_auto_20230914_0927.py create mode 100644 onadata/apps/logger/migrations/0010_auto_20230921_0346.py diff --git a/docs/data.rst 
b/docs/data.rst index 851e351b57..9dcedd8860 100644 --- a/docs/data.rst +++ b/docs/data.rst @@ -570,7 +570,12 @@ Response Query submitted data of a specific form ---------------------------------------- -Use the `query` or `data` parameter to pass in a JSON key/value query. +Use the `query` or `data` parameter to pass in a JSON key/value query. + +When quering a date time field whose value is in ISO format such as ``2020-12-18T09:36:19.767455+00:00``, it is important to ensure the ``+`` (plus) is encoded to ``%2b``. + +``+`` without encoding is parsed as whitespace. So ``2020-12-18T09:36:19.767455+00:00`` should be converted to ``2020-12-18T09:36:19.767455%2b00:00``. + Example I ^^^^^^^^^ diff --git a/docs/forms.rst b/docs/forms.rst index 5c4184b859..065afa063f 100644 --- a/docs/forms.rst +++ b/docs/forms.rst @@ -1627,4 +1627,4 @@ If the upload is still running: HTTP 202 Accepted { "job_status": "PENDING" - } + } \ No newline at end of file diff --git a/onadata/apps/api/tasks.py b/onadata/apps/api/tasks.py index 268cbe1931..09a751ae26 100644 --- a/onadata/apps/api/tasks.py +++ b/onadata/apps/api/tasks.py @@ -18,10 +18,15 @@ from onadata.apps.api import tools from onadata.libs.utils.email import send_generic_email from onadata.libs.utils.model_tools import queryset_iterator +from onadata.libs.utils.cache_tools import ( + safe_delete, + XFORM_REGENERATE_INSTANCE_JSON_TASK, +) from onadata.apps.logger.models import Instance, ProjectInvitation, XForm from onadata.libs.utils.email import ProjectInvitationEmail from onadata.celeryapp import app + User = get_user_model() @@ -145,3 +150,35 @@ def send_project_invitation_email_async( else: email = ProjectInvitationEmail(invitation, url) email.send() + + +@app.task(track_started=True) +def regenerate_form_instance_json(xform_id: int): + """Regenerate a form's instances json + + Json data recreated afresh and any existing json data is overriden + """ + try: + xform: XForm = XForm.objects.get(pk=xform_id) + except 
XForm.DoesNotExist as err: + logging.exception(err) + + else: + if not xform.is_instance_json_regenerated: + instances = xform.instances.filter(deleted_at__isnull=True) + + for instance in queryset_iterator(instances): + # We do not want to trigger Model.save or any signal + # Queryset.update is a workaround to achieve this. + # Instance.save and the post/pre signals may contain + # some side-effects which we are not interested in e.g + # updating date_modified which we do not want + Instance.objects.filter(pk=instance.pk).update( + json=instance.get_full_dict() + ) + + xform.is_instance_json_regenerated = True + xform.save() + # Clear cache used to store the task id from the AsyncResult + cache_key = f"{XFORM_REGENERATE_INSTANCE_JSON_TASK}{xform_id}" + safe_delete(cache_key) diff --git a/onadata/apps/api/tests/fixtures/osm/osm.csv b/onadata/apps/api/tests/fixtures/osm/osm.csv index 4207557d14..f064b89130 100644 --- a/onadata/apps/api/tests/fixtures/osm/osm.csv +++ b/onadata/apps/api/tests/fixtures/osm/osm.csv @@ -1,2 +1,2 @@ photo,osm_road,osm_building,fav_color,form_completed,meta/instanceID,_uuid,_submission_time,_tags,_notes,_version,_duration,_submitted_by,_total_media,_media_count,_media_all_received,osm_road:ctr:lat,osm_road:ctr:lon,osm_road:highway,osm_road:lanes,osm_road:name,osm_road:way:id,osm_building:building,osm_building:building:levels,osm_building:ctr:lat,osm_building:ctr:lon,osm_building:name,osm_building:way:id -1424308569120.jpg,OSMWay234134797.osm,OSMWay34298972.osm,red,2015-02-19T04:18:21.427+03,uuid:d3ef929e-e3e7-456c-9f27-7679c0074f4f,d3ef929e-e3e7-456c-9f27-7679c0074f4f,2013-02-18T15:54:01,,,201511091147,,bob,3,2,False,23.708174238006087,90.40946505581161,tertiary,2,Patuatuli Road,234134797,yes,4,23.707316084046038,90.40849938337506,kol,34298972 
+1424308569120.jpg,OSMWay234134797.osm,OSMWay34298972.osm,red,2015-02-19T04:18:21.427+03,uuid:d3ef929e-e3e7-456c-9f27-7679c0074f4f,d3ef929e-e3e7-456c-9f27-7679c0074f4f,2013-02-18T15:54:01+00:00,,,201511091147,,bob,3,2,False,23.708174238006087,90.40946505581161,tertiary,2,Patuatuli Road,234134797,yes,4,23.707316084046038,90.40849938337506,kol,34298972 diff --git a/onadata/apps/api/tests/test_tasks.py b/onadata/apps/api/tests/test_tasks.py index d1a1a3e648..868f0dc4b4 100644 --- a/onadata/apps/api/tests/test_tasks.py +++ b/onadata/apps/api/tests/test_tasks.py @@ -1,12 +1,16 @@ """Tests for module onadata.apps.api.tasks""" +import sys from unittest.mock import patch +from django.core.cache import cache + from onadata.apps.main.tests.test_base import TestBase from onadata.apps.api.tasks import ( send_project_invitation_email_async, + regenerate_form_instance_json, ) -from onadata.apps.logger.models import ProjectInvitation +from onadata.apps.logger.models import ProjectInvitation, Instance from onadata.libs.utils.user_auth import get_user_default_project from onadata.libs.utils.email import ProjectInvitationEmail @@ -30,3 +34,70 @@ def test_sends_email(self, mock_send): url = "https://example.com/register" send_project_invitation_email_async(self.invitation.id, url) mock_send.assert_called_once() + + +class RegenerateFormInstanceJsonTestCase(TestBase): + """Tests for regenerate_form_instance_json""" + + def test_regenerates_instances_json(self): + """Regenerates instances json""" + + def mock_get_full_dict(self): # pylint: disable=unused-argument + return {} + + with patch.object(Instance, "get_full_dict", mock_get_full_dict): + self._publish_transportation_form_and_submit_instance() + + cache_key = f"xfm-regenerate_instance_json_task-{self.xform.pk}" + cache.set(cache_key, "foo") + instance = self.xform.instances.first() + self.assertFalse(instance.json) + self.assertFalse(self.xform.is_instance_json_regenerated) + regenerate_form_instance_json.delay(self.xform.pk) + 
instance.refresh_from_db() + self.assertTrue(instance.json) + self.xform.refresh_from_db() + self.assertTrue(self.xform.is_instance_json_regenerated) + # task_id stored in cache should be deleted + self.assertIsNone(cache.get(cache_key)) + + def test_json_overriden(self): + """Existing json is overriden""" + + def mock_get_full_dict(self): # pylint: disable=unused-argument + return {"foo": "bar"} + + with patch.object(Instance, "get_full_dict", mock_get_full_dict): + self._publish_transportation_form_and_submit_instance() + + instance = self.xform.instances.first() + self.assertEqual(instance.json.get("foo"), "bar") + regenerate_form_instance_json.delay(self.xform.pk) + instance.refresh_from_db() + self.assertFalse("foo" in instance.json) + + @patch("logging.exception") + def test_form_id_invalid(self, mock_log_exception): + """An invalid xform_id is handled""" + + regenerate_form_instance_json.delay(sys.maxsize) + + mock_log_exception.assert_called_once() + + def test_already_generated(self): + """Regeneration fails for a form whose regeneration has already been done""" + + def mock_get_full_dict(self): # pylint: disable=unused-argument + return {} + + with patch.object(Instance, "get_full_dict", mock_get_full_dict): + self._publish_transportation_form_and_submit_instance() + + self.xform.is_instance_json_regenerated = True + self.xform.save() + instance = self.xform.instances.first() + self.assertFalse(instance.json) + self.assertTrue(self.xform.is_instance_json_regenerated) + regenerate_form_instance_json.delay(self.xform.pk) + instance.refresh_from_db() + self.assertFalse(instance.json) diff --git a/onadata/apps/api/tests/viewsets/test_data_viewset.py b/onadata/apps/api/tests/viewsets/test_data_viewset.py index 67ed2f53a6..dcdc50a310 100644 --- a/onadata/apps/api/tests/viewsets/test_data_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_data_viewset.py @@ -8,6 +8,7 @@ import json import logging import os +import pytz import csv from io import StringIO from 
builtins import open @@ -29,7 +30,7 @@ from django_digest.test import DigestAuth from flaky import flaky from httmock import HTTMock, urlmatch -from mock import patch +from mock import patch, Mock from onadata.apps.api.tests.viewsets.test_abstract_viewset import ( TestAbstractViewSet, @@ -606,7 +607,7 @@ def test_data_pagination(self): self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 4) - # Query param returns correct pagination headers + # Pagination works with "query" query parameter request = self.factory.get( "/", data={"page_size": "1", "query": "ambulance"}, **self.extra ) @@ -616,6 +617,48 @@ def test_data_pagination(self): self.assertEqual( response["Link"], ('; rel="next"') ) + self.assertEqual(len(response.data), 1) + # Pagination works with "sort" query parametr + instances = self.xform.instances.all().order_by("-date_modified") + self.assertEqual(instances.count(), 4) + request = self.factory.get( + "/", + data={"page": "1", "page_size": "2", "sort": '{"date_modified":-1}'}, + **self.extra, + ) + response = view(request, pk=formid) + self.assertEqual(response.status_code, 200) + self.assertIn("Link", response) + self.assertEqual( + response["Link"], ('; rel="next"') + ) + self.assertEqual(len(response.data), 2) + self.assertEqual(response.data[0]["_id"], instances[0].pk) + # Pagination works with multiple query params + instances = ( + self.xform.instances.all() + .order_by("-date_modified") + .extra(where=["json::text ~* cast(%s as text)"], params=["ambulance"]) + ) + self.assertEqual(instances.count(), 2) + request = self.factory.get( + "/", + data={ + "page": "1", + "page_size": "1", + "sort": '{"date_modified":-1}', + "query": "ambulance", + }, + **self.extra, + ) + response = view(request, pk=formid) + self.assertEqual(response.status_code, 200) + self.assertIn("Link", response) + self.assertEqual( + response["Link"], ('; rel="next"') + ) + self.assertEqual(len(response.data), 1) + 
self.assertEqual(response.data[0]["_id"], instances[0].pk) def test_sort_query_param_with_invalid_values(self): self._make_submissions() @@ -1070,27 +1113,27 @@ def test_filter_by_date_modified(self): self._make_submissions() view = DataViewSet.as_view({"get": "list"}) request = self.factory.get("/", **self.extra) - formid = self.xform.pk - instance = self.xform.instances.all().order_by("pk")[0] - response = view(request, pk=formid) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(response.data), 4) - - instance = self.xform.instances.all().order_by("-date_created")[0] + instances = self.xform.instances.all().order_by("pk") + self.assertEqual(len(instances), 4) + instance = instances[2] date_modified = instance.date_modified.isoformat() - - query_str = '{"_date_modified": {"$gte": "%s"},' ' "_submitted_by": "%s"}' % ( - date_modified, - "bob", - ) + # greater than or equal to + query_str = '{"_date_modified": {"$gte": "%s"}}' % date_modified data = {"query": query_str} request = self.factory.get("/", data=data, **self.extra) - response = view(request, pk=formid) + response = view(request, pk=self.xform.pk) self.assertEqual(response.status_code, 200) - expected_count = self.xform.instances.filter( - date_modified__gte=date_modified - ).count() - self.assertEqual(len(response.data), expected_count) + self.assertEqual(len(response.data), 2) + self.assertEqual(response.data[0]["_id"], instances[2].pk) + self.assertEqual(response.data[1]["_id"], instances[3].pk) + # greater than + query_str = '{"_date_modified": {"$gt": "%s"}}' % date_modified + data = {"query": query_str} + request = self.factory.get("/", data=data, **self.extra) + response = view(request, pk=self.xform.pk) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 1) + self.assertEqual(response.data[0]["_id"], instances[3].pk) def test_filter_by_submission_time_and_submitted_by_with_data_arg(self): self._make_submissions() @@ -2339,7 +2382,8 @@ def 
test_geotraces_in_repeats(self): | | end repeat | """ self.xform = self._publish_markdown( - md, self.user, self.project, id_string="geotraces") + md, self.user, self.project, id_string="geotraces" + ) # publish submissions self._publish_submit_geoms_in_repeats("Geotraces") view = DataViewSet.as_view({"get": "list"}) @@ -2400,7 +2444,8 @@ def test_geoshapes_in_repeats(self): | | end repeat | """ self.xform = self._publish_markdown( - md, self.user, self.project, id_string="geoshapes") + md, self.user, self.project, id_string="geoshapes" + ) # publish submissions self._publish_submit_geoms_in_repeats("Geoshapes") view = DataViewSet.as_view({"get": "list"}) @@ -3194,7 +3239,7 @@ def test_floip_format(self): floip_list = json.loads(response.content) self.assertTrue(isinstance(floip_list, list)) floip_row = [x for x in floip_list if x[-2] == "none"][0] - self.assertEqual(floip_row[0], response.data[0]["_submission_time"] + "+00:00") + self.assertEqual(floip_row[0], response.data[0]["_submission_time"]) self.assertEqual(floip_row[2], "bob") self.assertEqual(floip_row[3], response.data[0]["_uuid"]) self.assertEqual( @@ -3361,24 +3406,27 @@ def test_data_list_xml_format(self): """Test DataViewSet list XML""" # create submission media_file = "1335783522563.jpg" - self._make_submission_w_attachment( - os.path.join( - self.this_directory, - "fixtures", - "transportation", - "instances", - "transport_2011-07-25_19-05-49_2", - "transport_2011-07-25_19-05-49_2.xml", - ), - os.path.join( - self.this_directory, - "fixtures", - "transportation", - "instances", - "transport_2011-07-25_19-05-49_2", - media_file, - ), - ) + mocked_now = datetime.datetime(2023, 9, 20, 12, 49, 0, tzinfo=pytz.utc) + + with patch("django.utils.timezone.now", Mock(return_value=mocked_now)): + self._make_submission_w_attachment( + os.path.join( + self.this_directory, + "fixtures", + "transportation", + "instances", + "transport_2011-07-25_19-05-49_2", + "transport_2011-07-25_19-05-49_2.xml", + ), + 
os.path.join( + self.this_directory, + "fixtures", + "transportation", + "instances", + "transport_2011-07-25_19-05-49_2", + media_file, + ), + ) view = DataViewSet.as_view({"get": "list"}) request = self.factory.get("/", **self.extra) @@ -3394,7 +3442,7 @@ def test_data_list_xml_format(self): returned_xml = response.content.decode("utf-8") server_time = ET.fromstring(returned_xml).attrib.get("serverTime") edited = instance.last_edited is not None - submission_time = instance.date_created.strftime(MONGO_STRFTIME) + submission_time = instance.date_created.isoformat() attachment = instance.attachments.first() expected_xml = ( '\n' diff --git a/onadata/apps/api/tests/viewsets/test_dataview_viewset.py b/onadata/apps/api/tests/viewsets/test_dataview_viewset.py index 995db9d828..35d9f20f29 100644 --- a/onadata/apps/api/tests/viewsets/test_dataview_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_dataview_viewset.py @@ -25,7 +25,7 @@ filter_to_field_lookup, get_field_lookup, get_filter_kwargs, - apply_filters + apply_filters, ) from onadata.apps.api.viewsets.note_viewset import NoteViewSet from onadata.libs.serializers.xform_serializer import XFormSerializer @@ -79,51 +79,36 @@ def test_create_dataview(self): self._create_dataview() def test_filter_to_field_lookup(self): - self.assertEqual( - filter_to_field_lookup("="), "__iexact" - ) - self.assertEqual( - filter_to_field_lookup("<"), "__lt" - ) - self.assertEqual( - filter_to_field_lookup(">"), "__gt" - ) + self.assertEqual(filter_to_field_lookup("="), "__iexact") + self.assertEqual(filter_to_field_lookup("<"), "__lt") + self.assertEqual(filter_to_field_lookup(">"), "__gt") def test_get_field_lookup(self): - self.assertEqual( - get_field_lookup("q1", "="), "json__q1__iexact" - ) - self.assertEqual( - get_field_lookup("q1", "<"), "json__q1__lt" - ) - self.assertEqual( - get_field_lookup("q1", ">"), "json__q1__gt" - ) + self.assertEqual(get_field_lookup("q1", "="), "json__q1__iexact") + 
self.assertEqual(get_field_lookup("q1", "<"), "json__q1__lt") + self.assertEqual(get_field_lookup("q1", ">"), "json__q1__gt") def test_get_filter_kwargs(self): self.assertEqual( get_filter_kwargs([{"value": 2, "column": "first_column", "filter": "<"}]), - {'json__first_column__lt': '2'} + {"json__first_column__lt": "2"}, ) self.assertEqual( get_filter_kwargs([{"value": 2, "column": "first_column", "filter": ">"}]), - {'json__first_column__gt': '2'} + {"json__first_column__gt": "2"}, ) self.assertEqual( get_filter_kwargs([{"value": 2, "column": "first_column", "filter": "="}]), - {'json__first_column__iexact': '2'} + {"json__first_column__iexact": "2"}, ) def test_apply_filters(self): # update these filters - filters = [{'value': 'orange', 'column': 'fruit', 'filter': '='}] + filters = [{"value": "orange", "column": "fruit", "filter": "="}] xml = 'orange' instance = Instance(xform=self.xform, xml=xml) instance.save() - self.assertEqual( - apply_filters(self.xform.instances, filters).first().xml, - xml - ) + self.assertEqual(apply_filters(self.xform.instances, filters).first().xml, xml) # delete instance instance.delete() @@ -172,68 +157,75 @@ def test_dataview_with_attachment_field(self): self.assertEqual("image/png", attachment_info.get("mimetype")) self.assertEqual( f"{self.user.username}/attachments/{self.xform.id}_{self.xform.id_string}/{media_file}", - attachment_info.get("filename"),) + attachment_info.get("filename"), + ) self.assertEqual(response.status_code, 200) # Attachment viewset works ok for filtered datasets attachment_list_view = AttachmentViewSet.as_view({"get": "list"}) request = self.factory.get("/?dataview=" + str(self.data_view.pk), **self.extra) response = attachment_list_view(request) - attachments = Attachment.objects.filter( - instance__xform=self.data_view.xform) + attachments = Attachment.objects.filter(instance__xform=self.data_view.xform) self.assertEqual(1, len(response.data)) - self.assertEqual(self.data_view.query, - [{'value': 'no', 
'column': 'pizza_fan', 'filter': '='}]) - serialized_attachments = AttachmentSerializer( - attachments, - many=True, context={'request': request}).data self.assertEqual( - serialized_attachments, - response.data) + self.data_view.query, + [{"value": "no", "column": "pizza_fan", "filter": "="}], + ) + serialized_attachments = AttachmentSerializer( + attachments, many=True, context={"request": request} + ).data + self.assertEqual(serialized_attachments, response.data) # create profile for alice - alice_data = {'username': 'alice', 'email': 'alice@localhost.com', - 'password1': 'alice', 'password2': 'alice', - 'first_name': 'Alice', 'last_name': 'A', - 'city': 'Nairobi', 'country': 'KE'} + alice_data = { + "username": "alice", + "email": "alice@localhost.com", + "password1": "alice", + "password2": "alice", + "first_name": "Alice", + "last_name": "A", + "city": "Nairobi", + "country": "KE", + } alice_profile = self._create_user_profile(extra_post_data=alice_data) self.extra = {"HTTP_AUTHORIZATION": f"Token {alice_profile.user.auth_token}"} # check that user with no permisisons can not list attachment objects request = self.factory.get("/?dataview=" + str(self.data_view.pk), **self.extra) response = attachment_list_view(request) - attachments = Attachment.objects.filter( - instance__xform=self.data_view.xform) + attachments = Attachment.objects.filter(instance__xform=self.data_view.xform) self.assertEqual(0, len(response.data)) - self.assertEqual(self.data_view.query, - [{'value': 'no', 'column': 'pizza_fan', 'filter': '='}]) self.assertEqual( - [], - response.data) + self.data_view.query, + [{"value": "no", "column": "pizza_fan", "filter": "="}], + ) + self.assertEqual([], response.data) # check that user with no permisisons can not view a specific attachment object attachment_list_view = AttachmentViewSet.as_view({"get": "retrieve"}) request = self.factory.get("/?dataview=" + str(self.data_view.pk), **self.extra) - response = attachment_list_view( - request, 
pk=attachments.first().pk) - self.assertEqual(self.data_view.query, - [{'value': 'no', 'column': 'pizza_fan', 'filter': '='}]) + response = attachment_list_view(request, pk=attachments.first().pk) + self.assertEqual( + self.data_view.query, + [{"value": "no", "column": "pizza_fan", "filter": "="}], + ) self.assertEqual(response.status_code, 404) response_data = json.loads(json.dumps(response.data)) - self.assertEqual(response_data, {'detail': 'Not found.'}) + self.assertEqual(response_data, {"detail": "Not found."}) # a user with permissions can view a specific attachment object attachment_list_view = AttachmentViewSet.as_view({"get": "retrieve"}) self.extra = {"HTTP_AUTHORIZATION": f"Token {self.user.auth_token}"} request = self.factory.get("/?dataview=" + str(self.data_view.pk), **self.extra) - response = attachment_list_view( - request, pk=attachments.first().pk) - self.assertEqual(self.data_view.query, - [{'value': 'no', 'column': 'pizza_fan', 'filter': '='}]) + response = attachment_list_view(request, pk=attachments.first().pk) + self.assertEqual( + self.data_view.query, + [{"value": "no", "column": "pizza_fan", "filter": "="}], + ) self.assertEqual(response.status_code, 200) serialized_attachment = AttachmentSerializer( - attachments.first(), - context={'request': request}).data + attachments.first(), context={"request": request} + ).data self.assertEqual(response.data, serialized_attachment) # pylint: disable=invalid-name @@ -309,7 +301,9 @@ def test_get_dataview(self): response.data["url"], f"http://testserver/api/v1/dataviews/{self.data_view.pk}", ) - self.assertEqual(response.data["last_submission_time"], "2015-03-09T13:34:05") + self.assertEqual( + response.data["last_submission_time"], "2015-03-09T13:34:05.537766+00:00" + ) # Public self.project.shared = True @@ -1011,9 +1005,7 @@ def test_xlsx_export_with_choice_labels(self, async_result): "xform": f"http://testserver/api/v1/forms/{xform.pk}", "project": 
f"http://testserver/api/v1/projects/{project.pk}", "columns": '["name", "age", "gender", "pizza_type"]', - "query": ( - '[{"column":"age","filter":"=","value":"28"}]' - ), + "query": ('[{"column":"age","filter":"=","value":"28"}]'), } self._create_dataview(data=data) @@ -1023,10 +1015,7 @@ def test_xlsx_export_with_choice_labels(self, async_result): } ) - data = { - "format": "xlsx", - "show_choice_labels": "true" - } + data = {"format": "xlsx", "show_choice_labels": "true"} request = self.factory.get("/", data=data, **self.extra) response = view(request, pk=self.data_view.pk) @@ -1060,27 +1049,27 @@ def test_xlsx_export_with_choice_labels(self, async_result): sheet_data = list(main_sheet.values)[1] inst = self.xform.instances.get(id=sheet_data[4]) expected_headers = ( - 'name', - 'age', - 'gender', - 'pizza_type', - '_id', - '_uuid', - '_submission_time', - '_index', - '_parent_table_name', - '_parent_index', - '_tags', - '_notes', - '_version', - '_duration', - '_submitted_by', + "name", + "age", + "gender", + "pizza_type", + "_id", + "_uuid", + "_submission_time", + "_index", + "_parent_table_name", + "_parent_index", + "_tags", + "_notes", + "_version", + "_duration", + "_submitted_by", ) expected_data = ( - 'Dennis Wambua', + "Dennis Wambua", 28, - 'Male', - 'New York think crust!', + "Male", + "New York think crust!", inst.id, inst.uuid, inst.date_created.replace(microsecond=0, tzinfo=None), @@ -1089,7 +1078,7 @@ def test_xlsx_export_with_choice_labels(self, async_result): -1, None, None, - '4444', + "4444", 50, inst.user.username, ) @@ -1111,9 +1100,7 @@ def test_csv_export_with_choice_labels(self, async_result): "xform": f"http://testserver/api/v1/forms/{xform.pk}", "project": f"http://testserver/api/v1/projects/{project.pk}", "columns": '["name", "age", "gender", "pizza_type"]', - "query": ( - '[{"column":"age","filter":"=","value":"28"}]' - ), + "query": ('[{"column":"age","filter":"=","value":"28"}]'), } self._create_dataview(data=data) @@ -1123,10 
+1110,7 @@ def test_csv_export_with_choice_labels(self, async_result): } ) - data = { - "format": "csv", - "show_choice_labels": "true" - } + data = {"format": "csv", "show_choice_labels": "true"} request = self.factory.get("/", data=data, **self.extra) response = view(request, pk=self.data_view.pk) @@ -1153,13 +1137,8 @@ def test_csv_export_with_choice_labels(self, async_result): export = Export.objects.get(task_id=task_id) self.assertTrue(export.is_successful) with default_storage.open(export.filepath, "r") as f: - expected_data = [ - 'Dennis Wambua', - '28', - 'Male', - 'New York think crust!' - ] - expected_headers = ['name', 'age', 'gender', 'pizza_type'] + expected_data = ["Dennis Wambua", "28", "Male", "New York think crust!"] + expected_headers = ["name", "age", "gender", "pizza_type"] csv_reader = csv.reader(f) headers = next(csv_reader) self.assertEqual(expected_headers, headers) @@ -1285,7 +1264,6 @@ def test_get_charts_data_for_grouped_field(self): # pylint: disable=invalid-name def test_get_charts_data_field_not_in_dataview_columns(self): - self._create_dataview() self.view = DataViewViewSet.as_view( { @@ -1426,54 +1404,59 @@ def test_geopoint_submission_dataview(self): # geojson pagination, fields and geofield params works ok request = self.factory.get( - "/?geofield=_geolocation&page=1&page_size=1&fields=name", - **self.extra) - response = view(request, pk=self.data_view.pk, format='geojson') + "/?geofield=_geolocation&page=1&page_size=1&fields=name", **self.extra + ) + response = view(request, pk=self.data_view.pk, format="geojson") # we get correct content type headers = dict(response.items()) self.assertEqual(headers["Content-Type"], "application/geo+json") self.assertEqual(response.status_code, 200) - del response.data['features'][0]['properties']['xform'] - del response.data['features'][0]['properties']['id'] + del response.data["features"][0]["properties"]["xform"] + del response.data["features"][0]["properties"]["id"] self.assertEqual( - 
{'type': 'FeatureCollection', - 'features': [ - {'type': 'Feature', - 'geometry': None, - 'properties': {'name': 'Kameli'}}]}, - response.data + { + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": None, + "properties": {"name": "Kameli"}, + } + ], + }, + response.data, ) request = self.factory.get( - "/?geofield=_geolocation&page=9&page_size=1&fields=name", - **self.extra) - response = view(request, pk=self.data_view.pk, format='geojson') + "/?geofield=_geolocation&page=9&page_size=1&fields=name", **self.extra + ) + response = view(request, pk=self.data_view.pk, format="geojson") self.assertEqual(response.status_code, 200) - del response.data['features'][0]['properties']['xform'] - del response.data['features'][0]['properties']['id'] + del response.data["features"][0]["properties"]["xform"] + del response.data["features"][0]["properties"]["id"] self.assertEqual( - {'type': 'FeatureCollection', - 'features': - [ - {'type': 'Feature', - 'geometry': - {'type': - 'GeometryCollection', - 'geometries': - [ - {'type': 'Point', - 'coordinates': [36.8304, -1.2655]}]}, - 'properties': {'name': 'Kameli'}}]}, - response.data + { + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": { + "type": "GeometryCollection", + "geometries": [ + {"type": "Point", "coordinates": [36.8304, -1.2655]} + ], + }, + "properties": {"name": "Kameli"}, + } + ], + }, + response.data, ) request = self.factory.get( - "/?geofield=_geolocation&page=10&page_size=1&fields=name", - **self.extra) - response = view(request, pk=self.data_view.pk, format='geojson') - self.assertEqual(response.status_code, 404) - self.assertEqual( - {'detail': 'Invalid page.'}, - response.data + "/?geofield=_geolocation&page=10&page_size=1&fields=name", **self.extra ) + response = view(request, pk=self.data_view.pk, format="geojson") + self.assertEqual(response.status_code, 404) + self.assertEqual({"detail": "Invalid page."}, response.data) # pylint: 
disable=invalid-name def test_dataview_project_cache_cleared(self): @@ -1536,10 +1519,12 @@ def test_dataview_update_refreshes_cached_data(self): response = self.view(request, pk=self.data_view.pk) expected_count = 3 - expected_last_submission_time = "2015-03-09T13:34:05" + expected_last_submission_time = "2015-03-09T13:34:05.537766+00:00" self.assertEqual(response.data["count"], expected_count) - self.assertEqual(response.data["last_submission_time"], "2015-03-09T13:34:05") + self.assertEqual( + response.data["last_submission_time"], "2015-03-09T13:34:05.537766+00:00" + ) cache_dict = cache.get(f"{DATAVIEW_COUNT}{self.data_view.xform.pk}") self.assertEqual(cache_dict.get(self.data_view.pk), expected_count) @@ -1878,11 +1863,11 @@ def test_export_xls_dataview_with_date_filter(self, async_result): first_datetime = start_date.strftime(MONGO_STRFTIME) second_datetime = start_date + timedelta(days=1, hours=20) query_str = ( - '{"_submission_time": {"$gte": "' + - first_datetime + - '", "$lte": "' + - second_datetime.strftime(MONGO_STRFTIME) + - '"}}' + '{"_submission_time": {"$gte": "' + + first_datetime + + '", "$lte": "' + + second_datetime.strftime(MONGO_STRFTIME) + + '"}}' ) view = DataViewViewSet.as_view( @@ -1941,11 +1926,11 @@ def test_csv_export_dataview_date_filter(self): first_datetime = start_date.strftime(MONGO_STRFTIME) second_datetime = start_date + timedelta(days=1, hours=20) query_str = ( - '{"_submission_time": {"$gte": "' + - first_datetime + - '", "$lte": "' + - second_datetime.strftime(MONGO_STRFTIME) + - '"}}' + '{"_submission_time": {"$gte": "' + + first_datetime + + '", "$lte": "' + + second_datetime.strftime(MONGO_STRFTIME) + + '"}}' ) count = Export.objects.all().count() @@ -1984,11 +1969,11 @@ def test_csv_export_async_dataview_date_filter(self, async_result): first_datetime = start_date.strftime(MONGO_STRFTIME) second_datetime = start_date + timedelta(days=1, hours=20) query_str = ( - '{"_submission_time": {"$gte": "' + - first_datetime + - 
'", "$lte": "' + - second_datetime.strftime(MONGO_STRFTIME) + - '"}}' + '{"_submission_time": {"$gte": "' + + first_datetime + + '", "$lte": "' + + second_datetime.strftime(MONGO_STRFTIME) + + '"}}' ) count = Export.objects.all().count() diff --git a/onadata/apps/api/tests/viewsets/test_stats_viewset.py b/onadata/apps/api/tests/viewsets/test_stats_viewset.py index 83e395102d..1c21c2bbdc 100644 --- a/onadata/apps/api/tests/viewsets/test_stats_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_stats_viewset.py @@ -4,7 +4,6 @@ from django.core.files.base import ContentFile from django.test import RequestFactory from builtins import open -from mock import patch from onadata.apps.main.tests.test_base import TestBase from onadata.apps.api.viewsets.stats_viewset import StatsViewSet @@ -21,9 +20,7 @@ def setUp(self): self.factory = RequestFactory() self.extra = {"HTTP_AUTHORIZATION": "Token %s" % self.user.auth_token} - @patch("onadata.apps.logger.models.instance.submission_time") - def test_submissions_stats(self, mock_time): - self._set_mock_time(mock_time) + def test_submissions_stats(self): self._publish_transportation_form() self._make_submissions() view = SubmissionStatsViewSet.as_view({"get": "list"}) @@ -62,9 +59,7 @@ def test_submissions_stats(self, mock_time): self.assertDictContainsSubset(data, response.data[0]) - @patch("onadata.apps.logger.models.instance.submission_time") - def test_submissions_stats_with_xform_in_delete_async_queue(self, mock_time): - self._set_mock_time(mock_time) + def test_submissions_stats_with_xform_in_delete_async_queue(self): self._publish_transportation_form() self._make_submissions() view = SubmissionStatsViewSet.as_view({"get": "list"}) diff --git a/onadata/apps/api/tests/viewsets/test_xform_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_viewset.py index 4c36f6cb38..3345631f4b 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_viewset.py @@ -8,6 +8,7 @@ 
import csv import json import os +import pytz import re from builtins import open from collections import OrderedDict @@ -2240,6 +2241,7 @@ def test_xform_serializer_none(self): "instances_with_geopoints": False, "has_hxl_support": False, "hash": "", + "is_instance_json_regenerated": False, } self.assertEqual(data, XFormSerializer(None).data) @@ -3837,18 +3839,24 @@ def test_csv_export_with_win_excel_utf8(self): self._publish_xls_form_to_project(xlsform_path=xlsform_path) # submit one hxl instance _submission_time = parse_datetime("2013-02-18 15:54:01Z") - self._make_submission( - os.path.join( - settings.PROJECT_ROOT, - "apps", - "main", - "tests", - "fixtures", - "hxl_test", - "hxl_example_2.xml", - ), - forced_submission_time=_submission_time, - ) + mock_date_modified = datetime(2023, 9, 20, 11, 41, 0, tzinfo=pytz.utc) + + with patch( + "django.utils.timezone.now", Mock(return_value=mock_date_modified) + ): + self._make_submission( + os.path.join( + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "hxl_test", + "hxl_example_2.xml", + ), + forced_submission_time=_submission_time, + ) + self.assertTrue(self.xform.has_hxl_support) view = XFormViewSet.as_view({"get": "retrieve"}) @@ -3866,7 +3874,7 @@ def test_csv_export_with_win_excel_utf8(self): instance = self.xform.instances.first() data_id, date_modified = ( instance.pk, - instance.date_modified.strftime(MONGO_STRFTIME), + mock_date_modified.isoformat(), ) content = get_response_content(response) @@ -3877,7 +3885,7 @@ def test_csv_export_with_win_excel_utf8(self): "_total_media,_media_count,_media_all_received\n\ufeff#age" ",,,,,,,,,,,,,,\n\ufeff" "38,CR7,uuid:74ee8b73-48aa-4ced-9089-862f93d49c16," - "%s,74ee8b73-48aa-4ced-9089-862f93d49c16,2013-02-18T15:54:01," + "%s,74ee8b73-48aa-4ced-9089-862f93d49c16,2013-02-18T15:54:01+00:00," "%s,,,201604121155,,bob,0,0,True\n" % (data_id, date_modified) ) self.assertEqual(content, expected_content) @@ -3885,7 +3893,7 @@ def 
test_csv_export_with_win_excel_utf8(self): self.assertEqual(headers["Content-Type"], "application/csv") content_disposition = headers["Content-Disposition"] filename = filename_from_disposition(content_disposition) - basename, ext = os.path.splitext(filename) + _, ext = os.path.splitext(filename) self.assertEqual(ext, ".csv") # sort csv data in ascending order data = {"win_excel_utf8": False} @@ -3900,7 +3908,7 @@ def test_csv_export_with_win_excel_utf8(self): "tted_by,_total_media,_media_count,_media_all_received\n" "#age,,,,,,,,,,,,,,\n" "38,CR7,uuid:74ee8b73-48aa-4ced-9089-862f93d49c16" - ",%s,74ee8b73-48aa-4ced-9089-862f93d49c16,2013-02-18T15:54:01," + ",%s,74ee8b73-48aa-4ced-9089-862f93d49c16,2013-02-18T15:54:01+00:00," "%s,,,201604121155,,bob,0,0,True\n" % (data_id, date_modified) ) @@ -3967,7 +3975,7 @@ def test_csv_export_with_and_without_include_hxl(self): "_date_modified,_tags,_notes,_version,_duration,_submitted_by," "_total_media,_media_count,_media_all_received\n" "29,Lionel Messi,uuid:74ee8b73-48aa-4ced-9072-862f93d49c16," - f"{data_id},74ee8b73-48aa-4ced-9072-862f93d49c16,2013-02-18T15:54:01," + f"{data_id},74ee8b73-48aa-4ced-9072-862f93d49c16,2013-02-18T15:54:01+00:00," f"{date_modified},,,201604121155,,bob,0,0,True\n" ) self.assertEqual(expected_content, content) @@ -3990,7 +3998,7 @@ def test_csv_export_with_and_without_include_hxl(self): "_media_all_received\n" "#age,,,,,,,,,,,,,,\n" "29,Lionel Messi,uuid:74ee8b73-48aa-4ced-9072-862f93d49c16," - "%s,74ee8b73-48aa-4ced-9072-862f93d49c16,2013-02-18T15:54:01" + "%s,74ee8b73-48aa-4ced-9072-862f93d49c16,2013-02-18T15:54:01+00:00" ",%s,,,201604121155,,bob,0,0,True\n" % (data_id, date_modified) ) self.assertEqual(expected_content, content) @@ -4436,7 +4444,10 @@ def test_csv_export_filtered_by_date(self): "transportation_filtered_date.csv", ) - expected_submission = ["2015-12-02T00:00:00", "2015-12-03T00:00:00"] + expected_submission = [ + "2015-12-02T00:00:00+00:00", + "2015-12-03T00:00:00+00:00", + ] 
self._validate_csv_export( response, test_file_path, "_submission_time", expected_submission ) @@ -5009,9 +5020,9 @@ def test_xlsx_import(self): self.assertEqual( self.xform.instances.values("json___submission_time")[::1], [ - {"json___submission_time": "2023-02-03T10:27:41"}, - {"json___submission_time": "2023-02-03T10:27:42"}, - {"json___submission_time": "2023-03-13T08:42:57"}, + {"json___submission_time": "2023-02-03T10:27:41+00:00"}, + {"json___submission_time": "2023-02-03T10:27:42+00:00"}, + {"json___submission_time": "2023-03-13T08:42:57+00:00"}, ], ) self.assertEqual(response.status_code, 200) diff --git a/onadata/apps/api/viewsets/data_viewset.py b/onadata/apps/api/viewsets/data_viewset.py index 8ed0d70765..693f9ef86a 100644 --- a/onadata/apps/api/viewsets/data_viewset.py +++ b/onadata/apps/api/viewsets/data_viewset.py @@ -43,6 +43,7 @@ get_where_clause, query_data, query_fields_data, + query_count, _get_sort_fields, ParsedInstance, ) @@ -497,7 +498,8 @@ def _set_pagination_headers( query = self.request.query_params.get("query") base_url = url.split("?")[0] if query: - num_of_records = self.object_list.count() + query = self._parse_query(query) + num_of_records = query_count(xform, query=query) else: num_of_records = xform.num_of_submissions next_page_url = None @@ -660,6 +662,12 @@ def list(self, request, *args, **kwargs): return custom_response_handler(request, xform, query, export_type) + def _parse_query(self, query): + """Parse `query` query parameter""" + return filter_queryset_xform_meta_perms_sql( + self.get_object(), self.request.user, query + ) + # pylint: disable=too-many-arguments def set_object_list(self, query, fields, sort, start, limit, is_public_request): """ @@ -694,9 +702,7 @@ def set_object_list(self, query, fields, sort, start, limit, is_public_request): self.object_list = self.object_list[start_index:end_index] elif (sort or limit or start or fields) and not is_public_request: try: - query = filter_queryset_xform_meta_perms_sql( - 
self.get_object(), self.request.user, query - ) + query = self._parse_query(query) # pylint: disable=protected-access has_json_fields = sort and ParsedInstance._has_json_fields( _get_sort_fields(sort) @@ -800,22 +806,23 @@ def _get_data(self, query, fields, sort, start, limit, is_public_request): xform = self.get_object() num_of_submissions = xform.num_of_submissions should_paginate = num_of_submissions > retrieval_threshold + if should_paginate: self.paginator.page_size = retrieval_threshold - if not isinstance(self.object_list, types.GeneratorType) and should_paginate: + if should_paginate: query_param_keys = self.request.query_params current_page = query_param_keys.get(self.paginator.page_query_param, 1) current_page_size = query_param_keys.get( self.paginator.page_size_query_param, retrieval_threshold ) - self._set_pagination_headers( self.get_object(), current_page=current_page, current_page_size=current_page_size, ) + if not isinstance(self.object_list, types.GeneratorType) and should_paginate: try: # pylint: disable=attribute-defined-outside-init self.object_list = self.paginate_queryset(self.object_list) @@ -824,6 +831,7 @@ def _get_data(self, query, fields, sort, start, limit, is_public_request): self.object_list = self.paginate_queryset(self.object_list) stream_data = getattr(settings, "STREAM_DATA", False) + if stream_data: response = self._get_streaming_response() else: diff --git a/onadata/apps/api/viewsets/xform_viewset.py b/onadata/apps/api/viewsets/xform_viewset.py index 6620fd2f91..cfeff8c041 100644 --- a/onadata/apps/api/viewsets/xform_viewset.py +++ b/onadata/apps/api/viewsets/xform_viewset.py @@ -83,7 +83,10 @@ response_for_format, _get_export_type, ) -from onadata.libs.utils.cache_tools import PROJ_OWNER_CACHE, safe_delete +from onadata.libs.utils.cache_tools import ( + PROJ_OWNER_CACHE, + safe_delete, +) from onadata.libs.utils.common_tools import json_stream from onadata.libs.utils.csv_import import ( get_async_csv_submission_status, diff 
--git a/onadata/apps/logger/management/commands/recover_deleted_attachments.py b/onadata/apps/logger/management/commands/recover_deleted_attachments.py index 3199f68ec1..bf1f332e66 100644 --- a/onadata/apps/logger/management/commands/recover_deleted_attachments.py +++ b/onadata/apps/logger/management/commands/recover_deleted_attachments.py @@ -35,7 +35,7 @@ def recover_deleted_attachments(form_id: str, stdout=None): if stdout: stdout.write(f"Recovered {attachment.name} ID: {attachment.id}") # Regenerate instance JSON - instance.json = instance.get_full_dict(load_existing=False) + instance.json = instance.get_full_dict() instance.save() diff --git a/onadata/apps/logger/management/commands/regenerate_instance_json.py b/onadata/apps/logger/management/commands/regenerate_instance_json.py new file mode 100644 index 0000000000..a0f5f6c089 --- /dev/null +++ b/onadata/apps/logger/management/commands/regenerate_instance_json.py @@ -0,0 +1,78 @@ +""" +Management command python manage.py regenerate_instance_json + +Regenerates a form's instances json asynchronously +""" + +from celery.result import AsyncResult + +from django.core.management.base import BaseCommand, CommandError +from django.core.cache import cache + +from onadata.apps.api.tasks import regenerate_form_instance_json +from onadata.apps.logger.models import XForm +from onadata.libs.utils.cache_tools import ( + XFORM_REGENERATE_INSTANCE_JSON_TASK, + XFORM_REGENERATE_INSTANCE_JSON_TASK_TTL, +) + + +class Command(BaseCommand): + """Regenerate a form's instances json + + Json data recreated afresh and any existing json data is overriden + + Usage: + python manage.py regenerate_instance_json e.g + python manage.py regenerate_instance_json 689 567 453 + """ + + help = "Regenerate a form's instances json" + + def add_arguments(self, parser): + parser.add_argument("form_ids", nargs="+", type=int) + + def handle(self, *args, **options): + for form_id in options["form_ids"]: + try: + xform: XForm = 
XForm.objects.get(pk=form_id) + + except XForm.DoesNotExist: + raise CommandError( # pylint: disable=raise-missing-from + f"Form {form_id} does not exist" + ) + + self._regenerate_instance_json(xform) + + def _regenerate_instance_json(self, xform: XForm): + if xform.is_instance_json_regenerated: + # Async task completed successfully + self.stdout.write( + self.style.SUCCESS(f"Regeneration for {xform.pk} COMPLETE") + ) + return + + cache_key = f"{XFORM_REGENERATE_INSTANCE_JSON_TASK}{xform.pk}" + cached_task_id: str | None = cache.get(cache_key) + + if cached_task_id and AsyncResult(cached_task_id).state.upper() != "FAILURE": + # FAILURE is the only state that should trigger regeneration if + # a regeneration had earlier been triggered + self.stdout.write( + self.style.WARNING(f"Regeneration for {xform.pk} IN PROGRESS") + ) + return + + # Task has either failed or does not exist in cache, we create a new async task + # Celery backend expires the result after 1 day (24hrs) as outlined in the docs, + # https://docs.celeryq.dev/en/latest/userguide/configuration.html#result-expires + # If after 1 day you create an AsyncResult, the status will be PENDING. 
+ # We therefore set the cache timeout to 1 day same as the Celery backend result + # expiry timeout + result: AsyncResult = regenerate_form_instance_json.apply_async(args=[xform.pk]) + cache.set( + cache_key, + result.task_id, + XFORM_REGENERATE_INSTANCE_JSON_TASK_TTL, + ) + self.stdout.write(f"Regeneration for {xform.pk} STARTED") diff --git a/onadata/apps/logger/management/commands/tests/__init__.py b/onadata/apps/logger/management/commands/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/onadata/apps/logger/management/commands/tests/test_regenerate_instance_json.py b/onadata/apps/logger/management/commands/tests/test_regenerate_instance_json.py new file mode 100644 index 0000000000..eb643d7c0b --- /dev/null +++ b/onadata/apps/logger/management/commands/tests/test_regenerate_instance_json.py @@ -0,0 +1,122 @@ +"""Tests for management command regenerate_instance_json""" +from io import StringIO + +from unittest.mock import patch, call + +from celery.result import AsyncResult + +from django.core.management import call_command +from django.core.cache import cache + +from onadata.apps.main.tests.test_base import TestBase +from onadata.apps.logger.models import XForm + +# pylint: disable=line-too-long + + +class RegenerateInstanceJsonTestCase(TestBase): + """Tests for management command regenerate_instance_json""" + + def setUp(self): + super().setUp() + + self._publish_transportation_form() + self._make_submissions() + self.cache_key = f"xfm-regenerate_instance_json_task-{self.xform.pk}" + self.out = StringIO() + + @patch( + "onadata.apps.logger.management.commands.regenerate_instance_json.regenerate_form_instance_json" + ) + def test_regenerates_instance_json(self, mock_regenerate): + """Json data for form submissions is regenerated + + Regeneration should be asynchronous + """ + task_id = "f78ef7bb-873f-4a28-bc8a-865da43a741f" + mock_async_result = AsyncResult(task_id) + mock_regenerate.apply_async.return_value = mock_async_result + 
call_command("regenerate_instance_json", (self.xform.pk), stdout=self.out) + self.assertIn(f"Regeneration for {self.xform.pk} STARTED", self.out.getvalue()) + mock_regenerate.apply_async.assert_called_once_with(args=[self.xform.pk]) + self.assertEqual(cache.get(self.cache_key), task_id) + + @patch( + "onadata.apps.logger.management.commands.regenerate_instance_json.regenerate_form_instance_json" + ) + def test_multiple_form_ids(self, mock_regenerate): + """Command supports multiple forms""" + self._publish_xlsx_file_with_external_choices() + form2 = XForm.objects.all()[1] + mock_regenerate.apply_async.side_effect = [ + AsyncResult("f78ef7bb-873f-4a28-bc8a-865da43a741f"), + AsyncResult("ca760839-d2d9-4244-938f-e884880ac0b4"), + ] + call_command( + "regenerate_instance_json", (self.xform.pk, form2.pk), stdout=self.out + ) + self.assertIn(f"Regeneration for {self.xform.pk} STARTED", self.out.getvalue()) + self.assertIn(f"Regeneration for {form2.pk} STARTED", self.out.getvalue()) + mock_regenerate.apply_async.assert_has_calls( + [call(args=[self.xform.pk]), call(args=[form2.pk])] + ) + self.assertEqual( + cache.get(self.cache_key), "f78ef7bb-873f-4a28-bc8a-865da43a741f" + ) + form2_cache = f"xfm-regenerate_instance_json_task-{form2.pk}" + self.assertEqual(cache.get(form2_cache), "ca760839-d2d9-4244-938f-e884880ac0b4") + + @patch( + "onadata.apps.logger.management.commands.regenerate_instance_json.regenerate_form_instance_json" + ) + def test_no_duplicate_work(self, mock_regenerate): + """If a regeneration finished successfully, we do not run it again""" + self.xform.is_instance_json_regenerated = True + self.xform.save() + call_command("regenerate_instance_json", (self.xform.pk), stdout=self.out) + self.assertIn(f"Regeneration for {self.xform.pk} COMPLETE", self.out.getvalue()) + mock_regenerate.apply_async.assert_not_called() + self.assertFalse(cache.get(self.cache_key)) + + def _mock_get_task_meta_failure(self) -> dict[str, str]: + return {"status": "FAILURE"} + + 
@patch.object(AsyncResult, "_get_task_meta", _mock_get_task_meta_failure) + @patch( + "onadata.apps.logger.management.commands.regenerate_instance_json.regenerate_form_instance_json" + ) + def test_task_state_failed(self, mock_regenerate): + """We regenerate if old celery task failed""" + old_task_id = "796dc413-e6ea-42b8-b658-e4ac9e22b02b" + cache.set(self.cache_key, old_task_id) + new_task_id = "f78ef7bb-873f-4a28-bc8a-865da43a741f" + mock_async_result = AsyncResult(new_task_id) + mock_regenerate.apply_async.return_value = mock_async_result + call_command("regenerate_instance_json", (self.xform.pk), stdout=self.out) + self.assertIn(f"Regeneration for {self.xform.pk} STARTED", self.out.getvalue()) + mock_regenerate.apply_async.assert_called_once_with(args=[self.xform.pk]) + self.assertEqual(cache.get(self.cache_key), new_task_id) + + def _mock_get_task_meta_non_failure(self) -> dict[str, str]: + return {"status": "FOO"} + + @patch.object(AsyncResult, "_get_task_meta", _mock_get_task_meta_non_failure) + @patch( + "onadata.apps.logger.management.commands.regenerate_instance_json.regenerate_form_instance_json" + ) + def test_task_state_not_failed(self, mock_regenerate): + """We do not regenerate if last celery task is in a state other than FAILURE + + FAILURE is the only state that should trigger regeneration if a regeneration + had earlier been triggered + """ + old_task_id = "796dc413-e6ea-42b8-b658-e4ac9e22b02b" + cache.set(self.cache_key, old_task_id) + mock_async_result = AsyncResult(old_task_id) + mock_regenerate.apply_async.return_value = mock_async_result + call_command("regenerate_instance_json", (self.xform.pk), stdout=self.out) + self.assertIn( + f"Regeneration for {self.xform.pk} IN PROGRESS", self.out.getvalue() + ) + mock_regenerate.apply_async.assert_not_called() + self.assertEqual(cache.get(self.cache_key), old_task_id) diff --git a/onadata/apps/logger/migrations/0001_pre-django-3-upgrade.py 
b/onadata/apps/logger/migrations/0001_pre-django-3-upgrade.py index 74bf18b414..567325de78 100644 --- a/onadata/apps/logger/migrations/0001_pre-django-3-upgrade.py +++ b/onadata/apps/logger/migrations/0001_pre-django-3-upgrade.py @@ -59,7 +59,7 @@ def regenerate_instance_json(apps, schema_editor): xform__downloadable=True, xform__deleted_at__isnull=True, ): - inst.json = inst.get_full_dict(load_existing=False) + inst.json = inst.get_full_dict() inst.save() @@ -1003,7 +1003,7 @@ class Migration(migrations.Migration): to=settings.AUTH_USER_MODEL, ), ), - migrations.RunPython(recalculate_xform_hash), + # migrations.RunPython(recalculate_xform_hash), migrations.AddField( model_name="instance", name="deleted_by", @@ -1223,8 +1223,8 @@ class Migration(migrations.Migration): name="uuid", field=models.CharField(db_index=True, default="", max_length=36), ), - migrations.RunPython(generate_uuid_if_missing), - migrations.RunPython(regenerate_instance_json), + # migrations.RunPython(generate_uuid_if_missing), + # migrations.RunPython(regenerate_instance_json), migrations.CreateModel( name="XFormVersion", fields=[ @@ -1264,5 +1264,5 @@ class Migration(migrations.Migration): "unique_together": {("xform", "version")}, }, ), - migrations.RunPython(create_initial_xform_version), + # migrations.RunPython(create_initial_xform_version), ] diff --git a/onadata/apps/logger/migrations/0009_auto_20230914_0927.py b/onadata/apps/logger/migrations/0009_auto_20230914_0927.py new file mode 100644 index 0000000000..a6e95ad0d1 --- /dev/null +++ b/onadata/apps/logger/migrations/0009_auto_20230914_0927.py @@ -0,0 +1,17 @@ +# Generated by Django 3.2.20 on 2023-09-14 13:27 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("logger", "0008_add_date_fields_indexing"), + ] + + operations = [ + migrations.AddField( + model_name="xform", + name="is_instance_json_regenerated", + field=models.BooleanField(default=False), + ), + ] diff --git 
a/onadata/apps/logger/migrations/0010_auto_20230921_0346.py b/onadata/apps/logger/migrations/0010_auto_20230921_0346.py new file mode 100644 index 0000000000..2dadd0a4ff --- /dev/null +++ b/onadata/apps/logger/migrations/0010_auto_20230921_0346.py @@ -0,0 +1,27 @@ +# Generated by Django 3.2.20 on 2023-09-21 07:46 + +from django.db import migrations, models +import django.utils.timezone + + +class Migration(migrations.Migration): + dependencies = [ + ("logger", "0009_auto_20230914_0927"), + ] + + operations = [ + migrations.AlterField( + model_name="instance", + name="date_created", + field=models.DateTimeField( + blank=True, default=django.utils.timezone.now, editable=False + ), + ), + migrations.AlterField( + model_name="instance", + name="date_modified", + field=models.DateTimeField( + blank=True, default=django.utils.timezone.now, editable=False + ), + ), + ] diff --git a/onadata/apps/logger/migrations/0062_auto_20210202_0248.py b/onadata/apps/logger/migrations/0062_auto_20210202_0248.py index 0dcfa1989d..01a08b6c8f 100644 --- a/onadata/apps/logger/migrations/0062_auto_20210202_0248.py +++ b/onadata/apps/logger/migrations/0062_auto_20210202_0248.py @@ -14,12 +14,11 @@ def regenerate_instance_json(apps, schema_editor): xform__downloadable=True, xform__deleted_at__isnull=True, ): - inst.json = inst.get_full_dict(load_existing=False) + inst.json = inst.get_full_dict() inst.save() class Migration(migrations.Migration): - dependencies = [ ("logger", "0061_auto_20200713_0814"), ] diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index 840d5e6aaf..c28b50ea61 100644 --- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -60,7 +60,6 @@ LAST_EDITED, MEDIA_ALL_RECEIVED, MEDIA_COUNT, - MONGO_STRFTIME, NOTES, REVIEW_COMMENT, REVIEW_DATE, @@ -183,7 +182,7 @@ def get_id_string_from_xml_str(xml_str): return id_string -def submission_time(): +def now(): """Returns current timestamp via 
timezone.now().""" return timezone.now() @@ -237,7 +236,6 @@ def update_xform_submission_count_async(self, instance_id, created): def update_xform_submission_count(instance_id, created): """Updates the XForm submissions count on a new submission being created.""" if created: - # pylint: disable=import-outside-toplevel from multidb.pinning import use_master @@ -321,12 +319,12 @@ def _update_xform_submission_count_delete(instance): # update xform if no instance has geoms if ( - instance.xform.instances.filter( - deleted_at__isnull=True - ).exclude(geom=None).count() < - 1 + instance.xform.instances.filter(deleted_at__isnull=True) + .exclude(geom=None) + .count() + < 1 ): - if (instance.xform.polygon_xpaths() or instance.xform.geotrace_xpaths()): + if instance.xform.polygon_xpaths() or instance.xform.geotrace_xpaths(): instance.xform.instances_with_geopoints = True else: instance.xform.instances_with_geopoints = False @@ -341,32 +339,28 @@ def update_xform_submission_count_delete(sender, instance, **kwargs): @app.task(bind=True, max_retries=3) -def save_full_json_async(self, instance_id, created): +def save_full_json_async(self, instance_id): """ Celery task to asynchrounously generate and save an Instances JSON once a submission has been made """ try: - save_full_json(instance_id, created) + instance = Instance.objects.get(pk=instance_id) except Instance.DoesNotExist as e: if self.request.retries > 2: msg = f"Failed to save full JSON for Instance {instance_id}" report_exception(msg, e, sys.exc_info()) self.retry(exc=e, countdown=60 * self.request.retries) + else: + save_full_json(instance) -def save_full_json(instance_id, created): - """set json data, ensure the primary key is part of the json data""" - if created: - try: - instance = Instance.objects.get(pk=instance_id) - except Instance.DoesNotExist as e: - # Retry if run asynchrounously - if current_task.request.id: - raise e - else: - instance.json = instance.get_full_dict() - 
instance.save(update_fields=["json"]) +def save_full_json(instance: "Instance"): + """Save full json dict""" + # Queryset.update ensures the model's save is not called and + # the pre_save and post_save signals aren't sent + json = instance.get_full_dict() + Instance.objects.filter(pk=instance.pk).update(json=json, version=json.get(VERSION)) @app.task(bind=True, max_retries=3) @@ -474,16 +468,12 @@ def _set_geom(self): else: self.geom = None - def _set_json(self): - # pylint: disable=attribute-defined-outside-init - self.json = self.get_full_dict() - - def get_full_dict(self, load_existing=True): + def get_full_dict(self): """Returns the submission XML as a python dictionary object.""" - doc = self.json or {} if load_existing else {} # Get latest dict doc = self.get_dict() # pylint: disable=no-member + if self.id: geopoint = [self.point.y, self.point.x] if self.point else [None, None] doc.update( @@ -495,7 +485,7 @@ def get_full_dict(self, load_existing=True): STATUS: self.status, TAGS: list(self.tags.names()), NOTES: self.get_notes(), - VERSION: self.version, + VERSION: doc.get(VERSION, self.xform.version), DURATION: self.get_duration(), XFORM_ID_STRING: self._parser.get_xform_id_string(), XFORM_ID: self.xform.pk, @@ -508,30 +498,19 @@ def get_full_dict(self, load_existing=True): doc.update(osm.get_tags_with_prefix()) if isinstance(self.deleted_at, datetime): - doc[DELETEDAT] = self.deleted_at.strftime(MONGO_STRFTIME) + doc[DELETEDAT] = self.deleted_at.isoformat() # pylint: disable=no-member if self.has_a_review: review = self.get_latest_review() if review: doc[REVIEW_STATUS] = review.status - doc[REVIEW_DATE] = review.date_created.strftime(MONGO_STRFTIME) + doc[REVIEW_DATE] = review.date_created.isoformat() if review.get_note_text(): doc[REVIEW_COMMENT] = review.get_note_text() - # pylint: disable=attribute-defined-outside-init - # pylint: disable=access-member-before-definition - if not self.date_created: - self.date_created = submission_time() - - # pylint: 
disable=access-member-before-definition - if not self.date_modified: - self.date_modified = self.date_created - - doc[DATE_MODIFIED] = self.date_modified.strftime(MONGO_STRFTIME) - - doc[SUBMISSION_TIME] = self.date_created.strftime(MONGO_STRFTIME) - + doc[DATE_MODIFIED] = self.date_modified.isoformat() + doc[SUBMISSION_TIME] = self.date_created.isoformat() doc[TOTAL_MEDIA] = self.total_media doc[MEDIA_COUNT] = self.media_count doc[MEDIA_ALL_RECEIVED] = self.media_all_received @@ -650,13 +629,18 @@ class Instance(models.Model, InstanceBaseClass): "logger.XForm", null=False, related_name="instances", on_delete=models.CASCADE ) survey_type = models.ForeignKey("logger.SurveyType", on_delete=models.PROTECT) - # shows when we first received this instance - date_created = models.DateTimeField(auto_now_add=True) - + date_created = models.DateTimeField( + default=now, + editable=False, + blank=True, + ) # this will end up representing "date last parsed" - date_modified = models.DateTimeField(auto_now=True) - + date_modified = models.DateTimeField( + default=now, + editable=False, + blank=True, + ) # this will end up representing "date instance was deleted" deleted_at = models.DateTimeField(null=True, default=None) deleted_by = models.ForeignKey( @@ -698,9 +682,9 @@ class Meta: app_label = "logger" unique_together = ("xform", "uuid") indexes = [ - models.Index(fields=['date_created']), - models.Index(fields=['date_modified']), - models.Index(fields=['deleted_at']), + models.Index(fields=["date_created"]), + models.Index(fields=["date_modified"]), + models.Index(fields=["deleted_at"]), ] @classmethod @@ -746,8 +730,8 @@ def get_expected_media(self): media_list.extend([i["media/file"] for i in data["media"]]) else: media_xpaths = ( - self.xform.get_media_survey_xpaths() + - self.xform.get_osm_survey_xpaths() + self.xform.get_media_survey_xpaths() + + self.xform.get_osm_survey_xpaths() ) for media_xpath in media_xpaths: media_list.extend(get_values_matching_key(data, 
media_xpath)) @@ -780,6 +764,7 @@ def attachments_count(self): # pylint: disable=arguments-differ def save(self, *args, **kwargs): force = kwargs.get("force") + self.date_modified = now() if force: del kwargs["force"] @@ -787,11 +772,8 @@ def save(self, *args, **kwargs): self._check_is_merged_dataset() self._check_active(force) self._set_geom() - self._set_json() self._set_survey_type() self._set_uuid() - # pylint: disable=no-member - self.version = self.json.get(VERSION, self.xform.version) super().save(*args, **kwargs) @@ -823,7 +805,9 @@ def post_save_submission(sender, instance=None, created=False, **kwargs): - XForm submission count & instances_with_geopoints field - Project date modified - - Update the submission JSON field data + - Update the submission JSON field data. We save the full_json in + post_save signal because some implementations in get_full_dict + require the id to be available """ if instance.deleted_at is not None: _update_xform_submission_count_delete(instance) @@ -835,7 +819,7 @@ def post_save_submission(sender, instance=None, created=False, **kwargs): ) ) transaction.on_commit( - lambda: save_full_json_async.apply_async(args=[instance.pk, created]) + lambda: save_full_json_async.apply_async(args=[instance.pk]) ) transaction.on_commit( lambda: update_project_date_modified_async.apply_async( @@ -845,7 +829,7 @@ def post_save_submission(sender, instance=None, created=False, **kwargs): else: update_xform_submission_count(instance.pk, created) - save_full_json(instance.pk, created) + save_full_json(instance) update_project_date_modified(instance.pk, created) @@ -873,7 +857,7 @@ def permanently_delete_attachments(sender, instance=None, created=False, **kwarg pre_delete.connect( permanently_delete_attachments, sender=Instance, - dispatch_uid="permanently_delete_attachments" + dispatch_uid="permanently_delete_attachments", ) @@ -911,7 +895,7 @@ def attachments(self): @property def json(self): """Returns the XML submission as a python dictionary 
object.""" - return self.get_full_dict(load_existing=False) + return self.get_full_dict() @property def status(self): diff --git a/onadata/apps/logger/models/xform.py b/onadata/apps/logger/models/xform.py index e69b6a4af2..c0743e3222 100644 --- a/onadata/apps/logger/models/xform.py +++ b/onadata/apps/logger/models/xform.py @@ -132,14 +132,14 @@ def _build_obs_from_dict( "_parent_index": parent_index, } ) - for (k, v) in iteritems(dict_item): + for k, v in iteritems(dict_item): if isinstance(v, dict) and isinstance(v, list): if k in obs[table_name][-1]: raise AssertionError() obs[table_name][-1][k] = v obs[table_name][-1]["_index"] = this_index - for (k, v) in iteritems(dict_item): + for k, v in iteritems(dict_item): if isinstance(v, dict): kwargs = { "dict_item": v, @@ -893,7 +893,7 @@ class XForm(XFormMixin, BaseModel): ) # XForm was created as a merged dataset is_merged_dataset = models.BooleanField(default=False) - + is_instance_json_regenerated = models.BooleanField(default=False) tags = TaggableManager() class Meta: diff --git a/onadata/apps/logger/tests/models/test_instance.py b/onadata/apps/logger/tests/models/test_instance.py index cae12061fc..3d826abd28 100644 --- a/onadata/apps/logger/tests/models/test_instance.py +++ b/onadata/apps/logger/tests/models/test_instance.py @@ -1,11 +1,12 @@ import os +import pytz from datetime import datetime from datetime import timedelta from django.http.request import HttpRequest from django.utils.timezone import utc from django_digest.test import DigestAuth -from mock import patch +from mock import patch, Mock from onadata.apps.logger.models import XForm, Instance, SubmissionReview from onadata.apps.logger.models.instance import ( @@ -23,8 +24,6 @@ ) from onadata.libs.utils.common_tags import ( MONGO_STRFTIME, - SUBMISSION_TIME, - XFORM_ID_STRING, SUBMITTED_BY, ) @@ -36,24 +35,46 @@ def setUp(self): def test_stores_json(self): self._publish_transportation_form_and_submit_instance() instances = Instance.objects.all() + 
xform_id_string = XForm.objects.all()[0].id_string for instance in instances: self.assertNotEqual(instance.json, {}) + self.assertEqual(instance.json.get("_id"), instance.pk) + self.assertEqual( + instance.json.get("_date_modified"), instance.date_modified.isoformat() + ) + self.assertEqual( + instance.json.get("_submission_time"), instance.date_created.isoformat() + ) + self.assertEqual(instance.json.get("_xform_id_string"), xform_id_string) - @patch("django.utils.timezone.now") - def test_json_assigns_attributes(self, mock_time): - mock_time.return_value = datetime.utcnow().replace(tzinfo=utc) - self._publish_transportation_form_and_submit_instance() + def test_updates_json_date_modified_on_save(self): + """_date_modified in `json` field is updated on save""" + old_mocked_now = datetime(2023, 9, 21, 8, 27, 0, tzinfo=pytz.utc) - xform_id_string = XForm.objects.all()[0].id_string - instances = Instance.objects.all() + with patch("django.utils.timezone.now", Mock(return_value=old_mocked_now)): + self._publish_transportation_form_and_submit_instance() - for instance in instances: - self.assertEqual( - instance.json[SUBMISSION_TIME], - mock_time.return_value.strftime(MONGO_STRFTIME), - ) - self.assertEqual(instance.json[XFORM_ID_STRING], xform_id_string) + instance = Instance.objects.first() + self.assertEqual(instance.date_modified, old_mocked_now) + self.assertEqual( + instance.json.get("_date_modified"), old_mocked_now.isoformat() + ) + + # After saving the date_modified in json should update + mocked_now = datetime(2023, 9, 21, 9, 3, 0, tzinfo=pytz.utc) + + with patch("django.utils.timezone.now", Mock(return_value=mocked_now)): + instance.save() + + instance.refresh_from_db() + self.assertEqual(instance.date_modified, mocked_now) + self.assertEqual(instance.json.get("_date_modified"), mocked_now.isoformat()) + # date_created, _submission_time is not altered + self.assertEqual(instance.date_created, old_mocked_now) + self.assertEqual( + 
instance.json.get("_submission_time"), old_mocked_now.isoformat() + ) @patch("django.utils.timezone.now") def test_json_stores_user_attribute(self, mock_time): @@ -87,16 +108,6 @@ def test_json_stores_user_attribute(self, mock_time): pi = ParsedInstance.objects.get(instance=instance) self.assertEqual(pi.to_dict_for_mongo()[SUBMITTED_BY], "bob") - def test_json_time_match_submission_time(self): - self._publish_transportation_form_and_submit_instance() - instances = Instance.objects.all() - - for instance in instances: - self.assertEqual( - instance.json[SUBMISSION_TIME], - instance.date_created.strftime(MONGO_STRFTIME), - ) - def test_set_instances_with_geopoints_on_submission_false(self): self._publish_transportation_form() @@ -211,8 +222,7 @@ def test_query_filter_by_integer(self): self.assertEqual(self.xform.instances.count(), 4) self.assertEqual(len(data), 3) - @patch("onadata.apps.logger.models.instance.submission_time") - def test_query_filter_by_datetime_field(self, mock_time): + def test_query_filter_by_datetime_field(self): self._publish_transportation_form() now = datetime(2014, 1, 1, tzinfo=utc) times = [ @@ -221,7 +231,6 @@ def test_query_filter_by_datetime_field(self, mock_time): now + timedelta(seconds=2), now + timedelta(seconds=3), ] - mock_time.side_effect = times self._make_submissions() atime = None diff --git a/onadata/apps/main/tests/fixtures/csv_export/export.csv b/onadata/apps/main/tests/fixtures/csv_export/export.csv index acd9a44cae..b58c110365 100644 --- a/onadata/apps/main/tests/fixtures/csv_export/export.csv +++ b/onadata/apps/main/tests/fixtures/csv_export/export.csv @@ -1,2 +1,2 @@ bed_net[1]/member[1]/name,bed_net[1]/member[2]/name,bed_net[2]/member[1]/name,meta/instanceID,_uuid,_submission_time,_tags,_notes,_version,_duration,_submitted_by,_total_media,_media_count,_media_all_received -Andrew,Bob,Carl,uuid:4e274a99-f7a9-467c-bb44-9f9a8ceee9a7,4e274a99-f7a9-467c-bb44-9f9a8ceee9a7,2013-02-18T15:54:01,,,2014111,,bob,0,0,True 
+Andrew,Bob,Carl,uuid:4e274a99-f7a9-467c-bb44-9f9a8ceee9a7,4e274a99-f7a9-467c-bb44-9f9a8ceee9a7,2013-02-18T15:54:01+00:00,,,2014111,,bob,0,0,True diff --git a/onadata/apps/main/tests/fixtures/csv_export/tutorial_w_repeats.csv b/onadata/apps/main/tests/fixtures/csv_export/tutorial_w_repeats.csv index 770d27ef4b..f0ebe11424 100644 --- a/onadata/apps/main/tests/fixtures/csv_export/tutorial_w_repeats.csv +++ b/onadata/apps/main/tests/fixtures/csv_export/tutorial_w_repeats.csv @@ -1,2 +1,2 @@ name,age,picture,has_children,children[1]/childs_name,children[1]/childs_age,children[2]/childs_name,children[2]/childs_age,gps,_gps_latitude,_gps_longitude,_gps_altitude,_gps_precision,web_browsers/firefox,web_browsers/chrome,web_browsers/ie,web_browsers/safari,meta/instanceID,_uuid,_submission_time,_tags,_notes,_version,_duration,_submitted_by,_total_media,_media_count,_media_all_received -Bob,25,n/a,1,Tom,12,Dick,5,-1.2625621 36.7921711 0.0 20.0,-1.2625621,36.7921711,0.0,20.0,n/a,n/a,n/a,n/a,uuid:b31c6ac2-b8ca-4180-914f-c844fa10ed3b,b31c6ac2-b8ca-4180-914f-c844fa10ed3b,2013-02-18T15:54:01,,,2014111,,bob,0,0,True +Bob,25,n/a,1,Tom,12,Dick,5,-1.2625621 36.7921711 0.0 20.0,-1.2625621,36.7921711,0.0,20.0,n/a,n/a,n/a,n/a,uuid:b31c6ac2-b8ca-4180-914f-c844fa10ed3b,b31c6ac2-b8ca-4180-914f-c844fa10ed3b,2013-02-18T15:54:01+00:00,,,2014111,,bob,0,0,True diff --git a/onadata/apps/main/tests/fixtures/csv_export/tutorial_w_repeats_truncate_titles.csv b/onadata/apps/main/tests/fixtures/csv_export/tutorial_w_repeats_truncate_titles.csv index 62b7f5b004..49601028b2 100644 --- a/onadata/apps/main/tests/fixtures/csv_export/tutorial_w_repeats_truncate_titles.csv +++ b/onadata/apps/main/tests/fixtures/csv_export/tutorial_w_repeats_truncate_titles.csv @@ -1,2 +1,2 @@ 
name,age,picture,has_children,childs_name,childs_age,childs_name,childs_age,gps,_gps_latitude,_gps_longitude,_gps_altitude,_gps_precision,web_browsers/firefox,web_browsers/chrome,web_browsers/ie,web_browsers/safari,instanceID,_uuid,_submission_time,_tags,_notes,_version,_duration,_submitted_by,_total_media,_media_count,_media_all_received -Bob,25,n/a,1,Tom,12,Dick,5,-1.2625621 36.7921711 0.0 20.0,-1.2625621,36.7921711,0.0,20.0,n/a,n/a,n/a,n/a,uuid:b31c6ac2-b8ca-4180-914f-c844fa10ed3b,b31c6ac2-b8ca-4180-914f-c844fa10ed3b,2013-02-18T15:54:01,,,2014111,,bob,0,0,True +Bob,25,n/a,1,Tom,12,Dick,5,-1.2625621 36.7921711 0.0 20.0,-1.2625621,36.7921711,0.0,20.0,n/a,n/a,n/a,n/a,uuid:b31c6ac2-b8ca-4180-914f-c844fa10ed3b,b31c6ac2-b8ca-4180-914f-c844fa10ed3b,2013-02-18T15:54:01+00:00,,,2014111,,bob,0,0,True diff --git a/onadata/apps/main/tests/fixtures/transportation/transportation.csv b/onadata/apps/main/tests/fixtures/transportation/transportation.csv index cc2dc8c8b7..9edd6f926e 100644 --- a/onadata/apps/main/tests/fixtures/transportation/transportation.csv +++ b/onadata/apps/main/tests/fixtures/transportation/transportation.csv @@ -1,5 +1,5 @@ 
-"transport/available_transportation_types_to_referral_facility/ambulance","transport/available_transportation_types_to_referral_facility/bicycle","transport/available_transportation_types_to_referral_facility/boat_canoe","transport/available_transportation_types_to_referral_facility/bus","transport/available_transportation_types_to_referral_facility/donkey_mule_cart","transport/available_transportation_types_to_referral_facility/keke_pepe","transport/available_transportation_types_to_referral_facility/lorry","transport/available_transportation_types_to_referral_facility/motorbike","transport/available_transportation_types_to_referral_facility/taxi","transport/available_transportation_types_to_referral_facility/other","transport/available_transportation_types_to_referral_facility_other","transport/loop_over_transport_types_frequency/ambulance/frequency_to_referral_facility","transport/loop_over_transport_types_frequency/bicycle/frequency_to_referral_facility","transport/loop_over_transport_types_frequency/boat_canoe/frequency_to_referral_facility","transport/loop_over_transport_types_frequency/bus/frequency_to_referral_facility","transport/loop_over_transport_types_frequency/donkey_mule_cart/frequency_to_referral_facility","transport/loop_over_transport_types_frequency/keke_pepe/frequency_to_referral_facility","transport/loop_over_transport_types_frequency/lorry/frequency_to_referral_facility","transport/loop_over_transport_types_frequency/motorbike/frequency_to_referral_facility","transport/loop_over_transport_types_frequency/taxi/frequency_to_referral_facility","meta/instanceID","_uuid","_submission_time" -"False","False","False","False","False","False","False","False","False","False","n/a","n/a","n/a","n/a","n/a","n/a","n/a","n/a","n/a","n/a","uuid:5b2cc313-fc09-437e-8149-fcd32f695d41","5b2cc313-fc09-437e-8149-fcd32f695d41","2013-02-14T15:37:21", 
-"True","True","False","False","False","False","False","False","False","False","n/a","daily","weekly","n/a","n/a","n/a","n/a","n/a","n/a","n/a","uuid:f3d8dc65-91a6-4d0f-9e97-802128083390","f3d8dc65-91a6-4d0f-9e97-802128083390","2013-02-14T15:37:22", -"True","False","False","False","False","False","False","False","False","False","n/a","weekly","n/a","n/a","n/a","n/a","n/a","n/a","n/a","n/a","uuid:9c6f3468-cfda-46e8-84c1-75458e72805d","9c6f3468-cfda-46e8-84c1-75458e72805d","2013-02-14T15:37:23", -"False","False","False","False","False","False","False","False","True","True","camel","n/a","n/a","n/a","n/a","n/a","n/a","n/a","n/a","daily","uuid:9f0a1508-c3b7-4c99-be00-9b237c26bcbf","9f0a1508-c3b7-4c99-be00-9b237c26bcbf","2013-02-14T15:37:24", +transport/available_transportation_types_to_referral_facility/ambulance,transport/available_transportation_types_to_referral_facility/bicycle,transport/available_transportation_types_to_referral_facility/boat_canoe,transport/available_transportation_types_to_referral_facility/bus,transport/available_transportation_types_to_referral_facility/donkey_mule_cart,transport/available_transportation_types_to_referral_facility/keke_pepe,transport/available_transportation_types_to_referral_facility/lorry,transport/available_transportation_types_to_referral_facility/motorbike,transport/available_transportation_types_to_referral_facility/taxi,transport/available_transportation_types_to_referral_facility/other,transport/available_transportation_types_to_referral_facility_other,transport/loop_over_transport_types_frequency/ambulance/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/bicycle/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/boat_canoe/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/bus/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/donkey_mule_cart/frequency_to_referral_facility,transport/loop_over_transport_types_frequ
ency/keke_pepe/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/lorry/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/motorbike/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/taxi/frequency_to_referral_facility,meta/instanceID,_uuid,_submission_time +False,False,False,False,False,False,False,False,False,False,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,uuid:5b2cc313-fc09-437e-8149-fcd32f695d41,5b2cc313-fc09-437e-8149-fcd32f695d41,2013-02-14T15:37:21+00:00 +True,True,False,False,False,False,False,False,False,False,n/a,daily,weekly,n/a,n/a,n/a,n/a,n/a,n/a,n/a,uuid:f3d8dc65-91a6-4d0f-9e97-802128083390,f3d8dc65-91a6-4d0f-9e97-802128083390,2013-02-14T15:37:22+00:00 +True,False,False,False,False,False,False,False,False,False,n/a,weekly,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,uuid:9c6f3468-cfda-46e8-84c1-75458e72805d,9c6f3468-cfda-46e8-84c1-75458e72805d,2013-02-14T15:37:23+00:00 +False,False,False,False,False,False,False,False,True,True,camel,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,daily,uuid:9f0a1508-c3b7-4c99-be00-9b237c26bcbf,9f0a1508-c3b7-4c99-be00-9b237c26bcbf,2013-02-14T15:37:24+00:00 diff --git a/onadata/apps/main/tests/fixtures/userone/userone_with_dot_name_fields.csv b/onadata/apps/main/tests/fixtures/userone/userone_with_dot_name_fields.csv index b80057dfd6..d2dcecb664 100644 --- a/onadata/apps/main/tests/fixtures/userone/userone_with_dot_name_fields.csv +++ b/onadata/apps/main/tests/fixtures/userone/userone_with_dot_name_fields.csv @@ -1,2 +1,2 @@ Q1.1,Q1.2,Q1.3,Q1.4,_Q1.4_latitude,_Q1.4_longitude,_Q1.4_altitude,_Q1.4_precision,rQ6.4[1]/Q6.4,meta/instanceID,_uuid,_submission_time,_tags,_notes,_version,_duration,_submitted_by,_total_media,_media_count,_media_all_received -Office,4,n/a,-1.2624975 36.7923384 0.0 25.0,-1.2624975,36.7923384,0.0,25.0,Cool,uuid:a32f232c-77cb-4468-b55b-6495d5e5de7,a32f232c-77cb-4468-b55b-6495d5e5de7,2013-02-18T15:54:01,,,2014111,,bob,0,0,True +Office,4,n/a,-1.2624975 
36.7923384 0.0 25.0,-1.2624975,36.7923384,0.0,25.0,Cool,uuid:a32f232c-77cb-4468-b55b-6495d5e5de7,a32f232c-77cb-4468-b55b-6495d5e5de7,2013-02-18T15:54:01+00:00,,,2014111,,bob,0,0,True diff --git a/onadata/apps/main/tests/test_process.py b/onadata/apps/main/tests/test_process.py index 549ffbcec9..27b6848e77 100644 --- a/onadata/apps/main/tests/test_process.py +++ b/onadata/apps/main/tests/test_process.py @@ -75,7 +75,7 @@ def _update_dynamic_data(self): """ Update stuff like submission time so we can compare within out fixtures """ - for (uuid, submission_time) in iteritems(self.uuid_to_submission_times): + for uuid, submission_time in iteritems(self.uuid_to_submission_times): i = self.xform.instances.get(uuid=uuid) i.date_created = pytz.timezone("UTC").localize( datetime.strptime(submission_time, MONGO_STRFTIME) @@ -368,7 +368,6 @@ def _check_data_dictionary(self): self.assertEqual(sorted(next(actual_csv)), sorted(expected_list)) def _check_data_for_csv_export(self): - data = [ { "available_transportation_types_to_referral_facility/ambulance": True, @@ -391,7 +390,7 @@ def _check_data_for_csv_export(self): ] for d_from_db in self.data_dictionary.get_data_for_excel(): test_dict = {} - for (k, v) in iteritems(d_from_db): + for k, v in iteritems(d_from_db): if ( k not in [ @@ -476,7 +475,7 @@ def _check_csv_export_second_pass(self): "image1": "1335783522563.jpg", "meta/instanceID": "uuid:5b2cc313-fc09-437e-8149-fcd32f695d41", "_uuid": "5b2cc313-fc09-437e-8149-fcd32f695d41", - "_submission_time": "2013-02-14T15:37:21", + "_submission_time": "2013-02-14T15:37:21+00:00", "_tags": "", "_notes": "", "_version": "2014111", @@ -492,7 +491,7 @@ def _check_csv_export_second_pass(self): self.bicycle_key: "weekly", "meta/instanceID": "uuid:f3d8dc65-91a6-4d0f-9e97-802128083390", "_uuid": "f3d8dc65-91a6-4d0f-9e97-802128083390", - "_submission_time": "2013-02-14T15:37:22", + "_submission_time": "2013-02-14T15:37:22+00:00", "_tags": "", "_notes": "", "_version": "2014111", @@ -507,7 
+506,7 @@ def _check_csv_export_second_pass(self): self.ambulance_key: "weekly", "meta/instanceID": "uuid:9c6f3468-cfda-46e8-84c1-75458e72805d", "_uuid": "9c6f3468-cfda-46e8-84c1-75458e72805d", - "_submission_time": "2013-02-14T15:37:23", + "_submission_time": "2013-02-14T15:37:23+00:00", "_tags": "", "_notes": "", "_version": "2014111", @@ -524,7 +523,7 @@ def _check_csv_export_second_pass(self): self.taxi_key: "daily", "meta/instanceID": "uuid:9f0a1508-c3b7-4c99-be00-9b237c26bcbf", "_uuid": "9f0a1508-c3b7-4c99-be00-9b237c26bcbf", - "_submission_time": "2013-02-14T15:37:24", + "_submission_time": "2013-02-14T15:37:24+00:00", "_tags": "", "_notes": "", "_version": "2014111", @@ -543,7 +542,7 @@ def _check_csv_export_second_pass(self): for row, expected_dict in zip(actual_csv, data): test_dict = {} row_dict = dict(zip(headers, row)) - for (k, v) in iteritems(row_dict): + for k, v in iteritems(row_dict): if not (v in ["n/a", "False"] or k in additional_headers): test_dict[k] = v this_list = [] diff --git a/onadata/apps/viewer/tasks.py b/onadata/apps/viewer/tasks.py index 90cf235a71..2e812ae6d5 100644 --- a/onadata/apps/viewer/tasks.py +++ b/onadata/apps/viewer/tasks.py @@ -37,7 +37,6 @@ def _get_export_object(export_id): return Export.objects.get(id=export_id) except Export.DoesNotExist: if getattr(settings, "SLAVE_DATABASES", []): - with use_master: return Export.objects.get(id=export_id) diff --git a/onadata/apps/viewer/tests/fixtures/transportation.csv b/onadata/apps/viewer/tests/fixtures/transportation.csv index 484ebb1a49..acaa119d95 100644 --- a/onadata/apps/viewer/tests/fixtures/transportation.csv +++ b/onadata/apps/viewer/tests/fixtures/transportation.csv @@ -1,2 +1,2 @@ 
transport/available_transportation_types_to_referral_facility/ambulance,transport/available_transportation_types_to_referral_facility/bicycle,transport/available_transportation_types_to_referral_facility/boat_canoe,transport/available_transportation_types_to_referral_facility/bus,transport/available_transportation_types_to_referral_facility/donkey_mule_cart,transport/available_transportation_types_to_referral_facility/keke_pepe,transport/available_transportation_types_to_referral_facility/lorry,transport/available_transportation_types_to_referral_facility/motorbike,transport/available_transportation_types_to_referral_facility/taxi,transport/available_transportation_types_to_referral_facility/other,transport/available_transportation_types_to_referral_facility_other,transport/loop_over_transport_types_frequency/ambulance/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/bicycle/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/boat_canoe/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/bus/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/donkey_mule_cart/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/keke_pepe/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/lorry/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/motorbike/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/taxi/frequency_to_referral_facility,meta/instanceID,_uuid,_submission_time,_tags,_notes,_version,_duration,_submitted_by,_total_media,_media_count,_media_all_received -False,False,False,False,False,False,False,False,False,False,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,uuid:5b2cc313-fc09-437e-8149-fcd32f695d41,5b2cc313-fc09-437e-8149-fcd32f695d41,2013-02-18T15:54:01,,,2014111,,bob,1,0,False 
+False,False,False,False,False,False,False,False,False,False,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,n/a,uuid:5b2cc313-fc09-437e-8149-fcd32f695d41,5b2cc313-fc09-437e-8149-fcd32f695d41,2013-02-18T15:54:01+00:00,,,2014111,,bob,1,0,False diff --git a/onadata/apps/viewer/tests/fixtures/transportation_without_na.csv b/onadata/apps/viewer/tests/fixtures/transportation_without_na.csv index 922d5efa68..990d1d37a5 100644 --- a/onadata/apps/viewer/tests/fixtures/transportation_without_na.csv +++ b/onadata/apps/viewer/tests/fixtures/transportation_without_na.csv @@ -1,2 +1,2 @@ transport/available_transportation_types_to_referral_facility/ambulance,transport/available_transportation_types_to_referral_facility/bicycle,transport/available_transportation_types_to_referral_facility/boat_canoe,transport/available_transportation_types_to_referral_facility/bus,transport/available_transportation_types_to_referral_facility/donkey_mule_cart,transport/available_transportation_types_to_referral_facility/keke_pepe,transport/available_transportation_types_to_referral_facility/lorry,transport/available_transportation_types_to_referral_facility/motorbike,transport/available_transportation_types_to_referral_facility/taxi,transport/available_transportation_types_to_referral_facility/other,transport/available_transportation_types_to_referral_facility_other,transport/loop_over_transport_types_frequency/ambulance/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/bicycle/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/boat_canoe/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/bus/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/donkey_mule_cart/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/keke_pepe/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/lorry/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/moto
rbike/frequency_to_referral_facility,transport/loop_over_transport_types_frequency/taxi/frequency_to_referral_facility,meta/instanceID,_uuid,_submission_time,_tags,_notes,_version,_duration,_submitted_by,_total_media,_media_count,_media_all_received -False,False,False,False,False,False,False,False,False,False,,,,,,,,,,,uuid:5b2cc313-fc09-437e-8149-fcd32f695d41,5b2cc313-fc09-437e-8149-fcd32f695d41,2013-02-18T15:54:01,,,2014111,,bob,1,0,False +False,False,False,False,False,False,False,False,False,False,,,,,,,,,,,uuid:5b2cc313-fc09-437e-8149-fcd32f695d41,5b2cc313-fc09-437e-8149-fcd32f695d41,2013-02-18T15:54:01+00:00,,,2014111,,bob,1,0,False diff --git a/onadata/libs/renderers/renderers.py b/onadata/libs/renderers/renderers.py index 1e828027bd..46fb86a932 100644 --- a/onadata/libs/renderers/renderers.py +++ b/onadata/libs/renderers/renderers.py @@ -52,11 +52,16 @@ def floip_rows_list(data): """ Yields a row of FLOIP results data from dict data. """ - _submission_time = ( - pytz.timezone("UTC") - .localize(parse_datetime(data["_submission_time"])) - .isoformat() - ) + try: + _submission_time = ( + pytz.timezone("UTC") + .localize(parse_datetime(data["_submission_time"])) + .isoformat() + ) + + except ValueError: + _submission_time = data["_submission_time"] + for i, key in enumerate(data, 1): if not (key.startswith("_") or key in IGNORE_FIELDS): instance_id = data["_id"] @@ -296,7 +301,7 @@ def _to_xml(self, xml, data): xml.endElement(self.element_node) elif isinstance(data, dict): - for (key, value) in iteritems(data): + for key, value in iteritems(data): if key not in FORMLIST_MANDATORY_FIELDS and value is None: continue xml.startElement(key, {}) diff --git a/onadata/libs/tests/data/test_tools.py b/onadata/libs/tests/data/test_tools.py index a7acf2b2b4..652d7e8523 100644 --- a/onadata/libs/tests/data/test_tools.py +++ b/onadata/libs/tests/data/test_tools.py @@ -35,8 +35,10 @@ def test_get_form_submissions_grouped_by_field(self, mock_time): self.assertEqual([field, 
count_key], sorted(list(result))) self.assertEqual(result[count_key], count) - @patch("onadata.apps.logger.models.instance.submission_time") - def test_get_form_submissions_grouped_by_field_datetime_to_date(self, mock_time): + def test_get_form_submissions_grouped_by_field_datetime( + self, + ): # pylint: disable=invalid-name + """Test get_form_submissions_grouped_by_field datetime""" now = datetime(2014, 1, 1, tzinfo=utc) times = [ now, @@ -44,12 +46,12 @@ def test_get_form_submissions_grouped_by_field_datetime_to_date(self, mock_time) now + timedelta(seconds=2), now + timedelta(seconds=3), ] - mock_time.side_effect = times self._make_submissions() for i in self.xform.instances.all().order_by("-pk"): i.date_created = times.pop() i.save() + count_key = "count" fields = ["_submission_time"] diff --git a/onadata/libs/tests/utils/fixtures/nested_repeats/nested_repeats.csv b/onadata/libs/tests/utils/fixtures/nested_repeats/nested_repeats.csv index 3a22b86d86..44fcc51f07 100644 --- a/onadata/libs/tests/utils/fixtures/nested_repeats/nested_repeats.csv +++ b/onadata/libs/tests/utils/fixtures/nested_repeats/nested_repeats.csv @@ -1,3 +1,3 @@ 
kids/has_kids,kids/kids_details[1]/kids_name,kids/kids_details[1]/kids_age,kids/kids_details[2]/kids_name,kids/kids_details[2]/kids_age,kids/kids_details[3]/kids_name,kids/kids_details[3]/kids_age,kids/kids_details[1]/kids_immunization[1]/immunization_info,kids/kids_details[1]/kids_immunization[2]/immunization_info,kids/kids_details[1]/kids_immunization[3]/immunization_info,kids/kids_details[2]/kids_immunization[1]/immunization_info,kids/kids_details[2]/kids_immunization[2]/immunization_info,kids/kids_details[2]/kids_immunization[3]/immunization_info,kids/kids_details[3]/kids_immunization[1]/immunization_info,kids/nested_group/nested_name,kids/nested_group/nested_age,address,gps,_gps_latitude,_gps_longitude,_gps_altitude,_gps_precision,web_browsers/firefox,web_browsers/chrome,web_browsers/ie,web_browsers/safari,meta/instanceID,_uuid,_submission_time,_tags,_notes,_version,_duration,_submitted_by,_total_media,_media_count,_media_all_received -1,Hansel,2,Gretel,1,n/a,n/a,Polio,Measles,Malaria,TB,Rickets,n/a,n/a,The Witch,45,1234,-1.2627557 36.7926442 0.0 30.0,-1.2627557,36.7926442,0.0,30.0,False,False,True,True,uuid:0a2a8ef5-5d82-4e43-a267-74037b0e9ea4,0a2a8ef5-5d82-4e43-a267-74037b0e9ea4,2013-02-18T15:54:01,,,201211121234,,bob,0,0,True -1,Tom,8,Dick,5,Harry,3,Berry Berry,Scurvy,n/a,Measles,Flu,Anemia,Polio 2,John,38,5678,-1.2627427 36.7925298 0.0 39.0,-1.2627427,36.7925298,0.0,39.0,True,True,True,False,uuid:8742fb1c-50a5-4c54-80c6-4420129d14ce,8742fb1c-50a5-4c54-80c6-4420129d14ce,2013-02-18T15:54:01,,,201211121234,,bob,0,0,True +1,Hansel,2,Gretel,1,n/a,n/a,Polio,Measles,Malaria,TB,Rickets,n/a,n/a,The Witch,45,1234,-1.2627557 36.7926442 0.0 30.0,-1.2627557,36.7926442,0.0,30.0,False,False,True,True,uuid:0a2a8ef5-5d82-4e43-a267-74037b0e9ea4,0a2a8ef5-5d82-4e43-a267-74037b0e9ea4,2013-02-18T15:54:01+00:00,,,201211121234,,bob,0,0,True +1,Tom,8,Dick,5,Harry,3,Berry Berry,Scurvy,n/a,Measles,Flu,Anemia,Polio 2,John,38,5678,-1.2627427 36.7925298 0.0 
39.0,-1.2627427,36.7925298,0.0,39.0,True,True,True,False,uuid:8742fb1c-50a5-4c54-80c6-4420129d14ce,8742fb1c-50a5-4c54-80c6-4420129d14ce,2013-02-18T15:54:01+00:00,,,201211121234,,bob,0,0,True diff --git a/onadata/libs/tests/utils/test_logger_tools.py b/onadata/libs/tests/utils/test_logger_tools.py index 5e498d15e8..03516b7e66 100644 --- a/onadata/libs/tests/utils/test_logger_tools.py +++ b/onadata/libs/tests/utils/test_logger_tools.py @@ -97,6 +97,7 @@ def test_attachment_tracking(self): BytesIO(xml_string.strip().encode("utf-8")), media_files=[media_file], ) + instance.refresh_from_db() self.assertFalse(instance.json[MEDIA_ALL_RECEIVED]) self.assertEqual(instance.json[TOTAL_MEDIA], 2) self.assertEqual(instance.json[MEDIA_COUNT], 1) @@ -183,6 +184,7 @@ def test_attachment_tracking_for_repeats(self): BytesIO(xml_string.strip().encode("utf-8")), media_files=[media_file], ) + instance.refresh_from_db() self.assertFalse(instance.json[MEDIA_ALL_RECEIVED]) self.assertEqual(instance.json[TOTAL_MEDIA], 2) self.assertEqual(instance.json[MEDIA_COUNT], 1) @@ -254,6 +256,7 @@ def test_attachment_tracking_for_nested_repeats(self): BytesIO(xml_string.strip().encode("utf-8")), media_files=[media_file], ) + instance.refresh_from_db() self.assertFalse(instance.json[MEDIA_ALL_RECEIVED]) self.assertEqual(instance.json[TOTAL_MEDIA], 2) self.assertEqual(instance.json[MEDIA_COUNT], 1) @@ -326,6 +329,7 @@ def test_replaced_attachments_not_tracked(self): BytesIO(xml_string.strip().encode("utf-8")), media_files=[file_media, image_media], ) + instance.refresh_from_db() self.assertTrue(instance.json[MEDIA_ALL_RECEIVED]) self.assertEqual( instance.attachments.filter(deleted_at__isnull=True).count(), 2 @@ -408,6 +412,7 @@ def test_attachment_tracking_duplicate(self): BytesIO(xml_string.strip().encode("utf-8")), media_files=[media_file], ) + instance.refresh_from_db() self.assertFalse(instance.json[MEDIA_ALL_RECEIVED]) self.assertEqual(instance.json[TOTAL_MEDIA], 2) 
self.assertEqual(instance.json[MEDIA_COUNT], 1) @@ -473,6 +478,7 @@ def test_attachment_tracking_not_in_submission(self): BytesIO(xml_string.strip().encode("utf-8")), media_files=[media_file, media2_file], ) + instance.refresh_from_db() self.assertFalse(instance.json[MEDIA_ALL_RECEIVED]) self.assertEqual(instance.json[TOTAL_MEDIA], 2) self.assertEqual(instance.json[MEDIA_COUNT], 1) diff --git a/onadata/libs/utils/cache_tools.py b/onadata/libs/utils/cache_tools.py index 033b37c6f5..1b0ec079b8 100644 --- a/onadata/libs/utils/cache_tools.py +++ b/onadata/libs/utils/cache_tools.py @@ -55,6 +55,10 @@ XFORM_SUBMISSION_COUNT_FOR_DAY_DATE = "xfm-get_submission_count_date-" XFORM_SUBMISSION_STAT = "xfm-get_form_submissions_grouped_by_field-" XFORM_CHARTS = "xfm-get_form_charts-" +XFORM_REGENERATE_INSTANCE_JSON_TASK = "xfm-regenerate_instance_json_task-" + +# Cache timeouts used in XForm model +XFORM_REGENERATE_INSTANCE_JSON_TASK_TTL = 24 * 60 * 60 # 24 hrs converted to seconds def safe_delete(key): From 20d5e020ab83ad50b4b0e2c824352548ad7fae1b Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Mon, 2 Oct 2023 11:00:46 +0300 Subject: [PATCH 084/270] Tag release v3.14.0 (#2484) * tag release v3.14.0 * update changes doc --- CHANGES.rst | 11 +++++++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 0da14d42ca..6da9378e1c 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,17 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.14.0(2023-10-02) +- Ensure sas token is appended to azure blob attachment url + `PR #2482 ` + [@KipSigei] +- Ensure geotrace and geoshape in repeats is included in GeoJSON data endpoint + `PR #2478 ` + [@KipSigei] +- Data endpoint enhancements + `PR #2477 ` + [@kelvin-muchiri] + v3.13.1(2023-09-13) ------------------- - Revert to have data exports default sorting by id diff --git a/onadata/__init__.py b/onadata/__init__.py index 
64f9e5359a..0433794942 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.13.1" +__version__ = "3.14.0" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index 745996bffb..b659514344 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.13.1 +version = 3.14.0 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 6c8a2a8d33fec2597e68cc04303a9008032ffa09 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Mon, 9 Oct 2023 16:41:18 +0300 Subject: [PATCH 085/270] tag release v3.14.1 (#2485) --- CHANGES.rst | 6 ++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 6da9378e1c..136a8b2eb8 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,7 +3,13 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.14.1(2023-10-09) +------------------- +- Rebuilding to pick the latest google export changes. 
+ [@kelvin-muchiri] + v3.14.0(2023-10-02) +------------------- - Ensure sas token is appended to azure blob attachment url `PR #2482 ` [@KipSigei] diff --git a/onadata/__init__.py b/onadata/__init__.py index 0433794942..ee168c510b 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.14.0" +__version__ = "3.14.1" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index b659514344..c3e3832111 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.14.0 +version = 3.14.1 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 68913aba8dc61ce2024c3aa618b778757e610a52 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Thu, 12 Oct 2023 02:16:25 +0300 Subject: [PATCH 086/270] save xml to json data in pre-save (#2486) when asynchronous processing of submissions is enabled, submissions whose json is not yet processed have an empty json on endpoint /api/v1/data/. To prevent this from happening, we save the XML to json data during pre-save. 
The json will later be updated to include the metadata in the post_save signal --- onadata/apps/logger/models/instance.py | 1 + 1 file changed, 1 insertion(+) diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index c28b50ea61..fe32914da2 100644 --- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -765,6 +765,7 @@ def attachments_count(self): def save(self, *args, **kwargs): force = kwargs.get("force") self.date_modified = now() + self.json = self.get_dict() # XML converted to json if force: del kwargs["force"] From 296ced4bd5cac27f71d96c853924b6ceb74a1490 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Fri, 6 Oct 2023 21:10:33 +0300 Subject: [PATCH 087/270] Handle non-iterable objects gracefully Signed-off-by: Kipchirchir Sigei --- onadata/libs/serializers/geojson_serializer.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/onadata/libs/serializers/geojson_serializer.py b/onadata/libs/serializers/geojson_serializer.py index a2608225a4..7e7e4dc3f6 100644 --- a/onadata/libs/serializers/geojson_serializer.py +++ b/onadata/libs/serializers/geojson_serializer.py @@ -141,7 +141,10 @@ def to_representation(self, instance): points = instance.json.get(geo_field) if geo_field in geotrace_xpaths or geo_field in polygon_xpaths: value = get_values_matching_key(instance.json, geo_field) - points = next(value) + try: + points = next(value) + except TypeError: + points = None geometry = ( geometry_from_string(points, simple_style) if points and isinstance(points, str) From 935f0862b24b88b04a3d83cd3da03318ef335b5c Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 11 Oct 2023 09:22:57 +0300 Subject: [PATCH 088/270] Handle empty geoshapes and geotraces in data Signed-off-by: Kipchirchir Sigei --- onadata/libs/serializers/geojson_serializer.py | 3 ++- onadata/libs/utils/dict_tools.py | 10 ++++++++-- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git 
a/onadata/libs/serializers/geojson_serializer.py b/onadata/libs/serializers/geojson_serializer.py index 7e7e4dc3f6..54fa9a7370 100644 --- a/onadata/libs/serializers/geojson_serializer.py +++ b/onadata/libs/serializers/geojson_serializer.py @@ -141,9 +141,10 @@ def to_representation(self, instance): points = instance.json.get(geo_field) if geo_field in geotrace_xpaths or geo_field in polygon_xpaths: value = get_values_matching_key(instance.json, geo_field) + # handle empty geoms try: points = next(value) - except TypeError: + except StopIteration: points = None geometry = ( geometry_from_string(points, simple_style) diff --git a/onadata/libs/utils/dict_tools.py b/onadata/libs/utils/dict_tools.py index fe796c6632..9a37cc931e 100644 --- a/onadata/libs/utils/dict_tools.py +++ b/onadata/libs/utils/dict_tools.py @@ -22,8 +22,14 @@ def _get_values(doc, key): yield item elif isinstance(v, list): for i in v: - for j in _get_values(i, key): - yield j + if isinstance(i, (dict, list)): + try: + for j in _get_values(i, key): + yield j + except StopIteration: + continue + elif i == key: + yield i return _get_values(doc, key) From 0fd6c37b7bd928d5d5a21b6c135bd467df0e8863 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 12 Oct 2023 09:56:19 +0300 Subject: [PATCH 089/270] Add tests Signed-off-by: Kipchirchir Sigei --- .../api/tests/viewsets/test_data_viewset.py | 112 ++++++++++++++++++ .../fixtures/geolocation/empty_geoshapes.csv | 3 + .../fixtures/geolocation/empty_geotraces.csv | 3 + onadata/libs/utils/dict_tools.py | 1 + 4 files changed, 119 insertions(+) create mode 100644 onadata/apps/main/tests/fixtures/geolocation/empty_geoshapes.csv create mode 100644 onadata/apps/main/tests/fixtures/geolocation/empty_geotraces.csv diff --git a/onadata/apps/api/tests/viewsets/test_data_viewset.py b/onadata/apps/api/tests/viewsets/test_data_viewset.py index dcdc50a310..bef0b1a0bf 100644 --- a/onadata/apps/api/tests/viewsets/test_data_viewset.py +++ 
b/onadata/apps/api/tests/viewsets/test_data_viewset.py @@ -2504,6 +2504,118 @@ def test_geoshapes_in_repeats(self): } self.assertEqual(response.data, data) + def test_empty_geotraces_in_repeats(self): + # publish sample geotrace submissions + md = """ + | survey | + | | type | name | label | required | calculation | + | | begin repeat | segment | Waterway trace | | | + | | calculate | point_position | | | position(..)| + | | geotrace | blueline | GPS Coordinates | yes | | + | | end repeat | + """ + self.xform = self._publish_markdown( + md, self.user, self.project, id_string="geotraces" + ) + # publish submissions + self._publish_submit_geoms_in_repeats("empty_geotraces") + view = DataViewSet.as_view({"get": "list"}) + request = self.factory.get("/", **self.extra) + response = view(request, pk=self.xform.pk, format="geojson") + self.assertEqual(response.status_code, 200) + # get geojson from geo_field + data_get = {"geo_field": "segment/blueline"} + request = self.factory.get("/", data=data_get, **self.extra) + response = view(request, pk=self.xform.pk, format="geojson") + instances = self.xform.instances.all().order_by("id") + self.assertEqual(response.status_code, 200) + self.assertEqual(self.xform.instances.count(), 2) + self.assertEqual(len(response.data["features"]), 2) + self.assertEqual(self.xform.geotrace_xpaths(), ["segment/blueline"]) + # test LineString geojson format + data = { + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": None, + "properties": {"id": instances[0].pk, "xform": self.xform.pk}, + }, + { + "type": "Feature", + "geometry": { + "type": "LineString", + "coordinates": [ + [36.809057, -1.269392], + [36.803303, -1.271966], + [36.805943, -1.268118], + [36.808822, -1.269405], + ], + }, + "properties": {"id": instances[1].pk, "xform": self.xform.pk}, + }, + ], + } + self.assertEqual(response.data, data) + + def test_empty_geoshapes_in_repeats(self): + # publish sample geoshape submissions + md = """ + | 
survey | + | | type | name | label | required | calculation | + | | begin repeat | segment | Waterway trace | | | + | | calculate | point_position | | | position(..)| + | | geoshape | blueline | GPS Coordinates | yes | | + | | end repeat | + """ + self.xform = self._publish_markdown( + md, self.user, self.project, id_string="geoshapes" + ) + # publish submissions + self._publish_submit_geoms_in_repeats("empty_geoshapes") + view = DataViewSet.as_view({"get": "list"}) + request = self.factory.get("/", **self.extra) + response = view(request, pk=self.xform.pk, format="geojson") + self.assertEqual(response.status_code, 200) + # get geojson from specific field + data_get = {"geo_field": "segment/blueline"} + request = self.factory.get("/", data=data_get, **self.extra) + response = view(request, pk=self.xform.pk, format="geojson") + instances = self.xform.instances.all().order_by("id") + self.assertEqual(response.status_code, 200) + self.assertEqual(self.xform.instances.count(), 2) + self.assertEqual(len(response.data["features"]), 2) + self.assertEqual(self.xform.polygon_xpaths(), ["segment/blueline"]) + # test Polygon geojson format + data = { + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": None, + "properties": {"id": instances[0].pk, "xform": self.xform.pk}, + }, + { + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [36.79198, -1.29728], + [36.785793, -1.298009], + [36.789744, -1.29961], + [36.790625, -1.300146], + [36.792107, -1.300897], + [36.79198, -1.29728], + ] + ], + }, + "properties": {"id": instances[1].pk, "xform": self.xform.pk}, + }, + ], + } + self.assertEqual(response.data, data) + def test_instances_with_geopoints(self): # publish sample geo submissions self._publish_submit_geojson() diff --git a/onadata/apps/main/tests/fixtures/geolocation/empty_geoshapes.csv b/onadata/apps/main/tests/fixtures/geolocation/empty_geoshapes.csv new file mode 100644 index 0000000000..482767bedc --- 
/dev/null +++ b/onadata/apps/main/tests/fixtures/geolocation/empty_geoshapes.csv @@ -0,0 +1,3 @@ +today,start,end,deviceid,segment[1]/point_position,segment[1]/blueline,segment[2]/point_position,segment[2]/blueline,meta/instanceID,_id,_uuid,_submission_time,_date_modified,_tags,_notes,_version,_duration,_submitted_by,_total_media,_media_count,_media_all_received,_xform_id +2023-09-15,2023-09-15T12:53:19.451+03:00,2023-09-15T12:55:10.386+03:00,enketo.ona.io:qrggmjketRepjz8x,1,n/a,n/a,n/a,uuid:cb2f1dae-47f2-4919-9fea-5ede2fda841c,122094074,cb2f1dae-47f2-4919-9fea-5ede2fda841c,2023-09-15T09:55:11,2023-09-15T09:55:11,,,202309150951,111.0,kipsigei,0,0,True,764655 +2023-09-15,2023-09-15T12:55:10.436+03:00,2023-09-15T12:55:37.212+03:00,enketo.ona.io:qrggmjketRepjz8x,1,-1.29728 36.79198 0 0;-1.298009 36.785793 0 0;-1.29961 36.789744 0 0;-1.300146 36.790625 0 0;-1.300897 36.792107 0 0;-1.29728 36.79198 0 0,2,-1.297391 36.789659 0 0;-1.298163 36.78296 0 0;-1.299751 36.7882 0 0;-1.297391 36.789659 0 0,uuid:6ac3e33c-d064-45e0-8597-880227e36435,122094094,6ac3e33c-d064-45e0-8597-880227e36435,2023-09-15T09:55:37,2023-09-15T09:55:37,,,202309150951,27.0,kipsigei,0,0,True,764655 diff --git a/onadata/apps/main/tests/fixtures/geolocation/empty_geotraces.csv b/onadata/apps/main/tests/fixtures/geolocation/empty_geotraces.csv new file mode 100644 index 0000000000..e9345fedda --- /dev/null +++ b/onadata/apps/main/tests/fixtures/geolocation/empty_geotraces.csv @@ -0,0 +1,3 @@ +today,start,end,deviceid,segment[1]/point_position,segment[1]/blueline,segment[2]/point_position,segment[2]/blueline,meta/instanceID,_id,_uuid,_submission_time,_date_modified,_tags,_notes,_version,_duration,_submitted_by,_total_media,_media_count,_media_all_received,_xform_id 
+2023-09-15,2023-09-15T12:39:32.676+03:00,2023-09-15T12:39:52.828+03:00,enketo.ona.io:qrggmjketRepjz8x,1,n/a,n/a,n/a,uuid:51558e36-de1c-49c1-bf66-199060b2655b,122093541,51558e36-de1c-49c1-bf66-199060b2655b,2023-09-15T09:39:53,2023-09-15T09:39:53,,,202309150939,20.0,kipsigei,0,0,True,764649 +2023-09-15,2023-09-15T12:39:52.858+03:00,2023-09-15T12:40:31.290+03:00,enketo.ona.io:qrggmjketRepjz8x,1,-1.269392 36.809057 0 0;-1.271966 36.803303 0 0;-1.268118 36.805943 0 0;-1.269405 36.808822 0 0,2,-1.25616 36.865203 0 0;-1.251569 36.86919 0 0;-1.255495 36.870234 0 0;-1.256203 36.8654 0 0,uuid:39c3ee3f-bab3-4fca-81ce-011c6f26eb98,122093563,39c3ee3f-bab3-4fca-81ce-011c6f26eb98,2023-09-15T09:40:32,2023-09-15T09:40:32,,,202309150939,39.0,kipsigei,0,0,True,764649 diff --git a/onadata/libs/utils/dict_tools.py b/onadata/libs/utils/dict_tools.py index 9a37cc931e..e5624337a2 100644 --- a/onadata/libs/utils/dict_tools.py +++ b/onadata/libs/utils/dict_tools.py @@ -11,6 +11,7 @@ def get_values_matching_key(doc, key): """ def _get_values(doc, key): + # pylint: disable=too-many-nested-blocks if doc is not None: if key in doc: yield doc[key] From c8c0ac94bdb1df8950e711365993beb7de6d0b3f Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Thu, 19 Oct 2023 16:08:00 +0300 Subject: [PATCH 090/270] bump version to v3.14.2 (#2491) --- CHANGES.rst | 10 ++++++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 136a8b2eb8..2dcd06d88a 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,16 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.14.2(2023-10-19) +------------------- + +- Gracefully handle empty geoshapes and geotraces in data + `PR #2489 ` + [@KipSigei] +- Save XML to JSON data in pre-save + `PR #2486 ` + [@kelvin-muchiri] + v3.14.1(2023-10-09) ------------------- - Rebuilding to pick the latest google export changes. 
diff --git a/onadata/__init__.py b/onadata/__init__.py index ee168c510b..0e55931c9c 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.14.1" +__version__ = "3.14.2" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index c3e3832111..8f496a9d9c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.14.1 +version = 3.14.2 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 47527f0a726c647acecb500bd220f1172759bf3b Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 19 Oct 2023 22:24:37 +0300 Subject: [PATCH 091/270] Allow only users with correct permissions to download media Signed-off-by: Kipchirchir Sigei --- onadata/apps/api/viewsets/media_viewset.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/onadata/apps/api/viewsets/media_viewset.py b/onadata/apps/api/viewsets/media_viewset.py index 422296c88f..11827bbb51 100644 --- a/onadata/apps/api/viewsets/media_viewset.py +++ b/onadata/apps/api/viewsets/media_viewset.py @@ -7,14 +7,14 @@ from django.conf import settings from django.http import Http404 from django.http import HttpResponseRedirect -from django.shortcuts import get_object_or_404 from rest_framework.response import Response from rest_framework import viewsets -from rest_framework.permissions import AllowAny from rest_framework.exceptions import ParseError +from onadata.apps.api.permissions import AttachmentObjectPermissions from onadata.apps.logger.models import Attachment +from onadata.libs import filters from onadata.libs.mixins.authenticate_header_mixin import AuthenticateHeaderMixin from onadata.libs.mixins.cache_control_mixin import CacheControlMixin from onadata.libs.mixins.etags_mixin import ETagsMixin @@ -30,14 +30,19 @@ class MediaViewSet( 
CacheControlMixin, ETagsMixin, BaseViewset, - viewsets.ViewSet, + viewsets.ReadOnlyModelViewSet, ): """A view to redirect to actual attachments url""" - permission_classes = (AllowAny,) + queryset = Attachment.objects.filter( + instance__deleted_at__isnull=True, deleted_at__isnull=True + ) + filter_backends = (filters.AttachmentFilter, filters.AttachmentTypeFilter) + lookup_field = "pk" + permission_classes = (AttachmentObjectPermissions,) # pylint: disable=invalid-name - def retrieve(self, request, pk=None): + def retrieve(self, request, *args, **kwargs): """ Redirect to final attachment url @@ -49,14 +54,14 @@ def retrieve(self, request, pk=None): return HttpResponseRedirect: redirects to final image url """ + pk = kwargs.get("pk") try: int(pk) except ValueError as exc: raise Http404() from exc else: filename = request.query_params.get("filename") - attachments = Attachment.objects.all() - obj = get_object_or_404(attachments, pk=pk) + obj = self.get_object() if obj.media_file.name != filename: raise Http404() From 59bfd6c7cfcb63db75a7a5ef229cc4303b3caaef Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 19 Oct 2023 22:25:09 +0300 Subject: [PATCH 092/270] Add tests Signed-off-by: Kipchirchir Sigei --- .../api/tests/viewsets/test_media_viewset.py | 126 ++++++++++++++++-- 1 file changed, 116 insertions(+), 10 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_media_viewset.py b/onadata/apps/api/tests/viewsets/test_media_viewset.py index 53fadac3c4..79969ce2b9 100644 --- a/onadata/apps/api/tests/viewsets/test_media_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_media_viewset.py @@ -5,6 +5,10 @@ from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet from onadata.apps.api.viewsets.media_viewset import MediaViewSet from onadata.apps.logger.models import Attachment +from onadata.apps.main.models.meta_data import MetaData +from onadata.apps.main.tests.test_base import TestBase +from 
onadata.libs.models.share_xform import ShareXForm +from onadata.libs.permissions import EditorRole def attachment_url(attachment, suffix=None): @@ -17,7 +21,7 @@ def attachment_url(attachment, suffix=None): return url -class TestMediaViewSet(TestAbstractViewSet): +class TestMediaViewSet(TestAbstractViewSet, TestBase): """ Test the /api/v1/files endpoint """ @@ -32,10 +36,55 @@ def test_retrieve_view(self): request = self.factory.get( "/", {"filename": self.attachment.media_file.name}, **self.extra ) - response = self.retrieve_view(request, self.attachment.pk) + response = self.retrieve_view(request, pk=self.attachment.pk) self.assertEqual(response.status_code, 200, response) self.assertEqual(type(response.content), bytes) + def test_anon_retrieve_view(self): + request = self.factory.get( + "/", {"filename": self.attachment.media_file.name} + ) + response = self.retrieve_view(request, pk=self.attachment.pk) + self.assertEqual(response.status_code, 404, response) + + def test_retrieve_no_perms(self): + # create new user + new_user = self._create_user("new_user", "new_user") + self.extra = {"HTTP_AUTHORIZATION": f"Token {new_user.auth_token.key}"} + request = self.factory.get( + "/", {"filename": self.attachment.media_file.name}, **self.extra + ) + self.assertTrue(new_user.is_authenticated) + response = self.retrieve_view(request, pk=self.attachment.pk) + # new user shouldn't have perms to download media + self.assertEqual(response.status_code, 404, response) + + def test_returned_media_is_based_on_form_perms(self): + request = self.factory.get( + "/", {"filename": self.attachment.media_file.name}, **self.extra + ) + response = self.retrieve_view(request, pk=self.attachment.pk) + self.assertEqual(response.status_code, 200, response) + self.assertEqual(type(response.content), bytes) + + # Enable meta perms + new_user = self._create_user("new_user", "new_user") + data_value = "editor-minor|dataentry-minor" + MetaData.xform_meta_permission(self.xform, 
data_value=data_value) + + instance = ShareXForm(self.xform, new_user.username, EditorRole.name) + instance.save() + auth_extra = { + 'HTTP_AUTHORIZATION': f'Token {new_user.auth_token.key}' + } + + # New user should not be able to view media for + # submissions which they did not submit + request = self.factory.get('/', {"filename": self.attachment.media_file.name}, + **auth_extra) + response = self.retrieve_view(request, pk=self.attachment.pk) + self.assertEqual(response.status_code, 404) + @patch("onadata.libs.utils.image_tools.get_storage_class") @patch("onadata.libs.utils.image_tools.boto3.client") def test_retrieve_view_from_s3(self, mock_presigned_urls, mock_get_storage_class): @@ -55,7 +104,7 @@ def test_retrieve_view_from_s3(self, mock_presigned_urls, mock_get_storage_class request = self.factory.get( "/", {"filename": self.attachment.media_file.name}, **self.extra ) - response = self.retrieve_view(request, self.attachment.pk) + response = self.retrieve_view(request, pk=self.attachment.pk) self.assertEqual(response.status_code, 302, response.url) self.assertEqual(response.url, expected_url) @@ -74,13 +123,70 @@ def test_retrieve_view_from_s3(self, mock_presigned_urls, mock_get_storage_class ExpiresIn=3600, ) + @patch("onadata.libs.utils.image_tools.get_storage_class") + @patch("onadata.libs.utils.image_tools.boto3.client") + def test_anon_retrieve_view_from_s3(self, mock_presigned_urls, mock_get_storage_class): + + expected_url = ( + "https://testing.s3.amazonaws.com/doe/attachments/" + "4_Media_file/media.png?" 
+ "response-content-disposition=attachment%3Bfilename%3media.png&" + "response-content-type=application%2Foctet-stream&" + "AWSAccessKeyId=AKIAJ3XYHHBIJDL7GY7A" + "&Signature=aGhiK%2BLFVeWm%2Fmg3S5zc05g8%3D&Expires=1615554960" + ) + mock_presigned_urls().generate_presigned_url = MagicMock( + return_value=expected_url + ) + mock_get_storage_class()().bucket.name = "onadata" + request = self.factory.get( + "/", {"filename": self.attachment.media_file.name} + ) + response = self.retrieve_view(request, pk=self.attachment.pk) + + self.assertEqual(response.status_code, 404, response) + + @patch("onadata.libs.utils.image_tools.get_storage_class") + @patch("onadata.libs.utils.image_tools.boto3.client") + def test_retrieve_view_from_s3_no_perms(self, mock_presigned_urls, mock_get_storage_class): + + expected_url = ( + "https://testing.s3.amazonaws.com/doe/attachments/" + "4_Media_file/media.png?" + "response-content-disposition=attachment%3Bfilename%3media.png&" + "response-content-type=application%2Foctet-stream&" + "AWSAccessKeyId=AKIAJ3XYHHBIJDL7GY7A" + "&Signature=aGhiK%2BLFVeWm%2Fmg3S5zc05g8%3D&Expires=1615554960" + ) + mock_presigned_urls().generate_presigned_url = MagicMock( + return_value=expected_url + ) + mock_get_storage_class()().bucket.name = "onadata" + request = self.factory.get( + "/", {"filename": self.attachment.media_file.name}, **self.extra + ) + response = self.retrieve_view(request, pk=self.attachment.pk) + # owner should be able to retrieve media + self.assertEqual(response.status_code, 302, response) + + # create new user + new_user = self._create_user("new_user", "new_user") + self.extra = {"HTTP_AUTHORIZATION": f"Token {new_user.auth_token.key}"} + + request = self.factory.get( + "/", {"filename": self.attachment.media_file.name}, **self.extra + ) + response = self.retrieve_view(request, pk=self.attachment.pk) + # new user shouldn't have perms to download media + self.assertEqual(response.status_code, 404, response) + def 
test_retrieve_view_with_suffix(self): request = self.factory.get( "/", {"filename": self.attachment.media_file.name, "suffix": "large"}, **self.extra, ) - response = self.retrieve_view(request, self.attachment.pk) + response = self.retrieve_view(request, pk=self.attachment.pk) self.assertEqual(response.status_code, 302) self.assertTrue(response["Location"], attachment_url(self.attachment)) @@ -92,7 +198,7 @@ def test_handle_image_exception(self, mock_image_url): {"filename": self.attachment.media_file.name, "suffix": "large"}, **self.extra, ) - response = self.retrieve_view(request, self.attachment.pk) + response = self.retrieve_view(request, pk=self.attachment.pk) self.assertEqual(response.status_code, 400) def test_retrieve_view_small(self): @@ -101,7 +207,7 @@ def test_retrieve_view_small(self): {"filename": self.attachment.media_file.name, "suffix": "small"}, **self.extra, ) - response = self.retrieve_view(request, self.attachment.pk) + response = self.retrieve_view(request, pk=self.attachment.pk) self.assertEqual(response.status_code, 302) self.assertTrue(response["Location"], attachment_url(self.attachment, "small")) @@ -111,7 +217,7 @@ def test_retrieve_view_invalid_suffix(self): {"filename": self.attachment.media_file.name, "suffix": "TK"}, **self.extra, ) - response = self.retrieve_view(request, self.attachment.pk) + response = self.retrieve_view(request, pk=self.attachment.pk) self.assertEqual(response.status_code, 404) def test_retrieve_view_invalid_pk(self): @@ -120,12 +226,12 @@ def test_retrieve_view_invalid_pk(self): {"filename": self.attachment.media_file.name, "suffix": "small"}, **self.extra, ) - response = self.retrieve_view(request, "INVALID") + response = self.retrieve_view(request, pk="INVALID") self.assertEqual(response.status_code, 404) def test_retrieve_view_no_filename_param(self): request = self.factory.get("/", **self.extra) - response = self.retrieve_view(request, self.attachment.pk) + response = self.retrieve_view(request, 
pk=self.attachment.pk) self.assertEqual(response.status_code, 404) def test_retrieve_small_png(self): @@ -160,6 +266,6 @@ def test_retrieve_small_png(self): {"filename": self.attachment.media_file.name, "suffix": "small"}, **self.extra, ) - response = self.retrieve_view(request, self.attachment.pk) + response = self.retrieve_view(request, pk=self.attachment.pk) self.assertEqual(response.status_code, 302) self.assertTrue(response["Location"], attachment_url(self.attachment, "small")) From a0aeec83d3e172e58fb3d3e527beb29d85cefe66 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Tue, 24 Oct 2023 15:29:25 +0300 Subject: [PATCH 093/270] Add missing docstrings Signed-off-by: Kipchirchir Sigei --- .../api/tests/viewsets/test_media_viewset.py | 37 +++++++++++-------- 1 file changed, 22 insertions(+), 15 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_media_viewset.py b/onadata/apps/api/tests/viewsets/test_media_viewset.py index 79969ce2b9..cc1b194be1 100644 --- a/onadata/apps/api/tests/viewsets/test_media_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_media_viewset.py @@ -25,6 +25,7 @@ class TestMediaViewSet(TestAbstractViewSet, TestBase): """ Test the /api/v1/files endpoint """ + def setUp(self): super(TestMediaViewSet, self).setUp() self.retrieve_view = MediaViewSet.as_view({"get": "retrieve"}) @@ -41,13 +42,15 @@ def test_retrieve_view(self): self.assertEqual(type(response.content), bytes) def test_anon_retrieve_view(self): - request = self.factory.get( - "/", {"filename": self.attachment.media_file.name} - ) + """Test that anonymous users shouldn't retrieve media""" + request = self.factory.get("/", {"filename": self.attachment.media_file.name}) response = self.retrieve_view(request, pk=self.attachment.pk) self.assertEqual(response.status_code, 404, response) def test_retrieve_no_perms(self): + """Test that users without permissions to retrieve media + shouldn't be able to retrieve media + """ # create new user new_user = 
self._create_user("new_user", "new_user") self.extra = {"HTTP_AUTHORIZATION": f"Token {new_user.auth_token.key}"} @@ -60,6 +63,7 @@ def test_retrieve_no_perms(self): self.assertEqual(response.status_code, 404, response) def test_returned_media_is_based_on_form_perms(self): + """Test that attachments are returned based on form meta permissions""" request = self.factory.get( "/", {"filename": self.attachment.media_file.name}, **self.extra ) @@ -74,14 +78,13 @@ def test_returned_media_is_based_on_form_perms(self): instance = ShareXForm(self.xform, new_user.username, EditorRole.name) instance.save() - auth_extra = { - 'HTTP_AUTHORIZATION': f'Token {new_user.auth_token.key}' - } + auth_extra = {"HTTP_AUTHORIZATION": f"Token {new_user.auth_token.key}"} # New user should not be able to view media for # submissions which they did not submit - request = self.factory.get('/', {"filename": self.attachment.media_file.name}, - **auth_extra) + request = self.factory.get( + "/", {"filename": self.attachment.media_file.name}, **auth_extra + ) response = self.retrieve_view(request, pk=self.attachment.pk) self.assertEqual(response.status_code, 404) @@ -125,8 +128,10 @@ def test_retrieve_view_from_s3(self, mock_presigned_urls, mock_get_storage_class @patch("onadata.libs.utils.image_tools.get_storage_class") @patch("onadata.libs.utils.image_tools.boto3.client") - def test_anon_retrieve_view_from_s3(self, mock_presigned_urls, mock_get_storage_class): - + def test_anon_retrieve_view_from_s3( + self, mock_presigned_urls, mock_get_storage_class + ): + """Test that anonymous user cannot retrieve media from s3""" expected_url = ( "https://testing.s3.amazonaws.com/doe/attachments/" "4_Media_file/media.png?" 
@@ -139,17 +144,19 @@ def test_anon_retrieve_view_from_s3(self, mock_presigned_urls, mock_get_storage_ return_value=expected_url ) mock_get_storage_class()().bucket.name = "onadata" - request = self.factory.get( - "/", {"filename": self.attachment.media_file.name} - ) + request = self.factory.get("/", {"filename": self.attachment.media_file.name}) response = self.retrieve_view(request, pk=self.attachment.pk) self.assertEqual(response.status_code, 404, response) @patch("onadata.libs.utils.image_tools.get_storage_class") @patch("onadata.libs.utils.image_tools.boto3.client") - def test_retrieve_view_from_s3_no_perms(self, mock_presigned_urls, mock_get_storage_class): - + def test_retrieve_view_from_s3_no_perms( + self, mock_presigned_urls, mock_get_storage_class + ): + """Test that authenticated user without correct perms + cannot retrieve media from s3 + """ expected_url = ( "https://testing.s3.amazonaws.com/doe/attachments/" "4_Media_file/media.png?" From df42af6538e6a6a754aaa9c35f6eedc7c83403a0 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Tue, 24 Oct 2023 15:55:44 +0300 Subject: [PATCH 094/270] Add test for when an attachment submission is soft-deleted Signed-off-by: Kipchirchir Sigei --- .../apps/api/tests/viewsets/test_media_viewset.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/onadata/apps/api/tests/viewsets/test_media_viewset.py b/onadata/apps/api/tests/viewsets/test_media_viewset.py index cc1b194be1..31a674c155 100644 --- a/onadata/apps/api/tests/viewsets/test_media_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_media_viewset.py @@ -1,5 +1,7 @@ import os import urllib + +from django.utils import timezone from mock import MagicMock, patch from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet @@ -41,6 +43,16 @@ def test_retrieve_view(self): self.assertEqual(response.status_code, 200, response) self.assertEqual(type(response.content), bytes) + # test when the submission is soft deleted + 
self.attachment.instance.deleted_at = timezone.now() + self.attachment.instance.save() + + request = self.factory.get( + "/", {"filename": self.attachment.media_file.name}, **self.extra + ) + response = self.retrieve_view(request, pk=self.attachment.pk) + self.assertEqual(response.status_code, 404, response) + def test_anon_retrieve_view(self): """Test that anonymous users shouldn't retrieve media""" request = self.factory.get("/", {"filename": self.attachment.media_file.name}) From 603a87756601615f5001631a777aa182a32509db Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Fri, 27 Oct 2023 17:37:16 +0300 Subject: [PATCH 095/270] Stream response on endpoint `//xformsManifest/` (#2493) * stream response on endpoint //xformsManifest/ stream response to avoid serialization using too much memory * wrap respsonse in brackets * separate streamed data using comma * specify content_type when streaming response * update stream response for / stream XML instead of JSON * use default queryset_iterator chunk_size * adjust failing tests * suppress lint error line-too-long * fix lint error fix pylint: redefined-builtin / Redefining built-in 'hash' * suppress lint rule line-too-long * remove unncessary kwarg * refactor code * add docstring * fix docstring typo * refactor code * address failing tests * add method docstring --- .../tests/viewsets/test_xform_list_viewset.py | 997 +++++++++--------- .../apps/api/viewsets/xform_list_viewset.py | 33 +- onadata/libs/renderers/renderers.py | 67 +- 3 files changed, 562 insertions(+), 535 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py index 723aaf12ef..e21b183469 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py @@ -9,12 +9,12 @@ from django_digest.test import DigestAuth from mock import patch -from onadata.apps.api.tests.viewsets.test_abstract_viewset 
import ( - TestAbstractViewSet -) +from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet from onadata.apps.api.viewsets.project_viewset import ProjectViewSet from onadata.apps.api.viewsets.xform_list_viewset import ( - PreviewXFormListViewSet, XFormListViewSet) + PreviewXFormListViewSet, + XFormListViewSet, +) from onadata.apps.main.models import MetaData from onadata.libs.permissions import DataEntryRole, ReadOnlyRole, OwnerRole @@ -26,78 +26,73 @@ def setUp(self): self._publish_xls_form_to_project() def test_get_xform_list(self): - request = self.factory.get('/') + request = self.factory.get("/") response = self.view(request) self.assertEqual(response.status_code, 401) - auth = DigestAuth('bob', 'bobbob') + auth = DigestAuth("bob", "bobbob") request.META.update(auth(request.META, response)) response = self.view(request) self.assertEqual(response.status_code, 200) - path = os.path.join( - os.path.dirname(__file__), '..', 'fixtures', 'formList.xml') - with open(path, encoding='utf-8') as f: + path = os.path.join(os.path.dirname(__file__), "..", "fixtures", "formList.xml") + with open(path, encoding="utf-8") as f: form_list_xml = f.read().strip() data = {"hash": self.xform.hash, "pk": self.xform.pk} - content = response.render().content.decode('utf-8') + content = response.render().content.decode("utf-8") self.assertEqual(content, form_list_xml % data) - self.assertTrue(response.has_header('X-OpenRosa-Version')) - self.assertTrue( - response.has_header('X-OpenRosa-Accept-Content-Length')) - self.assertTrue(response.has_header('Date')) - self.assertEqual(response['Content-Type'], - 'text/xml; charset=utf-8') + self.assertTrue(response.has_header("X-OpenRosa-Version")) + self.assertTrue(response.has_header("X-OpenRosa-Accept-Content-Length")) + self.assertTrue(response.has_header("Date")) + self.assertEqual(response["Content-Type"], "text/xml; charset=utf-8") def test_get_xform_list_xform_pk_filter_anon(self): """ Test formList xform_pk 
filter for anonymous user. """ - request = self.factory.get('/') - response = self.view(request, username=self.user.username, - xform_pk=self.xform.pk + 10000) + request = self.factory.get("/") + response = self.view( + request, username=self.user.username, xform_pk=self.xform.pk + 10000 + ) self.assertEqual(response.status_code, 404) # existing form is in result when xform_pk filter is in use. - response = self.view(request, username=self.user.username, - xform_pk=self.xform.pk) + response = self.view( + request, username=self.user.username, xform_pk=self.xform.pk + ) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 1) - path = os.path.join( - os.path.dirname(__file__), '..', 'fixtures', 'formList.xml') + path = os.path.join(os.path.dirname(__file__), "..", "fixtures", "formList.xml") - with open(path, encoding='utf-8') as f: + with open(path, encoding="utf-8") as f: form_list_xml = f.read().strip() data = {"hash": self.xform.hash, "pk": self.xform.pk} - content = response.render().content.decode('utf-8') + content = response.render().content.decode("utf-8") self.assertEqual(content, form_list_xml % data) - self.assertTrue(response.has_header('X-OpenRosa-Version')) - self.assertTrue( - response.has_header('X-OpenRosa-Accept-Content-Length')) - self.assertTrue(response.has_header('Date')) - self.assertEqual(response['Content-Type'], - 'text/xml; charset=utf-8') + self.assertTrue(response.has_header("X-OpenRosa-Version")) + self.assertTrue(response.has_header("X-OpenRosa-Accept-Content-Length")) + self.assertTrue(response.has_header("Date")) + self.assertEqual(response["Content-Type"], "text/xml; charset=utf-8") def test_get_xform_list_form_id_filter(self): """ Test formList formID filter """ # Test unrecognized formID - request = self.factory.get('/', {'formID': 'unrecognizedID'}) + request = self.factory.get("/", {"formID": "unrecognizedID"}) response = self.view(request, username=self.user.username) 
self.assertEqual(response.status_code, 200) self.assertEqual(response.data, []) # Test a valid formID - request = self.factory.get('/', {'formID': self.xform.id_string}) + request = self.factory.get("/", {"formID": self.xform.id_string}) response = self.view(request, username=self.user.username) self.assertEqual(response.status_code, 200) - path = os.path.join( - os.path.dirname(__file__), '..', 'fixtures', 'formList.xml') + path = os.path.join(os.path.dirname(__file__), "..", "fixtures", "formList.xml") - with open(path, encoding='utf-8') as f: + with open(path, encoding="utf-8") as f: form_list_xml = f.read().strip() data = {"hash": self.xform.hash, "pk": self.xform.pk} - content = response.render().content.decode('utf-8') + content = response.render().content.decode("utf-8") self.assertEqual(content, form_list_xml % data) def test_form_id_filter_for_non_require_auth_account(self): @@ -105,13 +100,18 @@ def test_form_id_filter_for_non_require_auth_account(self): Test formList formID filter for account that requires authentication """ # Bob submit forms - xls_path = os.path.join(settings.PROJECT_ROOT, "apps", "main", "tests", - "fixtures", "tutorial.xlsx") + xls_path = os.path.join( + settings.PROJECT_ROOT, "apps", "main", "tests", "fixtures", "tutorial.xlsx" + ) self._publish_xls_form_to_project(xlsform_path=xls_path) - xls_file_path = os.path.join(settings.PROJECT_ROOT, "apps", "logger", - "fixtures", - "external_choice_form_v1.xlsx") + xls_file_path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "logger", + "fixtures", + "external_choice_form_v1.xlsx", + ) self._publish_xls_form_to_project(xlsform_path=xls_file_path) # Set require auth to true for this user @@ -122,10 +122,12 @@ def test_form_id_filter_for_non_require_auth_account(self): self.xform.shared = False self.xform.save() request = self.factory.get( - f'/{self.user.username}/{self.xform.pk}/formList', - {'formID': self.xform.id_string}) + f"/{self.user.username}/{self.xform.pk}/formList", + 
{"formID": self.xform.id_string}, + ) response = self.view( - request, username=self.user.username, xform_pk=self.xform.pk) + request, username=self.user.username, xform_pk=self.xform.pk + ) self.assertEqual(response.status_code, 401) self.user.profile.require_auth = False @@ -136,29 +138,33 @@ def test_form_id_filter_for_non_require_auth_account(self): self.xform.save() alice_data = { - 'username': 'alice', - 'email': 'alice@localhost.com', - 'password1': 'alice', - 'password2': 'alice' + "username": "alice", + "email": "alice@localhost.com", + "password1": "alice", + "password2": "alice", } self._create_user_profile(alice_data) - auth = DigestAuth('alice', 'alice') + auth = DigestAuth("alice", "alice") request = self.factory.get( - f'/{self.user.username}/{self.xform.pk}/formList', - {'formID': self.xform.id_string}) + f"/{self.user.username}/{self.xform.pk}/formList", + {"formID": self.xform.id_string}, + ) request.META.update(auth(request.META, response)) response = self.view( - request, username=self.user.username, xform_pk=self.xform.pk) + request, username=self.user.username, xform_pk=self.xform.pk + ) self.assertEqual(response.status_code, 200) # ensure anonymous users still have access # to the xform with id self.xform.pk request = self.factory.get( - f'/{self.user.username}/{self.xform.pk}/formList', - {'formID': self.xform.id_string}) + f"/{self.user.username}/{self.xform.pk}/formList", + {"formID": self.xform.id_string}, + ) response = self.view( - request, username=self.user.username, xform_pk=self.xform.pk) + request, username=self.user.username, xform_pk=self.xform.pk + ) self.assertEqual(response.status_code, 200) def test_form_id_filter_for_require_auth_account(self): @@ -166,101 +172,109 @@ def test_form_id_filter_for_require_auth_account(self): Test formList formID filter for account that requires authentication """ # Bob submit forms - xls_path = os.path.join(settings.PROJECT_ROOT, "apps", "main", "tests", - "fixtures", "tutorial.xlsx") + 
xls_path = os.path.join( + settings.PROJECT_ROOT, "apps", "main", "tests", "fixtures", "tutorial.xlsx" + ) self._publish_xls_form_to_project(xlsform_path=xls_path) - xls_file_path = os.path.join(settings.PROJECT_ROOT, "apps", "logger", - "fixtures", - "external_choice_form_v1.xlsx") + xls_file_path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "logger", + "fixtures", + "external_choice_form_v1.xlsx", + ) self._publish_xls_form_to_project(xlsform_path=xls_file_path) # Set require auth to true self.user.profile.require_auth = True self.user.profile.save() - request = self.factory.get('/', {'formID': self.xform.id_string}) + request = self.factory.get("/", {"formID": self.xform.id_string}) response = self.view(request, username=self.user.username) self.assertEqual(response.status_code, 401) # Test for authenticated user but unrecognized formID - auth = DigestAuth('bob', 'bobbob') - request = self.factory.get('/', {'formID': 'unrecognizedID'}) + auth = DigestAuth("bob", "bobbob") + request = self.factory.get("/", {"formID": "unrecognizedID"}) request.META.update(auth(request.META, response)) response = self.view(request, username=self.user.username) self.assertEqual(response.status_code, 200) self.assertEqual(response.data, []) # Test for authenticated user and valid formID - request = self.factory.get('/', {'formID': self.xform.id_string}) + request = self.factory.get("/", {"formID": self.xform.id_string}) self.assertTrue(self.user.profile.require_auth) response = self.view(request, username=self.user.username) self.assertEqual(response.status_code, 401) - auth = DigestAuth('bob', 'bobbob') + auth = DigestAuth("bob", "bobbob") request.META.update(auth(request.META, response)) response = self.view(request, username=self.user.username) self.assertEqual(response.status_code, 200) path = os.path.join( - os.path.dirname(__file__), '..', 'fixtures', 'formList2.xml') + os.path.dirname(__file__), "..", "fixtures", "formList2.xml" + ) - with open(path, encoding='utf-8') 
as f: + with open(path, encoding="utf-8") as f: form_list = f.read().strip() - data = {"hash": self.xform.hash, "pk": self.xform.pk, - 'version': self.xform.version} - content = response.render().content.decode('utf-8') + data = { + "hash": self.xform.hash, + "pk": self.xform.pk, + "version": self.xform.version, + } + content = response.render().content.decode("utf-8") self.assertEqual(content, form_list % data) # Test for shared forms # Create user Alice alice_data = { - 'username': 'alice', - 'email': 'alice@localhost.com', - 'password1': 'alice', - 'password2': 'alice' + "username": "alice", + "email": "alice@localhost.com", + "password1": "alice", + "password2": "alice", } alice_profile = self._create_user_profile(alice_data) # check that she can authenticate successfully - request = self.factory.get('/') + request = self.factory.get("/") response = self.view(request) self.assertEqual(response.status_code, 401) - auth = DigestAuth('alice', 'alice') + auth = DigestAuth("alice", "alice") request.META.update(auth(request.META, response)) response = self.view(request) self.assertEqual(response.status_code, 200) - self.assertFalse( - ReadOnlyRole.user_has_role(alice_profile.user, self.project)) + self.assertFalse(ReadOnlyRole.user_has_role(alice_profile.user, self.project)) # share Bob's project with Alice - data = { - 'username': 'alice', - 'role': ReadOnlyRole.name - } - request = self.factory.post('/', data=data, **self.extra) - share_view = ProjectViewSet.as_view({'post': 'share'}) + data = {"username": "alice", "role": ReadOnlyRole.name} + request = self.factory.post("/", data=data, **self.extra) + share_view = ProjectViewSet.as_view({"post": "share"}) project_id = self.project.pk response = share_view(request, pk=project_id) self.assertEqual(response.status_code, 204) - self.assertTrue( - ReadOnlyRole.user_has_role(alice_profile.user, self.project)) + self.assertTrue(ReadOnlyRole.user_has_role(alice_profile.user, self.project)) - request = 
self.factory.get('/', {'formID': self.xform.id_string}) + request = self.factory.get("/", {"formID": self.xform.id_string}) response = self.view(request) self.assertEqual(response.status_code, 401) - auth = DigestAuth('alice', 'alice') + auth = DigestAuth("alice", "alice") request.META.update(auth(request.META, response)) - response = self.view(request, username='alice') + response = self.view(request, username="alice") self.assertEqual(response.status_code, 200) path = os.path.join( - os.path.dirname(__file__), '..', 'fixtures', 'formList2.xml') + os.path.dirname(__file__), "..", "fixtures", "formList2.xml" + ) - with open(path, encoding='utf-8') as f: + with open(path, encoding="utf-8") as f: form_list = f.read().strip() - data = {"hash": self.xform.hash, "pk": self.xform.pk, - "version": self.xform.version} - content = response.render().content.decode('utf-8') + data = { + "hash": self.xform.hash, + "pk": self.xform.pk, + "version": self.xform.version, + } + content = response.render().content.decode("utf-8") self.assertEqual(content, form_list % data) # Bob's profile @@ -268,39 +282,44 @@ def test_form_id_filter_for_require_auth_account(self): # Submit form as Alice self._login_user_and_profile(extra_post_data=alice_data) - self.assertEqual(self.user.username, 'alice') + self.assertEqual(self.user.username, "alice") path = os.path.join( - settings.PROJECT_ROOT, "apps", "main", "tests", "fixtures", - "good_eats_multilang", "good_eats_multilang.xlsx") + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "good_eats_multilang", + "good_eats_multilang.xlsx", + ) self._publish_xls_form_to_project(xlsform_path=path) - self.assertTrue(OwnerRole.user_has_role(alice_profile.user, - self.xform)) + self.assertTrue(OwnerRole.user_has_role(alice_profile.user, self.xform)) # Share Alice's form with Bob ReadOnlyRole.add(bob_profile, self.xform) self.assertTrue(ReadOnlyRole.user_has_role(bob_profile, self.xform)) # Get unrecognized formID as bob - request = 
self.factory.get('/', {'formID': 'unrecognizedID'}) + request = self.factory.get("/", {"formID": "unrecognizedID"}) response = self.view(request, username=bob_profile.username) self.assertEqual(response.status_code, 401) - auth = DigestAuth('bob', 'bobbob') + auth = DigestAuth("bob", "bobbob") request.META.update(auth(request.META, response)) response = self.view(request, username=bob_profile.username) self.assertEqual(response.status_code, 200) self.assertEqual(response.data, []) # Get Alice's form as Bob - request = self.factory.get('/', {'formID': 'good_eats_multilang'}) + request = self.factory.get("/", {"formID": "good_eats_multilang"}) response = self.view(request, username=bob_profile.username) self.assertEqual(response.status_code, 401) - auth = DigestAuth('bob', 'bobbob') + auth = DigestAuth("bob", "bobbob") request.META.update(auth(request.META, response)) response = self.view(request, username=bob_profile.username) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 1) - self.assertEqual(response.data[0]['formID'], 'good_eats_multilang') + self.assertEqual(response.data[0]["formID"], "good_eats_multilang") def test_get_xform_list_project_pk_filter(self): """ @@ -308,36 +327,33 @@ def test_get_xform_list_project_pk_filter(self): """ self.user.profile.require_auth = True self.user.profile.save() - request = self.factory.get('/') + request = self.factory.get("/") response = self.view(request, project_pk=self.project.pk) self.assertEqual(response.status_code, 401) - auth = DigestAuth('bob', 'bobbob') + auth = DigestAuth("bob", "bobbob") request.META.update(auth(request.META, response)) - request = self.factory.get('/') + request = self.factory.get("/") response = self.view(request, project_pk=self.project.pk) self.assertEqual(response.status_code, 401) - auth = DigestAuth('bob', 'bobbob') + auth = DigestAuth("bob", "bobbob") request.META.update(auth(request.META, response)) # existing form is in result when xform_pk filter is in 
use. response = self.view(request, project_pk=self.project.pk) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 1) - path = os.path.join( - os.path.dirname(__file__), '..', 'fixtures', 'formList.xml') + path = os.path.join(os.path.dirname(__file__), "..", "fixtures", "formList.xml") - with open(path, encoding='utf-8') as f: + with open(path, encoding="utf-8") as f: form_list_xml = f.read().strip() data = {"hash": self.xform.hash, "pk": self.xform.pk} - content = response.render().content.decode('utf-8') + content = response.render().content.decode("utf-8") self.assertEqual(content, form_list_xml % data) - self.assertTrue(response.has_header('X-OpenRosa-Version')) - self.assertTrue( - response.has_header('X-OpenRosa-Accept-Content-Length')) - self.assertTrue(response.has_header('Date')) - self.assertEqual(response['Content-Type'], - 'text/xml; charset=utf-8') + self.assertTrue(response.has_header("X-OpenRosa-Version")) + self.assertTrue(response.has_header("X-OpenRosa-Accept-Content-Length")) + self.assertTrue(response.has_header("Date")) + self.assertEqual(response["Content-Type"], "text/xml; charset=utf-8") def test_get_xform_list_xform_pk_filter(self): """ @@ -345,52 +361,58 @@ def test_get_xform_list_xform_pk_filter(self): """ self.user.profile.require_auth = True self.user.profile.save() - request = self.factory.get('/') - response = self.view(request, username=self.user.username, - xform_pk=self.xform.pk) + request = self.factory.get("/") + response = self.view( + request, username=self.user.username, xform_pk=self.xform.pk + ) self.assertEqual(response.status_code, 401) - auth = DigestAuth('bob', 'bobbob') + auth = DigestAuth("bob", "bobbob") request.META.update(auth(request.META, response)) - response = self.view(request, username=self.user.username, - xform_pk=self.xform.pk + 10000) + response = self.view( + request, username=self.user.username, xform_pk=self.xform.pk + 10000 + ) self.assertEqual(response.status_code, 404) - 
request = self.factory.get('/') - response = self.view(request, username=self.user.username, - xform_pk=self.xform.pk) + request = self.factory.get("/") + response = self.view( + request, username=self.user.username, xform_pk=self.xform.pk + ) self.assertEqual(response.status_code, 401) - auth = DigestAuth('bob', 'bobbob') + auth = DigestAuth("bob", "bobbob") request.META.update(auth(request.META, response)) # existing form is in result when xform_pk filter is in use. - response = self.view(request, username=self.user.username, - xform_pk=self.xform.pk) + response = self.view( + request, username=self.user.username, xform_pk=self.xform.pk + ) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 1) - path = os.path.join( - os.path.dirname(__file__), '..', 'fixtures', 'formList.xml') + path = os.path.join(os.path.dirname(__file__), "..", "fixtures", "formList.xml") - with open(path, encoding='utf-8') as f: + with open(path, encoding="utf-8") as f: form_list_xml = f.read().strip() data = {"hash": self.xform.hash, "pk": self.xform.pk} - content = response.render().content.decode('utf-8') + content = response.render().content.decode("utf-8") self.assertEqual(content, form_list_xml % data) - self.assertTrue(response.has_header('X-OpenRosa-Version')) - self.assertTrue( - response.has_header('X-OpenRosa-Accept-Content-Length')) - self.assertTrue(response.has_header('Date')) - self.assertEqual(response['Content-Type'], - 'text/xml; charset=utf-8') + self.assertTrue(response.has_header("X-OpenRosa-Version")) + self.assertTrue(response.has_header("X-OpenRosa-Accept-Content-Length")) + self.assertTrue(response.has_header("Date")) + self.assertEqual(response["Content-Type"], "text/xml; charset=utf-8") def test_get_xform_list_of_logged_in_user_with_username_param(self): # publish 2 forms as bob - xls_path = os.path.join(settings.PROJECT_ROOT, "apps", "main", "tests", - "fixtures", "tutorial.xlsx") + xls_path = os.path.join( + settings.PROJECT_ROOT, 
"apps", "main", "tests", "fixtures", "tutorial.xlsx" + ) self._publish_xls_form_to_project(xlsform_path=xls_path) - xls_file_path = os.path.join(settings.PROJECT_ROOT, "apps", "logger", - "fixtures", - "external_choice_form_v1.xlsx") + xls_file_path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "logger", + "fixtures", + "external_choice_form_v1.xlsx", + ) self._publish_xls_form_to_project(xlsform_path=xls_file_path) # change one of bob's forms to public @@ -402,82 +424,81 @@ def test_get_xform_list_of_logged_in_user_with_username_param(self): # check that bob still has 2 private forms self.assertEqual(self.user.xforms.filter(shared=False).count(), 2) - request = self.factory.get('/') + request = self.factory.get("/") response = self.view(request) self.assertEqual(response.status_code, 401) - auth = DigestAuth('bob', 'bobbob') + auth = DigestAuth("bob", "bobbob") request.META.update(auth(request.META, response)) - response = self.view(request, username='bob') + response = self.view(request, username="bob") # check that bob's request is succesful and it returns both public and # private forms that belong to bob self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 3) alice_data = { - 'username': 'alice', - 'email': 'alice@localhost.com', + "username": "alice", + "email": "alice@localhost.com", } self._login_user_and_profile(extra_post_data=alice_data) - request = self.factory.get('/') + request = self.factory.get("/") response = self.view(request) self.assertEqual(response.status_code, 401) - auth = DigestAuth('alice', 'bobbob') + auth = DigestAuth("alice", "bobbob") request.META.update(auth(request.META, response)) - response = self.view(request, username='bob') + response = self.view(request, username="bob") # check that alice's request is succesful and it returns public forms # owned by bob self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 1) - self.assertEqual(response.data[0].get('formID'), 
xform_id_string) + self.assertEqual(response.data[0].get("formID"), xform_id_string) def test_get_xform_list_with_malformed_cookie(self): - request = self.factory.get('/') + request = self.factory.get("/") response = self.view(request) self.assertEqual(response.status_code, 401) - request.COOKIES['__enketo'] = 'hello' + request.COOKIES["__enketo"] = "hello" response = self.view(request) self.assertEqual(response.status_code, 401) self.assertEqual( - response.data.get('detail'), - u'JWT DecodeError: Not enough segments') + response.data.get("detail"), "JWT DecodeError: Not enough segments" + ) - @patch('onadata.apps.api.viewsets.project_viewset.send_mail') + @patch("onadata.apps.api.viewsets.project_viewset.send_mail") def test_read_only_users_get_non_empty_formlist_using_preview_formlist( - self, mock_send_mail): + self, mock_send_mail + ): alice_data = { - 'username': 'alice', - 'email': 'alice@localhost.com', - 'password1': 'alice', - 'password2': 'alice' + "username": "alice", + "email": "alice@localhost.com", + "password1": "alice", + "password2": "alice", } alice_profile = self._create_user_profile(alice_data) - self.assertFalse( - ReadOnlyRole.user_has_role(alice_profile.user, self.project)) + self.assertFalse(ReadOnlyRole.user_has_role(alice_profile.user, self.project)) # share bob's project with alice data = { - 'username': 'alice', - 'role': ReadOnlyRole.name, - 'email_msg': 'I have shared the project with you' + "username": "alice", + "role": ReadOnlyRole.name, + "email_msg": "I have shared the project with you", } - request = self.factory.post('/', data=data, **self.extra) - share_view = ProjectViewSet.as_view({'post': 'share'}) + request = self.factory.post("/", data=data, **self.extra) + share_view = ProjectViewSet.as_view({"post": "share"}) projectid = self.project.pk response = share_view(request, pk=projectid) self.assertEqual(response.status_code, 204) self.assertTrue(mock_send_mail.called) - self.assertTrue( - 
ReadOnlyRole.user_has_role(alice_profile.user, self.project)) + self.assertTrue(ReadOnlyRole.user_has_role(alice_profile.user, self.project)) # check that she can authenticate successfully - request = self.factory.get('/') + request = self.factory.get("/") response = self.view(request) self.assertEqual(response.status_code, 401) - auth = DigestAuth('alice', 'alice') + auth = DigestAuth("alice", "alice") request.META.update(auth(request.META, response)) - response = self.view(request, username='bob') + response = self.view(request, username="bob") self.assertEqual(response.status_code, 200) # check that alice gets an empty response when requesting bob's # formlist @@ -486,250 +507,225 @@ def test_read_only_users_get_non_empty_formlist_using_preview_formlist( # set endpoint to preview formList self.view = PreviewXFormListViewSet.as_view({"get": "list"}) - request = self.factory.get('/') + request = self.factory.get("/") response = self.view(request) self.assertEqual(response.status_code, 401) self.assertNotEqual(response.data, []) - auth = DigestAuth('alice', 'alice') + auth = DigestAuth("alice", "alice") request.META.update(auth(request.META, response)) - response = self.view(request, username='bob') + response = self.view(request, username="bob") self.assertEqual(response.status_code, 200) # check that alice does NOT get an empty response when requesting bob's # formlist when using the preview formlist endpoint self.assertNotEqual(response.data, []) - @patch('onadata.apps.api.viewsets.project_viewset.send_mail') + @patch("onadata.apps.api.viewsets.project_viewset.send_mail") def test_get_xform_list_with_shared_forms(self, mock_send_mail): # create user alice alice_data = { - 'username': 'alice', - 'email': 'alice@localhost.com', - 'password1': 'alice', - 'password2': 'alice' + "username": "alice", + "email": "alice@localhost.com", + "password1": "alice", + "password2": "alice", } alice_profile = self._create_user_profile(alice_data) # check that she can 
authenticate successfully - request = self.factory.get('/') + request = self.factory.get("/") response = self.view(request) self.assertEqual(response.status_code, 401) - auth = DigestAuth('alice', 'alice') + auth = DigestAuth("alice", "alice") request.META.update(auth(request.META, response)) response = self.view(request) self.assertEqual(response.status_code, 200) - self.assertFalse( - ReadOnlyRole.user_has_role(alice_profile.user, self.project)) + self.assertFalse(ReadOnlyRole.user_has_role(alice_profile.user, self.project)) # share bob's project with her data = { - 'username': 'alice', - 'role': ReadOnlyRole.name, - 'email_msg': 'I have shared the project with you' + "username": "alice", + "role": ReadOnlyRole.name, + "email_msg": "I have shared the project with you", } - request = self.factory.post('/', data=data, **self.extra) - share_view = ProjectViewSet.as_view({'post': 'share'}) + request = self.factory.post("/", data=data, **self.extra) + share_view = ProjectViewSet.as_view({"post": "share"}) projectid = self.project.pk response = share_view(request, pk=projectid) self.assertEqual(response.status_code, 204) self.assertTrue(mock_send_mail.called) - self.assertTrue( - ReadOnlyRole.user_has_role(alice_profile.user, self.project)) + self.assertTrue(ReadOnlyRole.user_has_role(alice_profile.user, self.project)) - request = self.factory.get('/') + request = self.factory.get("/") response = self.view(request) self.assertEqual(response.status_code, 401) - auth = DigestAuth('alice', 'alice') + auth = DigestAuth("alice", "alice") request.META.update(auth(request.META, response)) - response = self.view(request, username='alice') + response = self.view(request, username="alice") self.assertEqual(response.status_code, 200) - path = os.path.join( - os.path.dirname(__file__), '..', 'fixtures', 'formList.xml') + path = os.path.join(os.path.dirname(__file__), "..", "fixtures", "formList.xml") - with open(path, encoding='utf-8') as f: + with open(path, encoding="utf-8") as 
f: form_list_xml = f.read().strip() data = {"hash": self.xform.hash, "pk": self.xform.pk} - content = response.render().content.decode('utf-8') + content = response.render().content.decode("utf-8") self.assertEqual(content, form_list_xml % data) - download_url = ('http://testserver/%s/' - 'forms/%s/form.xml') % ( - self.user.username, self.xform.id) + download_url = ( + "http://testserver/%s/" "forms/%s/form.xml" + ) % (self.user.username, self.xform.id) # check that bob's form exists in alice's formList self.assertTrue(download_url in content) - self.assertTrue(response.has_header('X-OpenRosa-Version')) - self.assertTrue( - response.has_header('X-OpenRosa-Accept-Content-Length')) - self.assertTrue(response.has_header('Date')) - self.assertEqual(response['Content-Type'], - 'text/xml; charset=utf-8') + self.assertTrue(response.has_header("X-OpenRosa-Version")) + self.assertTrue(response.has_header("X-OpenRosa-Accept-Content-Length")) + self.assertTrue(response.has_header("Date")) + self.assertEqual(response["Content-Type"], "text/xml; charset=utf-8") def test_get_xform_list_inactive_form(self): self.xform.downloadable = False self.xform.save() - request = self.factory.get('/') + request = self.factory.get("/") response = self.view(request) self.assertEqual(response.status_code, 401) - auth = DigestAuth('bob', 'bobbob') + auth = DigestAuth("bob", "bobbob") request.META.update(auth(request.META, response)) response = self.view(request) self.assertEqual(response.status_code, 200) - xml = u'\n' - content = response.render().content.decode('utf-8') + xml = '\n\n\n\n\n -screenshot.png%(hash)shttp://testserver/bob/xformsMedia/%(xform)s/%(pk)s.png""" # noqa + manifest_xml = """screenshot.png%(hash)shttp://testserver/bob/xformsMedia/%(xform)s/%(pk)s.png""" # noqa data = { "hash": self.metadata.hash, "pk": self.metadata.pk, - "xform": self.xform.pk + "xform": self.xform.pk, } - content = response.render().content.decode('utf-8').strip() + content = "".join( + 
[i.decode("utf-8").strip() for i in response.streaming_content] + ) self.assertEqual(content, manifest_xml % data) - self.assertTrue(response.has_header('X-OpenRosa-Version')) - self.assertTrue( - response.has_header('X-OpenRosa-Accept-Content-Length')) - self.assertTrue(response.has_header('Date')) - self.assertEqual(response['Content-Type'], 'text/xml; charset=utf-8') + self.assertTrue(response.has_header("X-OpenRosa-Version")) + self.assertTrue(response.has_header("X-OpenRosa-Accept-Content-Length")) + self.assertTrue(response.has_header("Date")) + self.assertEqual(response["Content-Type"], "text/xml; charset=utf-8") def test_retrieve_xform_manifest_anonymous_user(self): self._load_metadata(self.xform) self.view = XFormListViewSet.as_view({"get": "manifest"}) - request = self.factory.get('/') + request = self.factory.get("/") response = self.view(request, pk=self.xform.pk) self.assertEqual(response.status_code, 401) - response = self.view( - request, pk=self.xform.pk, username=self.user.username) + response = self.view(request, pk=self.xform.pk, username=self.user.username) self.assertEqual(response.status_code, 200) - manifest_xml = """ -screenshot.png%(hash)shttp://testserver/bob/xformsMedia/%(xform)s/%(pk)s.png""" # noqa + manifest_xml = """screenshot.png%(hash)shttp://testserver/bob/xformsMedia/%(xform)s/%(pk)s.png""" # noqa data = { "hash": self.metadata.hash, "pk": self.metadata.pk, - "xform": self.xform.pk + "xform": self.xform.pk, } - content = response.render().content.decode('utf-8').strip() + content = "".join( + [i.decode("utf-8").strip() for i in response.streaming_content] + ) self.assertEqual(content, manifest_xml % data) - self.assertTrue(response.has_header('X-OpenRosa-Version')) - self.assertTrue( - response.has_header('X-OpenRosa-Accept-Content-Length')) - self.assertTrue(response.has_header('Date')) - self.assertEqual(response['Content-Type'], 'text/xml; charset=utf-8') + self.assertTrue(response.has_header("X-OpenRosa-Version")) + 
self.assertTrue(response.has_header("X-OpenRosa-Accept-Content-Length")) + self.assertTrue(response.has_header("Date")) + self.assertEqual(response["Content-Type"], "text/xml; charset=utf-8") def test_retrieve_xform_manifest_anonymous_user_require_auth(self): self.user.profile.require_auth = True self.user.profile.save() self._load_metadata(self.xform) self.view = XFormListViewSet.as_view({"get": "manifest"}) - request = self.factory.get('/') + request = self.factory.get("/") response = self.view(request, pk=self.xform.pk) self.assertEqual(response.status_code, 401) - response = self.view( - request, pk=self.xform.pk, username=self.user.username) + response = self.view(request, pk=self.xform.pk, username=self.user.username) self.assertEqual(response.status_code, 401) def test_retrieve_xform_media(self): self._load_metadata(self.xform) - self.view = XFormListViewSet.as_view( - { - "get": "media", - "head": "media" - } - ) - request = self.factory.head('/') + self.view = XFormListViewSet.as_view({"get": "media", "head": "media"}) + request = self.factory.head("/") response = self.view( - request, pk=self.xform.pk, metadata=self.metadata.pk, format='png') - auth = DigestAuth('bob', 'bobbob') - request = self.factory.get('/') + request, pk=self.xform.pk, metadata=self.metadata.pk, format="png" + ) + auth = DigestAuth("bob", "bobbob") + request = self.factory.get("/") request.META.update(auth(request.META, response)) response = self.view( - request, pk=self.xform.pk, metadata=self.metadata.pk, format='png') + request, pk=self.xform.pk, metadata=self.metadata.pk, format="png" + ) self.assertEqual(response.status_code, 200) def test_retrieve_xform_media_anonymous_user(self): self._load_metadata(self.xform) self.view = XFormListViewSet.as_view({"get": "media"}) - request = self.factory.get('/') + request = self.factory.get("/") response = self.view( - request, pk=self.xform.pk, metadata=self.metadata.pk, format='png') + request, pk=self.xform.pk, metadata=self.metadata.pk, 
format="png" + ) self.assertEqual(response.status_code, 401) response = self.view( @@ -835,7 +822,8 @@ def test_retrieve_xform_media_anonymous_user(self): pk=self.xform.pk, username=self.user.username, metadata=self.metadata.pk, - format='png') + format="png", + ) self.assertEqual(response.status_code, 200) def test_retrieve_xform_media_anonymous_user_require_auth(self): @@ -843,100 +831,103 @@ def test_retrieve_xform_media_anonymous_user_require_auth(self): self.user.profile.save() self._load_metadata(self.xform) self.view = XFormListViewSet.as_view({"get": "media"}) - request = self.factory.get('/') + request = self.factory.get("/") response = self.view( - request, pk=self.xform.pk, metadata=self.metadata.pk, format='png') + request, pk=self.xform.pk, metadata=self.metadata.pk, format="png" + ) self.assertEqual(response.status_code, 401) def test_retrieve_xform_media_linked_xform(self): - data_type = 'media' - data_value = 'xform {} transportation'.format(self.xform.pk) + data_type = "media" + data_value = "xform {} transportation".format(self.xform.pk) self._add_form_metadata(self.xform, data_type, data_value) self._make_submissions() self.xform.refresh_from_db() - self.view = XFormListViewSet.as_view( - { - "get": "manifest", - "head": "manifest" - } - ) - request = self.factory.head('/') + self.view = XFormListViewSet.as_view({"get": "manifest", "head": "manifest"}) + request = self.factory.head("/") response = self.view(request, pk=self.xform.pk) - auth = DigestAuth('bob', 'bobbob') - request = self.factory.get('/') + auth = DigestAuth("bob", "bobbob") + request = self.factory.get("/") request.META.update(auth(request.META, response)) response = self.view(request, pk=self.xform.pk) self.assertEqual(response.status_code, 200) - self.assertEqual(response.data[0]['filename'], 'transportation.csv') - self.assertEqual( - response.data[0]['hash'], 'md5:%s' % md5( - self.xform.last_submission_time.isoformat().encode( - 'utf-8')).hexdigest()) - - self.view = 
XFormListViewSet.as_view( - { - "get": "media", - "head": "media" - } + + manifest_xml = """{}{}{}""" # noqa + expected_downloadUrl = f"http://testserver/bob/xformsMedia/{self.xform.pk}/{self.metadata.pk}.csv?group_delimiter=.&repeat_index_tags=_,_" # noqa + expected_hash = md5( + self.xform.last_submission_time.isoformat().encode("utf-8") + ).hexdigest() + expected_content = manifest_xml.format( + "transportation.csv", f"md5:{expected_hash}", expected_downloadUrl ) - request = self.factory.get('/') + content = "".join( + [i.decode("utf-8").strip() for i in response.streaming_content] + ) + self.assertEqual(content, expected_content) + + self.view = XFormListViewSet.as_view({"get": "media", "head": "media"}) + request = self.factory.get("/") response = self.view( - request, pk=self.xform.pk, metadata=self.metadata.pk, format='csv') + request, pk=self.xform.pk, metadata=self.metadata.pk, format="csv" + ) self.assertEqual(response.status_code, 401) - request = self.factory.head('/') + request = self.factory.head("/") response = self.view( - request, pk=self.xform.pk, metadata=self.metadata.pk, format='csv') - auth = DigestAuth('bob', 'bobbob') - request = self.factory.get('/') + request, pk=self.xform.pk, metadata=self.metadata.pk, format="csv" + ) + auth = DigestAuth("bob", "bobbob") + request = self.factory.get("/") request.META.update(auth(request.META, response)) response = self.view( - request, pk=self.xform.pk, metadata=self.metadata.pk, format='csv') + request, pk=self.xform.pk, metadata=self.metadata.pk, format="csv" + ) self.assertEqual(response.status_code, 200) - self.assertEqual(response['Content-Disposition'], - 'attachment; filename=transportation.csv') + self.assertEqual( + response["Content-Disposition"], "attachment; filename=transportation.csv" + ) def test_retrieve_xform_manifest_linked_form(self): # for linked forms check if manifest media download url for csv # has a group_delimiter param - data_type = 'media' - data_value = 'xform {} 
transportation'.format(self.xform.pk) - media = self._add_form_metadata(self.xform, data_type, data_value) - - self.view = XFormListViewSet.as_view( - { - "get": "manifest", - "head": "manifest" - } - ) + data_type = "media" + data_value = "xform {} transportation".format(self.xform.pk) + self._add_form_metadata(self.xform, data_type, data_value) + + self.view = XFormListViewSet.as_view({"get": "manifest", "head": "manifest"}) # sign in bob - request = self.factory.head('/') + request = self.factory.head("/") auth_response = self.view(request, pk=self.xform.pk) - auth = DigestAuth('bob', 'bobbob') + auth = DigestAuth("bob", "bobbob") # set up bob's request - request = self.factory.get('/xformsManifest') + request = self.factory.get("/xformsManifest") request.META.update(auth(request.META, auth_response)) # make request - response = self.view(request, pk=self.xform.pk, format='csv') + response = self.view(request, pk=self.xform.pk) # test - manifest_media_url = '{}{}'.format( - media.data['media_url'], - '?group_delimiter=.&repeat_index_tags=_,_') - download_url = response.data[0]['downloadUrl'] - self.assertEqual(manifest_media_url, download_url) + manifest_xml = """{}{}{}""" # noqa + expected_downloadUrl = f"http://testserver/bob/xformsMedia/{self.xform.pk}/{self.metadata.pk}.csv?group_delimiter=.&repeat_index_tags=_,_" # noqa + expected_content = manifest_xml.format( + "transportation.csv", "md5:", expected_downloadUrl + ) + content = "".join( + [i.decode("utf-8").strip() for i in response.streaming_content] + ) + self.assertEqual(content, expected_content) - url = '/bob/xformsMedia/{}/{}.csv?group_delimiter=.'\ - .format(self.xform.pk, self.metadata.pk) - username = 'bob' - password = 'bob' + url = "/bob/xformsMedia/{}/{}.csv?group_delimiter=.".format( + self.xform.pk, self.metadata.pk + ) + username = "bob" + password = "bob" client = DigestClient() - client.set_authorization(username, password, 'Digest') + client.set_authorization(username, password, "Digest") 
req = client.get(url) self.assertEqual(req.status_code, 200) @@ -949,28 +940,18 @@ def test_retrieve_xform_manifest_linked_form(self): self.assertEqual(req.status_code, 401) def test_xform_3gp_media_type(self): - for fmt in ["png", "jpg", "mp3", "3gp", "wav"]: url = reverse( - 'xform-media', - kwargs={ - 'username': 'bob', - 'pk': 1, - 'metadata': '1234', - 'format': fmt - }) + "xform-media", + kwargs={"username": "bob", "pk": 1, "metadata": "1234", "format": fmt}, + ) - self.assertEqual(url, '/bob/xformsMedia/1/1234.{}'.format(fmt)) + self.assertEqual(url, "/bob/xformsMedia/1/1234.{}".format(fmt)) def test_get_xform_anonymous_user_xform_require_auth(self): - self.view = XFormListViewSet.as_view( - { - "get": "retrieve", - "head": "retrieve" - } - ) - request = self.factory.head('/') - response = self.view(request, username='bob', pk=self.xform.pk) + self.view = XFormListViewSet.as_view({"get": "retrieve", "head": "retrieve"}) + request = self.factory.head("/") + response = self.view(request, username="bob", pk=self.xform.pk) # no authentication prompted self.assertEqual(response.status_code, 200) @@ -980,15 +961,15 @@ def test_get_xform_anonymous_user_xform_require_auth(self): self.xform.require_auth = True self.xform.save() - request = self.factory.head('/') - response = self.view(request, username='bob', pk=self.xform.pk) + request = self.factory.head("/") + response = self.view(request, username="bob", pk=self.xform.pk) # authentication prompted self.assertEqual(response.status_code, 401) - auth = DigestAuth('bob', 'bobbob') - request = self.factory.get('/') + auth = DigestAuth("bob", "bobbob") + request = self.factory.get("/") request.META.update(auth(request.META, response)) - response = self.view(request, username='bob', pk=self.xform.pk) + response = self.view(request, username="bob", pk=self.xform.pk) # success with authentication self.assertEqual(response.status_code, 200) @@ -997,24 +978,24 @@ def test_manifest_url_tag_is_not_present_when_no_media(self): 
Test that content does not contain a manifest url only when the form has no media """ - request = self.factory.get('/') + request = self.factory.get("/") view = XFormListViewSet.as_view({"get": "list"}) - response = view(request, username='bob', pk=self.xform.pk) + response = view(request, username="bob", pk=self.xform.pk) self.assertEqual(response.status_code, 200) - content = response.render().content.decode('utf-8') - manifest_url = ('') + content = response.render().content.decode("utf-8") + manifest_url = "" self.assertNotIn(manifest_url, content) # Add media and test that manifest url exists - data_type = 'media' - data_value = 'xform {} transportation'.format(self.xform.pk) + data_type = "media" + data_value = "xform {} transportation".format(self.xform.pk) self._add_form_metadata(self.xform, data_type, data_value) - response = view(request, username='bob', pk=self.xform.pk) + response = view(request, username="bob", pk=self.xform.pk) self.assertEqual(response.status_code, 200) - content = response.render().content.decode('utf-8') + content = response.render().content.decode("utf-8") manifest_url = ( - 'http://testserver/%s/xformsManifest' - '/%s') % (self.user.username, self.xform.id) + "http://testserver/%s/xformsManifest" "/%s" + ) % (self.user.username, self.xform.id) self.assertTrue(manifest_url in content) def test_form_list_case_insensitivity(self): @@ -1022,22 +1003,17 @@ def test_form_list_case_insensitivity(self): Test that the /formList endpoint utilizes the username in a case insensitive manner """ - request = self.factory.get( - f'/{self.user.username}/formList', **self.extra) + request = self.factory.get(f"/{self.user.username}/formList", **self.extra) response = self.view(request, username=self.user.username) self.assertEqual(response.status_code, 200) - request = self.factory.get( - f'/{self.user.username.capitalize()}', **self.extra) - response_2 = self.view( - request, username=self.user.username.capitalize()) + request = 
self.factory.get(f"/{self.user.username.capitalize()}", **self.extra) + response_2 = self.view(request, username=self.user.username.capitalize()) self.assertEqual(response.status_code, 200) self.assertEqual(response.data, response_2.data) - request = self.factory.get( - f'/{self.user.username.swapcase()}', **self.extra) - response_3 = self.view( - request, username=self.user.username.capitalize()) + request = self.factory.get(f"/{self.user.username.swapcase()}", **self.extra) + response_3 = self.view(request, username=self.user.username.capitalize()) self.assertEqual(response.status_code, 200) self.assertEqual(response.data, response_3.data) @@ -1047,13 +1023,18 @@ def test_retrieve_form_using_pk(self): a form properly """ # Bob submit forms - xls_path = os.path.join(settings.PROJECT_ROOT, "apps", "main", "tests", - "fixtures", "tutorial.xlsx") + xls_path = os.path.join( + settings.PROJECT_ROOT, "apps", "main", "tests", "fixtures", "tutorial.xlsx" + ) self._publish_xls_form_to_project(xlsform_path=xls_path) - xls_file_path = os.path.join(settings.PROJECT_ROOT, "apps", "logger", - "fixtures", - "external_choice_form_v1.xlsx") + xls_file_path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "logger", + "fixtures", + "external_choice_form_v1.xlsx", + ) self._publish_xls_form_to_project(xlsform_path=xls_file_path) # Set require auth to true for form owner @@ -1063,10 +1044,8 @@ def test_retrieve_form_using_pk(self): # Ensure that anonymous users do not have access to private forms self.xform.shared = False self.xform.save() - request = self.factory.get( - f'/enketo/{self.xform.pk}/formList') - response = self.view( - request, xform_pk=self.xform.pk) + request = self.factory.get(f"/enketo/{self.xform.pk}/formList") + response = self.view(request, xform_pk=self.xform.pk) self.assertEqual(response.status_code, 401) # Set require auth to false for form owner @@ -1079,34 +1058,28 @@ def test_retrieve_form_using_pk(self): # Ensure logged in users have access to the form 
alice_data = { - 'username': 'alice', - 'email': 'alice@localhost.com', - 'password1': 'alice', - 'password2': 'alice' + "username": "alice", + "email": "alice@localhost.com", + "password1": "alice", + "password2": "alice", } self._create_user_profile(alice_data) - auth = DigestAuth('alice', 'alice') - request = self.factory.get( - f'/enketo/{self.xform.pk}/formList') + auth = DigestAuth("alice", "alice") + request = self.factory.get(f"/enketo/{self.xform.pk}/formList") request.META.update(auth(request.META, response)) - response = self.view( - request, xform_pk=self.xform.pk) + response = self.view(request, xform_pk=self.xform.pk) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 1) - self.assertEqual( - response.data[0]['formID'], self.xform.id_string) + self.assertEqual(response.data[0]["formID"], self.xform.id_string) # Ensure anonymous users have access to public forms # when require_auth is False - request = self.factory.get( - f'/enketo/{self.xform.pk}/formList') - response = self.view( - request, xform_pk=self.xform.pk) + request = self.factory.get(f"/enketo/{self.xform.pk}/formList") + response = self.view(request, xform_pk=self.xform.pk) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 1) - self.assertEqual( - response.data[0]['formID'], self.xform.id_string) + self.assertEqual(response.data[0]["formID"], self.xform.id_string) def test_retrieve_form_in_forms_formlist_endpoint(self): """ @@ -1114,13 +1087,18 @@ def test_retrieve_form_in_forms_formlist_endpoint(self): a form properly """ # Bob submit forms - xls_path = os.path.join(settings.PROJECT_ROOT, "apps", "main", "tests", - "fixtures", "tutorial.xlsx") + xls_path = os.path.join( + settings.PROJECT_ROOT, "apps", "main", "tests", "fixtures", "tutorial.xlsx" + ) self._publish_xls_form_to_project(xlsform_path=xls_path) - xls_file_path = os.path.join(settings.PROJECT_ROOT, "apps", "logger", - "fixtures", - "external_choice_form_v1.xlsx") + 
xls_file_path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "logger", + "fixtures", + "external_choice_form_v1.xlsx", + ) self._publish_xls_form_to_project(xlsform_path=xls_file_path) # Set require auth to true for form owner @@ -1130,10 +1108,8 @@ def test_retrieve_form_in_forms_formlist_endpoint(self): # Ensure that anonymous users do not have access to private forms self.xform.shared = False self.xform.save() - request = self.factory.get( - f'/forms/{self.xform.pk}/formList') - response = self.view( - request, xform_pk=self.xform.pk) + request = self.factory.get(f"/forms/{self.xform.pk}/formList") + response = self.view(request, xform_pk=self.xform.pk) self.assertEqual(response.status_code, 401) # Set require auth to false for form owner @@ -1146,34 +1122,28 @@ def test_retrieve_form_in_forms_formlist_endpoint(self): # Ensure logged in users have access to the form alice_data = { - 'username': 'alice', - 'email': 'alice@localhost.com', - 'password1': 'alice', - 'password2': 'alice' + "username": "alice", + "email": "alice@localhost.com", + "password1": "alice", + "password2": "alice", } self._create_user_profile(alice_data) - auth = DigestAuth('alice', 'alice') - request = self.factory.get( - f'/forms/{self.xform.pk}/formList') + auth = DigestAuth("alice", "alice") + request = self.factory.get(f"/forms/{self.xform.pk}/formList") request.META.update(auth(request.META, response)) - response = self.view( - request, xform_pk=self.xform.pk) + response = self.view(request, xform_pk=self.xform.pk) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 1) - self.assertEqual( - response.data[0]['formID'], self.xform.id_string) + self.assertEqual(response.data[0]["formID"], self.xform.id_string) # Ensure anonymous users have access to public forms # when require_auth is False - request = self.factory.get( - f'/forms/{self.xform.pk}/formList') - response = self.view( - request, xform_pk=self.xform.pk) + request = 
self.factory.get(f"/forms/{self.xform.pk}/formList") + response = self.view(request, xform_pk=self.xform.pk) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 1) - self.assertEqual( - response.data[0]['formID'], self.xform.id_string) + self.assertEqual(response.data[0]["formID"], self.xform.id_string) def test_retrieve_forms_in_project(self): """ @@ -1181,13 +1151,18 @@ def test_retrieve_forms_in_project(self): retrieve forms in a project properly """ # Bob submit forms - xls_path = os.path.join(settings.PROJECT_ROOT, "apps", "main", "tests", - "fixtures", "tutorial.xlsx") + xls_path = os.path.join( + settings.PROJECT_ROOT, "apps", "main", "tests", "fixtures", "tutorial.xlsx" + ) self._publish_xls_form_to_project(xlsform_path=xls_path) - xls_file_path = os.path.join(settings.PROJECT_ROOT, "apps", "logger", - "fixtures", - "external_choice_form_v1.xlsx") + xls_file_path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "logger", + "fixtures", + "external_choice_form_v1.xlsx", + ) self._publish_xls_form_to_project(xlsform_path=xls_file_path) # Set require auth to true for form owner @@ -1197,10 +1172,8 @@ def test_retrieve_forms_in_project(self): # Ensure that anonymous users do not have access to private forms self.xform.shared = False self.xform.save() - request = self.factory.get( - f'/projects/{self.project.pk}/formList') - response = self.view( - request, project_pk=self.project.pk) + request = self.factory.get(f"/projects/{self.project.pk}/formList") + response = self.view(request, project_pk=self.project.pk) self.assertEqual(response.status_code, 401) # Set require auth to false for form owner @@ -1211,21 +1184,17 @@ def test_retrieve_forms_in_project(self): self.xform.shared = True self.xform.save() # check that logged in user (bob) has access to forms - auth = DigestAuth('bob', 'bobbob') - request = self.factory.get( - f'/projects/{self.project.pk}/formList') + auth = DigestAuth("bob", "bobbob") + request = 
self.factory.get(f"/projects/{self.project.pk}/formList") request.META.update(auth(request.META, response)) - response = self.view( - request, project_pk=self.project.pk) + response = self.view(request, project_pk=self.project.pk) self.assertEqual(response.status_code, 200) # check number of forms returned in project are 3 self.assertEqual(len(response.data), 3) # Ensure anonymous users have access to public forms # when require_auth is False - request = self.factory.get( - f'/projects/{self.project.pk}/formList') - response = self.view( - request, project_pk=self.project.pk) + request = self.factory.get(f"/projects/{self.project.pk}/formList") + response = self.view(request, project_pk=self.project.pk) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 3) diff --git a/onadata/apps/api/viewsets/xform_list_viewset.py b/onadata/apps/api/viewsets/xform_list_viewset.py index 3fe92295de..318d2d2c4d 100644 --- a/onadata/apps/api/viewsets/xform_list_viewset.py +++ b/onadata/apps/api/viewsets/xform_list_viewset.py @@ -3,7 +3,7 @@ OpenRosa Form List API - https://docs.getodk.org/openrosa-form-list/ """ from django.conf import settings -from django.http import Http404 +from django.http import Http404, StreamingHttpResponse from django.shortcuts import get_object_or_404 from django.views.decorators.cache import never_cache @@ -77,11 +77,24 @@ def get_object(self): return obj - def get_renderers(self): - if self.action and self.action == "manifest": - return [XFormManifestRenderer()] + def get_serializer_class(self): + """Return the class to use for the serializer""" + if self.action == "manifest": + return XFormManifestSerializer - return super().get_renderers() + return super().get_serializer_class() + + def get_serializer(self, *args, **kwargs): + """ + Return the serializer instance that should be used for validating and + deserializing input, and for serializing output. 
+ """ + if self.action == "manifest": + kwargs.setdefault("context", self.get_serializer_context()) + kwargs["context"][GROUP_DELIMETER_TAG] = ExportBuilder.GROUP_DELIMITER_DOT + kwargs["context"][REPEAT_INDEX_TAGS] = "_,_" + + return super().get_serializer(*args, **kwargs) def filter_queryset(self, queryset): username = self.kwargs.get("username") @@ -164,13 +177,11 @@ def manifest(self, request, *args, **kwargs): object_list = MetaData.objects.filter( data_type="media", object_id=self.object.pk ) - context = self.get_serializer_context() - context[GROUP_DELIMETER_TAG] = ExportBuilder.GROUP_DELIMITER_DOT - context[REPEAT_INDEX_TAGS] = "_,_" - serializer = XFormManifestSerializer(object_list, many=True, context=context) - return Response( - serializer.data, headers=get_openrosa_headers(request, location=False) + return StreamingHttpResponse( + XFormManifestRenderer().stream_data(object_list, self.get_serializer), + content_type="text/xml; charset=utf-8", + headers=get_openrosa_headers(request, location=False), ) @action(methods=["GET", "HEAD"], detail=True) diff --git a/onadata/libs/renderers/renderers.py b/onadata/libs/renderers/renderers.py index 46fb86a932..4410db62c2 100644 --- a/onadata/libs/renderers/renderers.py +++ b/onadata/libs/renderers/renderers.py @@ -316,8 +316,63 @@ def _to_xml(self, xml, data): xml.characters(smart_str(data)) +class StreamRendererMixin: + """Mixin class for renderers that support stream responses""" + + def _get_current_buffer_data(self): + if hasattr(self, "stream"): + ret = self.stream.getvalue() + self.stream.truncate(0) + self.stream.seek(0) + return ret + return None + + def stream_data(self, data, serializer): + """Returns a streaming response.""" + if data is None: + yield "" + + # pylint: disable=attribute-defined-outside-init + self.stream = StringIO() + xml = SimplerXMLGenerator(self.stream, self.charset) + xml.startDocument() + yield self._get_current_buffer_data() + xml.startElement(self.root_node, {"xmlns": self.xmlns}) 
+ yield self._get_current_buffer_data() + data = iter(data) + + try: + out = next(data) + except StopIteration: + out = None + + while out: + try: + next_item = next(data) + out = serializer(out).data + out, attributes = _pop_xml_attributes(out) + xml.startElement(self.element_node, attributes) + self._to_xml(xml, out) + xml.endElement(self.element_node) + yield self._get_current_buffer_data() + out = next_item + except StopIteration: + out = serializer(out).data + out, attributes = _pop_xml_attributes(out) + xml.startElement(self.element_node, attributes) + self._to_xml(xml, out) + xml.endElement(self.element_node) + yield self._get_current_buffer_data() + break + + xml.endElement(self.root_node) + yield self._get_current_buffer_data() + xml.endDocument() + yield self._get_current_buffer_data() + + # pylint: disable=too-few-public-methods -class XFormManifestRenderer(XFormListRenderer): +class XFormManifestRenderer(XFormListRenderer, StreamRendererMixin): """ XFormManifestRenderer - render XFormManifest XML. 
""" @@ -346,7 +401,7 @@ def render(self, data, accepted_media_type=None, renderer_context=None): return super().render(data, accepted_media_type, renderer_context) -class InstanceXMLRenderer(XMLRenderer): +class InstanceXMLRenderer(XMLRenderer, StreamRendererMixin): """ InstanceXMLRenderer - Renders Instance XML """ @@ -354,14 +409,6 @@ class InstanceXMLRenderer(XMLRenderer): root_tag_name = "submission-batch" item_tag_name = "submission-item" - def _get_current_buffer_data(self): - if hasattr(self, "stream"): - ret = self.stream.getvalue() - self.stream.truncate(0) - self.stream.seek(0) - return ret - return None - def stream_data(self, data, serializer): """Returns a streaming response.""" if data is None: From b56e5927b0f7f8dea19203eaadca2621ade1d0c9 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Mon, 30 Oct 2023 09:54:29 +0300 Subject: [PATCH 096/270] bump version to 3.14.3 (#2495) --- CHANGES.rst | 10 +++++++++- onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 2dcd06d88a..dc52891a2e 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,9 +3,17 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` -v3.14.2(2023-10-19) +v3.14.3(2023-10-30) ------------------- +- Stream response on endpoint //xformsManifest/ + `PR #2493 ` + [@kelvin-muchiri] +- Allow only authenticated users with correct permissions to download media + `PR #2492 ` + [@KipSigei] +v3.14.2(2023-10-19) +------------------- - Gracefully handle empty geoshapes and geotraces in data `PR #2489 ` [@KipSigei] diff --git a/onadata/__init__.py b/onadata/__init__.py index 0e55931c9c..1f42f0543d 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.14.2" +__version__ = "3.14.3" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index 8f496a9d9c..454fdc32cb 
100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.14.2 +version = 3.14.3 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 538a82192e483b369d0ffffdbf129d000e9e6028 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Mon, 6 Nov 2023 10:53:34 +0300 Subject: [PATCH 097/270] Update ona-oidc package to version 1.0.3 (#2501) * update ona-oidc package to version 1.0.3 * upgrade pillow * upgrade ona-oidc in dev requirements --- requirements/base.in | 2 +- requirements/base.pip | 9 +++++---- requirements/dev.pip | 4 ++-- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/requirements/base.in b/requirements/base.in index f1259347cd..3307409f37 100644 --- a/requirements/base.in +++ b/requirements/base.in @@ -7,5 +7,5 @@ -e git+https://github.com/onaio/django-multidb-router.git@f711368180d58eef87eda54fadfd5f8355623d52#egg=django-multidb-router -e git+https://github.com/onaio/floip-py.git@3c980eb184069ae7c3c9136b18441978237cd41d#egg=pyfloip -e git+https://github.com/onaio/python-json2xlsclient.git@62b4645f7b4f2684421a13ce98da0331a9dd66a0#egg=python-json2xlsclient --e git+https://github.com/onaio/ona-oidc.git@v1.0.1#egg=ona-oidc +-e git+https://github.com/onaio/ona-oidc.git@v1.0.3#egg=ona-oidc -e git+https://github.com/onaio/savreaderwriter.git@fix-pep-440-issues#egg=savreaderwriter diff --git a/requirements/base.pip b/requirements/base.pip index e64d03011d..5c061657ae 100644 --- a/requirements/base.pip +++ b/requirements/base.pip @@ -1,6 +1,6 @@ # -# This file is autogenerated by pip-compile with python 3.10 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: # # pip-compile --output-file=requirements/base.pip requirements/base.in # @@ -8,7 +8,8 @@ # via -r requirements/base.in -e 
git+https://github.com/onaio/django-multidb-router.git@f711368180d58eef87eda54fadfd5f8355623d52#egg=django-multidb-router # via -r requirements/base.in --e git+https://github.com/onaio/ona-oidc.git@v1.0.1#egg=ona-oidc +-e git+https://github.com/onaio/ona-oidc.git@v1.0.3#egg=ona-oidc + # via -r requirements/base.in # via -r requirements/base.in -e git+https://github.com/onaio/floip-py.git@3c980eb184069ae7c3c9136b18441978237cd41d#egg=pyfloip # via -r requirements/base.in @@ -258,7 +259,7 @@ packaging==23.1 # via sphinx paho-mqtt==1.6.1 # via onadata -pillow==10.0.0 +pillow==10.0.1 # via # elaphe3 # onadata diff --git a/requirements/dev.pip b/requirements/dev.pip index 23ad4f12c6..c693d1c922 100644 --- a/requirements/dev.pip +++ b/requirements/dev.pip @@ -8,7 +8,7 @@ # via -r requirements/base.in -e git+https://github.com/onaio/django-multidb-router.git@f711368180d58eef87eda54fadfd5f8355623d52#egg=django-multidb-router # via -r requirements/base.in --e git+https://github.com/onaio/ona-oidc.git@v1.0.1#egg=ona-oidc +-e git+https://github.com/onaio/ona-oidc.git@v1.0.3#egg=ona-oidc # via -r requirements/base.in -e git+https://github.com/onaio/floip-py.git@3c980eb184069ae7c3c9136b18441978237cd41d#egg=pyfloip # via -r requirements/base.in @@ -313,7 +313,7 @@ pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pillow==10.0.0 +pillow==10.0.1 # via # elaphe3 # onadata From bd8bd7af016e00ce0b7d68ecd76ba69d7a978e8c Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 2 Nov 2023 11:38:38 +0300 Subject: [PATCH 098/270] Add xform id to attachment URL to improve meta permissions check Signed-off-by: Kipchirchir Sigei --- onadata/apps/logger/models/instance.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index fe32914da2..5220360d1a 100644 --- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -91,10 +91,11 @@ def 
get_attachment_url(attachment, suffix=None): """ Returns the attachment URL for a given suffix """ + xform = attachment.instance.xform kwargs = {"pk": attachment.pk} url = ( f"{reverse('files-detail', kwargs=kwargs)}" - f"?filename={attachment.media_file.name}" + f"?filename={attachment.media_file.name}&xform={xform.pk}" ) if suffix: url += f"&suffix={suffix}" From ba60d8c566eb87f917697fecd5d292c8dc776e79 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Fri, 3 Nov 2023 00:03:15 +0300 Subject: [PATCH 099/270] Update _xform_filter to handle attachments Signed-off-by: Kipchirchir Sigei --- onadata/apps/logger/models/instance.py | 3 +-- onadata/libs/filters.py | 30 +++++++++++++++++++------- 2 files changed, 23 insertions(+), 10 deletions(-) diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index 5220360d1a..fe32914da2 100644 --- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -91,11 +91,10 @@ def get_attachment_url(attachment, suffix=None): """ Returns the attachment URL for a given suffix """ - xform = attachment.instance.xform kwargs = {"pk": attachment.pk} url = ( f"{reverse('files-detail', kwargs=kwargs)}" - f"?filename={attachment.media_file.name}&xform={xform.pk}" + f"?filename={attachment.media_file.name}" ) if suffix: url += f"&suffix={suffix}" diff --git a/onadata/libs/filters.py b/onadata/libs/filters.py index 6734688d6e..9d19b2e286 100644 --- a/onadata/libs/filters.py +++ b/onadata/libs/filters.py @@ -17,7 +17,14 @@ from rest_framework_guardian.filters import ObjectPermissionsFilter from onadata.apps.api.models import OrganizationProfile, Team -from onadata.apps.logger.models import Instance, Project, XForm, DataView, MergedXForm +from onadata.apps.logger.models import ( + Instance, + Project, + XForm, + DataView, + MergedXForm, + Attachment, +) from onadata.apps.api.viewsets.dataview_viewset import get_filter_kwargs from onadata.apps.viewer.models import Export from 
onadata.libs.permissions import exclude_items_from_queryset_using_xform_meta_perms @@ -147,8 +154,8 @@ def filter_queryset(self, request, queryset, view): filtered_queryset = super().filter_queryset(request, queryset, view) org_users = set( - [group.team.organization for group in request.user.groups.all()] + - [o.user for o in filtered_queryset] + [group.team.organization for group in request.user.groups.all()] + + [o.user for o in filtered_queryset] ) return queryset.model.objects.filter(user__in=org_users, user__is_active=True) @@ -330,6 +337,8 @@ def _xform_filter(self, request, view, keyword): xform = request.query_params.get("xform") dataview = request.query_params.get("dataview") merged_xform = request.query_params.get("merged_xform") + filename = request.query_params.get("filename") + public_forms = XForm.objects.none() dataview_kwargs = {} if dataview: @@ -355,6 +364,13 @@ def _xform_filter(self, request, view, keyword): self.xform = get_object_or_404(XForm, pk=xform) xform_qs = XForm.objects.filter(pk=self.xform.pk) public_forms = XForm.objects.filter(pk=self.xform.pk, shared_data=True) + elif filename: + attachment_id = view.kwargs.get("pk") + attachment = get_object_or_404(Attachment, pk=attachment_id) + xform = attachment.instance.xform.pk + self.xform = get_object_or_404(XForm, pk=xform) + xform_qs = XForm.objects.filter(pk=self.xform.pk) + public_forms = XForm.objects.filter(pk=self.xform.pk, shared_data=True) else: xform_qs = XForm.objects.all() xform_qs = xform_qs.filter(deleted_at=None) @@ -515,7 +531,6 @@ class AttachmentFilter(XFormPermissionFilterMixin, ObjectPermissionsFilter): """Attachment filter.""" def filter_queryset(self, request, queryset, view): - queryset = self._xform_filter_queryset( request, queryset, view, "instance__xform" ) @@ -697,10 +712,9 @@ def filter_queryset(self, request, queryset, view): public_xform_id = _public_xform_id_or_none(view.kwargs.get("pk")) if public_xform_id: form_exports = 
queryset.filter(xform_id=public_xform_id) - current_user_form_exports = ( - form_exports.filter(*has_submitted_by_key) - .filter(options__query___submitted_by=request.user.username) - ) + current_user_form_exports = form_exports.filter( + *has_submitted_by_key + ).filter(options__query___submitted_by=request.user.username) other_form_exports = form_exports.exclude(*has_submitted_by_key) return current_user_form_exports | other_form_exports From 58f743761961e2bcf5e4b35f248c834eb960ee3f Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Mon, 6 Nov 2023 12:47:15 +0300 Subject: [PATCH 100/270] cleanup Signed-off-by: Kipchirchir Sigei --- onadata/libs/filters.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/onadata/libs/filters.py b/onadata/libs/filters.py index 9d19b2e286..e210ad658f 100644 --- a/onadata/libs/filters.py +++ b/onadata/libs/filters.py @@ -367,8 +367,7 @@ def _xform_filter(self, request, view, keyword): elif filename: attachment_id = view.kwargs.get("pk") attachment = get_object_or_404(Attachment, pk=attachment_id) - xform = attachment.instance.xform.pk - self.xform = get_object_or_404(XForm, pk=xform) + self.xform = attachment.instance.xform xform_qs = XForm.objects.filter(pk=self.xform.pk) public_forms = XForm.objects.filter(pk=self.xform.pk, shared_data=True) else: From 2c1e632cde241f69cf3496102eb0f39d7331d2bc Mon Sep 17 00:00:00 2001 From: Eric Musyoka Date: Mon, 30 Oct 2023 12:29:16 +0300 Subject: [PATCH 101/270] Create user auth token if doesn't exist on geting user --- onadata/libs/serializers/user_profile_serializer.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/onadata/libs/serializers/user_profile_serializer.py b/onadata/libs/serializers/user_profile_serializer.py index b8660e9bae..713071d8ce 100644 --- a/onadata/libs/serializers/user_profile_serializer.py +++ b/onadata/libs/serializers/user_profile_serializer.py @@ -9,7 +9,7 @@ from django.contrib.auth import get_user_model from 
django.contrib.auth.password_validation import validate_password from django.core.cache import cache -from django.core.exceptions import ValidationError +from django.core.exceptions import ValidationError, ObjectDoesNotExist from django.db import IntegrityError, transaction from django.db.models.query import QuerySet from django.utils import timezone @@ -19,6 +19,7 @@ from django_digest.backend.db import update_partial_digests from registration.models import RegistrationProfile from rest_framework import serializers +from rest_framework.authtoken.models import Token from onadata.apps.api.models.temp_token import TempToken from onadata.apps.api.tools import get_host_domain @@ -438,7 +439,11 @@ def get_api_token(self, obj): """ Returns user's API Token. """ - return obj.user.auth_token.key + try: + token = obj.user.auth_token + except ObjectDoesNotExist: + token = Token.objects.create(user=obj.user) + return token.key def get_temp_token(self, obj): """ From cf381cf9dd0190998ee429aaf96c2503ed7852d3 Mon Sep 17 00:00:00 2001 From: Eric Musyoka Date: Mon, 30 Oct 2023 12:33:39 +0300 Subject: [PATCH 102/270] Test if user auth token is created if doesn't exist --- .../tests/viewsets/test_connect_viewset.py | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/onadata/apps/api/tests/viewsets/test_connect_viewset.py b/onadata/apps/api/tests/viewsets/test_connect_viewset.py index ab4911f442..bc2569d7c1 100644 --- a/onadata/apps/api/tests/viewsets/test_connect_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_connect_viewset.py @@ -105,6 +105,28 @@ def test_get_profile(self): self.assertEqual(response.status_code, 200) self.assertEqual(dict(response.data), self.data) + def test_get_profile_user_no_auth_token(self): + # delete auth token + token = Token.objects.get(user=self.user) + old_token_key = token.key + token.delete() + + view = ConnectViewSet.as_view( + {"get": "list"}, + authentication_classes=( + DigestAuthentication, + 
authentication.BasicAuthentication, + ), + ) + request = self.factory.get("/") + auth = BasicAuth("bob", "bobbob") + request.META.update(auth(request.META)) + request.session = self.client.session + + response = view(request) + self.assertEqual(response.status_code, 200) + self.assertNotEqual(response.data.get("api_token"), old_token_key) + def test_using_valid_temp_token(self): request = self.factory.get("/", **self.extra) request.session = self.client.session From 37e8ae841f5c0fb77e37740ba78e4a769a6140cb Mon Sep 17 00:00:00 2001 From: Eric Musyoka Date: Wed, 1 Nov 2023 12:45:12 +0300 Subject: [PATCH 103/270] Add doc string to test new function --- onadata/apps/api/tests/viewsets/test_connect_viewset.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/onadata/apps/api/tests/viewsets/test_connect_viewset.py b/onadata/apps/api/tests/viewsets/test_connect_viewset.py index bc2569d7c1..b2b309b755 100644 --- a/onadata/apps/api/tests/viewsets/test_connect_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_connect_viewset.py @@ -106,6 +106,10 @@ def test_get_profile(self): self.assertEqual(dict(response.data), self.data) def test_get_profile_user_no_auth_token(self): + """ + Test new user auth token is generated when user doesn't have an + existing one + """ # delete auth token token = Token.objects.get(user=self.user) old_token_key = token.key From a2baf9aaaf27b139bf08261708321706ecd3e091 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Mon, 6 Nov 2023 21:05:15 +0300 Subject: [PATCH 104/270] Tag release v3.14.4 Signed-off-by: Kipchirchir Sigei --- CHANGES.rst | 12 ++++++++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index dc52891a2e..65053d1c52 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,18 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.14.4(2023-11-07) +------------------- +- Bump oidc version to v1.0.3 + `PR #2501 ` 
+ [@kelvin-muchiri] +- Improve performance for attachments xform meta permissions check + `PR #2499 ` + [@KipSigei] +- Create user auth token if doesn't exist upon retrieval + `PR #2496 ` + [@ciremusyoka] + v3.14.3(2023-10-30) ------------------- - Stream response on endpoint //xformsManifest/ diff --git a/onadata/__init__.py b/onadata/__init__.py index 1f42f0543d..f4743f83e3 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.14.3" +__version__ = "3.14.4" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index 454fdc32cb..c12b7fb51e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.14.3 +version = 3.14.4 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 3329c5f937415b60f7965954ea7b8ca2c6b161e3 Mon Sep 17 00:00:00 2001 From: apiyo Date: Thu, 9 Nov 2023 08:28:25 +0300 Subject: [PATCH 105/270] Soft delete xform from legacy UI Don't call xform.delete() method which soft deletes --- .../api/tests/viewsets/test_xform_viewset.py | 36 +++++++++++++++++++ onadata/apps/logger/views.py | 5 ++- 2 files changed, 38 insertions(+), 3 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_xform_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_viewset.py index 3345631f4b..ae55a4d1bc 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_viewset.py @@ -59,6 +59,7 @@ from onadata.apps.logger.models import Attachment, Instance, Project, XForm from onadata.apps.logger.models.xform_version import XFormVersion from onadata.apps.logger.xform_instance_parser import XLSFormError +from onadata.apps.logger.views import delete_xform from onadata.apps.main.models import MetaData from onadata.apps.messaging.constants import FORM_UPDATED, XFORM from 
onadata.apps.viewer.models import Export @@ -4351,6 +4352,41 @@ def test_delete_xform_also_deletes_linked_dataviews(self): self.assertIsNotNone(self.data_view.deleted_at) self.assertIn("-deleted-at-", self.data_view.name) + def test_delete_xform_endpoint(self): + """ + Tests that the delete_xform view soft deletes xforms + """ + # publish form and make submissions + xlsform_path = os.path.join( + settings.PROJECT_ROOT, "libs", "tests", "utils", "fixtures", "tutorial.xlsx" + ) + self._publish_xls_form_to_project(xlsform_path=xlsform_path) + for x in range(1, 9): + path = os.path.join( + settings.PROJECT_ROOT, + "libs", + "tests", + "utils", + "fixtures", + "tutorial", + "instances", + "uuid{}".format(x), + "submission.xml", + ) + self._make_submission(path) + + # Make request to delete + request = self.factory.post("/", **self.extra) + request.user = self.xform.user + response = delete_xform( + request, username=self.xform.user.username, id_string=self.xform.id_string + ) + + self.assertEqual(response.status_code, 302) + self.assertEqual(response.content, b"") + self.xform.refresh_from_db() + self.assertIsNotNone(self.xform.deleted_at) + def test_multitple_enketo_urls(self): with HTTMock(enketo_mock): self._publish_xls_form_to_project() diff --git a/onadata/apps/logger/views.py b/onadata/apps/logger/views.py index d837be284e..21f75b2d69 100644 --- a/onadata/apps/logger/views.py +++ b/onadata/apps/logger/views.py @@ -46,7 +46,6 @@ PublishXForm, inject_instanceid, publish_form, - remove_xform, response_with_mimetype_and_name, safe_create_instance, ) @@ -504,8 +503,8 @@ def delete_xform(request, username, id_string): {"user__username__iexact": username, "id_string__iexact": id_string} ) - # delete xform and submissions - remove_xform(xform) + # Delete xform + xform.soft_delete(user=request.user) audit = {} audit_log( From f2869fb6cfdaa9402cbe638c8c36d8735fa49d7e Mon Sep 17 00:00:00 2001 From: apiyo Date: Thu, 9 Nov 2023 10:38:16 +0300 Subject: [PATCH 106/270] Remove 
remove_xform fn: it's nolonger required --- onadata/libs/utils/logger_tools.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/onadata/libs/utils/logger_tools.py b/onadata/libs/utils/logger_tools.py index 0f0b5a10d3..818bf1be52 100644 --- a/onadata/libs/utils/logger_tools.py +++ b/onadata/libs/utils/logger_tools.py @@ -947,12 +947,6 @@ def inject_instanceid(xml_str, uuid): return xml_str -def remove_xform(xform): - """Deletes an XForm ``xform``.""" - # delete xform, and all related models - xform.delete() - - class PublishXForm: "A class to publish an XML XForm file." From 9b356a1aa3647764cb1d4c24a945f2e6dc19cd78 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Thu, 16 Nov 2023 14:17:54 +0300 Subject: [PATCH 107/270] Upgrade Django to version 3.2.23 (#2507) * upgrade django update patch to 3.2.23 * upload Trivy results to security tab on pull request * do not exit when trivy scan fails * revert to only run Trivy Slack summary on push --- .github/workflows/ci.yml | 16 +++++++--------- requirements/base.pip | 2 +- requirements/dev.pip | 2 +- setup.cfg | 2 +- 4 files changed, 10 insertions(+), 12 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6c34ea55d0..39b85c7495 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -46,8 +46,7 @@ jobs: pip install -r requirements/azure.pip - name: Install linting tools - run: - pip install prospector==1.7.7 pylint==2.14.5 + run: pip install prospector==1.7.7 pylint==2.14.5 - name: Run Prospector run: prospector -X -s veryhigh onadata @@ -200,9 +199,8 @@ jobs: image-ref: onaio/onadata:${{ github.head_ref || github.base_ref || env.version }} format: sarif ignore-unfixed: true - severity: 'CRITICAL,HIGH' - exit-code: '1' - output: 'trivy_results.sarif' + severity: "CRITICAL,HIGH" + output: "trivy_results.sarif" - name: Run Trivy vulnerability scanner uses: aquasecurity/trivy-action@master @@ -211,13 +209,13 @@ jobs: image-ref: onaio/onadata:${{ github.head_ref || 
github.base_ref || env.version }} format: sarif ignore-unfixed: true - output: 'trivy_results.sarif' + output: "trivy_results.sarif" - name: Upload vulnerability scan results uses: github/codeql-action/upload-sarif@v2 - if: github.event_name == 'push' + if: github.event_name == 'push' || github.event_name == 'pull_request' with: - sarif_file: 'trivy_results.sarif' + sarif_file: "trivy_results.sarif" - name: Run Trivy vulnerability for Slack summary uses: aquasecurity/trivy-action@master @@ -226,7 +224,7 @@ jobs: image-ref: onaio/onadata:${{ github.head_ref || github.base_ref || env.version }} format: json ignore-unfixed: true - output: 'trivy_results.json' + output: "trivy_results.json" - name: Create summary of trivy issues if: github.event_name == 'push' diff --git a/requirements/base.pip b/requirements/base.pip index 5c061657ae..32a36f9663 100644 --- a/requirements/base.pip +++ b/requirements/base.pip @@ -99,7 +99,7 @@ deprecated==1.2.14 # onadata dict2xml==1.7.3 # via onadata -django==3.2.20 +django==3.2.23 # via # django-activity-stream # django-cors-headers diff --git a/requirements/dev.pip b/requirements/dev.pip index c693d1c922..9f3489f777 100644 --- a/requirements/dev.pip +++ b/requirements/dev.pip @@ -110,7 +110,7 @@ dict2xml==1.7.3 # via onadata dill==0.3.7 # via pylint -django==3.2.20 +django==3.2.23 # via # django-activity-stream # django-cors-headers diff --git a/setup.cfg b/setup.cfg index c12b7fb51e..d44ebc9d71 100644 --- a/setup.cfg +++ b/setup.cfg @@ -27,7 +27,7 @@ tests_require = mock requests-mock install_requires = - Django>=3.2.20,<4 + Django>=3.2.23,<4 django-guardian django-registration-redux django-templated-email From 7fc8444254be224c47f9645023f84072261a819b Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Fri, 17 Nov 2023 09:35:23 +0300 Subject: [PATCH 108/270] Custom project invitation template (#2506) * add support for custom project invitation email, subject * add username to project invitation email data * add type hints * add test 
case * use html for project invitation email message * enhance project invitation email template * handle project invitation email invited_by null --- .../libs/templates/projects/invitation.txt | 11 -- .../projects/invitation_message.html | 8 ++ onadata/libs/tests/utils/test_email.py | 112 ++++++++++++++++-- onadata/libs/utils/email.py | 53 +++++++-- 4 files changed, 150 insertions(+), 34 deletions(-) delete mode 100644 onadata/libs/templates/projects/invitation.txt create mode 100644 onadata/libs/templates/projects/invitation_message.html diff --git a/onadata/libs/templates/projects/invitation.txt b/onadata/libs/templates/projects/invitation.txt deleted file mode 100644 index fb812e8017..0000000000 --- a/onadata/libs/templates/projects/invitation.txt +++ /dev/null @@ -1,11 +0,0 @@ -{% load i18n %}{% blocktranslate %} -Hello, - -You have been added to {{project_name}} by a project admin allowing you to begin data collection. - -To begin using {{deployment_name}}, please create an account first by clicking the link below: -{{invitation_url}} - -Thanks, -The Team at {{ deployment_name }} -{% endblocktranslate %} \ No newline at end of file diff --git a/onadata/libs/templates/projects/invitation_message.html b/onadata/libs/templates/projects/invitation_message.html new file mode 100644 index 0000000000..11fd55d683 --- /dev/null +++ b/onadata/libs/templates/projects/invitation_message.html @@ -0,0 +1,8 @@ +{% load i18n %}{% blocktranslate %} +

Hello,

+

You have been added to {{project_name}} by a project admin allowing you to begin data collection.

+

To begin using {{deployment_name}}, please create an account first by clicking the link below:

+

{{invitation_url}}

+

Thanks,

+

The Team at {{ deployment_name }}

+{% endblocktranslate %} \ No newline at end of file diff --git a/onadata/libs/tests/utils/test_email.py b/onadata/libs/tests/utils/test_email.py index 81a9e94e28..b1732b515b 100644 --- a/onadata/libs/tests/utils/test_email.py +++ b/onadata/libs/tests/utils/test_email.py @@ -155,6 +155,7 @@ def test_email_data_does_not_contain_newline_chars(self): self.assertNotIn("\n", email_data.get("subject")) +@override_settings(DEFAULT_FROM_EMAIL="no-reply@mail.misfit.com") class ProjectInvitationEmailTestCase(TestBase): """Tests for class ProjectInvitationEmail""" @@ -166,32 +167,48 @@ def setUp(self) -> None: self.user.profile.save() self.project.name = "Test Invitation" self.project.save() + self.user.email = "user@foo.com" + self.user.save() self.invitation = ProjectInvitation.objects.create( email="janedoe@example.com", project=self.project, role="editor", status=ProjectInvitation.Status.PENDING, + invited_by=self.user, ) self.email = ProjectInvitationEmail( self.invitation, "https://example.com/register" ) @override_settings(DEPLOYMENT_NAME="Misfit") - @patch("onadata.libs.utils.email.send_generic_email") + @patch("onadata.libs.utils.email.send_mail") def test_send(self, mock_send): """Email is sent successfully""" self.email.send() - email_data = { - "subject": "Invitation to Join a Project on Misfit", - "message_txt": "\nHello,\n\nYou have been added to Test Invitation by" - " a project admin allowing you to begin data collection.\n\nTo begin" - " using Misfit, please create an account first by clicking the link below:" - "\nhttps://example.com/register" - "\n\nThanks,\nThe Team at Misfit\n", - } + expected_subject = "Invitation to Join a Project on Misfit" + expected_message = ( + "\nHello,\n" + "You have been added to Test Invitation by a project admin allowing" + " you to begin data collection.\n" + "To begin using Misfit, please create an account first by" + " clicking the link below:\n" + "https://example.com/register\n" + "Thanks,\nThe Team at Misfit\n" + ) + 
expected_html_message = ( + "\n

Hello,

\n

You have been added to Test Invitation by a project" + " admin allowing you to begin data collection.

\n" + "

To begin using Misfit, please create an account first by" + " clicking the link below:

\n" + "

https://example.com/register

\n" + "

Thanks,

\n

The Team at Misfit

\n" + ) mock_send.assert_called_with( - self.invitation.email, - **email_data, + expected_subject, + expected_message, + "no-reply@mail.misfit.com", + (self.invitation.email,), + html_message=expected_html_message, ) @override_settings(DEPLOYMENT_NAME="Misfit") @@ -204,11 +221,80 @@ def test_get_template_data(self): "project_name": "Test Invitation", "invitation_url": "https://example.com/register", "organization": "Test User", + "invited_by": "user@foo.com", + "username": "janedoe@example.com", }, } data = self.email.get_template_data() self.assertEqual(data, expected_data) + # invitation invited_by is null + self.invitation.invited_by = None + self.invitation.save() + expected_data = { + "subject": {"deployment_name": "Misfit"}, + "body": { + "deployment_name": "Misfit", + "project_name": "Test Invitation", + "invitation_url": "https://example.com/register", + "organization": "Test User", + "invited_by": None, + "username": "janedoe@example.com", + }, + } + data = self.email.get_template_data() + self.assertEqual(data, expected_data) + + @override_settings( + DEPLOYMENT_NAME="Misfit", + PROJECT_INVITATION_SUBJECT="Invitation to join {deployment_name}", + PROJECT_INVITATION_MESSAGE=( + "

Hello {username}

" + "

You have been added to {project_name} in the" + " {organization} account on Misfit.

" + "

To begin using Misfit, please verify your account" + " first by clicking the link below:

" + "

{invitation_url}

" + "

Then, enter you first and last name, desired username," + " and password and click Join. Once complete, please notify" + " your project admin {invited_by} and we'll activate your account.

" + ), + ) + @patch("onadata.libs.utils.email.send_mail") + def test_send_custom_message(self, mock_send): + """Custom subject and message works""" + self.email.send() + expected_subject = "Invitation to join Misfit" + expected_message = ( + "Hello janedoe@example.com" + "You have been added to Test Invitation in the" + " Test User account on Misfit." + "To begin using Misfit, please verify your account" + " first by clicking the link below:" + "https://example.com/register" + "Then, enter you first and last name, desired username," + " and password and click Join. Once complete, please notify" + " your project admin user@foo.com and we'll activate your account." + ) + expected_html_message = ( + "

Hello janedoe@example.com

" + "

You have been added to Test Invitation in the" + " Test User account on Misfit.

" + "

To begin using Misfit, please verify your account" + " first by clicking the link below:

" + "

https://example.com/register

" + "

Then, enter you first and last name, desired username," + " and password and click Join. Once complete, please notify" + " your project admin user@foo.com and we'll activate your account.

" + ) + mock_send.assert_called_with( + expected_subject, + expected_message, + "no-reply@mail.misfit.com", + (self.invitation.email,), + html_message=expected_html_message, + ) + class ProjectInvitationURLTestCase(TestBase): """Tests for get_project_invitation_url""" @@ -231,8 +317,8 @@ def test_url_configured(self): } ) @override_settings(ALLOWED_HOSTS=["*"]) - def test_url_configured(self): - """settings.PROJECT_INVITATION_URL is set""" + def test_url_configured_for_host(self): + """settings.PROJECT_INVITATION_URL is set for specific host""" self.custom_request.META["HTTP_HOST"] = "new-domain.com" url = get_project_invitation_url(self.custom_request) self.assertEqual(url, "https://new-domain.com/register") diff --git a/onadata/libs/utils/email.py b/onadata/libs/utils/email.py index 0a4fb035bc..7541875f3e 100644 --- a/onadata/libs/utils/email.py +++ b/onadata/libs/utils/email.py @@ -3,8 +3,9 @@ email utility functions. """ from django.conf import settings -from django.core.mail import EmailMultiAlternatives +from django.core.mail import EmailMultiAlternatives, send_mail from django.http import HttpRequest +from django.utils.html import strip_tags from django.template.loader import render_to_string from six.moves.urllib.parse import urlencode from rest_framework.reverse import reverse @@ -123,6 +124,11 @@ def get_template_data(self) -> dict[str, str]: """Get context data for the templates""" deployment_name = getattr(settings, "DEPLOYMENT_NAME", "Ona") organization = self.invitation.project.organization.profile.name + invited_by = None + + if self.invitation.invited_by: + invited_by = self.invitation.invited_by.email + data = { "subject": {"deployment_name": deployment_name}, "body": { @@ -130,6 +136,8 @@ def get_template_data(self) -> dict[str, str]: "project_name": self.invitation.project.name, "invitation_url": self.url, "organization": organization, + "invited_by": invited_by, + "username": self.invitation.email, }, } @@ -137,18 +145,43 @@ def 
get_template_data(self) -> dict[str, str]: def get_email_data(self) -> dict[str, str]: """Get the email data to be sent""" - message_path = "projects/invitation.txt" - subject_path = "projects/invitation_subject.txt" template_data = self.get_template_data() - email_data = { - "subject": render_to_string(subject_path, template_data["subject"]), - "message_txt": render_to_string( + custom_subject: str | None = getattr( + settings, "PROJECT_INVITATION_SUBJECT", None + ) + custom_message: str | None = getattr( + settings, "PROJECT_INVITATION_MESSAGE", None + ) + + if custom_subject: + subject = custom_subject.format(**template_data["subject"]) + + else: + subject_path = "projects/invitation_subject.txt" + subject = render_to_string(subject_path, template_data["subject"]) + + if custom_message: + message = custom_message.format(**template_data["body"]) + + else: + message_path = "projects/invitation_message.html" + message = render_to_string( message_path, template_data["body"], - ), - } - return email_data + ) + + return ( + subject, + message, + ) def send(self) -> None: """Send project invitation email""" - send_generic_email(self.invitation.email, **self.get_email_data()) + subject, message = self.get_email_data() + send_mail( + subject, + strip_tags(message), + settings.DEFAULT_FROM_EMAIL, + (self.invitation.email,), + html_message=message, + ) From cdd55e8c250b1a645655c2353b322c708c67988a Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Fri, 17 Nov 2023 12:29:22 +0300 Subject: [PATCH 109/270] bump version to v3.15.0 (#2509) --- CHANGES.rst | 12 ++++++++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 65053d1c52..6d37cd6366 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,18 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.15.0(2023-11-17) +------------------- +- Upgrade Django to version 3.2.23 + `PR #2507 ` + [@kelvin-muchiri] 
+- Custom project invitation template + `PR #2506 ` + [@kelvin-muchiri] +- Soft delete xform from legacy UI + `PR #2506 ` + [@FrankApiyo] + v3.14.4(2023-11-07) ------------------- - Bump oidc version to v1.0.3 diff --git a/onadata/__init__.py b/onadata/__init__.py index f4743f83e3..17336ac6c9 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.14.4" +__version__ = "3.15.0" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index d44ebc9d71..f991290a42 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.14.4 +version = 3.15.0 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 9daa6f71a2c98e13e1448539a128d0eb74b7eb61 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 16 Nov 2023 12:09:28 +0300 Subject: [PATCH 110/270] Enhancement: Add Optimization Fence Query to gracefully handle cancel statement timeout Signed-off-by: Kipchirchir Sigei --- .../apps/api/viewsets/briefcase_viewset.py | 26 +++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/onadata/apps/api/viewsets/briefcase_viewset.py b/onadata/apps/api/viewsets/briefcase_viewset.py index 7e0648a7a5..8834f67cbf 100644 --- a/onadata/apps/api/viewsets/briefcase_viewset.py +++ b/onadata/apps/api/viewsets/briefcase_viewset.py @@ -8,6 +8,7 @@ from django.contrib.auth import get_user_model from django.core.files import File from django.core.validators import ValidationError +from django.db import OperationalError from django.http import Http404 from django.utils.translation import gettext as _ @@ -121,7 +122,7 @@ def get_object(self, queryset=None): return obj - # pylint: disable=too-many-branches,too-many-statements + # pylint: disable=too-many-branches,too-many-statements,too-many-locals def filter_queryset(self, queryset): """ 
Filters an XForm submission instances using ODK Aggregate query parameters. @@ -188,7 +189,28 @@ def filter_queryset(self, queryset): num_entries = _parse_int(num_entries) if num_entries: - instances = instances[:num_entries] + try: + instances = instances[:num_entries] + except OperationalError: + # Create an optimization fence + # Define the base query + inner_raw_sql = str(instances.query) + + # Create the outer query with the LIMIT clause + outer_query = ( + f"SELECT id, uuid FROM ({inner_raw_sql}) AS items " # nosec + "ORDER BY id ASC LIMIT %s" # nosec + ) + raw_queryset = Instance.objects.raw(outer_query, [num_entries]) + # convert raw queryset to queryset + instances_data = [ + {"pk": item.id, "uuid": item.uuid} + for item in raw_queryset.iterator() + ] + # Create QuerySet from the instances dict + instances = Instance.objects.filter( + pk__in=[item["pk"] for item in instances_data] + ).values("pk", "uuid") # Using len() instead of .count() to prevent an extra # database call; len() will load the instances in memory allowing From 36a1aafd9424dd94bbf06ba5a897de93f18f98c0 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Tue, 21 Nov 2023 21:41:01 +0300 Subject: [PATCH 111/270] Add tests Signed-off-by: Kipchirchir Sigei --- .../tests/viewsets/test_briefcase_viewset.py | 22 ++++++- .../apps/api/viewsets/briefcase_viewset.py | 61 +++++++++++++------ 2 files changed, 61 insertions(+), 22 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py b/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py index fb95b669f3..7461a7ed74 100644 --- a/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py @@ -12,9 +12,11 @@ from rest_framework.test import APIRequestFactory from onadata.apps.api.tests.viewsets import test_abstract_viewset -from onadata.apps.api.viewsets.briefcase_viewset import BriefcaseViewset -from onadata.apps.api.viewsets.xform_submission_viewset import \ 
- XFormSubmissionViewSet +from onadata.apps.api.viewsets.briefcase_viewset import ( + BriefcaseViewset, + _query_optimization_fence, +) +from onadata.apps.api.viewsets.xform_submission_viewset import XFormSubmissionViewSet from onadata.apps.api.viewsets.xform_viewset import XFormViewSet from onadata.apps.logger.models import Instance from onadata.apps.logger.models import XForm @@ -662,6 +664,20 @@ def test_view_downloadSubmission_multiple_nodes(self, mock_get_object): text = text.replace(*var) self.assertContains(response, instanceId, status_code=200) + def test_query_optimization_fence(self): + self._publish_xml_form() + self._make_submissions() + instances = ordered_instances(self.xform) + optimized_instances = _query_optimization_fence(instances, 4) + self.assertEqual(instances.count(), optimized_instances.count()) + op_sql_query = ( + 'SELECT "logger_instance"."id", "logger_instance"."uuid" FROM "logger_instance"' + f' WHERE "logger_instance"."id" IN ({optimized_instances[0].get("pk")},' + f' {optimized_instances[1].get("pk")}, {optimized_instances[2].get("pk")},' + f' {optimized_instances[3].get("pk")})' + ) + self.assertEqual(str(optimized_instances.query), op_sql_query) + def tearDown(self): # remove media files if self.user: diff --git a/onadata/apps/api/viewsets/briefcase_viewset.py b/onadata/apps/api/viewsets/briefcase_viewset.py index 8834f67cbf..6d3c888569 100644 --- a/onadata/apps/api/viewsets/briefcase_viewset.py +++ b/onadata/apps/api/viewsets/briefcase_viewset.py @@ -72,6 +72,47 @@ def _parse_int(num): return None +def _query_optimization_fence(instances, num_entries): + """ + Enhances query performance by using an optimization fence. + + This utility function creates an optimization fence around the provided + queryset instances. It encapsulates the original query within a + SELECT statement with an ORDER BY and LIMIT clause, + optimizing the database query for improved performance. 
+ + Parameters: + - instances: QuerySet + The input QuerySet of instances to be optimized. + - num_entries: int + The number of instances to be included in the optimized result set. + + Returns: + QuerySet + An optimized QuerySet containing selected fields ('pk' and 'uuid') + based on the provided instances. + """ + inner_raw_sql = str(instances.query) + + # Create the outer query with the LIMIT clause + outer_query = ( + f"SELECT id, uuid FROM ({inner_raw_sql}) AS items " # nosec + "ORDER BY id ASC LIMIT %s" # nosec + ) + raw_queryset = Instance.objects.raw(outer_query, [num_entries]) + # convert raw queryset to queryset + instances_data = [ + {"pk": item.id, "uuid": item.uuid} + for item in raw_queryset.iterator() + ] + # Create QuerySet from the instances dict + instances = Instance.objects.filter( + pk__in=[item["pk"] for item in instances_data] + ).values("pk", "uuid") + + return instances + + # pylint: disable=too-many-ancestors class BriefcaseViewset( mixins.CreateModelMixin, @@ -192,25 +233,7 @@ def filter_queryset(self, queryset): try: instances = instances[:num_entries] except OperationalError: - # Create an optimization fence - # Define the base query - inner_raw_sql = str(instances.query) - - # Create the outer query with the LIMIT clause - outer_query = ( - f"SELECT id, uuid FROM ({inner_raw_sql}) AS items " # nosec - "ORDER BY id ASC LIMIT %s" # nosec - ) - raw_queryset = Instance.objects.raw(outer_query, [num_entries]) - # convert raw queryset to queryset - instances_data = [ - {"pk": item.id, "uuid": item.uuid} - for item in raw_queryset.iterator() - ] - # Create QuerySet from the instances dict - instances = Instance.objects.filter( - pk__in=[item["pk"] for item in instances_data] - ).values("pk", "uuid") + instances = _query_optimization_fence(instances, num_entries) # Using len() instead of .count() to prevent an extra # database call; len() will load the instances in memory allowing From 1bff4a7bd0f80d4dcc3ecb0779a08582a588e28b Mon Sep 17 
00:00:00 2001 From: Kelvin Muchiri Date: Wed, 22 Nov 2023 17:38:44 +0300 Subject: [PATCH 112/270] upgrade Django for all requirements files (#2512) the previous upgrade had only targeted dev.pip and base.pip and omitted the other requirements file --- requirements/azure.in | 2 +- requirements/azure.pip | 2 +- requirements/s3.in | 2 +- requirements/s3.pip | 2 +- requirements/ses.in | 2 +- requirements/ses.pip | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/azure.in b/requirements/azure.in index d748531d54..c7b8e930ab 100644 --- a/requirements/azure.in +++ b/requirements/azure.in @@ -1,3 +1,3 @@ django-storages[azure] cryptography>=39.0.1 -django >=3.2.20,<4 +django >=3.2.23,<4 diff --git a/requirements/azure.pip b/requirements/azure.pip index 2d6febf604..7401938f51 100644 --- a/requirements/azure.pip +++ b/requirements/azure.pip @@ -20,7 +20,7 @@ cryptography==41.0.3 # via # -r requirements/azure.in # azure-storage-blob -django==3.2.20 +django==3.2.23 # via # -r requirements/azure.in # django-storages diff --git a/requirements/s3.in b/requirements/s3.in index fd471546b4..84a3d03a03 100644 --- a/requirements/s3.in +++ b/requirements/s3.in @@ -1,3 +1,3 @@ django-storages -django >=3.2.20,<4 +django >=3.2.23,<4 boto3 diff --git a/requirements/s3.pip b/requirements/s3.pip index 6f8abf5b4d..723bcbdf4c 100644 --- a/requirements/s3.pip +++ b/requirements/s3.pip @@ -12,7 +12,7 @@ botocore==1.31.22 # via # boto3 # s3transfer -django==3.2.20 +django==3.2.23 # via # -r requirements/s3.in # django-storages diff --git a/requirements/ses.in b/requirements/ses.in index 1f0304a6b8..19f15cd2c8 100644 --- a/requirements/ses.in +++ b/requirements/ses.in @@ -1,3 +1,3 @@ boto -django >=3.2.20,<4 +django >=3.2.23,<4 django-ses diff --git a/requirements/ses.pip b/requirements/ses.pip index 740663a8d5..6ef3d085f6 100644 --- a/requirements/ses.pip +++ b/requirements/ses.pip @@ -14,7 +14,7 @@ botocore==1.31.22 # via # boto3 # s3transfer -django==3.2.20 
+django==3.2.23 # via # -r requirements/ses.in # django-ses From fc66fa7f99d34a330cf5a2e0c33b366d1e739200 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 22 Nov 2023 17:31:44 +0300 Subject: [PATCH 113/270] Trigger db call to correctly capture OperationalError Signed-off-by: Kipchirchir Sigei --- onadata/apps/api/viewsets/briefcase_viewset.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/onadata/apps/api/viewsets/briefcase_viewset.py b/onadata/apps/api/viewsets/briefcase_viewset.py index 6d3c888569..35092209a4 100644 --- a/onadata/apps/api/viewsets/briefcase_viewset.py +++ b/onadata/apps/api/viewsets/briefcase_viewset.py @@ -231,7 +231,10 @@ def filter_queryset(self, queryset): num_entries = _parse_int(num_entries) if num_entries: try: - instances = instances[:num_entries] + paginated_instances = instances[:num_entries] + # trigger a database call + _ = len(instances) + instances = paginated_instances except OperationalError: instances = _query_optimization_fence(instances, num_entries) From d34a3221aa08fefee01e4b2ef2c15314a7a082bd Mon Sep 17 00:00:00 2001 From: apiyo Date: Wed, 22 Nov 2023 12:54:07 +0300 Subject: [PATCH 114/270] Add custom template tag: settings_value --- onadata/apps/logger/templatetags/__init__.py | 0 .../customize_template_by_domain.py | 19 ++++++ .../test_customize_template_by_domain.py | 68 +++++++++++++++++++ 3 files changed, 87 insertions(+) create mode 100644 onadata/apps/logger/templatetags/__init__.py create mode 100644 onadata/apps/logger/templatetags/customize_template_by_domain.py create mode 100644 onadata/apps/logger/tests/test_customize_template_by_domain.py diff --git a/onadata/apps/logger/templatetags/__init__.py b/onadata/apps/logger/templatetags/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/onadata/apps/logger/templatetags/customize_template_by_domain.py b/onadata/apps/logger/templatetags/customize_template_by_domain.py new file mode 100644 index 
0000000000..0bb4e5aeb4 --- /dev/null +++ b/onadata/apps/logger/templatetags/customize_template_by_domain.py @@ -0,0 +1,19 @@ +from django import template +from django.conf import settings +from onadata.apps.api.tools import get_host_domain + +register = template.Library() + + +@register.simple_tag(takes_context=True) +def settings_value(context, setting): + template_customization = getattr(settings, "TEMPLATE_CUSTOMIZATION", {}) + request = context.get("request") + domain = get_host_domain(request) + if domain in template_customization: + template_setting = template_customization[domain] + elif "*" in template_customization: + template_setting = template_customization["*"] + else: + template_setting = {} + return template_setting[setting] if setting in template_setting else "" diff --git a/onadata/apps/logger/tests/test_customize_template_by_domain.py b/onadata/apps/logger/tests/test_customize_template_by_domain.py new file mode 100644 index 0000000000..a9efb6c413 --- /dev/null +++ b/onadata/apps/logger/tests/test_customize_template_by_domain.py @@ -0,0 +1,68 @@ +from onadata.apps.main.tests.test_base import TestBase +from django.test import override_settings +from onadata.apps.logger.templatetags.customize_template_by_domain import settings_value + +TEMPLATE_CUSTOMIZATION = { + "*": { + "app_name": "Ona", + "login_logo": "/static/ona-logo.png", + "favicon": "/static/ona-favicon-32x32.png", + }, + "api.ona.io": { + "login_background": "#009CDE", + "app_name": "NCDS", + "login_logo": "/static/who-logo.jpeg", + "favicon": "/static/who-favicon-32x32.png", + }, +} + + +class TestCustomizeTemplateTasks(TestBase): + """ + Test api tasks + """ + + @override_settings(TEMPLATE_CUSTOMIZATION=TEMPLATE_CUSTOMIZATION) + @override_settings(ALLOWED_HOSTS=["api.ona.io"]) + def test_for_domain(self): + """Test settings_value returns correct values""" + request = self.factory.get("/") + request.META["HTTP_HOST"] = "api.ona.io" + self.assertEqual( + settings_value({"request": 
request}, "login_background"), "#009CDE" + ) + self.assertEqual(settings_value({"request": request}, "app_name"), "NCDS") + self.assertEqual( + settings_value({"request": request}, "login_logo"), "/static/who-logo.jpeg" + ) + self.assertEqual( + settings_value({"request": request}, "favicon"), + "/static/who-favicon-32x32.png", + ) + + @override_settings(TEMPLATE_CUSTOMIZATION=TEMPLATE_CUSTOMIZATION) + @override_settings(ALLOWED_HOSTS=["*"]) + def test_for_no_domain(self): + """Test settings_value returns correct values""" + request = self.factory.get("/") + self.assertEqual(settings_value({"request": request}, "login_background"), "") + self.assertEqual(settings_value({"request": request}, "app_name"), "Ona") + self.assertEqual( + settings_value({"request": request}, "login_logo"), "/static/ona-logo.png" + ) + self.assertEqual( + settings_value({"request": request}, "favicon"), + "/static/ona-favicon-32x32.png", + ) + + @override_settings(ALLOWED_HOSTS=["*"]) + def test_for_no_domain_no_settings(self): + """Test settings_value returns correct values""" + request = self.factory.get("/") + self.assertEqual(settings_value({"request": request}, "login_background"), "") + self.assertEqual(settings_value({"request": request}, "app_name"), "") + self.assertEqual(settings_value({"request": request}, "login_logo"), "") + self.assertEqual( + settings_value({"request": request}, "favicon"), + "", + ) From 484b11b71d40cb50567c1c384807ab3ea185aafa Mon Sep 17 00:00:00 2001 From: apiyo Date: Thu, 23 Nov 2023 08:53:01 +0300 Subject: [PATCH 115/270] Bump version to v3.16.0 --- CHANGES.rst | 15 +++++++++++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 17 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 6d37cd6366..eebf1c7694 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,21 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.16.0(2023-11-23) +------------------- +- Add custom template tag: 
settings_value + `PR #2510 ` + [@FrankApiyo] +- Enhancement: Handle Statement Timeout in Briefcase Viewset + `PR #2508 ` + [@KipSigei] +- Trigger database call to correctly capture OperationalError + `PR #2513 ` + [@KipSigei] +- Upgrade Django for omitted requirements files + `PR #2512 ` + [@kelvin-muchiri] + v3.15.0(2023-11-17) ------------------- - Upgrade Django to version 3.2.23 diff --git a/onadata/__init__.py b/onadata/__init__.py index 17336ac6c9..1087b624c3 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.15.0" +__version__ = "3.16.0" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index f991290a42..78abdf8194 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.15.0 +version = 3.16.0 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From ae886c0a592b0d3d52434821f171b74cdc123129 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 23 Nov 2023 09:36:44 +0300 Subject: [PATCH 116/270] Filter out deleted forms from project forms endpoint Signed-off-by: Kipchirchir Sigei --- onadata/apps/api/viewsets/project_viewset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/onadata/apps/api/viewsets/project_viewset.py b/onadata/apps/api/viewsets/project_viewset.py index b5d168708a..3f72099143 100644 --- a/onadata/apps/api/viewsets/project_viewset.py +++ b/onadata/apps/api/viewsets/project_viewset.py @@ -163,7 +163,7 @@ def forms(self, request, **kwargs): return Response(survey, status=status.HTTP_400_BAD_REQUEST) - xforms = XForm.objects.filter(project=project) + xforms = XForm.objects.filter(project=project, deleted_at__isnull=True) serializer = XFormSerializer(xforms, context={"request": request}, many=True) return Response(serializer.data) From 702791b7c535b335858b8949696fc3d74389b7ff 
Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 23 Nov 2023 09:37:01 +0300 Subject: [PATCH 117/270] Add tests Signed-off-by: Kipchirchir Sigei --- .../tests/viewsets/test_project_viewset.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/onadata/apps/api/tests/viewsets/test_project_viewset.py b/onadata/apps/api/tests/viewsets/test_project_viewset.py index b90c0fbd69..1b96ca546c 100644 --- a/onadata/apps/api/tests/viewsets/test_project_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_project_viewset.py @@ -309,6 +309,25 @@ def test_project_get_deleted_form(self): self.assertEqual(len(response.data.get("forms")), 0) self.assertEqual(response.status_code, 200) + def test_xform_delete_project_forms_endpoint(self): + self._publish_xls_form_to_project() + + view = ProjectViewSet.as_view({"get": "forms"}) + request = self.factory.get("/", **self.extra) + response = view(request, pk=self.project.pk) + + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 1) + + # soft delete form + self.xform.soft_delete(user=self.user) + + request = self.factory.get("/", **self.extra) + response = view(request, pk=self.project.pk) + + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 0) + # pylint: disable=invalid-name def test_none_empty_forms_and_dataview_properties_in_returned_json(self): self._publish_xls_form_to_project() From 101e72e6ea7504f0bed65fc879c59d09caa30b46 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 23 Nov 2023 13:57:06 +0300 Subject: [PATCH 118/270] Check length of paginated instances Signed-off-by: Kipchirchir Sigei --- onadata/apps/api/viewsets/briefcase_viewset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/onadata/apps/api/viewsets/briefcase_viewset.py b/onadata/apps/api/viewsets/briefcase_viewset.py index 35092209a4..082b41f485 100644 --- a/onadata/apps/api/viewsets/briefcase_viewset.py +++ 
b/onadata/apps/api/viewsets/briefcase_viewset.py @@ -233,7 +233,7 @@ def filter_queryset(self, queryset): try: paginated_instances = instances[:num_entries] # trigger a database call - _ = len(instances) + _ = len(paginated_instances) instances = paginated_instances except OperationalError: instances = _query_optimization_fence(instances, num_entries) From 38f59cdabfa452c85af78b63e1748ab3aa7ecd9d Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Fri, 24 Nov 2023 10:19:44 +0300 Subject: [PATCH 119/270] Create logger instannce index on (xform_id, id) Signed-off-by: Kipchirchir Sigei --- .../0011_add_xform_id_instance_id_idx.py | 30 +++++++++++++++++++ onadata/apps/logger/models/instance.py | 1 + 2 files changed, 31 insertions(+) create mode 100644 onadata/apps/logger/migrations/0011_add_xform_id_instance_id_idx.py diff --git a/onadata/apps/logger/migrations/0011_add_xform_id_instance_id_idx.py b/onadata/apps/logger/migrations/0011_add_xform_id_instance_id_idx.py new file mode 100644 index 0000000000..ad93565ae7 --- /dev/null +++ b/onadata/apps/logger/migrations/0011_add_xform_id_instance_id_idx.py @@ -0,0 +1,30 @@ +# Generated by Django 3.2.20 on 2023-11-23 16:01 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + atomic = False + + dependencies = [ + ('logger', '0010_auto_20230921_0346'), + ] + + operations = [ + migrations.SeparateDatabaseAndState( + database_operations=[ + migrations.RunSQL( + sql='CREATE INDEX CONCURRENTLY "logger_instance_id_xform_id_index" ON "logger_instance" ("xform_id", "id");', + reverse_sql='DROP INDEX "logger_instance_id_xform_id_index";', + ), + ], + state_operations=[ + migrations.AddIndex( + model_name="instance", + index=models.Index( + fields=["xform_id", "id"], name="logger_instance_id_xform_id_index" + ), + ), + ], + ) + ] diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index fe32914da2..2c0fd87d12 100644 --- 
a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -685,6 +685,7 @@ class Meta: models.Index(fields=["date_created"]), models.Index(fields=["date_modified"]), models.Index(fields=["deleted_at"]), + models.Index(fields=["xform_id", "id"]), ] @classmethod From 81306bc1dd3008e028e6a8ceaaeef9d14b97cd8a Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 23 Nov 2023 16:35:58 +0300 Subject: [PATCH 120/270] Tag release v3.17.0 Signed-off-by: Kipchirchir Sigei --- CHANGES.rst | 12 ++++++++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index eebf1c7694..333a7dc382 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,18 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.17.0(2023-11-24) +------------------- +- Create Composite Index for xform_id and id fields + `PR #2519 ` + [@KipSigei] +- Filter out soft-deleted xforms from project forms endpoint + `PR #2515 ` + [@KipSigei] +- Check length of paginated instances in briefcase viewset + `PR #2517 ` + [@KipSigei] + v3.16.0(2023-11-23) ------------------- - Add custom template tag: settings_value diff --git a/onadata/__init__.py b/onadata/__init__.py index 1087b624c3..93b2ff24b5 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.16.0" +__version__ = "3.17.0" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index 78abdf8194..c3632ff37a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.16.0 +version = 3.17.0 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 48fe4c50ae72c2783b976082b89f642f3354fe1c Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 7 Dec 2023 15:53:24 +0300 Subject: [PATCH 
121/270] Enable TokenAuthentication on briefcase viewset Signed-off-by: Kipchirchir Sigei --- .../tests/viewsets/test_briefcase_viewset.py | 56 +++++++++++++++++++ .../apps/api/viewsets/briefcase_viewset.py | 3 +- 2 files changed, 58 insertions(+), 1 deletion(-) diff --git a/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py b/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py index 7461a7ed74..0e8b60f6f2 100644 --- a/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py @@ -132,6 +132,31 @@ def test_view_submission_list(self): '{{resumptionCursor}}', '%s' % last_index) self.assertContains(response, expected_submission_list) + def test_view_submission_list_token_auth(self): + view = BriefcaseViewset.as_view({'get': 'list'}) + self._publish_xml_form() + self._make_submissions() + # use Token auth in self.extra + request = self.factory.get( + self._submission_list_url, + data={'formId': self.xform.id_string}, **self.extra) + response = view(request, username=self.user.username) + self.assertEqual(response.status_code, 200) + submission_list_path = os.path.join( + self.main_directory, 'fixtures', 'transportation', + 'view', 'submissionList.xml') + instances = ordered_instances(self.xform) + + self.assertEqual(instances.count(), NUM_INSTANCES) + + last_index = instances[instances.count() - 1].pk + with codecs.open(submission_list_path, 'rb', encoding='utf-8') as f: + expected_submission_list = f.read() + expected_submission_list = \ + expected_submission_list.replace( + '{{resumptionCursor}}', '%s' % last_index) + self.assertContains(response, expected_submission_list) + def test_view_submission_list_w_xformid(self): view = BriefcaseViewset.as_view({'get': 'list'}) self._publish_xml_form() @@ -371,6 +396,37 @@ def test_view_downloadSubmission(self): self.assertContains(response, instanceId, status_code=200) self.assertMultiLineEqual(response.content.decode('utf-8'), text) + def 
test_view_downloadSubmission_w_token_auth(self): + view = BriefcaseViewset.as_view({'get': 'retrieve'}) + self._publish_xml_form() + self.maxDiff = None + self._submit_transport_instance_w_attachment() + instanceId = u'5b2cc313-fc09-437e-8149-fcd32f695d41' + instance = Instance.objects.get(uuid=instanceId) + formId = u'%(formId)s[@version=null and @uiVersion=null]/' \ + u'%(formId)s[@key=uuid:%(instanceId)s]' % { + 'formId': self.xform.id_string, + 'instanceId': instanceId} + params = {'formId': formId} + # use Token auth in self.extra + request = self.factory.get( + self._download_submission_url, data=params, **self.extra) + response = view(request, username=self.user.username) + self.assertEqual(response.status_code, 200) + text = "uuid:%s" % instanceId + download_submission_path = os.path.join( + self.main_directory, 'fixtures', 'transportation', + 'view', 'downloadSubmission.xml') + with codecs.open(download_submission_path, encoding='utf-8') as f: + text = f.read() + for var in ((u'{{submissionDate}}', + instance.date_created.isoformat()), + (u'{{form_id}}', str(self.xform.id)), + (u'{{media_id}}', str(self.attachment.id))): + text = text.replace(*var) + self.assertContains(response, instanceId, status_code=200) + self.assertMultiLineEqual(response.content.decode('utf-8'), text) + def test_view_downloadSubmission_w_xformid(self): view = BriefcaseViewset.as_view({'get': 'retrieve'}) self._publish_xml_form() diff --git a/onadata/apps/api/viewsets/briefcase_viewset.py b/onadata/apps/api/viewsets/briefcase_viewset.py index 082b41f485..d444928570 100644 --- a/onadata/apps/api/viewsets/briefcase_viewset.py +++ b/onadata/apps/api/viewsets/briefcase_viewset.py @@ -14,6 +14,7 @@ import six from rest_framework import exceptions, mixins, permissions, status, viewsets +from rest_framework.authentication import TokenAuthentication from rest_framework.decorators import action from rest_framework.generics import get_object_or_404 from rest_framework.renderers import 
BrowsableAPIRenderer @@ -125,7 +126,7 @@ class BriefcaseViewset( https://code.google.com/p/opendatakit/wiki/BriefcaseAggregateAPI). """ - authentication_classes = (DigestAuthentication,) + authentication_classes = (DigestAuthentication, TokenAuthentication,) filter_backends = (filters.AnonDjangoObjectPermissionFilter,) queryset = XForm.objects.all() permission_classes = (permissions.IsAuthenticated, ViewDjangoObjectPermissions) From c335b6ffa0c83214be58ab061e0e650cb5181bf6 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Tue, 5 Dec 2023 15:49:45 +0300 Subject: [PATCH 122/270] use instance.get_full_dict() to send data to RapidPro We do instance.get_full_dict() instead of instance.json because when an instance is processed asynchronously, the json may not be upto date --- .../viewsets/test_xform_submission_viewset.py | 99 ++++++++++++++++++- onadata/apps/restservice/services/textit.py | 6 +- 2 files changed, 101 insertions(+), 4 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_xform_submission_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_submission_viewset.py index 40ca000a4d..9f7cbfb7b8 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_submission_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_submission_viewset.py @@ -21,6 +21,8 @@ ) from onadata.apps.api.viewsets.xform_submission_viewset import XFormSubmissionViewSet from onadata.apps.logger.models import Attachment, Instance, XForm +from onadata.apps.restservice.models import RestService +from onadata.apps.restservice.services.textit import ServiceDefinition from onadata.libs.permissions import DataEntryRole from onadata.libs.utils.common_tools import get_uuid @@ -1159,7 +1161,9 @@ def test_post_submission_using_project_pk_while_anonymous(self): ) with open(submission_path, "rb") as sf: data = {"xml_submission_file": sf, "media_file": f} - request = self.factory.post(f"/projects/{self.xform.project.pk}/submission", data) + request = self.factory.post( + 
f"/projects/{self.xform.project.pk}/submission", data + ) count = Instance.objects.filter(xform=self.xform).count() request.user = AnonymousUser() response = self.view(request, xform_pk=self.xform.pk) @@ -1207,7 +1211,9 @@ def test_post_submission_using_project_pk_while_authenticated(self): with open(submission_path, "rb") as sf: data = {"xml_submission_file": sf, "media_file": f} count = Instance.objects.filter(xform=self.xform).count() - request = self.factory.post(f"/projects/{self.xform.project.pk}/submission", data) + request = self.factory.post( + f"/projects/{self.xform.project.pk}/submission", data + ) response = self.view(request) self.assertEqual(response.status_code, 401) auth = DigestAuth("bob", "bobbob") @@ -1225,3 +1231,92 @@ def test_post_submission_using_project_pk_while_authenticated(self): self.assertEqual( Instance.objects.filter(xform=self.xform).count(), count + 1 ) + + @mock.patch.object(ServiceDefinition, "send") + def test_new_submission_sent_to_rapidpro(self, mock_send): + """Submission created is sent to RapidPro""" + rest_service = RestService.objects.create( + service_url="https://rapidpro.ona.io/api/v2/flow_starts.json", + xform=self.xform, + name="textit", + ) + s = self.surveys[0] + media_file = "1335783522563.jpg" + path = os.path.join( + self.main_directory, + "fixtures", + "transportation", + "instances", + s, + media_file, + ) + + with open(path, "rb") as f: + f = InMemoryUploadedFile( + f, "media_file", media_file, "image/jpg", os.path.getsize(path), None + ) + submission_path = os.path.join( + self.main_directory, + "fixtures", + "transportation", + "instances", + s, + s + ".xml", + ) + + with open(submission_path, "rb") as sf: + data = {"xml_submission_file": sf, "media_file": f} + request = self.factory.post("/submission", data) + response = self.view(request) + self.assertEqual(response.status_code, 401) + auth = DigestAuth("bob", "bobbob") + request.META.update(auth(request.META, response)) + response = self.view(request, 
username=self.user.username) + self.assertContains(response, "Successful submission", status_code=201) + instance = Instance.objects.all().order_by("-pk")[0] + mock_send.assert_called_once_with(rest_service.service_url, instance) + + @mock.patch.object(ServiceDefinition, "send") + def test_edit_submission_sent_to_rapidpro(self, mock_send): + """Submission edited is sent to RapidPro""" + rest_service = RestService.objects.create( + service_url="https://rapidpro.ona.io/api/v2/flow_starts.json", + xform=self.xform, + name="textit", + ) + s = self.surveys[0] + media_file = "1335783522563.jpg" + path = os.path.join( + self.main_directory, + "fixtures", + "transportation", + "instances", + s, + media_file, + ) + + with open(path, "rb") as f: + f = InMemoryUploadedFile( + f, "media_file", media_file, "image/jpg", os.path.getsize(path), None + ) + submission_path = os.path.join( + self.main_directory, + "fixtures", + "transportation", + "instances", + s, + f"{s}_edited.xml", + ) + + with open(submission_path, "rb") as sf: + data = {"xml_submission_file": sf, "media_file": f} + request = self.factory.post("/submission", data) + response = self.view(request) + self.assertEqual(response.status_code, 401) + auth = DigestAuth("bob", "bobbob") + request.META.update(auth(request.META, response)) + response = self.view(request, username=self.user.username) + self.assertContains(response, "Successful submission", status_code=201) + new_uuid = "6b2cc313-fc09-437e-8139-fcd32f695d41" + instance = Instance.objects.get(uuid=new_uuid) + mock_send.assert_called_once_with(rest_service.service_url, instance) diff --git a/onadata/apps/restservice/services/textit.py b/onadata/apps/restservice/services/textit.py index 5e6333dd18..29816aaaca 100644 --- a/onadata/apps/restservice/services/textit.py +++ b/onadata/apps/restservice/services/textit.py @@ -29,8 +29,10 @@ def send(self, url, data=None): :param data: :return: """ - extra_data = self.clean_keys_of_slashes(data.json) - + # We do 
instance.get_full_dict() instead of instance.json because + # when an instance is processed asynchronously, the json may not be + # upto date + extra_data = self.clean_keys_of_slashes(data.get_full_dict()) data_value = MetaData.textit(data.xform) if data_value: From ce9aca3a856c3238a72a89749332f9218514100c Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Tue, 5 Dec 2023 15:58:32 +0300 Subject: [PATCH 123/270] fix stale data sent to generic external service when async submissions is enabed --- onadata/apps/restservice/services/generic_json.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/onadata/apps/restservice/services/generic_json.py b/onadata/apps/restservice/services/generic_json.py index 591b72cdbd..a217c54211 100644 --- a/onadata/apps/restservice/services/generic_json.py +++ b/onadata/apps/restservice/services/generic_json.py @@ -24,7 +24,9 @@ class ServiceDefinition(RestServiceInterface): def send(self, url, data=None): """Post submisison JSON data to an external service that accepts a JSON post.""" if data: - post_data = json.dumps(data.json) + # We do instance.get_full_dict() instead of instance.json because + # when an instance is processed asynchronously, the json may not be upto date + post_data = json.dumps(data.get_full_dict()) headers = {"Content-Type": "application/json"} try: requests.post( From 31d76d35e6829691bd2a8787066dc5a9d79466af Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Tue, 5 Dec 2023 16:01:30 +0300 Subject: [PATCH 124/270] update code comment --- onadata/apps/restservice/services/generic_json.py | 4 ++-- onadata/apps/restservice/services/textit.py | 5 ++--- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/onadata/apps/restservice/services/generic_json.py b/onadata/apps/restservice/services/generic_json.py index a217c54211..b2acfdf423 100644 --- a/onadata/apps/restservice/services/generic_json.py +++ b/onadata/apps/restservice/services/generic_json.py @@ -24,8 +24,8 @@ class 
ServiceDefinition(RestServiceInterface): def send(self, url, data=None): """Post submisison JSON data to an external service that accepts a JSON post.""" if data: - # We do instance.get_full_dict() instead of instance.json because - # when an instance is processed asynchronously, the json may not be upto date + # We use Instance.get_full_dict() instead of Instance.json because + # when asynchronous processing is enabled, the json may not be upto date post_data = json.dumps(data.get_full_dict()) headers = {"Content-Type": "application/json"} try: diff --git a/onadata/apps/restservice/services/textit.py b/onadata/apps/restservice/services/textit.py index 29816aaaca..e47c85c071 100644 --- a/onadata/apps/restservice/services/textit.py +++ b/onadata/apps/restservice/services/textit.py @@ -29,9 +29,8 @@ def send(self, url, data=None): :param data: :return: """ - # We do instance.get_full_dict() instead of instance.json because - # when an instance is processed asynchronously, the json may not be - # upto date + # We use Instance.get_full_dict() instead of Instance.json because + # when asynchronous processing is enabled, the json may not be upto date extra_data = self.clean_keys_of_slashes(data.get_full_dict()) data_value = MetaData.textit(data.xform) From 794f9b0f6399b4b6ee4cf4e4219acc009c33fcdc Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Fri, 8 Dec 2023 16:44:16 +0300 Subject: [PATCH 125/270] Tag release v3.17.1 Signed-off-by: Kipchirchir Sigei --- CHANGES.rst | 9 +++++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 333a7dc382..b617b5d1ab 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,15 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.17.1(2023-12-11) +------------------- +- Enable TokenAuthentication on briefcase viewset + `PR #2523 ` + [@KipSigei] +- Fix stale data sent to RapidPro when asynchronous processing of 
submissions is enabled + `PR #2522 ` + [@kelvin-muchiri] + v3.17.0(2023-11-24) ------------------- - Create Composite Index for xform_id and id fields diff --git a/onadata/__init__.py b/onadata/__init__.py index 93b2ff24b5..8ae438470b 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.17.0" +__version__ = "3.17.1" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index c3632ff37a..741ce91201 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.17.0 +version = 3.17.1 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 0e2686d20b6e7b739ff970f64d809c70babdd0ce Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 13 Dec 2023 09:10:01 +0300 Subject: [PATCH 126/270] Enable token authentication on /formlist endpoint Signed-off-by: Kipchirchir Sigei --- onadata/apps/api/viewsets/xform_list_viewset.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/onadata/apps/api/viewsets/xform_list_viewset.py b/onadata/apps/api/viewsets/xform_list_viewset.py index 318d2d2c4d..e0be32bfe1 100644 --- a/onadata/apps/api/viewsets/xform_list_viewset.py +++ b/onadata/apps/api/viewsets/xform_list_viewset.py @@ -9,6 +9,7 @@ from django_filters import rest_framework as django_filter_filters from rest_framework import permissions, viewsets +from rest_framework.authentication import TokenAuthentication from rest_framework.decorators import action from rest_framework.response import Response @@ -48,6 +49,7 @@ class XFormListViewSet(ETagsMixin, BaseViewset, viewsets.ReadOnlyModelViewSet): authentication_classes = ( DigestAuthentication, EnketoTokenAuthentication, + TokenAuthentication, ) content_negotiation_class = MediaFileContentNegotiation filter_class = filters.FormIDFilter From 528cb546d183b8db61c5e0c1fe4712370e4684e0 Mon Sep 17 
00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 13 Dec 2023 09:10:17 +0300 Subject: [PATCH 127/270] Add tests Signed-off-by: Kipchirchir Sigei --- .../tests/viewsets/test_xform_list_viewset.py | 67 +++++++++++++++++++ 1 file changed, 67 insertions(+) diff --git a/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py index e21b183469..6f4709c2ea 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py @@ -44,6 +44,21 @@ def test_get_xform_list(self): self.assertTrue(response.has_header("Date")) self.assertEqual(response["Content-Type"], "text/xml; charset=utf-8") + def test_get_xform_list_w_token_auth(self): + request = self.factory.get("/", **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 200) + path = os.path.join(os.path.dirname(__file__), "..", "fixtures", "formList.xml") + with open(path, encoding="utf-8") as f: + form_list_xml = f.read().strip() + data = {"hash": self.xform.hash, "pk": self.xform.pk} + content = response.render().content.decode("utf-8") + self.assertEqual(content, form_list_xml % data) + self.assertTrue(response.has_header("X-OpenRosa-Version")) + self.assertTrue(response.has_header("X-OpenRosa-Accept-Content-Length")) + self.assertTrue(response.has_header("Date")) + self.assertEqual(response["Content-Type"], "text/xml; charset=utf-8") + def test_get_xform_list_xform_pk_filter_anon(self): """ Test formList xform_pk filter for anonymous user. 
@@ -720,6 +735,27 @@ def test_retrieve_xform_xml(self): self.assertTrue(response.has_header("Date")) self.assertEqual(response["Content-Type"], "text/xml; charset=utf-8") + def test_retrieve_xform_xml_w_token_auth(self): + self.view = XFormListViewSet.as_view({"get": "retrieve"}) + request = self.factory.get("/", **self.extra) + response = self.view(request, pk=self.xform.pk) + self.assertEqual(response.status_code, 200) + + path = os.path.join( + os.path.dirname(__file__), "..", "fixtures", "Transportation Form.xml" + ) + + with open(path, encoding="utf-8") as f: + form_xml = f.read().strip() + data = {"form_uuid": self.xform.uuid} + content = response.render().content.decode("utf-8").strip() + content = content.replace(self.xform.version, "20141112071722") + self.assertEqual(content, form_xml % data) + self.assertTrue(response.has_header("X-OpenRosa-Version")) + self.assertTrue(response.has_header("X-OpenRosa-Accept-Content-Length")) + self.assertTrue(response.has_header("Date")) + self.assertEqual(response["Content-Type"], "text/xml; charset=utf-8") + def _load_metadata(self, xform=None): data_value = "screenshot.png" data_type = "media" @@ -758,6 +794,28 @@ def test_retrieve_xform_manifest(self): self.assertTrue(response.has_header("Date")) self.assertEqual(response["Content-Type"], "text/xml; charset=utf-8") + def test_retrieve_xform_manifest_w_token_auth(self): + self._load_metadata(self.xform) + self.view = XFormListViewSet.as_view({"get": "manifest"}) + request = self.factory.get("/", **self.extra) + response = self.view(request, pk=self.xform.pk) + self.assertEqual(response.status_code, 200) + + manifest_xml = """screenshot.png%(hash)shttp://testserver/bob/xformsMedia/%(xform)s/%(pk)s.png""" # noqa + data = { + "hash": self.metadata.hash, + "pk": self.metadata.pk, + "xform": self.xform.pk, + } + content = "".join( + [i.decode("utf-8").strip() for i in response.streaming_content] + ) + self.assertEqual(content, manifest_xml % data) + 
self.assertTrue(response.has_header("X-OpenRosa-Version")) + self.assertTrue(response.has_header("X-OpenRosa-Accept-Content-Length")) + self.assertTrue(response.has_header("Date")) + self.assertEqual(response["Content-Type"], "text/xml; charset=utf-8") + def test_retrieve_xform_manifest_anonymous_user(self): self._load_metadata(self.xform) self.view = XFormListViewSet.as_view({"get": "manifest"}) @@ -808,6 +866,15 @@ def test_retrieve_xform_media(self): ) self.assertEqual(response.status_code, 200) + def test_retrieve_xform_media_w_token_auth(self): + self._load_metadata(self.xform) + self.view = XFormListViewSet.as_view({"get": "media", "head": "media"}) + request = self.factory.get("/", **self.extra) + response = self.view( + request, pk=self.xform.pk, metadata=self.metadata.pk, format="png" + ) + self.assertEqual(response.status_code, 200) + def test_retrieve_xform_media_anonymous_user(self): self._load_metadata(self.xform) self.view = XFormListViewSet.as_view({"get": "media"}) From 35d7f2dfcd8c7c278360030dbc8d5828c963bb80 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Fri, 15 Dec 2023 16:57:32 +0300 Subject: [PATCH 128/270] Set AWS credentials when generating pre-signed URLS (#2527) --- onadata/libs/utils/image_tools.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/onadata/libs/utils/image_tools.py b/onadata/libs/utils/image_tools.py index c61d5c752e..6268dcf876 100644 --- a/onadata/libs/utils/image_tools.py +++ b/onadata/libs/utils/image_tools.py @@ -83,7 +83,12 @@ def generate_aws_media_url( signature_version=getattr(settings, "AWS_S3_SIGNATURE_VERSION", "s3v4"), region_name=getattr(settings, "AWS_S3_REGION_NAME", ""), ) - s3_client = boto3.client("s3", config=s3_config) + s3_client = boto3.client( + "s3", + config=s3_config, + aws_access_key_id=s3_class.access_key, + aws_secret_access_key=s3_class.secret_key, + ) # Generate a presigned URL for the S3 object return s3_client.generate_presigned_url( From 
7dcbb5ac46a8bffe0addab7a9e1b8c0dfc055bbc Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Mon, 18 Dec 2023 09:47:46 +0300 Subject: [PATCH 129/270] Update JS dependencies Signed-off-by: Kipchirchir Sigei --- yarn.lock | 1399 +++++++---------------------------------------------- 1 file changed, 164 insertions(+), 1235 deletions(-) diff --git a/yarn.lock b/yarn.lock index dcba759f1a..83cc73a501 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2,44 +2,6 @@ # yarn lockfile v1 -"@angular/compiler@9.0.5": - version "9.0.5" - resolved "https://registry.yarnpkg.com/@angular/compiler/-/compiler-9.0.5.tgz#823dd4df25a9f1a641346712e7b7097ed1176105" - integrity sha512-TeyhRGefTOtA9N3udMrvheafoXcz/dvTTdZLcieeZQxm1SSeaQDUQ/rUH6QTOiHVNMtjOCrZ9J5rk1A4mPYuag== - -"@babel/code-frame@7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.12.13.tgz#dcfc826beef65e75c50e21d3837d7d95798dd658" - integrity sha512-HV1Cm0Q3ZrpCR93tkWOYiuYIgLxZXZFVG2VgK+MBWjUqZTundupbfx2aXarXuw5Ko5aMcjtJgbSs4vUGBS5v6g== - dependencies: - "@babel/highlight" "^7.12.13" - -"@babel/code-frame@^7.0.0": - version "7.21.4" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.21.4.tgz#d0fa9e4413aca81f2b23b9442797bda1826edb39" - integrity sha512-LYvhNKfwWSPpocw8GI7gpK2nq3HSDuEPC/uSYaALSJu9xjsalaaYFOq0Pwt5KmVqwEbZlDu81aLXwBOmD/Fv9g== - dependencies: - "@babel/highlight" "^7.18.6" - -"@babel/helper-validator-identifier@^7.18.6": - version "7.19.1" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz#7eea834cf32901ffdc1a7ee555e2f9c27e249ca2" - integrity sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w== - -"@babel/highlight@^7.12.13", "@babel/highlight@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.18.6.tgz#81158601e93e2563795adcbfbdf5d64be3f2ecdf" - integrity 
sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g== - dependencies: - "@babel/helper-validator-identifier" "^7.18.6" - chalk "^2.0.0" - js-tokens "^4.0.0" - -"@babel/parser@7.12.11": - version "7.12.11" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.12.11.tgz#9ce3595bcd74bc5c466905e86c535b8b25011e79" - integrity sha512-N3UxG+uuF4CMYoNj8AhnbAcJF0PiuJ9KHuy1lQmkYsxTer/MAH9UBNHsBoAX/4s6NvlDD047No8mYVGGzLL4hg== - "@bower_components/backbone@jashkenas/backbone#~1.4.1": version "1.4.1" resolved "https://codeload.github.com/jashkenas/backbone/tar.gz/9260f3cb43d26b0e185f5800b31d9ae913999a1f" @@ -107,13 +69,11 @@ d3-zoom "3" "@bower_components/datatables-plugins@datatables/plugins#*": - version "1.13.1" - resolved "https://codeload.github.com/datatables/plugins/tar.gz/950f84e173883900ed139b933de120ad0de5617b" + version "1.13.5" + resolved "https://codeload.github.com/datatables/plugins/tar.gz/9047d05de714e8a98a558a3b92462c6097c34c9c" dependencies: "@types/jquery" "^3.5.16" datatables.net "^1.13.2" - prettier-plugin-x "^0.0.10" - typescript "^4.9.5" "@bower_components/datatables@datatables/datatables#~1.10.18": version "1.10.21" @@ -137,159 +97,11 @@ version "1.13.6" resolved "https://codeload.github.com/jashkenas/underscore/tar.gz/bd2d35c87620a7da36250a006c97fdae89f4902d" -"@brodybits/remark-parse@5.0.1": - version "5.0.1" - resolved "https://registry.yarnpkg.com/@brodybits/remark-parse/-/remark-parse-5.0.1.tgz#f13df839df25f4ff55d6aa8839418b43787f6548" - integrity sha512-z4BjfcxegoajMQwIWBewRXVzrEvSQY1rILm7+O57qX9UI4ofCNr+biteNCVMNDv7POleymz6inEIzbCKYX7MVA== - dependencies: - collapse-white-space "^1.0.2" - is-alphabetical "^1.0.0" - is-decimal "^1.0.0" - is-whitespace-character "^1.0.0" - is-word-character "^1.0.0" - markdown-escapes "^1.0.0" - parse-entities "^1.1.0" - repeat-string "^1.5.4" - state-toggle "^1.0.0" - trim "0.0.3" - trim-trailing-lines "^1.0.0" - unherit "^1.0.4" - unist-util-remove-position 
"^1.0.0" - vfile-location "^2.0.0" - xtend "^4.0.1" - "@colors/colors@1.5.0": version "1.5.0" resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.5.0.tgz#bb504579c1cae923e6576a4f5da43d25f97bdbd9" integrity sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ== -"@glimmer/env@0.1.7": - version "0.1.7" - resolved "https://registry.yarnpkg.com/@glimmer/env/-/env-0.1.7.tgz#fd2d2b55a9029c6b37a6c935e8c8871ae70dfa07" - integrity sha512-JKF/a9I9jw6fGoz8kA7LEQslrwJ5jms5CXhu/aqkBWk+PmZ6pTl8mlb/eJ/5ujBGTiQzBhy5AIWF712iA+4/mw== - -"@glimmer/interfaces@^0.56.2": - version "0.56.2" - resolved "https://registry.yarnpkg.com/@glimmer/interfaces/-/interfaces-0.56.2.tgz#bcd4401fa980598fa43959ba38a6f19c65b91ca6" - integrity sha512-nRgcsTuyZ90aEoCuYVHKGDs3LpAv9n/JKiJ6iecpEYtyGgcPqSI3GjrJRl6k+1s5wnldEH1kjWq+ccCiXmA99w== - dependencies: - "@simple-dom/interface" "^1.4.0" - -"@glimmer/syntax@0.56.2": - version "0.56.2" - resolved "https://registry.yarnpkg.com/@glimmer/syntax/-/syntax-0.56.2.tgz#496fbda3b9a54f687cda84ce6f4f169ddac4feb2" - integrity sha512-saoBoLKYEFtcCdBes/eO4QNE/XXJBfEHo2TEVOzKjpc9kIhRKtBZ6Vn9Z1iZBGi+7Mxti83JxvRWKz2ptZd+jQ== - dependencies: - "@glimmer/interfaces" "^0.56.2" - "@glimmer/util" "^0.56.2" - handlebars "^4.7.4" - simple-html-tokenizer "^0.5.9" - -"@glimmer/util@^0.56.2": - version "0.56.2" - resolved "https://registry.yarnpkg.com/@glimmer/util/-/util-0.56.2.tgz#fce9f2844b3b915c1d3c034c5bac3e0dfbe70ef0" - integrity sha512-AljXCX5HBjJkmNt4DNYmJmVvwqKjFF4lU6e0SBftwhzK85RbETYwpb3YWrghcjSCxoodwIu1zNFiKOA+xD6txw== - dependencies: - "@glimmer/env" "0.1.7" - "@glimmer/interfaces" "^0.56.2" - "@simple-dom/interface" "^1.4.0" - -"@iarna/toml@2.2.5": - version "2.2.5" - resolved "https://registry.yarnpkg.com/@iarna/toml/-/toml-2.2.5.tgz#b32366c89b43c6f8cefbdefac778b9c828e3ba8c" - integrity sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg== - -"@nodelib/fs.scandir@2.1.5": - 
version "2.1.5" - resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" - integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== - dependencies: - "@nodelib/fs.stat" "2.0.5" - run-parallel "^1.1.9" - -"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": - version "2.0.5" - resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" - integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== - -"@nodelib/fs.walk@^1.2.3": - version "1.2.8" - resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" - integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== - dependencies: - "@nodelib/fs.scandir" "2.1.5" - fastq "^1.6.0" - -"@prettier-x/formatter-2021-01@^0.0.1-rc01": - version "0.0.1-rc01" - resolved "https://registry.yarnpkg.com/@prettier-x/formatter-2021-01/-/formatter-2021-01-0.0.1-rc01.tgz#29223e938ab73d9c61dd7597437cd89bff03e415" - integrity sha512-7XaAOF/IYCo+j0Fgr1Y27xIqQzNspqZphW0lG+D1BSiCc1Fbc1t+b6DbHK880oRBwUm+H6xNelwvFfjwQPtzWQ== - dependencies: - "@angular/compiler" "9.0.5" - "@babel/code-frame" "7.12.13" - "@babel/parser" "7.12.11" - "@brodybits/remark-parse" "5.0.1" - "@glimmer/syntax" "0.56.2" - "@iarna/toml" "2.2.5" - "@typescript-eslint/typescript-estree" "2.34.0" - angular-estree-parser "1.3.1" - angular-html-parser "1.7.0" - camelcase "6.2.0" - chalk "4.1.1" - ci-info "3.2.0" - cjk-regex "2.0.1" - cosmiconfig "7.0.0" - dashify "2.0.0" - dedent "0.7.0" - diff "5.0.0" - editorconfig "0.15.3" - editorconfig-to-prettier "0.1.1" - escape-string-regexp "4.0.0" - esutils "2.0.3" - fast-glob "3.2.5" - find-parent-dir "0.3.1" - find-project-root "1.1.1" - get-stream "6.0.1" - globby "11.0.4" - graphql "15.5.1" - 
html-element-attributes "2.3.0" - html-styles "1.0.0" - html-tag-names "1.1.5" - html-void-elements "1.0.5" - ignore "4.0.6" - jest-docblock "27.0.1" - json-stable-stringify "1.0.1" - leven "3.1.0" - lines-and-columns "1.1.6" - linguist-languages "7.10.0" - lodash "4.17.21" - mem "8.1.1" - minimatch "3.0.4" - minimist "1.2.5" - n-readlines "1.0.1" - please-upgrade-node "3.2.0" - postcss-less "4.0.1" - postcss-media-query-parser "0.2.3" - postcss-scss "2.1.1" - postcss-selector-parser "2.2.3" - postcss-values-parser "2.0.1" - regexp-util "1.2.2" - remark-math "1.0.6" - resolve "1.20.0" - semver "7.3.5" - srcset "3.0.0" - string-width "4.2.2" - tslib "1.14.1" - unicode-regex "3.0.0" - unified "9.2.1" - vnopts "1.0.2" - yaml-unist-parser "1.3.1" - -"@simple-dom/interface@^1.4.0": - version "1.4.0" - resolved "https://registry.yarnpkg.com/@simple-dom/interface/-/interface-1.4.0.tgz#e8feea579232017f89b0138e2726facda6fbb71f" - integrity sha512-l5qumKFWU0S+4ZzMaLXFU8tQZsicHEMEyAxI5kDFGhJsRqDwe0a7/iPA/GdxlGyDKseQQAgIz5kzU7eXTrlSpA== - "@sinonjs/commons@^2.0.0": version "2.0.0" resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-2.0.0.tgz#fd4ca5b063554307e8327b4564bd56d3b73924a3" @@ -304,12 +116,12 @@ dependencies: type-detect "4.0.8" -"@sinonjs/fake-timers@^10.0.2": - version "10.0.2" - resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-10.0.2.tgz#d10549ed1f423d80639c528b6c7f5a1017747d0c" - integrity sha512-SwUDyjWnah1AaNl7kxsa7cfLhlTYoiyhDAIgyh+El30YvXs/o7OLXpYH88Zdhyx9JExKrmHDJ+10bwIcY80Jmw== +"@sinonjs/fake-timers@^10.0.2", "@sinonjs/fake-timers@^10.3.0": + version "10.3.0" + resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz#55fdff1ecab9f354019129daf4df0dd4d923ea66" + integrity sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA== dependencies: - "@sinonjs/commons" "^2.0.0" + "@sinonjs/commons" "^3.0.0" "@sinonjs/samsam@^8.0.0": version "8.0.0" @@ -336,16 
+148,16 @@ integrity sha512-XW/Aa8APYr6jSVVA1y/DEIZX0/GMKLEVekNG727R8cs56ahETkRAy/3DR7+fJyh7oUgGwNQaRfXCun0+KbWY7Q== "@types/cors@^2.8.12": - version "2.8.13" - resolved "https://registry.yarnpkg.com/@types/cors/-/cors-2.8.13.tgz#b8ade22ba455a1b8cb3b5d3f35910fd204f84f94" - integrity sha512-RG8AStHlUiV5ysZQKq97copd2UmVYw3/pRMLefISZ3S1hK104Cwm7iLQ3fTKx+lsUH2CE8FlLaYeEA2LSeqYUA== + version "2.8.17" + resolved "https://registry.yarnpkg.com/@types/cors/-/cors-2.8.17.tgz#5d718a5e494a8166f569d986794e49c48b216b2b" + integrity sha512-8CGDvrBj1zgo2qE+oS3pOCyYNqCPryMWY2bGfwA0dcfopWGgxs+78df0Rs3rc9THP4JkOhLsAa+15VdpAqkcUA== dependencies: "@types/node" "*" "@types/jquery@^3.5.16": - version "3.5.16" - resolved "https://registry.yarnpkg.com/@types/jquery/-/jquery-3.5.16.tgz#632131baf30951915b0317d48c98e9890bdf051d" - integrity sha512-bsI7y4ZgeMkmpG9OM710RRzDFp+w4P1RGiIt30C1mSBT+ExCleeh4HObwgArnDFELmRrOpXgSYN9VF1hj+f1lw== + version "3.5.29" + resolved "https://registry.yarnpkg.com/@types/jquery/-/jquery-3.5.29.tgz#3c06a1f519cd5fc3a7a108971436c00685b5dcea" + integrity sha512-oXQQC9X9MOPRrMhPHHOsXqeQDnWeCDT3PelUIg/Oy8FAbzSZtFHRjc7IpbfFVmpLtJ+UOoywpRsuO5Jxjybyeg== dependencies: "@types/sizzle" "*" @@ -355,37 +167,16 @@ integrity sha512-Klz949h02Gz2uZCMGwDUSDS1YBlTdDDgbWHi+81l29tQALUtvz4rAYi5uoVhE5Lagoq6DeqAUlbrHvW/mXDgdQ== "@types/node@*", "@types/node@>=10.0.0": - version "18.15.11" - resolved "https://registry.yarnpkg.com/@types/node/-/node-18.15.11.tgz#b3b790f09cb1696cffcec605de025b088fa4225f" - integrity sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q== - -"@types/parse-json@^4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0" - integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== + version "20.10.5" + resolved 
"https://registry.yarnpkg.com/@types/node/-/node-20.10.5.tgz#47ad460b514096b7ed63a1dae26fad0914ed3ab2" + integrity sha512-nNPsNE65wjMxEKI93yOP+NPGGBJz/PoN3kZsVLee0XMiJolxSekEVD8wRwBUBqkwc7UWop0edW50yrCQW4CyRw== + dependencies: + undici-types "~5.26.4" "@types/sizzle@*": - version "2.3.3" - resolved "https://registry.yarnpkg.com/@types/sizzle/-/sizzle-2.3.3.tgz#ff5e2f1902969d305225a047c8a0fd5c915cebef" - integrity sha512-JYM8x9EGF163bEyhdJBpR2QX1R5naCJHC8ucJylJ3w9/CVBaskdQ8WqBf8MmQrd1kRvp/a4TS8HJ+bxzR7ZJYQ== - -"@types/unist@^2.0.0", "@types/unist@^2.0.2": - version "2.0.6" - resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.6.tgz#250a7b16c3b91f672a24552ec64678eeb1d3a08d" - integrity sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ== - -"@typescript-eslint/typescript-estree@2.34.0": - version "2.34.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-2.34.0.tgz#14aeb6353b39ef0732cc7f1b8285294937cf37d5" - integrity sha512-OMAr+nJWKdlVM9LOqCqh3pQQPwxHAN7Du8DR6dmwCrAmxtiXQnhHJ6tBNtf+cggqfo51SG/FCwnKhXCIM7hnVg== - dependencies: - debug "^4.1.1" - eslint-visitor-keys "^1.1.0" - glob "^7.1.6" - is-glob "^4.0.1" - lodash "^4.17.15" - semver "^7.3.2" - tsutils "^3.17.1" + version "2.3.8" + resolved "https://registry.yarnpkg.com/@types/sizzle/-/sizzle-2.3.8.tgz#518609aefb797da19bf222feb199e8f653ff7627" + integrity sha512-0vWLNK2D5MT9dg0iOo8GlKguPAU02QjmZitPEsXRuJXU/OGIOt9vT9Fc26wtYuavLxtO45v9PGleoL9Z0k1LHg== abbrev@1: version "1.1.1" @@ -400,33 +191,11 @@ accepts@~1.3.4: mime-types "~2.1.34" negotiator "0.6.3" -angular-estree-parser@1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/angular-estree-parser/-/angular-estree-parser-1.3.1.tgz#5b590c3ef431fccb512755eea563029f84c043ee" - integrity sha512-jvlnNk4aoEmA6EKK12OnsOkCSdsWleBsYB+aWyH8kpfTB6Li1kxWVbHKVldH9zDCwVVi1hXfqPi/gbSv49tkbQ== - dependencies: - lines-and-columns "^1.1.6" - tslib "^1.9.3" - 
-angular-html-parser@1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/angular-html-parser/-/angular-html-parser-1.7.0.tgz#3ac852c1af4ff7719cfa9b8a199ce28f1e35b5aa" - integrity sha512-/yjeqDQXGblZuFMI6vpDgiIDuv816QpIqa/mCotc0I4R0F5t5sfX1ntZ8VsBVQOUYRjPw8ggYlPZto76gHtf7Q== - dependencies: - tslib "^1.9.3" - ansi-regex@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== -ansi-styles@^3.2.1: - version "3.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" - integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== - dependencies: - color-convert "^1.9.0" - ansi-styles@^4.0.0, ansi-styles@^4.1.0: version "4.3.0" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" @@ -475,9 +244,9 @@ arrify@^2.0.1: integrity sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug== async@~3.2.0: - version "3.2.4" - resolved "https://registry.yarnpkg.com/async/-/async-3.2.4.tgz#2d22e00f8cddeb5fde5dd33522b56d1cf569a81c" - integrity sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ== + version "3.2.5" + resolved "https://registry.yarnpkg.com/async/-/async-3.2.5.tgz#ebd52a8fdaf7a2289a24df399f8d8485c8a46b66" + integrity sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg== backbone.paginator@^2.0.5: version "2.0.8" @@ -488,9 +257,9 @@ backbone.paginator@^2.0.5: underscore "^1.8.0" "backbone@1.1.2 || 1.2.3 || ^1.3.2": - version "1.4.1" - resolved "https://registry.yarnpkg.com/backbone/-/backbone-1.4.1.tgz#099a78184bc07b034048a8332229c2ccca1e3e62" - integrity 
sha512-ADy1ztN074YkWbHi8ojJVFe3vAanO/lrzMGZWUClIP7oDD/Pjy2vrASraUP+2EVCfIiTtCW4FChVow01XneivA== + version "1.5.0" + resolved "https://registry.yarnpkg.com/backbone/-/backbone-1.5.0.tgz#3f20e4c6feb22f896131bcc58eb599b9555e8744" + integrity sha512-RPKlstw5NW+rD2X4PnEnvgLhslRnXOugXw2iBloHkPMgOxvakP1/A+tZIGM3qCm8uvZeEf8zMm0uvcK1JwL+IA== dependencies: underscore ">=1.8.3" @@ -516,11 +285,6 @@ backgrid@~0.3.7: backbone "1.1.2 || 1.2.3 || ~1.3.2" underscore "^1.8.0" -bail@^1.0.0: - version "1.0.5" - resolved "https://registry.yarnpkg.com/bail/-/bail-1.0.5.tgz#b6fa133404a392cbc1f8c4bf63f5953351e7a776" - integrity sha512-xFbRxM1tahm08yHBP16MMjVUAvDaBMD38zsM9EMAUN61omwLmKlOpB/Zku5QkjZ8TZ4vn53pj+t518cH0S03RQ== - balanced-match@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" @@ -575,39 +339,13 @@ bytes@3.1.2: integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== call-bind@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" - integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== - dependencies: - function-bind "^1.1.1" - get-intrinsic "^1.0.2" - -callsites@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" - integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== - -camelcase@6.2.0: - version "6.2.0" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.2.0.tgz#924af881c9d525ac9d87f40d964e5cea982a1809" - integrity sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg== - -chalk@4.1.1: - version "4.1.1" - resolved 
"https://registry.yarnpkg.com/chalk/-/chalk-4.1.1.tgz#c80b3fab28bf6371e6863325eee67e618b77e6ad" - integrity sha512-diHzdDKxcU+bAsUboHLPEDQiw0qEe0qd7SYUn3HgcFlWgbDcfLGswOHYeGrHKzG9z6UYf01d9VFMfZxPM1xZSg== - dependencies: - ansi-styles "^4.1.0" - supports-color "^7.1.0" - -chalk@^2.0.0, chalk@^2.4.1: - version "2.4.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" - integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + version "1.0.5" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.5.tgz#6fa2b7845ce0ea49bf4d8b9ef64727a2c2e2e513" + integrity sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ== dependencies: - ansi-styles "^3.2.1" - escape-string-regexp "^1.0.5" - supports-color "^5.3.0" + function-bind "^1.1.2" + get-intrinsic "^1.2.1" + set-function-length "^1.1.1" chalk@~4.1.0: version "4.1.2" @@ -617,21 +355,6 @@ chalk@~4.1.0: ansi-styles "^4.1.0" supports-color "^7.1.0" -character-entities-legacy@^1.0.0: - version "1.1.4" - resolved "https://registry.yarnpkg.com/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz#94bc1845dce70a5bb9d2ecc748725661293d8fc1" - integrity sha512-3Xnr+7ZFS1uxeiUDvV02wQ+QDbc55o97tIV5zHScSPJpcLm/r0DFPcoY3tYRp+VZukxuMeKgXYmsXQHO05zQeA== - -character-entities@^1.0.0: - version "1.2.4" - resolved "https://registry.yarnpkg.com/character-entities/-/character-entities-1.2.4.tgz#e12c3939b7eaf4e5b15e7ad4c5e28e1d48c5b16b" - integrity sha512-iBMyeEHxfVnIakwOuDXpVkc54HijNgCyQB2w0VfGQThle6NXn50zU6V/u+LDhxHcDUPojn6Kpga3PTAD8W1bQw== - -character-reference-invalid@^1.0.0: - version "1.1.4" - resolved "https://registry.yarnpkg.com/character-reference-invalid/-/character-reference-invalid-1.1.4.tgz#083329cda0eae272ab3dbbf37e9a382c13af1560" - integrity sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg== - chokidar@^3.5.1: version 
"3.5.3" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" @@ -647,19 +370,6 @@ chokidar@^3.5.1: optionalDependencies: fsevents "~2.3.2" -ci-info@3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.2.0.tgz#2876cb948a498797b5236f0095bc057d0dca38b6" - integrity sha512-dVqRX7fLUm8J6FgHJ418XuIgDLZDkYcDFTeL6TA2gt5WlIZUQrrH6EZrNClwT/H0FateUsZkGIOPRrLbP+PR9A== - -cjk-regex@2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/cjk-regex/-/cjk-regex-2.0.1.tgz#98cca187aa67931db14f0d9dde556150c8116d95" - integrity sha512-4YTL4Zxzy33EhD2YMBQg6qavT+3OrYYu45RHcLANXhbVTXmVcwNQIv0vL1TUWjOS7bH0n0dVcGAdJAGzWSAa3A== - dependencies: - regexp-util "^1.2.1" - unicode-regex "^2.0.0" - cliui@^7.0.2: version "7.0.4" resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" @@ -669,18 +379,6 @@ cliui@^7.0.2: strip-ansi "^6.0.0" wrap-ansi "^7.0.0" -collapse-white-space@^1.0.2: - version "1.0.6" - resolved "https://registry.yarnpkg.com/collapse-white-space/-/collapse-white-space-1.0.6.tgz#e63629c0016665792060dbbeb79c42239d2c5287" - integrity sha512-jEovNnrhMuqyCcjfEJA56v0Xq8SkIoPKDyaHahwo3POf4qcSXqMYuwNcOTzp74vTsR9Tn08z4MxWqAhcekogkQ== - -color-convert@^1.9.0: - version "1.9.3" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" - integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== - dependencies: - color-name "1.1.3" - color-convert@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" @@ -688,11 +386,6 @@ color-convert@^2.0.1: dependencies: color-name "~1.1.4" -color-name@1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" - integrity 
sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== - color-name@~1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" @@ -708,11 +401,6 @@ commander@7: resolved "https://registry.yarnpkg.com/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== -commander@^2.19.0: - version "2.20.3" - resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" - integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== - concat-map@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" @@ -746,26 +434,15 @@ cors@~2.8.5: object-assign "^4" vary "^1" -cosmiconfig@7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.0.0.tgz#ef9b44d773959cae63ddecd122de23853b60f8d3" - integrity sha512-pondGvTuVYDk++upghXJabWzL6Kxu6f26ljFw64Swq9v6sQPUL3EUlVDV56diOjpCayKihL6hVe8exIACU4XcA== - dependencies: - "@types/parse-json" "^4.0.0" - import-fresh "^3.2.1" - parse-json "^5.0.0" - path-type "^4.0.0" - yaml "^1.10.0" - custom-event@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/custom-event/-/custom-event-1.0.1.tgz#5d02a46850adf1b4a317946a3928fccb5bfd0425" integrity sha512-GAj5FOq0Hd+RsCGVJxZuKaIDXDf3h6GQoNEjFgbLLI/trgtavwUbSnZ5pVfg27DVCaWjIohryS0JFwIJyT2cMg== "d3-array@2 - 3", "d3-array@2.10.0 - 3", "d3-array@2.5.0 - 3", d3-array@3: - version "3.2.3" - resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-3.2.3.tgz#39f1f4954e4a09ff69ac597c2d61906b04e84740" - integrity sha512-JRHwbQQ84XuAESWhvIPaUV4/1UYTBOLiOPGWqgFDHZS1D5QN9c57FbH3QpEnQMYiOXNzKUQyGTZf+EVO7RT5TQ== + version "3.2.4" + resolved 
"https://registry.yarnpkg.com/d3-array/-/d3-array-3.2.4.tgz#15fec33b237f97ac5d7c986dc77da273a8ed0bb5" + integrity sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg== dependencies: internmap "1 - 2" @@ -805,9 +482,9 @@ d3-contour@3: d3-array "2 - 3" d3-delaunay@6: - version "6.0.3" - resolved "https://registry.yarnpkg.com/d3-delaunay/-/d3-delaunay-6.0.3.tgz#d0824ba2012a5f6cd17d035653d0515d1c098257" - integrity sha512-1gPbiMuikAgU/rFcT6WMu17zx0aelw9Hh80go7/TwZQ+/uq8DqqmiNYy+EqPEvTSp/BkJFIpQxjac4Gk/w0zOg== + version "6.0.4" + resolved "https://registry.yarnpkg.com/d3-delaunay/-/d3-delaunay-6.0.4.tgz#98169038733a0a5babbeda55054f795bb9e4a58b" + integrity sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A== dependencies: delaunator "5" @@ -970,15 +647,10 @@ d3-zoom@3: d3-selection "2 - 3" d3-transition "2 - 3" -dashify@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/dashify/-/dashify-2.0.0.tgz#fff270ca2868ca427fee571de35691d6e437a648" - integrity sha512-hpA5C/YrPjucXypHPPc0oJ1l9Hf6wWbiOL7Ik42cxnsUOhWiCB/fylKbKqqJalW9FgkNQCw16YO8uW9Hs0Iy1A== - datatables.net@^1.13.2: - version "1.13.4" - resolved "https://registry.yarnpkg.com/datatables.net/-/datatables.net-1.13.4.tgz#9a809cee82eca0a884e10b4d47a3a3d6e65e9fe7" - integrity sha512-yzhArTOB6tPO2QFKm1z3hA4vabtt2hRvgw8XLsT1xqEirinfGYqWDiWXlkTPTaJv2e7gG+Kf985sXkzBFlGrGQ== + version "1.13.8" + resolved "https://registry.yarnpkg.com/datatables.net/-/datatables.net-1.13.8.tgz#05a2fb5a036b0b65b66d1bb1eae0ba018aaea8a3" + integrity sha512-2pDamr+GUwPTby2OgriVB9dR9ftFKD2AQyiuCXzZIiG4d9KkKFQ7gqPfNmG7uj9Tc5kDf+rGj86do4LAb/V71g== dependencies: jquery ">=1.7" @@ -999,17 +671,21 @@ debug@2.6.9: dependencies: ms "2.0.0" -debug@^4.1.1, debug@^4.3.4, debug@~4.3.1, debug@~4.3.2: +debug@^4.3.4, debug@~4.3.1, debug@~4.3.2: version "4.3.4" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" 
integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== dependencies: ms "2.1.2" -dedent@0.7.0: - version "0.7.0" - resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" - integrity sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA== +define-data-property@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.1.tgz#c35f7cd0ab09883480d12ac5cb213715587800b3" + integrity sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ== + dependencies: + get-intrinsic "^1.2.1" + gopd "^1.0.1" + has-property-descriptors "^1.0.0" delaunator@5: version "5.0.0" @@ -1033,33 +709,16 @@ detect-file@^1.0.0: resolved "https://registry.yarnpkg.com/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7" integrity sha512-DtCOLG98P007x7wiiOmfI0fi3eIKyWiLTGJ2MDnVi/E04lWGbf+JzrRHMm0rgIIZJGtHpKpbVgLWHrv8xXpc3Q== -detect-newline@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" - integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== - di@^0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/di/-/di-0.0.1.tgz#806649326ceaa7caa3306d75d985ea2748ba913c" integrity sha512-uJaamHkagcZtHPqCIHZxnFrXlunQXgBOsZSUOWwFw31QJCAbyTBoHMW75YOTur5ZNx8pIeAKgf6GWIgaqqiLhA== -diff@5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/diff/-/diff-5.0.0.tgz#7ed6ad76d859d030787ec35855f5b1daf31d852b" - integrity sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w== - diff@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/diff/-/diff-5.1.0.tgz#bc52d298c5ea8df9194800224445ed43ffc87e40" integrity 
sha512-D+mk+qE8VC/PAUrlAU34N+VfXev0ghe5ywmpqrawphmVZc1bEfn56uo9qpyGp1p4xpzOHkSW4ztBd6L7Xx4ACw== -dir-glob@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" - integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== - dependencies: - path-type "^4.0.0" - dom-serialize@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/dom-serialize/-/dom-serialize-2.2.1.tgz#562ae8999f44be5ea3076f5419dcd59eb43ac95b" @@ -1070,21 +729,6 @@ dom-serialize@^2.2.1: extend "^3.0.0" void-elements "^2.0.0" -editorconfig-to-prettier@0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/editorconfig-to-prettier/-/editorconfig-to-prettier-0.1.1.tgz#7391c7067dfd68ffee65afc2c4fbe4fba8d4219a" - integrity sha512-MMadSSVRDb4uKdxV6bCXXN4cTsxIsXYtV4XdPu6FOCSAw6zsCIDA+QEktEU+u6h+c/mTrul5NR+pwFpPxwetiQ== - -editorconfig@0.15.3: - version "0.15.3" - resolved "https://registry.yarnpkg.com/editorconfig/-/editorconfig-0.15.3.tgz#bef84c4e75fb8dcb0ce5cee8efd51c15999befc5" - integrity sha512-M9wIMFx96vq0R4F+gRpY3o2exzb8hEj/n9S8unZtHSvYjibBp/iMufSzvmOcV/laG0ZtuTVGtiJggPOSW2r93g== - dependencies: - commander "^2.19.0" - lru-cache "^4.1.5" - semver "^5.6.0" - sigmund "^1.0.1" - ee-first@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" @@ -1100,15 +744,15 @@ encodeurl@~1.0.2: resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== -engine.io-parser@~5.0.3: - version "5.0.6" - resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-5.0.6.tgz#7811244af173e157295dec9b2718dfe42a64ef45" - integrity sha512-tjuoZDMAdEhVnSFleYPCtdL2GXwVTGtNjoeJd9IhIG3C1xs9uwxqRNEu5WpnDZCaozwVlK/nuQhpodhXSIMaxw== 
+engine.io-parser@~5.2.1: + version "5.2.1" + resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-5.2.1.tgz#9f213c77512ff1a6cc0c7a86108a7ffceb16fcfb" + integrity sha512-9JktcM3u18nU9N2Lz3bWeBgxVgOKpw7yhRaoxQA3FUDZzzw+9WlA6p4G4u0RixNkg14fH7EfEc/RhpurtiROTQ== -engine.io@~6.4.1: - version "6.4.1" - resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-6.4.1.tgz#8056b4526a88e779f9c280d820422d4e3eeaaae5" - integrity sha512-JFYQurD/nbsA5BSPmbaOSLa3tSVj8L6o4srSwXXY3NqE+gGUNmmPTbhn8tjzcCtSqhFgIeqef81ngny8JM25hw== +engine.io@~6.5.2: + version "6.5.4" + resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-6.5.4.tgz#6822debf324e781add2254e912f8568508850cdc" + integrity sha512-KdVSDKhVKyOi+r5uEabrDLZw2qXStVvCsEB/LN3mw4WFi6Gx50jTyuxYVCwAAC0U46FdnzP/ScKRBTXb/NiEOg== dependencies: "@types/cookie" "^0.4.1" "@types/cors" "^2.8.12" @@ -1118,7 +762,7 @@ engine.io@~6.4.1: cookie "~0.4.1" cors "~2.8.5" debug "~4.3.1" - engine.io-parser "~5.0.3" + engine.io-parser "~5.2.1" ws "~8.11.0" ent@~2.2.0: @@ -1126,13 +770,6 @@ ent@~2.2.0: resolved "https://registry.yarnpkg.com/ent/-/ent-2.2.0.tgz#e964219325a21d05f44466a2f686ed6ce5f5dd1d" integrity sha512-GHrMyVZQWvTIdDtpiEXdHZnFQKzeO09apj8Cbl4pKWy4i0Oprcq17usfDt5aO63swf0JOeMWjWQE/LzgSRuWpA== -error-ex@^1.3.1: - version "1.3.2" - resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" - integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== - dependencies: - is-arrayish "^0.2.1" - escalade@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" @@ -1143,31 +780,11 @@ escape-html@~1.0.3: resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== 
-escape-string-regexp@4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" - integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== - -escape-string-regexp@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" - integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== - -eslint-visitor-keys@^1.1.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e" - integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ== - esprima@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== -esutils@2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" - integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== - eventemitter2@~0.4.13: version "0.4.14" resolved "https://registry.yarnpkg.com/eventemitter2/-/eventemitter2-0.4.14.tgz#8f61b75cde012b2e9eb284d4545583b5643b61ab" @@ -1195,36 +812,6 @@ extend@^3.0.0, extend@^3.0.2: resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== -fast-glob@3.2.5: - version "3.2.5" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.5.tgz#7939af2a656de79a4f1901903ee8adcaa7cb9661" - integrity 
sha512-2DtFcgT68wiTTiwZ2hNdJfcHNke9XOfnwmBRWXhmeKM8rF0TGwmC/Qto3S7RoZKp5cilZbxzO5iTNTQsJ+EeDg== - dependencies: - "@nodelib/fs.stat" "^2.0.2" - "@nodelib/fs.walk" "^1.2.3" - glob-parent "^5.1.0" - merge2 "^1.3.0" - micromatch "^4.0.2" - picomatch "^2.2.1" - -fast-glob@^3.1.1: - version "3.2.12" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" - integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== - dependencies: - "@nodelib/fs.stat" "^2.0.2" - "@nodelib/fs.walk" "^1.2.3" - glob-parent "^5.1.2" - merge2 "^1.3.0" - micromatch "^4.0.4" - -fastq@^1.6.0: - version "1.15.0" - resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.15.0.tgz#d04d07c6a2a68fe4599fea8d2e103a937fae6b3a" - integrity sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw== - dependencies: - reusify "^1.0.4" - fill-range@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" @@ -1245,16 +832,6 @@ finalhandler@1.1.2: statuses "~1.5.0" unpipe "~1.0.0" -find-parent-dir@0.3.1: - version "0.3.1" - resolved "https://registry.yarnpkg.com/find-parent-dir/-/find-parent-dir-0.3.1.tgz#c5c385b96858c3351f95d446cab866cbf9f11125" - integrity sha512-o4UcykWV/XN9wm+jMEtWLPlV8RXCZnMhQI6F6OdHeSez7iiJWePw8ijOlskJZMsaQoGR/b7dH6lO02HhaTN7+A== - -find-project-root@1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/find-project-root/-/find-project-root-1.1.1.tgz#d242727a2d904725df5714f23dfdcdedda0b6ef8" - integrity sha512-4+yZ013W+EZc+hvdgA2RlzlgNfP1eGdMNxU6xzw1yt518cF6/xZD3kLV+bprYX5+AD0IL76xcN28TPqYJHxrHw== - find-up@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" @@ -1299,19 +876,14 @@ flagged-respawn@^1.0.1: integrity 
sha512-lNaHNVymajmk0OJMBn8fVUAU1BtDeKIqKoVhk4xAALB57aALg6b4W0MfJ/cUE0g9YBXy5XhSlPIpYIJ7HaY/3Q== flatted@^3.2.7: - version "3.2.7" - resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787" - integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ== - -flatten@^1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/flatten/-/flatten-1.0.3.tgz#c1283ac9f27b368abc1e36d1ff7b04501a30356b" - integrity sha512-dVsPA/UwQ8+2uoFe5GHtiBMu48dWLTdsuEd7CKGlZlD78r1TTWBvDuFaFGKCo/ZfEr95Uk56vZoX86OsHkUeIg== + version "3.2.9" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.9.tgz#7eb4c67ca1ba34232ca9d2d93e9886e611ad7daf" + integrity sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ== follow-redirects@^1.0.0: - version "1.15.2" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13" - integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== + version "1.15.3" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.3.tgz#fe2f3ef2690afce7e82ed0b44db08165b207123a" + integrity sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q== for-in@^1.0.1: version "1.0.2" @@ -1340,47 +912,43 @@ fs.realpath@^1.0.0: integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== fsevents@~2.3.2: - version "2.3.2" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" - integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + version "2.3.3" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" + integrity 
sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== -function-bind@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" - integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== +function-bind@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" + integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== get-caller-file@^2.0.5: version "2.0.5" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== -get-intrinsic@^1.0.2: - version "1.2.0" - resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.0.tgz#7ad1dc0535f3a2904bba075772763e5051f6d05f" - integrity sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q== +get-intrinsic@^1.0.2, get-intrinsic@^1.1.3, get-intrinsic@^1.2.1, get-intrinsic@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.2.tgz#281b7622971123e1ef4b3c90fd7539306da93f3b" + integrity sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA== dependencies: - function-bind "^1.1.1" - has "^1.0.3" + function-bind "^1.1.2" + has-proto "^1.0.1" has-symbols "^1.0.3" - -get-stream@6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" - integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== + hasown "^2.0.0" getobject@~1.0.0: version "1.0.2" resolved 
"https://registry.yarnpkg.com/getobject/-/getobject-1.0.2.tgz#25ec87a50370f6dcc3c6ba7ef43c4c16215c4c89" integrity sha512-2zblDBaFcb3rB4rF77XVnuINOE2h2k/OnqXAiy0IrTxUfV1iFp3la33oAQVY9pCpWU268WFYVt2t71hlMuLsOg== -glob-parent@^5.1.0, glob-parent@^5.1.2, glob-parent@~5.1.2: +glob-parent@~5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== dependencies: is-glob "^4.0.1" -glob@^7.1.3, glob@^7.1.6, glob@^7.1.7: +glob@^7.1.3, glob@^7.1.7: version "7.2.3" resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== @@ -1424,28 +992,18 @@ global-prefix@^1.0.1: is-windows "^1.0.1" which "^1.2.14" -globby@11.0.4: - version "11.0.4" - resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.4.tgz#2cbaff77c2f2a62e71e9b2813a67b97a3a3001a5" - integrity sha512-9O4MVG9ioZJ08ffbcyVYyLOJLk5JQ688pJ4eMGLpdWLHq/Wr1D9BlriLQyL0E+jbkuePVZXYFj47QM/v093wHg== +gopd@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c" + integrity sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA== dependencies: - array-union "^2.1.0" - dir-glob "^3.0.1" - fast-glob "^3.1.1" - ignore "^5.1.4" - merge2 "^1.3.0" - slash "^3.0.0" + get-intrinsic "^1.1.3" graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.6: version "4.2.11" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== -graphql@15.5.1: - version "15.5.1" - resolved 
"https://registry.yarnpkg.com/graphql/-/graphql-15.5.1.tgz#f2f84415d8985e7b84731e7f3536f8bb9d383aad" - integrity sha512-FeTRX67T3LoE3LWAxxOlW2K3Bz+rMYAC18rRguK4wgXaTZMiJwSUwDmPFo3UadAKbzirKIg5Qy+sNJXbpPRnQw== - grunt-cli@~1.4.3: version "1.4.3" resolved "https://registry.yarnpkg.com/grunt-cli/-/grunt-cli-1.4.3.tgz#22c9f1a3d2780bf9b0d206e832e40f8f499175ff" @@ -1519,39 +1077,34 @@ grunt@^1.6.1: minimatch "~3.0.4" nopt "~3.0.6" -handlebars@^4.7.4: - version "4.7.7" - resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.7.tgz#9ce33416aad02dbd6c8fafa8240d5d98004945a1" - integrity sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA== - dependencies: - minimist "^1.2.5" - neo-async "^2.6.0" - source-map "^0.6.1" - wordwrap "^1.0.0" - optionalDependencies: - uglify-js "^3.1.4" - -has-flag@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" - integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== - has-flag@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== +has-property-descriptors@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz#52ba30b6c5ec87fd89fa574bc1c39125c6f65340" + integrity sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg== + dependencies: + get-intrinsic "^1.2.2" + +has-proto@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.1.tgz#1885c1305538958aff469fef37937c22795408e0" + integrity sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg== + has-symbols@^1.0.3: version "1.0.3" 
resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== -has@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" - integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== +hasown@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.0.tgz#f4c513d454a57b7c7e1650778de226b11700546c" + integrity sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA== dependencies: - function-bind "^1.1.1" + function-bind "^1.1.2" homedir-polyfill@^1.0.1: version "1.0.3" @@ -1565,26 +1118,6 @@ hooker@~0.2.3: resolved "https://registry.yarnpkg.com/hooker/-/hooker-0.2.3.tgz#b834f723cc4a242aa65963459df6d984c5d3d959" integrity sha512-t+UerCsQviSymAInD01Pw+Dn/usmz1sRO+3Zk1+lx8eg+WKpD2ulcwWqHHL0+aseRBr+3+vIhiG1K1JTwaIcTA== -html-element-attributes@2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/html-element-attributes/-/html-element-attributes-2.3.0.tgz#a192ac90a457be9f1e2cc9ab69000ee89be74aa6" - integrity sha512-RJv2v3BBaYSc0ODHwT0sqWI+2lFs6DATBvCRnW20BDmULxoAWvfT6r28uL8LcW1a9/eqUl+1DccUOJzw00qVXQ== - -html-styles@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/html-styles/-/html-styles-1.0.0.tgz#a18061fd651f99c6b75c45c8e0549a3bc3e01a75" - integrity sha512-cDl5dcj73oI4Hy0DSUNh54CAwslNLJRCCoO+RNkVo+sBrjA/0+7E/xzvj3zH/GxbbBLGJhE0hBe1eg+0FINC6w== - -html-tag-names@1.1.5: - version "1.1.5" - resolved "https://registry.yarnpkg.com/html-tag-names/-/html-tag-names-1.1.5.tgz#f537420c16769511283f8ae1681785fbc89ee0a9" - integrity sha512-aI5tKwNTBzOZApHIynaAwecLBv8TlZTEy/P4Sj2SzzAhBrGuI8yGZ0UIXVPQzOHGS+to2mjb04iy6VWt/8+d8A== - -html-void-elements@1.0.5: - version "1.0.5" - resolved 
"https://registry.yarnpkg.com/html-void-elements/-/html-void-elements-1.0.5.tgz#ce9159494e86d95e45795b166c2021c2cfca4483" - integrity sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w== - http-errors@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" @@ -1619,29 +1152,6 @@ iconv-lite@0.6, iconv-lite@~0.6.3: dependencies: safer-buffer ">= 2.1.2 < 3.0.0" -ignore@4.0.6: - version "4.0.6" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc" - integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg== - -ignore@^5.1.4: - version "5.2.4" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324" - integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ== - -import-fresh@^3.2.1: - version "3.3.0" - resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" - integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== - dependencies: - parent-module "^1.0.0" - resolve-from "^4.0.0" - -indexes-of@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607" - integrity sha512-bup+4tap3Hympa+JBJUG7XuOsdNQ6fxt0MHyXMKuLBKn0OqsTfvUxkUrroEX1+B2VsSHvCjiIcZVxRtYa4nllA== - inflight@^1.0.4: version "1.0.6" resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" @@ -1650,7 +1160,7 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@2, inherits@2.0.4, inherits@^2.0.0: +inherits@2, inherits@2.0.4: version "2.0.4" resolved 
"https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== @@ -1678,24 +1188,6 @@ is-absolute@^1.0.0: is-relative "^1.0.0" is-windows "^1.0.1" -is-alphabetical@^1.0.0: - version "1.0.4" - resolved "https://registry.yarnpkg.com/is-alphabetical/-/is-alphabetical-1.0.4.tgz#9e7d6b94916be22153745d184c298cbf986a686d" - integrity sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg== - -is-alphanumerical@^1.0.0: - version "1.0.4" - resolved "https://registry.yarnpkg.com/is-alphanumerical/-/is-alphanumerical-1.0.4.tgz#7eb9a2431f855f6b1ef1a78e326df515696c4dbf" - integrity sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A== - dependencies: - is-alphabetical "^1.0.0" - is-decimal "^1.0.0" - -is-arrayish@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" - integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== - is-binary-path@~2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" @@ -1703,22 +1195,12 @@ is-binary-path@~2.1.0: dependencies: binary-extensions "^2.0.0" -is-buffer@^2.0.0: - version "2.0.5" - resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191" - integrity sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ== - -is-core-module@^2.2.0, is-core-module@^2.9.0: - version "2.11.0" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.11.0.tgz#ad4cb3e3863e814523c96f3f58d26cc570ff0144" - integrity 
sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw== +is-core-module@^2.13.0: + version "2.13.1" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.13.1.tgz#ad0d7532c6fea9da1ebdc82742d74525c6273384" + integrity sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw== dependencies: - has "^1.0.3" - -is-decimal@^1.0.0: - version "1.0.4" - resolved "https://registry.yarnpkg.com/is-decimal/-/is-decimal-1.0.4.tgz#65a3a5958a1c5b63a706e1b333d7cd9f630d3fa5" - integrity sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw== + hasown "^2.0.0" is-extglob@^2.1.1: version "2.1.1" @@ -1737,21 +1219,11 @@ is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: dependencies: is-extglob "^2.1.1" -is-hexadecimal@^1.0.0: - version "1.0.4" - resolved "https://registry.yarnpkg.com/is-hexadecimal/-/is-hexadecimal-1.0.4.tgz#cc35c97588da4bd49a8eedd6bc4082d44dcb23a7" - integrity sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw== - is-number@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== -is-plain-obj@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-2.1.0.tgz#45e42e37fccf1f40da8e5f76ee21515840c09287" - integrity sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA== - is-plain-object@^2.0.3, is-plain-object@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" @@ -1773,21 +1245,11 @@ is-unc-path@^1.0.0: dependencies: unc-path-regex "^0.1.2" -is-whitespace-character@^1.0.0: - version "1.0.4" - resolved 
"https://registry.yarnpkg.com/is-whitespace-character/-/is-whitespace-character-1.0.4.tgz#0858edd94a95594c7c9dd0b5c174ec6e45ee4aa7" - integrity sha512-SDweEzfIZM0SJV0EUga669UTKlmL0Pq8Lno0QDQsPnvECB3IM2aP0gdx5TrU0A01MAPfViaZiI2V1QMZLaKK5w== - is-windows@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== -is-word-character@^1.0.0: - version "1.0.4" - resolved "https://registry.yarnpkg.com/is-word-character/-/is-word-character-1.0.4.tgz#ce0e73216f98599060592f62ff31354ddbeb0230" - integrity sha512-5SMO8RVennx3nZrqtKwCGyyetPE9VDba5ugvKLaD4KopPG5kR4mQ7tNt/r7feL5yt5h3lpuBbIUmCOG2eSzXHA== - isarray@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" @@ -1808,22 +1270,10 @@ isobject@^3.0.0, isobject@^3.0.1: resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== -jest-docblock@27.0.1: - version "27.0.1" - resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-27.0.1.tgz#bd9752819b49fa4fab1a50b73eb58c653b962e8b" - integrity sha512-TA4+21s3oebURc7VgFV4r7ltdIJ5rtBH1E3Tbovcg7AV+oLfD5DcJ2V2vJ5zFA9sL5CFd/d2D6IpsAeSheEdrA== - dependencies: - detect-newline "^3.0.0" - jquery@>=1.7: - version "3.6.4" - resolved "https://registry.yarnpkg.com/jquery/-/jquery-3.6.4.tgz#ba065c188142100be4833699852bf7c24dc0252f" - integrity sha512-v28EW9DWDFpzcD9O5iyJXg3R3+q+mET5JhnjJzQUZMHOv67bpSIHq81GEYpPNZHG+XXHsfSme3nxp/hndKEcsQ== - -js-tokens@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" - integrity 
sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + version "3.7.1" + resolved "https://registry.yarnpkg.com/jquery/-/jquery-3.7.1.tgz#083ef98927c9a6a74d05a6af02806566d16274de" + integrity sha512-m4avr8yL8kmFN8psrbFFFmB/If14iN5o9nw/NgnnM+kybDJpRsAynV2BsfpTYrTRysYUdADVD7CkUUizgkpLfg== js-yaml@~3.14.0: version "3.14.1" @@ -1833,18 +1283,6 @@ js-yaml@~3.14.0: argparse "^1.0.7" esprima "^4.0.0" -json-parse-even-better-errors@^2.3.0: - version "2.3.1" - resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" - integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== - -json-stable-stringify@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz#9a759d39c5f2ff503fd5300646ed445f88c4f9af" - integrity sha512-i/J297TW6xyj7sDFa7AmBPkQvLIxWr2kKPWI26tXydnZrzVAocNqn5DMNT1Mzk0vit1V5UkRM7C1KdVNp7Lmcg== - dependencies: - jsonify "~0.0.0" - jsonfile@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" @@ -1852,20 +1290,15 @@ jsonfile@^4.0.0: optionalDependencies: graceful-fs "^4.1.6" -jsonify@~0.0.0: - version "0.0.1" - resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.1.tgz#2aa3111dae3d34a0f151c63f3a45d995d9420978" - integrity sha512-2/Ki0GcmuqSrgFyelQq9M05y7PS0mEwuIzrf3f1fPqkVDVRvZrPZtVSMHxdgo8Aq0sxAOb/cr2aqqA3LeWHVPg== - just-extend@^4.0.2: version "4.2.1" resolved "https://registry.yarnpkg.com/just-extend/-/just-extend-4.2.1.tgz#ef5e589afb61e5d66b24eca749409a8939a8c744" integrity sha512-g3UB796vUFIY90VIv/WX3L2c8CS2MdWUww3CNrYmqza1Fg0DURc2K/O4YrnklBdQarSJ/y8JnJYDGc+1iumQjg== karma@^6.4.1: - version "6.4.1" - resolved "https://registry.yarnpkg.com/karma/-/karma-6.4.1.tgz#f2253716dd3a41aaa813fa9f54b6ee047e1127d9" - integrity 
sha512-Cj57NKOskK7wtFWSlMvZf459iX+kpYIPXmkNUzP2WAFcA7nhr/ALn5R7sw3w+1udFDcpMx/tuB8d5amgm3ijaA== + version "6.4.2" + resolved "https://registry.yarnpkg.com/karma/-/karma-6.4.2.tgz#a983f874cee6f35990c4b2dcc3d274653714de8e" + integrity sha512-C6SU/53LB31BEgRg+omznBEMY4SjHU3ricV6zBcAe1EeILKkeScr+fZXtaI5WyDbkVowJxxAI6h73NcFPmXolQ== dependencies: "@colors/colors" "1.5.0" body-parser "^1.19.0" @@ -1897,16 +1330,6 @@ kind-of@^6.0.2: resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== -leven@3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" - integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== - -leven@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/leven/-/leven-2.1.0.tgz#c2e7a9f772094dee9d34202ae8acce4687875580" - integrity sha512-nvVPLpIHUxCUoRLrFqTgSxXJ614d8AgQoWl7zPe/2VadE8+1dpU3LBhowRuBAcuwruWtOdD8oYC9jDNJjXDPyA== - liftup@~3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/liftup/-/liftup-3.0.1.tgz#1cb81aff0f368464ed3a5f1a7286372d6b1a60ce" @@ -1921,21 +1344,6 @@ liftup@~3.0.1: rechoir "^0.7.0" resolve "^1.19.0" -lines-and-columns@1.1.6: - version "1.1.6" - resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.1.6.tgz#1c00c743b433cd0a4e80758f7b64a57440d9ff00" - integrity sha512-8ZmlJFVK9iCmtLz19HpSsR8HaAMWBT284VMNednLwlIMDP2hJDCIhUp0IZ2xUcZ+Ob6BM0VvCSJwzASDM45NLQ== - -lines-and-columns@^1.1.6: - version "1.2.4" - resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" - integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== - -linguist-languages@7.10.0: - version "7.10.0" - resolved 
"https://registry.yarnpkg.com/linguist-languages/-/linguist-languages-7.10.0.tgz#c2098dfe6c5a4ab665fb78f13b190344e4cff1af" - integrity sha512-Uqt94P4iAznscZtccnNE1IBi105U+fmQKEUlDJv54JDdFZDInomkepEIRpZLOQcPyGdcNu3JO9Tvo5wpQVbfKw== - load-grunt-tasks@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/load-grunt-tasks/-/load-grunt-tasks-5.1.0.tgz#14894c27a7e34ebbef9937c39cc35c573cd04c1c" @@ -1959,7 +1367,7 @@ lodash.get@^4.4.2: resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99" integrity sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ== -lodash@4.17.21, lodash@^4.17.10, lodash@^4.17.15, lodash@^4.17.21, lodash@~4.17.19, lodash@~4.17.21: +lodash@^4.17.10, lodash@^4.17.21, lodash@~4.17.19, lodash@~4.17.21: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== @@ -1975,21 +1383,6 @@ log4js@^6.4.1: rfdc "^1.3.0" streamroller "^3.1.5" -lru-cache@^4.1.5: - version "4.1.5" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd" - integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g== - dependencies: - pseudomap "^1.0.2" - yallist "^2.1.2" - -lru-cache@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" - integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== - dependencies: - yallist "^4.0.0" - lunr@^0.7.0: version "0.7.2" resolved "https://registry.yarnpkg.com/lunr/-/lunr-0.7.2.tgz#79a30e932e216cba163541ee37a3607c12cd7281" @@ -2002,41 +1395,16 @@ make-iterator@^1.0.0: dependencies: kind-of "^6.0.2" -map-age-cleaner@^0.1.3: - version 
"0.1.3" - resolved "https://registry.yarnpkg.com/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz#7d583a7306434c055fe474b0f45078e6e1b4b92a" - integrity sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w== - dependencies: - p-defer "^1.0.0" - map-cache@^0.2.0: version "0.2.2" resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" integrity sha512-8y/eV9QQZCiyn1SprXSrCmqJN0yNRATe+PO8ztwqrvrbdRLA3eYJF0yaR0YayLWkMbsQSKWS9N2gPcGEc4UsZg== -markdown-escapes@^1.0.0: - version "1.0.4" - resolved "https://registry.yarnpkg.com/markdown-escapes/-/markdown-escapes-1.0.4.tgz#c95415ef451499d7602b91095f3c8e8975f78535" - integrity sha512-8z4efJYk43E0upd0NbVXwgSTQs6cT3T06etieCMEg7dRbzCbxUCK/GHlX8mhHRDcp+OLlHkPKsvqQTCvsRl2cg== - media-typer@0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== -mem@8.1.1: - version "8.1.1" - resolved "https://registry.yarnpkg.com/mem/-/mem-8.1.1.tgz#cf118b357c65ab7b7e0817bdf00c8062297c0122" - integrity sha512-qFCFUDs7U3b8mBDPyz5EToEKoAkgCzqquIgi9nkkR9bixxOVOre+09lbuH7+9Kn2NFpm56M3GUWVbU2hQgdACA== - dependencies: - map-age-cleaner "^0.1.3" - mimic-fn "^3.1.0" - -merge2@^1.3.0: - version "1.4.1" - resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" - integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== - micromatch@^4.0.2, micromatch@^4.0.4: version "4.0.5" resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" @@ -2062,18 +1430,6 @@ mime@^2.5.2: resolved "https://registry.yarnpkg.com/mime/-/mime-2.6.0.tgz#a2a682a95cd4d0cb1d6257e28f83da7e35800367" integrity 
sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg== -mimic-fn@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-3.1.0.tgz#65755145bbf3e36954b949c16450427451d5ca74" - integrity sha512-Ysbi9uYW9hFyfrThdDEQuykN4Ey6BuwPD2kpI5ES/nFTDn/98yxYNLZJcgUAKPT/mcrLLKaGzJR9YVxJrIdASQ== - -minimatch@3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" - integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== - dependencies: - brace-expansion "^1.1.7" - minimatch@^3.0.4, minimatch@^3.1.1: version "3.1.2" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" @@ -2088,12 +1444,7 @@ minimatch@~3.0.4: dependencies: brace-expansion "^1.1.7" -minimist@1.2.5: - version "1.2.5" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" - integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== - -minimist@^1.2.5, minimist@^1.2.6: +minimist@^1.2.6: version "1.2.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== @@ -2126,30 +1477,15 @@ multimatch@^4.0.0: arrify "^2.0.1" minimatch "^3.0.4" -n-readlines@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/n-readlines/-/n-readlines-1.0.1.tgz#bbb7364d38bc31a170a199f986fcacfa76b95f6e" - integrity sha512-z4SyAIVgMy7CkgsoNw7YVz40v0g4+WWvvqy8+ZdHrCtgevcEO758WQyrYcw3XPxcLxF+//RszTz/rO48nzD0wQ== - -nanoid@^3.3.4: - version "3.3.6" - resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.6.tgz#443380c856d6e9f9824267d960b4236ad583ea4c" - integrity 
sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA== - negotiator@0.6.3: version "0.6.3" resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== -neo-async@^2.6.0: - version "2.6.2" - resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" - integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== - nise@^5.1.4: - version "5.1.4" - resolved "https://registry.yarnpkg.com/nise/-/nise-5.1.4.tgz#491ce7e7307d4ec546f5a659b2efe94a18b4bbc0" - integrity sha512-8+Ib8rRJ4L0o3kfmyVCL7gzrohyDe0cMFTBa2d364yIrEGMEoetznKJx899YxjybU6bL9SQkYPSBBs1gyYs8Xg== + version "5.1.5" + resolved "https://registry.yarnpkg.com/nise/-/nise-5.1.5.tgz#f2aef9536280b6c18940e32ba1fbdc770b8964ee" + integrity sha512-VJuPIfUFaXNRzETTQEEItTOP8Y171ijr+JLq42wHes3DiryR8vT+1TXQW/Rx8JNUhyYYWyIvjXTU6dOhJcs9Nw== dependencies: "@sinonjs/commons" "^2.0.0" "@sinonjs/fake-timers" "^10.0.2" @@ -2183,9 +1519,9 @@ object-assign@^4: integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== object-inspect@^1.9.0: - version "1.12.3" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.3.tgz#ba62dffd67ee256c8c086dfae69e016cd1f198b9" - integrity sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g== + version "1.13.1" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.1.tgz#b96c6109324ccfef6b12216a956ca4dc2ff94bc2" + integrity sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ== object.defaults@^1.1.0: version "1.1.0" @@ -2251,11 +1587,6 @@ osenv@^0.1.4: os-homedir "^1.0.0" os-tmpdir "^1.0.0" -p-defer@^1.0.0: - version "1.0.0" - 
resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-1.0.0.tgz#9f6eb182f6c9aa8cd743004a7d4f96b196b0fb0c" - integrity sha512-wB3wfAxZpk2AzOfUMJNL+d36xothRSyj8EXOa4f6GMqYDN9BJaaSISbsk+wS9abmnebVw95C2Kb5t85UmpCxuw== - p-limit@^2.0.0: version "2.3.0" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" @@ -2275,25 +1606,6 @@ p-try@^2.0.0: resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== -parent-module@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" - integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== - dependencies: - callsites "^3.0.0" - -parse-entities@^1.1.0: - version "1.2.2" - resolved "https://registry.yarnpkg.com/parse-entities/-/parse-entities-1.2.2.tgz#c31bf0f653b6661354f8973559cb86dd1d5edf50" - integrity sha512-NzfpbxW/NPrzZ/yYSoQxyqUZMZXIdCfE0OIN4ESsnptHJECoUk3FZktxNuzQf4tjt5UEopnxpYJbvYuxIFDdsg== - dependencies: - character-entities "^1.0.0" - character-entities-legacy "^1.0.0" - character-reference-invalid "^1.0.0" - is-alphanumerical "^1.0.0" - is-decimal "^1.0.0" - is-hexadecimal "^1.0.0" - parse-filepath@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/parse-filepath/-/parse-filepath-1.0.2.tgz#a632127f53aaf3d15876f5872f3ffac763d6c891" @@ -2303,16 +1615,6 @@ parse-filepath@^1.0.1: map-cache "^0.2.0" path-root "^0.1.1" -parse-json@^5.0.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" - integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== - dependencies: - "@babel/code-frame" "^7.0.0" - error-ex "^1.3.1" - 
json-parse-even-better-errors "^2.3.0" - lines-and-columns "^1.1.6" - parse-passwd@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" @@ -2333,7 +1635,7 @@ path-is-absolute@^1.0.0: resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== -path-parse@^1.0.6, path-parse@^1.0.7: +path-parse@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== @@ -2357,21 +1659,6 @@ path-to-regexp@^1.7.0: dependencies: isarray "0.0.1" -path-type@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" - integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== - -picocolors@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-0.2.1.tgz#570670f793646851d1ba135996962abad587859f" - integrity sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA== - -picocolors@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" - integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== - picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" @@ -2384,79 +1671,6 @@ pkg-up@^3.1.0: dependencies: find-up "^3.0.0" -please-upgrade-node@3.2.0: - version "3.2.0" - resolved 
"https://registry.yarnpkg.com/please-upgrade-node/-/please-upgrade-node-3.2.0.tgz#aeddd3f994c933e4ad98b99d9a556efa0e2fe942" - integrity sha512-gQR3WpIgNIKwBMVLkpMUeR3e1/E1y42bqDQZfql+kDeXd8COYfM8PQA4X6y7a8u9Ua9FHmsrrmirW2vHs45hWg== - dependencies: - semver-compare "^1.0.0" - -postcss-less@4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-less/-/postcss-less-4.0.1.tgz#73caf5dac056d4b706f4cc136cefeaf4e79067a4" - integrity sha512-C92S4sHlbDpefJ2QQJjrucCcypq3+KZPstjfuvgOCNnGx0tF9h8hXgAlOIATGAxMXZXaF+nVp+/Mi8pCAWdSmw== - dependencies: - postcss "^8.1.2" - -postcss-media-query-parser@0.2.3: - version "0.2.3" - resolved "https://registry.yarnpkg.com/postcss-media-query-parser/-/postcss-media-query-parser-0.2.3.tgz#27b39c6f4d94f81b1a73b8f76351c609e5cef244" - integrity sha512-3sOlxmbKcSHMjlUXQZKQ06jOswE7oVkXPxmZdoB1r5l0q6gTFTQSHxNxOrCccElbW7dxNytifNEo8qidX2Vsig== - -postcss-scss@2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/postcss-scss/-/postcss-scss-2.1.1.tgz#ec3a75fa29a55e016b90bf3269026c53c1d2b383" - integrity sha512-jQmGnj0hSGLd9RscFw9LyuSVAa5Bl1/KBPqG1NQw9w8ND55nY4ZEsdlVuYJvLPpV+y0nwTV5v/4rHPzZRihQbA== - dependencies: - postcss "^7.0.6" - -postcss-selector-parser@2.2.3: - version "2.2.3" - resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-2.2.3.tgz#f9437788606c3c9acee16ffe8d8b16297f27bb90" - integrity sha512-3pqyakeGhrO0BQ5+/tGTfvi5IAUAhHRayGK8WFSu06aEv2BmHoXw/Mhb+w7VY5HERIuC+QoUI7wgrCcq2hqCVA== - dependencies: - flatten "^1.0.2" - indexes-of "^1.0.1" - uniq "^1.0.1" - -postcss-values-parser@2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/postcss-values-parser/-/postcss-values-parser-2.0.1.tgz#da8b472d901da1e205b47bdc98637b9e9e550e5f" - integrity sha512-2tLuBsA6P4rYTNKCXYG/71C7j1pU6pK503suYOmn4xYrQIzW+opD+7FAFNuGSdZC/3Qfy334QbeMu7MEb8gOxg== - dependencies: - flatten "^1.0.2" - indexes-of "^1.0.1" - uniq "^1.0.1" - -postcss@^7.0.6: - version "7.0.39" - resolved 
"https://registry.yarnpkg.com/postcss/-/postcss-7.0.39.tgz#9624375d965630e2e1f2c02a935c82a59cb48309" - integrity sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA== - dependencies: - picocolors "^0.2.1" - source-map "^0.6.1" - -postcss@^8.1.2: - version "8.4.21" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.21.tgz#c639b719a57efc3187b13a1d765675485f4134f4" - integrity sha512-tP7u/Sn/dVxK2NnruI4H9BG+x+Wxz6oeZ1cJ8P6G/PZY0IKk4k/63TDsQf2kQq3+qoJeLm2kIBUNlZe3zgb4Zg== - dependencies: - nanoid "^3.3.4" - picocolors "^1.0.0" - source-map-js "^1.0.2" - -prettier-plugin-x@^0.0.10: - version "0.0.10" - resolved "https://registry.yarnpkg.com/prettier-plugin-x/-/prettier-plugin-x-0.0.10.tgz#ab2e167882c73543efa03d17d982e0777441b92b" - integrity sha512-uxYOYXNyMvkWs3ZkfXSP6yT4R45Gg2lZxAqeTj5hoIjdLPzMEyTqOMU8lh7wb1hLYDvxGjK37yYKotSDD22H8g== - dependencies: - x-formatter "^0.0.2" - -pseudomap@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" - integrity sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ== - qjobs@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/qjobs/-/qjobs-1.2.0.tgz#c45e9c61800bd087ef88d7e256423bdd49e5d071" @@ -2469,11 +1683,6 @@ qs@6.11.0: dependencies: side-channel "^1.0.4" -queue-microtask@^1.2.2: - version "1.2.3" - resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" - integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== - range-parser@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" @@ -2503,25 +1712,6 @@ rechoir@^0.7.0: dependencies: resolve "^1.9.0" -regexp-util@1.2.2, regexp-util@^1.2.0, regexp-util@^1.2.1: - version "1.2.2" - resolved 
"https://registry.yarnpkg.com/regexp-util/-/regexp-util-1.2.2.tgz#5cf599134921eb0d776e41d41e9c0da33f0fa2fc" - integrity sha512-5/rl2UD18oAlLQEIuKBeiSIOp1hb5wCXcakl5yvHxlY1wyWI4D5cUKKzCibBeu741PA9JKvZhMqbkDQqPusX3w== - dependencies: - tslib "^1.9.0" - -remark-math@1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/remark-math/-/remark-math-1.0.6.tgz#49eb3dd15d298734c9ae21673115389793af4d1b" - integrity sha512-I43wU/QOQpXvVFXKjA4FHp5xptK65+5F6yolm8+69/JV0EqSOB64wURUZ3JK50JtnTL8FvwLiH2PZ+fvsBxviA== - dependencies: - trim-trailing-lines "^1.1.0" - -repeat-string@^1.5.4: - version "1.6.1" - resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" - integrity sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w== - require-directory@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" @@ -2540,11 +1730,6 @@ resolve-dir@^1.0.0, resolve-dir@^1.0.1: expand-tilde "^2.0.0" global-modules "^1.0.0" -resolve-from@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" - integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== - resolve-from@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" @@ -2557,28 +1742,15 @@ resolve-pkg@^2.0.0: dependencies: resolve-from "^5.0.0" -resolve@1.20.0: - version "1.20.0" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975" - integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A== - dependencies: - is-core-module "^2.2.0" - path-parse "^1.0.6" - resolve@^1.19.0, resolve@^1.9.0: - version "1.22.1" 
- resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177" - integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== + version "1.22.8" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d" + integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw== dependencies: - is-core-module "^2.9.0" + is-core-module "^2.13.0" path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" -reusify@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" - integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== - rfdc@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.3.0.tgz#d0b7c441ab2720d05dc4cf26e01c89631d9da08b" @@ -2592,16 +1764,9 @@ rimraf@^3.0.0, rimraf@^3.0.2: glob "^7.1.3" robust-predicates@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/robust-predicates/-/robust-predicates-3.0.1.tgz#ecde075044f7f30118682bd9fb3f123109577f9a" - integrity sha512-ndEIpszUHiG4HtDsQLeIuMvRsDnn8c8rYStabochtUeCvfuvNptb5TUbVD68LRAILPX7p9nqQGh4xJgn3EHS/g== - -run-parallel@^1.1.9: - version "1.2.0" - resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" - integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== - dependencies: - queue-microtask "^1.2.2" + version "3.0.2" + resolved "https://registry.yarnpkg.com/robust-predicates/-/robust-predicates-3.0.2.tgz#d5b28528c4824d20fc48df1928d41d9efa1ad771" + integrity sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg== rw@1: version "1.3.3" @@ -2613,29 +1778,15 @@ rw@1: resolved 
"https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== -semver-compare@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/semver-compare/-/semver-compare-1.0.0.tgz#0dee216a1c941ab37e9efb1788f6afc5ff5537fc" - integrity sha512-YM3/ITh2MJ5MtzaM429anh+x2jiLVjqILF4m4oyQB18W7Ggea7BfqdH/wGMK7dDiMghv/6WG7znWMwUDzJiXow== - -semver@7.3.5: - version "7.3.5" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7" - integrity sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ== - dependencies: - lru-cache "^6.0.0" - -semver@^5.6.0: - version "5.7.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" - integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== - -semver@^7.3.2: - version "7.3.8" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.8.tgz#07a78feafb3f7b32347d725e33de7e2a2df67798" - integrity sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A== +set-function-length@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.1.1.tgz#4bc39fafb0307224a33e106a7d35ca1218d659ed" + integrity sha512-VoaqjbBJKiWtg4yRcKBQ7g7wnGnLV3M8oLvVWwOk2PdYY6PEFegR1vezXR0tw6fZGF9csVakIRjrJiy2veSBFQ== dependencies: - lru-cache "^6.0.0" + define-data-property "^1.1.1" + get-intrinsic "^1.2.1" + gopd "^1.0.1" + has-property-descriptors "^1.0.0" setprototypeof@1.2.0: version "1.2.0" @@ -2651,33 +1802,18 @@ side-channel@^1.0.4: get-intrinsic "^1.0.2" object-inspect "^1.9.0" -sigmund@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/sigmund/-/sigmund-1.0.1.tgz#3ff21f198cad2175f9f3b781853fd94d0d19b590" - 
integrity sha512-fCvEXfh6NWpm+YSuY2bpXb/VIihqWA6hLsgboC+0nl71Q7N7o2eaCW8mJa/NLvQhs6jpd3VZV4UiUQlV6+lc8g== - -simple-html-tokenizer@^0.5.9: - version "0.5.11" - resolved "https://registry.yarnpkg.com/simple-html-tokenizer/-/simple-html-tokenizer-0.5.11.tgz#4c5186083c164ba22a7b477b7687ac056ad6b1d9" - integrity sha512-C2WEK/Z3HoSFbYq8tI7ni3eOo/NneSPRoPpcM7WdLjFOArFuyXEjAoCdOC3DgMfRyziZQ1hCNR4mrNdWEvD0og== - sinon@^15.0.3: - version "15.0.3" - resolved "https://registry.yarnpkg.com/sinon/-/sinon-15.0.3.tgz#38005fcd80827177b6aa0245f82401d9ec88994b" - integrity sha512-si3geiRkeovP7Iel2O+qGL4NrO9vbMf3KsrJEi0ghP1l5aBkB5UxARea5j0FUsSqH3HLBh0dQPAyQ8fObRUqHw== + version "15.2.0" + resolved "https://registry.yarnpkg.com/sinon/-/sinon-15.2.0.tgz#5e44d4bc5a9b5d993871137fd3560bebfac27565" + integrity sha512-nPS85arNqwBXaIsFCkolHjGIkFo+Oxu9vbgmBJizLAhqe6P2o3Qmj3KCUoRkfhHtvgDhZdWD3risLHAUJ8npjw== dependencies: "@sinonjs/commons" "^3.0.0" - "@sinonjs/fake-timers" "^10.0.2" + "@sinonjs/fake-timers" "^10.3.0" "@sinonjs/samsam" "^8.0.0" diff "^5.1.0" nise "^5.1.4" supports-color "^7.2.0" -slash@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" - integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== - socket.io-adapter@~2.5.2: version "2.5.2" resolved "https://registry.yarnpkg.com/socket.io-adapter/-/socket.io-adapter-2.5.2.tgz#5de9477c9182fdc171cd8c8364b9a8894ec75d12" @@ -2685,30 +1821,26 @@ socket.io-adapter@~2.5.2: dependencies: ws "~8.11.0" -socket.io-parser@~4.2.1: - version "4.2.2" - resolved "https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-4.2.2.tgz#1dd384019e25b7a3d374877f492ab34f2ad0d206" - integrity sha512-DJtziuKypFkMMHCm2uIshOYC7QaylbtzQwiMYDuCKy3OPkjLzu4B2vAhTlqipRHHzrI0NJeBAizTK7X+6m1jVw== +socket.io-parser@~4.2.4: + version "4.2.4" + resolved 
"https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-4.2.4.tgz#c806966cf7270601e47469ddeec30fbdfda44c83" + integrity sha512-/GbIKmo8ioc+NIWIhwdecY0ge+qVBSMdgxGygevmdHj24bsfgtCmcUUcQ5ZzcylGFHsN3k4HB4Cgkl96KVnuew== dependencies: "@socket.io/component-emitter" "~3.1.0" debug "~4.3.1" socket.io@^4.4.1: - version "4.6.1" - resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-4.6.1.tgz#62ec117e5fce0692fa50498da9347cfb52c3bc70" - integrity sha512-KMcaAi4l/8+xEjkRICl6ak8ySoxsYG+gG6/XfRCPJPQ/haCRIJBTL4wIl8YCsmtaBovcAXGLOShyVWQ/FG8GZA== + version "4.7.2" + resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-4.7.2.tgz#22557d76c3f3ca48f82e73d68b7add36a22df002" + integrity sha512-bvKVS29/I5fl2FGLNHuXlQaUH/BlzX1IN6S+NKLNZpBsPZIDH+90eQmCs2Railn4YUiww4SzUedJ6+uzwFnKLw== dependencies: accepts "~1.3.4" base64id "~2.0.0" + cors "~2.8.5" debug "~4.3.2" - engine.io "~6.4.1" + engine.io "~6.5.2" socket.io-adapter "~2.5.2" - socket.io-parser "~4.2.1" - -source-map-js@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" - integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== + socket.io-parser "~4.2.4" source-map@^0.6.1: version "0.6.1" @@ -2716,25 +1848,15 @@ source-map@^0.6.1: integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== sprintf-js@^1.1.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.1.2.tgz#da1765262bf8c0f571749f2ad6c26300207ae673" - integrity sha512-VE0SOVEHCk7Qc8ulkWw3ntAzXuqf7S2lvwQaDLRnUeIEaKNQJzV6BwmLKhOqT61aGhfUMrXeaBk+oDGCzvhcug== + version "1.1.3" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.1.3.tgz#4914b903a2f8b685d17fdf78a70e917e872e444a" + integrity sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA== sprintf-js@~1.0.2: version "1.0.3" 
resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== -srcset@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/srcset/-/srcset-3.0.0.tgz#8afd8b971362dfc129ae9c1a99b3897301ce6441" - integrity sha512-D59vF08Qzu/C4GAOXVgMTLfgryt5fyWo93FZyhEWANo0PokFz/iWdDe13mX3O5TRf6l8vMTqckAfR4zPiaH0yQ== - -state-toggle@^1.0.0: - version "1.0.3" - resolved "https://registry.yarnpkg.com/state-toggle/-/state-toggle-1.0.3.tgz#e123b16a88e143139b09c6852221bc9815917dfe" - integrity sha512-d/5Z4/2iiCnHw6Xzghyhb+GcmF89bxwgXG60wjIiZaxnymbyOmI8Hk4VqHXiVVp6u2ysaskFfXg3ekCj4WNftQ== - statuses@2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" @@ -2754,15 +1876,6 @@ streamroller@^3.1.5: debug "^4.3.4" fs-extra "^8.1.0" -string-width@4.2.2: - version "4.2.2" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.2.tgz#dafd4f9559a7585cfba529c6a0a4f73488ebd4c5" - integrity sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA== - dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.0" - string-width@^4.1.0, string-width@^4.2.0: version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" @@ -2779,13 +1892,6 @@ strip-ansi@^6.0.0, strip-ansi@^6.0.1: dependencies: ansi-regex "^5.0.1" -supports-color@^5.3.0: - version "5.5.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" - integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== - dependencies: - has-flag "^3.0.0" - supports-color@^7.1.0, supports-color@^7.2.0: version "7.2.0" resolved 
"https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" @@ -2817,33 +1923,6 @@ toidentifier@1.0.1: resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== -trim-trailing-lines@^1.0.0, trim-trailing-lines@^1.1.0: - version "1.1.4" - resolved "https://registry.yarnpkg.com/trim-trailing-lines/-/trim-trailing-lines-1.1.4.tgz#bd4abbec7cc880462f10b2c8b5ce1d8d1ec7c2c0" - integrity sha512-rjUWSqnfTNrjbB9NQWfPMH/xRK1deHeGsHoVfpxJ++XeYXE0d6B1En37AHfw3jtfTU7dzMzZL2jjpe8Qb5gLIQ== - -trim@0.0.3: - version "0.0.3" - resolved "https://registry.yarnpkg.com/trim/-/trim-0.0.3.tgz#05243a47a3a4113e6b49367880a9cca59697a20b" - integrity sha512-h82ywcYhHK7veeelXrCScdH7HkWfbIT1D/CgYO+nmDarz3SGNssVBMws6jU16Ga60AJCRAvPV6w6RLuNerQqjg== - -trough@^1.0.0: - version "1.0.5" - resolved "https://registry.yarnpkg.com/trough/-/trough-1.0.5.tgz#b8b639cefad7d0bb2abd37d433ff8293efa5f406" - integrity sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA== - -tslib@1.14.1, tslib@^1.10.0, tslib@^1.8.1, tslib@^1.9.0, tslib@^1.9.3: - version "1.14.1" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" - integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== - -tsutils@^3.17.1: - version "3.21.0" - resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" - integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== - dependencies: - tslib "^1.8.1" - type-detect@4.0.8, type-detect@^4.0.8: version "4.0.8" resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" @@ -2857,20 +1936,10 @@ 
type-is@~1.6.18: media-typer "0.3.0" mime-types "~2.1.24" -typescript@^4.9.5: - version "4.9.5" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.9.5.tgz#095979f9bcc0d09da324d58d03ce8f8374cbe65a" - integrity sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g== - ua-parser-js@^0.7.30: - version "0.7.34" - resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.34.tgz#afb439e2e3e394bdc90080acb661a39c685b67d7" - integrity sha512-cJMeh/eOILyGu0ejgTKB95yKT3zOenSe9UGE3vj6WfiOwgGYnmATUsnDixMFvdU+rNMvWih83hrUP8VwhF9yXQ== - -uglify-js@^3.1.4: - version "3.17.4" - resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.17.4.tgz#61678cf5fa3f5b7eb789bb345df29afb8257c22c" - integrity sha512-T9q82TJI9e/C1TAxYvfb16xO120tMVFZrGA3f9/P4424DNu6ypK103y0GPFVa17yotwSyZW5iYXgjYHkGrJW/g== + version "0.7.37" + resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.37.tgz#e464e66dac2d33a7a1251d7d7a99d6157ec27832" + integrity sha512-xV8kqRKM+jhMvcHWUKthV9fNebIzrNy//2O9ZwWcfiBFR5f25XVZPLlEajk/sf3Ra15V92isyQqnIEXRDaZWEA== unc-path-regex@^0.1.2: version "0.1.2" @@ -2890,77 +1959,10 @@ underscore@>=1.7.0, underscore@>=1.8.3, underscore@^1.8.0, underscore@^1.8.3: resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.13.6.tgz#04786a1f589dc6c09f761fc5f45b89e935136441" integrity sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A== -unherit@^1.0.4: - version "1.1.3" - resolved "https://registry.yarnpkg.com/unherit/-/unherit-1.1.3.tgz#6c9b503f2b41b262330c80e91c8614abdaa69c22" - integrity sha512-Ft16BJcnapDKp0+J/rqFC3Rrk6Y/Ng4nzsC028k2jdDII/rdZ7Wd3pPT/6+vIIxRagwRc9K0IUX0Ra4fKvw+WQ== - dependencies: - inherits "^2.0.0" - xtend "^4.0.0" - -unicode-regex@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/unicode-regex/-/unicode-regex-3.0.0.tgz#0c20df914c6da0412b3714cd300726e0f7f24698" - integrity 
sha512-WiDJdORsqgxkZrjC8WsIP573130HNn7KsB0IDnUccW2BG2b19QQNloNhVe6DKk3Aef0UcoIHhNVj7IkkcYWrNw== - dependencies: - regexp-util "^1.2.0" - -unicode-regex@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/unicode-regex/-/unicode-regex-2.0.0.tgz#ef8f6642c37dddcaa0c09af5b9456aabf6b436a3" - integrity sha512-5nbEG2YU7loyTvPABaKb+8B0u8L7vWCsVmCSsiaO249ZdMKlvrXlxR2ex4TUVAdzv/Cne/TdoXSSaJArGXaleQ== - dependencies: - regexp-util "^1.2.0" - -unified@9.2.1: - version "9.2.1" - resolved "https://registry.yarnpkg.com/unified/-/unified-9.2.1.tgz#ae18d5674c114021bfdbdf73865ca60f410215a3" - integrity sha512-juWjuI8Z4xFg8pJbnEZ41b5xjGUWGHqXALmBZ3FC3WX0PIx1CZBIIJ6mXbYMcf6Yw4Fi0rFUTA1cdz/BglbOhA== - dependencies: - bail "^1.0.0" - extend "^3.0.0" - is-buffer "^2.0.0" - is-plain-obj "^2.0.0" - trough "^1.0.0" - vfile "^4.0.0" - -uniq@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff" - integrity sha512-Gw+zz50YNKPDKXs+9d+aKAjVwpjNwqzvNpLigIruT4HA9lMZNdMqs9x07kKHB/L9WRzqp4+DlTU5s4wG2esdoA== - -unist-util-is@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/unist-util-is/-/unist-util-is-3.0.0.tgz#d9e84381c2468e82629e4a5be9d7d05a2dd324cd" - integrity sha512-sVZZX3+kspVNmLWBPAB6r+7D9ZgAFPNWm66f7YNb420RlQSbn+n8rG8dGZSkrER7ZIXGQYNm5pqC3v3HopH24A== - -unist-util-remove-position@^1.0.0: - version "1.1.4" - resolved "https://registry.yarnpkg.com/unist-util-remove-position/-/unist-util-remove-position-1.1.4.tgz#ec037348b6102c897703eee6d0294ca4755a2020" - integrity sha512-tLqd653ArxJIPnKII6LMZwH+mb5q+n/GtXQZo6S6csPRs5zB0u79Yw8ouR3wTw8wxvdJFhpP6Y7jorWdCgLO0A== - dependencies: - unist-util-visit "^1.1.0" - -unist-util-stringify-position@^2.0.0: - version "2.0.3" - resolved "https://registry.yarnpkg.com/unist-util-stringify-position/-/unist-util-stringify-position-2.0.3.tgz#cce3bfa1cdf85ba7375d1d5b17bdc4cada9bd9da" - integrity 
sha512-3faScn5I+hy9VleOq/qNbAd6pAx7iH5jYBMS9I1HgQVijz/4mv5Bvw5iw1sC/90CODiKo81G/ps8AJrISn687g== - dependencies: - "@types/unist" "^2.0.2" - -unist-util-visit-parents@^2.0.0: - version "2.1.2" - resolved "https://registry.yarnpkg.com/unist-util-visit-parents/-/unist-util-visit-parents-2.1.2.tgz#25e43e55312166f3348cae6743588781d112c1e9" - integrity sha512-DyN5vD4NE3aSeB+PXYNKxzGsfocxp6asDc2XXE3b0ekO2BaRUpBicbbUygfSvYfUz1IkmjFR1YF7dPklraMZ2g== - dependencies: - unist-util-is "^3.0.0" - -unist-util-visit@^1.1.0: - version "1.4.1" - resolved "https://registry.yarnpkg.com/unist-util-visit/-/unist-util-visit-1.4.1.tgz#4724aaa8486e6ee6e26d7ff3c8685960d560b1e3" - integrity sha512-AvGNk7Bb//EmJZyhtRUnNMEpId/AZ5Ph/KUpTI09WHQuDZHKovQ1oEv3mfmKpWKtoMzyMC4GLBm1Zy5k12fjIw== - dependencies: - unist-util-visit-parents "^2.0.0" +undici-types@~5.26.4: + version "5.26.5" + resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" + integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== universalify@^0.1.0: version "0.1.2" @@ -2994,38 +1996,6 @@ vary@^1: resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== -vfile-location@^2.0.0: - version "2.0.6" - resolved "https://registry.yarnpkg.com/vfile-location/-/vfile-location-2.0.6.tgz#8a274f39411b8719ea5728802e10d9e0dff1519e" - integrity sha512-sSFdyCP3G6Ka0CEmN83A2YCMKIieHx0EDaj5IDP4g1pa5ZJ4FJDvpO0WODLxo4LUX4oe52gmSCK7Jw4SBghqxA== - -vfile-message@^2.0.0: - version "2.0.4" - resolved "https://registry.yarnpkg.com/vfile-message/-/vfile-message-2.0.4.tgz#5b43b88171d409eae58477d13f23dd41d52c371a" - integrity sha512-DjssxRGkMvifUOJre00juHoP9DPWuzjxKuMDrhNbk2TdaYYBNMStsNhEOt3idrtI12VQYM/1+iM0KOzXi4pxwQ== - dependencies: - "@types/unist" "^2.0.0" - unist-util-stringify-position 
"^2.0.0" - -vfile@^4.0.0: - version "4.2.1" - resolved "https://registry.yarnpkg.com/vfile/-/vfile-4.2.1.tgz#03f1dce28fc625c625bc6514350fbdb00fa9e624" - integrity sha512-O6AE4OskCG5S1emQ/4gl8zK586RqA3srz3nfK/Viy0UPToBc5Trp9BVFb1u0CjsKrAWwnpr4ifM/KBXPWwJbCA== - dependencies: - "@types/unist" "^2.0.0" - is-buffer "^2.0.0" - unist-util-stringify-position "^2.0.0" - vfile-message "^2.0.0" - -vnopts@1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/vnopts/-/vnopts-1.0.2.tgz#f6a331473de0179d1679112cc090572b695202f7" - integrity sha512-d2rr2EFhAGHnTlURu49G7GWmiJV80HbAnkYdD9IFAtfhmxC+kSWEaZ6ZF064DJFTv9lQZQV1vuLTntyQpoanGQ== - dependencies: - chalk "^2.4.1" - leven "^2.1.0" - tslib "^1.9.3" - void-elements@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-2.0.1.tgz#c066afb582bb1cb4128d60ea92392e94d5e9dbec" @@ -3045,11 +2015,6 @@ which@~2.0.2: dependencies: isexe "^2.0.0" -wordwrap@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" - integrity sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q== - wrap-ansi@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" @@ -3069,47 +2034,11 @@ ws@~8.11.0: resolved "https://registry.yarnpkg.com/ws/-/ws-8.11.0.tgz#6a0d36b8edfd9f96d8b25683db2f8d7de6e8e143" integrity sha512-HPG3wQd9sNQoT9xHyNCXoDUa+Xw/VevmY9FoHyQ+g+rrMn4j6FB4np7Z0OhdTgjx6MgQLK7jwSy1YecU1+4Asg== -x-formatter@^0.0.2: - version "0.0.2" - resolved "https://registry.yarnpkg.com/x-formatter/-/x-formatter-0.0.2.tgz#de40baa26832382fdc92a849b20575928018153f" - integrity sha512-JO3XWRfc2cpmUZHyOgerDXx/d4TzSz+mtsmUXZdw9LQSiPiLmyCvNad4a8N7cNS1Zgbq2IDW0xw47exvy0B8ZA== - dependencies: - "@prettier-x/formatter-2021-01" "^0.0.1-rc01" - -xtend@^4.0.0, xtend@^4.0.1: - version "4.0.2" - resolved 
"https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" - integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== - y18n@^5.0.5: version "5.0.8" resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== -yallist@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" - integrity sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A== - -yallist@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" - integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== - -yaml-unist-parser@1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/yaml-unist-parser/-/yaml-unist-parser-1.3.1.tgz#4305a54d8f8750dfff782bb998ff93d0da538d1a" - integrity sha512-4aHBMpYcnByF8l2OKj5hlBJlxSYIMON8Z1Hm57ymbBL4omXMlGgY+pEf4Di6h2qNT8ZG8seTVvAQYNOa7CZ9eA== - dependencies: - lines-and-columns "^1.1.6" - tslib "^1.10.0" - yaml "^1.10.0" - -yaml@^1.10.0: - version "1.10.2" - resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" - integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== - yargs-parser@^20.2.2: version "20.2.9" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" From bd63ad7b90dc8269346e60784cba49de12978915 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Mon, 18 Dec 2023 10:15:26 +0300 Subject: [PATCH 130/270] Update base image Signed-off-by: Kipchirchir Sigei --- docker/onadata-uwsgi/Dockerfile.ubuntu | 4 ++-- 1 file 
changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/onadata-uwsgi/Dockerfile.ubuntu b/docker/onadata-uwsgi/Dockerfile.ubuntu index bcbcee5aba..b3c5eda7a8 100644 --- a/docker/onadata-uwsgi/Dockerfile.ubuntu +++ b/docker/onadata-uwsgi/Dockerfile.ubuntu @@ -13,9 +13,9 @@ RUN mkdir -m 0600 ~/.ssh && ssh-keyscan github.com >> ~/.ssh/known_hosts # hadolint ignore=DL3013 RUN --mount=type=ssh if [ -n "$optional_packages" ]; then pip install ${optional_packages} ; fi -FROM ubuntu:focal-20230801 +FROM ubuntu:focal-20231211 -ARG release_version=v3.7.1 +ARG release_version=v3.17.1 # Silence configuration prompts ENV DEBIAN_FRONTEND noninteractive From 7772618e1e289c0a3fb42e0b09ee46aa40ad1ef1 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Mon, 18 Dec 2023 09:04:03 +0300 Subject: [PATCH 131/270] Tag release v3.17.2 Signed-off-by: Kipchirchir Sigei --- CHANGES.rst | 12 ++++++++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index b617b5d1ab..d1e8ed78f4 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,18 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.17.2(2023-12-18) +------------------- +- Security Updates + `PR #2529 ` + [@KipSigei] +- Enable Token Authentication on Form List API + `PR #2525 ` + [@KipSigei] +- Set AWS credentials when generating pre-signed URLS + `PR #2527 ` + [@ukanga] + v3.17.1(2023-12-11) ------------------- - Enable TokenAuthentication on briefcase viewset diff --git a/onadata/__init__.py b/onadata/__init__.py index 8ae438470b..82c545f86b 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.17.1" +__version__ = "3.17.2" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index 741ce91201..5ce92ae823 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = 
onadata -version = 3.17.1 +version = 3.17.2 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 3ab0734e7bba9f3d84ae43ff4eaf3b0ec13b5890 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Mon, 15 Jan 2024 16:06:47 +0300 Subject: [PATCH 132/270] Explicitly set AWS_S3_ENDPOINT_URL if defined in settings Signed-off-by: Kipchirchir Sigei --- onadata/libs/utils/image_tools.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/onadata/libs/utils/image_tools.py b/onadata/libs/utils/image_tools.py index 6268dcf876..178b296639 100644 --- a/onadata/libs/utils/image_tools.py +++ b/onadata/libs/utils/image_tools.py @@ -79,6 +79,7 @@ def generate_aws_media_url( """Generate S3 URL.""" s3_class = get_storage_class("storages.backends.s3boto3.S3Boto3Storage")() bucket_name = s3_class.bucket.name + aws_endpoint_url = getattr(settings, "AWS_S3_ENDPOINT_URL", "") s3_config = Config( signature_version=getattr(settings, "AWS_S3_SIGNATURE_VERSION", "s3v4"), region_name=getattr(settings, "AWS_S3_REGION_NAME", ""), @@ -86,6 +87,7 @@ def generate_aws_media_url( s3_client = boto3.client( "s3", config=s3_config, + endpoint_url=aws_endpoint_url, aws_access_key_id=s3_class.access_key, aws_secret_access_key=s3_class.secret_key, ) From 66e09cd404bcf28340458d86cc497204fa5f8a14 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Mon, 15 Jan 2024 17:03:05 +0300 Subject: [PATCH 133/270] Tag release v3.17.3 Signed-off-by: Kipchirchir Sigei --- CHANGES.rst | 6 ++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index d1e8ed78f4..b3ae317372 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,12 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.17.3(2024-01-15) +------------------- +- Explicitly set AWS_S3_ENDPOINT_URL in boto3 configs + `PR #2540 ` + [@KipSigei] + v3.17.2(2023-12-18) 
------------------- - Security Updates diff --git a/onadata/__init__.py b/onadata/__init__.py index 82c545f86b..3f6028d678 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.17.2" +__version__ = "3.17.3" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index 5ce92ae823..f0e3f95f1a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.17.2 +version = 3.17.3 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 4914a3c3955bc28dc11372a257cb75ee705883c0 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Mon, 29 Jan 2024 17:16:37 +0300 Subject: [PATCH 134/270] Process Instance metadata from light tasks synchronously (#2547) * save Instance json _id synchronously always If the asynchronous task to update Instance.json with the full dict gets delayed, _id will be missing from the data. 
The change ensures even if async processing is enabled, non-performance intensive Instance.json data are always save asynchronously * add tests * fix failing tests * update code comment * fix failing tests --- onadata/apps/api/tests/test_tasks.py | 12 +- .../test_organization_profile_viewset.py | 2 +- onadata/apps/logger/models/instance.py | 139 +++++++++++------- .../apps/logger/tests/models/test_instance.py | 76 ++++++++-- 4 files changed, 156 insertions(+), 73 deletions(-) diff --git a/onadata/apps/api/tests/test_tasks.py b/onadata/apps/api/tests/test_tasks.py index 868f0dc4b4..0051008f32 100644 --- a/onadata/apps/api/tests/test_tasks.py +++ b/onadata/apps/api/tests/test_tasks.py @@ -42,7 +42,9 @@ class RegenerateFormInstanceJsonTestCase(TestBase): def test_regenerates_instances_json(self): """Regenerates instances json""" - def mock_get_full_dict(self): # pylint: disable=unused-argument + def mock_get_full_dict( + self, include_related=True + ): # pylint: disable=unused-argument return {} with patch.object(Instance, "get_full_dict", mock_get_full_dict): @@ -64,7 +66,9 @@ def mock_get_full_dict(self): # pylint: disable=unused-argument def test_json_overriden(self): """Existing json is overriden""" - def mock_get_full_dict(self): # pylint: disable=unused-argument + def mock_get_full_dict( + self, include_related=True + ): # pylint: disable=unused-argument return {"foo": "bar"} with patch.object(Instance, "get_full_dict", mock_get_full_dict): @@ -87,7 +91,9 @@ def test_form_id_invalid(self, mock_log_exception): def test_already_generated(self): """Regeneration fails for a form whose regeneration has already been done""" - def mock_get_full_dict(self): # pylint: disable=unused-argument + def mock_get_full_dict( + self, include_related=True + ): # pylint: disable=unused-argument return {} with patch.object(Instance, "get_full_dict", mock_get_full_dict): diff --git a/onadata/apps/api/tests/viewsets/test_organization_profile_viewset.py 
b/onadata/apps/api/tests/viewsets/test_organization_profile_viewset.py index 3e9b9a4167..700d071698 100644 --- a/onadata/apps/api/tests/viewsets/test_organization_profile_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_organization_profile_viewset.py @@ -1140,7 +1140,7 @@ def test_orgs_non_creator_delete(self): response = view(request, user="denoinc") expected_results = ["denoinc", "alice"] self.assertEqual(status.HTTP_201_CREATED, response.status_code) - self.assertEqual(expected_results, response.data) + self.assertCountEqual(expected_results, response.data) self._login_user_and_profile(extra_post_data=alice_data) diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index 2c0fd87d12..c33addd711 100644 --- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -79,9 +79,6 @@ from onadata.libs.utils.model_tools import set_uuid from onadata.libs.utils.timing import calculate_duration -ASYNC_POST_SUBMISSION_PROCESSING_ENABLED = getattr( - settings, "ASYNC_POST_SUBMISSION_PROCESSING_ENABLED", False -) # pylint: disable=invalid-name User = get_user_model() storage = get_storage_class()() @@ -355,12 +352,17 @@ def save_full_json_async(self, instance_id): save_full_json(instance) -def save_full_json(instance: "Instance"): - """Save full json dict""" +def save_full_json(instance: "Instance", include_related=True): + """Save full json dict + + Args: + include_related (bool): Whether to include related objects + """ # Queryset.update ensures the model's save is not called and # the pre_save and post_save signals aren't sent - json = instance.get_full_dict() - Instance.objects.filter(pk=instance.pk).update(json=json, version=json.get(VERSION)) + Instance.objects.filter(pk=instance.pk).update( + json=instance.get_full_dict(include_related) + ) @app.task(bind=True, max_retries=3) @@ -468,62 +470,77 @@ def _set_geom(self): else: self.geom = None - def get_full_dict(self): - """Returns the submission 
XML as a python dictionary object.""" + def get_full_dict(self, include_related=True): + """Returns the submission XML as a python dictionary object + + Include metadata + + Args: + include_related (bool): Whether to include related objects + or not + """ # Get latest dict doc = self.get_dict() - # pylint: disable=no-member + # Update dict + geopoint = [self.point.y, self.point.x] if self.point else [None, None] + doc.update( + { + UUID: self.uuid, + BAMBOO_DATASET_ID: self.xform.bamboo_dataset, + STATUS: self.status, + VERSION: self.version, + DURATION: self.get_duration(), + XFORM_ID_STRING: self._parser.get_xform_id_string(), + XFORM_ID: self.xform.pk, + GEOLOCATION: geopoint, + SUBMITTED_BY: self.user.username if self.user else None, + DATE_MODIFIED: self.date_modified.isoformat(), + SUBMISSION_TIME: self.date_created.isoformat(), + TOTAL_MEDIA: self.total_media, + MEDIA_COUNT: self.media_count, + MEDIA_ALL_RECEIVED: self.media_all_received, + } + ) - if self.id: - geopoint = [self.point.y, self.point.x] if self.point else [None, None] - doc.update( - { - UUID: self.uuid, - ID: self.id, - BAMBOO_DATASET_ID: self.xform.bamboo_dataset, - ATTACHMENTS: _get_attachments_from_instance(self), - STATUS: self.status, - TAGS: list(self.tags.names()), - NOTES: self.get_notes(), - VERSION: doc.get(VERSION, self.xform.version), - DURATION: self.get_duration(), - XFORM_ID_STRING: self._parser.get_xform_id_string(), - XFORM_ID: self.xform.pk, - GEOLOCATION: geopoint, - SUBMITTED_BY: self.user.username if self.user else None, - } - ) + if isinstance(self.deleted_at, datetime): + doc[DELETEDAT] = self.deleted_at.isoformat() - for osm in self.osm_data.all(): - doc.update(osm.get_tags_with_prefix()) + edited = False - if isinstance(self.deleted_at, datetime): - doc[DELETEDAT] = self.deleted_at.isoformat() + if hasattr(self, "last_edited"): + edited = self.last_edited is not None - # pylint: disable=no-member - if self.has_a_review: - review = self.get_latest_review() - if review: 
- doc[REVIEW_STATUS] = review.status - doc[REVIEW_DATE] = review.date_created.isoformat() - if review.get_note_text(): - doc[REVIEW_COMMENT] = review.get_note_text() - - doc[DATE_MODIFIED] = self.date_modified.isoformat() - doc[SUBMISSION_TIME] = self.date_created.isoformat() - doc[TOTAL_MEDIA] = self.total_media - doc[MEDIA_COUNT] = self.media_count - doc[MEDIA_ALL_RECEIVED] = self.media_all_received - - edited = False - if hasattr(self, "last_edited"): - edited = self.last_edited is not None - - doc[EDITED] = edited - if edited: + doc[EDITED] = edited + + if edited: + doc.update({LAST_EDITED: convert_to_serializable_date(self.last_edited)}) + + if self.id: + doc[ID] = self.id + + if include_related: doc.update( - {LAST_EDITED: convert_to_serializable_date(self.last_edited)} + { + ATTACHMENTS: _get_attachments_from_instance(self), + TAGS: list(self.tags.names()), + NOTES: self.get_notes(), + } ) + + for osm in self.osm_data.all(): + doc.update(osm.get_tags_with_prefix()) + + # pylint: disable=no-member + if self.has_a_review: + review = self.get_latest_review() + + if review: + doc[REVIEW_STATUS] = review.status + doc[REVIEW_DATE] = review.date_created.isoformat() + + if review.get_note_text(): + doc[REVIEW_COMMENT] = review.get_note_text() + return doc def _set_parser(self): @@ -766,7 +783,7 @@ def attachments_count(self): def save(self, *args, **kwargs): force = kwargs.get("force") self.date_modified = now() - self.json = self.get_dict() # XML converted to json + self.version = self.get_dict().get(VERSION, self.xform.version) if force: del kwargs["force"] @@ -814,7 +831,15 @@ def post_save_submission(sender, instance=None, created=False, **kwargs): if instance.deleted_at is not None: _update_xform_submission_count_delete(instance) - if ASYNC_POST_SUBMISSION_PROCESSING_ENABLED: + if ( + hasattr(settings, "ASYNC_POST_SUBMISSION_PROCESSING_ENABLED") + and settings.ASYNC_POST_SUBMISSION_PROCESSING_ENABLED + ): + # We first save metadata data without related objects 
+ # (metadata from non-performance intensive tasks) first since we + # do not know when the async processing will complete + save_full_json(instance, False) + transaction.on_commit( lambda: update_xform_submission_count_async.apply_async( args=[instance.pk, created] diff --git a/onadata/apps/logger/tests/models/test_instance.py b/onadata/apps/logger/tests/models/test_instance.py index 3d826abd28..09cef2fcb9 100644 --- a/onadata/apps/logger/tests/models/test_instance.py +++ b/onadata/apps/logger/tests/models/test_instance.py @@ -6,6 +6,7 @@ from django.http.request import HttpRequest from django.utils.timezone import utc from django_digest.test import DigestAuth +from django.test import override_settings from mock import patch, Mock from onadata.apps.logger.models import XForm, Instance, SubmissionReview @@ -34,19 +35,35 @@ def setUp(self): def test_stores_json(self): self._publish_transportation_form_and_submit_instance() - instances = Instance.objects.all() - xform_id_string = XForm.objects.all()[0].id_string + instance = Instance.objects.first() - for instance in instances: - self.assertNotEqual(instance.json, {}) - self.assertEqual(instance.json.get("_id"), instance.pk) - self.assertEqual( - instance.json.get("_date_modified"), instance.date_modified.isoformat() - ) - self.assertEqual( - instance.json.get("_submission_time"), instance.date_created.isoformat() - ) - self.assertEqual(instance.json.get("_xform_id_string"), xform_id_string) + self.assertEqual( + instance.json, + { + "_id": instance.pk, + "_tags": [], + "_uuid": "5b2cc313-fc09-437e-8149-fcd32f695d41", + "_notes": [], + "image1": "1335783522563.jpg", + "_edited": False, + "_status": "submitted_via_web", + "_version": "2014111", + "_duration": "", + "_xform_id": instance.xform.pk, + "_attachments": [], + "_geolocation": [None, None], + "_media_count": 0, + "_total_media": 1, + "_submitted_by": "bob", + "_date_modified": instance.date_modified.isoformat(), + "meta/instanceID": 
"uuid:5b2cc313-fc09-437e-8149-fcd32f695d41", + "_submission_time": instance.date_created.isoformat(), + "_xform_id_string": "transportation_2011_07_25", + "_bamboo_dataset_id": "", + "_media_all_received": False, + "transport/available_transportation_types_to_referral_facility": "none", + }, + ) def test_updates_json_date_modified_on_save(self): """_date_modified in `json` field is updated on save""" @@ -363,3 +380,38 @@ def test_numeric_checker_with_negative_integer_values(self): string_value = "Hello World" result = numeric_checker(string_value) self.assertEqual(result, "Hello World") + + @override_settings(ASYNC_POST_SUBMISSION_PROCESSING_ENABLED=True) + @patch("onadata.apps.logger.models.instance.save_full_json_async.apply_async") + def test_light_tasks_synchronous(self, mock_json_async): + """Metadata from light tasks is always processed synchronously""" + self._publish_transportation_form_and_submit_instance() + instance = Instance.objects.first() + mock_json_async.assert_called() + # _notes, _tags, _attachments should be missing since getting related + # objects is performance intensive and should be handled async. 
Here + # we mock the async task to simulate a failed async job + self.assertEqual( + instance.json, + { + "_id": instance.pk, + "_uuid": "5b2cc313-fc09-437e-8149-fcd32f695d41", + "image1": "1335783522563.jpg", + "_edited": False, + "_status": "submitted_via_web", + "_version": "2014111", + "_duration": "", + "_xform_id": instance.xform.pk, + "_geolocation": [None, None], + "_media_count": 0, + "_total_media": 1, + "_submitted_by": "bob", + "_date_modified": instance.date_modified.isoformat(), + "meta/instanceID": "uuid:5b2cc313-fc09-437e-8149-fcd32f695d41", + "_submission_time": instance.date_created.isoformat(), + "_xform_id_string": "transportation_2011_07_25", + "_bamboo_dataset_id": "", + "_media_all_received": False, + "transport/available_transportation_types_to_referral_facility": "none", + }, + ) From 2c98480744fa4bdb10a694f78aa50d19f27fe270 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Mon, 29 Jan 2024 09:03:21 +0300 Subject: [PATCH 135/270] Fix SQL syntax error when grouping by select one Signed-off-by: Kipchirchir Sigei --- onadata/libs/data/query.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/onadata/libs/data/query.py b/onadata/libs/data/query.py index 614cf27add..96e350f965 100644 --- a/onadata/libs/data/query.py +++ b/onadata/libs/data/query.py @@ -80,7 +80,7 @@ def _postgres_count_group_field_n_group_by(field, name, xform, group_by, data_vi "count(*) as count " "FROM %(table)s WHERE " + restricted_string - + "AND deleted_at IS NULL " + + " AND deleted_at IS NULL " + additional_filters + " GROUP BY %(json)s, %(group_by)s" + " ORDER BY %(json)s, %(group_by)s" From fe506983d78e11ffee994c26313a35cade12d4ae Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Mon, 29 Jan 2024 09:03:39 +0300 Subject: [PATCH 136/270] Add tests Signed-off-by: Kipchirchir Sigei --- .../api/tests/viewsets/test_charts_viewset.py | 32 +++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git 
a/onadata/apps/api/tests/viewsets/test_charts_viewset.py b/onadata/apps/api/tests/viewsets/test_charts_viewset.py index e340e5c446..ebafc8f3be 100644 --- a/onadata/apps/api/tests/viewsets/test_charts_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_charts_viewset.py @@ -524,3 +524,35 @@ def test_charts_caching(self): ) self.assertEqual(response.status_code, 200) self.assertNotEqual(response.data, initial_data) + + def test_charts_group_by_select_one(self): + """ + Test that the chart endpoint works correctly + when grouping with select one field + """ + data = {"field_name": "gender", "group_by": "pizza_fan"} + request = self.factory.get("/charts", data) + force_authenticate(request, user=self.user) + initial_data = { + "data": [ + {"gender": ["Male"], "items": [{"pizza_fan": ["No"], "count": 1}]}, + { + "gender": ["Female"], + "items": [ + {"pizza_fan": ["No"], "count": 1}, + {"pizza_fan": ["Yes"], "count": 1}, + ], + }, + ], + "data_type": "categorized", + "field_label": "Gender", + "field_xpath": "gender", + "field_name": "gender", + "field_type": "select one", + "grouped_by": "pizza_fan", + "xform": self.xform.pk, + } + + response = self.view(request, pk=self.xform.id) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.data, initial_data) From 2713f21dedbef02a6e209ddd2ce8f07ba49ad164 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Tue, 6 Feb 2024 09:45:55 +0300 Subject: [PATCH 137/270] Tag release v3.18.0 (#2550) * tag release v3.17.4 * bump version to 3.18.0 --- CHANGES.rst | 9 +++++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index b3ae317372..162b0ef672 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,15 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.18.0(2024-02-05) +------------------- +- Fix SQL syntax error when grouping by select one + `PR #2549 ` + [@KipSigei] +- Process Instance 
metadata from light tasks synchronously + `PR #2547 ` + [@kelvin-muchiri] + v3.17.3(2024-01-15) ------------------- - Explicitly set AWS_S3_ENDPOINT_URL in boto3 configs diff --git a/onadata/__init__.py b/onadata/__init__.py index 3f6028d678..3c9dc6e0cd 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.17.3" +__version__ = "3.18.0" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index f0e3f95f1a..f72a44fc49 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.17.3 +version = 3.18.0 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 67c161548af04d1543c0a19fec0e0a4e292f7bd0 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Thu, 8 Feb 2024 12:24:05 +0300 Subject: [PATCH 138/270] Fix bug invalid endpoint when fetching media files (#2551) * solve invalid endpoint error when fetching media files Error was being thrown causing a 500 error when getting media files * do not pass botocore endpoint_url param * re-add boto3 endpoint_url and set default to None * remove redundant boto3 region_name argument * set None as default AWS_S3_REGION_NAME when generating aws media url empty strings are interpreted as truthy by the package --- onadata/libs/utils/image_tools.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/onadata/libs/utils/image_tools.py b/onadata/libs/utils/image_tools.py index 178b296639..b0d404f748 100644 --- a/onadata/libs/utils/image_tools.py +++ b/onadata/libs/utils/image_tools.py @@ -79,10 +79,10 @@ def generate_aws_media_url( """Generate S3 URL.""" s3_class = get_storage_class("storages.backends.s3boto3.S3Boto3Storage")() bucket_name = s3_class.bucket.name - aws_endpoint_url = getattr(settings, "AWS_S3_ENDPOINT_URL", "") + aws_endpoint_url = getattr(settings, 
"AWS_S3_ENDPOINT_URL", None) s3_config = Config( signature_version=getattr(settings, "AWS_S3_SIGNATURE_VERSION", "s3v4"), - region_name=getattr(settings, "AWS_S3_REGION_NAME", ""), + region_name=getattr(settings, "AWS_S3_REGION_NAME", None), ) s3_client = boto3.client( "s3", From 39d7cb453b882727ed277c53cf3266e8e2deb1b5 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Tue, 20 Feb 2024 12:07:43 +0300 Subject: [PATCH 139/270] Bump version to v3.19.0 (#2555) * bump version to v3.19.0 * update change log doc --- CHANGES.rst | 4 ++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 162b0ef672..19ba7d4a9d 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,10 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.19.0(2024-02-20) +------------------- +- Upgrade package pricing to version v1.1.1 + v3.18.0(2024-02-05) ------------------- - Fix SQL syntax error when grouping by select one diff --git a/onadata/__init__.py b/onadata/__init__.py index 3c9dc6e0cd..82441273fd 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.18.0" +__version__ = "3.19.0" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index f72a44fc49..c145e98ec5 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.18.0 +version = 3.19.0 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 474b9e39f7cd9bd87da72bff46cd3bff0dbb0a9d Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Tue, 20 Feb 2024 17:40:08 +0300 Subject: [PATCH 140/270] rollback version to v3.18.0 (#2556) --- CHANGES.rst | 4 ---- onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 2 insertions(+), 6 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 
19ba7d4a9d..162b0ef672 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,10 +3,6 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` -v3.19.0(2024-02-20) -------------------- -- Upgrade package pricing to version v1.1.1 - v3.18.0(2024-02-05) ------------------- - Fix SQL syntax error when grouping by select one diff --git a/onadata/__init__.py b/onadata/__init__.py index 82441273fd..3c9dc6e0cd 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.19.0" +__version__ = "3.18.0" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index c145e98ec5..f72a44fc49 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.19.0 +version = 3.18.0 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 2cb42b420a626db94520776c9591335b1ab4817f Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Wed, 21 Feb 2024 09:16:43 +0300 Subject: [PATCH 141/270] bump version to v3.18.1 (#2557) --- CHANGES.rst | 5 +++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 162b0ef672..c49b95d5cb 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,11 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.18.1(2024-02-21) +- Fix bug invalid endpoint when fetching media files + `PR #2551 ` + [@kelvin-muchiri] + v3.18.0(2024-02-05) ------------------- - Fix SQL syntax error when grouping by select one diff --git a/onadata/__init__.py b/onadata/__init__.py index 3c9dc6e0cd..c9f6a1a451 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.18.0" +__version__ = "3.18.1" # This will make sure the app is always imported 
when diff --git a/setup.cfg b/setup.cfg index f72a44fc49..b7f5fd5242 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.18.0 +version = 3.18.1 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From e8290d55beec3ab47a61803743ee206995227b68 Mon Sep 17 00:00:00 2001 From: apiyo Date: Fri, 23 Feb 2024 10:00:08 +0300 Subject: [PATCH 142/270] refactor: Improve perfomance of /status endpoint --- .../apps/main/tests/test_service_health.py | 31 +++++++++---------- onadata/apps/main/views.py | 9 +++++- 2 files changed, 23 insertions(+), 17 deletions(-) diff --git a/onadata/apps/main/tests/test_service_health.py b/onadata/apps/main/tests/test_service_health.py index b9f1767df8..9137acdb99 100644 --- a/onadata/apps/main/tests/test_service_health.py +++ b/onadata/apps/main/tests/test_service_health.py @@ -1,7 +1,6 @@ import json from django.http import HttpRequest -from django.db.utils import DatabaseError -from mock import patch +from django.test import override_settings from onadata.apps.main.tests.test_base import TestBase from onadata.apps.main.views import service_health @@ -20,21 +19,21 @@ def test_service_health(self): self.assertEqual(resp.status_code, 200) self.assertEqual( - json.loads(resp.content.decode('utf-8')), - { - 'default-Database': 'OK', - 'Cache-Service': 'OK' - }) + json.loads(resp.content.decode("utf-8")), + {"default-Database": "OK", "Cache-Service": "OK"}, + ) - with patch('onadata.apps.main.views.XForm') as xform_mock: - xform_mock.objects.using().first.side_effect = DatabaseError( - 'Some database error') - resp = service_health(req) + sql_statement_with_error = "SELECT id FROM non_existent_table limit 1;" + with override_settings(CHECK_DB_SQL_STATEMENT=sql_statement_with_error): + resp = service_health(req) self.assertEqual(resp.status_code, 500) + response_json = json.loads(resp.content.decode("utf-8")) + 
self.assertEqual(response_json["Cache-Service"], "OK") self.assertEqual( - json.loads(resp.content.decode('utf-8')), - { - 'default-Database': 'Degraded state; Some database error', - 'Cache-Service': 'OK' - }) + response_json["default-Database"][:111], + ( + 'Degraded state; relation "non_existent_table" does not exist' + + f"\nLINE 1: {sql_statement_with_error}" + ), + ) diff --git a/onadata/apps/main/views.py b/onadata/apps/main/views.py index 02b1c1531f..8abc4c19d5 100644 --- a/onadata/apps/main/views.py +++ b/onadata/apps/main/views.py @@ -8,6 +8,7 @@ from datetime import datetime from http import HTTPStatus +from django.db import connections from django.conf import settings from django.contrib import messages from django.contrib.auth import get_user_model @@ -1552,7 +1553,13 @@ def service_health(request): for database in getattr(settings, "DATABASES").keys(): # pylint: disable=broad-except try: - XForm.objects.using(database).first() + with connections[database].cursor() as cursor: + fetch_first_xform_sql = ( + getattr(settings, "CHECK_DB_SQL_STATEMENT", None) + or "SELECT id FROM logger_xform limit 1;" + ) + cursor.execute(fetch_first_xform_sql) + cursor.fetchall() except Exception as e: service_statuses[f"{database}-Database"] = f"Degraded state; {e}" service_degraded = True From 952731b4a8cf903f7daa3206d4dbdd0ab66039f3 Mon Sep 17 00:00:00 2001 From: apiyo Date: Fri, 23 Feb 2024 11:22:21 +0300 Subject: [PATCH 143/270] feat: Add onadata version to the status endpoint --- onadata/apps/main/tests/test_service_health.py | 8 +++++++- onadata/apps/main/views.py | 7 ++++++- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/onadata/apps/main/tests/test_service_health.py b/onadata/apps/main/tests/test_service_health.py index 9137acdb99..b97dc295f9 100644 --- a/onadata/apps/main/tests/test_service_health.py +++ b/onadata/apps/main/tests/test_service_health.py @@ -4,6 +4,7 @@ from onadata.apps.main.tests.test_base import TestBase from 
onadata.apps.main.views import service_health +import onadata class TestServiceHealthView(TestBase): @@ -20,7 +21,11 @@ def test_service_health(self): self.assertEqual(resp.status_code, 200) self.assertEqual( json.loads(resp.content.decode("utf-8")), - {"default-Database": "OK", "Cache-Service": "OK"}, + { + "default-Database": "OK", + "Cache-Service": "OK", + "onadata-version": onadata.__version__, + }, ) sql_statement_with_error = "SELECT id FROM non_existent_table limit 1;" @@ -30,6 +35,7 @@ def test_service_health(self): self.assertEqual(resp.status_code, 500) response_json = json.loads(resp.content.decode("utf-8")) self.assertEqual(response_json["Cache-Service"], "OK") + self.assertEqual(response_json["onadata-version"], onadata.__version__) self.assertEqual( response_json["default-Database"][:111], ( diff --git a/onadata/apps/main/views.py b/onadata/apps/main/views.py index 8abc4c19d5..bf4e82cef5 100644 --- a/onadata/apps/main/views.py +++ b/onadata/apps/main/views.py @@ -91,6 +91,7 @@ set_profile_data, ) from onadata.libs.utils.viewer_tools import get_enketo_urls, get_form +import onadata # pylint: disable=invalid-name User = get_user_model() @@ -1576,6 +1577,11 @@ def service_health(request): else: service_statuses["Cache-Service"] = "OK" + if onadata.__version__: + service_statuses["onadata-version"] = onadata.__version__ + else: + service_statuses["onadata-version"] = "Unable to find onadata version" + return JsonResponse( service_statuses, status=HTTPStatus.INTERNAL_SERVER_ERROR if service_degraded else HTTPStatus.OK, @@ -1599,7 +1605,6 @@ def username_list(request): # pylint: disable=too-few-public-methods class OnaAuthorizationView(AuthorizationView): - """ Overrides the AuthorizationView provided by oauth2_provider and adds the user to the context From ff21c451a67c3f146095ce4515e941b685aca54c Mon Sep 17 00:00:00 2001 From: apiyo Date: Fri, 23 Feb 2024 12:15:55 +0300 Subject: [PATCH 144/270] Bump onadata version to v3.18.2 --- CHANGES.rst | 7 +++++++ 
onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index c49b95d5cb..f5418931f2 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,7 +3,14 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.18.2(2024-02-23) +------------------- +- Improve perfomance of /status endpoint + `PR #2551 ` + [@FrankApiyo] + v3.18.1(2024-02-21) +------------------- - Fix bug invalid endpoint when fetching media files `PR #2551 ` [@kelvin-muchiri] diff --git a/onadata/__init__.py b/onadata/__init__.py index c9f6a1a451..101038e5e4 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.18.1" +__version__ = "3.18.2" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index b7f5fd5242..91d615cd1a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.18.1 +version = 3.18.2 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From e0ef23c0829e84e1f3d1b4263c62907a14d0970d Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Tue, 27 Feb 2024 10:50:48 +0300 Subject: [PATCH 145/270] Ignore child questions of grouped sections within repeating sections during CSV export (#2559) * ignore child questions of repating questions fix bug where child questions of repeating questions were not ignored as intended * remove reference to another repo * refactor test names * address failing test * rename test case method name * refactor code * fix flaky test --- .../api/tests/viewsets/test_charts_viewset.py | 523 +++++++++--------- onadata/libs/tests/utils/test_csv_builder.py | 220 ++++++++ onadata/libs/utils/csv_builder.py | 56 +- 3 files changed, 517 insertions(+), 282 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_charts_viewset.py 
b/onadata/apps/api/tests/viewsets/test_charts_viewset.py index ebafc8f3be..b334d5a1a1 100644 --- a/onadata/apps/api/tests/viewsets/test_charts_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_charts_viewset.py @@ -55,53 +55,79 @@ def setUp(self): self._publish_xls_file_and_set_xform( os.path.join( os.path.dirname(__file__), - '..', 'fixtures', 'forms', 'tutorial', 'tutorial.xlsx')) + "..", + "fixtures", + "forms", + "tutorial", + "tutorial.xlsx", + ) + ) self.api_client = APIClient() self.api_client.login( - username=self.login_username, password=self.login_password) - self.view = ChartsViewSet.as_view({ - 'get': 'retrieve' - }) + username=self.login_username, password=self.login_password + ) + self.view = ChartsViewSet.as_view({"get": "retrieve"}) self.factory = APIRequestFactory() self._make_submission( os.path.join( - os.path.dirname(__file__), '..', 'fixtures', 'forms', - 'tutorial', 'instances', '1.xml')) + os.path.dirname(__file__), + "..", + "fixtures", + "forms", + "tutorial", + "instances", + "1.xml", + ) + ) self._make_submission( os.path.join( - os.path.dirname(__file__), '..', 'fixtures', 'forms', - 'tutorial', 'instances', '2.xml')) + os.path.dirname(__file__), + "..", + "fixtures", + "forms", + "tutorial", + "instances", + "2.xml", + ) + ) self._make_submission( os.path.join( - os.path.dirname(__file__), '..', 'fixtures', 'forms', - 'tutorial', 'instances', '3.xml')) + os.path.dirname(__file__), + "..", + "fixtures", + "forms", + "tutorial", + "instances", + "3.xml", + ) + ) def test_correct_merged_dataset_data_for_charts(self): """Return correct data from the charts endpoint""" - view = MergedXFormViewSet.as_view({ - 'post': 'create', - }) + view = MergedXFormViewSet.as_view( + { + "post": "create", + } + ) # pylint: disable=attribute-defined-outside-init self.project = get_user_default_project(self.user) - xform_a = self._publish_markdown(MD, self.user, id_string='a') - xform_b = self._publish_markdown(MD2, self.user, id_string='b') + xform_a = 
self._publish_markdown(MD, self.user, id_string="a") + xform_b = self._publish_markdown(MD2, self.user, id_string="b") data = { - 'xforms': [ + "xforms": [ "http://testserver/api/v1/forms/%s" % xform_a.pk, "http://testserver/api/v1/forms/%s" % xform_b.pk, ], - 'name': - 'Merged Dataset', - 'project': - f"http://testserver/api/v1/projects/{self.project.pk}", + "name": "Merged Dataset", + "project": f"http://testserver/api/v1/projects/{self.project.pk}", } # anonymous user - request = self.factory.post('/', data=data) + request = self.factory.post("/", data=data) response = view(request) self.assertEqual(response.status_code, 401) - request = self.factory.post('/', data=data) + request = self.factory.post("/", data=data) force_authenticate(request, user=self.user) response = view(request) self.assertEqual(response.status_code, 201) @@ -114,19 +140,17 @@ def test_correct_merged_dataset_data_for_charts(self): xml = 'apple cherries' Instance(xform=xform_b, xml=xml).save() - data = {'field_xpath': 'fruits'} - request = self.factory.get('/charts', data=data) + data = {"field_xpath": "fruits"} + request = self.factory.get("/charts", data=data) force_authenticate(request, user=self.user) - response = self.view( - request, - pk=response.data['id'] - ) + response = self.view(request, pk=response.data["id"]) self.assertEqual(response.status_code, 200) # check that the data is correct - expected_data = [{'fruits': ['Apple', 'Cherries'], 'count': 1}, - {'fruits': ['Orange', 'Mango'], 'count': 1}] - self.assertEqual(response.data['data'], - expected_data) + expected_data = [ + {"fruits": ["Apple", "Cherries"], "count": 1}, + {"fruits": ["Orange", "Mango"], "count": 1}, + ] + self.assertEqual(response.data["data"], expected_data) # Ensure response is renderable response.render() cache.clear() @@ -135,223 +159,191 @@ def test_duration_field_on_metadata(self): # the instance below has valid start and end times instance = Instance.objects.all()[0] _dict = 
instance.parsed_instance.to_dict_for_mongo() - self.assertIn('_duration', list(_dict)) - self.assertEqual(_dict.get('_duration'), 24.0) - self.assertNotEqual(_dict.get('_duration'), None) + self.assertIn("_duration", list(_dict)) + self.assertEqual(_dict.get("_duration"), 24.0) + self.assertNotEqual(_dict.get("_duration"), None) _dict = instance.json - duration = calculate_duration(_dict.get('start_time'), 'invalid') - self.assertIn('_duration', list(_dict)) - self.assertEqual(duration, '') + duration = calculate_duration(_dict.get("start_time"), "invalid") + self.assertIn("_duration", list(_dict)) + self.assertEqual(duration, "") self.assertNotEqual(duration, None) def test_get_on_categorized_field(self): - data = {'field_name': 'gender'} - request = self.factory.get('/charts', data) + data = {"field_name": "gender"} + request = self.factory.get("/charts", data) force_authenticate(request, user=self.user) - response = self.view( - request, - pk=self.xform.id, - format='html' - ) + response = self.view(request, pk=self.xform.id, format="html") self.assertEqual(response.status_code, 200) - self.assertNotEqual(response.get('Cache-Control'), None) - self.assertEqual(response.data['field_type'], 'select one') - self.assertEqual(response.data['field_name'], 'gender') - self.assertEqual(response.data['data_type'], 'categorized') - self.assertEqual(response.data['data'][0]['gender'], 'Male') - self.assertEqual(response.data['data'][1]['gender'], 'Female') + self.assertNotEqual(response.get("Cache-Control"), None) + self.assertEqual(response.data["field_type"], "select one") + self.assertEqual(response.data["field_name"], "gender") + self.assertEqual(response.data["data_type"], "categorized") + self.assertEqual(response.data["data"][0]["gender"], "Male") + self.assertEqual(response.data["data"][1]["gender"], "Female") def test_get_on_date_field(self): - data = {'field_name': 'date'} - request = self.factory.get('/charts', data) + data = {"field_name": "date"} + request = 
self.factory.get("/charts", data) force_authenticate(request, user=self.user) - response = self.view( - request, - pk=self.xform.id) + response = self.view(request, pk=self.xform.id) self.assertEqual(response.status_code, 200) - self.assertNotEqual(response.get('Cache-Control'), None) - self.assertEqual(response.data['field_type'], 'date') - self.assertEqual(response.data['field_name'], 'date') - self.assertEqual(response.data['data_type'], 'time_based') + self.assertNotEqual(response.get("Cache-Control"), None) + self.assertEqual(response.data["field_type"], "date") + self.assertEqual(response.data["field_name"], "date") + self.assertEqual(response.data["data_type"], "time_based") - @mock.patch('onadata.libs.data.query._execute_query', - side_effect=raise_data_error) + @mock.patch("onadata.libs.data.query._execute_query", side_effect=raise_data_error) def test_get_on_date_field_with_invalid_data(self, mock_execute_query): - data = {'field_name': 'date'} - request = self.factory.get('/charts', data) + data = {"field_name": "date"} + request = self.factory.get("/charts", data) force_authenticate(request, user=self.user) - response = self.view( - request, - pk=self.xform.id) + response = self.view(request, pk=self.xform.id) self.assertEqual(response.status_code, 400) def test_get_on_numeric_field(self): - data = {'field_name': 'age'} - request = self.factory.get('/charts', data) + data = {"field_name": "age"} + request = self.factory.get("/charts", data) force_authenticate(request, user=self.user) - response = self.view( - request, - pk=self.xform.id - ) + response = self.view(request, pk=self.xform.id) self.assertEqual(response.status_code, 200) - self.assertNotEqual(response.get('Cache-Control'), None) - self.assertEqual(response.data['field_type'], 'integer') - self.assertEqual(response.data['field_name'], 'age') - self.assertEqual(response.data['data_type'], 'numeric') + self.assertNotEqual(response.get("Cache-Control"), None) + 
self.assertEqual(response.data["field_type"], "integer") + self.assertEqual(response.data["field_name"], "age") + self.assertEqual(response.data["data_type"], "numeric") def test_get_on_select_field(self): - data = {'field_name': 'gender'} - request = self.factory.get('/charts', data) + data = {"field_name": "gender"} + request = self.factory.get("/charts", data) force_authenticate(request, user=self.user) - response = self.view( - request, - pk=self.xform.id - ) + response = self.view(request, pk=self.xform.id) self.assertEqual(response.status_code, 200) - self.assertNotEqual(response.get('Cache-Control'), None) - self.assertEqual(response.data['field_type'], 'select one') - self.assertEqual(response.data['field_name'], 'gender') - self.assertEqual(response.data['data_type'], 'categorized') + self.assertNotEqual(response.get("Cache-Control"), None) + self.assertEqual(response.data["field_type"], "select one") + self.assertEqual(response.data["field_name"], "gender") + self.assertEqual(response.data["data_type"], "categorized") def test_get_on_select_field_xpath(self): - data = {'field_xpath': 'gender'} - request = self.factory.get('/charts', data) + data = {"field_xpath": "gender"} + request = self.factory.get("/charts", data) force_authenticate(request, user=self.user) - response = self.view( - request, - pk=self.xform.id - ) + response = self.view(request, pk=self.xform.id) self.assertEqual(response.status_code, 200) - self.assertNotEqual(response.get('Cache-Control'), None) - self.assertEqual(response.data['field_type'], 'select one') - self.assertEqual(response.data['field_name'], 'gender') - self.assertEqual(response.data['data_type'], 'categorized') + self.assertNotEqual(response.get("Cache-Control"), None) + self.assertEqual(response.data["field_type"], "select one") + self.assertEqual(response.data["field_name"], "gender") + self.assertEqual(response.data["data_type"], "categorized") def test_get_on_select_multi_field(self): - field_name = 
'favorite_toppings' - data = {'field_name': field_name} - request = self.factory.get('/charts', data) + field_name = "favorite_toppings" + data = {"field_name": field_name} + request = self.factory.get("/charts", data) force_authenticate(request, user=self.user) - response = self.view( - request, - pk=self.xform.id - ) + response = self.view(request, pk=self.xform.id) self.assertEqual(response.status_code, 200) - self.assertNotEqual(response.get('Cache-Control'), None) - self.assertEqual(response.data['field_type'], 'select all that apply') - self.assertEqual(response.data['field_name'], field_name) - self.assertEqual(response.data['data_type'], 'categorized') + self.assertNotEqual(response.get("Cache-Control"), None) + self.assertEqual(response.data["field_type"], "select all that apply") + self.assertEqual(response.data["field_name"], field_name) + self.assertEqual(response.data["data_type"], "categorized") - options = response.data['data'][0][field_name] - self.assertEqual(options, ['Green Peppers', 'Pepperoni']) + options = response.data["data"][0][field_name] + self.assertEqual(options, ["Green Peppers", "Pepperoni"]) def test_get_on_select_multi_field_html_format(self): - field_name = 'favorite_toppings' - data = {'field_name': field_name} - request = self.factory.get('/charts', data) + field_name = "favorite_toppings" + data = {"field_name": field_name} + request = self.factory.get("/charts", data) force_authenticate(request, user=self.user) - response = self.view( - request, - pk=self.xform.id, - format='html' - ) + response = self.view(request, pk=self.xform.id, format="html") self.assertEqual(response.status_code, 200) - self.assertNotEqual(response.get('Cache-Control'), None) - self.assertEqual(response.data['field_type'], 'select all that apply') - self.assertEqual(response.data['field_name'], field_name) - self.assertEqual(response.data['data_type'], 'categorized') + self.assertNotEqual(response.get("Cache-Control"), None) + 
self.assertEqual(response.data["field_type"], "select all that apply") + self.assertEqual(response.data["field_name"], field_name) + self.assertEqual(response.data["data_type"], "categorized") - options = response.data['data'][0][field_name] - self.assertEqual(options, 'Green Peppers, Pepperoni') + options = response.data["data"][0][field_name] + self.assertEqual(options, "Green Peppers, Pepperoni") def test_get_all_fields(self): - data = {'fields': 'all'} - request = self.factory.get('/', data) + data = {"fields": "all"} + request = self.factory.get("/", data) force_authenticate(request, user=self.user) - response = self.view( - request, - pk=self.xform.id - ) + response = self.view(request, pk=self.xform.id) self.assertEqual(response.status_code, 200) - self.assertNotEqual(response.get('Cache-Control'), None) - self.assertIn('age', response.data) - self.assertIn('date', response.data) - self.assertIn('gender', response.data) - self.assertEqual(response.data['age']['field_type'], 'integer') - self.assertEqual(response.data['age']['field_name'], 'age') - self.assertEqual(response.data['age']['data_type'], 'numeric') + self.assertNotEqual(response.get("Cache-Control"), None) + self.assertIn("age", response.data) + self.assertIn("date", response.data) + self.assertIn("gender", response.data) + self.assertEqual(response.data["age"]["field_type"], "integer") + self.assertEqual(response.data["age"]["field_name"], "age") + self.assertEqual(response.data["age"]["data_type"], "numeric") def test_get_specific_fields(self): - data = {'fields': 'date,age'} - request = self.factory.get('/', data) + data = {"fields": "date,age"} + request = self.factory.get("/", data) force_authenticate(request, user=self.user) - response = self.view( - request, - pk=self.xform.id - ) + response = self.view(request, pk=self.xform.id) self.assertEqual(response.status_code, 200) - self.assertNotEqual(response.get('Cache-Control'), None) + self.assertNotEqual(response.get("Cache-Control"), None) - 
self.assertNotIn('gender', response.data) + self.assertNotIn("gender", response.data) - self.assertIn('age', response.data) - data = response.data['age'] - self.assertEqual(data['field_type'], 'integer') - self.assertEqual(data['field_name'], 'age') - self.assertEqual(data['data_type'], 'numeric') + self.assertIn("age", response.data) + data = response.data["age"] + self.assertEqual(data["field_type"], "integer") + self.assertEqual(data["field_name"], "age") + self.assertEqual(data["data_type"], "numeric") - self.assertIn('date', response.data) - data = response.data['date'] - self.assertEqual(data['field_type'], 'date') - self.assertEqual(data['field_name'], 'date') - self.assertEqual(data['data_type'], 'time_based') + self.assertIn("date", response.data) + data = response.data["date"] + self.assertEqual(data["field_type"], "date") + self.assertEqual(data["field_name"], "date") + self.assertEqual(data["data_type"], "time_based") def test_get_invalid_field_name(self): - data = {'fields': 'invalid_field_name'} - request = self.factory.get('/', data) + data = {"fields": "invalid_field_name"} + request = self.factory.get("/", data) force_authenticate(request, user=self.user) - response = self.view( - request, - pk=self.xform.id - ) + response = self.view(request, pk=self.xform.id) self.assertEqual(response.status_code, 404) def test_chart_list(self): - self.view = ChartsViewSet.as_view({ - 'get': 'list' - }) - request = self.factory.get('/charts') + self.view = ChartsViewSet.as_view({"get": "list"}) + request = self.factory.get("/charts") force_authenticate(request, user=self.user) response = self.view(request) - self.assertNotEqual(response.get('Cache-Control'), None) + self.assertNotEqual(response.get("Cache-Control"), None) self.assertEqual(response.status_code, 200) - data = {'id': self.xform.pk, 'id_string': self.xform.id_string, - 'url': 'http://testserver/api/v1/charts/%s' % self.xform.pk} + data = { + "id": self.xform.pk, + "id_string": self.xform.id_string, + 
"url": "http://testserver/api/v1/charts/%s" % self.xform.pk, + } self.assertEqual(response.data, [data]) - request = self.factory.get('/charts') + request = self.factory.get("/charts") response = self.view(request) self.assertEqual(response.status_code, 200) self.assertEqual(response.data, []) def test_chart_list_with_xform_in_delete_async(self): - self.view = ChartsViewSet.as_view({ - 'get': 'list' - }) - request = self.factory.get('/charts') + self.view = ChartsViewSet.as_view({"get": "list"}) + request = self.factory.get("/charts") force_authenticate(request, user=self.user) response = self.view(request) - self.assertNotEqual(response.get('Cache-Control'), None) + self.assertNotEqual(response.get("Cache-Control"), None) self.assertEqual(response.status_code, 200) - data = {'id': self.xform.pk, 'id_string': self.xform.id_string, - 'url': 'http://testserver/api/v1/charts/%s' % self.xform.pk} + data = { + "id": self.xform.pk, + "id_string": self.xform.id_string, + "url": "http://testserver/api/v1/charts/%s" % self.xform.pk, + } self.assertEqual(response.data, [data]) self.xform.deleted_at = timezone.now() self.xform.save() - request = self.factory.get('/charts') + request = self.factory.get("/charts") force_authenticate(request, user=self.user) response = self.view(request) self.assertEqual(response.status_code, 200) @@ -362,132 +354,142 @@ def test_cascading_select(self): self._publish_xls_file_and_set_xform( os.path.join( os.path.dirname(__file__), - '..', 'fixtures', 'forms', 'cascading', 'cascading.xlsx')) + "..", + "fixtures", + "forms", + "cascading", + "cascading.xlsx", + ) + ) self._make_submission( os.path.join( - os.path.dirname(__file__), '..', 'fixtures', 'forms', - 'cascading', 'instances', '1.xml')) + os.path.dirname(__file__), + "..", + "fixtures", + "forms", + "cascading", + "instances", + "1.xml", + ) + ) self._make_submission( os.path.join( - os.path.dirname(__file__), '..', 'fixtures', 'forms', - 'cascading', 'instances', '2.xml')) + 
os.path.dirname(__file__), + "..", + "fixtures", + "forms", + "cascading", + "instances", + "2.xml", + ) + ) self._make_submission( os.path.join( - os.path.dirname(__file__), '..', 'fixtures', 'forms', - 'cascading', 'instances', '3.xml')) + os.path.dirname(__file__), + "..", + "fixtures", + "forms", + "cascading", + "instances", + "3.xml", + ) + ) self._make_submission( os.path.join( - os.path.dirname(__file__), '..', 'fixtures', 'forms', - 'cascading', 'instances', '4.xml')) + os.path.dirname(__file__), + "..", + "fixtures", + "forms", + "cascading", + "instances", + "4.xml", + ) + ) - data = {'field_name': 'cities'} - request = self.factory.get('/charts', data) + data = {"field_name": "cities"} + request = self.factory.get("/charts", data) force_authenticate(request, user=self.user) - response = self.view( - request, - pk=self.xform.id, - format='json' - ) + response = self.view(request, pk=self.xform.id, format="json") self.assertEqual(response.status_code, 200) self.assertTrue(response.data) expected = [ - {'cities': [u'Nice'], 'count': 1}, - {'cities': [u'Seoul'], 'count': 1}, - {'cities': [u'Cape Town'], 'count': 2} + {"cities": ["Nice"], "count": 1}, + {"cities": ["Seoul"], "count": 1}, + {"cities": ["Cape Town"], "count": 2}, ] - self.assertEqual(expected, response.data['data']) + self.assertEqual(expected, response.data["data"]) @override_settings(XFORM_CHARTS_CACHE_TIME=0) def test_deleted_submission_not_in_chart_endpoint(self): - data = {'field_name': 'gender'} - request = self.factory.get('/charts', data) + data = {"field_name": "gender"} + request = self.factory.get("/charts", data) force_authenticate(request, user=self.user) - response = self.view( - request, - pk=self.xform.id, - format='html' - ) + response = self.view(request, pk=self.xform.id, format="html") self.assertEqual(response.status_code, 200) - self.assertEqual(sum([i['count'] for i in response.data['data']]), 3) + self.assertEqual(sum([i["count"] for i in response.data["data"]]), 3) # 
soft delete one instance inst = self.xform.instances.all()[0] inst.set_deleted(timezone.now()) - response = self.view( - request, - pk=self.xform.id, - format='html' - ) + response = self.view(request, pk=self.xform.id, format="html") self.assertEqual(response.status_code, 200) - self.assertEqual(sum([i['count'] for i in response.data['data']]), 2) + self.assertEqual(sum([i["count"] for i in response.data["data"]]), 2) def test_nan_not_json_response(self): self._make_submission( os.path.join( - os.path.dirname(__file__), '..', 'fixtures', 'forms', - 'tutorial', 'instances', 'nan_net_worth.xml')) + os.path.dirname(__file__), + "..", + "fixtures", + "forms", + "tutorial", + "instances", + "nan_net_worth.xml", + ) + ) - data = {'field_name': 'networth_calc', - 'group_by': 'pizza_fan'} - request = self.factory.get('/charts', data) + data = {"field_name": "networth_calc", "group_by": "pizza_fan"} + request = self.factory.get("/charts", data) force_authenticate(request, user=self.user) - response = self.view( - request, - pk=self.xform.id, - format='json' - ) + response = self.view(request, pk=self.xform.id, format="json") renderer = DecimalJSONRenderer() - res = json.loads(renderer.render(response.data).decode('utf-8')) + res = json.loads(renderer.render(response.data).decode("utf-8")) expected = { "field_type": "calculate", "data_type": "numeric", "field_xpath": "networth_calc", "data": [ - { - "count": 2, - "sum": 150000.0, - "pizza_fan": ["No"], - "mean": 75000.0 - }, - { - "count": 2, - "sum": None, - "pizza_fan": ["Yes"], - "mean": None - } + {"count": 2, "sum": 150000.0, "pizza_fan": ["No"], "mean": 75000.0}, + {"count": 2, "sum": None, "pizza_fan": ["Yes"], "mean": None}, ], "grouped_by": "pizza_fan", "field_label": "Networth Calc", "field_name": "networth_calc", - "xform": self.xform.pk + "xform": self.xform.pk, } self.assertEqual(expected, res) def test_on_charts_with_content_type(self): - request = self.factory.get('/charts', content_type="application/json") + 
request = self.factory.get("/charts", content_type="application/json") force_authenticate(request, user=self.user) - response = self.view( - request, - pk=self.xform.pk, - id_string=self.xform.id_string - ) + response = self.view(request, pk=self.xform.pk, id_string=self.xform.id_string) expected = { - 'id': self.xform.pk, - 'id_string': self.xform.id_string, - 'url': 'http://testserver/api/v1/charts/{}'.format(self.xform.pk) + "id": self.xform.pk, + "id_string": self.xform.id_string, + "url": "http://testserver/api/v1/charts/{}".format(self.xform.pk), } self.assertEqual(200, response.status_code) self.assertDictContainsSubset(expected, response.data) # If content-type is not returned; Assume that the desired # response is JSON - request = self.factory.get('/') + request = self.factory.get("/") force_authenticate(request, user=self.user) response = self.view(request, pk=self.xform.pk) self.assertEqual(200, response.status_code) @@ -497,31 +499,23 @@ def test_charts_caching(self): """ Test that the chart endpoints caching works as expected """ - data = {'field_name': 'gender'} - request = self.factory.get('/charts', data) + data = {"field_name": "gender"} + request = self.factory.get("/charts", data) force_authenticate(request, user=self.user) cache_key = f"{XFORM_CHARTS}{self.xform.id}NonegenderNonehtml" initial_data = {"some_data": "some_value"} cache.set(cache_key, initial_data) - response = self.view( - request, - pk=self.xform.id, - format='html' - ) + response = self.view(request, pk=self.xform.id, format="html") self.assertEqual(response.status_code, 200) self.assertEqual(response.data, initial_data) # Ensure that the initially cached data is refreshed # when `refresh` query param is true - data['refresh'] = 'true' - request = self.factory.get('/charts', data) + data["refresh"] = "true" + request = self.factory.get("/charts", data) force_authenticate(request, user=self.user) - response = self.view( - request, - pk=self.xform.id, - format='html' - ) + response 
= self.view(request, pk=self.xform.id, format="html") self.assertEqual(response.status_code, 200) self.assertNotEqual(response.data, initial_data) @@ -535,7 +529,12 @@ def test_charts_group_by_select_one(self): force_authenticate(request, user=self.user) initial_data = { "data": [ - {"gender": ["Male"], "items": [{"pizza_fan": ["No"], "count": 1}]}, + { + "gender": ["Male"], + "items": [ + {"pizza_fan": ["No"], "count": 1}, + ], + }, { "gender": ["Female"], "items": [ @@ -555,4 +554,6 @@ def test_charts_group_by_select_one(self): response = self.view(request, pk=self.xform.id) self.assertEqual(response.status_code, 200) + # response.data['data'] items can be in any order + self.assertCountEqual(response.data.pop("data"), initial_data.pop("data")) self.assertEqual(response.data, initial_data) diff --git a/onadata/libs/tests/utils/test_csv_builder.py b/onadata/libs/tests/utils/test_csv_builder.py index f9b2b631f7..b9452bc5a8 100644 --- a/onadata/libs/tests/utils/test_csv_builder.py +++ b/onadata/libs/tests/utils/test_csv_builder.py @@ -1805,3 +1805,223 @@ def test_split_select_multiples_with_randomize(self): [(key, result[key]) for key in list(result) if key in list(cursor[0])] ) self.assertEqual(cursor[0], result) + + def test_select_multiples_grouped_repeating_w_split(self): + """Select multiple choices within group within repeat with split""" + md_xform = """ + | survey | | | | + | | type | name | label | + | | text | name | Name | + | | integer | age | Age | + | | begin group | grp1 | Group 1 | + | | begin group | grp2 | Group 2 | + | | begin repeat | browser_use | Browser Use | + | | begin group | grp3 | Group 3 | + | | begin group | grp4 | Group 4 | + | | begin group | grp5 | Group 5 | + | | integer | year | Year | + | | select_multiple browsers | browsers | Browsers | + | | end group | | | + | | end group | | | + | | end group | | | + | | end repeat | | | + | | end group | | | + | | end group | | | + | choices | | | | + | | list_name | name | label | + | | 
browsers | firefox | Firefox | + | | browsers | chrome | Chrome | + | | browsers | ie | Internet Explorer | + | | browsers | safari | Safari |""" + + xform = self._publish_markdown(md_xform, self.user, id_string="nested_split") + cursor = [ + { + "name": "Bob", + "age": 24, + "grp1/grp2/browser_use": [ + { + "grp1/grp2/browser_use/grp3/grp4/grp5/year": "2010", + "grp1/grp2/browser_use/grp3/grp4/grp5/browsers": "firefox safari", + }, + { + "grp1/grp2/browser_use/grp3/grp4/grp5/year": "2011", + "grp1/grp2/browser_use/grp3/grp4/grp5/browsers": "firefox chrome", + }, + ], + }, + ] + builder = CSVDataFrameBuilder( + self.user.username, + xform.id_string, + split_select_multiples=True, + include_images=False, + ) + temp_file = NamedTemporaryFile(suffix=".csv", delete=False) + builder.export_to(temp_file.name, cursor) + csv_file = open(temp_file.name, "r") + csv_reader = csv.reader(csv_file) + header = next(csv_reader) + expected_header = [ + "name", + "age", + "grp1/grp2/browser_use[1]/grp3/grp4/grp5/year", + "grp1/grp2/browser_use[1]/grp3/grp4/grp5/browsers/firefox", + "grp1/grp2/browser_use[1]/grp3/grp4/grp5/browsers/chrome", + "grp1/grp2/browser_use[1]/grp3/grp4/grp5/browsers/ie", + "grp1/grp2/browser_use[1]/grp3/grp4/grp5/browsers/safari", + "grp1/grp2/browser_use[2]/grp3/grp4/grp5/year", + "grp1/grp2/browser_use[2]/grp3/grp4/grp5/browsers/firefox", + "grp1/grp2/browser_use[2]/grp3/grp4/grp5/browsers/chrome", + "grp1/grp2/browser_use[2]/grp3/grp4/grp5/browsers/ie", + "grp1/grp2/browser_use[2]/grp3/grp4/grp5/browsers/safari", + "meta/instanceID", + "_id", + "_uuid", + "_submission_time", + "_date_modified", + "_tags", + "_notes", + "_version", + "_duration", + "_submitted_by", + "_total_media", + "_media_count", + "_media_all_received", + ] + self.assertEqual(header, expected_header) + row = next(csv_reader) + expected_row = [ + "Bob", + "24", + "2010", + "True", + "False", + "False", + "True", + "2011", + "True", + "True", + "False", + "False", + "n/a", + "n/a", + 
"n/a", + "n/a", + "n/a", + "n/a", + "n/a", + "n/a", + "n/a", + "n/a", + "n/a", + "n/a", + "n/a", + ] + self.assertEqual(row, expected_row) + + csv_file.close() + + def test_select_multiples_grouped_repeating_wo_split(self): + """Select multiple choices within group within repeat without split""" + md_xform = """ + | survey | | | | + | | type | name | label | + | | text | name | Name | + | | integer | age | Age | + | | begin group | grp1 | Group 1 | + | | begin group | grp2 | Group 2 | + | | begin repeat | browser_use | Browser Use | + | | begin group | grp3 | Group 3 | + | | begin group | grp4 | Group 4 | + | | begin group | grp5 | Group 5 | + | | integer | year | Year | + | | select_multiple browsers | browsers | Browsers | + | | end group | | | + | | end group | | | + | | end group | | | + | | end repeat | | | + | | end group | | | + | | end group | | | + | choices | | | | + | | list_name | name | label | + | | browsers | firefox | Firefox | + | | browsers | chrome | Chrome | + | | browsers | ie | Internet Explorer | + | | browsers | safari | Safari |""" + + xform = self._publish_markdown(md_xform, self.user, id_string="nested_split") + cursor = [ + { + "name": "Bob", + "age": 24, + "grp1/grp2/browser_use": [ + { + "grp1/grp2/browser_use/grp3/grp4/grp5/year": "2010", + "grp1/grp2/browser_use/grp3/grp4/grp5/browsers": "firefox safari", + }, + { + "grp1/grp2/browser_use/grp3/grp4/grp5/year": "2011", + "grp1/grp2/browser_use/grp3/grp4/grp5/browsers": "firefox chrome", + }, + ], + }, + ] + builder = CSVDataFrameBuilder( + self.user.username, + xform.id_string, + split_select_multiples=False, + include_images=False, + ) + temp_file = NamedTemporaryFile(suffix=".csv", delete=False) + builder.export_to(temp_file.name, cursor) + csv_file = open(temp_file.name, "r") + csv_reader = csv.reader(csv_file) + header = next(csv_reader) + expected_header = [ + "name", + "age", + "grp1/grp2/browser_use[1]/grp3/grp4/grp5/year", + "grp1/grp2/browser_use[1]/grp3/grp4/grp5/browsers", 
+ "grp1/grp2/browser_use[2]/grp3/grp4/grp5/year", + "grp1/grp2/browser_use[2]/grp3/grp4/grp5/browsers", + "meta/instanceID", + "_id", + "_uuid", + "_submission_time", + "_date_modified", + "_tags", + "_notes", + "_version", + "_duration", + "_submitted_by", + "_total_media", + "_media_count", + "_media_all_received", + ] + self.assertEqual(header, expected_header) + row = next(csv_reader) + expected_row = [ + "Bob", + "24", + "2010", + "firefox safari", + "2011", + "firefox chrome", + "n/a", + "n/a", + "n/a", + "n/a", + "n/a", + "n/a", + "n/a", + "n/a", + "n/a", + "n/a", + "n/a", + "n/a", + "n/a", + ] + self.assertEqual(row, expected_row) + + csv_file.close() diff --git a/onadata/libs/utils/csv_builder.py b/onadata/libs/utils/csv_builder.py index bf497f8a2f..d3aa644544 100644 --- a/onadata/libs/utils/csv_builder.py +++ b/onadata/libs/utils/csv_builder.py @@ -10,7 +10,7 @@ import unicodecsv as csv from pyxform.question import Question -from pyxform.section import RepeatingSection, Section +from pyxform.section import RepeatingSection, Section, GroupedSection from six import iteritems from onadata.apps.logger.models import OsmData @@ -385,15 +385,19 @@ def _split_select_multiples( if value_select_multiples: record.update( { - choice.replace("/" + name, "/" + label) - if show_choice_labels - else choice: ( - label + ( + choice.replace("/" + name, "/" + label) if show_choice_labels - else record[key].split()[selections.index(choice)] + else choice + ): ( + ( + label + if show_choice_labels + else record[key].split()[selections.index(choice)] + ) + if choice in selections + else None ) - if choice in selections - else None for choice, name, label in choices } ) @@ -402,20 +406,23 @@ def _split_select_multiples( # False and set to True for items in selections record.update( { - choice.replace("/" + name, "/" + label) - if show_choice_labels - else choice: choice in selections + ( + choice.replace("/" + name, "/" + label) + if show_choice_labels + else choice + ): choice 
+ in selections for choice, name, label in choices } ) else: record.update( { - choice.replace("/" + name, "/" + label) - if show_choice_labels - else choice: YES - if choice in selections - else NO + ( + choice.replace("/" + name, "/" + label) + if show_choice_labels + else choice + ): (YES if choice in selections else NO) for choice, name, label in choices } ) @@ -593,7 +600,6 @@ def get_ordered_repeat_value(xpath, repeat_value): # order repeat according to xform order _item = get_ordered_repeat_value(key, item) if key in _item and _item[key] == "n/a": - # See https://github.com/onaio/zebra/issues/6830 # handles the case of a repeat construct in the data but the # form has no repeat construct defined using begin repeat for # example when you have a hidden value that has a repeat_count @@ -701,9 +707,15 @@ def _build_ordered_columns( for child in survey_element.children: if isinstance(child, Section): child_is_repeating = False + + if isinstance(child, RepeatingSection) or ( + isinstance(child, GroupedSection) and is_repeating_section + ): + child_is_repeating = True + if isinstance(child, RepeatingSection): ordered_columns[child.get_abbreviated_xpath()] = [] - child_is_repeating = True + cls._build_ordered_columns(child, ordered_columns, child_is_repeating) elif ( isinstance(child, Question) @@ -729,9 +741,11 @@ def _update_ordered_columns_from_data(self, cursor): if key in self.ordered_columns.keys(): self.ordered_columns[key] = remove_dups_from_list_maintain_order( [ - choice.replace("/" + name, "/" + label) - if self.show_choice_labels - else choice + ( + choice.replace("/" + name, "/" + label) + if self.show_choice_labels + else choice + ) for choice, name, label in choices ] ) From 04f152c6ae1fb4f4699f2e5139ed4de32d219805 Mon Sep 17 00:00:00 2001 From: Eric Musyoka Date: Thu, 8 Feb 2024 14:08:59 +0300 Subject: [PATCH 146/270] Add migration to update old enketo urls --- .../migrations/0014_update_enketo_old_ids.py | 21 +++++++++++++++++++ 1 file changed, 21 
insertions(+) create mode 100644 onadata/apps/main/migrations/0014_update_enketo_old_ids.py diff --git a/onadata/apps/main/migrations/0014_update_enketo_old_ids.py b/onadata/apps/main/migrations/0014_update_enketo_old_ids.py new file mode 100644 index 0000000000..a2670ccec3 --- /dev/null +++ b/onadata/apps/main/migrations/0014_update_enketo_old_ids.py @@ -0,0 +1,21 @@ +# Generated by Django 3.2.23 on 2024-02-08 10:46 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0013_passwordhistory'), + ] + + operations = [ + migrations.RunSQL("UPDATE main_metadata SET data_value = REPLACE(data_value, '#', '') \ + WHERE data_type = 'enketo_url' AND id IN (SELECT id FROM main_metadata \ + WHERE data_type = 'enketo_url' AND UPPER(data_value) LIKE '%#%') ;", + migrations.RunSQL.noop), + migrations.RunSQL("UPDATE main_metadata SET data_value = REPLACE(data_value, '_', 'x') \ + WHERE data_type = 'enketo_url' AND id IN (SELECT id FROM main_metadata \ + WHERE data_type = 'enketo_url' AND UPPER(data_value) LIKE '%\_%');", + migrations.RunSQL.noop) + ] From 96214c7922b7fbcc96f00196acc17c4546bfdb21 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 7 Mar 2024 10:05:56 +0300 Subject: [PATCH 147/270] Add Instance History indexing to checksum & uuid fields Signed-off-by: Kipchirchir Sigei --- ..._instance_history_uuid_and_checksum_idx.py | 40 +++++++++++++++++++ onadata/apps/logger/models/instance.py | 4 +- 2 files changed, 42 insertions(+), 2 deletions(-) create mode 100644 onadata/apps/logger/migrations/0012_add_instance_history_uuid_and_checksum_idx.py diff --git a/onadata/apps/logger/migrations/0012_add_instance_history_uuid_and_checksum_idx.py b/onadata/apps/logger/migrations/0012_add_instance_history_uuid_and_checksum_idx.py new file mode 100644 index 0000000000..6a6e6d37a5 --- /dev/null +++ b/onadata/apps/logger/migrations/0012_add_instance_history_uuid_and_checksum_idx.py @@ -0,0 +1,40 @@ +# Generated by Django 
3.2.23 on 2024-03-07 06:57 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + atomic = False + + dependencies = [ + ('logger', '0011_add_xform_id_instance_id_idx'), + ] + + operations = [ + migrations.SeparateDatabaseAndState( + database_operations=[ + migrations.RunSQL( + sql='CREATE INDEX CONCURRENTLY "logger_inst_hist_checksum_05f7bf_idx" ON "logger_instancehistory" ("checksum");', + reverse_sql='DROP INDEX "logger_inst_hist_checksum_05f7bf_idx";', + ), + migrations.RunSQL( + sql='CREATE INDEX CONCURRENTLY "logger_inst_hist_uuid_f5ae42_idx" ON "logger_instancehistory" ("uuid");', + reverse_sql='DROP INDEX "logger_inst_hist_uuid_f5ae42_idx";', + ), + ], + state_operations=[ + migrations.AddIndex( + model_name='instancehistory', + index=models.Index( + fields=['checksum'], name='logger_inst_hist_checksum_05f7bf_idx' + ), + ), + migrations.AddIndex( + model_name='instancehistory', + index=models.Index( + fields=['uuid'], name='logger_inst_hist_uuid_f5ae42_idx' + ), + ), + ], + ) + ] diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index c33addd711..dfe6337b55 100644 --- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -901,13 +901,13 @@ class Meta: xml = models.TextField() # old instance id - uuid = models.CharField(max_length=249, default="") + uuid = models.CharField(max_length=249, default="", db_index=True) date_created = models.DateTimeField(auto_now_add=True) date_modified = models.DateTimeField(auto_now=True) submission_date = models.DateTimeField(null=True, default=None) geom = models.GeometryCollectionField(null=True) - checksum = models.CharField(max_length=64, null=True, blank=True) + checksum = models.CharField(max_length=64, null=True, blank=True, db_index=True) @property def xform(self): From 7d39096703df46a2c8b29d8bd491b966fbe4f874 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Fri, 8 Mar 2024 10:10:44 +0300 Subject: [PATCH 
148/270] V3.18.2 docker updates (#2563) * Update docker image to latest ubuntu:focal image - ubuntu:focal-20231211 * Remove yarn.lock - it is not in use --- docker/onadata-uwsgi/Dockerfile.ubuntu | 4 +- yarn.lock | 2058 ------------------------ 2 files changed, 2 insertions(+), 2060 deletions(-) delete mode 100644 yarn.lock diff --git a/docker/onadata-uwsgi/Dockerfile.ubuntu b/docker/onadata-uwsgi/Dockerfile.ubuntu index b3c5eda7a8..ccc91edab3 100644 --- a/docker/onadata-uwsgi/Dockerfile.ubuntu +++ b/docker/onadata-uwsgi/Dockerfile.ubuntu @@ -13,9 +13,9 @@ RUN mkdir -m 0600 ~/.ssh && ssh-keyscan github.com >> ~/.ssh/known_hosts # hadolint ignore=DL3013 RUN --mount=type=ssh if [ -n "$optional_packages" ]; then pip install ${optional_packages} ; fi -FROM ubuntu:focal-20231211 +FROM ubuntu:focal-20240123 -ARG release_version=v3.17.1 +ARG release_version=v3.18.2 # Silence configuration prompts ENV DEBIAN_FRONTEND noninteractive diff --git a/yarn.lock b/yarn.lock deleted file mode 100644 index 83cc73a501..0000000000 --- a/yarn.lock +++ /dev/null @@ -1,2058 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
-# yarn lockfile v1 - - -"@bower_components/backbone@jashkenas/backbone#~1.4.1": - version "1.4.1" - resolved "https://codeload.github.com/jashkenas/backbone/tar.gz/9260f3cb43d26b0e185f5800b31d9ae913999a1f" - dependencies: - underscore ">=1.8.3" - -"@bower_components/backgrid-filter@cloudflare/backgrid-filter#~0.3.7": - version "0.3.7" - resolved "https://codeload.github.com/cloudflare/backgrid-filter/tar.gz/744051831fa96b79908471d5a90a8864cb3136ba" - dependencies: - backbone "~1.2.3" - backgrid "~0.3.7" - lunr "^0.7.0" - underscore "^1.8.3" - -"@bower_components/backgrid-paginator@cloudflare/backgrid-paginator#~0.3.9": - version "0.3.9" - resolved "https://codeload.github.com/cloudflare/backgrid-paginator/tar.gz/11b1e9b160bcf3dd543d33439b0035771e3f8f62" - dependencies: - backbone "1.1.2 || 1.2.3 || ~1.3.2" - backbone.paginator "^2.0.5" - backgrid "~0.3.7" - underscore "^1.8.0" - -"@bower_components/backgrid@cloudflare/backgrid#~0.3.8": - version "0.3.8" - resolved "https://codeload.github.com/cloudflare/backgrid/tar.gz/c7518bbbb9aad641073f0b19716e54bb68ac5679" - dependencies: - backbone "1.1.2 || 1.2.3 || ~1.3.2" - underscore "^1.8.0" - -"@bower_components/d3@D3/d3#~7.5.0": - version "7.5.0" - resolved "https://codeload.github.com/D3/d3/tar.gz/d61883547c058d9e19fa04269fec48e2f4f8d494" - dependencies: - d3-array "3" - d3-axis "3" - d3-brush "3" - d3-chord "3" - d3-color "3" - d3-contour "3" - d3-delaunay "6" - d3-dispatch "3" - d3-drag "3" - d3-dsv "3" - d3-ease "3" - d3-fetch "3" - d3-force "3" - d3-format "3" - d3-geo "3" - d3-hierarchy "3" - d3-interpolate "3" - d3-path "3" - d3-polygon "3" - d3-quadtree "3" - d3-random "3" - d3-scale "4" - d3-scale-chromatic "3" - d3-selection "3" - d3-shape "3" - d3-time "3" - d3-time-format "4" - d3-timer "3" - d3-transition "3" - d3-zoom "3" - -"@bower_components/datatables-plugins@datatables/plugins#*": - version "1.13.5" - resolved 
"https://codeload.github.com/datatables/plugins/tar.gz/9047d05de714e8a98a558a3b92462c6097c34c9c" - dependencies: - "@types/jquery" "^3.5.16" - datatables.net "^1.13.2" - -"@bower_components/datatables@datatables/datatables#~1.10.18": - version "1.10.21" - resolved "https://codeload.github.com/datatables/datatables/tar.gz/83e59694a105225ff889ddfa0d723a3ab24fda78" - dependencies: - jquery ">=1.7" - -"@bower_components/dimple@PMSI-AlignAlytics/dimple#~2.3.0": - version "2.3.0" - resolved "https://codeload.github.com/PMSI-AlignAlytics/dimple/tar.gz/65f69857f5e64f3a49f0898fa00799a530607a41" - -"@bower_components/jquery@jquery/jquery#~3.6": - version "3.6.4" - resolved "https://codeload.github.com/jquery/jquery/tar.gz/2f0ac8237db2923ac9171e4d6f00a0361ab829a2" - -"@bower_components/leaflet-dist@leaflet/leaflet#~1.8.0": - version "1.8.0" - resolved "https://codeload.github.com/leaflet/leaflet/tar.gz/8a1ccbe3c821ec501911d7d7b698af4b1636216c" - -"@bower_components/underscore@jashkenas/underscore#~1.13.4": - version "1.13.6" - resolved "https://codeload.github.com/jashkenas/underscore/tar.gz/bd2d35c87620a7da36250a006c97fdae89f4902d" - -"@colors/colors@1.5.0": - version "1.5.0" - resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.5.0.tgz#bb504579c1cae923e6576a4f5da43d25f97bdbd9" - integrity sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ== - -"@sinonjs/commons@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-2.0.0.tgz#fd4ca5b063554307e8327b4564bd56d3b73924a3" - integrity sha512-uLa0j859mMrg2slwQYdO/AkrOfmH+X6LTVmNTS9CqexuE2IvVORIkSpJLqePAbEnKJ77aMmCwr1NUZ57120Xcg== - dependencies: - type-detect "4.0.8" - -"@sinonjs/commons@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-3.0.0.tgz#beb434fe875d965265e04722ccfc21df7f755d72" - integrity sha512-jXBtWAF4vmdNmZgD5FoKsVLv3rPgDnLgPbU84LIJ3otV44vJlDRokVng5v8NFJdCf/da9legHcKaRuZs4L7faA== 
- dependencies: - type-detect "4.0.8" - -"@sinonjs/fake-timers@^10.0.2", "@sinonjs/fake-timers@^10.3.0": - version "10.3.0" - resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz#55fdff1ecab9f354019129daf4df0dd4d923ea66" - integrity sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA== - dependencies: - "@sinonjs/commons" "^3.0.0" - -"@sinonjs/samsam@^8.0.0": - version "8.0.0" - resolved "https://registry.yarnpkg.com/@sinonjs/samsam/-/samsam-8.0.0.tgz#0d488c91efb3fa1442e26abea81759dfc8b5ac60" - integrity sha512-Bp8KUVlLp8ibJZrnvq2foVhP0IVX2CIprMJPK0vqGqgrDa0OHVKeZyBykqskkrdxV6yKBPmGasO8LVjAKR3Gew== - dependencies: - "@sinonjs/commons" "^2.0.0" - lodash.get "^4.4.2" - type-detect "^4.0.8" - -"@sinonjs/text-encoding@^0.7.1": - version "0.7.2" - resolved "https://registry.yarnpkg.com/@sinonjs/text-encoding/-/text-encoding-0.7.2.tgz#5981a8db18b56ba38ef0efb7d995b12aa7b51918" - integrity sha512-sXXKG+uL9IrKqViTtao2Ws6dy0znu9sOaP1di/jKGW1M6VssO8vlpXCQcpZ+jisQ1tTFAC5Jo/EOzFbggBagFQ== - -"@socket.io/component-emitter@~3.1.0": - version "3.1.0" - resolved "https://registry.yarnpkg.com/@socket.io/component-emitter/-/component-emitter-3.1.0.tgz#96116f2a912e0c02817345b3c10751069920d553" - integrity sha512-+9jVqKhRSpsc591z5vX+X5Yyw+he/HCB4iQ/RYxw35CEPaY1gnsNE43nf9n9AaYjAQrTiI/mOwKUKdUs9vf7Xg== - -"@types/cookie@^0.4.1": - version "0.4.1" - resolved "https://registry.yarnpkg.com/@types/cookie/-/cookie-0.4.1.tgz#bfd02c1f2224567676c1545199f87c3a861d878d" - integrity sha512-XW/Aa8APYr6jSVVA1y/DEIZX0/GMKLEVekNG727R8cs56ahETkRAy/3DR7+fJyh7oUgGwNQaRfXCun0+KbWY7Q== - -"@types/cors@^2.8.12": - version "2.8.17" - resolved "https://registry.yarnpkg.com/@types/cors/-/cors-2.8.17.tgz#5d718a5e494a8166f569d986794e49c48b216b2b" - integrity sha512-8CGDvrBj1zgo2qE+oS3pOCyYNqCPryMWY2bGfwA0dcfopWGgxs+78df0Rs3rc9THP4JkOhLsAa+15VdpAqkcUA== - dependencies: - "@types/node" "*" - -"@types/jquery@^3.5.16": - version "3.5.29" - 
resolved "https://registry.yarnpkg.com/@types/jquery/-/jquery-3.5.29.tgz#3c06a1f519cd5fc3a7a108971436c00685b5dcea" - integrity sha512-oXQQC9X9MOPRrMhPHHOsXqeQDnWeCDT3PelUIg/Oy8FAbzSZtFHRjc7IpbfFVmpLtJ+UOoywpRsuO5Jxjybyeg== - dependencies: - "@types/sizzle" "*" - -"@types/minimatch@^3.0.3": - version "3.0.5" - resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.5.tgz#1001cc5e6a3704b83c236027e77f2f58ea010f40" - integrity sha512-Klz949h02Gz2uZCMGwDUSDS1YBlTdDDgbWHi+81l29tQALUtvz4rAYi5uoVhE5Lagoq6DeqAUlbrHvW/mXDgdQ== - -"@types/node@*", "@types/node@>=10.0.0": - version "20.10.5" - resolved "https://registry.yarnpkg.com/@types/node/-/node-20.10.5.tgz#47ad460b514096b7ed63a1dae26fad0914ed3ab2" - integrity sha512-nNPsNE65wjMxEKI93yOP+NPGGBJz/PoN3kZsVLee0XMiJolxSekEVD8wRwBUBqkwc7UWop0edW50yrCQW4CyRw== - dependencies: - undici-types "~5.26.4" - -"@types/sizzle@*": - version "2.3.8" - resolved "https://registry.yarnpkg.com/@types/sizzle/-/sizzle-2.3.8.tgz#518609aefb797da19bf222feb199e8f653ff7627" - integrity sha512-0vWLNK2D5MT9dg0iOo8GlKguPAU02QjmZitPEsXRuJXU/OGIOt9vT9Fc26wtYuavLxtO45v9PGleoL9Z0k1LHg== - -abbrev@1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" - integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== - -accepts@~1.3.4: - version "1.3.8" - resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" - integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== - dependencies: - mime-types "~2.1.34" - negotiator "0.6.3" - -ansi-regex@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" - integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== - -ansi-styles@^4.0.0, 
ansi-styles@^4.1.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" - integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== - dependencies: - color-convert "^2.0.1" - -anymatch@~3.1.2: - version "3.1.3" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e" - integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw== - dependencies: - normalize-path "^3.0.0" - picomatch "^2.0.4" - -argparse@^1.0.7: - version "1.0.10" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" - integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== - dependencies: - sprintf-js "~1.0.2" - -array-differ@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/array-differ/-/array-differ-3.0.0.tgz#3cbb3d0f316810eafcc47624734237d6aee4ae6b" - integrity sha512-THtfYS6KtME/yIAhKjZ2ul7XI96lQGHRputJQHO80LAWQnuGP4iCIN8vdMRboGbIEYBwU33q8Tch1os2+X0kMg== - -array-each@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/array-each/-/array-each-1.0.1.tgz#a794af0c05ab1752846ee753a1f211a05ba0c44f" - integrity sha512-zHjL5SZa68hkKHBFBK6DJCTtr9sfTCPCaph/L7tMSLcTFgy+zX7E+6q5UArbtOtMBCtxdICpfTCspRse+ywyXA== - -array-slice@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/array-slice/-/array-slice-1.1.0.tgz#e368ea15f89bc7069f7ffb89aec3a6c7d4ac22d4" - integrity sha512-B1qMD3RBP7O8o0H2KbrXDyB0IccejMF15+87Lvlor12ONPRHP6gTjXMNkt/d3ZuOGbAe66hFmaCfECI24Ufp6w== - -array-union@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" - integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== 
- -arrify@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/arrify/-/arrify-2.0.1.tgz#c9655e9331e0abcd588d2a7cad7e9956f66701fa" - integrity sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug== - -async@~3.2.0: - version "3.2.5" - resolved "https://registry.yarnpkg.com/async/-/async-3.2.5.tgz#ebd52a8fdaf7a2289a24df399f8d8485c8a46b66" - integrity sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg== - -backbone.paginator@^2.0.5: - version "2.0.8" - resolved "https://registry.yarnpkg.com/backbone.paginator/-/backbone.paginator-2.0.8.tgz#3a7e34f0a8b97b096333587ed3048556b9eafdcb" - integrity sha512-8XS2CTbjnwMbJ/3Traa1te2RPecOGbZ9tc52T89pzo6NXlVEJDFnC++dp7CQLBUZpgk3g0veX8mUbEF4wbD2NQ== - dependencies: - backbone "1.1.2 || 1.2.3 || ^1.3.2" - underscore "^1.8.0" - -"backbone@1.1.2 || 1.2.3 || ^1.3.2": - version "1.5.0" - resolved "https://registry.yarnpkg.com/backbone/-/backbone-1.5.0.tgz#3f20e4c6feb22f896131bcc58eb599b9555e8744" - integrity sha512-RPKlstw5NW+rD2X4PnEnvgLhslRnXOugXw2iBloHkPMgOxvakP1/A+tZIGM3qCm8uvZeEf8zMm0uvcK1JwL+IA== - dependencies: - underscore ">=1.8.3" - -"backbone@1.1.2 || 1.2.3 || ~1.3.2": - version "1.3.3" - resolved "https://registry.yarnpkg.com/backbone/-/backbone-1.3.3.tgz#4cc80ea7cb1631ac474889ce40f2f8bc683b2999" - integrity sha512-aK+k3TiU4tQDUrRCymDDE7XDFnMVuyE6zbZ4JX7mb4pJbQTVOH997/kyBzb8wB2s5Y/Oh7EUfj+sZhwRPxWwow== - dependencies: - underscore ">=1.8.3" - -backbone@~1.2.3: - version "1.2.3" - resolved "https://registry.yarnpkg.com/backbone/-/backbone-1.2.3.tgz#c22cfd07fc86ebbeae61d18929ed115e999d65b9" - integrity sha512-1/eXj4agG79UDN7TWnZXcGD6BJrBwLZKCX7zYcBIy9jWf4mrtVkw7IE1VOYFnrKahsmPF9L55Tib9IQRvk027w== - dependencies: - underscore ">=1.7.0" - -backgrid@~0.3.7: - version "0.3.8" - resolved "https://registry.yarnpkg.com/backgrid/-/backgrid-0.3.8.tgz#7d26816742d72c859cad39b13f19c9f27baffed7" - integrity 
sha512-Klzo941ahoj8Kqd0tRsau+VfXddV3YnQTwb6wVwIaaQxoJ9ORykQy2MNit1MUBnZO6IValYJPvCQyvZhnV6Lfg== - dependencies: - backbone "1.1.2 || 1.2.3 || ~1.3.2" - underscore "^1.8.0" - -balanced-match@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" - integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== - -base64id@2.0.0, base64id@~2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/base64id/-/base64id-2.0.0.tgz#2770ac6bc47d312af97a8bf9a634342e0cd25cb6" - integrity sha512-lGe34o6EHj9y3Kts9R4ZYs/Gr+6N7MCaMlIFA3F1R2O5/m7K06AxfSeO5530PEERE6/WyEg3lsuyw4GHlPZHog== - -binary-extensions@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" - integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== - -body-parser@^1.19.0: - version "1.20.2" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.2.tgz#6feb0e21c4724d06de7ff38da36dad4f57a747fd" - integrity sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA== - dependencies: - bytes "3.1.2" - content-type "~1.0.5" - debug "2.6.9" - depd "2.0.0" - destroy "1.2.0" - http-errors "2.0.0" - iconv-lite "0.4.24" - on-finished "2.4.1" - qs "6.11.0" - raw-body "2.5.2" - type-is "~1.6.18" - unpipe "1.0.0" - -brace-expansion@^1.1.7: - version "1.1.11" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" - integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== - dependencies: - balanced-match "^1.0.0" - concat-map "0.0.1" - -braces@^3.0.2, braces@~3.0.2: - version "3.0.2" - resolved 
"https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" - integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== - dependencies: - fill-range "^7.0.1" - -bytes@3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" - integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== - -call-bind@^1.0.0: - version "1.0.5" - resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.5.tgz#6fa2b7845ce0ea49bf4d8b9ef64727a2c2e2e513" - integrity sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ== - dependencies: - function-bind "^1.1.2" - get-intrinsic "^1.2.1" - set-function-length "^1.1.1" - -chalk@~4.1.0: - version "4.1.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" - integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== - dependencies: - ansi-styles "^4.1.0" - supports-color "^7.1.0" - -chokidar@^3.5.1: - version "3.5.3" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" - integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== - dependencies: - anymatch "~3.1.2" - braces "~3.0.2" - glob-parent "~5.1.2" - is-binary-path "~2.1.0" - is-glob "~4.0.1" - normalize-path "~3.0.0" - readdirp "~3.6.0" - optionalDependencies: - fsevents "~2.3.2" - -cliui@^7.0.2: - version "7.0.4" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" - integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== - dependencies: - string-width "^4.2.0" - strip-ansi "^6.0.0" - wrap-ansi "^7.0.0" - -color-convert@^2.0.1: - 
version "2.0.1" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" - integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== - dependencies: - color-name "~1.1.4" - -color-name@~1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" - integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== - -colors@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/colors/-/colors-1.1.2.tgz#168a4701756b6a7f51a12ce0c97bfa28c084ed63" - integrity sha512-ENwblkFQpqqia6b++zLD/KUWafYlVY/UNnAp7oz7LY7E924wmpye416wBOmvv/HMWzl8gL1kJlfvId/1Dg176w== - -commander@7: - version "7.2.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" - integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== - -concat-map@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" - integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== - -connect@^3.7.0: - version "3.7.0" - resolved "https://registry.yarnpkg.com/connect/-/connect-3.7.0.tgz#5d49348910caa5e07a01800b030d0c35f20484f8" - integrity sha512-ZqRXc+tZukToSNmh5C2iWMSoV3X1YUcPbqEM4DkEG5tNQXrQUZCNVGGv3IuicnkMtPfGf3Xtp8WCXs295iQ1pQ== - dependencies: - debug "2.6.9" - finalhandler "1.1.2" - parseurl "~1.3.3" - utils-merge "1.0.1" - -content-type@~1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.5.tgz#8b773162656d1d1086784c8f23a54ce6d73d7918" - integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA== - -cookie@~0.4.1: - version "0.4.2" - resolved 
"https://registry.yarnpkg.com/cookie/-/cookie-0.4.2.tgz#0e41f24de5ecf317947c82fc789e06a884824432" - integrity sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA== - -cors@~2.8.5: - version "2.8.5" - resolved "https://registry.yarnpkg.com/cors/-/cors-2.8.5.tgz#eac11da51592dd86b9f06f6e7ac293b3df875d29" - integrity sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g== - dependencies: - object-assign "^4" - vary "^1" - -custom-event@~1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/custom-event/-/custom-event-1.0.1.tgz#5d02a46850adf1b4a317946a3928fccb5bfd0425" - integrity sha512-GAj5FOq0Hd+RsCGVJxZuKaIDXDf3h6GQoNEjFgbLLI/trgtavwUbSnZ5pVfg27DVCaWjIohryS0JFwIJyT2cMg== - -"d3-array@2 - 3", "d3-array@2.10.0 - 3", "d3-array@2.5.0 - 3", d3-array@3: - version "3.2.4" - resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-3.2.4.tgz#15fec33b237f97ac5d7c986dc77da273a8ed0bb5" - integrity sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg== - dependencies: - internmap "1 - 2" - -d3-axis@3: - version "3.0.0" - resolved "https://registry.yarnpkg.com/d3-axis/-/d3-axis-3.0.0.tgz#c42a4a13e8131d637b745fc2973824cfeaf93322" - integrity sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw== - -d3-brush@3: - version "3.0.0" - resolved "https://registry.yarnpkg.com/d3-brush/-/d3-brush-3.0.0.tgz#6f767c4ed8dcb79de7ede3e1c0f89e63ef64d31c" - integrity sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ== - dependencies: - d3-dispatch "1 - 3" - d3-drag "2 - 3" - d3-interpolate "1 - 3" - d3-selection "3" - d3-transition "3" - -d3-chord@3: - version "3.0.1" - resolved "https://registry.yarnpkg.com/d3-chord/-/d3-chord-3.0.1.tgz#d156d61f485fce8327e6abf339cb41d8cbba6966" - integrity 
sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g== - dependencies: - d3-path "1 - 3" - -"d3-color@1 - 3", d3-color@3: - version "3.1.0" - resolved "https://registry.yarnpkg.com/d3-color/-/d3-color-3.1.0.tgz#395b2833dfac71507f12ac2f7af23bf819de24e2" - integrity sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA== - -d3-contour@3: - version "3.1.0" - resolved "https://registry.yarnpkg.com/d3-contour/-/d3-contour-3.1.0.tgz#708484a5d89be9558dfdda4a95b3df2fdebb65ab" - integrity sha512-vV3xtwrYK5p1J4vyukr70m57mtFTEQYqoaDC1ylBfht/hkdUF0nfWZ1b3V2EPBUVkUkoqq5/fbRoBImBWJgOsg== - dependencies: - d3-array "2 - 3" - -d3-delaunay@6: - version "6.0.4" - resolved "https://registry.yarnpkg.com/d3-delaunay/-/d3-delaunay-6.0.4.tgz#98169038733a0a5babbeda55054f795bb9e4a58b" - integrity sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A== - dependencies: - delaunator "5" - -"d3-dispatch@1 - 3", d3-dispatch@3: - version "3.0.1" - resolved "https://registry.yarnpkg.com/d3-dispatch/-/d3-dispatch-3.0.1.tgz#5fc75284e9c2375c36c839411a0cf550cbfc4d5e" - integrity sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg== - -"d3-drag@2 - 3", d3-drag@3: - version "3.0.0" - resolved "https://registry.yarnpkg.com/d3-drag/-/d3-drag-3.0.0.tgz#994aae9cd23c719f53b5e10e3a0a6108c69607ba" - integrity sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg== - dependencies: - d3-dispatch "1 - 3" - d3-selection "3" - -"d3-dsv@1 - 3", d3-dsv@3: - version "3.0.1" - resolved "https://registry.yarnpkg.com/d3-dsv/-/d3-dsv-3.0.1.tgz#c63af978f4d6a0d084a52a673922be2160789b73" - integrity sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q== - dependencies: - commander "7" - iconv-lite "0.6" - rw "1" - -"d3-ease@1 - 3", d3-ease@3: - version "3.0.1" - resolved 
"https://registry.yarnpkg.com/d3-ease/-/d3-ease-3.0.1.tgz#9658ac38a2140d59d346160f1f6c30fda0bd12f4" - integrity sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w== - -d3-fetch@3: - version "3.0.1" - resolved "https://registry.yarnpkg.com/d3-fetch/-/d3-fetch-3.0.1.tgz#83141bff9856a0edb5e38de89cdcfe63d0a60a22" - integrity sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw== - dependencies: - d3-dsv "1 - 3" - -d3-force@3: - version "3.0.0" - resolved "https://registry.yarnpkg.com/d3-force/-/d3-force-3.0.0.tgz#3e2ba1a61e70888fe3d9194e30d6d14eece155c4" - integrity sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg== - dependencies: - d3-dispatch "1 - 3" - d3-quadtree "1 - 3" - d3-timer "1 - 3" - -"d3-format@1 - 3", d3-format@3: - version "3.1.0" - resolved "https://registry.yarnpkg.com/d3-format/-/d3-format-3.1.0.tgz#9260e23a28ea5cb109e93b21a06e24e2ebd55641" - integrity sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA== - -d3-geo@3: - version "3.1.0" - resolved "https://registry.yarnpkg.com/d3-geo/-/d3-geo-3.1.0.tgz#74fd54e1f4cebd5185ac2039217a98d39b0a4c0e" - integrity sha512-JEo5HxXDdDYXCaWdwLRt79y7giK8SbhZJbFWXqbRTolCHFI5jRqteLzCsq51NKbUoX0PjBVSohxrx+NoOUujYA== - dependencies: - d3-array "2.5.0 - 3" - -d3-hierarchy@3: - version "3.1.2" - resolved "https://registry.yarnpkg.com/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz#b01cd42c1eed3d46db77a5966cf726f8c09160c6" - integrity sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA== - -"d3-interpolate@1 - 3", "d3-interpolate@1.2.0 - 3", d3-interpolate@3: - version "3.0.1" - resolved "https://registry.yarnpkg.com/d3-interpolate/-/d3-interpolate-3.0.1.tgz#3c47aa5b32c5b3dfb56ef3fd4342078a632b400d" - integrity sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g== - dependencies: - 
d3-color "1 - 3" - -"d3-path@1 - 3", d3-path@3, d3-path@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/d3-path/-/d3-path-3.1.0.tgz#22df939032fb5a71ae8b1800d61ddb7851c42526" - integrity sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ== - -d3-polygon@3: - version "3.0.1" - resolved "https://registry.yarnpkg.com/d3-polygon/-/d3-polygon-3.0.1.tgz#0b45d3dd1c48a29c8e057e6135693ec80bf16398" - integrity sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg== - -"d3-quadtree@1 - 3", d3-quadtree@3: - version "3.0.1" - resolved "https://registry.yarnpkg.com/d3-quadtree/-/d3-quadtree-3.0.1.tgz#6dca3e8be2b393c9a9d514dabbd80a92deef1a4f" - integrity sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw== - -d3-random@3: - version "3.0.1" - resolved "https://registry.yarnpkg.com/d3-random/-/d3-random-3.0.1.tgz#d4926378d333d9c0bfd1e6fa0194d30aebaa20f4" - integrity sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ== - -d3-scale-chromatic@3: - version "3.0.0" - resolved "https://registry.yarnpkg.com/d3-scale-chromatic/-/d3-scale-chromatic-3.0.0.tgz#15b4ceb8ca2bb0dcb6d1a641ee03d59c3b62376a" - integrity sha512-Lx9thtxAKrO2Pq6OO2Ua474opeziKr279P/TKZsMAhYyNDD3EnCffdbgeSYN5O7m2ByQsxtuP2CSDczNUIZ22g== - dependencies: - d3-color "1 - 3" - d3-interpolate "1 - 3" - -d3-scale@4: - version "4.0.2" - resolved "https://registry.yarnpkg.com/d3-scale/-/d3-scale-4.0.2.tgz#82b38e8e8ff7080764f8dcec77bd4be393689396" - integrity sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ== - dependencies: - d3-array "2.10.0 - 3" - d3-format "1 - 3" - d3-interpolate "1.2.0 - 3" - d3-time "2.1.1 - 3" - d3-time-format "2 - 4" - -"d3-selection@2 - 3", d3-selection@3: - version "3.0.0" - resolved 
"https://registry.yarnpkg.com/d3-selection/-/d3-selection-3.0.0.tgz#c25338207efa72cc5b9bd1458a1a41901f1e1b31" - integrity sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ== - -d3-shape@3: - version "3.2.0" - resolved "https://registry.yarnpkg.com/d3-shape/-/d3-shape-3.2.0.tgz#a1a839cbd9ba45f28674c69d7f855bcf91dfc6a5" - integrity sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA== - dependencies: - d3-path "^3.1.0" - -"d3-time-format@2 - 4", d3-time-format@4: - version "4.1.0" - resolved "https://registry.yarnpkg.com/d3-time-format/-/d3-time-format-4.1.0.tgz#7ab5257a5041d11ecb4fe70a5c7d16a195bb408a" - integrity sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg== - dependencies: - d3-time "1 - 3" - -"d3-time@1 - 3", "d3-time@2.1.1 - 3", d3-time@3: - version "3.1.0" - resolved "https://registry.yarnpkg.com/d3-time/-/d3-time-3.1.0.tgz#9310db56e992e3c0175e1ef385e545e48a9bb5c7" - integrity sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q== - dependencies: - d3-array "2 - 3" - -"d3-timer@1 - 3", d3-timer@3: - version "3.0.1" - resolved "https://registry.yarnpkg.com/d3-timer/-/d3-timer-3.0.1.tgz#6284d2a2708285b1abb7e201eda4380af35e63b0" - integrity sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA== - -"d3-transition@2 - 3", d3-transition@3: - version "3.0.1" - resolved "https://registry.yarnpkg.com/d3-transition/-/d3-transition-3.0.1.tgz#6869fdde1448868077fdd5989200cb61b2a1645f" - integrity sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w== - dependencies: - d3-color "1 - 3" - d3-dispatch "1 - 3" - d3-ease "1 - 3" - d3-interpolate "1 - 3" - d3-timer "1 - 3" - -d3-zoom@3: - version "3.0.0" - resolved "https://registry.yarnpkg.com/d3-zoom/-/d3-zoom-3.0.0.tgz#d13f4165c73217ffeaa54295cd6969b3e7aee8f3" - integrity 
sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw== - dependencies: - d3-dispatch "1 - 3" - d3-drag "2 - 3" - d3-interpolate "1 - 3" - d3-selection "2 - 3" - d3-transition "2 - 3" - -datatables.net@^1.13.2: - version "1.13.8" - resolved "https://registry.yarnpkg.com/datatables.net/-/datatables.net-1.13.8.tgz#05a2fb5a036b0b65b66d1bb1eae0ba018aaea8a3" - integrity sha512-2pDamr+GUwPTby2OgriVB9dR9ftFKD2AQyiuCXzZIiG4d9KkKFQ7gqPfNmG7uj9Tc5kDf+rGj86do4LAb/V71g== - dependencies: - jquery ">=1.7" - -date-format@^4.0.14: - version "4.0.14" - resolved "https://registry.yarnpkg.com/date-format/-/date-format-4.0.14.tgz#7a8e584434fb169a521c8b7aa481f355810d9400" - integrity sha512-39BOQLs9ZjKh0/patS9nrT8wc3ioX3/eA/zgbKNopnF2wCqJEoxywwwElATYvRsXdnOxA/OQeQoFZ3rFjVajhg== - -dateformat@~4.6.2: - version "4.6.3" - resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-4.6.3.tgz#556fa6497e5217fedb78821424f8a1c22fa3f4b5" - integrity sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA== - -debug@2.6.9: - version "2.6.9" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" - integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== - dependencies: - ms "2.0.0" - -debug@^4.3.4, debug@~4.3.1, debug@~4.3.2: - version "4.3.4" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" - integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== - dependencies: - ms "2.1.2" - -define-data-property@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.1.tgz#c35f7cd0ab09883480d12ac5cb213715587800b3" - integrity sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ== - dependencies: - get-intrinsic "^1.2.1" - 
gopd "^1.0.1" - has-property-descriptors "^1.0.0" - -delaunator@5: - version "5.0.0" - resolved "https://registry.yarnpkg.com/delaunator/-/delaunator-5.0.0.tgz#60f052b28bd91c9b4566850ebf7756efe821d81b" - integrity sha512-AyLvtyJdbv/U1GkiS6gUUzclRoAY4Gs75qkMygJJhU75LW4DNuSF2RMzpxs9jw9Oz1BobHjTdkG3zdP55VxAqw== - dependencies: - robust-predicates "^3.0.0" - -depd@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" - integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== - -destroy@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" - integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== - -detect-file@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7" - integrity sha512-DtCOLG98P007x7wiiOmfI0fi3eIKyWiLTGJ2MDnVi/E04lWGbf+JzrRHMm0rgIIZJGtHpKpbVgLWHrv8xXpc3Q== - -di@^0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/di/-/di-0.0.1.tgz#806649326ceaa7caa3306d75d985ea2748ba913c" - integrity sha512-uJaamHkagcZtHPqCIHZxnFrXlunQXgBOsZSUOWwFw31QJCAbyTBoHMW75YOTur5ZNx8pIeAKgf6GWIgaqqiLhA== - -diff@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/diff/-/diff-5.1.0.tgz#bc52d298c5ea8df9194800224445ed43ffc87e40" - integrity sha512-D+mk+qE8VC/PAUrlAU34N+VfXev0ghe5ywmpqrawphmVZc1bEfn56uo9qpyGp1p4xpzOHkSW4ztBd6L7Xx4ACw== - -dom-serialize@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/dom-serialize/-/dom-serialize-2.2.1.tgz#562ae8999f44be5ea3076f5419dcd59eb43ac95b" - integrity sha512-Yra4DbvoW7/Z6LBN560ZwXMjoNOSAN2wRsKFGc4iBeso+mpIA6qj1vfdf9HpMaKAqG6wXTy+1SYEzmNpKXOSsQ== - dependencies: - custom-event "~1.0.0" - ent "~2.2.0" - extend "^3.0.0" - void-elements "^2.0.0" - 
-ee-first@1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" - integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== - -emoji-regex@^8.0.0: - version "8.0.0" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" - integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== - -encodeurl@~1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" - integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== - -engine.io-parser@~5.2.1: - version "5.2.1" - resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-5.2.1.tgz#9f213c77512ff1a6cc0c7a86108a7ffceb16fcfb" - integrity sha512-9JktcM3u18nU9N2Lz3bWeBgxVgOKpw7yhRaoxQA3FUDZzzw+9WlA6p4G4u0RixNkg14fH7EfEc/RhpurtiROTQ== - -engine.io@~6.5.2: - version "6.5.4" - resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-6.5.4.tgz#6822debf324e781add2254e912f8568508850cdc" - integrity sha512-KdVSDKhVKyOi+r5uEabrDLZw2qXStVvCsEB/LN3mw4WFi6Gx50jTyuxYVCwAAC0U46FdnzP/ScKRBTXb/NiEOg== - dependencies: - "@types/cookie" "^0.4.1" - "@types/cors" "^2.8.12" - "@types/node" ">=10.0.0" - accepts "~1.3.4" - base64id "2.0.0" - cookie "~0.4.1" - cors "~2.8.5" - debug "~4.3.1" - engine.io-parser "~5.2.1" - ws "~8.11.0" - -ent@~2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/ent/-/ent-2.2.0.tgz#e964219325a21d05f44466a2f686ed6ce5f5dd1d" - integrity sha512-GHrMyVZQWvTIdDtpiEXdHZnFQKzeO09apj8Cbl4pKWy4i0Oprcq17usfDt5aO63swf0JOeMWjWQE/LzgSRuWpA== - -escalade@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" - integrity 
sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== - -escape-html@~1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" - integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== - -esprima@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" - integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== - -eventemitter2@~0.4.13: - version "0.4.14" - resolved "https://registry.yarnpkg.com/eventemitter2/-/eventemitter2-0.4.14.tgz#8f61b75cde012b2e9eb284d4545583b5643b61ab" - integrity sha512-K7J4xq5xAD5jHsGM5ReWXRTFa3JRGofHiMcVgQ8PRwgWxzjHpMWCIzsmyf60+mh8KLsqYPcjUMa0AC4hd6lPyQ== - -eventemitter3@^4.0.0: - version "4.0.7" - resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" - integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== - -exit@~0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" - integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== - -expand-tilde@^2.0.0, expand-tilde@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502" - integrity sha512-A5EmesHW6rfnZ9ysHQjPdJRni0SRar0tjtG5MNtm9n5TUvsYU8oozprtRD4AqHxcZWWlVuAmQo2nWKfN9oyjTw== - dependencies: - homedir-polyfill "^1.0.1" - -extend@^3.0.0, extend@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" - integrity 
sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== - -fill-range@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" - integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== - dependencies: - to-regex-range "^5.0.1" - -finalhandler@1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d" - integrity sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA== - dependencies: - debug "2.6.9" - encodeurl "~1.0.2" - escape-html "~1.0.3" - on-finished "~2.3.0" - parseurl "~1.3.3" - statuses "~1.5.0" - unpipe "~1.0.0" - -find-up@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" - integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== - dependencies: - locate-path "^3.0.0" - -findup-sync@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-4.0.0.tgz#956c9cdde804052b881b428512905c4a5f2cdef0" - integrity sha512-6jvvn/12IC4quLBL1KNokxC7wWTvYncaVUYSoxWw7YykPLuRrnv4qdHcSOywOI5RpkOVGeQRtWM8/q+G6W6qfQ== - dependencies: - detect-file "^1.0.0" - is-glob "^4.0.0" - micromatch "^4.0.2" - resolve-dir "^1.0.1" - -findup-sync@~5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-5.0.0.tgz#54380ad965a7edca00cc8f63113559aadc541bd2" - integrity sha512-MzwXju70AuyflbgeOhzvQWAvvQdo1XL0A9bVvlXsYcFEBM87WR4OakL4OfZq+QRmr+duJubio+UtNQCPsVESzQ== - dependencies: - detect-file "^1.0.0" - is-glob "^4.0.3" - micromatch "^4.0.4" - resolve-dir "^1.0.1" - -fined@^1.2.0: - version "1.2.0" - resolved 
"https://registry.yarnpkg.com/fined/-/fined-1.2.0.tgz#d00beccf1aa2b475d16d423b0238b713a2c4a37b" - integrity sha512-ZYDqPLGxDkDhDZBjZBb+oD1+j0rA4E0pXY50eplAAOPg2N/gUBSSk5IM1/QhPfyVo19lJ+CvXpqfvk+b2p/8Ng== - dependencies: - expand-tilde "^2.0.2" - is-plain-object "^2.0.3" - object.defaults "^1.1.0" - object.pick "^1.2.0" - parse-filepath "^1.0.1" - -flagged-respawn@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/flagged-respawn/-/flagged-respawn-1.0.1.tgz#e7de6f1279ddd9ca9aac8a5971d618606b3aab41" - integrity sha512-lNaHNVymajmk0OJMBn8fVUAU1BtDeKIqKoVhk4xAALB57aALg6b4W0MfJ/cUE0g9YBXy5XhSlPIpYIJ7HaY/3Q== - -flatted@^3.2.7: - version "3.2.9" - resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.9.tgz#7eb4c67ca1ba34232ca9d2d93e9886e611ad7daf" - integrity sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ== - -follow-redirects@^1.0.0: - version "1.15.3" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.3.tgz#fe2f3ef2690afce7e82ed0b44db08165b207123a" - integrity sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q== - -for-in@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" - integrity sha512-7EwmXrOjyL+ChxMhmG5lnW9MPt1aIeZEwKhQzoBUdTV0N3zuwWDZYVJatDvZ2OyzPUvdIAZDsCetk3coyMfcnQ== - -for-own@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/for-own/-/for-own-1.0.0.tgz#c63332f415cedc4b04dbfe70cf836494c53cb44b" - integrity sha512-0OABksIGrxKK8K4kynWkQ7y1zounQxP+CWnyclVwj81KW3vlLlGUx57DKGcP/LH216GzqnstnPocF16Nxs0Ycg== - dependencies: - for-in "^1.0.1" - -fs-extra@^8.1.0: - version "8.1.0" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-8.1.0.tgz#49d43c45a88cd9677668cb7be1b46efdb8d2e1c0" - integrity sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g== - dependencies: - graceful-fs 
"^4.2.0" - jsonfile "^4.0.0" - universalify "^0.1.0" - -fs.realpath@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" - integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== - -fsevents@~2.3.2: - version "2.3.3" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" - integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== - -function-bind@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" - integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== - -get-caller-file@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" - integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== - -get-intrinsic@^1.0.2, get-intrinsic@^1.1.3, get-intrinsic@^1.2.1, get-intrinsic@^1.2.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.2.tgz#281b7622971123e1ef4b3c90fd7539306da93f3b" - integrity sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA== - dependencies: - function-bind "^1.1.2" - has-proto "^1.0.1" - has-symbols "^1.0.3" - hasown "^2.0.0" - -getobject@~1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/getobject/-/getobject-1.0.2.tgz#25ec87a50370f6dcc3c6ba7ef43c4c16215c4c89" - integrity sha512-2zblDBaFcb3rB4rF77XVnuINOE2h2k/OnqXAiy0IrTxUfV1iFp3la33oAQVY9pCpWU268WFYVt2t71hlMuLsOg== - -glob-parent@~5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" 
- integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== - dependencies: - is-glob "^4.0.1" - -glob@^7.1.3, glob@^7.1.7: - version "7.2.3" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" - integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.1.1" - once "^1.3.0" - path-is-absolute "^1.0.0" - -glob@~7.1.6: - version "7.1.7" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.7.tgz#3b193e9233f01d42d0b3f78294bbeeb418f94a90" - integrity sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ== - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.0.4" - once "^1.3.0" - path-is-absolute "^1.0.0" - -global-modules@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-1.0.0.tgz#6d770f0eb523ac78164d72b5e71a8877265cc3ea" - integrity sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg== - dependencies: - global-prefix "^1.0.1" - is-windows "^1.0.1" - resolve-dir "^1.0.0" - -global-prefix@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-1.0.2.tgz#dbf743c6c14992593c655568cb66ed32c0122ebe" - integrity sha512-5lsx1NUDHtSjfg0eHlmYvZKv8/nVqX4ckFbM+FrGcQ+04KWcWFo9P5MxPZYSzUvyzmdTbI7Eix8Q4IbELDqzKg== - dependencies: - expand-tilde "^2.0.2" - homedir-polyfill "^1.0.1" - ini "^1.3.4" - is-windows "^1.0.1" - which "^1.2.14" - -gopd@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c" - integrity sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA== - dependencies: - get-intrinsic "^1.1.3" - -graceful-fs@^4.1.6, 
graceful-fs@^4.2.0, graceful-fs@^4.2.6: - version "4.2.11" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" - integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== - -grunt-cli@~1.4.3: - version "1.4.3" - resolved "https://registry.yarnpkg.com/grunt-cli/-/grunt-cli-1.4.3.tgz#22c9f1a3d2780bf9b0d206e832e40f8f499175ff" - integrity sha512-9Dtx/AhVeB4LYzsViCjUQkd0Kw0McN2gYpdmGYKtE2a5Yt7v1Q+HYZVWhqXc/kGnxlMtqKDxSwotiGeFmkrCoQ== - dependencies: - grunt-known-options "~2.0.0" - interpret "~1.1.0" - liftup "~3.0.1" - nopt "~4.0.1" - v8flags "~3.2.0" - -grunt-karma@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/grunt-karma/-/grunt-karma-4.0.2.tgz#9a8b7b4ea4b0b537c1412d51477181d55bf96f09" - integrity sha512-4+iBBkXZjHHMDAG5kpHCdDUqlSEBJ6sqouLMRf0p+QB8wGMs300DtaCQphHqd7pM3gpXoGVT3yRRsT7KOZpJMA== - dependencies: - lodash "^4.17.10" - -grunt-known-options@~2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/grunt-known-options/-/grunt-known-options-2.0.0.tgz#cac641e897f9a0a680b8c9839803d35f3325103c" - integrity sha512-GD7cTz0I4SAede1/+pAbmJRG44zFLPipVtdL9o3vqx9IEyb7b4/Y3s7r6ofI3CchR5GvYJ+8buCSioDv5dQLiA== - -grunt-legacy-log-utils@~2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/grunt-legacy-log-utils/-/grunt-legacy-log-utils-2.1.0.tgz#49a8c7dc74051476dcc116c32faf9db8646856ef" - integrity sha512-lwquaPXJtKQk0rUM1IQAop5noEpwFqOXasVoedLeNzaibf/OPWjKYvvdqnEHNmU+0T0CaReAXIbGo747ZD+Aaw== - dependencies: - chalk "~4.1.0" - lodash "~4.17.19" - -grunt-legacy-log@~3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/grunt-legacy-log/-/grunt-legacy-log-3.0.0.tgz#1c6eaf92371ea415af31ea84ce50d434ef6d39c4" - integrity sha512-GHZQzZmhyq0u3hr7aHW4qUH0xDzwp2YXldLPZTCjlOeGscAOWWPftZG3XioW8MasGp+OBRIu39LFx14SLjXRcA== - dependencies: - colors "~1.1.2" - grunt-legacy-log-utils "~2.1.0" - hooker "~0.2.3" - lodash 
"~4.17.19" - -grunt-legacy-util@~2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/grunt-legacy-util/-/grunt-legacy-util-2.0.1.tgz#0f929d13a2faf9988c9917c82bff609e2d9ba255" - integrity sha512-2bQiD4fzXqX8rhNdXkAywCadeqiPiay0oQny77wA2F3WF4grPJXCvAcyoWUJV+po/b15glGkxuSiQCK299UC2w== - dependencies: - async "~3.2.0" - exit "~0.1.2" - getobject "~1.0.0" - hooker "~0.2.3" - lodash "~4.17.21" - underscore.string "~3.3.5" - which "~2.0.2" - -grunt@^1.6.1: - version "1.6.1" - resolved "https://registry.yarnpkg.com/grunt/-/grunt-1.6.1.tgz#0b4dd1524f26676dcf45d8f636b8d9061a8ede16" - integrity sha512-/ABUy3gYWu5iBmrUSRBP97JLpQUm0GgVveDCp6t3yRNIoltIYw7rEj3g5y1o2PGPR2vfTRGa7WC/LZHLTXnEzA== - dependencies: - dateformat "~4.6.2" - eventemitter2 "~0.4.13" - exit "~0.1.2" - findup-sync "~5.0.0" - glob "~7.1.6" - grunt-cli "~1.4.3" - grunt-known-options "~2.0.0" - grunt-legacy-log "~3.0.0" - grunt-legacy-util "~2.0.1" - iconv-lite "~0.6.3" - js-yaml "~3.14.0" - minimatch "~3.0.4" - nopt "~3.0.6" - -has-flag@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" - integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== - -has-property-descriptors@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz#52ba30b6c5ec87fd89fa574bc1c39125c6f65340" - integrity sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg== - dependencies: - get-intrinsic "^1.2.2" - -has-proto@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.1.tgz#1885c1305538958aff469fef37937c22795408e0" - integrity sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg== - -has-symbols@^1.0.3: - version "1.0.3" - resolved 
"https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" - integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== - -hasown@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.0.tgz#f4c513d454a57b7c7e1650778de226b11700546c" - integrity sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA== - dependencies: - function-bind "^1.1.2" - -homedir-polyfill@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8" - integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA== - dependencies: - parse-passwd "^1.0.0" - -hooker@~0.2.3: - version "0.2.3" - resolved "https://registry.yarnpkg.com/hooker/-/hooker-0.2.3.tgz#b834f723cc4a242aa65963459df6d984c5d3d959" - integrity sha512-t+UerCsQviSymAInD01Pw+Dn/usmz1sRO+3Zk1+lx8eg+WKpD2ulcwWqHHL0+aseRBr+3+vIhiG1K1JTwaIcTA== - -http-errors@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" - integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== - dependencies: - depd "2.0.0" - inherits "2.0.4" - setprototypeof "1.2.0" - statuses "2.0.1" - toidentifier "1.0.1" - -http-proxy@^1.18.1: - version "1.18.1" - resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" - integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== - dependencies: - eventemitter3 "^4.0.0" - follow-redirects "^1.0.0" - requires-port "^1.0.0" - -iconv-lite@0.4.24: - version "0.4.24" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" - 
integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== - dependencies: - safer-buffer ">= 2.1.2 < 3" - -iconv-lite@0.6, iconv-lite@~0.6.3: - version "0.6.3" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" - integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== - dependencies: - safer-buffer ">= 2.1.2 < 3.0.0" - -inflight@^1.0.4: - version "1.0.6" - resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" - integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== - dependencies: - once "^1.3.0" - wrappy "1" - -inherits@2, inherits@2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" - integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== - -ini@^1.3.4: - version "1.3.8" - resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" - integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== - -"internmap@1 - 2": - version "2.0.3" - resolved "https://registry.yarnpkg.com/internmap/-/internmap-2.0.3.tgz#6685f23755e43c524e251d29cbc97248e3061009" - integrity sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg== - -interpret@~1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.1.0.tgz#7ed1b1410c6a0e0f78cf95d3b8440c63f78b8614" - integrity sha512-CLM8SNMDu7C5psFCn6Wg/tgpj/bKAg7hc2gWqcuR9OD5Ft9PhBpIu8PLicPeis+xDd6YX2ncI8MCA64I9tftIA== - -is-absolute@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-absolute/-/is-absolute-1.0.0.tgz#395e1ae84b11f26ad1795e73c17378e48a301576" - integrity 
sha512-dOWoqflvcydARa360Gvv18DZ/gRuHKi2NU/wU5X1ZFzdYfH29nkiNZsF3mp4OJ3H4yo9Mx8A/uAGNzpzPN3yBA== - dependencies: - is-relative "^1.0.0" - is-windows "^1.0.1" - -is-binary-path@~2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" - integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== - dependencies: - binary-extensions "^2.0.0" - -is-core-module@^2.13.0: - version "2.13.1" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.13.1.tgz#ad0d7532c6fea9da1ebdc82742d74525c6273384" - integrity sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw== - dependencies: - hasown "^2.0.0" - -is-extglob@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" - integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== - -is-fullwidth-code-point@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" - integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== - -is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: - version "4.0.3" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" - integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== - dependencies: - is-extglob "^2.1.1" - -is-number@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" - integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== - -is-plain-object@^2.0.3, 
is-plain-object@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" - integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== - dependencies: - isobject "^3.0.1" - -is-relative@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-relative/-/is-relative-1.0.0.tgz#a1bb6935ce8c5dba1e8b9754b9b2dcc020e2260d" - integrity sha512-Kw/ReK0iqwKeu0MITLFuj0jbPAmEiOsIwyIXvvbfa6QfmN9pkD1M+8pdk7Rl/dTKbH34/XBFMbgD4iMJhLQbGA== - dependencies: - is-unc-path "^1.0.0" - -is-unc-path@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-unc-path/-/is-unc-path-1.0.0.tgz#d731e8898ed090a12c352ad2eaed5095ad322c9d" - integrity sha512-mrGpVd0fs7WWLfVsStvgF6iEJnbjDFZh9/emhRDcGWTduTfNHd9CHeUwH3gYIjdbwo4On6hunkztwOaAw0yllQ== - dependencies: - unc-path-regex "^0.1.2" - -is-windows@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" - integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== - -isarray@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" - integrity sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ== - -isbinaryfile@^4.0.8: - version "4.0.10" - resolved "https://registry.yarnpkg.com/isbinaryfile/-/isbinaryfile-4.0.10.tgz#0c5b5e30c2557a2f06febd37b7322946aaee42b3" - integrity sha512-iHrqe5shvBUcFbmZq9zOQHBoeOhZJu6RQGrDpBgenUm/Am+F3JM2MgQj+rK3Z601fzrL5gLZWtAPH2OBaSVcyw== - -isexe@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" - integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== - -isobject@^3.0.0, 
isobject@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" - integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== - -jquery@>=1.7: - version "3.7.1" - resolved "https://registry.yarnpkg.com/jquery/-/jquery-3.7.1.tgz#083ef98927c9a6a74d05a6af02806566d16274de" - integrity sha512-m4avr8yL8kmFN8psrbFFFmB/If14iN5o9nw/NgnnM+kybDJpRsAynV2BsfpTYrTRysYUdADVD7CkUUizgkpLfg== - -js-yaml@~3.14.0: - version "3.14.1" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" - integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== - dependencies: - argparse "^1.0.7" - esprima "^4.0.0" - -jsonfile@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" - integrity sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg== - optionalDependencies: - graceful-fs "^4.1.6" - -just-extend@^4.0.2: - version "4.2.1" - resolved "https://registry.yarnpkg.com/just-extend/-/just-extend-4.2.1.tgz#ef5e589afb61e5d66b24eca749409a8939a8c744" - integrity sha512-g3UB796vUFIY90VIv/WX3L2c8CS2MdWUww3CNrYmqza1Fg0DURc2K/O4YrnklBdQarSJ/y8JnJYDGc+1iumQjg== - -karma@^6.4.1: - version "6.4.2" - resolved "https://registry.yarnpkg.com/karma/-/karma-6.4.2.tgz#a983f874cee6f35990c4b2dcc3d274653714de8e" - integrity sha512-C6SU/53LB31BEgRg+omznBEMY4SjHU3ricV6zBcAe1EeILKkeScr+fZXtaI5WyDbkVowJxxAI6h73NcFPmXolQ== - dependencies: - "@colors/colors" "1.5.0" - body-parser "^1.19.0" - braces "^3.0.2" - chokidar "^3.5.1" - connect "^3.7.0" - di "^0.0.1" - dom-serialize "^2.2.1" - glob "^7.1.7" - graceful-fs "^4.2.6" - http-proxy "^1.18.1" - isbinaryfile "^4.0.8" - lodash "^4.17.21" - log4js "^6.4.1" - mime "^2.5.2" - minimatch "^3.0.4" - mkdirp "^0.5.5" - qjobs 
"^1.2.0" - range-parser "^1.2.1" - rimraf "^3.0.2" - socket.io "^4.4.1" - source-map "^0.6.1" - tmp "^0.2.1" - ua-parser-js "^0.7.30" - yargs "^16.1.1" - -kind-of@^6.0.2: - version "6.0.3" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" - integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== - -liftup@~3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/liftup/-/liftup-3.0.1.tgz#1cb81aff0f368464ed3a5f1a7286372d6b1a60ce" - integrity sha512-yRHaiQDizWSzoXk3APcA71eOI/UuhEkNN9DiW2Tt44mhYzX4joFoCZlxsSOF7RyeLlfqzFLQI1ngFq3ggMPhOw== - dependencies: - extend "^3.0.2" - findup-sync "^4.0.0" - fined "^1.2.0" - flagged-respawn "^1.0.1" - is-plain-object "^2.0.4" - object.map "^1.0.1" - rechoir "^0.7.0" - resolve "^1.19.0" - -load-grunt-tasks@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/load-grunt-tasks/-/load-grunt-tasks-5.1.0.tgz#14894c27a7e34ebbef9937c39cc35c573cd04c1c" - integrity sha512-oNj0Jlka1TsfDe+9He0kcA1cRln+TMoTsEByW7ij6kyktNLxBKJtslCFEvFrLC2Dj0S19IWJh3fOCIjLby2Xrg== - dependencies: - arrify "^2.0.1" - multimatch "^4.0.0" - pkg-up "^3.1.0" - resolve-pkg "^2.0.0" - -locate-path@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" - integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== - dependencies: - p-locate "^3.0.0" - path-exists "^3.0.0" - -lodash.get@^4.4.2: - version "4.4.2" - resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99" - integrity sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ== - -lodash@^4.17.10, lodash@^4.17.21, lodash@~4.17.19, lodash@~4.17.21: - version "4.17.21" - resolved 
"https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" - integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== - -log4js@^6.4.1: - version "6.9.1" - resolved "https://registry.yarnpkg.com/log4js/-/log4js-6.9.1.tgz#aba5a3ff4e7872ae34f8b4c533706753709e38b6" - integrity sha512-1somDdy9sChrr9/f4UlzhdaGfDR2c/SaD2a4T7qEkG4jTS57/B3qmnjLYePwQ8cqWnUHZI0iAKxMBpCZICiZ2g== - dependencies: - date-format "^4.0.14" - debug "^4.3.4" - flatted "^3.2.7" - rfdc "^1.3.0" - streamroller "^3.1.5" - -lunr@^0.7.0: - version "0.7.2" - resolved "https://registry.yarnpkg.com/lunr/-/lunr-0.7.2.tgz#79a30e932e216cba163541ee37a3607c12cd7281" - integrity sha512-qXxxSzrWOhFu4EhyvYqCGMv1nJsTy5OGQN3GtClGbRSaqJ/1XASk41nF2jjxzKTS8kjU0QybhOgGgGo6HUZqSQ== - -make-iterator@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/make-iterator/-/make-iterator-1.0.1.tgz#29b33f312aa8f547c4a5e490f56afcec99133ad6" - integrity sha512-pxiuXh0iVEq7VM7KMIhs5gxsfxCux2URptUQaXo4iZZJxBAzTPOLE2BumO5dbfVYq/hBJFBR/a1mFDmOx5AGmw== - dependencies: - kind-of "^6.0.2" - -map-cache@^0.2.0: - version "0.2.2" - resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" - integrity sha512-8y/eV9QQZCiyn1SprXSrCmqJN0yNRATe+PO8ztwqrvrbdRLA3eYJF0yaR0YayLWkMbsQSKWS9N2gPcGEc4UsZg== - -media-typer@0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" - integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== - -micromatch@^4.0.2, micromatch@^4.0.4: - version "4.0.5" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" - integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== - dependencies: - braces "^3.0.2" - picomatch 
"^2.3.1" - -mime-db@1.52.0: - version "1.52.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" - integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== - -mime-types@~2.1.24, mime-types@~2.1.34: - version "2.1.35" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" - integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== - dependencies: - mime-db "1.52.0" - -mime@^2.5.2: - version "2.6.0" - resolved "https://registry.yarnpkg.com/mime/-/mime-2.6.0.tgz#a2a682a95cd4d0cb1d6257e28f83da7e35800367" - integrity sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg== - -minimatch@^3.0.4, minimatch@^3.1.1: - version "3.1.2" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" - integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== - dependencies: - brace-expansion "^1.1.7" - -minimatch@~3.0.4: - version "3.0.8" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.8.tgz#5e6a59bd11e2ab0de1cfb843eb2d82e546c321c1" - integrity sha512-6FsRAQsxQ61mw+qP1ZzbL9Bc78x2p5OqNgNpnoAFLTrX8n5Kxph0CsnhmKKNXTWjXqU5L0pGPR7hYk+XWZr60Q== - dependencies: - brace-expansion "^1.1.7" - -minimist@^1.2.6: - version "1.2.8" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" - integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== - -mkdirp@^0.5.5: - version "0.5.6" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" - integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== - dependencies: - 
minimist "^1.2.6" - -ms@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" - integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== - -ms@2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" - integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== - -multimatch@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/multimatch/-/multimatch-4.0.0.tgz#8c3c0f6e3e8449ada0af3dd29efb491a375191b3" - integrity sha512-lDmx79y1z6i7RNx0ZGCPq1bzJ6ZoDDKbvh7jxr9SJcWLkShMzXrHbYVpTdnhNM5MXpDUxCQ4DgqVttVXlBgiBQ== - dependencies: - "@types/minimatch" "^3.0.3" - array-differ "^3.0.0" - array-union "^2.1.0" - arrify "^2.0.1" - minimatch "^3.0.4" - -negotiator@0.6.3: - version "0.6.3" - resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" - integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== - -nise@^5.1.4: - version "5.1.5" - resolved "https://registry.yarnpkg.com/nise/-/nise-5.1.5.tgz#f2aef9536280b6c18940e32ba1fbdc770b8964ee" - integrity sha512-VJuPIfUFaXNRzETTQEEItTOP8Y171ijr+JLq42wHes3DiryR8vT+1TXQW/Rx8JNUhyYYWyIvjXTU6dOhJcs9Nw== - dependencies: - "@sinonjs/commons" "^2.0.0" - "@sinonjs/fake-timers" "^10.0.2" - "@sinonjs/text-encoding" "^0.7.1" - just-extend "^4.0.2" - path-to-regexp "^1.7.0" - -nopt@~3.0.6: - version "3.0.6" - resolved "https://registry.yarnpkg.com/nopt/-/nopt-3.0.6.tgz#c6465dbf08abcd4db359317f79ac68a646b28ff9" - integrity sha512-4GUt3kSEYmk4ITxzB/b9vaIDfUVWN/Ml1Fwl11IlnIG2iaJ9O6WXZ9SrYM9NLI8OCBieN2Y8SWC2oJV0RQ7qYg== - dependencies: - abbrev "1" - -nopt@~4.0.1: - version "4.0.3" - resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.3.tgz#a375cad9d02fd921278d954c2254d5aa57e15e48" - integrity 
sha512-CvaGwVMztSMJLOeXPrez7fyfObdZqNUK1cPAEzLHrTybIua9pMdmmPR5YwtfNftIOMv3DPUhFaxsZMNTQO20Kg== - dependencies: - abbrev "1" - osenv "^0.1.4" - -normalize-path@^3.0.0, normalize-path@~3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" - integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== - -object-assign@^4: - version "4.1.1" - resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" - integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== - -object-inspect@^1.9.0: - version "1.13.1" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.1.tgz#b96c6109324ccfef6b12216a956ca4dc2ff94bc2" - integrity sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ== - -object.defaults@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/object.defaults/-/object.defaults-1.1.0.tgz#3a7f868334b407dea06da16d88d5cd29e435fecf" - integrity sha512-c/K0mw/F11k4dEUBMW8naXUuBuhxRCfG7W+yFy8EcijU/rSmazOUd1XAEEe6bC0OuXY4HUKjTJv7xbxIMqdxrA== - dependencies: - array-each "^1.0.1" - array-slice "^1.0.0" - for-own "^1.0.0" - isobject "^3.0.0" - -object.map@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/object.map/-/object.map-1.0.1.tgz#cf83e59dc8fcc0ad5f4250e1f78b3b81bd801d37" - integrity sha512-3+mAJu2PLfnSVGHwIWubpOFLscJANBKuB/6A4CxBstc4aqwQY0FWcsppuy4jU5GSB95yES5JHSI+33AWuS4k6w== - dependencies: - for-own "^1.0.0" - make-iterator "^1.0.0" - -object.pick@^1.2.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" - integrity sha512-tqa/UMy/CCoYmj+H5qc07qvSL9dqcs/WZENZ1JbtWBlATP+iVOe778gE6MSijnyCnORzDuX6hU+LA4SZ09YjFQ== - dependencies: - isobject "^3.0.1" 
- -on-finished@2.4.1: - version "2.4.1" - resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" - integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== - dependencies: - ee-first "1.1.1" - -on-finished@~2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" - integrity sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww== - dependencies: - ee-first "1.1.1" - -once@^1.3.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" - integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== - dependencies: - wrappy "1" - -os-homedir@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" - integrity sha512-B5JU3cabzk8c67mRRd3ECmROafjYMXbuzlwtqdM8IbS8ktlTix8aFGb2bAGKrSRIlnfKwovGUUr72JUPyOb6kQ== - -os-tmpdir@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" - integrity sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g== - -osenv@^0.1.4: - version "0.1.5" - resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410" - integrity sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g== - dependencies: - os-homedir "^1.0.0" - os-tmpdir "^1.0.0" - -p-limit@^2.0.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" - integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== - dependencies: - p-try 
"^2.0.0" - -p-locate@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" - integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== - dependencies: - p-limit "^2.0.0" - -p-try@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" - integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== - -parse-filepath@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/parse-filepath/-/parse-filepath-1.0.2.tgz#a632127f53aaf3d15876f5872f3ffac763d6c891" - integrity sha512-FwdRXKCohSVeXqwtYonZTXtbGJKrn+HNyWDYVcp5yuJlesTwNH4rsmRZ+GrKAPJ5bLpRxESMeS+Rl0VCHRvB2Q== - dependencies: - is-absolute "^1.0.0" - map-cache "^0.2.0" - path-root "^0.1.1" - -parse-passwd@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" - integrity sha512-1Y1A//QUXEZK7YKz+rD9WydcE1+EuPr6ZBgKecAB8tmoW6UFv0NREVJe1p+jRxtThkcbbKkfwIbWJe/IeE6m2Q== - -parseurl@~1.3.3: - version "1.3.3" - resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" - integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== - -path-exists@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" - integrity sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ== - -path-is-absolute@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" - integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== - 
-path-parse@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" - integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== - -path-root-regex@^0.1.0: - version "0.1.2" - resolved "https://registry.yarnpkg.com/path-root-regex/-/path-root-regex-0.1.2.tgz#bfccdc8df5b12dc52c8b43ec38d18d72c04ba96d" - integrity sha512-4GlJ6rZDhQZFE0DPVKh0e9jmZ5egZfxTkp7bcRDuPlJXbAwhxcl2dINPUAsjLdejqaLsCeg8axcLjIbvBjN4pQ== - -path-root@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/path-root/-/path-root-0.1.1.tgz#9a4a6814cac1c0cd73360a95f32083c8ea4745b7" - integrity sha512-QLcPegTHF11axjfojBIoDygmS2E3Lf+8+jI6wOVmNVenrKSo3mFdSGiIgdSHenczw3wPtlVMQaFVwGmM7BJdtg== - dependencies: - path-root-regex "^0.1.0" - -path-to-regexp@^1.7.0: - version "1.8.0" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-1.8.0.tgz#887b3ba9d84393e87a0a0b9f4cb756198b53548a" - integrity sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA== - dependencies: - isarray "0.0.1" - -picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1: - version "2.3.1" - resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" - integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== - -pkg-up@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-3.1.0.tgz#100ec235cc150e4fd42519412596a28512a0def5" - integrity sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA== - dependencies: - find-up "^3.0.0" - -qjobs@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/qjobs/-/qjobs-1.2.0.tgz#c45e9c61800bd087ef88d7e256423bdd49e5d071" - integrity sha512-8YOJEHtxpySA3fFDyCRxA+UUV+fA+rTWnuWvylOK/NCjhY+b4ocCtmu8TtsWb+mYeU+GCHf/S66KZF/AsteKHg== - 
-qs@6.11.0: - version "6.11.0" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a" - integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q== - dependencies: - side-channel "^1.0.4" - -range-parser@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" - integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== - -raw-body@2.5.2: - version "2.5.2" - resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.2.tgz#99febd83b90e08975087e8f1f9419a149366b68a" - integrity sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA== - dependencies: - bytes "3.1.2" - http-errors "2.0.0" - iconv-lite "0.4.24" - unpipe "1.0.0" - -readdirp@~3.6.0: - version "3.6.0" - resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" - integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== - dependencies: - picomatch "^2.2.1" - -rechoir@^0.7.0: - version "0.7.1" - resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.7.1.tgz#9478a96a1ca135b5e88fc027f03ee92d6c645686" - integrity sha512-/njmZ8s1wVeR6pjTZ+0nCnv8SpZNRMT2D1RLOJQESlYFDBvwpTA4KWJpZ+sBJ4+vhjILRcK7JIFdGCdxEAAitg== - dependencies: - resolve "^1.9.0" - -require-directory@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" - integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== - -requires-port@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" - integrity 
sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== - -resolve-dir@^1.0.0, resolve-dir@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-1.0.1.tgz#79a40644c362be82f26effe739c9bb5382046f43" - integrity sha512-R7uiTjECzvOsWSfdM0QKFNBVFcK27aHOUwdvK53BcW8zqnGdYp0Fbj82cy54+2A4P2tFM22J5kRfe1R+lM/1yg== - dependencies: - expand-tilde "^2.0.0" - global-modules "^1.0.0" - -resolve-from@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" - integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== - -resolve-pkg@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/resolve-pkg/-/resolve-pkg-2.0.0.tgz#ac06991418a7623edc119084edc98b0e6bf05a41" - integrity sha512-+1lzwXehGCXSeryaISr6WujZzowloigEofRB+dj75y9RRa/obVcYgbHJd53tdYw8pvZj8GojXaaENws8Ktw/hQ== - dependencies: - resolve-from "^5.0.0" - -resolve@^1.19.0, resolve@^1.9.0: - version "1.22.8" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d" - integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw== - dependencies: - is-core-module "^2.13.0" - path-parse "^1.0.7" - supports-preserve-symlinks-flag "^1.0.0" - -rfdc@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.3.0.tgz#d0b7c441ab2720d05dc4cf26e01c89631d9da08b" - integrity sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA== - -rimraf@^3.0.0, rimraf@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" - integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== - dependencies: - glob "^7.1.3" - -robust-predicates@^3.0.0: - version 
"3.0.2" - resolved "https://registry.yarnpkg.com/robust-predicates/-/robust-predicates-3.0.2.tgz#d5b28528c4824d20fc48df1928d41d9efa1ad771" - integrity sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg== - -rw@1: - version "1.3.3" - resolved "https://registry.yarnpkg.com/rw/-/rw-1.3.3.tgz#3f862dfa91ab766b14885ef4d01124bfda074fb4" - integrity sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ== - -"safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0": - version "2.1.2" - resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" - integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== - -set-function-length@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.1.1.tgz#4bc39fafb0307224a33e106a7d35ca1218d659ed" - integrity sha512-VoaqjbBJKiWtg4yRcKBQ7g7wnGnLV3M8oLvVWwOk2PdYY6PEFegR1vezXR0tw6fZGF9csVakIRjrJiy2veSBFQ== - dependencies: - define-data-property "^1.1.1" - get-intrinsic "^1.2.1" - gopd "^1.0.1" - has-property-descriptors "^1.0.0" - -setprototypeof@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" - integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== - -side-channel@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" - integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== - dependencies: - call-bind "^1.0.0" - get-intrinsic "^1.0.2" - object-inspect "^1.9.0" - -sinon@^15.0.3: - version "15.2.0" - resolved "https://registry.yarnpkg.com/sinon/-/sinon-15.2.0.tgz#5e44d4bc5a9b5d993871137fd3560bebfac27565" - 
integrity sha512-nPS85arNqwBXaIsFCkolHjGIkFo+Oxu9vbgmBJizLAhqe6P2o3Qmj3KCUoRkfhHtvgDhZdWD3risLHAUJ8npjw== - dependencies: - "@sinonjs/commons" "^3.0.0" - "@sinonjs/fake-timers" "^10.3.0" - "@sinonjs/samsam" "^8.0.0" - diff "^5.1.0" - nise "^5.1.4" - supports-color "^7.2.0" - -socket.io-adapter@~2.5.2: - version "2.5.2" - resolved "https://registry.yarnpkg.com/socket.io-adapter/-/socket.io-adapter-2.5.2.tgz#5de9477c9182fdc171cd8c8364b9a8894ec75d12" - integrity sha512-87C3LO/NOMc+eMcpcxUBebGjkpMDkNBS9tf7KJqcDsmL936EChtVva71Dw2q4tQcuVC+hAUy4an2NO/sYXmwRA== - dependencies: - ws "~8.11.0" - -socket.io-parser@~4.2.4: - version "4.2.4" - resolved "https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-4.2.4.tgz#c806966cf7270601e47469ddeec30fbdfda44c83" - integrity sha512-/GbIKmo8ioc+NIWIhwdecY0ge+qVBSMdgxGygevmdHj24bsfgtCmcUUcQ5ZzcylGFHsN3k4HB4Cgkl96KVnuew== - dependencies: - "@socket.io/component-emitter" "~3.1.0" - debug "~4.3.1" - -socket.io@^4.4.1: - version "4.7.2" - resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-4.7.2.tgz#22557d76c3f3ca48f82e73d68b7add36a22df002" - integrity sha512-bvKVS29/I5fl2FGLNHuXlQaUH/BlzX1IN6S+NKLNZpBsPZIDH+90eQmCs2Railn4YUiww4SzUedJ6+uzwFnKLw== - dependencies: - accepts "~1.3.4" - base64id "~2.0.0" - cors "~2.8.5" - debug "~4.3.2" - engine.io "~6.5.2" - socket.io-adapter "~2.5.2" - socket.io-parser "~4.2.4" - -source-map@^0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" - integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== - -sprintf-js@^1.1.1: - version "1.1.3" - resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.1.3.tgz#4914b903a2f8b685d17fdf78a70e917e872e444a" - integrity sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA== - -sprintf-js@~1.0.2: - version "1.0.3" - resolved 
"https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" - integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== - -statuses@2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" - integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== - -statuses@~1.5.0: - version "1.5.0" - resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" - integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== - -streamroller@^3.1.5: - version "3.1.5" - resolved "https://registry.yarnpkg.com/streamroller/-/streamroller-3.1.5.tgz#1263182329a45def1ffaef58d31b15d13d2ee7ff" - integrity sha512-KFxaM7XT+irxvdqSP1LGLgNWbYN7ay5owZ3r/8t77p+EtSUAfUgtl7be3xtqtOmGUl9K9YPO2ca8133RlTjvKw== - dependencies: - date-format "^4.0.14" - debug "^4.3.4" - fs-extra "^8.1.0" - -string-width@^4.1.0, string-width@^4.2.0: - version "4.2.3" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" - integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== - dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.1" - -strip-ansi@^6.0.0, strip-ansi@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" - integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== - dependencies: - ansi-regex "^5.0.1" - -supports-color@^7.1.0, supports-color@^7.2.0: - version "7.2.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" - integrity 
sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== - dependencies: - has-flag "^4.0.0" - -supports-preserve-symlinks-flag@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" - integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== - -tmp@0.2.1, tmp@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.2.1.tgz#8457fc3037dcf4719c251367a1af6500ee1ccf14" - integrity sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ== - dependencies: - rimraf "^3.0.0" - -to-regex-range@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" - integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== - dependencies: - is-number "^7.0.0" - -toidentifier@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" - integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== - -type-detect@4.0.8, type-detect@^4.0.8: - version "4.0.8" - resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" - integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== - -type-is@~1.6.18: - version "1.6.18" - resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" - integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== - dependencies: - media-typer "0.3.0" - mime-types "~2.1.24" - -ua-parser-js@^0.7.30: - version "0.7.37" - resolved 
"https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.37.tgz#e464e66dac2d33a7a1251d7d7a99d6157ec27832" - integrity sha512-xV8kqRKM+jhMvcHWUKthV9fNebIzrNy//2O9ZwWcfiBFR5f25XVZPLlEajk/sf3Ra15V92isyQqnIEXRDaZWEA== - -unc-path-regex@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/unc-path-regex/-/unc-path-regex-0.1.2.tgz#e73dd3d7b0d7c5ed86fbac6b0ae7d8c6a69d50fa" - integrity sha512-eXL4nmJT7oCpkZsHZUOJo8hcX3GbsiDOa0Qu9F646fi8dT3XuSVopVqAcEiVzSKKH7UoDti23wNX3qGFxcW5Qg== - -underscore.string@~3.3.5: - version "3.3.6" - resolved "https://registry.yarnpkg.com/underscore.string/-/underscore.string-3.3.6.tgz#ad8cf23d7423cb3b53b898476117588f4e2f9159" - integrity sha512-VoC83HWXmCrF6rgkyxS9GHv8W9Q5nhMKho+OadDJGzL2oDYbYEppBaCMH6pFlwLeqj2QS+hhkw2kpXkSdD1JxQ== - dependencies: - sprintf-js "^1.1.1" - util-deprecate "^1.0.2" - -underscore@>=1.7.0, underscore@>=1.8.3, underscore@^1.8.0, underscore@^1.8.3: - version "1.13.6" - resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.13.6.tgz#04786a1f589dc6c09f761fc5f45b89e935136441" - integrity sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A== - -undici-types@~5.26.4: - version "5.26.5" - resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" - integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== - -universalify@^0.1.0: - version "0.1.2" - resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" - integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== - -unpipe@1.0.0, unpipe@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" - integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== - 
-util-deprecate@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" - integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== - -utils-merge@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" - integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== - -v8flags@~3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/v8flags/-/v8flags-3.2.0.tgz#b243e3b4dfd731fa774e7492128109a0fe66d656" - integrity sha512-mH8etigqMfiGWdeXpaaqGfs6BndypxusHHcv2qSHyZkGEznCd/qAXCWWRzeowtL54147cktFOC4P5y+kl8d8Jg== - dependencies: - homedir-polyfill "^1.0.1" - -vary@^1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" - integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== - -void-elements@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-2.0.1.tgz#c066afb582bb1cb4128d60ea92392e94d5e9dbec" - integrity sha512-qZKX4RnBzH2ugr8Lxa7x+0V6XD9Sb/ouARtiasEQCHB1EVU4NXtmHsDDrx1dO4ne5fc3J6EW05BP1Dl0z0iung== - -which@^1.2.14: - version "1.3.1" - resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" - integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== - dependencies: - isexe "^2.0.0" - -which@~2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" - integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== - dependencies: - isexe "^2.0.0" - -wrap-ansi@^7.0.0: - version "7.0.0" - resolved 
"https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" - integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== - dependencies: - ansi-styles "^4.0.0" - string-width "^4.1.0" - strip-ansi "^6.0.0" - -wrappy@1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" - integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== - -ws@~8.11.0: - version "8.11.0" - resolved "https://registry.yarnpkg.com/ws/-/ws-8.11.0.tgz#6a0d36b8edfd9f96d8b25683db2f8d7de6e8e143" - integrity sha512-HPG3wQd9sNQoT9xHyNCXoDUa+Xw/VevmY9FoHyQ+g+rrMn4j6FB4np7Z0OhdTgjx6MgQLK7jwSy1YecU1+4Asg== - -y18n@^5.0.5: - version "5.0.8" - resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" - integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== - -yargs-parser@^20.2.2: - version "20.2.9" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" - integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== - -yargs@^16.1.1: - version "16.2.0" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" - integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== - dependencies: - cliui "^7.0.2" - escalade "^3.1.1" - get-caller-file "^2.0.5" - require-directory "^2.1.1" - string-width "^4.2.0" - y18n "^5.0.5" - yargs-parser "^20.2.2" From 4f4542174d73127b23f9a4fd9e0b53802ede33f8 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 13 Mar 2024 11:16:47 +0300 Subject: [PATCH 149/270] Bump application dependencies Signed-off-by: Kipchirchir Sigei --- 
onadata/libs/test_utils/pyxform_test_case.py | 4 +- requirements/azure.in | 2 +- requirements/azure.pip | 32 +- requirements/base.pip | 224 +++++++------ requirements/dev.pip | 333 ++++++++++--------- requirements/s3.in | 2 +- requirements/s3.pip | 22 +- requirements/ses.in | 2 +- requirements/ses.pip | 22 +- setup.cfg | 4 +- 10 files changed, 346 insertions(+), 301 deletions(-) diff --git a/onadata/libs/test_utils/pyxform_test_case.py b/onadata/libs/test_utils/pyxform_test_case.py index 4d46c83bf9..c6d440cd79 100644 --- a/onadata/libs/test_utils/pyxform_test_case.py +++ b/onadata/libs/test_utils/pyxform_test_case.py @@ -45,7 +45,7 @@ class MatcherContext: debug: bool nsmap_xpath: "Dict[str, str]" - nsmap_subs: "NSMAPSubs" + nsmap_subs: "NSMAPSubs" # noqa: F821 content_str: str @@ -545,7 +545,7 @@ def reorder_attributes(root): def xpath_clean_result_strings( - nsmap_subs: "NSMAPSubs", results: "Set[_Element]" + nsmap_subs: "NSMAPSubs", results: "Set[_Element]" # noqa: F821 ) -> "Set[str]": """ Clean XPath results: stringify, remove namespace declarations, clean up whitespace. 
diff --git a/requirements/azure.in b/requirements/azure.in index c7b8e930ab..b8799ecf47 100644 --- a/requirements/azure.in +++ b/requirements/azure.in @@ -1,3 +1,3 @@ django-storages[azure] cryptography>=39.0.1 -django >=3.2.23,<4 +django >=3.2.25,<4 diff --git a/requirements/azure.pip b/requirements/azure.pip index 7401938f51..169326e3e8 100644 --- a/requirements/azure.pip +++ b/requirements/azure.pip @@ -1,38 +1,40 @@ # -# This file is autogenerated by pip-compile with python 3.10 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: # # pip-compile --output-file=requirements/azure.pip requirements/azure.in # asgiref==3.7.2 # via django -azure-core==1.29.0 - # via azure-storage-blob -azure-storage-blob==12.17.0 +azure-core==1.30.1 + # via + # azure-storage-blob + # django-storages +azure-storage-blob==12.19.1 # via django-storages -certifi==2023.7.22 +certifi==2024.2.2 # via requests -cffi==1.15.1 +cffi==1.16.0 # via cryptography -charset-normalizer==3.2.0 +charset-normalizer==3.3.2 # via requests -cryptography==41.0.3 +cryptography==42.0.5 # via # -r requirements/azure.in # azure-storage-blob -django==3.2.23 +django==3.2.25 # via # -r requirements/azure.in # django-storages -django-storages[azure]==1.13.2 +django-storages[azure]==1.14.2 # via -r requirements/azure.in -idna==3.4 +idna==3.6 # via requests isodate==0.6.1 # via azure-storage-blob pycparser==2.21 # via cffi -pytz==2023.3 +pytz==2024.1 # via django requests==2.31.0 # via azure-core @@ -42,10 +44,10 @@ six==1.16.0 # isodate sqlparse==0.4.4 # via django -typing-extensions==4.7.1 +typing-extensions==4.10.0 # via # asgiref # azure-core # azure-storage-blob -urllib3==2.0.4 +urllib3==2.2.1 # via requests diff --git a/requirements/base.pip b/requirements/base.pip index 32a36f9663..8f91aca45b 100644 --- a/requirements/base.pip +++ b/requirements/base.pip @@ -10,7 +10,6 @@ # via -r requirements/base.in -e 
git+https://github.com/onaio/ona-oidc.git@v1.0.3#egg=ona-oidc # via -r requirements/base.in - # via -r requirements/base.in -e git+https://github.com/onaio/floip-py.git@3c980eb184069ae7c3c9136b18441978237cd41d#egg=pyfloip # via -r requirements/base.in -e git+https://github.com/onaio/python-digest.git@08267ca8afc1a52f91352ebb5385e8e6d074fc36#egg=python-digest @@ -19,59 +18,65 @@ # via -r requirements/base.in -e git+https://github.com/onaio/savreaderwriter.git@fix-pep-440-issues#egg=savreaderwriter # via -r requirements/base.in -alabaster==0.7.13 +alabaster==0.7.16 # via sphinx -amqp==5.1.1 +amqp==5.2.0 # via kombu analytics-python==1.4.post1 # via onadata appoptics-metrics==5.1.0 # via onadata asgiref==3.7.2 - # via django -async-timeout==4.0.2 + # via + # django + # django-cors-headers +async-timeout==4.0.3 # via redis -attrs==23.1.0 +attrs==23.2.0 # via # jsonlines # jsonschema # referencing -babel==2.12.1 +babel==2.14.0 # via sphinx backoff==1.10.0 # via analytics-python -billiard==4.1.0 +billiard==4.2.0 # via celery -boto3==1.28.22 - # via tabulator -botocore==1.31.22 +boto3==1.34.61 + # via + # dataflows-tabulator + # tabulator +botocore==1.34.61 # via # boto3 # s3transfer cached-property==1.5.2 # via tableschema -cachetools==5.3.1 +cachetools==5.3.3 # via google-auth -celery==5.3.1 +celery==5.3.6 # via onadata -certifi==2023.7.22 +certifi==2024.2.2 # via # requests # sentry-sdk -cffi==1.15.1 +cffi==1.16.0 # via cryptography chardet==5.2.0 # via + # dataflows-tabulator # datapackage # tabulator -charset-normalizer==3.2.0 +charset-normalizer==3.3.2 # via requests -click==8.1.6 +click==8.1.7 # via # celery # click-didyoumean # click-plugins # click-repl + # dataflows-tabulator # datapackage # tableschema # tabulator @@ -81,12 +86,14 @@ click-plugins==1.1.1 # via celery click-repl==0.3.0 # via celery -cryptography==41.0.3 +cryptography==42.0.5 # via # jwcrypto # onadata # pyjwt -datapackage==1.15.2 +dataflows-tabulator==1.54.0 + # via datapackage 
+datapackage==1.15.4 # via pyfloip defusedxml==0.7.1 # via @@ -94,12 +101,10 @@ defusedxml==0.7.1 # onadata # pyxform deprecated==1.2.14 - # via - # jwcrypto - # onadata -dict2xml==1.7.3 # via onadata -django==3.2.23 +dict2xml==1.7.5 + # via onadata +django==3.2.25 # via # django-activity-stream # django-cors-headers @@ -118,15 +123,15 @@ django==3.2.23 # djangorestframework-jsonapi # ona-oidc # onadata -django-activity-stream==1.4.2 +django-activity-stream==2.0.0 # via onadata -django-cors-headers==4.2.0 +django-cors-headers==4.3.1 # via onadata -django-csp==3.7 +django-csp==3.8 # via onadata -django-debug-toolbar==4.1.0 +django-debug-toolbar==4.3.0 # via onadata -django-filter==21.1 +django-filter==23.5 # via onadata django-guardian==2.4.0 # via @@ -138,15 +143,15 @@ django-oauth-toolkit==2.3.0 # via onadata django-ordered-model==3.7.4 # via onadata -django-query-builder==3.1.0 +django-query-builder==3.2.0 # via onadata -django-redis==5.3.0 +django-redis==5.4.0 # via onadata -django-registration-redux==2.12 +django-registration-redux==2.13 # via onadata django-render-block==0.9.2 # via django-templated-email -django-reversion==5.0.4 +django-reversion==5.0.12 # via onadata django-taggit==4.0.0 # via onadata @@ -160,21 +165,21 @@ djangorestframework==3.14.0 # djangorestframework-jsonapi # ona-oidc # onadata -djangorestframework-csv==2.1.1 +djangorestframework-csv==3.0.2 # via onadata djangorestframework-gis==1.0 # via onadata djangorestframework-guardian==0.3.0 # via onadata -djangorestframework-jsonapi==6.0.0 +djangorestframework-jsonapi==6.1.0 # via onadata djangorestframework-jsonp==1.0.2 # via onadata djangorestframework-xml==2.0.0 # via onadata -dnspython==2.4.1 +dnspython==2.6.1 # via pymongo -docutils==0.19 +docutils==0.20.1 # via sphinx dpath==2.1.6 # via onadata @@ -184,59 +189,65 @@ et-xmlfile==1.1.0 # via openpyxl fleming==0.7.0 # via django-query-builder -future==0.18.3 +future==1.0.0 # via python-json2xlsclient -geojson==3.0.1 +geojson==3.1.0 # via 
onadata -google-auth==2.22.0 +google-auth==2.28.2 # via # google-auth-oauthlib # onadata -google-auth-oauthlib==1.0.0 +google-auth-oauthlib==1.2.0 # via onadata -greenlet==2.0.2 +greenlet==3.0.3 # via sqlalchemy httplib2==0.22.0 # via onadata -idna==3.4 +idna==3.6 # via requests ijson==3.2.3 - # via tabulator + # via + # dataflows-tabulator + # tabulator imagesize==1.4.1 # via sphinx inflection==0.5.1 # via djangorestframework-jsonapi isodate==0.6.1 # via tableschema -jinja2==3.1.2 +jinja2==3.1.3 # via sphinx jmespath==1.0.1 # via # boto3 # botocore -jsonlines==3.1.0 - # via tabulator -jsonpickle==3.0.1 +jsonlines==4.0.0 + # via + # dataflows-tabulator + # tabulator +jsonpickle==3.0.3 # via onadata jsonpointer==2.4 # via datapackage -jsonschema==4.19.0 +jsonschema==4.21.1 # via # datapackage # tableschema -jsonschema-specifications==2023.7.1 +jsonschema-specifications==2023.12.1 # via jsonschema -jwcrypto==1.5.0 +jwcrypto==1.5.6 # via django-oauth-toolkit -kombu==5.3.1 +kombu==5.3.5 # via celery linear-tsv==1.1.0 - # via tabulator -lxml==4.9.3 + # via + # dataflows-tabulator + # tabulator +lxml==5.1.0 # via onadata -markdown==3.4.4 +markdown==3.5.2 # via onadata -markupsafe==2.1.3 +markupsafe==2.1.5 # via jinja2 modilabs-python-utils==0.1.5 # via onadata @@ -244,7 +255,7 @@ monotonic==1.6 # via analytics-python nose==1.3.7 # via django-nose -numpy==1.25.2 +numpy==1.26.4 # via onadata oauthlib==3.2.2 # via @@ -252,22 +263,23 @@ oauthlib==3.2.2 # requests-oauthlib openpyxl==3.0.9 # via + # dataflows-tabulator # onadata # pyxform # tabulator -packaging==23.1 +packaging==24.0 # via sphinx -paho-mqtt==1.6.1 +paho-mqtt==2.0.0 # via onadata -pillow==10.0.1 +pillow==10.2.0 # via # elaphe3 # onadata -prompt-toolkit==3.0.39 +prompt-toolkit==3.0.43 # via click-repl -psycopg2-binary==2.9.7 +psycopg2-binary==2.9.9 # via onadata -pyasn1==0.5.0 +pyasn1==0.5.1 # via # pyasn1-modules # rsa @@ -275,7 +287,7 @@ pyasn1-modules==0.3.0 # via google-auth pycparser==2.21 # via cffi 
-pygments==2.16.1 +pygments==2.17.2 # via sphinx pyjwt[crypto]==2.8.0 # via @@ -283,11 +295,11 @@ pyjwt[crypto]==2.8.0 # onadata pylibmc==1.6.3 # via onadata -pymongo==4.4.1 +pymongo==4.6.2 # via onadata -pyparsing==3.1.1 +pyparsing==3.1.2 # via httplib2 -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via # analytics-python # botocore @@ -295,30 +307,31 @@ python-dateutil==2.8.2 # fleming # onadata # tableschema -python-memcached==1.59 +python-memcached==1.62 # via onadata -pytz==2023.3 +pytz==2024.1 # via # django # django-query-builder # djangorestframework # fleming # onadata -pyxform==1.12.1 +pyxform==1.12.2 # via # onadata # pyfloip recaptcha-client==1.0.6 # via onadata -redis==4.6.0 +redis==5.0.3 # via django-redis -referencing==0.30.2 +referencing==0.33.0 # via # jsonschema # jsonschema-specifications requests==2.31.0 # via # analytics-python + # dataflows-tabulator # datapackage # django-oauth-toolkit # ona-oidc @@ -328,103 +341,96 @@ requests==2.31.0 # sphinx # tableschema # tabulator -requests-oauthlib==1.3.1 +requests-oauthlib==1.4.0 # via google-auth-oauthlib rfc3986==2.0.0 # via tableschema -rpds-py==0.9.2 +rpds-py==0.18.0 # via # jsonschema # referencing rsa==4.9 # via google-auth -s3transfer==0.6.1 +s3transfer==0.10.0 # via boto3 -sentry-sdk==1.29.2 +sentry-sdk==1.41.0 # via onadata -simplejson==3.19.1 +simplejson==3.19.2 # via onadata six==1.16.0 # via # analytics-python # appoptics-metrics + # dataflows-tabulator # datapackage - # djangorestframework-csv - # google-auth # isodate # linear-tsv # python-dateutil - # python-memcached # tableschema # tabulator snowballstemmer==2.2.0 # via sphinx -sphinx==6.2.1 - # via - # onadata - # sphinxcontrib-applehelp - # sphinxcontrib-devhelp - # sphinxcontrib-htmlhelp - # sphinxcontrib-qthelp - # sphinxcontrib-serializinghtml -sphinxcontrib-applehelp==1.0.5 +sphinx==7.2.6 + # via onadata +sphinxcontrib-applehelp==1.0.8 # via sphinx -sphinxcontrib-devhelp==1.0.3 +sphinxcontrib-devhelp==1.0.6 # via sphinx 
-sphinxcontrib-htmlhelp==2.0.2 +sphinxcontrib-htmlhelp==2.0.5 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.4 +sphinxcontrib-qthelp==1.0.7 # via sphinx -sphinxcontrib-serializinghtml==1.1.6 +sphinxcontrib-serializinghtml==1.1.10 # via sphinx -sqlalchemy==2.0.19 - # via tabulator +sqlalchemy==2.0.28 + # via + # dataflows-tabulator + # tabulator sqlparse==0.4.4 # via # django # django-debug-toolbar -tableschema==1.20.2 +tableschema==1.20.7 # via datapackage tabulator==1.53.5 - # via - # datapackage - # tableschema -typing-extensions==4.7.1 + # via tableschema +typing-extensions==4.10.0 # via # asgiref + # jwcrypto # sqlalchemy -tzdata==2023.3 +tzdata==2024.1 # via celery -ujson==5.8.0 +ujson==5.9.0 # via onadata unicodecsv==0.14.1 # via + # dataflows-tabulator # datapackage - # djangorestframework-csv # onadata # tableschema # tabulator -urllib3==1.26.16 +urllib3==2.0.7 # via # botocore - # google-auth # requests # sentry-sdk -uwsgi==2.0.22 +uwsgi==2.0.24 # via onadata -vine==5.0.0 +vine==5.1.0 # via # amqp # celery # kombu -wcwidth==0.2.6 +wcwidth==0.2.13 # via prompt-toolkit -wrapt==1.15.0 +wrapt==1.16.0 # via deprecated xlrd==2.0.1 # via + # dataflows-tabulator # pyxform # tabulator xlwt==1.3.0 diff --git a/requirements/dev.pip b/requirements/dev.pip index 9f3489f777..7e79ee9a7a 100644 --- a/requirements/dev.pip +++ b/requirements/dev.pip @@ -1,6 +1,6 @@ # -# This file is autogenerated by pip-compile with python 3.10 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: # # pip-compile --output-file=requirements/dev.pip requirements/dev.in # @@ -18,65 +18,73 @@ # via -r requirements/base.in -e git+https://github.com/onaio/savreaderwriter.git@fix-pep-440-issues#egg=savreaderwriter # via -r requirements/base.in -alabaster==0.7.13 +alabaster==0.7.16 # via sphinx -amqp==5.1.1 +amqp==5.2.0 # via kombu analytics-python==1.4.post1 # via onadata appoptics-metrics==5.1.0 # via 
onadata asgiref==3.7.2 - # via django -astroid==2.15.6 - # via pylint -asttokens==2.2.1 + # via + # django + # django-cors-headers +astroid==2.15.8 + # via + # pylint + # pylint-celery + # pylint-flask + # requirements-detector +asttokens==2.4.1 # via stack-data -async-timeout==4.0.2 +async-timeout==4.0.3 # via redis -attrs==23.1.0 +attrs==23.2.0 # via # jsonlines # jsonschema # referencing -babel==2.12.1 +babel==2.14.0 # via sphinx -backcall==0.2.0 - # via ipython backoff==1.10.0 # via analytics-python -billiard==4.1.0 +billiard==4.2.0 # via celery -boto3==1.28.22 - # via tabulator -botocore==1.31.22 +boto3==1.34.61 + # via + # dataflows-tabulator + # tabulator +botocore==1.34.61 # via # boto3 # s3transfer cached-property==1.5.2 # via tableschema -cachetools==5.3.1 +cachetools==5.3.3 # via google-auth -celery==5.3.1 +celery==5.3.6 # via onadata -certifi==2023.7.22 +certifi==2024.2.2 # via # requests # sentry-sdk -cffi==1.15.1 +cffi==1.16.0 # via cryptography chardet==5.2.0 # via + # dataflows-tabulator # datapackage # tabulator -charset-normalizer==3.2.0 +charset-normalizer==3.3.2 # via requests -click==8.1.6 +click==8.1.7 # via # celery # click-didyoumean # click-plugins # click-repl + # dataflows-tabulator # datapackage # tableschema # tabulator @@ -86,12 +94,14 @@ click-plugins==1.1.1 # via celery click-repl==0.3.0 # via celery -cryptography==41.0.3 +cryptography==42.0.5 # via # jwcrypto # onadata # pyjwt -datapackage==1.15.2 +dataflows-tabulator==1.54.0 + # via datapackage +datapackage==1.15.4 # via pyfloip decorator==5.1.1 # via @@ -103,14 +113,12 @@ defusedxml==0.7.1 # onadata # pyxform deprecated==1.2.14 - # via - # jwcrypto - # onadata -dict2xml==1.7.3 # via onadata -dill==0.3.7 +dict2xml==1.7.5 + # via onadata +dill==0.3.8 # via pylint -django==3.2.23 +django==3.2.25 # via # django-activity-stream # django-cors-headers @@ -130,17 +138,17 @@ django==3.2.23 # djangorestframework-jsonapi # ona-oidc # onadata -django-activity-stream==1.4.2 
+django-activity-stream==2.0.0 # via onadata -django-cors-headers==4.2.0 +django-cors-headers==4.3.1 # via onadata -django-csp==3.7 +django-csp==3.8 # via onadata -django-debug-toolbar==4.1.0 +django-debug-toolbar==4.3.0 # via onadata django-extensions==3.2.3 # via -r requirements/dev.in -django-filter==21.1 +django-filter==23.5 # via onadata django-guardian==2.4.0 # via @@ -152,15 +160,15 @@ django-oauth-toolkit==2.3.0 # via onadata django-ordered-model==3.7.4 # via onadata -django-query-builder==3.1.0 +django-query-builder==3.2.0 # via onadata -django-redis==5.3.0 +django-redis==5.4.0 # via onadata -django-registration-redux==2.12 +django-registration-redux==2.13 # via onadata django-render-block==0.9.2 # via django-templated-email -django-reversion==5.0.4 +django-reversion==5.0.12 # via onadata django-taggit==4.0.0 # via onadata @@ -174,115 +182,128 @@ djangorestframework==3.14.0 # djangorestframework-jsonapi # ona-oidc # onadata -djangorestframework-csv==2.1.1 +djangorestframework-csv==3.0.2 # via onadata djangorestframework-gis==1.0 # via onadata djangorestframework-guardian==0.3.0 # via onadata -djangorestframework-jsonapi==6.0.0 +djangorestframework-jsonapi==6.1.0 # via onadata djangorestframework-jsonp==1.0.2 # via onadata djangorestframework-xml==2.0.0 # via onadata -dnspython==2.4.1 +dnspython==2.6.1 # via pymongo -docutils==0.19 +docutils==0.20.1 # via sphinx +dodgy==0.2.1 + # via prospector dpath==2.1.6 # via onadata elaphe3==0.2.0 # via onadata et-xmlfile==1.1.0 # via openpyxl -executing==1.2.0 +exceptiongroup==1.2.0 + # via ipython +executing==2.0.1 # via stack-data -flake8==4.0.1 +flake8==3.8.4 # via # -r requirements/dev.in # flake8-polyfill flake8-polyfill==1.0.2 # via pep8-naming -flaky==3.7.0 +flaky==3.8.1 # via -r requirements/dev.in fleming==0.7.0 # via django-query-builder -future==0.18.3 +future==1.0.0 # via python-json2xlsclient -geojson==3.0.1 +geojson==3.1.0 # via onadata -google-auth==2.22.0 +google-auth==2.28.2 # via # 
google-auth-oauthlib # onadata -google-auth-oauthlib==1.0.0 +google-auth-oauthlib==1.2.0 # via onadata -greenlet==2.0.2 +greenlet==3.0.3 # via sqlalchemy httmock==1.4.0 # via -r requirements/dev.in httplib2==0.22.0 # via onadata -idna==3.4 +idna==3.6 # via requests ijson==3.2.3 - # via tabulator + # via + # dataflows-tabulator + # tabulator imagesize==1.4.1 # via sphinx -importlib-metadata==6.8.0 +importlib-metadata==7.0.2 # via yapf inflection==0.5.1 # via djangorestframework-jsonapi ipdb==0.13.13 # via -r requirements/dev.in -ipython==8.14.0 +ipython==8.22.2 # via ipdb isodate==0.6.1 # via tableschema -isort==5.12.0 +isort==5.13.2 # via # -r requirements/dev.in # pylint -jedi==0.19.0 +jedi==0.19.1 # via ipython -jinja2==3.1.2 +jinja2==3.1.3 # via sphinx jmespath==1.0.1 # via # boto3 # botocore -jsonlines==3.1.0 - # via tabulator -jsonpickle==3.0.1 +jsonlines==4.0.0 + # via + # dataflows-tabulator + # tabulator +jsonpickle==3.0.3 # via onadata jsonpointer==2.4 # via datapackage -jsonschema==4.19.0 +jsonschema==4.21.1 # via # datapackage # tableschema -jsonschema-specifications==2023.7.1 +jsonschema-specifications==2023.12.1 # via jsonschema -jwcrypto==1.5.0 +jwcrypto==1.5.6 # via django-oauth-toolkit -kombu==5.3.1 +kombu==5.3.5 # via celery -lazy-object-proxy==1.9.0 +lazy-object-proxy==1.10.0 # via astroid linear-tsv==1.1.0 - # via tabulator -lxml==4.9.3 + # via + # dataflows-tabulator + # tabulator +lxml==5.1.0 # via onadata -markdown==3.4.4 +markdown==3.5.2 # via onadata -markupsafe==2.1.3 +markupsafe==2.1.5 # via jinja2 matplotlib-inline==0.1.6 # via ipython mccabe==0.6.1 - # via pylint -mock==4.0.3 + # via + # flake8 + # prospector + # pylint +mock==5.1.0 # via -r requirements/dev.in modilabs-python-utils==0.1.5 # via onadata @@ -290,7 +311,7 @@ monotonic==1.6 # via analytics-python nose==1.3.7 # via django-nose -numpy==1.25.2 +numpy==1.26.4 # via onadata oauthlib==3.2.2 # via @@ -298,52 +319,62 @@ oauthlib==3.2.2 # requests-oauthlib openpyxl==3.0.9 # via + # 
dataflows-tabulator # onadata # pyxform # tabulator -packaging==23.1 - # via sphinx -paho-mqtt==1.6.1 +packaging==24.0 + # via + # prospector + # requirements-detector + # sphinx +paho-mqtt==2.0.0 # via onadata parso==0.8.3 # via jedi pep8-naming==0.10.0 # via prospector -pexpect==4.8.0 +pexpect==4.9.0 # via ipython -pickleshare==0.7.5 - # via ipython -pillow==10.0.1 +pillow==10.2.0 # via # elaphe3 # onadata -platformdirs==3.10.0 +platformdirs==4.2.0 # via # pylint # yapf -prompt-toolkit==3.0.39 +prompt-toolkit==3.0.43 # via # click-repl # ipython -psycopg2-binary==2.9.7 +prospector==1.4.1.1 + # via -r requirements/dev.in +psycopg2-binary==2.9.9 # via onadata ptyprocess==0.7.0 # via pexpect pure-eval==0.2.2 # via stack-data -pyasn1==0.5.0 +pyasn1==0.5.1 # via # pyasn1-modules # rsa pyasn1-modules==0.3.0 # via google-auth -pycodestyle==2.8.0 +pycodestyle==2.6.0 # via # flake8 # prospector pycparser==2.21 # via cffi -pygments==2.16.1 +pydocstyle==6.3.0 + # via prospector +pyflakes==2.2.0 + # via + # flake8 + # prospector +pygments==2.17.2 # via # ipython # sphinx @@ -353,7 +384,7 @@ pyjwt[crypto]==2.8.0 # onadata pylibmc==1.6.3 # via onadata -pylint==2.17.5 +pylint==2.17.7 # via # -r requirements/dev.in # prospector @@ -363,15 +394,23 @@ pylint==2.17.5 # pylint-plugin-utils pylint-celery==0.3 # via prospector -pylint-django==2.5.3 - # via -r requirements/dev.in +pylint-django==2.1.0 + # via + # -r requirements/dev.in + # prospector +pylint-flask==0.6 + # via prospector pylint-plugin-utils==0.8.2 - # via pylint-django -pymongo==4.4.1 + # via + # prospector + # pylint-celery + # pylint-django + # pylint-flask +pymongo==4.6.2 # via onadata -pyparsing==3.1.1 +pyparsing==3.1.2 # via httplib2 -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via # analytics-python # botocore @@ -379,34 +418,33 @@ python-dateutil==2.8.2 # fleming # onadata # tableschema -python-memcached==1.59 +python-memcached==1.62 # via onadata -pytz==2023.3 +pytz==2024.1 # via - # babel - # celery 
# django # django-query-builder # djangorestframework # fleming # onadata -pyxform==1.12.1 +pyxform==1.12.2 # via # onadata # pyfloip -pyyaml==6.0 +pyyaml==6.0.1 # via prospector recaptcha-client==1.0.6 # via onadata -redis==4.6.0 +redis==5.0.3 # via django-redis -referencing==0.30.2 +referencing==0.33.0 # via # jsonschema # jsonschema-specifications requests==2.31.0 # via # analytics-python + # dataflows-tabulator # datapackage # django-oauth-toolkit # httmock @@ -420,134 +458,133 @@ requests==2.31.0 # tabulator requests-mock==1.11.0 # via -r requirements/dev.in -requests-oauthlib==1.3.1 +requests-oauthlib==1.4.0 # via google-auth-oauthlib -requirements-detector==0.7 +requirements-detector==1.2.2 # via prospector rfc3986==2.0.0 # via tableschema -rpds-py==0.9.2 +rpds-py==0.18.0 # via # jsonschema # referencing rsa==4.9 # via google-auth -s3transfer==0.6.1 +s3transfer==0.10.0 # via boto3 -sentry-sdk==1.29.2 +semver==3.0.2 + # via requirements-detector +sentry-sdk==1.41.0 # via onadata -simplejson==3.19.1 +setoptconf==0.3.0 + # via prospector +simplejson==3.19.2 # via onadata six==1.16.0 # via # analytics-python # appoptics-metrics # asttokens - # click-repl + # dataflows-tabulator # datapackage - # django-query-builder - # djangorestframework-csv - # google-auth # isodate # linear-tsv # python-dateutil - # python-memcached # requests-mock # tableschema # tabulator snowballstemmer==2.2.0 - # via sphinx -sphinx==6.2.1 # via - # onadata - # sphinxcontrib-applehelp - # sphinxcontrib-devhelp - # sphinxcontrib-htmlhelp - # sphinxcontrib-qthelp - # sphinxcontrib-serializinghtml -sphinxcontrib-applehelp==1.0.5 + # pydocstyle + # sphinx +sphinx==7.2.6 + # via onadata +sphinxcontrib-applehelp==1.0.8 # via sphinx -sphinxcontrib-devhelp==1.0.3 +sphinxcontrib-devhelp==1.0.6 # via sphinx -sphinxcontrib-htmlhelp==2.0.2 +sphinxcontrib-htmlhelp==2.0.5 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.4 +sphinxcontrib-qthelp==1.0.7 # via sphinx 
-sphinxcontrib-serializinghtml==1.1.6 +sphinxcontrib-serializinghtml==1.1.10 # via sphinx -sqlalchemy==2.0.19 - # via tabulator +sqlalchemy==2.0.28 + # via + # dataflows-tabulator + # tabulator sqlparse==0.4.4 # via # django # django-debug-toolbar -stack-data==0.6.2 +stack-data==0.6.3 # via ipython -tableschema==1.20.2 +tableschema==1.20.7 # via datapackage tabulator==1.53.5 - # via - # datapackage - # tableschema -tblib==2.0.0 + # via tableschema +tblib==3.0.0 # via -r requirements/dev.in +toml==0.10.2 + # via requirements-detector tomli==2.0.1 # via # ipdb # pylint # yapf -tomlkit==0.12.1 +tomlkit==0.12.4 # via pylint -traitlets==5.9.0 +traitlets==5.14.2 # via # ipython # matplotlib-inline -typing-extensions==4.7.1 +typing-extensions==4.10.0 # via # asgiref # astroid + # jwcrypto # sqlalchemy -tzdata==2023.3 +tzdata==2024.1 # via celery -ujson==5.8.0 +ujson==5.9.0 # via onadata unicodecsv==0.14.1 # via + # dataflows-tabulator # datapackage - # djangorestframework-csv # onadata # tableschema # tabulator -urllib3==1.26.16 +urllib3==2.0.7 # via # botocore - # google-auth # requests # sentry-sdk -uwsgi==2.0.22 +uwsgi==2.0.24 # via onadata -vine==5.0.0 +vine==5.1.0 # via # amqp # celery # kombu -wcwidth==0.2.6 +wcwidth==0.2.13 # via prompt-toolkit -wrapt==1.15.0 +wrapt==1.16.0 # via # astroid # deprecated xlrd==2.0.1 # via + # dataflows-tabulator # pyxform # tabulator xlwt==1.3.0 # via onadata xmltodict==0.13.0 # via onadata -yapf==0.40.1 +yapf==0.40.2 # via -r requirements/dev.in -zipp==3.16.2 +zipp==3.18.0 # via importlib-metadata diff --git a/requirements/s3.in b/requirements/s3.in index 84a3d03a03..108712204d 100644 --- a/requirements/s3.in +++ b/requirements/s3.in @@ -1,3 +1,3 @@ django-storages -django >=3.2.23,<4 +django >=3.2.25,<4 boto3 diff --git a/requirements/s3.pip b/requirements/s3.pip index 723bcbdf4c..d03912b6b1 100644 --- a/requirements/s3.pip +++ b/requirements/s3.pip @@ -1,38 +1,38 @@ # -# This file is autogenerated by pip-compile with python 3.10 
-# To update, run: +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: # # pip-compile --output-file=requirements/s3.pip requirements/s3.in # asgiref==3.7.2 # via django -boto3==1.28.22 +boto3==1.34.61 # via -r requirements/s3.in -botocore==1.31.22 +botocore==1.34.61 # via # boto3 # s3transfer -django==3.2.23 +django==3.2.25 # via # -r requirements/s3.in # django-storages -django-storages==1.13.2 +django-storages==1.14.2 # via -r requirements/s3.in jmespath==1.0.1 # via # boto3 # botocore -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via botocore -pytz==2023.3 +pytz==2024.1 # via django -s3transfer==0.6.1 +s3transfer==0.10.0 # via boto3 six==1.16.0 # via python-dateutil sqlparse==0.4.4 # via django -typing-extensions==4.7.1 +typing-extensions==4.10.0 # via asgiref -urllib3==1.26.16 +urllib3==2.0.7 # via botocore diff --git a/requirements/ses.in b/requirements/ses.in index 19f15cd2c8..4825ec8368 100644 --- a/requirements/ses.in +++ b/requirements/ses.in @@ -1,3 +1,3 @@ boto -django >=3.2.23,<4 +django >=3.2.25,<4 django-ses diff --git a/requirements/ses.pip b/requirements/ses.pip index 6ef3d085f6..aee2de5254 100644 --- a/requirements/ses.pip +++ b/requirements/ses.pip @@ -1,6 +1,6 @@ # -# This file is autogenerated by pip-compile with python 3.10 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: # # pip-compile --output-file=requirements/ses.pip requirements/ses.in # @@ -8,35 +8,35 @@ asgiref==3.7.2 # via django boto==2.49.0 # via -r requirements/ses.in -boto3==1.28.22 +boto3==1.34.61 # via django-ses -botocore==1.31.22 +botocore==1.34.61 # via # boto3 # s3transfer -django==3.2.23 +django==3.2.25 # via # -r requirements/ses.in # django-ses -django-ses==3.5.0 +django-ses==3.5.2 # via -r requirements/ses.in jmespath==1.0.1 # via # boto3 # botocore -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via botocore -pytz==2023.3 +pytz==2024.1 # via # django # 
django-ses -s3transfer==0.6.1 +s3transfer==0.10.0 # via boto3 six==1.16.0 # via python-dateutil sqlparse==0.4.4 # via django -typing-extensions==4.7.1 +typing-extensions==4.10.0 # via asgiref -urllib3==1.26.16 +urllib3==2.0.7 # via botocore diff --git a/setup.cfg b/setup.cfg index 91d615cd1a..472679f037 100644 --- a/setup.cfg +++ b/setup.cfg @@ -27,12 +27,12 @@ tests_require = mock requests-mock install_requires = - Django>=3.2.23,<4 + Django>=3.2.25,<4 django-guardian django-registration-redux django-templated-email django-reversion - django-filter<22.1 + django-filter<24.2 django-nose django-ordered-model #generic relation From 330f7a8591dfffe255dd8b99918d82ab2d68dbf3 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 13 Mar 2024 12:13:24 +0300 Subject: [PATCH 150/270] Downgrade sphinx packae Signed-off-by: Kipchirchir Sigei --- requirements/base.pip | 14 +++++++------- requirements/dev.pip | 14 +++++++------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/requirements/base.pip b/requirements/base.pip index 8f91aca45b..a75fd021b8 100644 --- a/requirements/base.pip +++ b/requirements/base.pip @@ -179,7 +179,7 @@ djangorestframework-xml==2.0.0 # via onadata dnspython==2.6.1 # via pymongo -docutils==0.20.1 +docutils==0.19 # via sphinx dpath==2.1.6 # via onadata @@ -370,19 +370,19 @@ six==1.16.0 # tabulator snowballstemmer==2.2.0 # via sphinx -sphinx==7.2.6 +sphinx==6.2.1 # via onadata -sphinxcontrib-applehelp==1.0.8 +sphinxcontrib-applehelp==1.0.5 # via sphinx -sphinxcontrib-devhelp==1.0.6 +sphinxcontrib-devhelp==1.0.3 # via sphinx -sphinxcontrib-htmlhelp==2.0.5 +sphinxcontrib-htmlhelp==2.0.2 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.7 +sphinxcontrib-qthelp==1.0.4 # via sphinx -sphinxcontrib-serializinghtml==1.1.10 +sphinxcontrib-serializinghtml==1.1.6 # via sphinx sqlalchemy==2.0.28 # via diff --git a/requirements/dev.pip b/requirements/dev.pip index 7e79ee9a7a..d119fa4a15 100644 --- 
a/requirements/dev.pip +++ b/requirements/dev.pip @@ -196,7 +196,7 @@ djangorestframework-xml==2.0.0 # via onadata dnspython==2.6.1 # via pymongo -docutils==0.20.1 +docutils==0.19 # via sphinx dodgy==0.2.1 # via prospector @@ -497,19 +497,19 @@ snowballstemmer==2.2.0 # via # pydocstyle # sphinx -sphinx==7.2.6 +sphinx==6.2.1 # via onadata -sphinxcontrib-applehelp==1.0.8 +sphinxcontrib-applehelp==1.0.5 # via sphinx -sphinxcontrib-devhelp==1.0.6 +sphinxcontrib-devhelp==1.0.3 # via sphinx -sphinxcontrib-htmlhelp==2.0.5 +sphinxcontrib-htmlhelp==2.0.2 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.7 +sphinxcontrib-qthelp==1.0.4 # via sphinx -sphinxcontrib-serializinghtml==1.1.10 +sphinxcontrib-serializinghtml==1.1.6 # via sphinx sqlalchemy==2.0.28 # via From 0d730f943beffb037e602f035c1dad81550f00d3 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 13 Mar 2024 15:00:54 +0300 Subject: [PATCH 151/270] Update deprecated django-filter classes Signed-off-by: Kipchirchir Sigei --- onadata/apps/api/viewsets/submission_review_viewset.py | 2 +- onadata/apps/api/viewsets/xform_list_viewset.py | 2 +- onadata/apps/api/viewsets/xform_viewset.py | 2 +- requirements/dev.pip | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/onadata/apps/api/viewsets/submission_review_viewset.py b/onadata/apps/api/viewsets/submission_review_viewset.py index 4318a2ab7e..3c2a21c54a 100644 --- a/onadata/apps/api/viewsets/submission_review_viewset.py +++ b/onadata/apps/api/viewsets/submission_review_viewset.py @@ -36,7 +36,7 @@ class SubmissionReviewViewSet(AuthenticateHeaderMixin, CacheControlMixin, serializer_class = SubmissionReviewSerializer permission_classes = [SubmissionReviewPermissions] filter_backends = (DjangoFilterBackend, ) - filter_fields = ('instance', 'created_by', 'status') + filterset_fields = ('instance', 'created_by', 'status') def destroy(self, request, *args, **kwargs): """ diff --git 
a/onadata/apps/api/viewsets/xform_list_viewset.py b/onadata/apps/api/viewsets/xform_list_viewset.py index e0be32bfe1..5144f27f5e 100644 --- a/onadata/apps/api/viewsets/xform_list_viewset.py +++ b/onadata/apps/api/viewsets/xform_list_viewset.py @@ -52,7 +52,7 @@ class XFormListViewSet(ETagsMixin, BaseViewset, viewsets.ReadOnlyModelViewSet): TokenAuthentication, ) content_negotiation_class = MediaFileContentNegotiation - filter_class = filters.FormIDFilter + filterset_class = filters.FormIDFilter filter_backends = ( filters.XFormListObjectPermissionFilter, filters.XFormListXFormPKFilter, diff --git a/onadata/apps/api/viewsets/xform_viewset.py b/onadata/apps/api/viewsets/xform_viewset.py index cfeff8c041..749b4a74d4 100644 --- a/onadata/apps/api/viewsets/xform_viewset.py +++ b/onadata/apps/api/viewsets/xform_viewset.py @@ -323,7 +323,7 @@ class XFormViewSet( filters.XFormOwnerFilter, DjangoFilterBackend, ) - filter_fields = ("instances_with_osm",) + filterset_fields = ("instances_with_osm",) public_forms_endpoint = "public" diff --git a/requirements/dev.pip b/requirements/dev.pip index d119fa4a15..cb50c8ccdb 100644 --- a/requirements/dev.pip +++ b/requirements/dev.pip @@ -303,7 +303,7 @@ mccabe==0.6.1 # flake8 # prospector # pylint -mock==5.1.0 +mock==4.0.3 # via -r requirements/dev.in modilabs-python-utils==0.1.5 # via onadata From 5a86daa4d93c90e253fc500711c6e56389d944b6 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Tue, 26 Mar 2024 10:26:49 +0300 Subject: [PATCH 152/270] Tag release v3.19.0 Signed-off-by: Kipchirchir Sigei --- CHANGES.rst | 15 +++++++++++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 17 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index f5418931f2..8790e85fc4 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,21 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v3.19.0(2024-03-26) +------------------- +- Security Remediations + `PR #2569 ` + [@KipSigei] +- Add 
Instance History indexing to checksum & uuid fields + `PR #2564 ` + [@KipSigei] +- Add migration to update old enketo urls + `PR #2552 ` + [@ciremusyoka] +- Ignore child questions of grouped sections within repeating sections during CSV export + `PR #2559 ` + [@kelvin-muchiri] + v3.18.2(2024-02-23) ------------------- - Improve perfomance of /status endpoint diff --git a/onadata/__init__.py b/onadata/__init__.py index 101038e5e4..82441273fd 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.18.2" +__version__ = "3.19.0" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index 472679f037..70a7bc93e6 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 3.18.2 +version = 3.19.0 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 45283d6f8ae286edbfb0809641fbcca522b9fa53 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Tue, 26 Mar 2024 16:19:28 +0300 Subject: [PATCH 153/270] remove redundant Dockerfile for development (#2575) enhance docker-compose.yaml to use the same Dockerfile used for building the production image --- Dockerfile | 79 ------------------------- docker-compose.yml | 8 +-- docker/onadata-uwsgi/Dockerfile.ubuntu | 60 ++++++++++--------- docker/onadata-uwsgi/docker-compose.yml | 28 --------- 4 files changed, 36 insertions(+), 139 deletions(-) delete mode 100644 Dockerfile delete mode 100644 docker/onadata-uwsgi/docker-compose.yml diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index ca745b5c25..0000000000 --- a/Dockerfile +++ /dev/null @@ -1,79 +0,0 @@ -FROM ubuntu:20.04 - -# Silence configuration prompts -ENV DEBIAN_FRONTEND noninteractive -ENV PYTHONUNBUFFERED 1 - -ENV DJANGO_SETTINGS_MODULE onadata.settings.docker - -# Install service dependencies -# hadolint ignore=DL3008 -RUN 
apt-get update -q &&\ - apt-get install -y --no-install-recommends software-properties-common \ - binutils \ - libproj-dev \ - gdal-bin \ - memcached \ - libmemcached-dev \ - build-essential \ - supervisor \ - python3.9 \ - python3-dev \ - python3-pip \ - python3-setuptools \ - git \ - libssl-dev \ - libpq-dev \ - gfortran \ - libatlas-base-dev \ - libjpeg-dev \ - libxml2-dev \ - libxslt1-dev \ - libpython3.9-dev \ - zlib1g-dev \ - ghostscript \ - python3-celery \ - python3-sphinx \ - pkg-config \ - gcc \ - automake \ - libtool \ - openjdk-11-jre-headless \ - libpcre3 \ - libpcre3-dev \ - locales \ - netcat && \ - apt-get -y -o Dpkg::Options::='--force-confdef' -o Dpkg::Options::='--force-confold' dist-upgrade &&\ - rm -rf /var/lib/apt/lists/* - -# Generate and set en_US.UTF-8 locale -RUN locale-gen en_US.UTF-8 -ENV LC_ALL en_US.UTF-8 -ENV LC_CTYPE en_US.UTF-8 -RUN dpkg-reconfigure locales - -# Create OnaData user and add to tty group -RUN useradd -G tty -m onadata - -# Make app directory -RUN mkdir -p /srv/onadata && chown -R onadata:onadata /srv - -# Copy local codebase -COPY . /srv/onadata - -# Install service requirements -# hadolint ignore=DL3013 -RUN python3.9 -m pip install --no-cache-dir -U pip && \ - python3.9 -m pip install --no-cache-dir -r /srv/onadata/requirements/base.pip && \ - python3.9 -m pip install --no-cache-dir -r /srv/onadata/requirements/s3.pip && \ - python3.9 -m pip install --no-cache-dir -r /srv/onadata/requirements/ses.pip && \ - python3.9 -m pip install --no-cache-dir -r /srv/onadata/requirements/azure.pip && \ - python3.9 -m pip install --no-cache-dir uwsgitop django-silk - -WORKDIR /srv/onadata - -EXPOSE 8000 - -USER onadata - -CMD ["/usr/local/bin/uwsgi", "--ini", "/srv/onadata/uwsgi.ini"] diff --git a/docker-compose.yml b/docker-compose.yml index 5a295c925f..a1b8585c30 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -2,13 +2,13 @@ services: api: build: context: . 
- dockerfile: Dockerfile + dockerfile: ./docker/onadata-uwsgi/Dockerfile.ubuntu + target: runserver depends_on: - database - cache stdin_open: true tty: true - user: "onadata" volumes: - ./:/srv/onadata ports: @@ -18,14 +18,14 @@ services: celery: build: context: . - dockerfile: Dockerfile + dockerfile: ./docker/onadata-uwsgi/Dockerfile.ubuntu + target: runserver depends_on: - database - cache - api volumes: - ./:/srv/onadata - user: "onadata" command: celery -A onadata.celeryapp worker -B -l INFO -E database: diff --git a/docker/onadata-uwsgi/Dockerfile.ubuntu b/docker/onadata-uwsgi/Dockerfile.ubuntu index ccc91edab3..b511802a4e 100644 --- a/docker/onadata-uwsgi/Dockerfile.ubuntu +++ b/docker/onadata-uwsgi/Dockerfile.ubuntu @@ -13,7 +13,7 @@ RUN mkdir -m 0600 ~/.ssh && ssh-keyscan github.com >> ~/.ssh/known_hosts # hadolint ignore=DL3013 RUN --mount=type=ssh if [ -n "$optional_packages" ]; then pip install ${optional_packages} ; fi -FROM ubuntu:focal-20240123 +FROM ubuntu:focal-20240123 as base ARG release_version=v3.18.2 @@ -48,31 +48,31 @@ RUN add-apt-repository ppa:deadsnakes/ppa -y && apt-get update -q # Install OnaData Dependencies RUN apt-get install -y --no-install-recommends \ - libproj-dev \ - gdal-bin \ - memcached \ - libmemcached-dev \ - supervisor \ - python3.10 \ - python3.10-dev \ - python3-pip \ - python3-setuptools \ - libssl-dev \ - libpq-dev \ - gfortran \ - libatlas-base-dev \ - libjpeg-dev \ - libxml2-dev \ - libxslt1-dev \ - libpython3.10-dev \ - zlib1g-dev \ - ghostscript \ - python3-celery \ - python3-sphinx \ - libtool \ - openjdk-11-jre-headless \ - libpcre3 \ - libpcre3-dev \ + libproj-dev \ + gdal-bin \ + memcached \ + libmemcached-dev \ + supervisor \ + python3.10 \ + python3.10-dev \ + python3-pip \ + python3-setuptools \ + libssl-dev \ + libpq-dev \ + gfortran \ + libatlas-base-dev \ + libjpeg-dev \ + libxml2-dev \ + libxslt1-dev \ + libpython3.10-dev \ + zlib1g-dev \ + ghostscript \ + python3-celery \ + python3-sphinx \ + libtool \ 
+ openjdk-11-jre-headless \ + libpcre3 \ + libpcre3-dev \ && apt-get autoremove -y \ && apt-get clean -y \ && rm -rf /var/lib/apt/lists/* @@ -100,12 +100,16 @@ RUN python3.10 -m pip install --no-cache-dir -U pip && \ python3.10 -m pip install setuptools==65.5.1 && \ python3.10 -m pip install --no-cache-dir pyyaml uwsgitop - +FROM base as docs # Compile API Docs RUN make -C docs html +FROM base as runserver + EXPOSE 8000 +USER onadata + CMD ["/usr/local/bin/uwsgi", "--ini", "/uwsgi.ini"] -USER onadata + diff --git a/docker/onadata-uwsgi/docker-compose.yml b/docker/onadata-uwsgi/docker-compose.yml deleted file mode 100644 index 27ce6eb30b..0000000000 --- a/docker/onadata-uwsgi/docker-compose.yml +++ /dev/null @@ -1,28 +0,0 @@ -version: '3' - -services: - db: - image: postgis/postgis:13-3.0 - environment: - - POSTGRES_PASSWORD=onadata - - POSTGRES_USER=onadata - - POSTGRES_DB=onadata - volumes: - # One level above the code to prevent having to move or delete - # it everytime we rebuild. - - ../../../.onadata_db:/var/lib/postgresql/data - queue: - image: rabbitmq - web: - build: - context: . 
- dockerfile: Dockerfile - image: onadata:latest - volumes: - # For local development - - ../../.:/srv/onadata - ports: - - "8000:8000" - depends_on: - - db - - queue From dfab6c97534edf0689aa90fdb78d84b93bd4ed2f Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Wed, 3 Apr 2024 15:43:37 +0300 Subject: [PATCH 154/270] Add user to organization asynchronously (#2574) * add user to organization asynchronously add user to organization asynchronously and refactor to remove redundant code * address failing tests * refactor code * add tests * add tests * suppress lint warning suppress lint warning Function name "add_org_user_and_share_projects_async" doesn't conform to '(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$' pattern * fix pylint error fix pylint: unrecognized-inline-option / Unrecognized file option 'invalid-name' * add tests * share project asynchronously * add test case * add test case * retry async task incase of exception * add tests * add test case * address typos * send email after adding user to org successfully * address lint warning address pylint: no-value-for-parameter / No value for argument 'from_email' --- onadata/apps/api/tasks.py | 82 ++++++- onadata/apps/api/tests/test_tasks.py | 232 +++++++++++++++++- onadata/apps/api/tests/test_tools.py | 172 +++++++++++++ .../test_organization_profile_viewset.py | 9 +- .../tests/viewsets/test_project_viewset.py | 3 +- onadata/apps/api/tools.py | 50 +++- onadata/apps/api/viewsets/project_viewset.py | 5 +- onadata/libs/models/share_project.py | 3 + .../organization_member_serializer.py | 90 +------ .../serializers/share_project_serializer.py | 44 ++-- onadata/libs/tests/models/__init__.py | 0 .../libs/tests/models/test_share_project.py | 93 +++++++ 12 files changed, 671 insertions(+), 112 deletions(-) create mode 100644 onadata/apps/api/tests/test_tools.py create mode 100644 onadata/libs/tests/models/__init__.py create mode 100644 onadata/libs/tests/models/test_share_project.py diff --git a/onadata/apps/api/tasks.py 
b/onadata/apps/api/tasks.py index 09a751ae26..2b1a815f1b 100644 --- a/onadata/apps/api/tasks.py +++ b/onadata/apps/api/tasks.py @@ -11,21 +11,27 @@ from django.conf import settings from django.core.files.uploadedfile import TemporaryUploadedFile from django.core.files.storage import default_storage +from django.core.mail import send_mail from django.contrib.auth import get_user_model +from django.db import DatabaseError from django.utils import timezone from django.utils.datastructures import MultiValueDict from onadata.apps.api import tools +from onadata.apps.api.models.organization_profile import OrganizationProfile +from onadata.apps.logger.models import Instance, ProjectInvitation, XForm, Project from onadata.libs.utils.email import send_generic_email from onadata.libs.utils.model_tools import queryset_iterator from onadata.libs.utils.cache_tools import ( safe_delete, XFORM_REGENERATE_INSTANCE_JSON_TASK, ) -from onadata.apps.logger.models import Instance, ProjectInvitation, XForm +from onadata.libs.models.share_project import ShareProject from onadata.libs.utils.email import ProjectInvitationEmail from onadata.celeryapp import app +logger = logging.getLogger(__name__) + User = get_user_model() @@ -145,7 +151,7 @@ def send_project_invitation_email_async( invitation = ProjectInvitation.objects.get(id=invitation_id) except ProjectInvitation.DoesNotExist as err: - logging.exception(err) + logger.exception(err) else: email = ProjectInvitationEmail(invitation, url) @@ -161,7 +167,7 @@ def regenerate_form_instance_json(xform_id: int): try: xform: XForm = XForm.objects.get(pk=xform_id) except XForm.DoesNotExist as err: - logging.exception(err) + logger.exception(err) else: if not xform.is_instance_json_regenerated: @@ -182,3 +188,73 @@ def regenerate_form_instance_json(xform_id: int): # Clear cache used to store the task id from the AsyncResult cache_key = f"{XFORM_REGENERATE_INSTANCE_JSON_TASK}{xform_id}" safe_delete(cache_key) + + +class 
ShareProjectBaseTask(app.Task): + autoretry_for = ( + DatabaseError, + ConnectionError, + ) + retry_backoff = 3 + + +@app.task(base=ShareProjectBaseTask) +def add_org_user_and_share_projects_async( + org_id: int, + user_id: int, + role: str = None, + email_subject: str = None, + email_msg: str = None, +): # pylint: disable=invalid-name + """Add user to organization and share projects asynchronously""" + try: + organization = OrganizationProfile.objects.get(pk=org_id) + user = User.objects.get(pk=user_id) + + except OrganizationProfile.DoesNotExist as err: + logger.exception(err) + + except User.DoesNotExist as err: + logger.exception(err) + + else: + tools.add_org_user_and_share_projects(organization, user, role) + + if email_msg and email_subject and user.email: + send_mail( + email_subject, + email_msg, + settings.DEFAULT_FROM_EMAIL, + (user.email,), + ) + + +@app.task(base=ShareProjectBaseTask) +def remove_org_user_async(org_id, user_id): + """Remove user from organization asynchronously""" + try: + organization = OrganizationProfile.objects.get(pk=org_id) + user = User.objects.get(pk=user_id) + + except OrganizationProfile.DoesNotExist as err: + logger.exception(err) + + except User.DoesNotExist as err: + logger.exception(err) + + else: + tools.remove_user_from_organization(organization, user) + + +@app.task(base=ShareProjectBaseTask) +def share_project_async(project_id, username, role, remove=False): + """Share project asynchronously""" + try: + project = Project.objects.get(pk=project_id) + + except Project.DoesNotExist as err: + logger.exception(err) + + else: + share = ShareProject(project, username, role, remove) + share.save() diff --git a/onadata/apps/api/tests/test_tasks.py b/onadata/apps/api/tests/test_tasks.py index 0051008f32..0edcca4138 100644 --- a/onadata/apps/api/tests/test_tasks.py +++ b/onadata/apps/api/tests/test_tasks.py @@ -1,19 +1,31 @@ """Tests for module onadata.apps.api.tasks""" + import sys from unittest.mock import patch from 
django.core.cache import cache +from django.contrib.auth import get_user_model +from django.db import DatabaseError, OperationalError +from django.test import override_settings -from onadata.apps.main.tests.test_base import TestBase from onadata.apps.api.tasks import ( send_project_invitation_email_async, regenerate_form_instance_json, + add_org_user_and_share_projects_async, + remove_org_user_async, + share_project_async, + ShareProject, ) +from onadata.apps.api.models.organization_profile import OrganizationProfile from onadata.apps.logger.models import ProjectInvitation, Instance +from onadata.apps.main.tests.test_base import TestBase +from onadata.libs.permissions import ManagerRole from onadata.libs.utils.user_auth import get_user_default_project from onadata.libs.utils.email import ProjectInvitationEmail +User = get_user_model() + class SendProjectInivtationEmailAsyncTestCase(TestBase): """Tests for send_project_invitation_email_async""" @@ -80,7 +92,7 @@ def mock_get_full_dict( instance.refresh_from_db() self.assertFalse("foo" in instance.json) - @patch("logging.exception") + @patch("onadata.apps.api.tasks.logger.exception") def test_form_id_invalid(self, mock_log_exception): """An invalid xform_id is handled""" @@ -107,3 +119,219 @@ def mock_get_full_dict( regenerate_form_instance_json.delay(self.xform.pk) instance.refresh_from_db() self.assertFalse(instance.json) + + +@patch("onadata.apps.api.tasks.tools.add_org_user_and_share_projects") +class AddOrgUserAndShareProjectsAsyncTestCase(TestBase): + """Tests for add_org_user_and_share_projects_async""" + + def setUp(self): + super().setUp() + + self.org_user = User.objects.create(username="onaorg") + alice = self._create_user("alice", "1234&&") + self.org = OrganizationProfile.objects.create( + user=self.org_user, name="Ona Org", creator=alice + ) + + def test_user_added_to_org(self, mock_add): + """User is added to organization""" + add_org_user_and_share_projects_async.delay( + self.org.pk, self.user.pk, 
"manager" + ) + mock_add.assert_called_once_with(self.org, self.user, "manager") + + def test_role_optional(self, mock_add): + """role param is optional""" + add_org_user_and_share_projects_async.delay(self.org.pk, self.user.pk) + mock_add.assert_called_once_with(self.org, self.user, None) + + @patch("onadata.apps.api.tasks.logger.exception") + def test_invalid_org_id(self, mock_log, mock_add): + """Invalid org_id is handled""" + add_org_user_and_share_projects_async.delay(sys.maxsize, self.user.pk) + mock_add.assert_not_called() + mock_log.assert_called_once() + + @patch("onadata.apps.api.tasks.logger.exception") + def test_invalid_user_id(self, mock_log, mock_add): + """Invalid org_id is handled""" + add_org_user_and_share_projects_async.delay(self.org.pk, sys.maxsize) + mock_add.assert_not_called() + mock_log.assert_called_once() + + @patch("onadata.apps.api.tasks.add_org_user_and_share_projects_async.retry") + def test_database_error(self, mock_retry, mock_add): + """We retry calls if DatabaseError is raised""" + mock_add.side_effect = DatabaseError() + add_org_user_and_share_projects_async.delay(self.org.pk, self.user.pk) + self.assertTrue(mock_retry.called) + _, kwargs = mock_retry.call_args_list[0] + self.assertTrue(isinstance(kwargs["exc"], DatabaseError)) + + @patch("onadata.apps.api.tasks.add_org_user_and_share_projects_async.retry") + def test_connection_error(self, mock_retry, mock_add): + """We retry calls if ConnectionError is raised""" + mock_add.side_effect = ConnectionError() + add_org_user_and_share_projects_async.delay(self.org.pk, self.user.pk) + self.assertTrue(mock_retry.called) + _, kwargs = mock_retry.call_args_list[0] + self.assertTrue(isinstance(kwargs["exc"], ConnectionError)) + + @patch("onadata.apps.api.tasks.add_org_user_and_share_projects_async.retry") + def test_operation_error(self, mock_retry, mock_add): + """We retry calls if OperationError is raised""" + mock_add.side_effect = OperationalError() + 
add_org_user_and_share_projects_async.delay(self.org.pk, self.user.pk) + self.assertTrue(mock_retry.called) + _, kwargs = mock_retry.call_args_list[0] + self.assertTrue(isinstance(kwargs["exc"], OperationalError)) + + @override_settings(DEFAULT_FROM_EMAIL="noreply@ona.io") + @patch("onadata.apps.api.tasks.send_mail") + def test_send_mail(self, mock_email, mock_add): + """Send mail works""" + self.user.email = "bob@example.com" + self.user.save() + add_org_user_and_share_projects_async.delay( + self.org.pk, self.user.pk, "manager", "Subject", "Body" + ) + mock_email.assert_called_with( + "Subject", + "Body", + "noreply@ona.io", + ("bob@example.com",), + ) + mock_add.assert_called_once_with(self.org, self.user, "manager") + + @override_settings(DEFAULT_FROM_EMAIL="noreply@ona.io") + @patch("onadata.apps.api.tasks.send_mail") + def test_user_email_none(self, mock_email, mock_add): + """Email not sent if user email is None""" + add_org_user_and_share_projects_async.delay( + self.org.pk, self.user.pk, "manager", "Subject", "Body" + ) + mock_email.assert_not_called() + mock_add.assert_called_once_with(self.org, self.user, "manager") + + +@patch("onadata.apps.api.tasks.tools.remove_user_from_organization") +class RemoveOrgUserAsyncTestCase(TestBase): + """Tests for remove_org_user_async""" + + def setUp(self): + super().setUp() + + self.org_user = User.objects.create(username="onaorg") + alice = self._create_user("alice", "1234&&") + self.org = OrganizationProfile.objects.create( + user=self.org_user, name="Ona Org", creator=alice + ) + + def test_user_removed_from_org(self, mock_remove): + """User is removed from organization""" + remove_org_user_async.delay(self.org.pk, self.user.pk) + mock_remove.assert_called_once_with(self.org, self.user) + + @patch("onadata.apps.api.tasks.logger.exception") + def test_invalid_org_id(self, mock_log, mock_remove): + """Invalid org_id is handled""" + remove_org_user_async.delay(sys.maxsize, self.user.pk) + 
mock_remove.assert_not_called() + mock_log.assert_called_once() + + @patch("onadata.apps.api.tasks.logger.exception") + def test_invalid_user_id(self, mock_log, mock_remove): + """Invalid user_id is handled""" + remove_org_user_async.delay(self.org.pk, sys.maxsize) + mock_remove.assert_not_called() + mock_log.assert_called_once() + + @patch("onadata.apps.api.tasks.remove_org_user_async.retry") + def test_database_error(self, mock_retry, mock_remove): + """We retry calls if DatabaseError is raised""" + mock_remove.side_effect = DatabaseError() + remove_org_user_async.delay(self.org.pk, self.user.pk) + self.assertTrue(mock_retry.called) + _, kwargs = mock_retry.call_args_list[0] + self.assertTrue(isinstance(kwargs["exc"], DatabaseError)) + + @patch("onadata.apps.api.tasks.remove_org_user_async.retry") + def test_connection_error(self, mock_retry, mock_remove): + """We retry calls if ConnectionError is raised""" + mock_remove.side_effect = ConnectionError() + remove_org_user_async.delay(self.org.pk, self.user.pk) + self.assertTrue(mock_retry.called) + _, kwargs = mock_retry.call_args_list[0] + self.assertTrue(isinstance(kwargs["exc"], ConnectionError)) + + @patch("onadata.apps.api.tasks.remove_org_user_async.retry") + def test_operation_error(self, mock_retry, mock_remove): + """We retry calls if OperationError is raised""" + mock_remove.side_effect = OperationalError() + remove_org_user_async.delay(self.org.pk, self.user.pk) + self.assertTrue(mock_retry.called) + _, kwargs = mock_retry.call_args_list[0] + self.assertTrue(isinstance(kwargs["exc"], OperationalError)) + + +class ShareProjectAsyncTestCase(TestBase): + """Tests for share_project_async""" + + def setUp(self): + super().setUp() + + self._publish_transportation_form() + self.alice = self._create_user("alice", "Yuao8(-)") + + def test_share(self): + """Project is shared with user""" + share_project_async.delay(self.project.id, "alice", "manager") + + self.assertTrue(ManagerRole.user_has_role(self.alice, 
self.project)) + + def test_remove(self): + """User is removed from project""" + # Add user to project + ManagerRole.add(self.alice, self.project) + # Remove user + share_project_async.delay(self.project.id, "alice", "manager", True) + + self.assertFalse(ManagerRole.user_has_role(self.alice, self.project)) + + @patch("onadata.apps.api.tasks.logger.exception") + def test_invalid_project_id(self, mock_log): + """Invalid projecct_id is handled""" + share_project_async.delay(sys.maxsize, "alice", "manager") + self.assertFalse(ManagerRole.user_has_role(self.alice, self.project)) + mock_log.assert_called_once() + + @patch.object(ShareProject, "save") + @patch("onadata.apps.api.tasks.share_project_async.retry") + def test_database_error(self, mock_retry, mock_share): + """We retry calls if DatabaseError is raised""" + mock_share.side_effect = DatabaseError() + share_project_async.delay(self.project.id, self.user.pk, "manager") + self.assertTrue(mock_retry.called) + _, kwargs = mock_retry.call_args_list[0] + self.assertTrue(isinstance(kwargs["exc"], DatabaseError)) + + @patch.object(ShareProject, "save") + @patch("onadata.apps.api.tasks.share_project_async.retry") + def test_connection_error(self, mock_retry, mock_share): + """We retry calls if ConnectionError is raised""" + mock_share.side_effect = ConnectionError() + share_project_async.delay(self.project.pk, self.user.pk, "manager") + self.assertTrue(mock_retry.called) + _, kwargs = mock_retry.call_args_list[0] + self.assertTrue(isinstance(kwargs["exc"], ConnectionError)) + + @patch.object(ShareProject, "save") + @patch("onadata.apps.api.tasks.share_project_async.retry") + def test_operation_error(self, mock_retry, mock_share): + """We retry calls if OperationError is raised""" + mock_share.side_effect = OperationalError() + share_project_async.delay(self.project.pk, self.user.pk, "manager") + self.assertTrue(mock_retry.called) + _, kwargs = mock_retry.call_args_list[0] + self.assertTrue(isinstance(kwargs["exc"], 
OperationalError)) diff --git a/onadata/apps/api/tests/test_tools.py b/onadata/apps/api/tests/test_tools.py new file mode 100644 index 0000000000..ac3c59f4f5 --- /dev/null +++ b/onadata/apps/api/tests/test_tools.py @@ -0,0 +1,172 @@ +"""Tests for module onadata.apps.api.tools""" + +from django.contrib.auth import get_user_model + +from onadata.apps.api.models.organization_profile import ( + OrganizationProfile, + Team, + get_organization_members_team, +) +from onadata.apps.api.tools import ( + add_org_user_and_share_projects, + add_user_to_organization, +) +from onadata.apps.logger.models.project import Project +from onadata.apps.main.tests.test_base import TestBase +from onadata.libs.permissions import DataEntryRole, ManagerRole, OwnerRole + + +User = get_user_model() + + +class AddUserToOrganizationTestCase(TestBase): + """Add tests for add_user_to_organization""" + + def setUp(self) -> None: + super().setUp() + + self.org_user = User.objects.create(username="onaorg") + alice = self._create_user("alice", "1234&&") + self.org = OrganizationProfile.objects.create( + user=self.org_user, name="Ona Org", creator=alice + ) + + def test_add_owner(self): + """Owner is added to organization""" + add_user_to_organization(self.org, self.user, "owner") + + self.user.refresh_from_db() + owner_team = Team.objects.get(name=f"{self.org_user.username}#Owners") + members_team = Team.objects.get(name=f"{self.org_user.username}#members") + self.assertTrue( + owner_team.user_set.filter(username=self.user.username).exists() + ) + self.assertTrue( + members_team.user_set.filter(username=self.user.username).exists() + ) + self.assertTrue(OwnerRole.user_has_role(self.user, self.org)) + self.assertTrue(OwnerRole.user_has_role(self.user, self.org.userprofile_ptr)) + + # If role changes, user is removed from owners team + add_user_to_organization(self.org, self.user, "editor") + + self.user.refresh_from_db() + owner_team.refresh_from_db() + + self.assertFalse( + 
owner_team.user_set.filter(username=self.user.username).exists() + ) + self.assertFalse(OwnerRole.user_has_role(self.user, self.org)) + self.assertFalse(OwnerRole.user_has_role(self.user, self.org.userprofile_ptr)) + + def test_non_owner(self): + """Non-owners add to organization""" + add_user_to_organization(self.org, self.user, "manager") + + self.user.refresh_from_db() + owner_team = Team.objects.get(name=f"{self.org_user.username}#Owners") + members_team = Team.objects.get(name=f"{self.org_user.username}#members") + self.assertFalse( + owner_team.user_set.filter(username=self.user.username).exists() + ) + self.assertTrue( + members_team.user_set.filter(username=self.user.username).exists() + ) + self.assertTrue(ManagerRole.user_has_role(self.user, self.org)) + + def test_role_none(self): + """role param is None or not provided""" + add_user_to_organization(self.org, self.user) + + self.user.refresh_from_db() + owner_team = Team.objects.get(name=f"{self.org_user.username}#Owners") + members_team = Team.objects.get(name=f"{self.org_user.username}#members") + self.assertFalse( + owner_team.user_set.filter(username=self.user.username).exists() + ) + self.assertTrue( + members_team.user_set.filter(username=self.user.username).exists() + ) + + +class AddOrgUserAndShareProjectsTestCase(TestBase): + """Tests for add_org_user_and_share_projects""" + + def setUp(self) -> None: + super().setUp() + + self.org_user = User.objects.create(username="onaorg") + alice = self._create_user("alice", "1234&&") + self.org = OrganizationProfile.objects.create( + user=self.org_user, name="Ona Org", creator=alice + ) + self.project = Project.objects.create( + name="Demo", organization=self.org_user, created_by=alice + ) + + def test_add_owner(self): + """Owner added to org and projects shared""" + add_org_user_and_share_projects(self.org, self.user, "owner") + + self.user.refresh_from_db() + owner_team = Team.objects.get(name=f"{self.org_user.username}#Owners") + members_team = 
Team.objects.get(name=f"{self.org_user.username}#members") + self.assertTrue( + owner_team.user_set.filter(username=self.user.username).exists() + ) + self.assertTrue( + members_team.user_set.filter(username=self.user.username).exists() + ) + self.assertTrue(OwnerRole.user_has_role(self.user, self.project)) + self.assertTrue(OwnerRole.user_has_role(self.user, self.org)) + + def test_non_owner(self): + """Non-owners add to org and projects shared + + Non-owners should be assigned default project permissions + """ + # Set default permissions for project + members_team = get_organization_members_team(self.org) + DataEntryRole.add(members_team, self.project) + + add_org_user_and_share_projects(self.org, self.user, "manager") + + self.user.refresh_from_db() + owner_team = Team.objects.get(name=f"{self.org_user.username}#Owners") + members_team = Team.objects.get(name=f"{self.org_user.username}#members") + self.assertFalse( + owner_team.user_set.filter(username=self.user.username).exists() + ) + self.assertTrue( + members_team.user_set.filter(username=self.user.username).exists() + ) + self.assertTrue(DataEntryRole.user_has_role(self.user, self.project)) + self.assertTrue(ManagerRole.user_has_role(self.user, self.org)) + + def test_project_created_by_manager(self): + """A manager is assigned manager role on projects they created""" + self.project.created_by = self.user + self.project.save() + + add_org_user_and_share_projects(self.org, self.user, "manager") + + self.assertTrue(ManagerRole.user_has_role(self.user, self.project)) + + def test_role_none(self): + """role param is None or not provided""" + # Set default permissions for project + members_team = get_organization_members_team(self.org) + DataEntryRole.add(members_team, self.project) + + add_org_user_and_share_projects(self.org, self.user) + + self.user.refresh_from_db() + owner_team = Team.objects.get(name=f"{self.org_user.username}#Owners") + members_team = 
Team.objects.get(name=f"{self.org_user.username}#members") + self.assertFalse( + owner_team.user_set.filter(username=self.user.username).exists() + ) + self.assertTrue( + members_team.user_set.filter(username=self.user.username).exists() + ) + self.assertTrue(DataEntryRole.user_has_role(self.user, self.project)) diff --git a/onadata/apps/api/tests/viewsets/test_organization_profile_viewset.py b/onadata/apps/api/tests/viewsets/test_organization_profile_viewset.py index 700d071698..7ae8ae6f09 100644 --- a/onadata/apps/api/tests/viewsets/test_organization_profile_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_organization_profile_viewset.py @@ -16,6 +16,7 @@ from onadata.apps.api.models.organization_profile import ( OrganizationProfile, get_organization_members_team, + Team, ) from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet from onadata.apps.api.tools import ( @@ -297,6 +298,8 @@ def test_add_members_to_org(self): response = view(request, user="denoinc") self.assertEqual(response.status_code, 201) self.assertEqual(set(response.data), set(["denoinc", "aboy"])) + team = Team.objects.get(name=f"{self.organization.user.username}#members") + self.assertTrue(team.user_set.filter(username="aboy").exists()) def test_inactive_members_not_listed(self): self._org_create() @@ -681,7 +684,8 @@ def test_put_bad_role(self): response = view(request, user="denoinc") self.assertEqual(response.status_code, 400) - @patch("onadata.libs.serializers.organization_member_serializer.send_mail") + @override_settings(DEFAULT_FROM_EMAIL="noreply@ona.io") + @patch("onadata.apps.api.tasks.send_mail") def test_add_members_to_org_email(self, mock_email): self._org_create() view = OrganizationProfileViewSet.as_view({"post": "members"}) @@ -705,7 +709,8 @@ def test_add_members_to_org_email(self, mock_email): ) self.assertEqual(set(response.data), set(["denoinc", "aboy"])) - @patch("onadata.libs.serializers.organization_member_serializer.send_mail") + 
@override_settings(DEFAULT_FROM_EMAIL="noreply@ona.io") + @patch("onadata.apps.api.tasks.send_mail") def test_add_members_to_org_email_custom_subj(self, mock_email): self._org_create() view = OrganizationProfileViewSet.as_view({"post": "members"}) diff --git a/onadata/apps/api/tests/viewsets/test_project_viewset.py b/onadata/apps/api/tests/viewsets/test_project_viewset.py index 1b96ca546c..f7b847aa4a 100644 --- a/onadata/apps/api/tests/viewsets/test_project_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_project_viewset.py @@ -2488,8 +2488,7 @@ def test_project_share_atomicity(self, mock_send_mail): mock_rm_xform_perms, ): # noqa mock_rm_xform_perms.side_effect = Exception() - with self.assertRaises(Exception): - response = view(request, pk=projectid) + response = view(request, pk=projectid) # permissions have not changed for both xform and project self.assertTrue(role_class.user_has_role(alice, self.xform)) self.assertTrue(role_class.user_has_role(alice, self.project)) diff --git a/onadata/apps/api/tools.py b/onadata/apps/api/tools.py index 64832a7a66..9aaf21bcb0 100644 --- a/onadata/apps/api/tools.py +++ b/onadata/apps/api/tools.py @@ -55,6 +55,7 @@ get_role, get_role_in_org, is_organization, + get_team_project_default_permissions, ) from onadata.libs.serializers.project_serializer import ProjectSerializer from onadata.libs.utils.api_export_tools import ( @@ -75,6 +76,7 @@ publish_form, response_with_mimetype_and_name, ) +from onadata.libs.utils.model_tools import queryset_iterator from onadata.libs.utils.project_utils import ( set_project_perms_to_xform, set_project_perms_to_xform_async, @@ -227,12 +229,27 @@ def remove_user_from_team(team, user): remove_perm(perm.codename, user, members_team) -def add_user_to_organization(organization, user): +def add_user_to_organization(organization, user, role=None): """Add a user to an organization""" team = get_organization_members_team(organization) add_user_to_team(team, user) + if role is not None: + role_cls = 
ROLES.get(role) + role_cls.add(user, organization) + + owners_team = get_or_create_organization_owners_team(organization) + + if role == OwnerRole.name: + role_cls.add(user, organization.userprofile_ptr) + # Add user to their respective team + add_user_to_team(owners_team, user) + + else: + remove_user_from_team(owners_team, user) + OwnerRole.remove_obj_permissions(user, organization.userprofile_ptr) + def get_organization_members(organization): """Get members team user queryset""" @@ -814,3 +831,34 @@ def set_enketo_signed_cookies(resp, username=None, json_web_token=None): resp.set_signed_cookie(ENKETO_AUTH_COOKIE, json_web_token, **enketo) return resp + + +def add_org_user_and_share_projects( + organization: OrganizationProfile, user, org_role: str = None +): + """Add user to organization and share all projects""" + add_user_to_organization(organization, user, org_role) + + def share(project, role): + share = ShareProject(project, user.username, role) + share.save() + + project_qs = organization.user.project_org.all() + + if org_role == OwnerRole.name: + # New owners have owner role on all projects + for project in queryset_iterator(project_qs): + share(project, org_role) + + else: + # New members & managers gain default team permissions on projects + team = get_organization_members_team(organization) + + for project in queryset_iterator(project_qs): + if org_role == ManagerRole.name and project.created_by == user: + # New managers are only granted the manager role on the + # projects they created + share(project, org_role) + else: + project_role = get_team_project_default_permissions(team, project) + share(project, project_role) diff --git a/onadata/apps/api/viewsets/project_viewset.py b/onadata/apps/api/viewsets/project_viewset.py index 3f72099143..c8e6797ce7 100644 --- a/onadata/apps/api/viewsets/project_viewset.py +++ b/onadata/apps/api/viewsets/project_viewset.py @@ -33,7 +33,6 @@ from onadata.libs.serializers.share_project_serializer import ( 
RemoveUserFromProjectSerializer, ShareProjectSerializer, - propagate_project_permissions_async, ) from onadata.libs.serializers.user_profile_serializer import UserProfileSerializer from onadata.libs.serializers.xform_serializer import ( @@ -48,6 +47,7 @@ from onadata.libs.utils.cache_tools import PROJ_OWNER_CACHE, safe_delete from onadata.libs.utils.common_tools import merge_dicts from onadata.libs.utils.export_tools import str_to_bool +from onadata.libs.utils.project_utils import propagate_project_permissions_async from onadata.settings.common import DEFAULT_FROM_EMAIL, SHARE_PROJECT_SUBJECT # pylint: disable=invalid-name @@ -64,7 +64,6 @@ class ProjectViewSet( BaseViewset, ModelViewSet, ): - """ List, Retrieve, Update, Create Project and Project Forms. """ @@ -182,7 +181,7 @@ def share(self, request, *args, **kwargs): remove = strtobool(remove) if remove: - serializer = RemoveUserFromProjectSerializer(data=data) + serializer = RemoveUserFromProjectSerializer(data={**data, remove: True}) else: serializer = ShareProjectSerializer(data=data) if serializer.is_valid(): diff --git a/onadata/libs/models/share_project.py b/onadata/libs/models/share_project.py index f23807e975..d179d15972 100644 --- a/onadata/libs/models/share_project.py +++ b/onadata/libs/models/share_project.py @@ -18,6 +18,7 @@ PROJ_PERM_CACHE, safe_delete, ) +from onadata.libs.utils.project_utils import propagate_project_permissions_async # pylint: disable=invalid-name User = get_user_model() @@ -91,6 +92,8 @@ def save(self, **kwargs): # clear cache safe_delete(f"{PROJ_OWNER_CACHE}{self.project.pk}") safe_delete(f"{PROJ_PERM_CACHE}{self.project.pk}") + # propagate KPI permissions + propagate_project_permissions_async.apply_async(args=[self.project.pk]) @transaction.atomic() def __remove_user(self): diff --git a/onadata/libs/serializers/organization_member_serializer.py b/onadata/libs/serializers/organization_member_serializer.py index 8c3520455b..22d58be9f2 100644 --- 
a/onadata/libs/serializers/organization_member_serializer.py +++ b/onadata/libs/serializers/organization_member_serializer.py @@ -3,85 +3,30 @@ The OrganizationMemberSerializer - manages a users access in an organization """ from django.contrib.auth import get_user_model -from django.core.mail import send_mail from django.utils.translation import gettext as _ from rest_framework import serializers -from onadata.apps.api.models.organization_profile import get_organization_members_team from onadata.apps.api.tools import ( _get_owners, - add_user_to_organization, - add_user_to_team, - get_or_create_organization_owners_team, get_organization_members, - remove_user_from_organization, - remove_user_from_team, ) -from onadata.apps.logger.models.project import Project +from onadata.apps.api.tasks import ( + add_org_user_and_share_projects_async, + remove_org_user_async, +) from onadata.apps.main.models.user_profile import UserProfile from onadata.libs.permissions import ( ROLES, - ManagerRole, OwnerRole, - get_team_project_default_permissions, is_organization, ) from onadata.libs.serializers.fields.organization_field import OrganizationField -from onadata.libs.serializers.share_project_serializer import ShareProjectSerializer -from onadata.libs.utils.project_utils import propagate_project_permissions_async -from onadata.settings.common import DEFAULT_FROM_EMAIL, SHARE_ORG_SUBJECT +from onadata.settings.common import SHARE_ORG_SUBJECT User = get_user_model() -def _compose_send_email(organization, user, email_msg, email_subject=None): - - if not email_subject: - email_subject = SHARE_ORG_SUBJECT.format(user.username, organization.name) - - # send out email message. 
- send_mail(email_subject, email_msg, DEFAULT_FROM_EMAIL, (user.email,)) - - -def _set_organization_role_to_user(organization, user, role): - role_cls = ROLES.get(role) - if role_cls: - role_cls.add(user, organization) - - owners_team = get_or_create_organization_owners_team(organization) - members_team = get_organization_members_team(organization) - - # add user to their respective team - if role == OwnerRole.name: - # add user to owners team - role_cls.add(user, organization.userprofile_ptr) - add_user_to_team(owners_team, user) - # add user to org projects - for project in organization.user.project_org.all(): - data = {"project": project.pk, "username": user.username, "role": role} - serializer = ShareProjectSerializer(data=data) - if serializer.is_valid(): - serializer.save() - - elif role != OwnerRole.name: - add_user_to_team(members_team, user) - # add user to org projects - for project in organization.user.project_org.all(): - if role != ManagerRole.name: - role = get_team_project_default_permissions(members_team, project) - else: - if project.created_by != user: - role = get_team_project_default_permissions(members_team, project) - - data = {"project": project.pk, "username": user.username, "role": role} - serializer = ShareProjectSerializer(data=data) - if serializer.is_valid(): - serializer.save() - # remove user from owners team - remove_user_from_team(owners_team, user) - - class OrganizationMemberSerializer(serializers.Serializer): """ The OrganizationMemberSerializer - manages a users access in an organization @@ -155,29 +100,20 @@ def create(self, validated_data): username = validated_data.get("username") role = validated_data.get("role") email_msg = validated_data.get("email_msg") - email_subject = validated_data.get("email_subject") + email_subject = validated_data.get( + "email_subject", SHARE_ORG_SUBJECT.format(username, organization.name) + ) remove = validated_data.get("remove") if username: user = User.objects.get(username=username) - 
add_user_to_organization(organization, user) - _set_organization_role_to_user(organization, user, role) - - if email_msg: - _compose_send_email(organization, user, email_msg, email_subject) - if remove: - remove_user_from_organization(organization, user) - - projects = Project.objects.filter( - organization=organization.user, deleted_at__isnull=True - ) - for project in projects.iterator(): - # Queue permission propagation with a - # delay for permissions to be effected - propagate_project_permissions_async.apply_async( - args=[project.id], countdown=60 + remove_org_user_async.apply_async(args=[organization.pk, user.pk]) + + else: + add_org_user_and_share_projects_async.apply_async( + args=[organization.pk, user.pk, role, email_subject, email_msg] ) return organization diff --git a/onadata/libs/serializers/share_project_serializer.py b/onadata/libs/serializers/share_project_serializer.py index 40938addad..768abcd20c 100644 --- a/onadata/libs/serializers/share_project_serializer.py +++ b/onadata/libs/serializers/share_project_serializer.py @@ -7,10 +7,10 @@ from rest_framework import serializers +from onadata.apps.api.tasks import share_project_async from onadata.libs.models.share_project import ShareProject from onadata.libs.permissions import ROLES, OwnerRole, get_object_users_with_permissions from onadata.libs.serializers.fields.project_field import ProjectField -from onadata.libs.utils.project_utils import propagate_project_permissions_async User = get_user_model() @@ -40,19 +40,18 @@ def create(self, validated_data): for username in validated_data.pop("username").split(","): validated_data["username"] = username instance = ShareProject(**validated_data) - instance.save() created_instances.append(instance) + share_project_async.apply_async( + args=[instance.project.pk, instance.username, instance.role] + ) - propagate_project_permissions_async.apply_async( - args=[validated_data.get("project").id], countdown=30 - ) return created_instances def update(self, 
instance, validated_data): instance = attrs_to_instance(validated_data, instance) - instance.save() - propagate_project_permissions_async.apply_async( - args=[validated_data.get("project").id], countdown=30 + + share_project_async.apply_async( + args=[instance.project.pk, instance.username, instance.role] ) return instance @@ -119,22 +118,10 @@ class RemoveUserFromProjectSerializer(ShareProjectSerializer): remove = serializers.BooleanField() def update(self, instance, validated_data): - instance = attrs_to_instance(validated_data, instance) - instance.save() - propagate_project_permissions_async.apply_async( - args=[validated_data.get("project").id], countdown=30 - ) - - return instance + return attrs_to_instance(validated_data, instance) def create(self, validated_data): - instance = ShareProject(**validated_data) - instance.save() - propagate_project_permissions_async.apply_async( - args=[validated_data.get("project").id], countdown=30 - ) - - return instance + return ShareProject(**validated_data) def validate(self, attrs): """Check and confirm that the project will be left with at least one @@ -152,3 +139,16 @@ def validate(self, attrs): ) return attrs + + def save(self, **kwargs): + instance = super().save(**kwargs) + share_project_async.apply_async( + args=[ + instance.project.pk, + instance.username, + instance.role, + instance.remove, + ] + ) + + return instance diff --git a/onadata/libs/tests/models/__init__.py b/onadata/libs/tests/models/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/onadata/libs/tests/models/test_share_project.py b/onadata/libs/tests/models/test_share_project.py new file mode 100644 index 0000000000..3f2f9d7f16 --- /dev/null +++ b/onadata/libs/tests/models/test_share_project.py @@ -0,0 +1,93 @@ +"""Tests for module onadata.libs.models.share_project""" + +from unittest.mock import patch, call + +from onadata.apps.logger.models.data_view import DataView +from onadata.apps.logger.models.project import Project 
+from onadata.apps.logger.models.xform import XForm +from onadata.apps.main.tests.test_base import TestBase +from onadata.libs.models.share_project import ShareProject +from onadata.libs.permissions import ManagerRole + + +@patch( + "onadata.libs.models.share_project.propagate_project_permissions_async.apply_async" +) +class ShareProjectTestCase(TestBase): + """Tests for model ShareProject""" + + def setUp(self): + super().setUp() + + self._publish_transportation_form() + md_xform = """ + | survey | + | | type | name | label | + | | text | name | Name | + | | integer | age | Age | + | | select one fruits | fruit | Fruit | + | | | | | + | choices | list name | name | label | + | | fruits | 1 | Mango | + | | fruits | 2 | Orange | + | | fruits | 3 | Apple | + """ + project = Project.objects.create( + name="Demo", organization=self.user, created_by=self.user + ) + self._publish_markdown(md_xform, self.user, project) + self.dataview_form = XForm.objects.all().order_by("-pk")[0] + DataView.objects.create( + name="Demo", + xform=self.dataview_form, + project=self.project, + matches_parent=True, + columns=[], + ) + self.alice = self._create_user("alice", "Yuao8(-)") + + @patch("onadata.libs.models.share_project.safe_delete") + def test_share(self, mock_safe_delete, mock_propagate): + """A project is shared with a user + + Permissions assigned to project, xform and dataview + """ + instance = ShareProject(self.project, self.alice, "manager") + instance.save() + self.assertTrue(ManagerRole.user_has_role(self.alice, self.project)) + self.assertTrue(ManagerRole.user_has_role(self.alice, self.xform)) + self.assertTrue(ManagerRole.user_has_role(self.alice, self.dataview_form)) + mock_propagate.assert_called_once_with(args=[self.project.pk]) + # Cache is invalidated + mock_safe_delete.assert_has_calls( + [ + call(f"ps-project_owner-{self.project.pk}"), + call(f"ps-project_permissions-{self.project.pk}"), + ] + ) + + @patch("onadata.libs.models.share_project.safe_delete") + def 
test_remove(self, mock_safe_delete, mock_propagate): + """A user is removed from a project""" + # Add user + ManagerRole.add(self.alice, self.project) + ManagerRole.add(self.alice, self.xform) + ManagerRole.add(self.alice, self.dataview_form) + + self.assertTrue(ManagerRole.user_has_role(self.alice, self.project)) + self.assertTrue(ManagerRole.user_has_role(self.alice, self.xform)) + self.assertTrue(ManagerRole.user_has_role(self.alice, self.dataview_form)) + # Remove user + instance = ShareProject(self.project, self.alice, "manager", True) + instance.save() + self.assertFalse(ManagerRole.user_has_role(self.alice, self.project)) + self.assertFalse(ManagerRole.user_has_role(self.alice, self.xform)) + self.assertFalse(ManagerRole.user_has_role(self.alice, self.dataview_form)) + mock_propagate.assert_called_once_with(args=[self.project.pk]) + # Cache is invalidated + mock_safe_delete.assert_has_calls( + [ + call(f"ps-project_owner-{self.project.pk}"), + call(f"ps-project_permissions-{self.project.pk}"), + ] + ) From 60ab0a604661168d685f23a5d0548f844a5cb72f Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Wed, 20 Mar 2024 12:11:08 +0300 Subject: [PATCH 155/270] Django 4: initial update requirement packages --- requirements/azure.in | 4 +- requirements/azure.pip | 3 +- requirements/base.pip | 82 +++++++++++------------------ requirements/dev.in | 15 +++--- requirements/dev.pip | 117 +++++++++++++++++++---------------------- requirements/mysql.pip | 1 - requirements/s3.in | 4 +- requirements/s3.pip | 18 +++---- requirements/ses.in | 2 +- requirements/ses.pip | 20 +++---- setup.cfg | 4 +- 11 files changed, 115 insertions(+), 155 deletions(-) delete mode 100644 requirements/mysql.pip diff --git a/requirements/azure.in b/requirements/azure.in index b8799ecf47..fbae9b96a5 100644 --- a/requirements/azure.in +++ b/requirements/azure.in @@ -1,3 +1,3 @@ -django-storages[azure] cryptography>=39.0.1 -django >=3.2.25,<4 +django ==4.0,<5 +django-storages[azure] diff --git 
a/requirements/azure.pip b/requirements/azure.pip index 169326e3e8..8c8828a584 100644 --- a/requirements/azure.pip +++ b/requirements/azure.pip @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with Python 3.10 +# This file is autogenerated by pip-compile with Python 3.11 # by the following command: # # pip-compile --output-file=requirements/azure.pip requirements/azure.in @@ -46,7 +46,6 @@ sqlparse==0.4.4 # via django typing-extensions==4.10.0 # via - # asgiref # azure-core # azure-storage-blob urllib3==2.2.1 diff --git a/requirements/base.pip b/requirements/base.pip index a75fd021b8..53b832d678 100644 --- a/requirements/base.pip +++ b/requirements/base.pip @@ -1,8 +1,8 @@ # -# This file is autogenerated by pip-compile with Python 3.10 +# This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pip-compile --output-file=requirements/base.pip requirements/base.in +# pip-compile --output-file=requirements/base.pip --strip-extras requirements/base.in # -e git+https://github.com/onaio/django-digest.git@6bf61ec08502fd3545d4f2c0838b6cb15e7ffa92#egg=django-digest # via -r requirements/base.in @@ -30,8 +30,6 @@ asgiref==3.7.2 # via # django # django-cors-headers -async-timeout==4.0.3 - # via redis attrs==23.2.0 # via # jsonlines @@ -43,11 +41,9 @@ backoff==1.10.0 # via analytics-python billiard==4.2.0 # via celery -boto3==1.34.61 - # via - # dataflows-tabulator - # tabulator -botocore==1.34.61 +boto3==1.34.66 + # via dataflows-tabulator +botocore==1.34.66 # via # boto3 # s3transfer @@ -67,7 +63,6 @@ chardet==5.2.0 # via # dataflows-tabulator # datapackage - # tabulator charset-normalizer==3.3.2 # via requests click==8.1.7 @@ -79,7 +74,6 @@ click==8.1.7 # dataflows-tabulator # datapackage # tableschema - # tabulator click-didyoumean==0.3.0 # via celery click-plugins==1.1.1 @@ -91,8 +85,10 @@ cryptography==42.0.5 # jwcrypto # onadata # pyjwt -dataflows-tabulator==1.54.0 - # via datapackage +dataflows-tabulator==1.54.1 + # via 
+ # datapackage + # tableschema datapackage==1.15.4 # via pyfloip defusedxml==0.7.1 @@ -104,7 +100,7 @@ deprecated==1.2.14 # via onadata dict2xml==1.7.5 # via onadata -django==3.2.25 +django==4.0 # via # django-activity-stream # django-cors-headers @@ -139,7 +135,7 @@ django-guardian==2.4.0 # onadata django-nose==1.4.7 # via onadata -django-oauth-toolkit==2.3.0 +django-oauth-toolkit==2.1.0 # via onadata django-ordered-model==3.7.4 # via onadata @@ -157,7 +153,7 @@ django-taggit==4.0.0 # via onadata django-templated-email==3.0.1 # via onadata -djangorestframework==3.14.0 +djangorestframework==3.15.0 # via # djangorestframework-csv # djangorestframework-gis @@ -179,7 +175,7 @@ djangorestframework-xml==2.0.0 # via onadata dnspython==2.6.1 # via pymongo -docutils==0.19 +docutils==0.20.1 # via sphinx dpath==2.1.6 # via onadata @@ -206,9 +202,7 @@ httplib2==0.22.0 idna==3.6 # via requests ijson==3.2.3 - # via - # dataflows-tabulator - # tabulator + # via dataflows-tabulator imagesize==1.4.1 # via sphinx inflection==0.5.1 @@ -222,9 +216,7 @@ jmespath==1.0.1 # boto3 # botocore jsonlines==4.0.0 - # via - # dataflows-tabulator - # tabulator + # via dataflows-tabulator jsonpickle==3.0.3 # via onadata jsonpointer==2.4 @@ -240,12 +232,10 @@ jwcrypto==1.5.6 kombu==5.3.5 # via celery linear-tsv==1.1.0 - # via - # dataflows-tabulator - # tabulator + # via dataflows-tabulator lxml==5.1.0 # via onadata -markdown==3.5.2 +markdown==3.6 # via onadata markupsafe==2.1.5 # via jinja2 @@ -266,7 +256,6 @@ openpyxl==3.0.9 # dataflows-tabulator # onadata # pyxform - # tabulator packaging==24.0 # via sphinx paho-mqtt==2.0.0 @@ -289,7 +278,7 @@ pycparser==2.21 # via cffi pygments==2.17.2 # via sphinx -pyjwt[crypto]==2.8.0 +pyjwt==2.8.0 # via # ona-oidc # onadata @@ -311,9 +300,7 @@ python-memcached==1.62 # via onadata pytz==2024.1 # via - # django # django-query-builder - # djangorestframework # fleming # onadata pyxform==1.12.2 @@ -324,7 +311,7 @@ recaptcha-client==1.0.6 # via onadata 
redis==5.0.3 # via django-redis -referencing==0.33.0 +referencing==0.34.0 # via # jsonschema # jsonschema-specifications @@ -340,7 +327,6 @@ requests==2.31.0 # requests-oauthlib # sphinx # tableschema - # tabulator requests-oauthlib==1.4.0 # via google-auth-oauthlib rfc3986==2.0.0 @@ -351,9 +337,9 @@ rpds-py==0.18.0 # referencing rsa==4.9 # via google-auth -s3transfer==0.10.0 +s3transfer==0.10.1 # via boto3 -sentry-sdk==1.41.0 +sentry-sdk==1.42.0 # via onadata simplejson==3.19.2 # via onadata @@ -367,38 +353,32 @@ six==1.16.0 # linear-tsv # python-dateutil # tableschema - # tabulator snowballstemmer==2.2.0 # via sphinx -sphinx==6.2.1 +sphinx==7.2.6 # via onadata -sphinxcontrib-applehelp==1.0.5 +sphinxcontrib-applehelp==1.0.8 # via sphinx -sphinxcontrib-devhelp==1.0.3 +sphinxcontrib-devhelp==1.0.6 # via sphinx -sphinxcontrib-htmlhelp==2.0.2 +sphinxcontrib-htmlhelp==2.0.5 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.4 +sphinxcontrib-qthelp==1.0.7 # via sphinx -sphinxcontrib-serializinghtml==1.1.6 +sphinxcontrib-serializinghtml==1.1.10 # via sphinx sqlalchemy==2.0.28 - # via - # dataflows-tabulator - # tabulator + # via dataflows-tabulator sqlparse==0.4.4 # via # django # django-debug-toolbar -tableschema==1.20.7 +tableschema==1.20.9 # via datapackage -tabulator==1.53.5 - # via tableschema typing-extensions==4.10.0 # via - # asgiref # jwcrypto # sqlalchemy tzdata==2024.1 @@ -411,8 +391,7 @@ unicodecsv==0.14.1 # datapackage # onadata # tableschema - # tabulator -urllib3==2.0.7 +urllib3==2.2.1 # via # botocore # requests @@ -432,7 +411,6 @@ xlrd==2.0.1 # via # dataflows-tabulator # pyxform - # tabulator xlwt==1.3.0 # via onadata xmltodict==0.13.0 diff --git a/requirements/dev.in b/requirements/dev.in index f9bc526712..767475d4eb 100644 --- a/requirements/dev.in +++ b/requirements/dev.in @@ -2,15 +2,16 @@ -r base.in django-extensions +flake8 +flaky +httmock ipdb -pylint -pylint-django -yapf isort -prospector -httmock mock +pre-commit 
+prospector +pylint +pylint-django requests_mock tblib -flake8 -flaky +yapf diff --git a/requirements/dev.pip b/requirements/dev.pip index cb50c8ccdb..fc14b8dfad 100644 --- a/requirements/dev.pip +++ b/requirements/dev.pip @@ -1,8 +1,8 @@ # -# This file is autogenerated by pip-compile with Python 3.10 +# This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pip-compile --output-file=requirements/dev.pip requirements/dev.in +# pip-compile --output-file=requirements/dev.pip --strip-extras requirements/dev.in # -e git+https://github.com/onaio/django-digest.git@6bf61ec08502fd3545d4f2c0838b6cb15e7ffa92#egg=django-digest # via -r requirements/base.in @@ -38,8 +38,6 @@ astroid==2.15.8 # requirements-detector asttokens==2.4.1 # via stack-data -async-timeout==4.0.3 - # via redis attrs==23.2.0 # via # jsonlines @@ -51,11 +49,9 @@ backoff==1.10.0 # via analytics-python billiard==4.2.0 # via celery -boto3==1.34.61 - # via - # dataflows-tabulator - # tabulator -botocore==1.34.61 +boto3==1.34.66 + # via dataflows-tabulator +botocore==1.34.66 # via # boto3 # s3transfer @@ -71,11 +67,12 @@ certifi==2024.2.2 # sentry-sdk cffi==1.16.0 # via cryptography +cfgv==3.4.0 + # via pre-commit chardet==5.2.0 # via # dataflows-tabulator # datapackage - # tabulator charset-normalizer==3.3.2 # via requests click==8.1.7 @@ -87,7 +84,6 @@ click==8.1.7 # dataflows-tabulator # datapackage # tableschema - # tabulator click-didyoumean==0.3.0 # via celery click-plugins==1.1.1 @@ -99,8 +95,10 @@ cryptography==42.0.5 # jwcrypto # onadata # pyjwt -dataflows-tabulator==1.54.0 - # via datapackage +dataflows-tabulator==1.54.1 + # via + # datapackage + # tableschema datapackage==1.15.4 # via pyfloip decorator==5.1.1 @@ -118,7 +116,9 @@ dict2xml==1.7.5 # via onadata dill==0.3.8 # via pylint -django==3.2.25 +distlib==0.3.8 + # via virtualenv +django==4.0 # via # django-activity-stream # django-cors-headers @@ -156,7 +156,7 @@ django-guardian==2.4.0 # onadata 
django-nose==1.4.7 # via onadata -django-oauth-toolkit==2.3.0 +django-oauth-toolkit==2.1.0 # via onadata django-ordered-model==3.7.4 # via onadata @@ -174,7 +174,7 @@ django-taggit==4.0.0 # via onadata django-templated-email==3.0.1 # via onadata -djangorestframework==3.14.0 +djangorestframework==3.15.0 # via # djangorestframework-csv # djangorestframework-gis @@ -196,7 +196,7 @@ djangorestframework-xml==2.0.0 # via onadata dnspython==2.6.1 # via pymongo -docutils==0.19 +docutils==0.20.1 # via sphinx dodgy==0.2.1 # via prospector @@ -206,10 +206,10 @@ elaphe3==0.2.0 # via onadata et-xmlfile==1.1.0 # via openpyxl -exceptiongroup==1.2.0 - # via ipython executing==2.0.1 # via stack-data +filelock==3.13.1 + # via virtualenv flake8==3.8.4 # via # -r requirements/dev.in @@ -236,12 +236,12 @@ httmock==1.4.0 # via -r requirements/dev.in httplib2==0.22.0 # via onadata +identify==2.5.35 + # via pre-commit idna==3.6 # via requests ijson==3.2.3 - # via - # dataflows-tabulator - # tabulator + # via dataflows-tabulator imagesize==1.4.1 # via sphinx importlib-metadata==7.0.2 @@ -267,9 +267,7 @@ jmespath==1.0.1 # boto3 # botocore jsonlines==4.0.0 - # via - # dataflows-tabulator - # tabulator + # via dataflows-tabulator jsonpickle==3.0.3 # via onadata jsonpointer==2.4 @@ -287,12 +285,10 @@ kombu==5.3.5 lazy-object-proxy==1.10.0 # via astroid linear-tsv==1.1.0 - # via - # dataflows-tabulator - # tabulator + # via dataflows-tabulator lxml==5.1.0 # via onadata -markdown==3.5.2 +markdown==3.6 # via onadata markupsafe==2.1.5 # via jinja2 @@ -303,12 +299,14 @@ mccabe==0.6.1 # flake8 # prospector # pylint -mock==4.0.3 +mock==5.1.0 # via -r requirements/dev.in modilabs-python-utils==0.1.5 # via onadata monotonic==1.6 # via analytics-python +nodeenv==1.8.0 + # via pre-commit nose==1.3.7 # via django-nose numpy==1.26.4 @@ -322,10 +320,8 @@ openpyxl==3.0.9 # dataflows-tabulator # onadata # pyxform - # tabulator packaging==24.0 # via - # prospector # requirements-detector # sphinx 
paho-mqtt==2.0.0 @@ -343,7 +339,10 @@ pillow==10.2.0 platformdirs==4.2.0 # via # pylint + # virtualenv # yapf +pre-commit==3.6.2 + # via -r requirements/dev.in prompt-toolkit==3.0.43 # via # click-repl @@ -378,7 +377,7 @@ pygments==2.17.2 # via # ipython # sphinx -pyjwt[crypto]==2.8.0 +pyjwt==2.8.0 # via # ona-oidc # onadata @@ -422,9 +421,7 @@ python-memcached==1.62 # via onadata pytz==2024.1 # via - # django # django-query-builder - # djangorestframework # fleming # onadata pyxform==1.12.2 @@ -432,12 +429,14 @@ pyxform==1.12.2 # onadata # pyfloip pyyaml==6.0.1 - # via prospector + # via + # pre-commit + # prospector recaptcha-client==1.0.6 # via onadata redis==5.0.3 # via django-redis -referencing==0.33.0 +referencing==0.34.0 # via # jsonschema # jsonschema-specifications @@ -455,7 +454,6 @@ requests==2.31.0 # requests-oauthlib # sphinx # tableschema - # tabulator requests-mock==1.11.0 # via -r requirements/dev.in requests-oauthlib==1.4.0 @@ -470,11 +468,11 @@ rpds-py==0.18.0 # referencing rsa==4.9 # via google-auth -s3transfer==0.10.0 +s3transfer==0.10.1 # via boto3 semver==3.0.2 # via requirements-detector -sentry-sdk==1.41.0 +sentry-sdk==1.42.0 # via onadata setoptconf==0.3.0 # via prospector @@ -492,48 +490,40 @@ six==1.16.0 # python-dateutil # requests-mock # tableschema - # tabulator snowballstemmer==2.2.0 # via # pydocstyle # sphinx -sphinx==6.2.1 +sphinx==7.2.6 # via onadata -sphinxcontrib-applehelp==1.0.5 +sphinxcontrib-applehelp==1.0.8 # via sphinx -sphinxcontrib-devhelp==1.0.3 +sphinxcontrib-devhelp==1.0.6 # via sphinx -sphinxcontrib-htmlhelp==2.0.2 +sphinxcontrib-htmlhelp==2.0.5 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.4 +sphinxcontrib-qthelp==1.0.7 # via sphinx -sphinxcontrib-serializinghtml==1.1.6 +sphinxcontrib-serializinghtml==1.1.10 # via sphinx sqlalchemy==2.0.28 - # via - # dataflows-tabulator - # tabulator + # via dataflows-tabulator sqlparse==0.4.4 # via # django # django-debug-toolbar stack-data==0.6.3 
# via ipython -tableschema==1.20.7 +tableschema==1.20.9 # via datapackage -tabulator==1.53.5 - # via tableschema tblib==3.0.0 # via -r requirements/dev.in toml==0.10.2 # via requirements-detector tomli==2.0.1 - # via - # ipdb - # pylint - # yapf + # via yapf tomlkit==0.12.4 # via pylint traitlets==5.14.2 @@ -542,8 +532,6 @@ traitlets==5.14.2 # matplotlib-inline typing-extensions==4.10.0 # via - # asgiref - # astroid # jwcrypto # sqlalchemy tzdata==2024.1 @@ -556,8 +544,7 @@ unicodecsv==0.14.1 # datapackage # onadata # tableschema - # tabulator -urllib3==2.0.7 +urllib3==2.2.1 # via # botocore # requests @@ -569,6 +556,8 @@ vine==5.1.0 # amqp # celery # kombu +virtualenv==20.25.1 + # via pre-commit wcwidth==0.2.13 # via prompt-toolkit wrapt==1.16.0 @@ -579,12 +568,14 @@ xlrd==2.0.1 # via # dataflows-tabulator # pyxform - # tabulator xlwt==1.3.0 # via onadata xmltodict==0.13.0 # via onadata yapf==0.40.2 # via -r requirements/dev.in -zipp==3.18.0 +zipp==3.18.1 # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/mysql.pip b/requirements/mysql.pip deleted file mode 100644 index 658a2bfda2..0000000000 --- a/requirements/mysql.pip +++ /dev/null @@ -1 +0,0 @@ -MySQL-python>=1.2.2 \ No newline at end of file diff --git a/requirements/s3.in b/requirements/s3.in index 108712204d..9dcb6b3af8 100644 --- a/requirements/s3.in +++ b/requirements/s3.in @@ -1,3 +1,3 @@ -django-storages -django >=3.2.25,<4 boto3 +django ==4.0,<5 +django-storages diff --git a/requirements/s3.pip b/requirements/s3.pip index d03912b6b1..8e68f1a5fa 100644 --- a/requirements/s3.pip +++ b/requirements/s3.pip @@ -1,18 +1,18 @@ # -# This file is autogenerated by pip-compile with Python 3.10 +# This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pip-compile --output-file=requirements/s3.pip requirements/s3.in +# pip-compile --output-file=requirements/s3.pip --strip-extras 
requirements/s3.in # asgiref==3.7.2 # via django -boto3==1.34.61 +boto3==1.34.66 # via -r requirements/s3.in -botocore==1.34.61 +botocore==1.34.66 # via # boto3 # s3transfer -django==3.2.25 +django==4.0 # via # -r requirements/s3.in # django-storages @@ -24,15 +24,11 @@ jmespath==1.0.1 # botocore python-dateutil==2.9.0.post0 # via botocore -pytz==2024.1 - # via django -s3transfer==0.10.0 +s3transfer==0.10.1 # via boto3 six==1.16.0 # via python-dateutil sqlparse==0.4.4 # via django -typing-extensions==4.10.0 - # via asgiref -urllib3==2.0.7 +urllib3==2.2.1 # via botocore diff --git a/requirements/ses.in b/requirements/ses.in index 4825ec8368..d0a01aa09d 100644 --- a/requirements/ses.in +++ b/requirements/ses.in @@ -1,3 +1,3 @@ boto -django >=3.2.25,<4 +django ==4.0,<5 django-ses diff --git a/requirements/ses.pip b/requirements/ses.pip index aee2de5254..5162be5bb4 100644 --- a/requirements/ses.pip +++ b/requirements/ses.pip @@ -1,20 +1,20 @@ # -# This file is autogenerated by pip-compile with Python 3.10 +# This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pip-compile --output-file=requirements/ses.pip requirements/ses.in +# pip-compile --output-file=requirements/ses.pip --strip-extras requirements/ses.in # asgiref==3.7.2 # via django boto==2.49.0 # via -r requirements/ses.in -boto3==1.34.61 +boto3==1.34.66 # via django-ses -botocore==1.34.61 +botocore==1.34.66 # via # boto3 # s3transfer -django==3.2.25 +django==4.0 # via # -r requirements/ses.in # django-ses @@ -27,16 +27,12 @@ jmespath==1.0.1 python-dateutil==2.9.0.post0 # via botocore pytz==2024.1 - # via - # django - # django-ses -s3transfer==0.10.0 + # via django-ses +s3transfer==0.10.1 # via boto3 six==1.16.0 # via python-dateutil sqlparse==0.4.4 # via django -typing-extensions==4.10.0 - # via asgiref -urllib3==2.0.7 +urllib3==2.2.1 # via botocore diff --git a/setup.cfg b/setup.cfg index 70a7bc93e6..a32edc43a3 100644 --- a/setup.cfg +++ b/setup.cfg @@ -27,7 +27,7 @@ 
tests_require = mock requests-mock install_requires = - Django>=3.2.25,<4 + Django==4.0,<5 django-guardian django-registration-redux django-templated-email @@ -68,7 +68,7 @@ install_requires = dict2xml lxml>=4.9.1 #pyxform - pyxform + pyxform==1.12.2 #memcached support pylibmc python-memcached From 72f2a280b2fb7b0178d0eb435dd08e62cc346c75 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Wed, 20 Mar 2024 13:47:31 +0300 Subject: [PATCH 156/270] Fix DeprecationWarning: invalid escape sequence Declare the regex pattern as a raw string. --- onadata/apps/logger/models/xform.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/onadata/apps/logger/models/xform.py b/onadata/apps/logger/models/xform.py index c0743e3222..dd3076718a 100644 --- a/onadata/apps/logger/models/xform.py +++ b/onadata/apps/logger/models/xform.py @@ -315,11 +315,11 @@ def set_uuid_in_xml(self, file_name=None): # http://ronrothman.com/public/leftbraned/xml-dom-minidom-toprettyxml-\ # and-silly-whitespace/ text_re = re.compile(r"(>)\n\s*(\s[^<>\s].*?)\n\s*(\s)\n( )*") + output_re = re.compile(r"\n.*()\n( )*") pretty_xml = text_re.sub( lambda m: "".join(m.group(1, 2, 3)), self.xml.decode("utf-8") ) - inline_output = output_re.sub("\g<1>", pretty_xml) # noqa + inline_output = output_re.sub(r"\g<1>", pretty_xml) # noqa inline_output = re.compile(r"").sub( "", inline_output ) From 4f7aa4b4fbbf8064d8722af5a5a107b9413f3422 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Wed, 20 Mar 2024 15:56:05 +0300 Subject: [PATCH 157/270] Django 4: remove pytz --- onadata/apps/logger/models/instance.py | 12 +++------- onadata/apps/logger/models/xform.py | 8 +------ .../apps/logger/tests/models/test_instance.py | 24 +++++++++---------- 3 files changed, 16 insertions(+), 28 deletions(-) diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index dfe6337b55..81201f160f 100644 --- a/onadata/apps/logger/models/instance.py +++ 
b/onadata/apps/logger/models/instance.py @@ -20,7 +20,6 @@ from django.utils import timezone from django.utils.translation import gettext as _ -import pytz from deprecated import deprecated from taggit.managers import TaggableManager @@ -188,12 +187,7 @@ def _update_submission_count_for_today( form_id: int, incr: bool = True, date_created=None ): # Track submissions made today - current_timzone_name = timezone.get_current_timezone_name() - current_timezone = pytz.timezone(current_timzone_name) - today = datetime.today() - current_date = current_timezone.localize( - datetime(today.year, today.month, today.day) - ).isoformat() + current_date = timezone.localtime().isoformat() date_cache_key = f"{XFORM_SUBMISSION_COUNT_FOR_DAY_DATE}" f"{form_id}" count_cache_key = f"{XFORM_SUBMISSION_COUNT_FOR_DAY}{form_id}" @@ -201,8 +195,8 @@ def _update_submission_count_for_today( cache.set(date_cache_key, current_date, 86400) if date_created: - date_created = current_timezone.localize( - datetime(date_created.year, date_created.month, date_created.day) + date_created = date_created.astimezone( + timezone.get_current_timezone() ).isoformat() current_count = cache.get(count_cache_key) diff --git a/onadata/apps/logger/models/xform.py b/onadata/apps/logger/models/xform.py index dd3076718a..195b304de5 100644 --- a/onadata/apps/logger/models/xform.py +++ b/onadata/apps/logger/models/xform.py @@ -24,7 +24,6 @@ from django.utils.translation import gettext as _ from django.utils.translation import gettext_lazy -import pytz from guardian.models import GroupObjectPermissionBase, UserObjectPermissionBase from pyxform import SurveyElementBuilder, constants, create_survey_element_from_dict from pyxform.question import Question @@ -1159,12 +1158,7 @@ def submission_count(self, force_update=False): @property def submission_count_for_today(self): """Returns the submissions count for the current day.""" - current_timzone_name = timezone.get_current_timezone_name() - current_timezone = 
pytz.timezone(current_timzone_name) - today = datetime.today() - current_date = current_timezone.localize( - datetime(today.year, today.month, today.day) - ).isoformat() + current_date = timezone.localtime().isoformat() count = ( cache.get(f"{XFORM_SUBMISSION_COUNT_FOR_DAY}{self.id}") if cache.get(f"{XFORM_SUBMISSION_COUNT_FOR_DAY_DATE}{self.id}") diff --git a/onadata/apps/logger/tests/models/test_instance.py b/onadata/apps/logger/tests/models/test_instance.py index 09cef2fcb9..5dcf01d63c 100644 --- a/onadata/apps/logger/tests/models/test_instance.py +++ b/onadata/apps/logger/tests/models/test_instance.py @@ -1,15 +1,18 @@ +# -*- coding: utf-8 -*- +""" +Test Instance model. +""" import os -import pytz -from datetime import datetime -from datetime import timedelta +from datetime import datetime, timedelta, timezone from django.http.request import HttpRequest +from django.test import override_settings from django.utils.timezone import utc + from django_digest.test import DigestAuth -from django.test import override_settings -from mock import patch, Mock +from mock import Mock, patch -from onadata.apps.logger.models import XForm, Instance, SubmissionReview +from onadata.apps.logger.models import Instance, SubmissionReview, XForm from onadata.apps.logger.models.instance import ( get_id_string_from_xml_str, numeric_checker, @@ -23,10 +26,7 @@ from onadata.libs.serializers.submission_review_serializer import ( SubmissionReviewSerializer, ) -from onadata.libs.utils.common_tags import ( - MONGO_STRFTIME, - SUBMITTED_BY, -) +from onadata.libs.utils.common_tags import MONGO_STRFTIME, SUBMITTED_BY class TestInstance(TestBase): @@ -67,7 +67,7 @@ def test_stores_json(self): def test_updates_json_date_modified_on_save(self): """_date_modified in `json` field is updated on save""" - old_mocked_now = datetime(2023, 9, 21, 8, 27, 0, tzinfo=pytz.utc) + old_mocked_now = datetime(2023, 9, 21, 8, 27, 0, tzinfo=timezone.utc) with patch("django.utils.timezone.now", 
Mock(return_value=old_mocked_now)): self._publish_transportation_form_and_submit_instance() @@ -79,7 +79,7 @@ def test_updates_json_date_modified_on_save(self): ) # After saving the date_modified in json should update - mocked_now = datetime(2023, 9, 21, 9, 3, 0, tzinfo=pytz.utc) + mocked_now = datetime(2023, 9, 21, 9, 3, 0, tzinfo=timezone.utc) with patch("django.utils.timezone.now", Mock(return_value=mocked_now)): instance.save() From 4e7a9b176568a3bd9c66c886039da5e214ecee61 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Wed, 20 Mar 2024 16:00:12 +0300 Subject: [PATCH 158/270] Django 4: update azure requirement packages --- requirements/azure.pip | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/requirements/azure.pip b/requirements/azure.pip index 8c8828a584..91dfe77194 100644 --- a/requirements/azure.pip +++ b/requirements/azure.pip @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pip-compile --output-file=requirements/azure.pip requirements/azure.in +# pip-compile --output-file=requirements/azure.pip --strip-extras requirements/azure.in # asgiref==3.7.2 # via django @@ -22,11 +22,11 @@ cryptography==42.0.5 # via # -r requirements/azure.in # azure-storage-blob -django==3.2.25 +django==4.0 # via # -r requirements/azure.in # django-storages -django-storages[azure]==1.14.2 +django-storages==1.14.2 # via -r requirements/azure.in idna==3.6 # via requests @@ -34,8 +34,6 @@ isodate==0.6.1 # via azure-storage-blob pycparser==2.21 # via cffi -pytz==2024.1 - # via django requests==2.31.0 # via azure-core six==1.16.0 From 95a10704c75524a2cec091673211f428694ef5d9 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Thu, 21 Mar 2024 15:41:14 +0300 Subject: [PATCH 159/270] Django 4: remove deprecated pytz --- .../api/tests/viewsets/test_data_viewset.py | 19 ++++---- .../tests/viewsets/test_project_viewset.py | 26 +++++------ .../api/tests/viewsets/test_xform_viewset.py | 7 ++- 
onadata/apps/api/viewsets/xform_viewset.py | 7 +-- onadata/apps/logger/models/instance.py | 7 ++- .../apps/logger/tests/models/test_instance.py | 6 +-- .../tests/models/test_project_invitation.py | 22 +++++----- onadata/apps/logger/views.py | 19 ++++---- onadata/apps/main/tests/test_process.py | 8 ++-- onadata/apps/main/tests/test_signals.py | 10 ++--- onadata/apps/viewer/views.py | 14 +++--- onadata/libs/mixins/openrosa_headers_mixin.py | 7 +-- onadata/libs/renderers/renderers.py | 7 +-- onadata/libs/utils/logger_tools.py | 4 +- requirements/base.in | 2 +- requirements/base.pip | 13 +++--- requirements/dev.in | 3 +- requirements/dev.pip | 44 +++++++++++-------- setup.cfg | 1 - 19 files changed, 107 insertions(+), 119 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_data_viewset.py b/onadata/apps/api/tests/viewsets/test_data_viewset.py index bef0b1a0bf..f33dc18ee1 100644 --- a/onadata/apps/api/tests/viewsets/test_data_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_data_viewset.py @@ -4,15 +4,14 @@ """ from __future__ import unicode_literals +import csv import datetime import json import logging import os -import pytz -import csv -from io import StringIO from builtins import open from datetime import timedelta +from io import StringIO from tempfile import NamedTemporaryFile from django.conf import settings @@ -30,7 +29,7 @@ from django_digest.test import DigestAuth from flaky import flaky from httmock import HTTMock, urlmatch -from mock import patch, Mock +from mock import Mock, patch from onadata.apps.api.tests.viewsets.test_abstract_viewset import ( TestAbstractViewSet, @@ -2374,7 +2373,7 @@ def test_geojson_format(self): def test_geotraces_in_repeats(self): # publish sample geotrace submissions md = """ - | survey | + | survey | | | type | name | label | required | calculation | | | begin repeat | segment | Waterway trace | | | | | calculate | point_position | | | position(..)| @@ -2436,7 +2435,7 @@ def test_geotraces_in_repeats(self): def 
test_geoshapes_in_repeats(self): # publish sample geoshape submissions md = """ - | survey | + | survey | | | type | name | label | required | calculation | | | begin repeat | segment | Waterway trace | | | | | calculate | point_position | | | position(..)| @@ -2507,7 +2506,7 @@ def test_geoshapes_in_repeats(self): def test_empty_geotraces_in_repeats(self): # publish sample geotrace submissions md = """ - | survey | + | survey | | | type | name | label | required | calculation | | | begin repeat | segment | Waterway trace | | | | | calculate | point_position | | | position(..)| @@ -2551,7 +2550,7 @@ def test_empty_geotraces_in_repeats(self): [36.805943, -1.268118], [36.808822, -1.269405], ], - }, + }, "properties": {"id": instances[1].pk, "xform": self.xform.pk}, }, ], @@ -2561,7 +2560,7 @@ def test_empty_geotraces_in_repeats(self): def test_empty_geoshapes_in_repeats(self): # publish sample geoshape submissions md = """ - | survey | + | survey | | | type | name | label | required | calculation | | | begin repeat | segment | Waterway trace | | | | | calculate | point_position | | | position(..)| @@ -3518,7 +3517,7 @@ def test_data_list_xml_format(self): """Test DataViewSet list XML""" # create submission media_file = "1335783522563.jpg" - mocked_now = datetime.datetime(2023, 9, 20, 12, 49, 0, tzinfo=pytz.utc) + mocked_now = datetime.datetime(2023, 9, 20, 12, 49, 0, tzinfo=timezone.utc) with patch("django.utils.timezone.now", Mock(return_value=mocked_now)): self._make_submission_w_attachment( diff --git a/onadata/apps/api/tests/viewsets/test_project_viewset.py b/onadata/apps/api/tests/viewsets/test_project_viewset.py index f7b847aa4a..2a787b6f6c 100644 --- a/onadata/apps/api/tests/viewsets/test_project_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_project_viewset.py @@ -3,24 +3,24 @@ Test ProjectViewSet module. 
""" import json -import pytz import os - from collections import OrderedDict -from six import iteritems -from operator import itemgetter from datetime import datetime +from operator import itemgetter from django.conf import settings -from django.db.models import Q +from django.contrib.auth import get_user_model from django.core.cache import cache +from django.db.models import Q from django.test import override_settings -from django.contrib.auth import get_user_model -from rest_framework.authtoken.models import Token -from httmock import HTTMock, urlmatch -from mock import MagicMock, patch, Mock +from django.utils import timezone + import dateutil.parser import requests +from httmock import HTTMock, urlmatch +from mock import MagicMock, Mock, patch +from rest_framework.authtoken.models import Token +from six import iteritems from onadata.apps.api import tools from onadata.apps.api.tests.viewsets.test_abstract_viewset import ( @@ -34,11 +34,10 @@ from onadata.apps.api.viewsets.project_viewset import ProjectViewSet from onadata.apps.api.viewsets.team_viewset import TeamViewSet from onadata.apps.api.viewsets.xform_viewset import XFormViewSet -from onadata.apps.logger.models import Project, XForm, XFormVersion, ProjectInvitation +from onadata.apps.logger.models import Project, ProjectInvitation, XForm, XFormVersion from onadata.apps.main.models import MetaData from onadata.libs import permissions as role from onadata.libs.models.share_project import ShareProject -from onadata.libs.utils.cache_tools import PROJ_OWNER_CACHE, safe_key from onadata.libs.permissions import ( ROLES_ORDERED, DataEntryMinorRole, @@ -55,6 +54,7 @@ BaseProjectSerializer, ProjectSerializer, ) +from onadata.libs.utils.cache_tools import PROJ_OWNER_CACHE, safe_key User = get_user_model() @@ -3320,7 +3320,7 @@ def test_revoke_invite(self): ) post_data = {"invitation_id": invitation.pk} request = self.factory.post("/", data=post_data, **self.extra) - mocked_now = datetime(2023, 5, 25, 10, 51, 0, 
tzinfo=pytz.utc) + mocked_now = datetime(2023, 5, 25, 10, 51, 0, tzinfo=timezone.utc) with patch("django.utils.timezone.now", Mock(return_value=mocked_now)): response = self.view(request, pk=self.project.pk) @@ -3423,7 +3423,7 @@ def test_resend_invite(self, mock_send_mail): email="jandoe@example.com", role="editor" ) post_data = {"invitation_id": invitation.pk} - mocked_now = datetime(2023, 5, 25, 10, 51, 0, tzinfo=pytz.utc) + mocked_now = datetime(2023, 5, 25, 10, 51, 0, tzinfo=timezone.utc) request = self.factory.post("/", data=post_data, **self.extra) with patch("django.utils.timezone.now", Mock(return_value=mocked_now)): diff --git a/onadata/apps/api/tests/viewsets/test_xform_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_viewset.py index ae55a4d1bc..049a0b12cd 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_viewset.py @@ -8,7 +8,6 @@ import csv import json import os -import pytz import re from builtins import open from collections import OrderedDict @@ -33,7 +32,6 @@ from flaky import flaky from httmock import HTTMock from mock import Mock, patch -from onadata.libs.utils.api_export_tools import get_existing_file_format from rest_framework import status from onadata.apps.api.tests.mocked_data import ( @@ -58,8 +56,8 @@ from onadata.apps.api.viewsets.xform_viewset import XFormViewSet from onadata.apps.logger.models import Attachment, Instance, Project, XForm from onadata.apps.logger.models.xform_version import XFormVersion -from onadata.apps.logger.xform_instance_parser import XLSFormError from onadata.apps.logger.views import delete_xform +from onadata.apps.logger.xform_instance_parser import XLSFormError from onadata.apps.main.models import MetaData from onadata.apps.messaging.constants import FORM_UPDATED, XFORM from onadata.apps.viewer.models import Export @@ -78,6 +76,7 @@ XFormBaseSerializer, XFormSerializer, ) +from onadata.libs.utils.api_export_tools import 
get_existing_file_format from onadata.libs.utils.cache_tools import ( ENKETO_URL_CACHE, PROJ_FORMS_CACHE, @@ -3840,7 +3839,7 @@ def test_csv_export_with_win_excel_utf8(self): self._publish_xls_form_to_project(xlsform_path=xlsform_path) # submit one hxl instance _submission_time = parse_datetime("2013-02-18 15:54:01Z") - mock_date_modified = datetime(2023, 9, 20, 11, 41, 0, tzinfo=pytz.utc) + mock_date_modified = datetime(2023, 9, 20, 11, 41, 0, tzinfo=utc) with patch( "django.utils.timezone.now", Mock(return_value=mock_date_modified) diff --git a/onadata/apps/api/viewsets/xform_viewset.py b/onadata/apps/api/viewsets/xform_viewset.py index 749b4a74d4..fb79f2ad89 100644 --- a/onadata/apps/api/viewsets/xform_viewset.py +++ b/onadata/apps/api/viewsets/xform_viewset.py @@ -76,17 +76,14 @@ XFormVersionListSerializer, ) from onadata.libs.utils.api_export_tools import ( + _get_export_type, custom_response_handler, get_async_response, get_existing_file_format, process_async_export, response_for_format, - _get_export_type, -) -from onadata.libs.utils.cache_tools import ( - PROJ_OWNER_CACHE, - safe_delete, ) +from onadata.libs.utils.cache_tools import PROJ_OWNER_CACHE, safe_delete from onadata.libs.utils.common_tools import json_stream from onadata.libs.utils.csv_import import ( get_async_csv_submission_status, diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index 81201f160f..5ee33bc8be 100644 --- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -6,7 +6,6 @@ import sys from datetime import datetime -from celery import current_task from django.conf import settings from django.contrib.auth import get_user_model from django.contrib.gis.db import models @@ -20,6 +19,7 @@ from django.utils import timezone from django.utils.translation import gettext as _ +from celery import current_task from deprecated import deprecated from taggit.managers import TaggableManager @@ -32,8 +32,6 @@ 
get_uuid_from_xml, ) from onadata.celeryapp import app -from onadata.libs.utils.common_tools import report_exception -from onadata.libs.utils.model_tools import queryset_iterator from onadata.libs.data.query import get_numeric_fields from onadata.libs.utils.cache_tools import ( DATAVIEW_COUNT, @@ -74,8 +72,9 @@ XFORM_ID, XFORM_ID_STRING, ) +from onadata.libs.utils.common_tools import report_exception from onadata.libs.utils.dict_tools import get_values_matching_key -from onadata.libs.utils.model_tools import set_uuid +from onadata.libs.utils.model_tools import queryset_iterator, set_uuid from onadata.libs.utils.timing import calculate_duration # pylint: disable=invalid-name diff --git a/onadata/apps/logger/tests/models/test_instance.py b/onadata/apps/logger/tests/models/test_instance.py index 5dcf01d63c..89222d97bc 100644 --- a/onadata/apps/logger/tests/models/test_instance.py +++ b/onadata/apps/logger/tests/models/test_instance.py @@ -3,7 +3,7 @@ Test Instance model. """ import os -from datetime import datetime, timedelta, timezone +from datetime import datetime, timedelta from django.http.request import HttpRequest from django.test import override_settings @@ -67,7 +67,7 @@ def test_stores_json(self): def test_updates_json_date_modified_on_save(self): """_date_modified in `json` field is updated on save""" - old_mocked_now = datetime(2023, 9, 21, 8, 27, 0, tzinfo=timezone.utc) + old_mocked_now = datetime(2023, 9, 21, 8, 27, 0, tzinfo=utc) with patch("django.utils.timezone.now", Mock(return_value=old_mocked_now)): self._publish_transportation_form_and_submit_instance() @@ -79,7 +79,7 @@ def test_updates_json_date_modified_on_save(self): ) # After saving the date_modified in json should update - mocked_now = datetime(2023, 9, 21, 9, 3, 0, tzinfo=timezone.utc) + mocked_now = datetime(2023, 9, 21, 9, 3, 0, tzinfo=utc) with patch("django.utils.timezone.now", Mock(return_value=mocked_now)): instance.save() diff --git 
a/onadata/apps/logger/tests/models/test_project_invitation.py b/onadata/apps/logger/tests/models/test_project_invitation.py index 0c0646c1e1..dcca3590cf 100644 --- a/onadata/apps/logger/tests/models/test_project_invitation.py +++ b/onadata/apps/logger/tests/models/test_project_invitation.py @@ -2,8 +2,10 @@ Tests for ProjectInvitation model """ from datetime import datetime -from unittest.mock import patch, Mock -import pytz +from unittest.mock import Mock, patch + +from django.utils import timezone + from onadata.apps.logger.models import ProjectInvitation from onadata.apps.main.tests.test_base import TestBase from onadata.libs.utils.user_auth import get_user_default_project @@ -20,10 +22,10 @@ def setUp(self) -> None: def test_creation(self): """We can create a ProjectInvitation object""" - created_at = datetime(2023, 5, 17, 14, 21, 0, tzinfo=pytz.utc) - resent_at = datetime(2023, 5, 17, 14, 24, 0, tzinfo=pytz.utc) - accepted_at = datetime(2023, 5, 17, 14, 25, 0, tzinfo=pytz.utc) - revoked_at = datetime(2023, 5, 17, 14, 26, 0, tzinfo=pytz.utc) + created_at = datetime(2023, 5, 17, 14, 21, 0, tzinfo=timezone.utc) + resent_at = datetime(2023, 5, 17, 14, 24, 0, tzinfo=timezone.utc) + accepted_at = datetime(2023, 5, 17, 14, 25, 0, tzinfo=timezone.utc) + revoked_at = datetime(2023, 5, 17, 14, 26, 0, tzinfo=timezone.utc) jane = self._create_user("jane", "1234") with patch("django.utils.timezone.now", Mock(return_value=created_at)): @@ -67,7 +69,7 @@ def test_defaults(self): def test_revoke(self): """Calling revoke method works correctly""" - mocked_now = datetime(2023, 5, 25, 11, 17, 0, tzinfo=pytz.utc) + mocked_now = datetime(2023, 5, 25, 11, 17, 0, tzinfo=timezone.utc) with patch("django.utils.timezone.now", Mock(return_value=mocked_now)): invitation = ProjectInvitation.objects.create( @@ -82,7 +84,7 @@ def test_revoke(self): self.assertEqual(invitation.status, ProjectInvitation.Status.REVOKED) # setting revoked_at explicitly works - revoked_at = datetime(2023, 5, 10, 
11, 17, 0, tzinfo=pytz.utc) + revoked_at = datetime(2023, 5, 10, 11, 17, 0, tzinfo=timezone.utc) invitation = ProjectInvitation.objects.create( email="john@example.com", project=self.project, @@ -96,7 +98,7 @@ def test_revoke(self): def test_accept(self): """Calling accept method works correctly""" - mocked_now = datetime(2023, 5, 25, 11, 17, 0, tzinfo=pytz.utc) + mocked_now = datetime(2023, 5, 25, 11, 17, 0, tzinfo=timezone.utc) jane = self._create_user("jane", "1234") with patch("django.utils.timezone.now", Mock(return_value=mocked_now)): @@ -113,7 +115,7 @@ def test_accept(self): self.assertEqual(invitation.status, ProjectInvitation.Status.ACCEPTED) # setting accepted_at explicitly works - accepted_at = datetime(2023, 5, 10, 11, 17, 0, tzinfo=pytz.utc) + accepted_at = datetime(2023, 5, 10, 11, 17, 0, tzinfo=timezone.utc) invitation = ProjectInvitation.objects.create( email="john@example.com", project=self.project, diff --git a/onadata/apps/logger/views.py b/onadata/apps/logger/views.py index 21f75b2d69..1308b8a900 100644 --- a/onadata/apps/logger/views.py +++ b/onadata/apps/logger/views.py @@ -6,12 +6,10 @@ import tempfile from datetime import datetime -import pytz -import six from django.conf import settings from django.contrib import messages -from django.contrib.auth.decorators import login_required from django.contrib.auth import get_user_model +from django.contrib.auth.decorators import login_required from django.core.files import File from django.core.files.storage import get_storage_class from django.http import ( @@ -24,21 +22,24 @@ from django.shortcuts import get_object_or_404, render from django.template import RequestContext, loader from django.urls import reverse +from django.utils import timezone from django.utils.translation import gettext as _ from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_GET, require_http_methods, require_POST + +import six from django_digest import HttpDigestAuthenticator 
+from onadata.apps.api.tools import get_host_domain from onadata.apps.logger.import_tools import import_instances_from_zip from onadata.apps.logger.models.attachment import Attachment from onadata.apps.logger.models.instance import Instance from onadata.apps.logger.models.xform import XForm -from onadata.apps.api.tools import get_host_domain from onadata.apps.main.models import MetaData, UserProfile from onadata.libs.exceptions import EnketoError +from onadata.libs.utils.cache_tools import USER_PROFILE_PREFIX, cache from onadata.libs.utils.decorators import is_owner from onadata.libs.utils.log import Actions, audit_log -from onadata.libs.utils.cache_tools import cache, USER_PROFILE_PREFIX from onadata.libs.utils.logger_tools import ( BaseOpenRosaResponse, OpenRosaResponse, @@ -250,9 +251,7 @@ def formList(request, username): # noqa N802 request, "xformsList.xml", data, content_type="text/xml; charset=utf-8" ) response["X-OpenRosa-Version"] = "1.0" - response["Date"] = datetime.now(pytz.timezone(settings.TIME_ZONE)).strftime( - "%a, %d %b %Y %H:%M:%S %Z" - ) + response["Date"] = timezone.localtime().strftime("%a, %d %b %Y %H:%M:%S %Z") return response @@ -288,9 +287,7 @@ def xformsManifest(request, username, id_string): # noqa N802 content_type="text/xml; charset=utf-8", ) response["X-OpenRosa-Version"] = "1.0" - response["Date"] = datetime.now(pytz.timezone(settings.TIME_ZONE)).strftime( - "%a, %d %b %Y %H:%M:%S %Z" - ) + response["Date"] = timezone.localtime().strftime("%a, %d %b %Y %H:%M:%S %Z") return response diff --git a/onadata/apps/main/tests/test_process.py b/onadata/apps/main/tests/test_process.py index 27b6848e77..9af3918aa4 100644 --- a/onadata/apps/main/tests/test_process.py +++ b/onadata/apps/main/tests/test_process.py @@ -16,9 +16,9 @@ from django.core.files.uploadedfile import UploadedFile from django.test.testcases import SerializeMixin from django.urls import reverse +from django.utils import timezone import openpyxl -import pytz import requests 
from django_digest.test import Client as DigestClient from flaky import flaky @@ -77,9 +77,9 @@ def _update_dynamic_data(self): """ for uuid, submission_time in iteritems(self.uuid_to_submission_times): i = self.xform.instances.get(uuid=uuid) - i.date_created = pytz.timezone("UTC").localize( - datetime.strptime(submission_time, MONGO_STRFTIME) - ) + i.date_created = datetime.strptime( + submission_time, MONGO_STRFTIME + ).astimezone(timezone.utc) i.json = i.get_full_dict() i.save() diff --git a/onadata/apps/main/tests/test_signals.py b/onadata/apps/main/tests/test_signals.py index 0484910d29..fe9950c094 100644 --- a/onadata/apps/main/tests/test_signals.py +++ b/onadata/apps/main/tests/test_signals.py @@ -1,17 +1,15 @@ - """Tests for onadata.apps.main.signals module""" from datetime import datetime from unittest.mock import Mock, patch -import pytz - from django.contrib.auth import get_user_model +from django.utils import timezone +from onadata.apps.logger.models import Project, ProjectInvitation from onadata.apps.main.tests.test_base import TestBase -from onadata.apps.logger.models import ProjectInvitation, Project -from onadata.libs.utils.user_auth import get_user_default_project from onadata.libs.permissions import EditorRole, ManagerRole +from onadata.libs.utils.user_auth import get_user_default_project User = get_user_model() @@ -27,7 +25,7 @@ def setUp(self): project=self.project, role="editor", ) - self.mocked_now = datetime(2023, 6, 21, 14, 29, 0, tzinfo=pytz.utc) + self.mocked_now = datetime(2023, 6, 21, 14, 29, 0, tzinfo=timezone.utc) def test_accept_invitation(self): """Accept invitation works""" diff --git a/onadata/apps/viewer/views.py b/onadata/apps/viewer/views.py index ff686bdbe7..7ae8f6b66f 100644 --- a/onadata/apps/viewer/views.py +++ b/onadata/apps/viewer/views.py @@ -24,10 +24,10 @@ from django.shortcuts import get_object_or_404, redirect, render from django.template import loader from django.urls import reverse +from django.utils import timezone 
from django.utils.translation import gettext as _ from django.views.decorators.http import require_POST -import pytz import requests from dict2xml import dict2xml from dpath import util as dpath_util @@ -56,7 +56,7 @@ str_to_bool, ) from onadata.libs.utils.google import create_flow -from onadata.libs.utils.image_tools import image_url, generate_media_download_url +from onadata.libs.utils.image_tools import generate_media_download_url, image_url from onadata.libs.utils.log import Actions, audit_log from onadata.libs.utils.logger_tools import ( generate_content_disposition_header, @@ -82,12 +82,12 @@ def _get_start_end_submission_time(request): end = None try: if request.GET.get("start"): - start = pytz.timezone("UTC").localize( - datetime.strptime(request.GET["start"], "%y_%m_%d_%H_%M_%S") - ) + start = datetime.strptime( + request.GET["start"], "%y_%m_%d_%H_%M_%S" + ).astimezone(timezone.utc) if request.GET.get("end"): - end = pytz.timezone("UTC").localize( - datetime.strptime(request.GET["end"], "%y_%m_%d_%H_%M_%S") + end = datetime.strptime(request.GET["end"], "%y_%m_%d_%H_%M_%S").astimezone( + timezone.utc ) except ValueError: return HttpResponseBadRequest( diff --git a/onadata/libs/mixins/openrosa_headers_mixin.py b/onadata/libs/mixins/openrosa_headers_mixin.py index fd61389a9f..29ef886ca1 100644 --- a/onadata/libs/mixins/openrosa_headers_mixin.py +++ b/onadata/libs/mixins/openrosa_headers_mixin.py @@ -2,11 +2,8 @@ """ OpenRosaHeadersMixin module """ -from datetime import datetime - from django.conf import settings - -import pytz +from django.utils import timezone # 10,000,000 bytes DEFAULT_CONTENT_LENGTH = getattr(settings, "DEFAULT_CONTENT_LENGTH", 10000000) @@ -17,7 +14,7 @@ def get_openrosa_headers(request, location=True): Returns a dict with OpenRosa headers 'Date', 'X-OpenRosa-Version', 'X-OpenRosa-Accept-Content-Length' and 'Location'. 
""" - now = datetime.now(pytz.timezone(settings.TIME_ZONE)) + now = timezone.localtime() data = { "Date": now.strftime("%a, %d %b %Y %H:%M:%S %Z"), "X-OpenRosa-Version": "1.0", diff --git a/onadata/libs/renderers/renderers.py b/onadata/libs/renderers/renderers.py index 4410db62c2..15d5e49756 100644 --- a/onadata/libs/renderers/renderers.py +++ b/onadata/libs/renderers/renderers.py @@ -13,7 +13,6 @@ from django.utils.encoding import force_str, smart_str from django.utils.xmlutils import SimplerXMLGenerator -import pytz import six from rest_framework import negotiation from rest_framework.renderers import ( @@ -54,10 +53,8 @@ def floip_rows_list(data): """ try: _submission_time = ( - pytz.timezone("UTC") - .localize(parse_datetime(data["_submission_time"])) - .isoformat() - ) + parse_datetime(data["_submission_time"]).astimezone(timezone.utc) + ).isoformat() except ValueError: _submission_time = data["_submission_time"] diff --git a/onadata/libs/utils/logger_tools.py b/onadata/libs/utils/logger_tools.py index 818bf1be52..412af0ac03 100644 --- a/onadata/libs/utils/logger_tools.py +++ b/onadata/libs/utils/logger_tools.py @@ -37,7 +37,6 @@ from django.utils.encoding import DjangoUnicodeDecodeError from django.utils.translation import gettext as _ -import pytz from defusedxml.ElementTree import ParseError, fromstring from dict2xml import dict2xml from modilabs.utils.subprocess_timeout import ProcessTimedOut @@ -842,8 +841,7 @@ def set_default_openrosa_headers(response): """Sets the default OpenRosa headers into a ``response`` object.""" response["Content-Type"] = "text/html; charset=utf-8" response["X-OpenRosa-Accept-Content-Length"] = DEFAULT_CONTENT_LENGTH - tz = pytz.timezone(settings.TIME_ZONE) - dt = datetime.now(tz).strftime("%a, %d %b %Y %H:%M:%S %Z") + dt = timezone.localtime().strftime("%a, %d %b %Y %H:%M:%S %Z") response["Date"] = dt response[OPEN_ROSA_VERSION_HEADER] = OPEN_ROSA_VERSION response["Content-Type"] = DEFAULT_CONTENT_TYPE diff --git 
a/requirements/base.in b/requirements/base.in index 3307409f37..b9286956c0 100644 --- a/requirements/base.in +++ b/requirements/base.in @@ -7,5 +7,5 @@ -e git+https://github.com/onaio/django-multidb-router.git@f711368180d58eef87eda54fadfd5f8355623d52#egg=django-multidb-router -e git+https://github.com/onaio/floip-py.git@3c980eb184069ae7c3c9136b18441978237cd41d#egg=pyfloip -e git+https://github.com/onaio/python-json2xlsclient.git@62b4645f7b4f2684421a13ce98da0331a9dd66a0#egg=python-json2xlsclient --e git+https://github.com/onaio/ona-oidc.git@v1.0.3#egg=ona-oidc +-e git+https://github.com/onaio/ona-oidc.git@pytz-deprecated#egg=ona-oidc -e git+https://github.com/onaio/savreaderwriter.git@fix-pep-440-issues#egg=savreaderwriter diff --git a/requirements/base.pip b/requirements/base.pip index 53b832d678..4284892d08 100644 --- a/requirements/base.pip +++ b/requirements/base.pip @@ -8,7 +8,7 @@ # via -r requirements/base.in -e git+https://github.com/onaio/django-multidb-router.git@f711368180d58eef87eda54fadfd5f8355623d52#egg=django-multidb-router # via -r requirements/base.in --e git+https://github.com/onaio/ona-oidc.git@v1.0.3#egg=ona-oidc +-e git+https://github.com/onaio/ona-oidc.git@pytz-deprecated#egg=ona-oidc # via -r requirements/base.in -e git+https://github.com/onaio/floip-py.git@3c980eb184069ae7c3c9136b18441978237cd41d#egg=pyfloip # via -r requirements/base.in @@ -26,7 +26,7 @@ analytics-python==1.4.post1 # via onadata appoptics-metrics==5.1.0 # via onadata -asgiref==3.7.2 +asgiref==3.8.0 # via # django # django-cors-headers @@ -41,9 +41,9 @@ backoff==1.10.0 # via analytics-python billiard==4.2.0 # via celery -boto3==1.34.66 +boto3==1.34.67 # via dataflows-tabulator -botocore==1.34.66 +botocore==1.34.67 # via # boto3 # s3transfer @@ -189,7 +189,7 @@ future==1.0.0 # via python-json2xlsclient geojson==3.1.0 # via onadata -google-auth==2.28.2 +google-auth==2.29.0 # via # google-auth-oauthlib # onadata @@ -302,7 +302,6 @@ pytz==2024.1 # via # django-query-builder # 
fleming - # onadata pyxform==1.12.2 # via # onadata @@ -339,7 +338,7 @@ rsa==4.9 # via google-auth s3transfer==0.10.1 # via boto3 -sentry-sdk==1.42.0 +sentry-sdk==1.43.0 # via onadata simplejson==3.19.2 # via onadata diff --git a/requirements/dev.in b/requirements/dev.in index 767475d4eb..3762559028 100644 --- a/requirements/dev.in +++ b/requirements/dev.in @@ -2,14 +2,13 @@ -r base.in django-extensions -flake8 flaky httmock ipdb isort mock pre-commit -prospector +prospector>=1.10.3 pylint pylint-django requests_mock diff --git a/requirements/dev.pip b/requirements/dev.pip index fc14b8dfad..d94e34baaa 100644 --- a/requirements/dev.pip +++ b/requirements/dev.pip @@ -8,7 +8,7 @@ # via -r requirements/base.in -e git+https://github.com/onaio/django-multidb-router.git@f711368180d58eef87eda54fadfd5f8355623d52#egg=django-multidb-router # via -r requirements/base.in --e git+https://github.com/onaio/ona-oidc.git@v1.0.3#egg=ona-oidc +-e git+https://github.com/onaio/ona-oidc.git@pytz-deprecated#egg=ona-oidc # via -r requirements/base.in -e git+https://github.com/onaio/floip-py.git@3c980eb184069ae7c3c9136b18441978237cd41d#egg=pyfloip # via -r requirements/base.in @@ -26,7 +26,7 @@ analytics-python==1.4.post1 # via onadata appoptics-metrics==5.1.0 # via onadata -asgiref==3.7.2 +asgiref==3.8.0 # via # django # django-cors-headers @@ -49,9 +49,9 @@ backoff==1.10.0 # via analytics-python billiard==4.2.0 # via celery -boto3==1.34.66 +boto3==1.34.67 # via dataflows-tabulator -botocore==1.34.66 +botocore==1.34.67 # via # boto3 # s3transfer @@ -210,10 +210,10 @@ executing==2.0.1 # via stack-data filelock==3.13.1 # via virtualenv -flake8==3.8.4 +flake8==5.0.4 # via - # -r requirements/dev.in # flake8-polyfill + # prospector flake8-polyfill==1.0.2 # via pep8-naming flaky==3.8.1 @@ -224,7 +224,11 @@ future==1.0.0 # via python-json2xlsclient geojson==3.1.0 # via onadata -google-auth==2.28.2 +gitdb==4.0.11 + # via gitpython +gitpython==3.1.42 + # via prospector +google-auth==2.29.0 # via # 
google-auth-oauthlib # onadata @@ -244,7 +248,7 @@ ijson==3.2.3 # via dataflows-tabulator imagesize==1.4.1 # via sphinx -importlib-metadata==7.0.2 +importlib-metadata==7.1.0 # via yapf inflection==0.5.1 # via djangorestframework-jsonapi @@ -294,7 +298,7 @@ markupsafe==2.1.5 # via jinja2 matplotlib-inline==0.1.6 # via ipython -mccabe==0.6.1 +mccabe==0.7.0 # via # flake8 # prospector @@ -322,6 +326,7 @@ openpyxl==3.0.9 # pyxform packaging==24.0 # via + # prospector # requirements-detector # sphinx paho-mqtt==2.0.0 @@ -347,7 +352,7 @@ prompt-toolkit==3.0.43 # via # click-repl # ipython -prospector==1.4.1.1 +prospector==1.10.3 # via -r requirements/dev.in psycopg2-binary==2.9.9 # via onadata @@ -361,7 +366,7 @@ pyasn1==0.5.1 # rsa pyasn1-modules==0.3.0 # via google-auth -pycodestyle==2.6.0 +pycodestyle==2.9.1 # via # flake8 # prospector @@ -369,7 +374,7 @@ pycparser==2.21 # via cffi pydocstyle==6.3.0 # via prospector -pyflakes==2.2.0 +pyflakes==2.5.0 # via # flake8 # prospector @@ -393,13 +398,13 @@ pylint==2.17.7 # pylint-plugin-utils pylint-celery==0.3 # via prospector -pylint-django==2.1.0 +pylint-django==2.5.3 # via # -r requirements/dev.in # prospector pylint-flask==0.6 # via prospector -pylint-plugin-utils==0.8.2 +pylint-plugin-utils==0.7 # via # prospector # pylint-celery @@ -423,7 +428,6 @@ pytz==2024.1 # via # django-query-builder # fleming - # onadata pyxform==1.12.2 # via # onadata @@ -472,9 +476,9 @@ s3transfer==0.10.1 # via boto3 semver==3.0.2 # via requirements-detector -sentry-sdk==1.42.0 +sentry-sdk==1.43.0 # via onadata -setoptconf==0.3.0 +setoptconf-tmp==0.3.1 # via prospector simplejson==3.19.2 # via onadata @@ -490,6 +494,8 @@ six==1.16.0 # python-dateutil # requests-mock # tableschema +smmap==5.0.1 + # via gitdb snowballstemmer==2.2.0 # via # pydocstyle @@ -521,7 +527,9 @@ tableschema==1.20.9 tblib==3.0.0 # via -r requirements/dev.in toml==0.10.2 - # via requirements-detector + # via + # prospector + # requirements-detector tomli==2.0.1 # via yapf 
tomlkit==0.12.4 diff --git a/setup.cfg b/setup.cfg index a32edc43a3..a5210b0110 100644 --- a/setup.cfg +++ b/setup.cfg @@ -88,7 +88,6 @@ install_requires = numpy Pillow python-dateutil - pytz requests simplejson uwsgi From b1f35eb1c0ac482d34a8025b1fb1c02666622785 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Thu, 21 Mar 2024 16:38:08 +0300 Subject: [PATCH 160/270] pylintrc: address deprecation warning - use full path for Exception class --- .pylintrc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pylintrc b/.pylintrc index 496ac61a1b..76c33d3520 100644 --- a/.pylintrc +++ b/.pylintrc @@ -422,4 +422,4 @@ min-public-methods=2 # Exceptions that will emit a warning when being caught. Defaults to # "Exception" -overgeneral-exceptions=Exception +overgeneral-exceptions=builtins.Exception From 5aad6b5df3da5e163e9ad561df3d3dfe20429f69 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Thu, 21 Mar 2024 16:40:41 +0300 Subject: [PATCH 161/270] Use hiredis for performance with redis-py --- requirements/base.pip | 6 +++++- requirements/dev.pip | 6 +++++- setup.cfg | 1 + 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/requirements/base.pip b/requirements/base.pip index 4284892d08..0cfd03d93e 100644 --- a/requirements/base.pip +++ b/requirements/base.pip @@ -197,6 +197,8 @@ google-auth-oauthlib==1.2.0 # via onadata greenlet==3.0.3 # via sqlalchemy +hiredis==2.3.2 + # via redis httplib2==0.22.0 # via onadata idna==3.6 @@ -309,7 +311,9 @@ pyxform==1.12.2 recaptcha-client==1.0.6 # via onadata redis==5.0.3 - # via django-redis + # via + # django-redis + # onadata referencing==0.34.0 # via # jsonschema diff --git a/requirements/dev.pip b/requirements/dev.pip index d94e34baaa..e3d8a9708c 100644 --- a/requirements/dev.pip +++ b/requirements/dev.pip @@ -236,6 +236,8 @@ google-auth-oauthlib==1.2.0 # via onadata greenlet==3.0.3 # via sqlalchemy +hiredis==2.3.2 + # via redis httmock==1.4.0 # via -r requirements/dev.in httplib2==0.22.0 @@ -439,7 
+441,9 @@ pyyaml==6.0.1 recaptcha-client==1.0.6 # via onadata redis==5.0.3 - # via django-redis + # via + # django-redis + # onadata referencing==0.34.0 # via # jsonschema diff --git a/setup.cfg b/setup.cfg index a5210b0110..bc7909c944 100644 --- a/setup.cfg +++ b/setup.cfg @@ -101,6 +101,7 @@ install_requires = # Deprecation tagging deprecated # Redis cache + redis[hiredis] django-redis # osm defusedxml From a32a41782ba7b0cd23ce509bbaf2379bbca3ddc4 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Thu, 21 Mar 2024 17:04:31 +0300 Subject: [PATCH 162/270] Django 4: request.is_ajax() has been removed --- onadata/apps/main/views.py | 6 +++--- onadata/apps/restservice/views.py | 2 +- onadata/apps/viewer/views.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/onadata/apps/main/views.py b/onadata/apps/main/views.py index bf4e82cef5..a2c19d9775 100644 --- a/onadata/apps/main/views.py +++ b/onadata/apps/main/views.py @@ -193,7 +193,7 @@ def set_form(): context = {"message": message, "message_list": message_list} - if request.is_ajax(): + if request.headers.get("x-requested-with") == "XMLHttpRequest": res = ( loader.render_to_string("message.html", context=context, request=request) .replace("'", r"\'") @@ -915,7 +915,7 @@ def edit(request, username, id_string): # noqa C901 xform.update() - if request.is_ajax(): + if request.headers.get("x-requested-with") == "XMLHttpRequest": return HttpResponse(_("Updated succeeded.")) return HttpResponseRedirect( reverse(show, kwargs={"username": username, "id_string": id_string}) @@ -1327,7 +1327,7 @@ def set_perm(request, username, id_string): # noqa C901 request, ) - if request.is_ajax(): + if request.headers.get("x-requested-with") == "XMLHttpRequest": return JsonResponse({"status": "success"}) return HttpResponseRedirect( diff --git a/onadata/apps/restservice/views.py b/onadata/apps/restservice/views.py index e8e1a4c5c3..5af8b213ab 100644 --- a/onadata/apps/restservice/views.py +++ 
b/onadata/apps/restservice/views.py @@ -63,7 +63,7 @@ def add_service(request, username, id_string): message += Template("{{ field.errors }}").render( Context({"field": field}) ) - if request.is_ajax(): + if request.headers.get("x-requested-with") == "XMLHttpRequest": response = {"status": status, "message": message} if restservice: response["restservice"] = f"{restservice}" diff --git a/onadata/apps/viewer/views.py b/onadata/apps/viewer/views.py index 7ae8f6b66f..8b8c05a22f 100644 --- a/onadata/apps/viewer/views.py +++ b/onadata/apps/viewer/views.py @@ -971,7 +971,7 @@ def charts(request, username, id_string): summaries = build_chart_data(xform, lang_index, page) - if request.is_ajax(): + if request.headers.get("x-requested-with") == "XMLHttpRequest": template = "charts_snippet.html" else: template = "charts.html" From 4a7b9ce1d154b7a966e57cf6371ac84b9e865a7c Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Thu, 21 Mar 2024 17:12:28 +0300 Subject: [PATCH 163/270] Django 4: The {% ifequal %} and {% ifnotequal %} template tags are removed. 
--- onadata/libs/templates/change_language.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/onadata/libs/templates/change_language.html b/onadata/libs/templates/change_language.html index 7840cddf1a..31e27c9d1c 100644 --- a/onadata/libs/templates/change_language.html +++ b/onadata/libs/templates/change_language.html @@ -5,7 +5,7 @@ From 891a28c050dbfd9ade0e1b85442773e29ddcfddd Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Thu, 21 Mar 2024 18:24:27 +0300 Subject: [PATCH 164/270] Remove unused import --- onadata/apps/logger/views.py | 1 - 1 file changed, 1 deletion(-) diff --git a/onadata/apps/logger/views.py b/onadata/apps/logger/views.py index 1308b8a900..ed76b0d935 100644 --- a/onadata/apps/logger/views.py +++ b/onadata/apps/logger/views.py @@ -4,7 +4,6 @@ """ import os import tempfile -from datetime import datetime from django.conf import settings from django.contrib import messages From 0e5419b844abaa32862dc0b466a56aa495590091 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Fri, 22 Mar 2024 12:42:30 +0300 Subject: [PATCH 165/270] Django 4: do not use never_cache decorator with DRF DRF requests are not HttpRequest objects --- onadata/apps/api/viewsets/connect_viewset.py | 7 ------- onadata/apps/api/viewsets/xform_list_viewset.py | 2 -- onadata/apps/api/viewsets/xform_viewset.py | 2 -- 3 files changed, 11 deletions(-) diff --git a/onadata/apps/api/viewsets/connect_viewset.py b/onadata/apps/api/viewsets/connect_viewset.py index 549a26fe55..75c90fe857 100644 --- a/onadata/apps/api/viewsets/connect_viewset.py +++ b/onadata/apps/api/viewsets/connect_viewset.py @@ -8,7 +8,6 @@ from django.utils import timezone from django.utils.decorators import classonlymethod from django.utils.translation import gettext as _ -from django.views.decorators.cache import never_cache from multidb.pinning import use_master from rest_framework import mixins, status, viewsets @@ -203,9 +202,3 @@ def odk_token(self, request, *args, **kwargs): 
data={"odk_token": token.raw_key, "expires": token.expires}, status=status_code, ) - - @classonlymethod - def as_view(cls, actions=None, **initkwargs): # noqa - view = super(ConnectViewSet, cls).as_view(actions, **initkwargs) - - return never_cache(view) diff --git a/onadata/apps/api/viewsets/xform_list_viewset.py b/onadata/apps/api/viewsets/xform_list_viewset.py index 5144f27f5e..e802cd9897 100644 --- a/onadata/apps/api/viewsets/xform_list_viewset.py +++ b/onadata/apps/api/viewsets/xform_list_viewset.py @@ -5,7 +5,6 @@ from django.conf import settings from django.http import Http404, StreamingHttpResponse from django.shortcuts import get_object_or_404 -from django.views.decorators.cache import never_cache from django_filters import rest_framework as django_filter_filters from rest_framework import permissions, viewsets @@ -151,7 +150,6 @@ def filter_queryset(self, queryset): return queryset - @never_cache def list(self, request, *args, **kwargs): # pylint: disable=attribute-defined-outside-init self.object_list = self.filter_queryset(self.get_queryset()) diff --git a/onadata/apps/api/viewsets/xform_viewset.py b/onadata/apps/api/viewsets/xform_viewset.py index fb79f2ad89..a4b5d5f3e2 100644 --- a/onadata/apps/api/viewsets/xform_viewset.py +++ b/onadata/apps/api/viewsets/xform_viewset.py @@ -26,7 +26,6 @@ from django.utils import timezone from django.utils.http import urlencode from django.utils.translation import gettext as _ -from django.views.decorators.cache import never_cache import six from django_filters.rest_framework import DjangoFilterBackend @@ -406,7 +405,6 @@ def create_async(self, request, *args, **kwargs): return Response(data=resp, status=resp_code, headers=headers) @action(methods=["GET", "HEAD"], detail=True) - @never_cache def form(self, request, **kwargs): """Returns the XLSForm in any of JSON, XML, XLS(X), CSV formats.""" form = self.get_object() From 0a586b5d23e8a18713e68706599fc9b33566d135 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: 
Fri, 22 Mar 2024 13:15:00 +0300 Subject: [PATCH 166/270] test submission time should be considered to be in UTC already --- onadata/apps/main/tests/test_process.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/onadata/apps/main/tests/test_process.py b/onadata/apps/main/tests/test_process.py index 9af3918aa4..113e575403 100644 --- a/onadata/apps/main/tests/test_process.py +++ b/onadata/apps/main/tests/test_process.py @@ -77,9 +77,9 @@ def _update_dynamic_data(self): """ for uuid, submission_time in iteritems(self.uuid_to_submission_times): i = self.xform.instances.get(uuid=uuid) - i.date_created = datetime.strptime( - submission_time, MONGO_STRFTIME - ).astimezone(timezone.utc) + i.date_created = datetime.strptime(submission_time, MONGO_STRFTIME).replace( + tzinfo=timezone.utc + ) i.json = i.get_full_dict() i.save() From 9a195ff1d4b7b47b73cb353bf47c831f247fdd80 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Fri, 22 Mar 2024 14:15:46 +0300 Subject: [PATCH 167/270] remove unused import --- onadata/apps/api/viewsets/connect_viewset.py | 1 - 1 file changed, 1 deletion(-) diff --git a/onadata/apps/api/viewsets/connect_viewset.py b/onadata/apps/api/viewsets/connect_viewset.py index 75c90fe857..a662f782b0 100644 --- a/onadata/apps/api/viewsets/connect_viewset.py +++ b/onadata/apps/api/viewsets/connect_viewset.py @@ -6,7 +6,6 @@ """ from django.core.exceptions import MultipleObjectsReturned from django.utils import timezone -from django.utils.decorators import classonlymethod from django.utils.translation import gettext as _ from multidb.pinning import use_master From fc19c4ef20eef0e0f66440290f984beba38dcbdf Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Fri, 22 Mar 2024 14:16:23 +0300 Subject: [PATCH 168/270] Django 4: pin sphinx to version 6.x --- docs/projects.rst | 42 +++++++++++++++++++++--------------------- requirements/base.pip | 4 ++-- requirements/dev.pip | 4 ++-- setup.cfg | 2 +-- 4 files changed, 26 insertions(+), 26 
deletions(-) diff --git a/docs/projects.rst b/docs/projects.rst index f25f20d941..21cb868636 100644 --- a/docs/projects.rst +++ b/docs/projects.rst @@ -204,7 +204,7 @@ You can share a project with a user or multiple users by ``PUT`` a payload with Example 1: Sharing with a specific user -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ :: curl -X PUT -d username=alice -d role=readonly https://api.ona.io/api/v1/projects/1/share @@ -215,8 +215,8 @@ Response HTTP 204 NO CONTENT -Example 2: Sharing with mutliple users -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Example 2: Sharing with more than one user +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ :: curl -X PUT -d username=alice,jake -d role=readonly https://api.ona.io/api/v1/projects/1/share @@ -528,14 +528,14 @@ Example ^^^^^^^ :: - + curl -X GET https://api.ona.io/api/v1/projects/1/invitations Response ^^^^^^^^ :: - + [ { "id": 1, @@ -571,16 +571,16 @@ Example ^^^^^^^ :: - + curl -X GET https://api.ona.io/api/v1/projects/1/invitations?status=2 Response ^^^^^^^^ :: - + [ - + { "id": 2, "email":"johndoe@example.com", @@ -604,7 +604,7 @@ Example ^^^^^^^ :: - + curl -X POST -d "email=janedoe@example.com" -d "role=readonly" https://api.ona.io/api/v1/projects/1/invitations @@ -629,16 +629,16 @@ Response ^^^^^^^^ :: - + { "id": 1, "email": "janedoe@example.com", "role": "readonly", "status": 1, } - -The link embedded in the email will be of the format ``http://{url}`` + +The link embedded in the email will be of the format ``http://{url}`` where: - ``url`` - is the URL the recipient will be redirected to on clicking the link. The default is ``{domain}/api/v1/profiles`` where ``domain`` is domain where the API is hosted. 
@@ -667,14 +667,14 @@ Example ^^^^^^^ :: - + curl -X PUT -d "email=janedoe@example.com" -d "role=editor" -d "invitation_id=1" https://api.ona.io/api/v1/projects/1/invitations/1 Response ^^^^^^^^ :: - + { "id": 1, "email": "janedoe@example.com", @@ -696,11 +696,11 @@ Example ^^^^^^^ :: - + curl -X POST -d "invitation_id=6" https://api.ona.io/api/v1/projects/1/resend-invitation -``invitation_id``: The primary key of the ``ProjectInvitation`` to resend. +``invitation_id``: The primary key of the ``ProjectInvitation`` to resend. - Must be a ``ProjectInvitation`` whose status is **Pending** @@ -708,7 +708,7 @@ Response ^^^^^^^^ :: - + { "message": "Success" } @@ -727,10 +727,10 @@ Example ^^^^^^^ :: - + curl -X POST -d "invitation_id=6" https://api.ona.io/api/v1/projects/1/revoke-invitation -``invitation_id``: The primary key of the ``ProjectInvitation`` to resend. +``invitation_id``: The primary key of the ``ProjectInvitation`` to resend. - Must be a ``ProjectInvitation`` whose status is **Pending** @@ -738,7 +738,7 @@ Response ^^^^^^^^ :: - + { "message": "Success" } @@ -751,4 +751,4 @@ Since a project invitation is sent to an unregistered user, acceptance of the in when `creating a new user `_. 
All pending invitations whose email match the new user's email will be accepted and projects shared with the -user \ No newline at end of file +user diff --git a/requirements/base.pip b/requirements/base.pip index 0cfd03d93e..7f2fd6a14b 100644 --- a/requirements/base.pip +++ b/requirements/base.pip @@ -175,7 +175,7 @@ djangorestframework-xml==2.0.0 # via onadata dnspython==2.6.1 # via pymongo -docutils==0.20.1 +docutils==0.19 # via sphinx dpath==2.1.6 # via onadata @@ -358,7 +358,7 @@ six==1.16.0 # tableschema snowballstemmer==2.2.0 # via sphinx -sphinx==7.2.6 +sphinx==6.2.1 # via onadata sphinxcontrib-applehelp==1.0.8 # via sphinx diff --git a/requirements/dev.pip b/requirements/dev.pip index e3d8a9708c..1c6e78f3d0 100644 --- a/requirements/dev.pip +++ b/requirements/dev.pip @@ -196,7 +196,7 @@ djangorestframework-xml==2.0.0 # via onadata dnspython==2.6.1 # via pymongo -docutils==0.20.1 +docutils==0.19 # via sphinx dodgy==0.2.1 # via prospector @@ -504,7 +504,7 @@ snowballstemmer==2.2.0 # via # pydocstyle # sphinx -sphinx==7.2.6 +sphinx==6.2.1 # via onadata sphinxcontrib-applehelp==1.0.8 # via sphinx diff --git a/setup.cfg b/setup.cfg index bc7909c944..cea9d0d529 100644 --- a/setup.cfg +++ b/setup.cfg @@ -75,7 +75,7 @@ install_requires = #XML Instance API utility xmltodict #docs - sphinx + sphinx>=6.2,<7 Markdown #others unicodecsv From 3575023889373dd3460e2d7c439c49dec3f97085 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Fri, 22 Mar 2024 17:38:01 +0300 Subject: [PATCH 169/270] Ensure date to date comparison for delete cache values --- onadata/apps/logger/models/instance.py | 10 +++++----- onadata/apps/logger/models/xform.py | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index 5ee33bc8be..5b87600ebf 100644 --- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -186,17 +186,17 @@ def _update_submission_count_for_today( 
form_id: int, incr: bool = True, date_created=None ): # Track submissions made today - current_date = timezone.localtime().isoformat() - date_cache_key = f"{XFORM_SUBMISSION_COUNT_FOR_DAY_DATE}" f"{form_id}" + current_date = timezone.localdate().isoformat() + date_cache_key = f"{XFORM_SUBMISSION_COUNT_FOR_DAY_DATE}{form_id}" count_cache_key = f"{XFORM_SUBMISSION_COUNT_FOR_DAY}{form_id}" if not cache.get(date_cache_key) == current_date: cache.set(date_cache_key, current_date, 86400) if date_created: - date_created = date_created.astimezone( - timezone.get_current_timezone() - ).isoformat() + date_created = ( + date_created.astimezone(timezone.get_current_timezone()).date().isoformat() + ) current_count = cache.get(count_cache_key) if not current_count and incr: diff --git a/onadata/apps/logger/models/xform.py b/onadata/apps/logger/models/xform.py index 195b304de5..546d19c5dd 100644 --- a/onadata/apps/logger/models/xform.py +++ b/onadata/apps/logger/models/xform.py @@ -1158,7 +1158,7 @@ def submission_count(self, force_update=False): @property def submission_count_for_today(self): """Returns the submissions count for the current day.""" - current_date = timezone.localtime().isoformat() + current_date = timezone.localdate().isoformat() count = ( cache.get(f"{XFORM_SUBMISSION_COUNT_FOR_DAY}{self.id}") if cache.get(f"{XFORM_SUBMISSION_COUNT_FOR_DAY_DATE}{self.id}") From 338bef1e7c7a5e51a7f987434bcde962e19004a5 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Fri, 22 Mar 2024 20:11:17 +0300 Subject: [PATCH 170/270] Django 4: update not found tests --- .../tests/viewsets/test_dataview_viewset.py | 4 +- .../api/tests/viewsets/test_osm_viewset.py | 159 +++++++++--------- 2 files changed, 83 insertions(+), 80 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_dataview_viewset.py b/onadata/apps/api/tests/viewsets/test_dataview_viewset.py index 35d9f20f29..d959bd4d44 100644 --- a/onadata/apps/api/tests/viewsets/test_dataview_viewset.py +++ 
b/onadata/apps/api/tests/viewsets/test_dataview_viewset.py @@ -211,7 +211,9 @@ def test_dataview_with_attachment_field(self): ) self.assertEqual(response.status_code, 404) response_data = json.loads(json.dumps(response.data)) - self.assertEqual(response_data, {"detail": "Not found."}) + self.assertEqual( + response_data, {"detail": "No Attachment matches the given query"} + ) # a user with permissions can view a specific attachment object attachment_list_view = AttachmentViewSet.as_view({"get": "retrieve"}) diff --git a/onadata/apps/api/tests/viewsets/test_osm_viewset.py b/onadata/apps/api/tests/viewsets/test_osm_viewset.py index 026bff9d4e..4ba649601c 100644 --- a/onadata/apps/api/tests/viewsets/test_osm_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_osm_viewset.py @@ -11,108 +11,108 @@ from mock import patch -from onadata.apps.api.tests.viewsets.test_abstract_viewset import \ - TestAbstractViewSet +from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet from onadata.apps.api.viewsets.data_viewset import DataViewSet from onadata.apps.api.viewsets.osm_viewset import OsmViewSet from onadata.apps.api.viewsets.xform_viewset import XFormViewSet from onadata.apps.logger.models import Attachment, Instance, OsmData from onadata.apps.viewer.models import Export -from onadata.libs.utils.common_tools import (filename_from_disposition, - get_response_content) +from onadata.libs.utils.common_tools import ( + filename_from_disposition, + get_response_content, +) from onadata.libs.utils.osm import save_osm_data class TestOSMViewSet(TestAbstractViewSet): - def setUp(self): super(self.__class__, self).setUp() self._login_user_and_profile() self.factory = RequestFactory() - self.extra = { - 'HTTP_AUTHORIZATION': 'Token %s' % self.user.auth_token} + self.extra = {"HTTP_AUTHORIZATION": "Token %s" % self.user.auth_token} def _publish_osm_with_submission(self): filenames = [ - 'OSMWay234134797.osm', - 'OSMWay34298972.osm', + "OSMWay234134797.osm", + 
"OSMWay34298972.osm", ] - self.fixtures_dir = osm_fixtures_dir = os.path.realpath(os.path.join( - os.path.dirname(__file__), '..', 'fixtures', 'osm')) - paths = [ - os.path.join(osm_fixtures_dir, filename) - for filename in filenames] - xlsform_path = os.path.join(osm_fixtures_dir, 'osm.xlsx') - self.combined_osm_path = os.path.join(osm_fixtures_dir, 'combined.osm') + self.fixtures_dir = osm_fixtures_dir = os.path.realpath( + os.path.join(os.path.dirname(__file__), "..", "fixtures", "osm") + ) + paths = [os.path.join(osm_fixtures_dir, filename) for filename in filenames] + xlsform_path = os.path.join(osm_fixtures_dir, "osm.xlsx") + self.combined_osm_path = os.path.join(osm_fixtures_dir, "combined.osm") self._publish_xls_form_to_project(xlsform_path=xlsform_path) - self.xform.version = '201511091147' + self.xform.version = "201511091147" self.xform.save() # look at the forms.json?instances_with_osm=True - request = self.factory.get('/', {'instances_with_osm': 'True'}, - **self.extra) - view = XFormViewSet.as_view({'get': 'list'}) - response = view(request, format='json') + request = self.factory.get("/", {"instances_with_osm": "True"}, **self.extra) + view = XFormViewSet.as_view({"get": "list"}) + response = view(request, format="json") self.assertEqual(response.status_code, 200) self.assertEqual(response.data, []) - submission_path = os.path.join(osm_fixtures_dir, 'instance_a.xml') + submission_path = os.path.join(osm_fixtures_dir, "instance_a.xml") files = [open(path) for path in paths] - count = Attachment.objects.filter(extension='osm').count() + count = Attachment.objects.filter(extension="osm").count() count_osm = OsmData.objects.count() - _submission_time = parse_datetime('2013-02-18 15:54:01Z') - self._make_submission(submission_path, media_file=files, - forced_submission_time=_submission_time) - self.assertTrue( - Attachment.objects.filter(extension='osm').count() > count) + _submission_time = parse_datetime("2013-02-18 15:54:01Z") + self._make_submission( 
+ submission_path, media_file=files, forced_submission_time=_submission_time + ) + self.assertTrue(Attachment.objects.filter(extension="osm").count() > count) self.assertEqual(OsmData.objects.count(), count_osm + 2) @override_settings(CELERY_TASK_ALWAYS_EAGER=True) def test_data_retrieve_instance_osm_format(self): self._publish_osm_with_submission() formid = self.xform.pk - dataid = self.xform.instances.latest('date_created').pk - request = self.factory.get('/') + dataid = self.xform.instances.latest("date_created").pk + request = self.factory.get("/") # look at the data/[pk]/[dataid].osm endpoint - view = OsmViewSet.as_view({'get': 'retrieve'}) - response = view(request, pk=formid, dataid=dataid, format='osm') + view = OsmViewSet.as_view({"get": "retrieve"}) + response = view(request, pk=formid, dataid=dataid, format="osm") self.assertEqual(response.status_code, 200) with open(self.combined_osm_path) as f: osm = f.read() response.render() - self.assertMultiLineEqual(response.content.decode('utf-8').strip(), - osm.strip()) + self.assertMultiLineEqual( + response.content.decode("utf-8").strip(), osm.strip() + ) # look at the data/[pk].osm endpoint - view = OsmViewSet.as_view({'get': 'list'}) - response = view(request, pk=formid, format='osm') + view = OsmViewSet.as_view({"get": "list"}) + response = view(request, pk=formid, format="osm") self.assertEqual(response.status_code, 200) response.render() - self.assertMultiLineEqual(response.content.decode('utf-8').strip(), - osm.strip()) + self.assertMultiLineEqual( + response.content.decode("utf-8").strip(), osm.strip() + ) # look at the data.osm endpoint - view = OsmViewSet.as_view({'get': 'list'}) - response = view(request, format='osm') + view = OsmViewSet.as_view({"get": "list"}) + response = view(request, format="osm") self.assertEqual(response.status_code, 301) - self.assertEqual(response['Location'], - 'http://testserver/api/v1/osm.json') + self.assertEqual(response["Location"], 
"http://testserver/api/v1/osm.json") - response = view(request, format='json') + response = view(request, format="json") self.assertEqual(response.status_code, 200) - data = [{ - 'url': 'http://testserver/api/v1/osm/{}'.format(self.xform.pk), - 'title': self.xform.title, - 'id_string': self.xform.id_string, 'user': self.xform.user.username - }] + data = [ + { + "url": "http://testserver/api/v1/osm/{}".format(self.xform.pk), + "title": self.xform.title, + "id_string": self.xform.id_string, + "user": self.xform.user.username, + } + ] self.assertEqual(response.data, data) # look at the forms.json?instances_with_osm=True - request = self.factory.get('/', {'instances_with_osm': 'True'}, - **self.extra) - view = XFormViewSet.as_view({'get': 'list'}) - response = view(request, format='json') + request = self.factory.get("/", {"instances_with_osm": "True"}, **self.extra) + view = XFormViewSet.as_view({"get": "list"}) + response = view(request, format="json") self.assertEqual(response.status_code, 200) self.assertNotEqual(response.data, []) @@ -121,46 +121,46 @@ def test_osm_csv_export(self): self._publish_osm_with_submission() count = Export.objects.all().count() - view = XFormViewSet.as_view({ - 'get': 'retrieve' - }) + view = XFormViewSet.as_view({"get": "retrieve"}) - request = self.factory.get('/', data={'include_images': False}, - **self.extra) - response = view(request, pk=self.xform.pk, format='csv') + request = self.factory.get("/", data={"include_images": False}, **self.extra) + response = view(request, pk=self.xform.pk, format="csv") self.assertEqual(response.status_code, 200) self.assertEqual(count + 1, Export.objects.all().count()) headers = dict(response.items()) - self.assertEqual(headers['Content-Type'], 'application/csv') - content_disposition = headers['Content-Disposition'] + self.assertEqual(headers["Content-Type"], "application/csv") + content_disposition = headers["Content-Disposition"] filename = filename_from_disposition(content_disposition) 
basename, ext = os.path.splitext(filename) - self.assertEqual(ext, '.csv') + self.assertEqual(ext, ".csv") content = get_response_content(response) reader = csv.DictReader(StringIO(content)) data = [_ for _ in reader] - test_file_path = os.path.join(self.fixtures_dir, 'osm.csv') - with open(test_file_path, 'r') as test_file: + test_file_path = os.path.join(self.fixtures_dir, "osm.csv") + with open(test_file_path, "r") as test_file: expected_csv_reader = csv.DictReader(test_file) for index, row in enumerate(expected_csv_reader): self.assertDictContainsSubset(row, data[index]) - request = self.factory.get('/', **self.extra) - response = view(request, pk=self.xform.pk, format='csv') + request = self.factory.get("/", **self.extra) + response = view(request, pk=self.xform.pk, format="csv") self.assertEqual(response.status_code, 200) def test_process_error_osm_format(self): self._publish_xls_form_to_project() self._make_submissions() - request = self.factory.get('/') - view = DataViewSet.as_view({'get': 'retrieve'}) - dataid = self.xform.instances.all().order_by('id')[0].pk - response = view(request, pk=self.xform.pk, dataid=dataid, format='osm') - self.assertContains(response, 'Not found.', - status_code=404) + request = self.factory.get("/") + view = DataViewSet.as_view({"get": "retrieve"}) + dataid = self.xform.instances.all().order_by("id")[0].pk + response = view(request, pk=self.xform.pk, dataid=dataid, format="osm") + self.assertContains( + response, + "No data matches with given query.", + status_code=404, + ) def test_save_osm_data_transaction_atomic(self): """ @@ -178,7 +178,7 @@ def test_save_osm_data_transaction_atomic(self): # mock the save method on OsmData and cause it to raise an # IntegrityError on its first call only, so that we get into the # catch inside save_osm_data - with patch('onadata.libs.utils.osm.OsmData.save') as mock: + with patch("onadata.libs.utils.osm.OsmData.save") as mock: def _side_effect(*args): """ @@ -212,23 +212,24 @@ def 
test_save_osm_data_with_non_existing_media_file(self): Test that saving osm data with a non existing media file fails silenty and does not throw an IOError """ - osm_fixtures_dir = os.path.realpath(os.path.join( - os.path.dirname(__file__), '..', 'fixtures', 'osm')) + osm_fixtures_dir = os.path.realpath( + os.path.join(os.path.dirname(__file__), "..", "fixtures", "osm") + ) # publish form - xlsform_path = os.path.join(osm_fixtures_dir, 'osm.xlsx') + xlsform_path = os.path.join(osm_fixtures_dir, "osm.xlsx") self._publish_xls_form_to_project(xlsform_path=xlsform_path) self.xform.save() # make submission with osm data - submission_path = os.path.join(osm_fixtures_dir, 'instance_a.xml') - media_file = open(os.path.join(osm_fixtures_dir, - 'OSMWay234134797.osm')) + submission_path = os.path.join(osm_fixtures_dir, "instance_a.xml") + media_file = open(os.path.join(osm_fixtures_dir, "OSMWay234134797.osm")) self._make_submission(submission_path, media_file=media_file) # save osm data with a non existing file submission = Instance.objects.first() attachment = submission.attachments.first() attachment.media_file = os.path.join( - settings.PROJECT_ROOT, "test_media", "noFile.osm") + settings.PROJECT_ROOT, "test_media", "noFile.osm" + ) attachment.save() try: save_osm_data(submission.id) From f97d53a0effaf6828d20b270afa7cf238bbec8eb Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Fri, 22 Mar 2024 21:53:49 +0300 Subject: [PATCH 171/270] Django 4: +djangorestframework==3.15.1 --- .../api/tests/viewsets/test_user_viewset.py | 120 ++++++++++-------- requirements/base.pip | 10 +- requirements/dev.pip | 10 +- 3 files changed, 77 insertions(+), 63 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_user_viewset.py b/onadata/apps/api/tests/viewsets/test_user_viewset.py index 57461f02ee..63e3da68e1 100644 --- a/onadata/apps/api/tests/viewsets/test_user_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_user_viewset.py @@ -1,73 +1,81 @@ -from 
onadata.apps.api.tests.viewsets.test_abstract_viewset import\ - TestAbstractViewSet +# -*- coding: utf-8 -*- +""" +Test UserViewSet module. +""" + +from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet from onadata.apps.api.viewsets.user_viewset import UserViewSet class TestUserViewSet(TestAbstractViewSet): - def setUp(self): - super(self.__class__, self).setUp() - self.data = {'id': self.user.pk, 'username': u'bob', - 'first_name': u'Bob', 'last_name': u'erama'} + super().setUp() + self.data = { + "id": self.user.pk, + "username": "bob", + "first_name": "Bob", + "last_name": "erama", + } def test_user_get(self): """Test authenticated user can access user info""" - request = self.factory.get('/', **self.extra) + request = self.factory.get("/", **self.extra) # users list - view = UserViewSet.as_view({'get': 'list'}) + view = UserViewSet.as_view({"get": "list"}) response = view(request) - self.assertNotEqual(response.get('Cache-Control'), None) + self.assertIsNone(response.get("Cache-Control")) self.assertEqual(response.status_code, 200) self.assertEqual(response.data, [self.data]) # user with username bob - view = UserViewSet.as_view({'get': 'retrieve'}) - response = view(request, username='bob') - self.assertNotEqual(response.get('Cache-Control'), None) + view = UserViewSet.as_view({"get": "retrieve"}) + response = view(request, username="bob") + self.assertNotEqual(response.get("Cache-Control"), None) self.assertEqual(response.status_code, 200) self.assertEqual(response.data, self.data) # user with username BoB, mixed case - view = UserViewSet.as_view({'get': 'retrieve'}) - response = view(request, username='BoB') + view = UserViewSet.as_view({"get": "retrieve"}) + response = view(request, username="BoB") self.assertEqual(response.status_code, 200) - self.assertNotEqual(response.get('Cache-Control'), None) + self.assertNotEqual(response.get("Cache-Control"), None) self.assertEqual(response.data, self.data) # Test can retrieve 
profile for usernames with _ . @ symbols alice_data = { - 'username': 'alice.test@gmail.com', 'email': 'alice@localhost.com'} + "username": "alice.test@gmail.com", + "email": "alice@localhost.com", + } alice_profile = self._create_user_profile(alice_data) - extra = { - 'HTTP_AUTHORIZATION': f'Token {alice_profile.user.auth_token}'} + extra = {"HTTP_AUTHORIZATION": f"Token {alice_profile.user.auth_token}"} - request = self.factory.get('/', **extra) - response = view(request, username='alice.test@gmail.com') + request = self.factory.get("/", **extra) + response = view(request, username="alice.test@gmail.com") self.assertEqual(response.status_code, 200) - self.assertEqual(response.data['username'], alice_data['username']) + self.assertEqual(response.data["username"], alice_data["username"]) # user bob is_active = False self.user.is_active = False self.user.save() - view = UserViewSet.as_view({'get': 'retrieve'}) - response = view(request, username='BoB') + view = UserViewSet.as_view({"get": "retrieve"}) + response = view(request, username="BoB") self.assertEqual(response.status_code, 404) def test_user_anon(self): """Test anonymous user can access user info""" - request = self.factory.get('/') + request = self.factory.get("/") # users list endpoint - view = UserViewSet.as_view({'get': 'list'}) + view = UserViewSet.as_view({"get": "list"}) response = view(request) self.assertEqual(response.status_code, 200) self.assertEqual(response.data, [self.data]) # user with username bob - view = UserViewSet.as_view({'get': 'retrieve'}) - response = view(request, username='bob') + view = UserViewSet.as_view({"get": "retrieve"}) + response = view(request, username="bob") self.assertEqual(response.status_code, 200) self.assertEqual(response.data, self.data) @@ -77,36 +85,44 @@ def test_user_anon(self): self.assertEqual(response.data, self.data) def test_get_user_using_email(self): - alice_data = {'username': 'alice', 'email': 'alice@localhost.com', - 'first_name': u'Alice', 
'last_name': u'Kamande'} + alice_data = { + "username": "alice", + "email": "alice@localhost.com", + "first_name": "Alice", + "last_name": "Kamande", + } alice_profile = self._create_user_profile(alice_data) - data = [{'id': alice_profile.user.pk, 'username': u'alice', - 'first_name': u'Alice', 'last_name': u'Kamande'}] + data = [ + { + "id": alice_profile.user.pk, + "username": "alice", + "first_name": "Alice", + "last_name": "Kamande", + } + ] get_params = { - 'search': alice_profile.user.email, + "search": alice_profile.user.email, } - view = UserViewSet.as_view( - {'get': 'list'} - ) - request = self.factory.get('/', data=get_params) + view = UserViewSet.as_view({"get": "list"}) + request = self.factory.get("/", data=get_params) response = view(request) self.assertEqual(response.status_code, 401) - error = {'detail': 'Authentication credentials were not provided.'} + error = {"detail": "Authentication credentials were not provided."} self.assertEqual(response.data, error) # authenticated - request = self.factory.get('/', data=get_params, **self.extra) + request = self.factory.get("/", data=get_params, **self.extra) response = view(request) self.assertEqual(response.status_code, 200) self.assertEqual(response.data, data) get_params = { - 'search': 'doesnotexist@email.com', + "search": "doesnotexist@email.com", } - request = self.factory.get('/', data=get_params, **self.extra) + request = self.factory.get("/", data=get_params, **self.extra) response = view(request) self.assertEqual(response.status_code, 200) @@ -114,10 +130,10 @@ def test_get_user_using_email(self): self.assertEqual(response.data, []) get_params = { - 'search': 'invalid@email.com', + "search": "invalid@email.com", } - request = self.factory.get('/', data=get_params, **self.extra) + request = self.factory.get("/", data=get_params, **self.extra) response = view(request) self.assertEqual(response.status_code, 200) @@ -127,22 +143,20 @@ def test_get_user_using_email(self): def 
test_get_non_org_users(self): self._org_create() - view = UserViewSet.as_view( - {'get': 'list'} - ) + view = UserViewSet.as_view({"get": "list"}) - all_users_request = self.factory.get('/') + all_users_request = self.factory.get("/") all_users_response = view(all_users_request) self.assertEqual(all_users_response.status_code, 200) - self.assertEqual(len( - [u for u in all_users_response.data if u['username'] == 'denoinc'] - ), 1) + self.assertEqual( + len([u for u in all_users_response.data if u["username"] == "denoinc"]), 1 + ) - no_orgs_request = self.factory.get('/', data={'orgs': 'false'}) + no_orgs_request = self.factory.get("/", data={"orgs": "false"}) no_orgs_response = view(no_orgs_request) self.assertEqual(no_orgs_response.status_code, 200) - self.assertEqual(len( - [u for u in no_orgs_response.data if u['username'] == 'denoinc']), - 0) + self.assertEqual( + len([u for u in no_orgs_response.data if u["username"] == "denoinc"]), 0 + ) diff --git a/requirements/base.pip b/requirements/base.pip index 7f2fd6a14b..81b9beb2cc 100644 --- a/requirements/base.pip +++ b/requirements/base.pip @@ -26,7 +26,7 @@ analytics-python==1.4.post1 # via onadata appoptics-metrics==5.1.0 # via onadata -asgiref==3.8.0 +asgiref==3.8.1 # via # django # django-cors-headers @@ -41,9 +41,9 @@ backoff==1.10.0 # via analytics-python billiard==4.2.0 # via celery -boto3==1.34.67 +boto3==1.34.68 # via dataflows-tabulator -botocore==1.34.67 +botocore==1.34.68 # via # boto3 # s3transfer @@ -153,7 +153,7 @@ django-taggit==4.0.0 # via onadata django-templated-email==3.0.1 # via onadata -djangorestframework==3.15.0 +djangorestframework==3.15.1 # via # djangorestframework-csv # djangorestframework-gis @@ -378,7 +378,7 @@ sqlparse==0.4.4 # via # django # django-debug-toolbar -tableschema==1.20.9 +tableschema==1.20.10 # via datapackage typing-extensions==4.10.0 # via diff --git a/requirements/dev.pip b/requirements/dev.pip index 1c6e78f3d0..89902aa040 100644 --- a/requirements/dev.pip +++ 
b/requirements/dev.pip @@ -26,7 +26,7 @@ analytics-python==1.4.post1 # via onadata appoptics-metrics==5.1.0 # via onadata -asgiref==3.8.0 +asgiref==3.8.1 # via # django # django-cors-headers @@ -49,9 +49,9 @@ backoff==1.10.0 # via analytics-python billiard==4.2.0 # via celery -boto3==1.34.67 +boto3==1.34.68 # via dataflows-tabulator -botocore==1.34.67 +botocore==1.34.68 # via # boto3 # s3transfer @@ -174,7 +174,7 @@ django-taggit==4.0.0 # via onadata django-templated-email==3.0.1 # via onadata -djangorestframework==3.15.0 +djangorestframework==3.15.1 # via # djangorestframework-csv # djangorestframework-gis @@ -526,7 +526,7 @@ sqlparse==0.4.4 # django-debug-toolbar stack-data==0.6.3 # via ipython -tableschema==1.20.9 +tableschema==1.20.10 # via datapackage tblib==3.0.0 # via -r requirements/dev.in From e7c8b413e1bb96ad7ee9f661cfe46b75848fd1d3 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Mon, 25 Mar 2024 10:27:00 +0300 Subject: [PATCH 172/270] Add missing dot in test error message --- onadata/apps/api/tests/viewsets/test_dataview_viewset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/onadata/apps/api/tests/viewsets/test_dataview_viewset.py b/onadata/apps/api/tests/viewsets/test_dataview_viewset.py index d959bd4d44..6b28323406 100644 --- a/onadata/apps/api/tests/viewsets/test_dataview_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_dataview_viewset.py @@ -212,7 +212,7 @@ def test_dataview_with_attachment_field(self): self.assertEqual(response.status_code, 404) response_data = json.loads(json.dumps(response.data)) self.assertEqual( - response_data, {"detail": "No Attachment matches the given query"} + response_data, {"detail": "No Attachment matches the given query."} ) # a user with permissions can view a specific attachment object From 770eccbb848982f1992c75b24b63b6d4b1a9d92b Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Mon, 25 Mar 2024 10:28:34 +0300 Subject: [PATCH 173/270] User endpoint has cache headers due to removal 
of never_cache decorator --- onadata/apps/api/tests/viewsets/test_user_viewset.py | 1 - 1 file changed, 1 deletion(-) diff --git a/onadata/apps/api/tests/viewsets/test_user_viewset.py b/onadata/apps/api/tests/viewsets/test_user_viewset.py index 63e3da68e1..7a76b45cec 100644 --- a/onadata/apps/api/tests/viewsets/test_user_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_user_viewset.py @@ -24,7 +24,6 @@ def test_user_get(self): # users list view = UserViewSet.as_view({"get": "list"}) response = view(request) - self.assertIsNone(response.get("Cache-Control")) self.assertEqual(response.status_code, 200) self.assertEqual(response.data, [self.data]) From 24ad97009460500e1beaebaeeee66b084ff44680 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Mon, 25 Mar 2024 12:11:28 +0300 Subject: [PATCH 174/270] Switch to the standard python library unittest.mock --- .../api/tests/permissions/test_permissions.py | 31 +- .../tests/viewsets/test_connect_viewset.py | 12 +- .../api/tests/viewsets/test_data_viewset.py | 8 +- .../tests/viewsets/test_dataview_viewset.py | 27 +- .../api/tests/viewsets/test_media_viewset.py | 8 +- .../tests/viewsets/test_metadata_viewset.py | 548 +++++++++--------- .../test_organization_profile_viewset.py | 2 +- .../api/tests/viewsets/test_osm_viewset.py | 11 +- .../tests/viewsets/test_project_viewset.py | 2 +- .../test_submission_review_viewset.py | 15 +- .../viewsets/test_user_profile_viewset.py | 9 +- .../tests/viewsets/test_xform_list_viewset.py | 13 +- .../api/tests/viewsets/test_xform_viewset.py | 4 +- .../apps/logger/tests/models/test_instance.py | 2 +- .../apps/logger/tests/test_form_submission.py | 18 +- onadata/apps/main/tests/test_process.py | 2 +- onadata/apps/main/tests/test_user_profile.py | 142 +++-- .../messaging/tests/test_backends_mqtt.py | 158 ++--- onadata/apps/messaging/tests/test_signals.py | 34 +- .../restservice/tests/test_restservice.py | 3 +- .../viewsets/test_restservicesviewset.py | 4 +- 
.../apps/viewer/tests/test_attachment_url.py | 50 +- onadata/apps/viewer/tests/test_exports.py | 24 +- onadata/libs/tests/data/test_statistics.py | 8 + onadata/libs/tests/data/test_tools.py | 16 +- .../serializers/test_project_serializer.py | 156 ++--- .../serializers/test_xform_serializer.py | 11 +- onadata/libs/tests/test_authentication.py | 11 +- onadata/libs/tests/test_permissions.py | 119 ++-- onadata/libs/tests/utils/test_csv_import.py | 29 +- onadata/libs/tests/utils/test_email.py | 20 +- .../libs/tests/utils/test_export_builder.py | 2 +- onadata/libs/tests/utils/test_logger_tools.py | 2 +- .../libs/tests/utils/test_project_utils.py | 3 +- onadata/libs/tests/utils/test_viewer_tools.py | 3 +- requirements/base.pip | 12 +- requirements/dev.in | 1 - requirements/dev.pip | 14 +- setup.cfg | 1 - 39 files changed, 806 insertions(+), 729 deletions(-) diff --git a/onadata/apps/api/tests/permissions/test_permissions.py b/onadata/apps/api/tests/permissions/test_permissions.py index c2e499e69c..040af9aedb 100644 --- a/onadata/apps/api/tests/permissions/test_permissions.py +++ b/onadata/apps/api/tests/permissions/test_permissions.py @@ -1,14 +1,19 @@ +# -*- coding: utf-8 -*- +""" +Test onadata.apps.api.permissions module. 
+""" +from unittest.mock import MagicMock, patch + from django.contrib.auth.models import User from django.http import Http404 -from mock import MagicMock, patch -from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet -from onadata.apps.logger.models import Instance, XForm from onadata.apps.api.permissions import ( + AlternateHasObjectPermissionMixin, IsAuthenticatedSubmission, MetaDataObjectPermissions, - AlternateHasObjectPermissionMixin ) +from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet +from onadata.apps.logger.models import Instance, XForm from onadata.libs.permissions import UserProfile @@ -25,13 +30,11 @@ def setUp(self): self.instance.xform = MagicMock(XForm) def test_delete_instance_metadata_perms(self): - request = MagicMock(user=MagicMock(), method='DELETE') + request = MagicMock(user=MagicMock(), method="DELETE") obj = MagicMock(content_object=self.instance) - self.assertTrue( - self.permissions.has_object_permission( - request, self.view, obj)) + self.assertTrue(self.permissions.has_object_permission(request, self.view, obj)) - @patch.object(AlternateHasObjectPermissionMixin, '_has_object_permission') + @patch.object(AlternateHasObjectPermissionMixin, "_has_object_permission") def test_delete_instance_metadata_without_perms(self, has_perms_mock): """ Test that a user cannot delete an instance if they are not allowed @@ -41,11 +44,11 @@ def test_delete_instance_metadata_without_perms(self, has_perms_mock): user = User(username="test") instance = Instance(user=User(username="username")) instance.xform = XForm() - request = MagicMock(user=user, method='DELETE') + request = MagicMock(user=user, method="DELETE") obj = MagicMock(content_object=instance) self.assertFalse( - self.permissions.has_object_permission( - request, self.view, obj)) + self.permissions.has_object_permission(request, self.view, obj) + ) def test_is_authenticated_submission_permissions(self): """ @@ -56,11 +59,11 @@ def 
test_is_authenticated_submission_permissions(self): project = self.xform.project submission_permission = IsAuthenticatedSubmission() - request = MagicMock(method='GET') + request = MagicMock(method="GET") view = MagicMock(username=user.username) self.assertTrue(submission_permission.has_permission(request, self.view)) - request = MagicMock(method='POST') + request = MagicMock(method="POST") view = MagicMock(kwargs={"username": user.username}) self.assertTrue(submission_permission.has_permission(request, view)) diff --git a/onadata/apps/api/tests/viewsets/test_connect_viewset.py b/onadata/apps/api/tests/viewsets/test_connect_viewset.py index b2b309b755..d6aa51706b 100644 --- a/onadata/apps/api/tests/viewsets/test_connect_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_connect_viewset.py @@ -2,8 +2,8 @@ """ Test /user API endpoint """ -from datetime import datetime -from datetime import timedelta +from datetime import datetime, timedelta +from unittest.mock import patch from django.conf import settings from django.contrib.auth.models import User @@ -12,19 +12,19 @@ from django.utils.encoding import force_bytes from django.utils.http import urlsafe_base64_encode from django.utils.timezone import now + from django_digest.backend.db import update_partial_digests -from django_digest.test import DigestAuth, BasicAuth -from mock import patch +from django_digest.test import BasicAuth, DigestAuth from rest_framework import authentication from rest_framework.authtoken.models import Token -from onadata.apps.api.models.temp_token import TempToken from onadata.apps.api.models.odk_token import ODKToken +from onadata.apps.api.models.temp_token import TempToken from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet from onadata.apps.api.viewsets.connect_viewset import ConnectViewSet -from onadata.libs.serializers.password_reset_serializer import default_token_generator from onadata.apps.api.viewsets.project_viewset import ProjectViewSet from 
onadata.libs.authentication import DigestAuthentication +from onadata.libs.serializers.password_reset_serializer import default_token_generator from onadata.libs.serializers.project_serializer import ProjectSerializer from onadata.libs.utils.cache_tools import safe_key diff --git a/onadata/apps/api/tests/viewsets/test_data_viewset.py b/onadata/apps/api/tests/viewsets/test_data_viewset.py index f33dc18ee1..7adadc8b31 100644 --- a/onadata/apps/api/tests/viewsets/test_data_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_data_viewset.py @@ -13,6 +13,7 @@ from datetime import timedelta from io import StringIO from tempfile import NamedTemporaryFile +from unittest.mock import Mock, patch from django.conf import settings from django.core.cache import cache @@ -29,7 +30,6 @@ from django_digest.test import DigestAuth from flaky import flaky from httmock import HTTMock, urlmatch -from mock import Mock, patch from onadata.apps.api.tests.viewsets.test_abstract_viewset import ( TestAbstractViewSet, @@ -1805,7 +1805,7 @@ def test_deletion_of_bulk_submissions(self, send_message_mock): "%d records were deleted" % len(records_to_be_deleted), ) self.assertTrue(send_message_mock.called) - send_message_mock.called_with( + send_message_mock.assert_called_with( [str(i.pk) for i in records_to_be_deleted], formid, XFORM, @@ -1905,7 +1905,7 @@ def test_permanent_deletions_bulk_submissions(self, send_message_mock): "%d records were deleted" % len(records_to_be_deleted), ) self.assertTrue(send_message_mock.called) - send_message_mock.called_with( + send_message_mock.assert_called_with( [str(i.pk) for i in records_to_be_deleted], formid, XFORM, @@ -2058,7 +2058,7 @@ def test_delete_submissions(self, send_message_mock): "%d records were deleted" % len(deleted_instances_subset), ) self.assertTrue(send_message_mock.called) - send_message_mock.called_with( + send_message_mock.assert_called_with( [str(i.pk) for i in deleted_instances_subset], formid, XFORM, diff --git 
a/onadata/apps/api/tests/viewsets/test_dataview_viewset.py b/onadata/apps/api/tests/viewsets/test_dataview_viewset.py index 6b28323406..3069b655fd 100644 --- a/onadata/apps/api/tests/viewsets/test_dataview_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_dataview_viewset.py @@ -1,33 +1,36 @@ # -*- coding: utf-8 -*- """Test DataViewViewSet""" +import csv import json import os -import csv - from datetime import datetime, timedelta +from unittest.mock import patch + from django.conf import settings -from django.test.utils import override_settings from django.core.cache import cache from django.core.files.storage import default_storage +from django.test.utils import override_settings from django.utils.timezone import utc -from mock import patch + from openpyxl import load_workbook -from onadata.libs.permissions import ReadOnlyRole -from onadata.apps.logger.models.data_view import DataView -from onadata.apps.logger.models import Instance, Attachment -from onadata.apps.api.viewsets.attachment_viewset import AttachmentViewSet from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet -from onadata.apps.viewer.models.export import Export -from onadata.apps.api.viewsets.project_viewset import ProjectViewSet +from onadata.apps.api.viewsets.attachment_viewset import AttachmentViewSet from onadata.apps.api.viewsets.dataview_viewset import ( DataViewViewSet, + apply_filters, filter_to_field_lookup, get_field_lookup, get_filter_kwargs, - apply_filters, ) from onadata.apps.api.viewsets.note_viewset import NoteViewSet +from onadata.apps.api.viewsets.project_viewset import ProjectViewSet +from onadata.apps.api.viewsets.xform_viewset import XFormViewSet +from onadata.apps.logger.models import Attachment, Instance +from onadata.apps.logger.models.data_view import DataView +from onadata.apps.viewer.models.export import Export +from onadata.libs.permissions import ReadOnlyRole +from onadata.libs.serializers.attachment_serializer import 
AttachmentSerializer from onadata.libs.serializers.xform_serializer import XFormSerializer from onadata.libs.utils.cache_tools import ( DATAVIEW_COUNT, @@ -35,12 +38,10 @@ PROJECT_LINKED_DATAVIEWS, ) from onadata.libs.utils.common_tags import EDITED, MONGO_STRFTIME -from onadata.apps.api.viewsets.xform_viewset import XFormViewSet from onadata.libs.utils.common_tools import ( filename_from_disposition, get_response_content, ) -from onadata.libs.serializers.attachment_serializer import AttachmentSerializer class TestDataViewViewSet(TestAbstractViewSet): diff --git a/onadata/apps/api/tests/viewsets/test_media_viewset.py b/onadata/apps/api/tests/viewsets/test_media_viewset.py index 31a674c155..37f3623e38 100644 --- a/onadata/apps/api/tests/viewsets/test_media_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_media_viewset.py @@ -1,8 +1,13 @@ +# -*- coding: utf-8 -*- +""" +Tests the MediaViewSet. +""" +# pylint: disable=too-many-lines import os import urllib +from unittest.mock import MagicMock, patch from django.utils import timezone -from mock import MagicMock, patch from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet from onadata.apps.api.viewsets.media_viewset import MediaViewSet @@ -103,7 +108,6 @@ def test_returned_media_is_based_on_form_perms(self): @patch("onadata.libs.utils.image_tools.get_storage_class") @patch("onadata.libs.utils.image_tools.boto3.client") def test_retrieve_view_from_s3(self, mock_presigned_urls, mock_get_storage_class): - expected_url = ( "https://testing.s3.amazonaws.com/doe/attachments/" "4_Media_file/media.png?" diff --git a/onadata/apps/api/tests/viewsets/test_metadata_viewset.py b/onadata/apps/api/tests/viewsets/test_metadata_viewset.py index a87ff665f1..d81c8b5c5a 100644 --- a/onadata/apps/api/tests/viewsets/test_metadata_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_metadata_viewset.py @@ -1,38 +1,46 @@ +# -*- coding: utf-8 -*- +""" +Tests the MetaDataViewSet. 
+""" +# pylint: disable=too-many-lines import os from builtins import open -from mock import patch +from unittest.mock import patch from django.conf import settings from django.contrib.contenttypes.models import ContentType from django.core.files.uploadedfile import InMemoryUploadedFile -from onadata.apps.api.tests.viewsets.test_abstract_viewset import ( - TestAbstractViewSet) +from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet from onadata.apps.api.viewsets.metadata_viewset import MetaDataViewSet from onadata.apps.api.viewsets.project_viewset import ProjectViewSet from onadata.apps.api.viewsets.xform_viewset import XFormViewSet from onadata.apps.main.models.meta_data import MetaData -from onadata.libs.permissions import (DataEntryRole, DataEntryOnlyRole, - EditorRole, EditorMinorRole) +from onadata.libs.permissions import ( + DataEntryOnlyRole, + DataEntryRole, + EditorMinorRole, + EditorRole, +) from onadata.libs.serializers.metadata_serializer import UNIQUE_TOGETHER_ERROR from onadata.libs.serializers.xform_serializer import XFormSerializer from onadata.libs.utils.common_tags import XFORM_META_PERMS class TestMetaDataViewSet(TestAbstractViewSet): + """ + Tests the MetaDataViewSet. 
+ """ def setUp(self): super(TestMetaDataViewSet, self).setUp() - self.view = MetaDataViewSet.as_view({ - 'delete': 'destroy', - 'get': 'retrieve', - 'post': 'create' - }) + self.view = MetaDataViewSet.as_view( + {"delete": "destroy", "get": "retrieve", "post": "create"} + ) self._publish_xls_form_to_project() self.data_value = "screenshot.png" self.fixture_dir = os.path.join( - settings.PROJECT_ROOT, "apps", "main", "tests", "fixtures", - "transportation" + settings.PROJECT_ROOT, "apps", "main", "tests", "fixtures", "transportation" ) self.path = os.path.join(self.fixture_dir, self.data_value) @@ -40,189 +48,191 @@ def setUp(self): ContentType.objects.get_or_create(app_label="logger", model="instance") def _add_project_metadata(self, project, data_type, data_value, path=None): - data = { - 'data_type': data_type, - 'data_value': data_value, - 'project': project.id - } + data = {"data_type": data_type, "data_value": data_value, "project": project.id} if path and data_value: - with open(path, 'rb') as media_file: - data.update({ - 'data_file': media_file, - }) + with open(path, "rb") as media_file: + data.update( + { + "data_file": media_file, + } + ) return self._post_metadata(data) else: return self._post_metadata(data) - def _add_instance_metadata(self, - data_type, - data_value, - path=None): + def _add_instance_metadata(self, data_type, data_value, path=None): xls_file_path = os.path.join( - settings.PROJECT_ROOT, "apps", "logger", "fixtures", - "tutorial", "tutorial.xlsx") + settings.PROJECT_ROOT, + "apps", + "logger", + "fixtures", + "tutorial", + "tutorial.xlsx", + ) self._publish_xls_form_to_project(xlsform_path=xls_file_path) xml_submission_file_path = os.path.join( - settings.PROJECT_ROOT, "apps", "logger", "fixtures", - "tutorial", "instances", "tutorial_2012-06-27_11-27-53.xml") + settings.PROJECT_ROOT, + "apps", + "logger", + "fixtures", + "tutorial", + "instances", + "tutorial_2012-06-27_11-27-53.xml", + ) - 
self._make_submission(xml_submission_file_path, - username=self.user.username) + self._make_submission(xml_submission_file_path, username=self.user.username) self.xform.refresh_from_db() self.instance = self.xform.instances.first() data = { - 'data_type': data_type, - 'data_value': data_value, - 'instance': self.instance.id + "data_type": data_type, + "data_value": data_value, + "instance": self.instance.id, } if path and data_value: - with open(path, 'rb') as media_file: - data.update({ - 'data_file': media_file, - }) + with open(path, "rb") as media_file: + data.update( + { + "data_file": media_file, + } + ) self._post_metadata(data) else: self._post_metadata(data) def test_add_metadata_with_file_attachment(self): - for data_type in ['supporting_doc', 'media', 'source']: - self._add_form_metadata(self.xform, data_type, - self.data_value, self.path) + for data_type in ["supporting_doc", "media", "source"]: + self._add_form_metadata(self.xform, data_type, self.data_value, self.path) def test_parse_error_is_raised(self): """Parse error is raised when duplicate media is uploaded""" data_type = "supporting_doc" - self._add_form_metadata(self.xform, data_type, - self.data_value, self.path) + self._add_form_metadata(self.xform, data_type, self.data_value, self.path) # Duplicate upload - response = self._add_form_metadata(self.xform, data_type, - self.data_value, self.path, False) + response = self._add_form_metadata( + self.xform, data_type, self.data_value, self.path, False + ) self.assertEqual(response.status_code, 400) self.assertIn(UNIQUE_TOGETHER_ERROR, response.data) def test_forms_endpoint_with_metadata(self): date_modified = self.xform.date_modified - for data_type in ['supporting_doc', 'media', 'source']: - self._add_form_metadata(self.xform, data_type, - self.data_value, self.path) + for data_type in ["supporting_doc", "media", "source"]: + self._add_form_metadata(self.xform, data_type, self.data_value, self.path) self.xform.refresh_from_db() 
self.assertNotEqual(date_modified, self.xform.date_modified) # /forms - view = XFormViewSet.as_view({ - 'get': 'retrieve' - }) + view = XFormViewSet.as_view({"get": "retrieve"}) formid = self.xform.pk - request = self.factory.get('/', **self.extra) + request = self.factory.get("/", **self.extra) response = view(request, pk=formid) self.assertEqual(response.status_code, 200) - data = XFormSerializer(self.xform, context={'request': request}).data + data = XFormSerializer(self.xform, context={"request": request}).data self.assertEqual(response.data, data) # /projects/[pk]/forms - view = ProjectViewSet.as_view({ - 'get': 'forms' - }) - request = self.factory.get('/', **self.extra) + view = ProjectViewSet.as_view({"get": "forms"}) + request = self.factory.get("/", **self.extra) response = view(request, pk=self.project.pk) self.assertEqual(response.status_code, 200) self.assertEqual(response.data, [data]) - @patch('onadata.libs.serializers.metadata_serializer.is_azure_storage') - @patch('azure.storage.blob.generate_blob_sas') + @patch("onadata.libs.serializers.metadata_serializer.is_azure_storage") + @patch("azure.storage.blob.generate_blob_sas") def test_forms_endpoint_with_metadata_and_azure_storage( - self, mock_generate_blob_sas, mock_is_azure_storage): - sas_token = 'sc=date+randomText' + self, mock_generate_blob_sas, mock_is_azure_storage + ): + sas_token = "sc=date+randomText" mock_is_azure_storage.return_value = True mock_generate_blob_sas.return_value = sas_token - self._add_form_metadata(self.xform, 'media', - self.data_value, self.path) + self._add_form_metadata(self.xform, "media", self.data_value, self.path) self.xform.refresh_from_db() # /forms - view = XFormViewSet.as_view({ - 'get': 'retrieve' - }) + view = XFormViewSet.as_view({"get": "retrieve"}) formid = self.xform.pk - request = self.factory.get('/', **self.extra) + request = self.factory.get("/", **self.extra) response = view(request, pk=formid) self.assertEqual(response.status_code, 200) - data = 
XFormSerializer(self.xform, context={'request': request}).data + data = XFormSerializer(self.xform, context={"request": request}).data self.assertEqual(response.data, data) self.assertIn(f"?{sas_token}", str(data)) def test_get_metadata_with_file_attachment(self): - for data_type in ['supporting_doc', 'media', 'source']: - self._add_form_metadata(self.xform, data_type, - self.data_value, self.path) - request = self.factory.get('/', **self.extra) + for data_type in ["supporting_doc", "media", "source"]: + self._add_form_metadata(self.xform, data_type, self.data_value, self.path) + request = self.factory.get("/", **self.extra) response = self.view(request, pk=self.metadata.pk) - self.assertNotEqual(response.get('Cache-Control'), None) + self.assertNotEqual(response.get("Cache-Control"), None) self.assertEqual(response.status_code, 200) self.assertEqual(response.data, self.metadata_data) - ext = self.data_value[self.data_value.rindex('.') + 1:] - request = self.factory.get('/', **self.extra) + ext = self.data_value[self.data_value.rindex(".") + 1 :] + request = self.factory.get("/", **self.extra) response = self.view(request, pk=self.metadata.pk, format=ext) self.assertEqual(response.status_code, 200) - self.assertEqual(response['Content-Type'], 'image/png') + self.assertEqual(response["Content-Type"], "image/png") def test_get_metadata(self): self.fixture_dir = os.path.join( - settings.PROJECT_ROOT, "apps", "main", "tests", "fixtures", - "transportation", "instances", "transport_2011-07-25_19-05-49" + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "transportation", + "instances", + "transport_2011-07-25_19-05-49", ) - self.data_value = '1335783522563.jpg' + self.data_value = "1335783522563.jpg" self.path = os.path.join(self.fixture_dir, self.data_value) - self._add_form_metadata( - self.xform, "media", self.data_value, self.path) + self._add_form_metadata(self.xform, "media", self.data_value, self.path) data = { - 'id': self.metadata.pk, - 
'xform': self.xform.pk, - 'data_value': u'1335783522563.jpg', - 'data_type': u'media', - 'extra_data': None, - 'data_file': u'http://localhost:8000/media/%s/formid-media/' - '1335783522563.jpg' % self.user.username, - 'data_file_type': u'image/jpeg', - 'media_url': u'http://localhost:8000/media/%s/formid-media/' - '1335783522563.jpg' % self.user.username, - 'file_hash': u'md5:2ca0d22073a9b6b4ebe51368b08da60c', - 'url': 'http://testserver/api/v1/metadata/%s' % self.metadata.pk, - 'date_created': self.metadata.date_created + "id": self.metadata.pk, + "xform": self.xform.pk, + "data_value": "1335783522563.jpg", + "data_type": "media", + "extra_data": None, + "data_file": "http://localhost:8000/media/%s/formid-media/" + "1335783522563.jpg" % self.user.username, + "data_file_type": "image/jpeg", + "media_url": "http://localhost:8000/media/%s/formid-media/" + "1335783522563.jpg" % self.user.username, + "file_hash": "md5:2ca0d22073a9b6b4ebe51368b08da60c", + "url": "http://testserver/api/v1/metadata/%s" % self.metadata.pk, + "date_created": self.metadata.date_created, } - request = self.factory.get('/', **self.extra) + request = self.factory.get("/", **self.extra) response = self.view(request, pk=self.metadata.pk) self.assertEqual(response.status_code, 200) self.assertDictEqual(dict(response.data), data) def test_add_mapbox_layer(self): - data_type = 'mapbox_layer' - data_value = 'test_mapbox_layer||http://0.0.0.0:8080||attribution' + data_type = "mapbox_layer" + data_value = "test_mapbox_layer||http://0.0.0.0:8080||attribution" self._add_form_metadata(self.xform, data_type, data_value) def test_delete_metadata(self): - for data_type in ['supporting_doc', 'media', 'source']: + for data_type in ["supporting_doc", "media", "source"]: count = MetaData.objects.count() - self._add_form_metadata(self.xform, data_type, - self.data_value, self.path) - request = self.factory.delete('/', **self.extra) + self._add_form_metadata(self.xform, data_type, self.data_value, self.path) + 
request = self.factory.delete("/", **self.extra) response = self.view(request, pk=self.metadata.pk) self.assertEqual(response.status_code, 204) self.assertEqual(count, MetaData.objects.count()) def test_delete_xform_deletes_media_metadata(self): self._add_test_metadata() - self.view = MetaDataViewSet.as_view({'get': 'list'}) - data = {'xform': self.xform.pk} - request = self.factory.get('/', data, **self.extra) + self.view = MetaDataViewSet.as_view({"get": "list"}) + data = {"xform": self.xform.pk} + request = self.factory.get("/", data, **self.extra) response = self.view(request) meta_count = self.xform.metadata_set.all().count() self.assertEqual(response.status_code, 200) @@ -237,189 +247,188 @@ def test_delete_xform_deletes_media_metadata(self): self.assertEqual(response2.data, []) def test_windows_csv_file_upload_to_metadata(self): - data_value = 'transportation.csv' + data_value = "transportation.csv" path = os.path.join(self.fixture_dir, data_value) with open(path) as f: f = InMemoryUploadedFile( - f, 'media', data_value, 'application/octet-stream', 2625, None) + f, "media", data_value, "application/octet-stream", 2625, None + ) data = { - 'data_value': data_value, - 'data_file': f, - 'data_type': 'media', - 'xform': self.xform.pk + "data_value": data_value, + "data_file": f, + "data_type": "media", + "xform": self.xform.pk, } self._post_metadata(data) - self.assertEqual(self.metadata.data_file_type, 'text/csv') + self.assertEqual(self.metadata.data_file_type, "text/csv") def test_add_media_url(self): - data_type = 'media' + data_type = "media" # test invalid URL - data_value = 'some thing random here' + data_value = "some thing random here" response = self._add_form_metadata( - self.xform, data_type, data_value, test=False) - expected_exception = { - 'data_value': [u"Invalid url 'some thing random here'."] - } + self.xform, data_type, data_value, test=False + ) + expected_exception = {"data_value": ["Invalid url 'some thing random here'."]} 
self.assertEqual(response.data, expected_exception) # test valid URL - data_value = 'https://devtrac.ona.io/fieldtrips.csv' + data_value = "https://devtrac.ona.io/fieldtrips.csv" self._add_form_metadata(self.xform, data_type, data_value) - request = self.factory.get('/', **self.extra) - ext = self.data_value[self.data_value.rindex('.') + 1:] + request = self.factory.get("/", **self.extra) + ext = self.data_value[self.data_value.rindex(".") + 1 :] response = self.view(request, pk=self.metadata.pk, format=ext) self.assertEqual(response.status_code, 302) - self.assertEqual(response['Location'], data_value) + self.assertEqual(response["Location"], data_value) def test_add_media_xform_link(self): - data_type = 'media' + data_type = "media" # test missing parameters - data_value = 'xform {}'.format(self.xform.pk) + data_value = "xform {}".format(self.xform.pk) response = self._add_form_metadata( - self.xform, data_type, data_value, test=False) + self.xform, data_type, data_value, test=False + ) expected_exception = { - 'data_value': [ - u"Expecting 'xform [xform id] [media name]' or " - "'dataview [dataview id] [media name]' or a valid URL."] + "data_value": [ + "Expecting 'xform [xform id] [media name]' or " + "'dataview [dataview id] [media name]' or a valid URL." 
+ ] } self.assertEqual(response.data, expected_exception) - data_value = 'xform {} transportation'.format(self.xform.pk) + data_value = "xform {} transportation".format(self.xform.pk) self._add_form_metadata(self.xform, data_type, data_value) - self.assertIsNotNone(self.metadata_data['media_url']) + self.assertIsNotNone(self.metadata_data["media_url"]) - request = self.factory.get('/', **self.extra) - ext = self.data_value[self.data_value.rindex('.') + 1:] + request = self.factory.get("/", **self.extra) + ext = self.data_value[self.data_value.rindex(".") + 1 :] response = self.view(request, pk=self.metadata.pk, format=ext) self.assertEqual(response.status_code, 200) - self.assertEqual(response['Content-Disposition'], - 'attachment; filename=transportation.csv') + self.assertEqual( + response["Content-Disposition"], "attachment; filename=transportation.csv" + ) def test_add_media_geojson_link(self): - data_type = 'media' - data_value = 'xform_geojson {} transportation'.format(self.xform.pk) + data_type = "media" + data_value = "xform_geojson {} transportation".format(self.xform.pk) extra_data = { "data_title": "test", "data_simple_style": True, "data_geo_field": "test", - "data_fields": "transport/available_transportation_types_to_referral_facility/ambulance" # noqa + "data_fields": "transport/available_transportation_types_to_referral_facility/ambulance", # noqa } self._add_form_metadata( - self.xform, - data_type, - data_value, - extra_data=extra_data + self.xform, data_type, data_value, extra_data=extra_data ) - self.assertIsNotNone(self.metadata_data['media_url']) - request = self.factory.get('/', **self.extra) - ext = self.data_value[self.data_value.rindex('.') + 1:] + self.assertIsNotNone(self.metadata_data["media_url"]) + request = self.factory.get("/", **self.extra) + ext = self.data_value[self.data_value.rindex(".") + 1 :] response = self.view(request, pk=self.metadata.pk, format=ext) self.assertEqual(response.status_code, 200) - 
self.assertEqual(response['Content-Disposition'], - 'attachment; filename=transportation.geojson') + self.assertEqual( + response["Content-Disposition"], + "attachment; filename=transportation.geojson", + ) def test_add_media_dataview_link(self): self._create_dataview() - data_type = 'media' - data_value = 'dataview {} transportation'.format(self.data_view.pk) + data_type = "media" + data_value = "dataview {} transportation".format(self.data_view.pk) self._add_form_metadata(self.xform, data_type, data_value) - self.assertIsNotNone(self.metadata_data['media_url']) + self.assertIsNotNone(self.metadata_data["media_url"]) - request = self.factory.get('/', **self.extra) - ext = self.data_value[self.data_value.rindex('.') + 1:] + request = self.factory.get("/", **self.extra) + ext = self.data_value[self.data_value.rindex(".") + 1 :] response = self.view(request, pk=self.metadata.pk, format=ext) self.assertEqual(response.status_code, 200) - self.assertEqual(response['Content-Disposition'], - 'attachment; filename=transportation.csv') + self.assertEqual( + response["Content-Disposition"], "attachment; filename=transportation.csv" + ) def test_invalid_post(self): response = self._post_metadata({}, False) self.assertEqual(response.status_code, 400) - response = self._post_metadata({ - 'data_type': 'supporting_doc'}, False) + response = self._post_metadata({"data_type": "supporting_doc"}, False) self.assertEqual(response.status_code, 400) - response = self._post_metadata({ - 'data_type': 'supporting_doc', - 'xform': self.xform.pk - }, False) + response = self._post_metadata( + {"data_type": "supporting_doc", "xform": self.xform.pk}, False + ) self.assertEqual(response.status_code, 400) - response = self._post_metadata({ - 'data_type': 'supporting_doc', - 'data_value': 'supporting.doc' - }, False) + response = self._post_metadata( + {"data_type": "supporting_doc", "data_value": "supporting.doc"}, False + ) self.assertEqual(response.status_code, 400) def 
_add_test_metadata(self): - for data_type in ['supporting_doc', 'media', 'source']: - self._add_form_metadata(self.xform, data_type, - self.data_value, self.path) + for data_type in ["supporting_doc", "media", "source"]: + self._add_form_metadata(self.xform, data_type, self.data_value, self.path) def test_list_metadata(self): self._add_test_metadata() - self.view = MetaDataViewSet.as_view({'get': 'list'}) - request = self.factory.get('/') + self.view = MetaDataViewSet.as_view({"get": "list"}) + request = self.factory.get("/") response = self.view(request) self.assertEqual(response.status_code, 401) - request = self.factory.get('/', **self.extra) + request = self.factory.get("/", **self.extra) response = self.view(request) - self.assertNotEqual(response.get('Cache-Control'), None) + self.assertNotEqual(response.get("Cache-Control"), None) self.assertEqual(response.status_code, 200) def test_list_metadata_for_specific_form(self): self._add_test_metadata() - self.view = MetaDataViewSet.as_view({'get': 'list'}) - data = {'xform': self.xform.pk} - request = self.factory.get('/', data) + self.view = MetaDataViewSet.as_view({"get": "list"}) + data = {"xform": self.xform.pk} + request = self.factory.get("/", data) response = self.view(request) self.assertEqual(response.status_code, 401) - request = self.factory.get('/', data, **self.extra) + request = self.factory.get("/", data, **self.extra) response = self.view(request) - self.assertNotEqual(response.get('Cache-Control'), None) + self.assertNotEqual(response.get("Cache-Control"), None) self.assertEqual(response.status_code, 200) - data['xform'] = 1234509909 - request = self.factory.get('/', data, **self.extra) + data["xform"] = 1234509909 + request = self.factory.get("/", data, **self.extra) response = self.view(request) self.assertEqual(response.status_code, 404) - data['xform'] = "INVALID" - request = self.factory.get('/', data, **self.extra) + data["xform"] = "INVALID" + request = self.factory.get("/", data, 
**self.extra) response = self.view(request) self.assertEqual(response.status_code, 400) def test_project_metadata_has_project_field(self): self._add_project_metadata( - self.project, 'supporting_doc', self.data_value, self.path) + self.project, "supporting_doc", self.data_value, self.path + ) # Test json of project metadata - request = self.factory.get('/', **self.extra) - response = self.view(request, pk=self.metadata_data['id']) + request = self.factory.get("/", **self.extra) + response = self.view(request, pk=self.metadata_data["id"]) self.assertEqual(response.status_code, 200) data = dict(response.data) - self.assertIsNotNone(data['media_url']) - self.assertEqual(data['project'], self.metadata.object_id) + self.assertIsNotNone(data["media_url"]) + self.assertEqual(data["project"], self.metadata.object_id) def test_instance_metadata_has_instance_field(self): - self._add_instance_metadata( - 'supporting_doc', self.data_value, self.path) + self._add_instance_metadata("supporting_doc", self.data_value, self.path) # Test json of project metadata - request = self.factory.get('/', **self.extra) - response = self.view(request, pk=self.metadata_data['id']) + request = self.factory.get("/", **self.extra) + response = self.view(request, pk=self.metadata_data["id"]) self.assertEqual(response.status_code, 200) data = dict(response.data) - self.assertIsNotNone(data['media_url']) - self.assertEqual(data['instance'], self.metadata.object_id) + self.assertIsNotNone(data["media_url"]) + self.assertEqual(data["instance"], self.metadata.object_id) def test_should_return_both_xform_and_project_metadata(self): # delete all existing metadata @@ -427,14 +436,16 @@ def test_should_return_both_xform_and_project_metadata(self): expected_metadata_count = 2 project_response = self._add_project_metadata( - self.project, 'media', "check.png", self.path) - self.assertTrue("image/png" in project_response.data['data_file_type']) + self.project, "media", "check.png", self.path + ) + 
self.assertTrue("image/png" in project_response.data["data_file_type"]) form_response = self._add_form_metadata( - self.xform, 'supporting_doc', "bla.png", self.path) - self.assertTrue("image/png" in form_response.data['data_file_type']) + self.xform, "supporting_doc", "bla.png", self.path + ) + self.assertTrue("image/png" in form_response.data["data_file_type"]) - view = MetaDataViewSet.as_view({'get': 'list'}) + view = MetaDataViewSet.as_view({"get": "list"}) request = self.factory.get("/", **self.extra) response = view(request) @@ -442,23 +453,21 @@ def test_should_return_both_xform_and_project_metadata(self): for record in response.data: if record.get("xform"): - self.assertEqual(record.get('xform'), self.xform.id) - self.assertIsNone(record.get('project')) + self.assertEqual(record.get("xform"), self.xform.id) + self.assertIsNone(record.get("project")) else: - self.assertEqual(record.get('project'), self.project.id) - self.assertIsNone(record.get('xform')) + self.assertEqual(record.get("project"), self.project.id) + self.assertIsNone(record.get("xform")) def test_should_only_return_xform_metadata(self): # delete all existing metadata MetaData.objects.all().delete() - self._add_project_metadata( - self.project, 'media', "check.png", self.path) + self._add_project_metadata(self.project, "media", "check.png", self.path) - self._add_form_metadata( - self.xform, 'supporting_doc', "bla.png", self.path) + self._add_form_metadata(self.xform, "supporting_doc", "bla.png", self.path) - view = MetaDataViewSet.as_view({'get': 'list'}) + view = MetaDataViewSet.as_view({"get": "list"}) query_data = {"xform": self.xform.id} request = self.factory.get("/", data=query_data, **self.extra) response = view(request) @@ -468,15 +477,15 @@ def test_should_only_return_xform_metadata(self): self.assertNotIn("project", response.data[0]) def _create_metadata_object(self): - view = MetaDataViewSet.as_view({'post': 'create'}) - with open(self.path, 'rb') as media_file: + view = 
MetaDataViewSet.as_view({"post": "create"}) + with open(self.path, "rb") as media_file: data = { - 'data_type': 'media', - 'data_value': 'check.png', - 'data_file': media_file, - 'project': self.project.id + "data_type": "media", + "data_value": "check.png", + "data_file": media_file, + "project": self.project.id, } - request = self.factory.post('/', data, **self.extra) + request = self.factory.post("/", data, **self.extra) response = view(request) return response @@ -492,134 +501,123 @@ def test_integrity_error_is_handled(self): self.assertEqual(response.status_code, 400) def test_invalid_form_metadata(self): - view = MetaDataViewSet.as_view({'post': 'create'}) - with open(self.path, 'rb') as media_file: + view = MetaDataViewSet.as_view({"post": "create"}) + with open(self.path, "rb") as media_file: data = { - 'data_type': "media", - 'data_value': self.data_value, - 'xform': 999912, - 'data_file': media_file, + "data_type": "media", + "data_value": self.data_value, + "xform": 999912, + "data_file": media_file, } - request = self.factory.post('/', data, **self.extra) + request = self.factory.post("/", data, **self.extra) response = view(request) self.assertEqual(response.status_code, 400) - self.assertEqual(response.data, - {'xform': ['XForm does not exist']}) + self.assertEqual(response.data, {"xform": ["XForm does not exist"]}) def test_xform_meta_permission(self): - view = MetaDataViewSet.as_view({'post': 'create'}) + view = MetaDataViewSet.as_view({"post": "create"}) data = { - 'data_type': XFORM_META_PERMS, - 'data_value': 'editor-minor|dataentry', - 'xform': self.xform.pk + "data_type": XFORM_META_PERMS, + "data_value": "editor-minor|dataentry", + "xform": self.xform.pk, } - request = self.factory.post('/', data, **self.extra) + request = self.factory.post("/", data, **self.extra) response = view(request) self.assertEqual(response.status_code, 201) meta = MetaData.xform_meta_permission(self.xform) - self.assertEqual(meta.data_value, 
response.data.get('data_value')) + self.assertEqual(meta.data_value, response.data.get("data_value")) data = { - 'data_type': XFORM_META_PERMS, - 'data_value': 'editor-minors|invalid_role', - 'xform': self.xform.pk + "data_type": XFORM_META_PERMS, + "data_value": "editor-minors|invalid_role", + "xform": self.xform.pk, } - request = self.factory.post('/', data, **self.extra) + request = self.factory.post("/", data, **self.extra) response = view(request) self.assertEqual(response.status_code, 400) - error = u"Format 'role'|'role' or Invalid role" - self.assertEqual(response.data, {'non_field_errors': [error]}) + error = "Format 'role'|'role' or Invalid role" + self.assertEqual(response.data, {"non_field_errors": [error]}) def test_role_update_xform_meta_perms(self): - alice_data = {'username': 'alice', 'email': 'alice@localhost.com'} + alice_data = {"username": "alice", "email": "alice@localhost.com"} alice_profile = self._create_user_profile(alice_data) EditorRole.add(alice_profile.user, self.xform) - view = MetaDataViewSet.as_view({ - 'post': 'create', - 'put': 'update' - }) + view = MetaDataViewSet.as_view({"post": "create", "put": "update"}) data = { - 'data_type': XFORM_META_PERMS, - 'data_value': 'editor-minor|dataentry', - 'xform': self.xform.pk + "data_type": XFORM_META_PERMS, + "data_value": "editor-minor|dataentry", + "xform": self.xform.pk, } - request = self.factory.post('/', data, **self.extra) + request = self.factory.post("/", data, **self.extra) response = view(request) self.assertEqual(response.status_code, 201) - self.assertFalse( - EditorRole.user_has_role(alice_profile.user, self.xform)) + self.assertFalse(EditorRole.user_has_role(alice_profile.user, self.xform)) - self.assertTrue( - EditorMinorRole.user_has_role(alice_profile.user, self.xform)) + self.assertTrue(EditorMinorRole.user_has_role(alice_profile.user, self.xform)) meta = MetaData.xform_meta_permission(self.xform) DataEntryRole.add(alice_profile.user, self.xform) data = { - 'data_type': 
XFORM_META_PERMS, - 'data_value': 'editor|dataentry-only', - 'xform': self.xform.pk + "data_type": XFORM_META_PERMS, + "data_value": "editor|dataentry-only", + "xform": self.xform.pk, } - request = self.factory.put('/', data, **self.extra) + request = self.factory.put("/", data, **self.extra) response = view(request, pk=meta.pk) self.assertEqual(response.status_code, 200) - self.assertFalse( - DataEntryRole.user_has_role(alice_profile.user, self.xform)) + self.assertFalse(DataEntryRole.user_has_role(alice_profile.user, self.xform)) - self.assertTrue( - DataEntryOnlyRole.user_has_role(alice_profile.user, self.xform)) + self.assertTrue(DataEntryOnlyRole.user_has_role(alice_profile.user, self.xform)) def test_xform_meta_perms_duplicates(self): - view = MetaDataViewSet.as_view({ - 'post': 'create', - 'put': 'update' - }) + view = MetaDataViewSet.as_view({"post": "create", "put": "update"}) ct = ContentType.objects.get_for_model(self.xform) data = { - 'data_type': XFORM_META_PERMS, - 'data_value': 'editor-minor|dataentry', - 'xform': self.xform.pk + "data_type": XFORM_META_PERMS, + "data_value": "editor-minor|dataentry", + "xform": self.xform.pk, } - request = self.factory.post('/', data, **self.extra) + request = self.factory.post("/", data, **self.extra) response = view(request) self.assertEqual(response.status_code, 201) - count = MetaData.objects.filter(data_type=XFORM_META_PERMS, - object_id=self.xform.pk, - content_type=ct.pk).count() + count = MetaData.objects.filter( + data_type=XFORM_META_PERMS, object_id=self.xform.pk, content_type=ct.pk + ).count() self.assertEqual(1, count) data = { - 'data_type': XFORM_META_PERMS, - 'data_value': 'editor-minor|dataentry-only', - 'xform': self.xform.pk + "data_type": XFORM_META_PERMS, + "data_value": "editor-minor|dataentry-only", + "xform": self.xform.pk, } - request = self.factory.post('/', data, **self.extra) + request = self.factory.post("/", data, **self.extra) response = view(request) 
self.assertEqual(response.status_code, 201) - count = MetaData.objects.filter(data_type=XFORM_META_PERMS, - object_id=self.xform.pk, - content_type=ct.pk).count() + count = MetaData.objects.filter( + data_type=XFORM_META_PERMS, object_id=self.xform.pk, content_type=ct.pk + ).count() self.assertEqual(1, count) @@ -634,13 +632,13 @@ def test_unique_submission_review_metadata(self): response = self._add_form_metadata(self.xform, data_type, data_value) # Duplicate with different Data Value - view = MetaDataViewSet.as_view({'post': 'create'}) + view = MetaDataViewSet.as_view({"post": "create"}) data = { - 'xform': response.data['xform'], - 'data_type': data_type, - 'data_value': False, + "xform": response.data["xform"], + "data_type": data_type, + "data_value": False, } - request = self.factory.post('/', data, **self.extra) + request = self.factory.post("/", data, **self.extra) d_response = view(request) self.assertEqual(d_response.status_code, 400) diff --git a/onadata/apps/api/tests/viewsets/test_organization_profile_viewset.py b/onadata/apps/api/tests/viewsets/test_organization_profile_viewset.py index 7ae8ae6f09..3425b6fe23 100644 --- a/onadata/apps/api/tests/viewsets/test_organization_profile_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_organization_profile_viewset.py @@ -4,13 +4,13 @@ """ import json from builtins import str as text +from unittest.mock import patch from django.contrib.auth.models import User, timezone from django.core.cache import cache from django.test.utils import override_settings from guardian.shortcuts import get_perms -from mock import patch from rest_framework import status from onadata.apps.api.models.organization_profile import ( diff --git a/onadata/apps/api/tests/viewsets/test_osm_viewset.py b/onadata/apps/api/tests/viewsets/test_osm_viewset.py index 4ba649601c..c2b5c8618a 100644 --- a/onadata/apps/api/tests/viewsets/test_osm_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_osm_viewset.py @@ -1,6 +1,11 @@ +# -*- coding: 
utf-8 -*- +""" +Test OSMViewSet module. +""" import csv import os from io import StringIO +from unittest.mock import patch from django.conf import settings from django.db import IntegrityError, transaction @@ -9,8 +14,6 @@ from django.test.utils import override_settings from django.utils.dateparse import parse_datetime -from mock import patch - from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet from onadata.apps.api.viewsets.data_viewset import DataViewSet from onadata.apps.api.viewsets.osm_viewset import OsmViewSet @@ -25,6 +28,10 @@ class TestOSMViewSet(TestAbstractViewSet): + """ + Test OSMViewSet module. + """ + def setUp(self): super(self.__class__, self).setUp() self._login_user_and_profile() diff --git a/onadata/apps/api/tests/viewsets/test_project_viewset.py b/onadata/apps/api/tests/viewsets/test_project_viewset.py index 2a787b6f6c..1695ac9360 100644 --- a/onadata/apps/api/tests/viewsets/test_project_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_project_viewset.py @@ -7,6 +7,7 @@ from collections import OrderedDict from datetime import datetime from operator import itemgetter +from unittest.mock import MagicMock, Mock, patch from django.conf import settings from django.contrib.auth import get_user_model @@ -18,7 +19,6 @@ import dateutil.parser import requests from httmock import HTTMock, urlmatch -from mock import MagicMock, Mock, patch from rest_framework.authtoken.models import Token from six import iteritems diff --git a/onadata/apps/api/tests/viewsets/test_submission_review_viewset.py b/onadata/apps/api/tests/viewsets/test_submission_review_viewset.py index 24b3bb729a..c442439530 100644 --- a/onadata/apps/api/tests/viewsets/test_submission_review_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_submission_review_viewset.py @@ -3,15 +3,16 @@ """ from __future__ import unicode_literals -from mock import patch +from unittest.mock import patch + from rest_framework.test import APIRequestFactory from 
onadata.apps.api.viewsets.submission_review_viewset import SubmissionReviewViewSet -from onadata.apps.logger.models import SubmissionReview, Instance +from onadata.apps.logger.models import Instance, SubmissionReview from onadata.apps.main.tests.test_base import TestBase -from onadata.apps.messaging.constants import XFORM, SUBMISSION_REVIEWED -from onadata.libs.permissions import EditorRole, OwnerRole, ManagerRole -from onadata.libs.utils.common_tags import REVIEW_STATUS, REVIEW_COMMENT +from onadata.apps.messaging.constants import SUBMISSION_REVIEWED, XFORM +from onadata.libs.permissions import EditorRole, ManagerRole, OwnerRole +from onadata.libs.utils.common_tags import REVIEW_COMMENT, REVIEW_STATUS class TestSubmissionReviewViewSet(TestBase): @@ -65,7 +66,7 @@ def test_submission_review_create(self, mock_send_message): ) # sends message upon saving the submission review self.assertTrue(mock_send_message.called) - mock_send_message.called_with( + mock_send_message.assert_called_with( submission_review.id, submission_review.instance.xform.id, XFORM, @@ -100,7 +101,7 @@ def test_bulk_create_submission_review(self, mock_send_message): already_seen = [] # sends message upon saving the submission review self.assertTrue(mock_send_message.called) - mock_send_message.called_with( + mock_send_message.assert_called_with( [s.id for s in self.xform.instances.all()], self.xform.id, XFORM, diff --git a/onadata/apps/api/tests/viewsets/test_user_profile_viewset.py b/onadata/apps/api/tests/viewsets/test_user_profile_viewset.py index aaed53b8d5..cdcef75cd1 100644 --- a/onadata/apps/api/tests/viewsets/test_user_profile_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_user_profile_viewset.py @@ -6,21 +6,21 @@ import datetime import json import os -from six.moves.urllib.parse import urlparse, parse_qs +from unittest.mock import call, patch from django.contrib.auth import get_user_model from django.core.cache import cache from django.db.models import signals from 
django.test.utils import override_settings -from django.utils.dateparse import parse_datetime from django.utils import timezone +from django.utils.dateparse import parse_datetime import requests from django_digest.test import DigestAuth from httmock import HTTMock, all_requests -from mock import patch, call from registration.models import RegistrationProfile from rest_framework.authtoken.models import Token +from six.moves.urllib.parse import parse_qs, urlparse from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet from onadata.apps.api.viewsets.connect_viewset import ConnectViewSet @@ -30,9 +30,8 @@ from onadata.apps.main.models import UserProfile from onadata.apps.main.models.user_profile import set_kpi_formbuilder_permissions from onadata.libs.authentication import DigestAuthentication -from onadata.libs.serializers.user_profile_serializer import _get_first_last_names from onadata.libs.permissions import EditorRole - +from onadata.libs.serializers.user_profile_serializer import _get_first_last_names User = get_user_model() diff --git a/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py index 6f4709c2ea..9869903b09 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py @@ -1,13 +1,18 @@ +# -*- coding: utf-8 -*- +""" +Test XFormListViewSet module. 
+""" import os from builtins import open from hashlib import md5 +from unittest.mock import patch from django.conf import settings from django.test import TransactionTestCase from django.urls import reverse + from django_digest.test import Client as DigestClient from django_digest.test import DigestAuth -from mock import patch from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet from onadata.apps.api.viewsets.project_viewset import ProjectViewSet @@ -16,10 +21,14 @@ XFormListViewSet, ) from onadata.apps.main.models import MetaData -from onadata.libs.permissions import DataEntryRole, ReadOnlyRole, OwnerRole +from onadata.libs.permissions import DataEntryRole, OwnerRole, ReadOnlyRole class TestXFormListViewSet(TestAbstractViewSet, TransactionTestCase): + """ + Test XFormListViewSet module. + """ + def setUp(self): super(self.__class__, self).setUp() self.view = XFormListViewSet.as_view({"get": "list"}) diff --git a/onadata/apps/api/tests/viewsets/test_xform_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_viewset.py index 049a0b12cd..9b9c385839 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_viewset.py @@ -14,6 +14,7 @@ from datetime import datetime, timedelta from http.client import BadStatusLine from io import StringIO +from unittest.mock import Mock, patch from xml.dom import Node, minidom from django.conf import settings @@ -31,7 +32,6 @@ from django_digest.test import DigestAuth from flaky import flaky from httmock import HTTMock -from mock import Mock, patch from rest_framework import status from onadata.apps.api.tests.mocked_data import ( @@ -214,7 +214,7 @@ def test_replace_form_with_external_choices(self, mock_send_message): self.assertEqual(response.status_code, 200) # send message upon form update self.assertTrue(mock_send_message.called) - mock_send_message.called_with( + mock_send_message.assert_called_with( self.xform.id, self.xform.id, XFORM, 
request.user, FORM_UPDATED ) diff --git a/onadata/apps/logger/tests/models/test_instance.py b/onadata/apps/logger/tests/models/test_instance.py index 89222d97bc..5c2d9db30f 100644 --- a/onadata/apps/logger/tests/models/test_instance.py +++ b/onadata/apps/logger/tests/models/test_instance.py @@ -4,13 +4,13 @@ """ import os from datetime import datetime, timedelta +from unittest.mock import Mock, patch from django.http.request import HttpRequest from django.test import override_settings from django.utils.timezone import utc from django_digest.test import DigestAuth -from mock import Mock, patch from onadata.apps.logger.models import Instance, SubmissionReview, XForm from onadata.apps.logger.models.instance import ( diff --git a/onadata/apps/logger/tests/test_form_submission.py b/onadata/apps/logger/tests/test_form_submission.py index 4ab3d651dc..16c2d6b7ad 100644 --- a/onadata/apps/logger/tests/test_form_submission.py +++ b/onadata/apps/logger/tests/test_form_submission.py @@ -4,25 +4,25 @@ """ import os import re - from contextlib import contextmanager -from django.http import Http404 -from django.http import UnreadablePostError -from django_digest.test import DigestAuth -from django_digest.test import Client as DigestClient +from unittest.mock import ANY, Mock, patch + +from django.http import Http404, UnreadablePostError from django.test.utils import override_settings + +from django_digest.test import Client as DigestClient +from django_digest.test import DigestAuth from guardian.shortcuts import assign_perm -from mock import patch, Mock, ANY from nose import SkipTest -from onadata.apps.main.models.user_profile import UserProfile -from onadata.apps.main.tests.test_base import TestBase from onadata.apps.logger.models import Instance from onadata.apps.logger.models.instance import InstanceHistory from onadata.apps.logger.models.project import Project from onadata.apps.logger.models.xform import XForm from onadata.apps.logger.xform_instance_parser import 
clean_and_parse_xml -from onadata.apps.viewer.models.parsed_instance import query_data, query_count +from onadata.apps.main.models.user_profile import UserProfile +from onadata.apps.main.tests.test_base import TestBase +from onadata.apps.viewer.models.parsed_instance import query_count, query_data from onadata.apps.viewer.signals import process_submission from onadata.libs.utils.common_tags import GEOLOCATION, LAST_EDITED diff --git a/onadata/apps/main/tests/test_process.py b/onadata/apps/main/tests/test_process.py index 113e575403..0dc53e8f6b 100644 --- a/onadata/apps/main/tests/test_process.py +++ b/onadata/apps/main/tests/test_process.py @@ -10,6 +10,7 @@ from datetime import datetime from hashlib import md5 from io import BytesIO +from unittest.mock import patch from xml.dom import Node, minidom from django.conf import settings @@ -22,7 +23,6 @@ import requests from django_digest.test import Client as DigestClient from flaky import flaky -from mock import patch from six import iteritems from onadata.apps.logger.models import XForm diff --git a/onadata/apps/main/tests/test_user_profile.py b/onadata/apps/main/tests/test_user_profile.py index 0bc68bbd81..f83225c916 100644 --- a/onadata/apps/main/tests/test_user_profile.py +++ b/onadata/apps/main/tests/test_user_profile.py @@ -1,19 +1,25 @@ +# -*- coding: utf-8 -*- +""" +Test user profile +""" from __future__ import unicode_literals -from django.contrib.auth.models import AnonymousUser -from django.contrib.auth.models import User -from django.test import RequestFactory -from django.test import TestCase +from unittest.mock import patch + +from django.contrib.auth.models import AnonymousUser, User +from django.test import RequestFactory, TestCase from django.test.client import Client from django.urls import reverse -from mock import patch from onadata.apps.logger.xform_instance_parser import XLSFormError -from onadata.apps.main.views import profile, api_token +from onadata.apps.main.views import api_token, profile 
from onadata.libs.utils.common_tools import merge_dicts class TestUserProfile(TestCase): + """ + Test user profile + """ def setup(self): self.client = Client() @@ -21,82 +27,78 @@ def setup(self): def _login_user_and_profile(self, extra_post_data={}): post_data = { - 'username': 'bob', - 'email': 'bob@columbia.edu', - 'password1': 'bobbob', - 'password2': 'bobbob', - 'first_name': 'Bob', - 'last_name': 'User', - 'city': 'Bobville', - 'country': 'US', - 'organization': 'Bob Inc.', - 'home_page': 'bob.com', - 'twitter': 'boberama' + "username": "bob", + "email": "bob@columbia.edu", + "password1": "bobbob", + "password2": "bobbob", + "first_name": "Bob", + "last_name": "User", + "city": "Bobville", + "country": "US", + "organization": "Bob Inc.", + "home_page": "bob.com", + "twitter": "boberama", } - url = '/accounts/register/' + url = "/accounts/register/" post_data = merge_dicts(post_data, extra_post_data) self.response = self.client.post(url, post_data) try: - self.user = User.objects.get(username=post_data['username']) + self.user = User.objects.get(username=post_data["username"]) except User.DoesNotExist: pass def test_create_user_with_given_name(self): self._login_user_and_profile() self.assertEqual(self.response.status_code, 302) - self.assertEqual(self.user.username, 'bob') + self.assertEqual(self.user.username, "bob") - @patch('onadata.apps.main.views.render') + @patch("onadata.apps.main.views.render") def test_xlsform_error_returns_400(self, mock_render): - mock_render.side_effect = XLSFormError( - "Title shouldn't have an ampersand") + mock_render.side_effect = XLSFormError("Title shouldn't have an ampersand") self._login_user_and_profile() - response = self.client.get( - reverse(profile, kwargs={ - 'username': "bob" - })) + response = self.client.get(reverse(profile, kwargs={"username": "bob"})) self.assertTrue(mock_render.called) self.assertEqual(response.status_code, 400) - self.assertEqual(response.content.decode('utf-8'), - "Title shouldn't have an 
ampersand") + self.assertEqual( + response.content.decode("utf-8"), "Title shouldn't have an ampersand" + ) def test_create_user_profile_for_user(self): self._login_user_and_profile() self.assertEqual(self.response.status_code, 302) user_profile = self.user.profile - self.assertEqual(user_profile.city, 'Bobville') - self.assertTrue(hasattr(user_profile, 'metadata')) + self.assertEqual(user_profile.city, "Bobville") + self.assertTrue(hasattr(user_profile, "metadata")) def test_disallow_non_alpha_numeric(self): invalid_usernames = [ - 'b ob', - 'b.o.b.', - 'b-ob', - 'b!', - '@bob', - 'bob@bob.com', - 'bob$', - 'b&o&b', - 'bob?', - '#bob', - '(bob)', - 'b*ob', - '%s % bob', + "b ob", + "b.o.b.", + "b-ob", + "b!", + "@bob", + "bob@bob.com", + "bob$", + "b&o&b", + "bob?", + "#bob", + "(bob)", + "b*ob", + "%s % bob", ] users_before = User.objects.count() for username in invalid_usernames: - self._login_user_and_profile({'username': username}) + self._login_user_and_profile({"username": username}) self.assertEqual(User.objects.count(), users_before) def test_disallow_reserved_name(self): users_before = User.objects.count() - self._login_user_and_profile({'username': 'admin'}) + self._login_user_and_profile({"username": "admin"}) self.assertEqual(User.objects.count(), users_before) def test_404_if_user_does_not_exist(self): - response = self.client.get(reverse(profile, - kwargs={'username': 'nonuser'})) + response = self.client.get(reverse(profile, kwargs={"username": "nonuser"})) self.assertEqual(response.status_code, 404) def test_403_if_unauthorised_user_tries_to_access_api_token_link(self): @@ -105,54 +107,48 @@ def test_403_if_unauthorised_user_tries_to_access_api_token_link(self): # create user alice post_data = { - 'username': 'alice', - 'email': 'alice@columbia.edu', - 'password1': 'alicealice', - 'password2': 'alicealice', - 'first_name': 'Alice', - 'last_name': 'Wonderland', - 'city': 'Aliceville', - 'country': 'KE', - 'organization': 'Alice Inc.', - 'home_page': 
'alice.com', - 'twitter': 'alicemsweet' + "username": "alice", + "email": "alice@columbia.edu", + "password1": "alicealice", + "password2": "alicealice", + "first_name": "Alice", + "last_name": "Wonderland", + "city": "Aliceville", + "country": "KE", + "organization": "Alice Inc.", + "home_page": "alice.com", + "twitter": "alicemsweet", } - url = '/accounts/register/' + url = "/accounts/register/" self.client.post(url, post_data) # try accessing api-token with an anonymous user - request = factory.get('/api-token') + request = factory.get("/api-token") request.user = AnonymousUser() - response = api_token(request, 'alice') + response = api_token(request, "alice") self.assertEqual(response.status_code, 302) # login with user bob self._login_user_and_profile() # try accessing api-token with user 'bob' but with username 'alice' - request = factory.get('/api-token') + request = factory.get("/api-token") request.user = self.user - response = api_token(request, 'alice') + response = api_token(request, "alice") self.assertEqual(response.status_code, 403) # try accessing api-token with user 'bob' but with username 'bob' - request = factory.get('/api-token') + request = factory.get("/api-token") request.user = self.user response = api_token(request, self.user.username) self.assertEqual(response.status_code, 200) def test_show_single_at_sign_in_twitter_link(self): self._login_user_and_profile() - response = self.client.get( - reverse(profile, kwargs={ - 'username': "bob" - })) + response = self.client.get(reverse(profile, kwargs={"username": "bob"})) self.assertContains(response, ">@boberama") # add the @ sign self.user.profile.twitter = "@boberama" self.user.profile.save() - response = self.client.get( - reverse(profile, kwargs={ - 'username': "bob" - })) + response = self.client.get(reverse(profile, kwargs={"username": "bob"})) self.assertContains(response, ">@boberama") diff --git a/onadata/apps/messaging/tests/test_backends_mqtt.py 
b/onadata/apps/messaging/tests/test_backends_mqtt.py index e4d3d3176e..f9bcd0f925 100644 --- a/onadata/apps/messaging/tests/test_backends_mqtt.py +++ b/onadata/apps/messaging/tests/test_backends_mqtt.py @@ -6,36 +6,37 @@ import json import ssl +from unittest.mock import MagicMock, patch from django.test import TestCase -from mock import MagicMock, patch - -from onadata.apps.messaging.backends.mqtt import (MQTTBackend, get_payload, - get_target_metadata) +from onadata.apps.messaging.backends.mqtt import ( + MQTTBackend, + get_payload, + get_target_metadata, +) from onadata.apps.messaging.constants import PROJECT, XFORM -from onadata.apps.messaging.tests.test_base import (_create_message, - _create_user) +from onadata.apps.messaging.tests.test_base import _create_message, _create_user class TestMQTTBackend(TestCase): """ Test MQTT Backend """ + maxDiff = None def test_mqtt_get_topic(self): """ Test MQTT backend get_topic method """ - from_user = _create_user('Bob') - to_user = _create_user('Alice') - instance = _create_message(from_user, to_user, 'I love oov') - mqtt = MQTTBackend(options={'HOST': 'localhost'}) - expected = ( - "/{topic_root}/{target_name}/{target_id}/messages/publish".format( - topic_root='onadata', target_name='user', - target_id=to_user.id)) + from_user = _create_user("Bob") + to_user = _create_user("Alice") + instance = _create_message(from_user, to_user, "I love oov") + mqtt = MQTTBackend(options={"HOST": "localhost"}) + expected = "/{topic_root}/{target_name}/{target_id}/messages/publish".format( + topic_root="onadata", target_name="user", target_id=to_user.id + ) self.assertEqual(expected, mqtt.get_topic(instance)) def test_get_target_metadata(self): @@ -44,106 +45,107 @@ def test_get_target_metadata(self): """ # User objects - user = _create_user('John') - user_metadata = {'id': user.pk, 'name': user.get_full_name()} + user = _create_user("John") + user_metadata = {"id": user.pk, "name": user.get_full_name()} self.assertEqual( - 
json.dumps(user_metadata), json.dumps(get_target_metadata(user))) + json.dumps(user_metadata), json.dumps(get_target_metadata(user)) + ) # XForm objects xform = MagicMock() xform.pk = 1337 - xform.title = 'Test Form' - xform.id_string = 'Test_Form_ID' + xform.title = "Test Form" + xform.id_string = "Test_Form_ID" xform._meta.model_name = XFORM - xform_metadata = { - 'id': 1337, - 'name': 'Test Form', - 'form_id': 'Test_Form_ID' - } + xform_metadata = {"id": 1337, "name": "Test Form", "form_id": "Test_Form_ID"} self.assertEqual( - json.dumps(xform_metadata), json.dumps(get_target_metadata(xform))) + json.dumps(xform_metadata), json.dumps(get_target_metadata(xform)) + ) # Project objects project = MagicMock() project.pk = 7331 - project.name = 'Test Project' + project.name = "Test Project" project._meta.model_name = PROJECT - project_metadata = {'id': 7331, 'name': 'Test Project'} + project_metadata = {"id": 7331, "name": "Test Project"} self.assertEqual( - json.dumps(project_metadata), - json.dumps(get_target_metadata(project))) + json.dumps(project_metadata), json.dumps(get_target_metadata(project)) + ) def test_mqtt_get_payload(self): """ Test MQTT backend get_payload function """ - from_user = _create_user('Bob') - to_user = _create_user('Alice') - instance = _create_message(from_user, to_user, 'I love oov') + from_user = _create_user("Bob") + to_user = _create_user("Alice") + instance = _create_message(from_user, to_user, "I love oov") payload = { - 'id': instance.id, - 'time': instance.timestamp.isoformat(), - 'payload': { - 'author': { - 'username': from_user.username, - 'real_name': from_user.get_full_name() + "id": instance.id, + "time": instance.timestamp.isoformat(), + "payload": { + "author": { + "username": from_user.username, + "real_name": from_user.get_full_name(), }, - 'context': { - 'type': to_user._meta.model_name, - 'metadata': { - 'id': to_user.pk, - 'name': to_user.get_full_name() - }, - 'verb': 'message' + "context": { + "type": 
to_user._meta.model_name, + "metadata": {"id": to_user.pk, "name": to_user.get_full_name()}, + "verb": "message", }, - 'message': "I love oov" - } + "message": "I love oov", + }, } self.assertEqual( - json.dumps(payload), get_payload(instance, verbose_payload=True)) + json.dumps(payload), get_payload(instance, verbose_payload=True) + ) expected_payload = { - 'id': instance.id, - 'verb': instance.verb, - 'message': "I love oov", - 'user': from_user.username, - 'timestamp': instance.timestamp.isoformat() + "id": instance.id, + "verb": instance.verb, + "message": "I love oov", + "user": from_user.username, + "timestamp": instance.timestamp.isoformat(), } self.assertEqual( - json.dumps(expected_payload), get_payload( - instance, verbose_payload=False)) + json.dumps(expected_payload), get_payload(instance, verbose_payload=False) + ) - @patch('onadata.apps.messaging.backends.mqtt.publish.single') + @patch("onadata.apps.messaging.backends.mqtt.publish.single") def test_mqtt_send(self, mocked): """ Test MQTT Backend send method """ - from_user = _create_user('Bob') - to_user = _create_user('Alice') - instance = _create_message(from_user, to_user, 'I love oov') - mqtt = MQTTBackend(options={ - 'HOST': 'localhost', - 'PORT': 8883, - 'SECURE': True, - 'CA_CERT_FILE': 'cacert.pem', - 'CERT_FILE': 'emq.pem', - 'KEY_FILE': 'emq.key' - }) + from_user = _create_user("Bob") + to_user = _create_user("Alice") + instance = _create_message(from_user, to_user, "I love oov") + mqtt = MQTTBackend( + options={ + "HOST": "localhost", + "PORT": 8883, + "SECURE": True, + "CA_CERT_FILE": "cacert.pem", + "CERT_FILE": "emq.pem", + "KEY_FILE": "emq.key", + } + ) mqtt.send(instance=instance) self.assertTrue(mocked.called) args, kwargs = mocked.call_args_list[0] self.assertEqual(mqtt.get_topic(instance), args[0]) - self.assertEqual(get_payload(instance), kwargs['payload']) - self.assertEqual('localhost', kwargs['hostname']) - self.assertEqual(8883, kwargs['port']) - self.assertEqual(0, 
kwargs['qos']) - self.assertEqual(False, kwargs['retain']) + self.assertEqual(get_payload(instance), kwargs["payload"]) + self.assertEqual("localhost", kwargs["hostname"]) + self.assertEqual(8883, kwargs["port"]) + self.assertEqual(0, kwargs["qos"]) + self.assertEqual(False, kwargs["retain"]) self.assertDictEqual( - dict(ca_certs='cacert.pem', - certfile='emq.pem', - keyfile='emq.key', - tls_version=ssl.PROTOCOL_TLSv1_2, - cert_reqs=ssl.CERT_NONE), - kwargs['tls']) + dict( + ca_certs="cacert.pem", + certfile="emq.pem", + keyfile="emq.key", + tls_version=ssl.PROTOCOL_TLSv1_2, + cert_reqs=ssl.CERT_NONE, + ), + kwargs["tls"], + ) diff --git a/onadata/apps/messaging/tests/test_signals.py b/onadata/apps/messaging/tests/test_signals.py index f05f9b42d7..0342a58ec8 100644 --- a/onadata/apps/messaging/tests/test_signals.py +++ b/onadata/apps/messaging/tests/test_signals.py @@ -4,10 +4,12 @@ """ from __future__ import unicode_literals -from actstream.models import Action +from unittest.mock import patch + from django.test import TestCase from django.test.utils import override_settings -from mock import patch + +from actstream.models import Action from onadata.apps.messaging.signals import messaging_backends_handler @@ -20,12 +22,11 @@ class TestSignals(TestCase): # pylint: disable=invalid-name @override_settings( NOTIFICATION_BACKENDS={ - 'mqtt': { - 'BACKEND': 'onadata.apps.messaging.backends.base.BaseBackend' - }, + "mqtt": {"BACKEND": "onadata.apps.messaging.backends.base.BaseBackend"}, }, - MESSAGING_ASYNC_NOTIFICATION=True) - @patch('onadata.apps.messaging.signals.call_backend_async.apply_async') + MESSAGING_ASYNC_NOTIFICATION=True, + ) + @patch("onadata.apps.messaging.signals.call_backend_async.apply_async") def test_messaging_backends_handler_async(self, call_backend_async_mock): """ Test messaging backends handler function. 
@@ -33,15 +34,15 @@ def test_messaging_backends_handler_async(self, call_backend_async_mock): messaging_backends_handler(Action, instance=Action(id=9), created=True) self.assertTrue(call_backend_async_mock.called) call_backend_async_mock.assert_called_with( - ('onadata.apps.messaging.backends.base.BaseBackend', 9, None), - countdown=2) + ("onadata.apps.messaging.backends.base.BaseBackend", 9, None), countdown=2 + ) - @override_settings(NOTIFICATION_BACKENDS={ - 'mqtt': { - 'BACKEND': 'onadata.apps.messaging.backends.base.BaseBackend' - }, - }) - @patch('onadata.apps.messaging.signals.call_backend') + @override_settings( + NOTIFICATION_BACKENDS={ + "mqtt": {"BACKEND": "onadata.apps.messaging.backends.base.BaseBackend"}, + } + ) + @patch("onadata.apps.messaging.signals.call_backend") def test_messaging_backends_handler(self, call_backend_mock): """ Test messaging backends handler function. @@ -49,4 +50,5 @@ def test_messaging_backends_handler(self, call_backend_mock): messaging_backends_handler(Action, instance=Action(id=9), created=True) self.assertTrue(call_backend_mock.called) call_backend_mock.assert_called_with( - 'onadata.apps.messaging.backends.base.BaseBackend', 9, None) + "onadata.apps.messaging.backends.base.BaseBackend", 9, None + ) diff --git a/onadata/apps/restservice/tests/test_restservice.py b/onadata/apps/restservice/tests/test_restservice.py index 21ea036b34..d85f6d6657 100644 --- a/onadata/apps/restservice/tests/test_restservice.py +++ b/onadata/apps/restservice/tests/test_restservice.py @@ -4,12 +4,11 @@ """ import os import time +from unittest.mock import patch from django.test.utils import override_settings from django.urls import reverse -from mock import patch - from onadata.apps.logger.models.xform import XForm from onadata.apps.main.models import MetaData from onadata.apps.main.tests.test_base import TestBase diff --git a/onadata/apps/restservice/tests/viewsets/test_restservicesviewset.py 
b/onadata/apps/restservice/tests/viewsets/test_restservicesviewset.py index 19ad06421a..af2052e068 100644 --- a/onadata/apps/restservice/tests/viewsets/test_restservicesviewset.py +++ b/onadata/apps/restservice/tests/viewsets/test_restservicesviewset.py @@ -2,9 +2,9 @@ """ Test /restservices API endpoint implementation. """ -from django.test.utils import override_settings +from unittest.mock import patch -from mock import patch +from django.test.utils import override_settings from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet from onadata.apps.main.models.meta_data import MetaData diff --git a/onadata/apps/viewer/tests/test_attachment_url.py b/onadata/apps/viewer/tests/test_attachment_url.py index 3a7a8f31c6..bf5b4c2a2b 100644 --- a/onadata/apps/viewer/tests/test_attachment_url.py +++ b/onadata/apps/viewer/tests/test_attachment_url.py @@ -1,10 +1,15 @@ +# -*- coding: utf-8 -*- +""" +Test attachments. +""" import os +from unittest.mock import patch from django.conf import settings from django.contrib.auth import authenticate from django.http import HttpResponseRedirect from django.urls import reverse -from mock import patch + from rest_framework.test import APIRequestFactory from onadata.apps.logger.models import Attachment @@ -14,6 +19,9 @@ class TestAttachmentUrl(TestBase): + """ + Test attachments. 
+ """ def setUp(self): self.attachment_count = 0 @@ -21,51 +29,49 @@ def setUp(self): self._create_user_and_login() self._publish_transportation_form() self._submit_transport_instance_w_attachment() - self.url = reverse( - attachment_url, kwargs={'size': 'original'}) + self.url = reverse(attachment_url, kwargs={"size": "original"}) self._submission_url = reverse( - 'submissions', kwargs={'username': self.user.username}) + "submissions", kwargs={"username": self.user.username} + ) def test_attachment_url(self): - self.assertEqual( - Attachment.objects.count(), self.attachment_count + 1) + self.assertEqual(Attachment.objects.count(), self.attachment_count + 1) response = self.client.get( - self.url, {"media_file": self.attachment_media_file.name}) + self.url, {"media_file": self.attachment_media_file.name} + ) self.assertEqual(response.status_code, 302) # redirects to amazon def test_attachment_url_no_redirect(self): - self.assertEqual( - Attachment.objects.count(), self.attachment_count + 1) + self.assertEqual(Attachment.objects.count(), self.attachment_count + 1) response = self.client.get( - self.url, {"media_file": self.attachment_media_file.name, - 'no_redirect': 'true'}) + self.url, + {"media_file": self.attachment_media_file.name, "no_redirect": "true"}, + ) self.assertEqual(response.status_code, 200) # no redirects to amazon def test_attachment_not_found(self): response = self.client.get( - self.url, {"media_file": "non_existent_attachment.jpg"}) + self.url, {"media_file": "non_existent_attachment.jpg"} + ) self.assertEqual(response.status_code, 404) def test_attachment_has_mimetype(self): attachment = Attachment.objects.all().reverse()[0] - self.assertEqual(attachment.mimetype, 'image/jpeg') + self.assertEqual(attachment.mimetype, "image/jpeg") def test_attachment_url_w_media_id(self): """Test attachment url with attachment id""" - self.assertEqual( - Attachment.objects.count(), self.attachment_count + 1) - response = self.client.get( - self.url, 
{"attachment_id": self.attachment.id}) + self.assertEqual(Attachment.objects.count(), self.attachment_count + 1) + response = self.client.get(self.url, {"attachment_id": self.attachment.id}) self.assertEqual(response.status_code, 302) # redirects to amazon # pylint: disable=invalid-name def test_attachment_url_w_media_id_no_redirect(self): """Test attachment url with attachment id no redirect""" - self.assertEqual( - Attachment.objects.count(), self.attachment_count + 1) + self.assertEqual(Attachment.objects.count(), self.attachment_count + 1) response = self.client.get( - self.url, {"attachment_id": self.attachment.id, - 'no_redirect': 'true'}) + self.url, {"attachment_id": self.attachment.id, "no_redirect": "true"} + ) self.assertEqual(response.status_code, 200) # no redirects to amazon @patch("onadata.apps.viewer.views.generate_media_download_url") @@ -108,7 +114,7 @@ def test_attachment_url_has_azure_sas_token(self, mock_media_url): # get submission enc attachment attachment = Attachment.objects.all()[1] - sas_token = "se=ab736fba7261" # nosec + sas_token = "se=ab736fba7261" # nosec expected_url = f"http://testserver/{attachment.media_file.name}?{sas_token}" mock_media_url.return_value = HttpResponseRedirect(redirect_to=expected_url) response = self.client.get(self.url, {"media_file": attachment.media_file.name}) diff --git a/onadata/apps/viewer/tests/test_exports.py b/onadata/apps/viewer/tests/test_exports.py index ce5393c463..55b8264523 100644 --- a/onadata/apps/viewer/tests/test_exports.py +++ b/onadata/apps/viewer/tests/test_exports.py @@ -1,49 +1,53 @@ +# -*- coding: utf-8 -*- +""" +Test exports +""" import csv import datetime import json import os from io import StringIO from time import sleep +from unittest.mock import patch -import openpyxl - -from celery import current_app from django.conf import settings from django.core.files.storage import get_storage_class from django.http import Http404 from django.urls import reverse from django.utils.dateparse 
import parse_datetime -from mock import patch + +import openpyxl +from celery import current_app from onadata.apps.logger.models import Instance from onadata.apps.main.models.meta_data import MetaData from onadata.apps.main.tests.test_base import TestBase from onadata.apps.main.views import delete_data from onadata.apps.viewer.models.export import Export -from onadata.apps.viewer.models.parsed_instance import query_data, query_count +from onadata.apps.viewer.models.parsed_instance import query_count, query_data from onadata.apps.viewer.tasks import create_xlsx_export from onadata.apps.viewer.tests.export_helpers import viewer_fixture_path from onadata.apps.viewer.views import ( + create_export, delete_export, + export_download, export_list, - create_export, export_progress, - export_download, ) from onadata.apps.viewer.xls_writer import XlsWriter from onadata.libs.utils.common_tools import get_response_content from onadata.libs.utils.export_builder import dict_to_joined_export from onadata.libs.utils.export_tools import ( + clean_keys_of_slashes, generate_export, increment_index_in_filename, - clean_keys_of_slashes, ) AMBULANCE_KEY = ( - "transport/available_transportation_types_to_referral_fac" "ility/ambulance" + "transport/available_transportation_types_to_referral_facility/ambulance" ) AMBULANCE_KEY_DOTS = ( - "transport.available_transportation_types_to_referra" "l_facility.ambulance" + "transport.available_transportation_types_to_referral_facility.ambulance" ) diff --git a/onadata/libs/tests/data/test_statistics.py b/onadata/libs/tests/data/test_statistics.py index b3c4844055..f65735dcb5 100644 --- a/onadata/libs/tests/data/test_statistics.py +++ b/onadata/libs/tests/data/test_statistics.py @@ -1,9 +1,17 @@ +# -*- coding: utf-8 -*- +""" +Test onadata.libs.data module +""" import unittest from onadata.libs.data import statistics as stats class TestStatistics(unittest.TestCase): + """ + Test onadata.libs.data module + """ + def test_get_mean(self): values = [1, 
2, 3, 2, 5, 5] result = stats.get_mean(values) diff --git a/onadata/libs/tests/data/test_tools.py b/onadata/libs/tests/data/test_tools.py index 652d7e8523..cf81c8cee9 100644 --- a/onadata/libs/tests/data/test_tools.py +++ b/onadata/libs/tests/data/test_tools.py @@ -1,19 +1,27 @@ -from datetime import datetime, timedelta -from django.utils.timezone import utc +# -*- coding: utf-8 -*- +""" +Test onadata.libs.data.query module +""" import os +from datetime import datetime, timedelta +from unittest.mock import patch -from mock import patch +from django.utils.timezone import utc from onadata.apps.logger.models.instance import Instance from onadata.apps.main.tests.test_base import TestBase from onadata.libs.data.query import ( - get_form_submissions_grouped_by_field, get_date_fields, get_field_records, + get_form_submissions_grouped_by_field, ) class TestTools(TestBase): + """ + Test onadata.libs.data.query module + """ + def setUp(self): super().setUp() self._create_user_and_login() diff --git a/onadata/libs/tests/serializers/test_project_serializer.py b/onadata/libs/tests/serializers/test_project_serializer.py index 2775eec4bb..7a0b86774b 100644 --- a/onadata/libs/tests/serializers/test_project_serializer.py +++ b/onadata/libs/tests/serializers/test_project_serializer.py @@ -1,17 +1,28 @@ -from mock import MagicMock +# -*- coding: utf-8 -*- +""" +Test onadata.libs.serializers.project_serializer +""" +from unittest.mock import MagicMock + +from django.core.cache import cache + from rest_framework import serializers from rest_framework.test import APIRequestFactory -from django.core.cache import cache -from onadata.libs.utils.cache_tools import PROJ_OWNER_CACHE, safe_key -from onadata.apps.api.tests.viewsets.test_abstract_viewset import \ - TestAbstractViewSet +from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet from onadata.apps.logger.models import Project -from onadata.libs.serializers.project_serializer import (BaseProjectSerializer, 
- ProjectSerializer) +from onadata.libs.serializers.project_serializer import ( + BaseProjectSerializer, + ProjectSerializer, +) +from onadata.libs.utils.cache_tools import PROJ_OWNER_CACHE, safe_key class TestBaseProjectSerializer(TestAbstractViewSet): + """ + Test onadata.libs.serializers.project_serializer + """ + def setUp(self): self.factory = APIRequestFactory() self._login_user_and_profile() @@ -19,46 +30,54 @@ def setUp(self): self._org_create() data = { - 'name': u'demo', - 'owner': - 'http://testserver/api/v1/users/%s' - % self.organization.user.username, - 'metadata': {'description': 'Some description', - 'location': 'Naivasha, Kenya', - 'category': 'governance'}, - 'public': False - } + "name": "demo", + "owner": "http://testserver/api/v1/users/%s" + % self.organization.user.username, + "metadata": { + "description": "Some description", + "location": "Naivasha, Kenya", + "category": "governance", + }, + "public": False, + } # Create the project self._project_create(data) def test_get_users(self): - "" + """""" # Is none when request to get users lacks a project users = self.serializer.get_users(None) self.assertEqual(users, None) # Has members and NOT collaborators when NOT passed 'owner' - request = self.factory.get('/', **self.extra) + request = self.factory.get("/", **self.extra) request.user = self.user - self.serializer.context['request'] = request + self.serializer.context["request"] = request users = self.serializer.get_users(self.project) - self.assertEqual(sorted(users, key=lambda x: x['first_name']), - [{'first_name': u'Bob', - 'last_name': u'erama', - 'is_org': False, - 'role': 'owner', - 'user': u'bob', - 'metadata': {}}, - {'first_name': u'Dennis', - 'last_name': u'', - 'is_org': True, - 'role': 'owner', - 'user': u'denoinc', - 'metadata': {}}]) + self.assertEqual( + sorted(users, key=lambda x: x["first_name"]), + [ + { + "first_name": "Bob", + "last_name": "erama", + "is_org": False, + "role": "owner", + "user": "bob", + "metadata": {}, + 
}, + { + "first_name": "Dennis", + "last_name": "", + "is_org": True, + "role": "owner", + "user": "denoinc", + "metadata": {}, + }, + ], + ) class TestProjectSerializer(TestAbstractViewSet): - def setUp(self): self.serializer = ProjectSerializer() self.factory = APIRequestFactory() @@ -75,40 +94,42 @@ def test_get_project_forms(self): project = Project.objects.last() form = project.xform_set.last() - request = self.factory.get('/', **self.extra) + request = self.factory.get("/", **self.extra) request.user = self.user serializer = ProjectSerializer(project) - serializer.context['request'] = request + serializer.context["request"] = request - self.assertEqual(len(serializer.data['forms']), 1) - self.assertEqual(serializer.data['forms'][0]['encrypted'], False) - self.assertEqual(serializer.data['num_datasets'], 1) + self.assertEqual(len(serializer.data["forms"]), 1) + self.assertEqual(serializer.data["forms"][0]["encrypted"], False) + self.assertEqual(serializer.data["num_datasets"], 1) # delete form in project form.delete() # Check that project has no forms self.assertIsNone(project.xform_set.last()) - serializer = ProjectSerializer(project, context={'request': request}) - self.assertEqual(len(serializer.data['forms']), 0) - self.assertEqual(serializer.data['num_datasets'], 0) + serializer = ProjectSerializer(project, context={"request": request}) + self.assertEqual(len(serializer.data["forms"]), 0) + self.assertEqual(serializer.data["num_datasets"], 0) def test_create_duplicate_projects(self): validated_data = { - 'name': u'demo', - 'organization': self.user, - 'metadata': {'description': 'Some description', - 'location': 'Naivasha, Kenya', - 'category': 'governance'}, - 'public': False - } + "name": "demo", + "organization": self.user, + "metadata": { + "description": "Some description", + "location": "Naivasha, Kenya", + "category": "governance", + }, + "public": False, + } # create first project request = MagicMock(user=self.user) - serializer = 
ProjectSerializer(context={'request': request}) + serializer = ProjectSerializer(context={"request": request}) project = serializer.create(validated_data) - self.assertEqual(project.name, u'demo') + self.assertEqual(project.name, "demo") self.assertEqual(project.organization, self.user) # create another project with same data @@ -116,37 +137,36 @@ def test_create_duplicate_projects(self): serializer.create(validated_data) self.assertEqual( e.exception.detail, - [u'The fields name, organization must make a unique set.']) + ["The fields name, organization must make a unique set."], + ) def test_new_project_set_to_cache(self): """ Test that newly created project is set to cache """ data = { - 'name': u'demo', - 'owner': - 'http://testserver/api/v1/users/%s' - % self.user, - 'metadata': {'description': 'Some description', - 'location': 'Naivasha, Kenya', - 'category': 'governance'}, - 'public': False - } + "name": "demo", + "owner": "http://testserver/api/v1/users/%s" % self.user, + "metadata": { + "description": "Some description", + "location": "Naivasha, Kenya", + "category": "governance", + }, + "public": False, + } # clear cache - cache.delete(safe_key(f'{PROJ_OWNER_CACHE}1')) - self.assertIsNone(cache.get(safe_key(f'{PROJ_OWNER_CACHE}1'))) + cache.delete(safe_key(f"{PROJ_OWNER_CACHE}1")) + self.assertIsNone(cache.get(safe_key(f"{PROJ_OWNER_CACHE}1"))) # Create the project self._project_create(data) self.assertIsNotNone(self.project_data) - request = self.factory.get('/', **self.extra) + request = self.factory.get("/", **self.extra) request.user = self.user - serializer = ProjectSerializer( - self.project, context={'request': request}).data - self.assertEqual( - cache.get(f'{PROJ_OWNER_CACHE}{self.project.pk}'), serializer) + serializer = ProjectSerializer(self.project, context={"request": request}).data + self.assertEqual(cache.get(f"{PROJ_OWNER_CACHE}{self.project.pk}"), serializer) # clear cache - cache.delete(safe_key(f'{PROJ_OWNER_CACHE}{self.project.pk}')) + 
cache.delete(safe_key(f"{PROJ_OWNER_CACHE}{self.project.pk}")) diff --git a/onadata/libs/tests/serializers/test_xform_serializer.py b/onadata/libs/tests/serializers/test_xform_serializer.py index d6ba035198..36d8b9f90f 100644 --- a/onadata/libs/tests/serializers/test_xform_serializer.py +++ b/onadata/libs/tests/serializers/test_xform_serializer.py @@ -2,8 +2,9 @@ """ Test onadata.libs.serializers.xform_serializer """ +from unittest.mock import MagicMock + from django.test import TestCase -from mock import MagicMock from onadata.apps.main.tests.test_base import TestBase from onadata.libs.serializers.xform_serializer import XFormManifestSerializer @@ -22,10 +23,10 @@ def test_get_filename_from_url(self): serializer = XFormManifestSerializer() obj.data_value = "http://example.com/" - self.assertEqual(serializer.get_filename(obj), 'example.com') + self.assertEqual(serializer.get_filename(obj), "example.com") obj.data_value = "http://example.com/clinics.csv" - self.assertEqual(serializer.get_filename(obj), 'clinics.csv') + self.assertEqual(serializer.get_filename(obj), "clinics.csv") # pylint: disable=C0103 def test_get_filename_form_filtered_dataset(self): @@ -36,7 +37,7 @@ def test_get_filename_form_filtered_dataset(self): serializer = XFormManifestSerializer() obj.data_value = "xform 1 clinics" - self.assertEqual(serializer.get_filename(obj), 'clinics.csv') + self.assertEqual(serializer.get_filename(obj), "clinics.csv") def test_get_hash(self): """ @@ -54,7 +55,7 @@ def test_get_hash(self): obj.data_value = "xform {} test_dataset".format(self.xform.id) - obj.file_hash = u'md5:b9cc8695c526f3c7aaa882234f3b9484' + obj.file_hash = "md5:b9cc8695c526f3c7aaa882234f3b9484" obj.data_file = "" self.assertNotEqual(serializer.get_hash(obj), obj.file_hash) diff --git a/onadata/libs/tests/test_authentication.py b/onadata/libs/tests/test_authentication.py index caba609e8d..e19f4ab66d 100644 --- a/onadata/libs/tests/test_authentication.py +++ 
b/onadata/libs/tests/test_authentication.py @@ -1,27 +1,26 @@ -import jwt from datetime import timedelta +from unittest.mock import MagicMock, patch from django.conf import settings from django.contrib.auth.models import User -from django.test import TestCase from django.http.request import HttpRequest +from django.test import TestCase -from mock import patch, MagicMock +import jwt from oauth2_provider.models import AccessToken from rest_framework.exceptions import AuthenticationFailed from rest_framework.test import APIRequestFactory from onadata.apps.api.models.temp_token import TempToken -from onadata.libs.utils.common_tags import API_TOKEN from onadata.libs.authentication import ( DigestAuthentication, + MasterReplicaOAuth2Validator, TempTokenAuthentication, TempTokenURLParameterAuthentication, check_lockout, get_api_token, - MasterReplicaOAuth2Validator, ) - +from onadata.libs.utils.common_tags import API_TOKEN JWT_SECRET_KEY = getattr(settings, "JWT_SECRET_KEY", "jwt") JWT_ALGORITHM = getattr(settings, "JWT_ALGORITHM", "HS256") diff --git a/onadata/libs/tests/test_permissions.py b/onadata/libs/tests/test_permissions.py index 00e65a956e..a9ad1b218d 100644 --- a/onadata/libs/tests/test_permissions.py +++ b/onadata/libs/tests/test_permissions.py @@ -2,17 +2,27 @@ """ Tests onadata.libs.permissions module """ +from unittest.mock import patch + from django.contrib.auth.models import Group + from guardian.shortcuts import get_users_with_perms -from mock import patch from onadata.apps.api import tools from onadata.apps.main.models.user_profile import UserProfile from onadata.apps.main.tests.test_base import TestBase from onadata.libs.permissions import ( - CAN_ADD_XFORM_TO_PROFILE, DataEntryMinorRole, EditorRole, ManagerRole, - NoRecordsPermission, OwnerRole, ReadOnlyRole, ReadOnlyRoleNoDownload, - filter_queryset_xform_meta_perms_sql, get_object_users_with_permissions) + CAN_ADD_XFORM_TO_PROFILE, + DataEntryMinorRole, + EditorRole, + ManagerRole, + 
NoRecordsPermission, + OwnerRole, + ReadOnlyRole, + ReadOnlyRoleNoDownload, + filter_queryset_xform_meta_perms_sql, + get_object_users_with_permissions, +) def perms_for(user, obj): @@ -26,12 +36,13 @@ class TestPermissions(TestBase): """ Tests for onadata.libs.permissions module """ + def test_manager_role_add(self): """ Test adding ManagerRole """ bob, _ = UserProfile.objects.get_or_create(user=self.user) - alice = self._create_user('alice', 'alice') + alice = self._create_user("alice", "alice") self.assertFalse(alice.has_perm(CAN_ADD_XFORM_TO_PROFILE, bob)) ManagerRole.add(alice, bob) @@ -43,7 +54,7 @@ def test_manager_has_role(self): Test manager has role """ bob, _ = UserProfile.objects.get_or_create(user=self.user) - alice = self._create_user('alice', 'alice') + alice = self._create_user("alice", "alice") self.assertFalse(ManagerRole.user_has_role(alice, bob)) self.assertFalse(ManagerRole.has_role(perms_for(alice, bob), bob)) @@ -58,24 +69,21 @@ def test_reassign_role(self): Test role reassignment. 
""" self._publish_transportation_form() - alice = self._create_user('alice', 'alice') + alice = self._create_user("alice", "alice") self.assertFalse(ManagerRole.user_has_role(alice, self.xform)) ManagerRole.add(alice, self.xform) self.assertTrue(ManagerRole.user_has_role(alice, self.xform)) - self.assertTrue( - ManagerRole.has_role(perms_for(alice, self.xform), self.xform)) + self.assertTrue(ManagerRole.has_role(perms_for(alice, self.xform), self.xform)) ReadOnlyRole.add(alice, self.xform) self.assertFalse(ManagerRole.user_has_role(alice, self.xform)) self.assertTrue(ReadOnlyRole.user_has_role(alice, self.xform)) - self.assertFalse( - ManagerRole.has_role(perms_for(alice, self.xform), self.xform)) - self.assertTrue( - ReadOnlyRole.has_role(perms_for(alice, self.xform), self.xform)) + self.assertFalse(ManagerRole.has_role(perms_for(alice, self.xform), self.xform)) + self.assertTrue(ReadOnlyRole.has_role(perms_for(alice, self.xform), self.xform)) # pylint: disable=C0103 def test_reassign_role_owner_to_editor(self): @@ -83,83 +91,82 @@ def test_reassign_role_owner_to_editor(self): Test role reassignment owner to editor. 
""" self._publish_transportation_form() - alice = self._create_user('alice', 'alice') + alice = self._create_user("alice", "alice") self.assertFalse(OwnerRole.user_has_role(alice, self.xform)) OwnerRole.add(alice, self.xform) self.assertTrue(OwnerRole.user_has_role(alice, self.xform)) - self.assertTrue( - OwnerRole.has_role(perms_for(alice, self.xform), self.xform)) + self.assertTrue(OwnerRole.has_role(perms_for(alice, self.xform), self.xform)) EditorRole.add(alice, self.xform) self.assertFalse(OwnerRole.user_has_role(alice, self.xform)) self.assertTrue(EditorRole.user_has_role(alice, self.xform)) - self.assertFalse( - OwnerRole.has_role(perms_for(alice, self.xform), self.xform)) - self.assertTrue( - EditorRole.has_role(perms_for(alice, self.xform), self.xform)) + self.assertFalse(OwnerRole.has_role(perms_for(alice, self.xform), self.xform)) + self.assertTrue(EditorRole.has_role(perms_for(alice, self.xform), self.xform)) # pylint: disable=C0103 def test_get_object_users_with_permission(self): """ Test get_object_users_with_permissions() """ - alice = self._create_user('alice', 'alice') + alice = self._create_user("alice", "alice") UserProfile.objects.get_or_create(user=alice) org_user = tools.create_organization("modilabs", alice).user - demo_grp = Group.objects.create(name='demo') + demo_grp = Group.objects.create(name="demo") alice.groups.add(demo_grp) self._publish_transportation_form() EditorRole.add(org_user, self.xform) EditorRole.add(demo_grp, self.xform) users_with_perms = get_object_users_with_permissions( - self.xform, with_group_users=True) - self.assertTrue(org_user in [d['user'] for d in users_with_perms]) - self.assertTrue(alice in [d['user'] for d in users_with_perms]) + self.xform, with_group_users=True + ) + self.assertTrue(org_user in [d["user"] for d in users_with_perms]) + self.assertTrue(alice in [d["user"] for d in users_with_perms]) users_with_perms_first_keys = list(users_with_perms[0]) - self.assertIn('first_name', 
users_with_perms_first_keys) - self.assertIn('last_name', users_with_perms_first_keys) - self.assertIn('user', users_with_perms_first_keys) - self.assertIn('role', users_with_perms_first_keys) - self.assertIn('gravatar', users_with_perms_first_keys) - self.assertIn('metadata', users_with_perms_first_keys) - self.assertIn('is_org', users_with_perms_first_keys) + self.assertIn("first_name", users_with_perms_first_keys) + self.assertIn("last_name", users_with_perms_first_keys) + self.assertIn("user", users_with_perms_first_keys) + self.assertIn("role", users_with_perms_first_keys) + self.assertIn("gravatar", users_with_perms_first_keys) + self.assertIn("metadata", users_with_perms_first_keys) + self.assertIn("is_org", users_with_perms_first_keys) # pylint: disable=C0103 def test_user_profile_exists_for_users_with_perms(self): """ Test user profile exists when retrieving users with perms """ - alice = self._create_user('alice', 'alice') + alice = self._create_user("alice", "alice") # do not manually create user profile for alice, should be # handled by get_object_users_with_permissions() org_user = tools.create_organization("modilabs", alice).user - demo_grp = Group.objects.create(name='demo') + demo_grp = Group.objects.create(name="demo") alice.groups.add(demo_grp) self._publish_transportation_form() EditorRole.add(org_user, self.xform) EditorRole.add(demo_grp, self.xform) users_with_perms = get_object_users_with_permissions( - self.xform, with_group_users=True) - self.assertTrue(org_user in [d['user'] for d in users_with_perms]) - self.assertTrue(alice in [d['user'] for d in users_with_perms]) + self.xform, with_group_users=True + ) + self.assertTrue(org_user in [d["user"] for d in users_with_perms]) + self.assertTrue(alice in [d["user"] for d in users_with_perms]) for d in users_with_perms: - user_obj = d['user'] + user_obj = d["user"] self.assertTrue(hasattr(user_obj, "profile")) - # pylint: disable=C0103 + # pylint: disable=C0103 def 
test_exception_raised_for_missing_profiles(self): """ Test UserProfile.DoesNotExit exception raised for missing user profiles """ - alice = self._create_user('alice', 'alice') + alice = self._create_user("alice", "alice") UserProfile.objects.get_or_create(user=alice) org_user = tools.create_organization("modilabs", alice).user - demo_grp = Group.objects.create(name='demo') + demo_grp = Group.objects.create(name="demo") alice.groups.add(demo_grp) self._publish_transportation_form() EditorRole.add(org_user, self.xform) @@ -177,32 +184,31 @@ def test_exception_raised_for_missing_profiles(self): # check if profile is created for alice # when get_object_users_with_permissions() is called users_with_perms = get_object_users_with_permissions( - self.xform, with_group_users=True) - self.assertEqual("alice", users_with_perms[2]['user'].username) - self.assertTrue(hasattr(users_with_perms[2]['user'], "profile")) + self.xform, with_group_users=True + ) + self.assertEqual("alice", users_with_perms[2]["user"].username) + self.assertTrue(hasattr(users_with_perms[2]["user"], "profile")) def test_readonly_no_downloads_has_role(self): """ Test readonly no downloads role. 
""" self._publish_transportation_form() - alice = self._create_user('alice', 'alice') + alice = self._create_user("alice", "alice") + self.assertFalse(ReadOnlyRoleNoDownload.user_has_role(alice, self.xform)) self.assertFalse( - ReadOnlyRoleNoDownload.user_has_role(alice, self.xform)) - self.assertFalse( - ReadOnlyRoleNoDownload.has_role( - perms_for(alice, self.xform), self.xform)) + ReadOnlyRoleNoDownload.has_role(perms_for(alice, self.xform), self.xform) + ) ReadOnlyRoleNoDownload.add(alice, self.xform) + self.assertTrue(ReadOnlyRoleNoDownload.user_has_role(alice, self.xform)) self.assertTrue( - ReadOnlyRoleNoDownload.user_has_role(alice, self.xform)) - self.assertTrue( - ReadOnlyRoleNoDownload.has_role( - perms_for(alice, self.xform), self.xform)) + ReadOnlyRoleNoDownload.has_role(perms_for(alice, self.xform), self.xform) + ) - @patch('onadata.libs.permissions._check_meta_perms_enabled') + @patch("onadata.libs.permissions._check_meta_perms_enabled") def test_filter_queryset_xform_meta_perms_sql(self, check_meta_mock): """ Test filter query by meta permissions. 
@@ -210,12 +216,11 @@ def test_filter_queryset_xform_meta_perms_sql(self, check_meta_mock): self._publish_transportation_form() query = '{"_id": 1}' - result = filter_queryset_xform_meta_perms_sql(self.xform, self.user, - query) + result = filter_queryset_xform_meta_perms_sql(self.xform, self.user, query) self.assertEqual(result, query) check_meta_mock.return_value = True - alice = self._create_user('alice', 'alice') + alice = self._create_user("alice", "alice") # no records with self.assertRaises(NoRecordsPermission): diff --git a/onadata/libs/tests/utils/test_csv_import.py b/onadata/libs/tests/utils/test_csv_import.py index fb55f8ce39..13a31d11c0 100644 --- a/onadata/libs/tests/utils/test_csv_import.py +++ b/onadata/libs/tests/utils/test_csv_import.py @@ -4,27 +4,26 @@ import re from builtins import open from io import BytesIO +from unittest.mock import patch from xml.etree.ElementTree import fromstring -import mock +from django.conf import settings + import unicodecsv as ucsv from celery.backends.rpc import BacklogLimitExceeded -from django.conf import settings -from mock import patch from onadata.apps.logger.models import Instance, XForm -from onadata.apps.main.tests.test_base import TestBase from onadata.apps.main.models import MetaData +from onadata.apps.main.tests.test_base import TestBase from onadata.apps.messaging.constants import ( - XFORM, - SUBMISSION_EDITED, SUBMISSION_CREATED, + SUBMISSION_EDITED, + XFORM, ) from onadata.libs.utils import csv_import from onadata.libs.utils.common_tags import IMPORTED_VIA_CSV_BY -from onadata.libs.utils.csv_import import get_submission_meta_dict +from onadata.libs.utils.csv_import import get_columns_by_type, get_submission_meta_dict from onadata.libs.utils.user_auth import get_user_default_project -from onadata.libs.utils.csv_import import get_columns_by_type def strip_xml_uuid(s): @@ -60,7 +59,7 @@ def test_submit_csv_param_sanity_check(self): resp = csv_import.submit_csv("userX", XForm(), 123456) 
self.assertIsNotNone(resp.get("error")) - @mock.patch("onadata.libs.utils.csv_import.safe_create_instance") + @patch("onadata.libs.utils.csv_import.safe_create_instance") def test_submit_csv_xml_params(self, safe_create_instance): self._publish_xls_file(self.xls_file_path) self.xform = XForm.objects.get() @@ -87,8 +86,8 @@ def test_submit_csv_xml_params(self, safe_create_instance): ) self.assertEqual(safe_create_args[4], None) - @mock.patch("onadata.libs.utils.csv_import.safe_create_instance") - @mock.patch("onadata.libs.utils.csv_import.dict2xmlsubmission") + @patch("onadata.libs.utils.csv_import.safe_create_instance") + @patch("onadata.libs.utils.csv_import.dict2xmlsubmission") def test_submit_csv_xml_location_property_test(self, d2x, safe_create_instance): self._publish_xls_file(self.xls_file_path) self.xform = XForm.objects.get() @@ -173,7 +172,7 @@ def test_submit_csv_edits(self, send_message_mock): ) # message sent upon submission edit self.assertTrue(send_message_mock.called) - send_message_mock.called_with(self.xform.id, XFORM, SUBMISSION_EDITED) + send_message_mock.assert_called_with(self.xform.id, XFORM, SUBMISSION_EDITED) def test_import_non_utf8_csv(self): xls_file_path = os.path.join(self.fixtures_dir, "mali_health.xlsx") @@ -309,7 +308,7 @@ def test_csv_with__more_than_4_repeats_import(self): # repeats should be 6 self.assertEqual(6, len(instance.json.get("children"))) - @mock.patch("onadata.libs.utils.csv_import.AsyncResult") + @patch("onadata.libs.utils.csv_import.AsyncResult") def test_get_async_csv_submission_status(self, AsyncResult): result = csv_import.get_async_csv_submission_status(None) self.assertEqual(result, {"error": "Empty job uuid", "job_status": "FAILURE"}) @@ -383,7 +382,7 @@ def test_submission_xls_to_csv(self): self.assertEqual(g_csv_reader.fieldnames[10], c_csv_reader.fieldnames[10]) - @mock.patch("onadata.libs.utils.csv_import.safe_create_instance") + @patch("onadata.libs.utils.csv_import.safe_create_instance") def 
test_submit_csv_instance_id_consistency(self, safe_create_instance): self._publish_xls_file(self.xls_file_path) self.xform = XForm.objects.get() @@ -426,7 +425,7 @@ def test_data_upload(self, send_message_mock): self.assertEqual(self.xform.num_of_submissions, count + 1) # message sent upon submission creation self.assertTrue(send_message_mock.called) - send_message_mock.called_with(self.xform.id, XFORM, SUBMISSION_CREATED) + send_message_mock.assert_called_with(self.xform.id, XFORM, SUBMISSION_CREATED) def test_excel_date_conversion(self): """Convert date from 01/01/1900 to 01-01-1900""" diff --git a/onadata/libs/tests/utils/test_email.py b/onadata/libs/tests/utils/test_email.py index b1732b515b..bcff7b6f7a 100644 --- a/onadata/libs/tests/utils/test_email.py +++ b/onadata/libs/tests/utils/test_email.py @@ -1,22 +1,30 @@ -from six.moves.urllib.parse import urlencode -from mock import patch +# -*- coding: utf-8 -*- +""" +Test onadata.utils.emails module. +""" +from unittest.mock import patch + from django.test import RequestFactory from django.test.utils import override_settings + +from six.moves.urllib.parse import urlencode + +from onadata.apps.logger.models import ProjectInvitation from onadata.apps.main.tests.test_base import TestBase from onadata.libs.utils.email import ( + ProjectInvitationEmail, + get_project_invitation_url, get_verification_email_data, get_verification_url, - get_project_invitation_url, ) -from onadata.libs.utils.email import ProjectInvitationEmail -from onadata.apps.logger.models import ProjectInvitation from onadata.libs.utils.user_auth import get_user_default_project - VERIFICATION_URL = "http://ab.cd.ef" class TestEmail(TestBase): + """Test onadata.utils.email module""" + def setUp(self): self.email = "john@doe.com" self.username = ("johndoe",) diff --git a/onadata/libs/tests/utils/test_export_builder.py b/onadata/libs/tests/utils/test_export_builder.py index c158b62268..e4aeee65ef 100644 --- 
a/onadata/libs/tests/utils/test_export_builder.py +++ b/onadata/libs/tests/utils/test_export_builder.py @@ -13,10 +13,10 @@ from collections import OrderedDict from ctypes import ArgumentError from io import BytesIO +from unittest.mock import patch from django.conf import settings from django.core.files.temp import NamedTemporaryFile -from mock import patch from openpyxl import load_workbook from pyxform.builder import create_survey_from_xls diff --git a/onadata/libs/tests/utils/test_logger_tools.py b/onadata/libs/tests/utils/test_logger_tools.py index 03516b7e66..ff0a74fd2b 100644 --- a/onadata/libs/tests/utils/test_logger_tools.py +++ b/onadata/libs/tests/utils/test_logger_tools.py @@ -5,13 +5,13 @@ import os import re from io import BytesIO +from unittest.mock import patch from django.conf import settings from django.core.files.uploadedfile import InMemoryUploadedFile from django.http.request import HttpRequest from defusedxml.ElementTree import ParseError -from mock import patch from onadata.apps.logger.import_tools import django_file from onadata.apps.logger.models import Instance diff --git a/onadata/libs/tests/utils/test_project_utils.py b/onadata/libs/tests/utils/test_project_utils.py index a1267779ad..84696778c9 100644 --- a/onadata/libs/tests/utils/test_project_utils.py +++ b/onadata/libs/tests/utils/test_project_utils.py @@ -2,10 +2,11 @@ """ Test onadata.libs.utils.project_utils """ +from unittest.mock import MagicMock, patch + from django.test.utils import override_settings from kombu.exceptions import OperationalError -from mock import MagicMock, patch from requests import Response from onadata.apps.logger.models import Project diff --git a/onadata/libs/tests/utils/test_viewer_tools.py b/onadata/libs/tests/utils/test_viewer_tools.py index 2ed76c193f..3b0030b135 100644 --- a/onadata/libs/tests/utils/test_viewer_tools.py +++ b/onadata/libs/tests/utils/test_viewer_tools.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- """Test 
onadata.libs.utils.viewer_tools.""" import os +from unittest.mock import patch from django.core.files.base import File from django.core.files.temp import NamedTemporaryFile @@ -9,8 +10,6 @@ from django.test.utils import override_settings from django.utils import timezone -from mock import patch - from onadata.apps.logger.models import Attachment, Instance, XForm from onadata.apps.main.tests.test_base import TestBase from onadata.libs.utils.viewer_tools import ( diff --git a/requirements/base.pip b/requirements/base.pip index 81b9beb2cc..d0ae022f23 100644 --- a/requirements/base.pip +++ b/requirements/base.pip @@ -41,9 +41,9 @@ backoff==1.10.0 # via analytics-python billiard==4.2.0 # via celery -boto3==1.34.68 +boto3==1.34.69 # via dataflows-tabulator -botocore==1.34.68 +botocore==1.34.69 # via # boto3 # s3transfer @@ -74,7 +74,7 @@ click==8.1.7 # dataflows-tabulator # datapackage # tableschema -click-didyoumean==0.3.0 +click-didyoumean==0.3.1 # via celery click-plugins==1.1.1 # via celery @@ -85,7 +85,7 @@ cryptography==42.0.5 # jwcrypto # onadata # pyjwt -dataflows-tabulator==1.54.1 +dataflows-tabulator==1.54.3 # via # datapackage # tableschema @@ -330,7 +330,7 @@ requests==2.31.0 # requests-oauthlib # sphinx # tableschema -requests-oauthlib==1.4.0 +requests-oauthlib==2.0.0 # via google-auth-oauthlib rfc3986==2.0.0 # via tableschema @@ -372,7 +372,7 @@ sphinxcontrib-qthelp==1.0.7 # via sphinx sphinxcontrib-serializinghtml==1.1.10 # via sphinx -sqlalchemy==2.0.28 +sqlalchemy==2.0.29 # via dataflows-tabulator sqlparse==0.4.4 # via diff --git a/requirements/dev.in b/requirements/dev.in index 3762559028..7c5947dad6 100644 --- a/requirements/dev.in +++ b/requirements/dev.in @@ -6,7 +6,6 @@ flaky httmock ipdb isort -mock pre-commit prospector>=1.10.3 pylint diff --git a/requirements/dev.pip b/requirements/dev.pip index 89902aa040..8f3db50884 100644 --- a/requirements/dev.pip +++ b/requirements/dev.pip @@ -49,9 +49,9 @@ backoff==1.10.0 # via analytics-python 
billiard==4.2.0 # via celery -boto3==1.34.68 +boto3==1.34.69 # via dataflows-tabulator -botocore==1.34.68 +botocore==1.34.69 # via # boto3 # s3transfer @@ -84,7 +84,7 @@ click==8.1.7 # dataflows-tabulator # datapackage # tableschema -click-didyoumean==0.3.0 +click-didyoumean==0.3.1 # via celery click-plugins==1.1.1 # via celery @@ -95,7 +95,7 @@ cryptography==42.0.5 # jwcrypto # onadata # pyjwt -dataflows-tabulator==1.54.1 +dataflows-tabulator==1.54.3 # via # datapackage # tableschema @@ -348,7 +348,7 @@ platformdirs==4.2.0 # pylint # virtualenv # yapf -pre-commit==3.6.2 +pre-commit==3.7.0 # via -r requirements/dev.in prompt-toolkit==3.0.43 # via @@ -464,7 +464,7 @@ requests==2.31.0 # tableschema requests-mock==1.11.0 # via -r requirements/dev.in -requests-oauthlib==1.4.0 +requests-oauthlib==2.0.0 # via google-auth-oauthlib requirements-detector==1.2.2 # via prospector @@ -518,7 +518,7 @@ sphinxcontrib-qthelp==1.0.7 # via sphinx sphinxcontrib-serializinghtml==1.1.10 # via sphinx -sqlalchemy==2.0.28 +sqlalchemy==2.0.29 # via dataflows-tabulator sqlparse==0.4.4 # via diff --git a/setup.cfg b/setup.cfg index cea9d0d529..2ccf9ad228 100644 --- a/setup.cfg +++ b/setup.cfg @@ -24,7 +24,6 @@ tests_require = flake8 flaky httmock - mock requests-mock install_requires = Django==4.0,<5 From 013773699c2497048dc8b990e89331b81721db75 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Mon, 25 Mar 2024 12:56:52 +0300 Subject: [PATCH 175/270] _submission_time is already in UTC so set to UTC --- onadata/libs/renderers/renderers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/onadata/libs/renderers/renderers.py b/onadata/libs/renderers/renderers.py index 15d5e49756..08f003b980 100644 --- a/onadata/libs/renderers/renderers.py +++ b/onadata/libs/renderers/renderers.py @@ -53,7 +53,7 @@ def floip_rows_list(data): """ try: _submission_time = ( - parse_datetime(data["_submission_time"]).astimezone(timezone.utc) + 
parse_datetime(data["_submission_time"]).replace(tzinfo=timezone.utc) ).isoformat() except ValueError: From 831fe07baa8ac351546050ba2b4487b36e3819cf Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Mon, 25 Mar 2024 15:13:34 +0300 Subject: [PATCH 176/270] csv_import: Update assert_called_with tests --- onadata/libs/tests/utils/test_csv_import.py | 40 ++++++++++++++++----- 1 file changed, 31 insertions(+), 9 deletions(-) diff --git a/onadata/libs/tests/utils/test_csv_import.py b/onadata/libs/tests/utils/test_csv_import.py index 13a31d11c0..461b46bafa 100644 --- a/onadata/libs/tests/utils/test_csv_import.py +++ b/onadata/libs/tests/utils/test_csv_import.py @@ -1,3 +1,7 @@ +# -*- coding: utf-8 -*- +""" +Tests the onadata.libs.utils.csv_import module +""" from __future__ import unicode_literals import os @@ -31,6 +35,10 @@ def strip_xml_uuid(s): class CSVImportTestCase(TestBase): + """ + Tests the onadata.libs.utils.csv_import module + """ + def setUp(self): super(CSVImportTestCase, self).setUp() self.fixtures_dir = os.path.join( @@ -149,9 +157,9 @@ def test_submit_csv_edits(self, send_message_mock): settings.PROJECT_ROOT, "apps", "main", "tests", "fixtures", "tutorial.xlsx" ) self._publish_xls_file(xls_file_path) - self.xform = XForm.objects.get() + xform = XForm.objects.get() - csv_import.submit_csv(self.user.username, self.xform, self.good_csv) + csv_import.submit_csv(self.user.username, xform, self.good_csv) self.assertEqual( Instance.objects.count(), 9, "submit_csv edits #1 test Failed!" ) @@ -166,13 +174,20 @@ def test_submit_csv_edits(self, send_message_mock): ) count = Instance.objects.count() - csv_import.submit_csv(self.user.username, self.xform, edit_csv) + csv_import.submit_csv(self.user.username, xform, edit_csv) self.assertEqual( Instance.objects.count(), count, "submit_csv edits #2 test Failed!" 
) # message sent upon submission edit self.assertTrue(send_message_mock.called) - send_message_mock.assert_called_with(self.xform.id, XFORM, SUBMISSION_EDITED) + instance_id = xform.instances.filter().order_by("date_modified").last().pk + send_message_mock.assert_called_with( + instance_id=instance_id, + target_id=xform.id, + target_type=XFORM, + message_verb=SUBMISSION_EDITED, + user=self.user, + ) def test_import_non_utf8_csv(self): xls_file_path = os.path.join(self.fixtures_dir, "mali_health.xlsx") @@ -415,17 +430,24 @@ def test_submit_csv_instance_id_consistency(self, safe_create_instance): def test_data_upload(self, send_message_mock): """Data upload for submissions with no uuids""" self._publish_xls_file(self.xls_file_path) - self.xform = XForm.objects.get() + xform = XForm.objects.get() count = Instance.objects.count() single_csv = open( os.path.join(self.fixtures_dir, "single_data_upload.csv"), "rb" ) - csv_import.submit_csv(self.user.username, self.xform, single_csv) - self.xform.refresh_from_db() - self.assertEqual(self.xform.num_of_submissions, count + 1) + csv_import.submit_csv(self.user.username, xform, single_csv) + xform.refresh_from_db() + self.assertEqual(xform.num_of_submissions, count + 1) + instance_id = xform.instances.last().pk # message sent upon submission creation self.assertTrue(send_message_mock.called) - send_message_mock.assert_called_with(self.xform.id, XFORM, SUBMISSION_CREATED) + send_message_mock.assert_called_with( + instance_id=instance_id, + target_id=xform.id, + target_type=XFORM, + message_verb=SUBMISSION_CREATED, + user=self.user, + ) def test_excel_date_conversion(self): """Convert date from 01/01/1900 to 01-01-1900""" From ee94bb23703b9d442627e5951192680ea087090b Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Mon, 25 Mar 2024 17:38:40 +0300 Subject: [PATCH 177/270] start and end time filters are in UTC --- onadata/apps/main/tests/test_form_exports.py | 190 ++++++++++++------- onadata/apps/viewer/views.py | 6 +- 2 files 
changed, 123 insertions(+), 73 deletions(-) diff --git a/onadata/apps/main/tests/test_form_exports.py b/onadata/apps/main/tests/test_form_exports.py index 254aba0ea6..a9aa0f943a 100644 --- a/onadata/apps/main/tests/test_form_exports.py +++ b/onadata/apps/main/tests/test_form_exports.py @@ -1,3 +1,7 @@ +# -*- coding: utf-8 -*- +""" +Tests exports +""" import csv import os import tempfile @@ -17,40 +21,46 @@ class TestFormExports(TestBase): + """ + Tests exports + """ def setUp(self): TestBase.setUp(self) self._create_user_and_login() self._publish_transportation_form_and_submit_instance() - self.csv_url = reverse('csv_export', kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string}) - self.xls_url = reverse('xlsx_export', kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string}) + self.csv_url = reverse( + "csv_export", + kwargs={"username": self.user.username, "id_string": self.xform.id_string}, + ) + self.xls_url = reverse( + "xlsx_export", + kwargs={"username": self.user.username, "id_string": self.xform.id_string}, + ) def _num_rows(self, content, export_format): def xls_rows(f): wb = openpyxl.load_workbook(filename=BytesIO(f)) - current_sheet = wb.get_sheet_by_name('data') + current_sheet = wb.get_sheet_by_name("data") return len(list(current_sheet.rows)) def csv_rows(f): - with tempfile.TemporaryFile('w+') as tmp: - tmp.write(f.decode('utf-8')) + with tempfile.TemporaryFile("w+") as tmp: + tmp.write(f.decode("utf-8")) tmp.seek(0) - return len([line for line in csv.reader(tmp)]) + return len(list(line for line in csv.reader(tmp))) + num_rows_fn = { - 'xls': xls_rows, - 'xlsx': xls_rows, - 'csv': csv_rows, + "xls": xls_rows, + "xlsx": xls_rows, + "csv": csv_rows, } return num_rows_fn[export_format](content) def test_csv_raw_export_name(self): - response = self.client.get(self.csv_url + '?raw=1') + response = self.client.get(self.csv_url + "?raw=1") self.assertEqual(response.status_code, 200) - 
self.assertEqual(response['Content-Disposition'], 'attachment;') + self.assertEqual(response["Content-Disposition"], "attachment;") def _filter_export_test(self, url, export_format): """ @@ -59,82 +69,106 @@ def _filter_export_test(self, url, export_format): """ time.sleep(1) # 1 survey exists before this time - start_time = timezone.now().strftime('%y_%m_%d_%H_%M_%S') + start_time = ( + timezone.now().astimezone(timezone.utc).strftime("%y_%m_%d_%H_%M_%S") + ) time.sleep(1) s = self.surveys[1] self._make_submission( - os.path.join(self.this_directory, 'fixtures', - 'transportation', 'instances', s, s + '.xml')) + os.path.join( + self.this_directory, + "fixtures", + "transportation", + "instances", + s, + s + ".xml", + ) + ) time.sleep(1) # 2 surveys exist before this time - end_time = timezone.now().strftime('%y_%m_%d_%H_%M_%S') + end_time = timezone.now().astimezone(timezone.utc).strftime("%y_%m_%d_%H_%M_%S") time.sleep(1) # 3 surveys exist in total s = self.surveys[2] self._make_submission( - os.path.join(self.this_directory, 'fixtures', - 'transportation', 'instances', s, s + '.xml')) + os.path.join( + self.this_directory, + "fixtures", + "transportation", + "instances", + s, + s + ".xml", + ) + ) # test restricting to before end time - params = {'end': end_time} + params = {"end": end_time} response = self.client.get(url, params) self.assertEqual(response.status_code, 200) content = get_response_content(response, decode=False) self.assertEqual(self._num_rows(content, export_format), 3) + # test restricting to after start time, thus excluding the initial # submission - params = {'start': start_time} + params = {"start": start_time} response = self.client.get(url, params) self.assertEqual(response.status_code, 200) content = get_response_content(response, decode=False) self.assertEqual(self._num_rows(content, export_format), 3) + # test no time restriction response = self.client.get(url) - self.assertEqual(response.status_code, 200) content = 
get_response_content(response, decode=False) self.assertEqual(self._num_rows(content, export_format), 4) + # test restricting to between start time and end time - params = {'start': start_time, 'end': end_time} + params = {"start": start_time, "end": end_time} response = self.client.get(url, params) self.assertEqual(response.status_code, 200) content = get_response_content(response, decode=False) self.assertEqual(self._num_rows(content, export_format), 2) def test_filter_by_date_csv(self): - self._filter_export_test(self.csv_url, 'csv') + self._filter_export_test(self.csv_url, "csv") def test_filter_by_date_xls(self): - self._filter_export_test(self.xls_url, 'xlsx') + self._filter_export_test(self.xls_url, "xlsx") def test_restrict_csv_export_if_not_shared(self): response = self.anon.get(self.csv_url) self.assertEqual(response.status_code, 403) def test_xls_raw_export_name(self): - response = self.client.get(self.xls_url + '?raw=1') + response = self.client.get(self.xls_url + "?raw=1") self.assertEqual(response.status_code, 200) - self.assertEqual(response['Content-Disposition'], 'attachment;') + self.assertEqual(response["Content-Disposition"], "attachment;") def test_restrict_xlsx_export_if_not_shared(self): response = self.anon.get(self.xls_url) self.assertEqual(response.status_code, 403) def test_zip_raw_export_name(self): - url = reverse(zip_export, kwargs={'username': self.user.username, - 'id_string': self.xform.id_string}) - response = self.client.get(url + '?raw=1') + url = reverse( + zip_export, + kwargs={"username": self.user.username, "id_string": self.xform.id_string}, + ) + response = self.client.get(url + "?raw=1") self.assertEqual(response.status_code, 200) - self.assertEqual(response['Content-Disposition'], 'attachment;') + self.assertEqual(response["Content-Disposition"], "attachment;") def test_restrict_zip_export_if_not_shared(self): - url = reverse(zip_export, kwargs={'username': self.user.username, - 'id_string': self.xform.id_string}) + url = 
reverse( + zip_export, + kwargs={"username": self.user.username, "id_string": self.xform.id_string}, + ) response = self.anon.get(url) self.assertEqual(response.status_code, 403) def test_restrict_kml_export_if_not_shared(self): - url = reverse(kml_export, kwargs={'username': self.user.username, - 'id_string': self.xform.id_string}) + url = reverse( + kml_export, + kwargs={"username": self.user.username, "id_string": self.xform.id_string}, + ) response = self.anon.get(url) self.assertEqual(response.status_code, 403) @@ -153,16 +187,20 @@ def test_allow_xlsx_export_if_shared(self): def test_allow_zip_export_if_shared(self): self.xform.shared_data = True self.xform.save() - url = reverse(zip_export, kwargs={'username': self.user.username, - 'id_string': self.xform.id_string}) + url = reverse( + zip_export, + kwargs={"username": self.user.username, "id_string": self.xform.id_string}, + ) response = self.anon.get(url) self.assertEqual(response.status_code, 200) def test_allow_kml_export_if_shared(self): self.xform.shared_data = True self.xform.save() - url = reverse(kml_export, kwargs={'username': self.user.username, - 'id_string': self.xform.id_string}) + url = reverse( + kml_export, + kwargs={"username": self.user.username, "id_string": self.xform.id_string}, + ) response = self.anon.get(url) self.assertEqual(response.status_code, 200) @@ -175,72 +213,84 @@ def test_allow_xlsx_export(self): self.assertEqual(response.status_code, 200) def test_allow_zip_export(self): - url = reverse(zip_export, kwargs={'username': self.user.username, - 'id_string': self.xform.id_string}) + url = reverse( + zip_export, + kwargs={"username": self.user.username, "id_string": self.xform.id_string}, + ) response = self.client.get(url) self.assertEqual(response.status_code, 200) def test_allow_kml_export(self): - url = reverse(kml_export, kwargs={'username': self.user.username, - 'id_string': self.xform.id_string}) + url = reverse( + kml_export, + kwargs={"username": self.user.username, 
"id_string": self.xform.id_string}, + ) response = self.client.get(url) self.assertEqual(response.status_code, 200) def test_allow_csv_export_for_basic_auth(self): extra = { - 'HTTP_AUTHORIZATION': http_auth_string(self.login_username, - self.login_password) + "HTTP_AUTHORIZATION": http_auth_string( + self.login_username, self.login_password + ) } response = self.anon.get(self.csv_url, **extra) self.assertEqual(response.status_code, 200) def test_allow_xlsx_export_for_basic_auth(self): extra = { - 'HTTP_AUTHORIZATION': http_auth_string(self.login_username, - self.login_password) + "HTTP_AUTHORIZATION": http_auth_string( + self.login_username, self.login_password + ) } response = self.anon.get(self.xls_url, **extra) self.assertEqual(response.status_code, 200) def test_allow_zip_export_for_basic_auth(self): extra = { - 'HTTP_AUTHORIZATION': http_auth_string(self.login_username, - self.login_password) + "HTTP_AUTHORIZATION": http_auth_string( + self.login_username, self.login_password + ) } - url = reverse(zip_export, kwargs={'username': self.user.username, - 'id_string': self.xform.id_string}) + url = reverse( + zip_export, + kwargs={"username": self.user.username, "id_string": self.xform.id_string}, + ) response = self.anon.get(url, **extra) self.assertEqual(response.status_code, 200) def test_allow_kml_export_for_basic_auth(self): extra = { - 'HTTP_AUTHORIZATION': http_auth_string(self.login_username, - self.login_password) + "HTTP_AUTHORIZATION": http_auth_string( + self.login_username, self.login_password + ) } - url = reverse(kml_export, kwargs={'username': self.user.username, - 'id_string': self.xform.id_string}) + url = reverse( + kml_export, + kwargs={"username": self.user.username, "id_string": self.xform.id_string}, + ) response = self.anon.get(url, **extra) self.assertEqual(response.status_code, 200) def test_allow_export_download_for_basic_auth(self): extra = { - 'HTTP_AUTHORIZATION': http_auth_string(self.login_username, - self.login_password) + 
"HTTP_AUTHORIZATION": http_auth_string( + self.login_username, self.login_password + ) } # create export options = {"extension": "csv"} - export = generate_export( - Export.CSV_EXPORT, - self.xform, - None, - options) + export = generate_export(Export.CSV_EXPORT, self.xform, None, options) self.assertTrue(isinstance(export, Export)) - url = reverse(export_download, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': export.export_type, - 'filename': export.filename - }) + url = reverse( + export_download, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": export.export_type, + "filename": export.filename, + }, + ) response = self.anon.get(url, **extra) self.assertEqual(response.status_code, 200) diff --git a/onadata/apps/viewer/views.py b/onadata/apps/viewer/views.py index 8b8c05a22f..41516d01b3 100644 --- a/onadata/apps/viewer/views.py +++ b/onadata/apps/viewer/views.py @@ -84,10 +84,10 @@ def _get_start_end_submission_time(request): if request.GET.get("start"): start = datetime.strptime( request.GET["start"], "%y_%m_%d_%H_%M_%S" - ).astimezone(timezone.utc) + ).replace(tzinfo=timezone.utc) if request.GET.get("end"): - end = datetime.strptime(request.GET["end"], "%y_%m_%d_%H_%M_%S").astimezone( - timezone.utc + end = datetime.strptime(request.GET["end"], "%y_%m_%d_%H_%M_%S").replace( + tzinfo=timezone.utc ) except ValueError: return HttpResponseBadRequest( From e045edee606439bc62612d3e278e870369b1bf71 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Mon, 25 Mar 2024 18:02:27 +0300 Subject: [PATCH 178/270] Update assert_called_with tests --- .../api/tests/viewsets/test_data_viewset.py | 30 +++++++++---------- .../test_submission_review_viewset.py | 20 ++++++------- 2 files changed, 25 insertions(+), 25 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_data_viewset.py b/onadata/apps/api/tests/viewsets/test_data_viewset.py index 7adadc8b31..223925aae3 100644 
--- a/onadata/apps/api/tests/viewsets/test_data_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_data_viewset.py @@ -1806,11 +1806,11 @@ def test_deletion_of_bulk_submissions(self, send_message_mock): ) self.assertTrue(send_message_mock.called) send_message_mock.assert_called_with( - [str(i.pk) for i in records_to_be_deleted], - formid, - XFORM, - request.user, - SUBMISSION_DELETED, + instance_id=[str(i.pk) for i in records_to_be_deleted], + target_id=formid, + target_type=XFORM, + user=request.user, + message_verb=SUBMISSION_DELETED, ) self.xform.refresh_from_db() current_count = self.xform.instances.filter(deleted_at=None).count() @@ -1906,11 +1906,11 @@ def test_permanent_deletions_bulk_submissions(self, send_message_mock): ) self.assertTrue(send_message_mock.called) send_message_mock.assert_called_with( - [str(i.pk) for i in records_to_be_deleted], - formid, - XFORM, - request.user, - SUBMISSION_DELETED, + instance_id=[str(i.pk) for i in records_to_be_deleted], + target_id=formid, + target_type=XFORM, + user=request.user, + message_verb=SUBMISSION_DELETED, ) self.xform.refresh_from_db() current_count = self.xform.num_of_submissions @@ -2059,11 +2059,11 @@ def test_delete_submissions(self, send_message_mock): ) self.assertTrue(send_message_mock.called) send_message_mock.assert_called_with( - [str(i.pk) for i in deleted_instances_subset], - formid, - XFORM, - request.user, - SUBMISSION_DELETED, + instance_id=[str(i.pk) for i in deleted_instances_subset], + target_id=formid, + target_type=XFORM, + user=request.user, + message_verb=SUBMISSION_DELETED, ) # Test that num of submissions for the form is successfully updated diff --git a/onadata/apps/api/tests/viewsets/test_submission_review_viewset.py b/onadata/apps/api/tests/viewsets/test_submission_review_viewset.py index c442439530..12c146b970 100644 --- a/onadata/apps/api/tests/viewsets/test_submission_review_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_submission_review_viewset.py @@ -67,11 +67,11 @@ 
def test_submission_review_create(self, mock_send_message): # sends message upon saving the submission review self.assertTrue(mock_send_message.called) mock_send_message.assert_called_with( - submission_review.id, - submission_review.instance.xform.id, - XFORM, - submission_review.created_by, - SUBMISSION_REVIEWED, + instance_id=submission_review.instance_id, + target_id=submission_review.instance.xform.id, + target_type=XFORM, + user=submission_review.created_by, + message_verb=SUBMISSION_REVIEWED, ) @patch("onadata.apps.api.viewsets.submission_review_viewset.send_message") @@ -102,11 +102,11 @@ def test_bulk_create_submission_review(self, mock_send_message): # sends message upon saving the submission review self.assertTrue(mock_send_message.called) mock_send_message.assert_called_with( - [s.id for s in self.xform.instances.all()], - self.xform.id, - XFORM, - request.user, - SUBMISSION_REVIEWED, + instance_id=[s.id for s in self.xform.instances.all()], + target_id=self.xform.id, + target_type=XFORM, + user=request.user, + message_verb=SUBMISSION_REVIEWED, ) for item in response.data: # the note should match what we provided From 0d3de8f211e52eb9897dabaf5e5d256222abb724 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Mon, 25 Mar 2024 18:46:50 +0300 Subject: [PATCH 179/270] Update assert_called_with tests --- onadata/apps/api/tests/viewsets/test_xform_viewset.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/onadata/apps/api/tests/viewsets/test_xform_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_viewset.py index 9b9c385839..d125165ef9 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_viewset.py @@ -215,7 +215,11 @@ def test_replace_form_with_external_choices(self, mock_send_message): # send message upon form update self.assertTrue(mock_send_message.called) mock_send_message.assert_called_with( - self.xform.id, self.xform.id, XFORM, request.user, FORM_UPDATED + 
instance_id=self.xform.id, + target_id=self.xform.id, + target_type=XFORM, + user=request.user, + message_verb=FORM_UPDATED, ) def test_form_publishing_using_invalid_text_xls_form(self): From 35d7a7784a0b465b6ef2781a6e2d1f4d6462d8bf Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Tue, 26 Mar 2024 19:35:53 +0300 Subject: [PATCH 180/270] Django 4.1: update dependencies --- requirements/base.pip | 10 +++++----- requirements/dev.pip | 14 ++++++-------- setup.cfg | 2 +- 3 files changed, 12 insertions(+), 14 deletions(-) diff --git a/requirements/base.pip b/requirements/base.pip index d0ae022f23..32bf9f4fff 100644 --- a/requirements/base.pip +++ b/requirements/base.pip @@ -41,9 +41,9 @@ backoff==1.10.0 # via analytics-python billiard==4.2.0 # via celery -boto3==1.34.69 +boto3==1.34.70 # via dataflows-tabulator -botocore==1.34.69 +botocore==1.34.70 # via # boto3 # s3transfer @@ -100,7 +100,7 @@ deprecated==1.2.14 # via onadata dict2xml==1.7.5 # via onadata -django==4.0 +django==4.1.13 # via # django-activity-stream # django-cors-headers @@ -135,7 +135,7 @@ django-guardian==2.4.0 # onadata django-nose==1.4.7 # via onadata -django-oauth-toolkit==2.1.0 +django-oauth-toolkit==2.3.0 # via onadata django-ordered-model==3.7.4 # via onadata @@ -149,7 +149,7 @@ django-render-block==0.9.2 # via django-templated-email django-reversion==5.0.12 # via onadata -django-taggit==4.0.0 +django-taggit==5.0.1 # via onadata django-templated-email==3.0.1 # via onadata diff --git a/requirements/dev.pip b/requirements/dev.pip index 8f3db50884..cb40371530 100644 --- a/requirements/dev.pip +++ b/requirements/dev.pip @@ -49,9 +49,9 @@ backoff==1.10.0 # via analytics-python billiard==4.2.0 # via celery -boto3==1.34.69 +boto3==1.34.70 # via dataflows-tabulator -botocore==1.34.69 +botocore==1.34.70 # via # boto3 # s3transfer @@ -118,7 +118,7 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -django==4.0 +django==4.1.13 # via # django-activity-stream # django-cors-headers @@ -156,7 +156,7 
@@ django-guardian==2.4.0 # onadata django-nose==1.4.7 # via onadata -django-oauth-toolkit==2.1.0 +django-oauth-toolkit==2.3.0 # via onadata django-ordered-model==3.7.4 # via onadata @@ -170,7 +170,7 @@ django-render-block==0.9.2 # via django-templated-email django-reversion==5.0.12 # via onadata -django-taggit==4.0.0 +django-taggit==5.0.1 # via onadata django-templated-email==3.0.1 # via onadata @@ -208,7 +208,7 @@ et-xmlfile==1.1.0 # via openpyxl executing==2.0.1 # via stack-data -filelock==3.13.1 +filelock==3.13.3 # via virtualenv flake8==5.0.4 # via @@ -305,8 +305,6 @@ mccabe==0.7.0 # flake8 # prospector # pylint -mock==5.1.0 - # via -r requirements/dev.in modilabs-python-utils==0.1.5 # via onadata monotonic==1.6 diff --git a/setup.cfg b/setup.cfg index 2ccf9ad228..7f27898ffd 100644 --- a/setup.cfg +++ b/setup.cfg @@ -26,7 +26,7 @@ tests_require = httmock requests-mock install_requires = - Django==4.0,<5 + Django==4.1.13 django-guardian django-registration-redux django-templated-email From e8d5fdf478675116c8d15419e9d0c7ebdeb4f65b Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Tue, 26 Mar 2024 23:38:18 +0300 Subject: [PATCH 181/270] Switch to the standard python library unittest.mock --- .../management/commands/test_delete_users.py | 15 +- .../tests/viewsets/test_briefcase_viewset.py | 617 +++++++++-------- .../api/tests/viewsets/test_charts_viewset.py | 25 +- .../viewsets/test_xform_submission_viewset.py | 8 +- onadata/apps/main/tests/test_form_show.py | 649 +++++++++++------- .../libs/tests/utils/test_api_export_tools.py | 131 ++-- 6 files changed, 846 insertions(+), 599 deletions(-) diff --git a/onadata/apps/api/tests/management/commands/test_delete_users.py b/onadata/apps/api/tests/management/commands/test_delete_users.py index ce975e13f5..2f1a484246 100644 --- a/onadata/apps/api/tests/management/commands/test_delete_users.py +++ b/onadata/apps/api/tests/management/commands/test_delete_users.py @@ -2,12 +2,17 @@ Test delete user management command. 
""" import sys -from unittest import mock -from six import StringIO -from django.contrib.auth.models import User +from unittest.mock import patch + +from django.contrib.auth import get_user_model from django.core.management import call_command -from onadata.apps.main.tests.test_base import TestBase + +from six import StringIO + from onadata.apps.api.management.commands.delete_users import get_user_object_stats +from onadata.apps.main.tests.test_base import TestBase + +User = get_user_model() class DeleteUserTest(TestBase): @@ -35,7 +40,7 @@ def test_delete_users_with_input(self): with self.assertRaises(User.DoesNotExist): User.objects.get(email="bruce@gmail.com") - @mock.patch("onadata.apps.api.management.commands.delete_users.input") + @patch("onadata.apps.api.management.commands.delete_users.input") def test_delete_users_no_input(self, mock_input): # pylint: disable=R0201 """ Test that when user_input is not provided, diff --git a/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py b/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py index 0e8b60f6f2..b43bd6b517 100644 --- a/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py @@ -1,13 +1,18 @@ +# -*- coding: utf-8 -*- +""" +Test BriefcaseViewset +""" import codecs import os import shutil +from unittest.mock import patch -import mock -from django.core.files.storage import get_storage_class from django.conf import settings +from django.core.files.storage import get_storage_class +from django.test import override_settings from django.urls import reverse from django.utils import timezone -from django.test import override_settings + from django_digest.test import DigestAuth from rest_framework.test import APIRequestFactory @@ -18,45 +23,50 @@ ) from onadata.apps.api.viewsets.xform_submission_viewset import XFormSubmissionViewSet from onadata.apps.api.viewsets.xform_viewset import XFormViewSet -from onadata.apps.logger.models import 
Instance -from onadata.apps.logger.models import XForm +from onadata.apps.logger.models import Instance, XForm NUM_INSTANCES = 4 storage = get_storage_class()() def ordered_instances(xform): - return Instance.objects.filter(xform=xform).order_by('id') + return Instance.objects.filter(xform=xform).order_by("id") class TestBriefcaseViewSet(test_abstract_viewset.TestAbstractViewSet): + """ + Test BriefcaseViewset + """ def setUp(self): super(test_abstract_viewset.TestAbstractViewSet, self).setUp() self.factory = APIRequestFactory() self._login_user_and_profile() - self.login_username = 'bob' - self.login_password = 'bobbob' + self.login_username = "bob" + self.login_password = "bobbob" self.maxDiff = None self.form_def_path = os.path.join( - self.main_directory, 'fixtures', 'transportation', - 'transportation.xml') + self.main_directory, "fixtures", "transportation", "transportation.xml" + ) self._submission_list_url = reverse( - 'view-submission-list', kwargs={'username': self.user.username}) + "view-submission-list", kwargs={"username": self.user.username} + ) self._submission_url = reverse( - 'submissions', kwargs={'username': self.user.username}) + "submissions", kwargs={"username": self.user.username} + ) self._download_submission_url = reverse( - 'view-download-submission', - kwargs={'username': self.user.username}) + "view-download-submission", kwargs={"username": self.user.username} + ) self._form_upload_url = reverse( - 'form-upload', kwargs={'username': self.user.username}) + "form-upload", kwargs={"username": self.user.username} + ) def _publish_xml_form(self, auth=None): - view = BriefcaseViewset.as_view({'post': 'create'}) + view = BriefcaseViewset.as_view({"post": "create"}) count = XForm.objects.count() - with codecs.open(self.form_def_path, encoding='utf-8') as f: - params = {'form_def_file': f, 'dataFile': ''} + with codecs.open(self.form_def_path, encoding="utf-8") as f: + params = {"form_def_file": f, "dataFile": ""} auth = auth or 
DigestAuth(self.login_username, self.login_password) request = self.factory.post(self._form_upload_url, data=params) response = view(request, username=self.user.username) @@ -65,20 +75,29 @@ def _publish_xml_form(self, auth=None): response = view(request, username=self.user.username) self.assertEqual(XForm.objects.count(), count + 1) - self.assertContains( - response, "successfully published.", status_code=201) - self.xform = XForm.objects.order_by('pk').reverse()[0] + self.assertContains(response, "successfully published.", status_code=201) + self.xform = XForm.objects.order_by("pk").reverse()[0] def test_retrieve_encrypted_form_submissions(self): - view = BriefcaseViewset.as_view({'get': 'list'}) + view = BriefcaseViewset.as_view({"get": "list"}) path = os.path.join( - settings.PROJECT_ROOT, "apps", "api", "tests", "fixtures", - "encrypted-form.xlsx") + settings.PROJECT_ROOT, + "apps", + "api", + "tests", + "fixtures", + "encrypted-form.xlsx", + ) submission_path = os.path.join( - settings.PROJECT_ROOT, "apps", "api", "tests", "fixtures", - "encrypted-submission.xml") + settings.PROJECT_ROOT, + "apps", + "api", + "tests", + "fixtures", + "encrypted-submission.xml", + ) self._publish_xls_form_to_project(xlsform_path=path) - form = XForm.objects.filter(id_string='hh_survey2').first() + form = XForm.objects.filter(id_string="hh_survey2").first() self._make_submission(submission_path) # Ensure media_all_received is false on the submission @@ -88,29 +107,29 @@ def test_retrieve_encrypted_form_submissions(self): self.assertEqual(instance.total_media, 2) self.assertEqual( set(instance.get_expected_media()), - set(['submission.xml.enc', '6-seater-7-15_15_11-15_45_15.jpg.enc']) + set(["submission.xml.enc", "6-seater-7-15_15_11-15_45_15.jpg.enc"]), ) self.assertFalse(instance.media_all_received) # Ensure submission is not returned on the Briefcase viewset request = self.factory.get( - self._submission_list_url, - data={'formId': form.id_string}) + 
self._submission_list_url, data={"formId": form.id_string} + ) response = view(request, username=self.user.username) self.assertEqual(response.status_code, 401) auth = DigestAuth(self.login_username, self.login_password) request.META.update(auth(request.META, response)) response = view(request, username=self.user.username) self.assertEqual(response.status_code, 200) - self.assertEqual(response.data['instances'].count(), 0) + self.assertEqual(response.data["instances"].count(), 0) def test_view_submission_list(self): - view = BriefcaseViewset.as_view({'get': 'list'}) + view = BriefcaseViewset.as_view({"get": "list"}) self._publish_xml_form() self._make_submissions() request = self.factory.get( - self._submission_list_url, - data={'formId': self.xform.id_string}) + self._submission_list_url, data={"formId": self.xform.id_string} + ) response = view(request, username=self.user.username) self.assertEqual(response.status_code, 401) auth = DigestAuth(self.login_username, self.login_password) @@ -118,55 +137,65 @@ def test_view_submission_list(self): response = view(request, username=self.user.username) self.assertEqual(response.status_code, 200) submission_list_path = os.path.join( - self.main_directory, 'fixtures', 'transportation', - 'view', 'submissionList.xml') + self.main_directory, + "fixtures", + "transportation", + "view", + "submissionList.xml", + ) instances = ordered_instances(self.xform) self.assertEqual(instances.count(), NUM_INSTANCES) last_index = instances[instances.count() - 1].pk - with codecs.open(submission_list_path, 'rb', encoding='utf-8') as f: + with codecs.open(submission_list_path, "rb", encoding="utf-8") as f: expected_submission_list = f.read() - expected_submission_list = \ - expected_submission_list.replace( - '{{resumptionCursor}}', '%s' % last_index) + expected_submission_list = expected_submission_list.replace( + "{{resumptionCursor}}", "%s" % last_index + ) self.assertContains(response, expected_submission_list) def 
test_view_submission_list_token_auth(self): - view = BriefcaseViewset.as_view({'get': 'list'}) + view = BriefcaseViewset.as_view({"get": "list"}) self._publish_xml_form() self._make_submissions() # use Token auth in self.extra request = self.factory.get( self._submission_list_url, - data={'formId': self.xform.id_string}, **self.extra) + data={"formId": self.xform.id_string}, + **self.extra, + ) response = view(request, username=self.user.username) self.assertEqual(response.status_code, 200) submission_list_path = os.path.join( - self.main_directory, 'fixtures', 'transportation', - 'view', 'submissionList.xml') + self.main_directory, + "fixtures", + "transportation", + "view", + "submissionList.xml", + ) instances = ordered_instances(self.xform) self.assertEqual(instances.count(), NUM_INSTANCES) last_index = instances[instances.count() - 1].pk - with codecs.open(submission_list_path, 'rb', encoding='utf-8') as f: + with codecs.open(submission_list_path, "rb", encoding="utf-8") as f: expected_submission_list = f.read() - expected_submission_list = \ - expected_submission_list.replace( - '{{resumptionCursor}}', '%s' % last_index) + expected_submission_list = expected_submission_list.replace( + "{{resumptionCursor}}", "%s" % last_index + ) self.assertContains(response, expected_submission_list) def test_view_submission_list_w_xformid(self): - view = BriefcaseViewset.as_view({'get': 'list'}) + view = BriefcaseViewset.as_view({"get": "list"}) self._publish_xml_form() self._make_submissions() self._submission_list_url = reverse( - 'view-submission-list', - kwargs={'xform_pk': self.xform.pk}) + "view-submission-list", kwargs={"xform_pk": self.xform.pk} + ) request = self.factory.get( - self._submission_list_url, - data={'formId': self.xform.id_string}) + self._submission_list_url, data={"formId": self.xform.id_string} + ) response = view(request, xform_pk=self.xform.pk) self.assertEqual(response.status_code, 401) auth = DigestAuth(self.login_username, self.login_password) 
@@ -174,30 +203,34 @@ def test_view_submission_list_w_xformid(self): response = view(request, xform_pk=self.xform.pk) self.assertEqual(response.status_code, 200) submission_list_path = os.path.join( - self.main_directory, 'fixtures', 'transportation', - 'view', 'submissionList.xml') + self.main_directory, + "fixtures", + "transportation", + "view", + "submissionList.xml", + ) instances = ordered_instances(self.xform) self.assertEqual(instances.count(), NUM_INSTANCES) last_index = instances[instances.count() - 1].pk - with codecs.open(submission_list_path, 'rb', encoding='utf-8') as f: + with codecs.open(submission_list_path, "rb", encoding="utf-8") as f: expected_submission_list = f.read() - expected_submission_list = \ - expected_submission_list.replace( - '{{resumptionCursor}}', '%s' % last_index) + expected_submission_list = expected_submission_list.replace( + "{{resumptionCursor}}", "%s" % last_index + ) self.assertContains(response, expected_submission_list) def test_view_submission_list_w_projectid(self): - view = BriefcaseViewset.as_view({'get': 'list'}) + view = BriefcaseViewset.as_view({"get": "list"}) self._publish_xml_form() self._make_submissions() self._submission_list_url = reverse( - 'view-submission-list', - kwargs={'project_pk': self.xform.project.pk}) + "view-submission-list", kwargs={"project_pk": self.xform.project.pk} + ) request = self.factory.get( - self._submission_list_url, - data={'formId': self.xform.id_string}) + self._submission_list_url, data={"formId": self.xform.id_string} + ) response = view(request, project_pk=self.xform.project.pk) self.assertEqual(response.status_code, 401) auth = DigestAuth(self.login_username, self.login_password) @@ -205,25 +238,29 @@ def test_view_submission_list_w_projectid(self): response = view(request, project_pk=self.xform.project.pk) self.assertEqual(response.status_code, 200) submission_list_path = os.path.join( - self.main_directory, 'fixtures', 'transportation', - 'view', 'submissionList.xml') + 
self.main_directory, + "fixtures", + "transportation", + "view", + "submissionList.xml", + ) instances = ordered_instances(self.xform) self.assertEqual(instances.count(), NUM_INSTANCES) last_index = instances[instances.count() - 1].pk - with codecs.open(submission_list_path, 'rb', encoding='utf-8') as f: + with codecs.open(submission_list_path, "rb", encoding="utf-8") as f: expected_submission_list = f.read() - expected_submission_list = \ - expected_submission_list.replace( - '{{resumptionCursor}}', '%s' % last_index) + expected_submission_list = expected_submission_list.replace( + "{{resumptionCursor}}", "%s" % last_index + ) self.assertContains(response, expected_submission_list) def test_view_submission_list_w_soft_deleted_submission(self): - view = BriefcaseViewset.as_view({'get': 'list'}) + view = BriefcaseViewset.as_view({"get": "list"}) self._publish_xml_form() self._make_submissions() - uuid = 'f3d8dc65-91a6-4d0f-9e97-802128083390' + uuid = "f3d8dc65-91a6-4d0f-9e97-802128083390" # soft delete submission instance = Instance.objects.filter(uuid=uuid).first() @@ -231,8 +268,8 @@ def test_view_submission_list_w_soft_deleted_submission(self): instance.save() request = self.factory.get( - self._submission_list_url, - data={'formId': self.xform.id_string}) + self._submission_list_url, data={"formId": self.xform.id_string} + ) response = view(request, username=self.user.username) self.assertEqual(response.status_code, 401) auth = DigestAuth(self.login_username, self.login_password) @@ -243,20 +280,19 @@ def test_view_submission_list_w_soft_deleted_submission(self): # check that number of instances returned by response is equal to # number of instances that have not been soft deleted self.assertEqual( - response.data.get('instances').count(), - Instance.objects.filter( - xform=self.xform, deleted_at__isnull=True).count() + response.data.get("instances").count(), + Instance.objects.filter(xform=self.xform, deleted_at__isnull=True).count(), ) def 
test_view_submission_list_w_deleted_submission(self): - view = BriefcaseViewset.as_view({'get': 'list'}) + view = BriefcaseViewset.as_view({"get": "list"}) self._publish_xml_form() self._make_submissions() - uuid = 'f3d8dc65-91a6-4d0f-9e97-802128083390' - Instance.objects.filter(uuid=uuid).order_by('id').delete() + uuid = "f3d8dc65-91a6-4d0f-9e97-802128083390" + Instance.objects.filter(uuid=uuid).order_by("id").delete() request = self.factory.get( - self._submission_list_url, - data={'formId': self.xform.id_string}) + self._submission_list_url, data={"formId": self.xform.id_string} + ) response = view(request, username=self.user.username) self.assertEqual(response.status_code, 401) auth = DigestAuth(self.login_username, self.login_password) @@ -264,28 +300,32 @@ def test_view_submission_list_w_deleted_submission(self): response = view(request, username=self.user.username) self.assertEqual(response.status_code, 200) submission_list_path = os.path.join( - self.main_directory, 'fixtures', 'transportation', - 'view', 'submissionList-4.xml') + self.main_directory, + "fixtures", + "transportation", + "view", + "submissionList-4.xml", + ) instances = ordered_instances(self.xform) self.assertEqual(instances.count(), NUM_INSTANCES - 1) last_index = instances[instances.count() - 1].pk - with codecs.open(submission_list_path, 'rb', encoding='utf-8') as f: + with codecs.open(submission_list_path, "rb", encoding="utf-8") as f: expected_submission_list = f.read() - expected_submission_list = \ - expected_submission_list.replace( - '{{resumptionCursor}}', '%s' % last_index) + expected_submission_list = expected_submission_list.replace( + "{{resumptionCursor}}", "%s" % last_index + ) self.assertContains(response, expected_submission_list) - view = BriefcaseViewset.as_view({'get': 'retrieve'}) - formId = u'%(formId)s[@version=null and @uiVersion=null]/' \ - u'%(formId)s[@key=uuid:%(instanceId)s]' % { - 'formId': self.xform.id_string, - 'instanceId': uuid} - params = {'formId': 
formId} - request = self.factory.get( - self._download_submission_url, data=params) + view = BriefcaseViewset.as_view({"get": "retrieve"}) + formId = ( + "%(formId)s[@version=null and @uiVersion=null]/" + "%(formId)s[@key=uuid:%(instanceId)s]" + % {"formId": self.xform.id_string, "instanceId": uuid} + ) + params = {"formId": formId} + request = self.factory.get(self._download_submission_url, data=params) response = view(request, username=self.user.username) self.assertEqual(response.status_code, 401) auth = DigestAuth(self.login_username, self.login_password) @@ -294,16 +334,16 @@ def test_view_submission_list_w_deleted_submission(self): self.assertTrue(response.status_code, 404) def test_view_submission_list_OtherUser(self): - view = BriefcaseViewset.as_view({'get': 'list'}) + view = BriefcaseViewset.as_view({"get": "list"}) self._publish_xml_form() self._make_submissions() # alice cannot view bob's submissionList - alice_data = {'username': 'alice', 'email': 'alice@localhost.com'} + alice_data = {"username": "alice", "email": "alice@localhost.com"} self._create_user_profile(alice_data) - auth = DigestAuth('alice', 'bobbob') + auth = DigestAuth("alice", "bobbob") request = self.factory.get( - self._submission_list_url, - data={'formId': self.xform.id_string}) + self._submission_list_url, data={"formId": self.xform.id_string} + ) response = view(request, username=self.user.username) self.assertEqual(response.status_code, 401) request.META.update(auth(request.META, response)) @@ -323,11 +363,11 @@ def get_last_index(xform, last_index=None): return get_last_index(xform) return 0 - view = BriefcaseViewset.as_view({'get': 'list'}) + view = BriefcaseViewset.as_view({"get": "list"}) self._publish_xml_form() self._make_submissions() - params = {'formId': self.xform.id_string} - params['numEntries'] = 2 + params = {"formId": self.xform.id_string} + params["numEntries"] = 2 instances = ordered_instances(self.xform) self.assertEqual(instances.count(), NUM_INSTANCES) @@ 
-336,9 +376,7 @@ def get_last_index(xform, last_index=None): last_expected_submission_list = "" for index in range(1, 5): auth = DigestAuth(self.login_username, self.login_password) - request = self.factory.get( - self._submission_list_url, - data=params) + request = self.factory.get(self._submission_list_url, data=params) response = view(request, username=self.user.username) self.assertEqual(response.status_code, 401) request.META.update(auth(request.META, response)) @@ -346,176 +384,200 @@ def get_last_index(xform, last_index=None): self.assertEqual(response.status_code, 200) if index > 2: last_index = get_last_index(self.xform, last_index) - filename = 'submissionList-%s.xml' % index + filename = "submissionList-%s.xml" % index if index == 4: self.assertContains(response, last_expected_submission_list) continue # set cursor for second request - params['cursor'] = last_index + params["cursor"] = last_index submission_list_path = os.path.join( - self.main_directory, 'fixtures', 'transportation', - 'view', filename) - with codecs.open(submission_list_path, encoding='utf-8') as f: + self.main_directory, "fixtures", "transportation", "view", filename + ) + with codecs.open(submission_list_path, encoding="utf-8") as f: expected_submission_list = f.read() - last_expected_submission_list = expected_submission_list = \ - expected_submission_list.replace( - '{{resumptionCursor}}', '%s' % last_index) + last_expected_submission_list = ( + expected_submission_list + ) = expected_submission_list.replace( + "{{resumptionCursor}}", "%s" % last_index + ) self.assertContains(response, expected_submission_list) last_index += 2 def test_view_downloadSubmission(self): - view = BriefcaseViewset.as_view({'get': 'retrieve'}) + view = BriefcaseViewset.as_view({"get": "retrieve"}) self._publish_xml_form() self.maxDiff = None self._submit_transport_instance_w_attachment() - instanceId = u'5b2cc313-fc09-437e-8149-fcd32f695d41' + instanceId = "5b2cc313-fc09-437e-8149-fcd32f695d41" instance 
= Instance.objects.get(uuid=instanceId) - formId = u'%(formId)s[@version=null and @uiVersion=null]/' \ - u'%(formId)s[@key=uuid:%(instanceId)s]' % { - 'formId': self.xform.id_string, - 'instanceId': instanceId} - params = {'formId': formId} + formId = ( + "%(formId)s[@version=null and @uiVersion=null]/" + "%(formId)s[@key=uuid:%(instanceId)s]" + % {"formId": self.xform.id_string, "instanceId": instanceId} + ) + params = {"formId": formId} auth = DigestAuth(self.login_username, self.login_password) - request = self.factory.get( - self._download_submission_url, data=params) + request = self.factory.get(self._download_submission_url, data=params) response = view(request, username=self.user.username) self.assertEqual(response.status_code, 401) request.META.update(auth(request.META, response)) response = view(request, username=self.user.username) text = "uuid:%s" % instanceId download_submission_path = os.path.join( - self.main_directory, 'fixtures', 'transportation', - 'view', 'downloadSubmission.xml') - with codecs.open(download_submission_path, encoding='utf-8') as f: + self.main_directory, + "fixtures", + "transportation", + "view", + "downloadSubmission.xml", + ) + with codecs.open(download_submission_path, encoding="utf-8") as f: text = f.read() - for var in ((u'{{submissionDate}}', - instance.date_created.isoformat()), - (u'{{form_id}}', str(self.xform.id)), - (u'{{media_id}}', str(self.attachment.id))): + for var in ( + ("{{submissionDate}}", instance.date_created.isoformat()), + ("{{form_id}}", str(self.xform.id)), + ("{{media_id}}", str(self.attachment.id)), + ): text = text.replace(*var) self.assertContains(response, instanceId, status_code=200) - self.assertMultiLineEqual(response.content.decode('utf-8'), text) + self.assertMultiLineEqual(response.content.decode("utf-8"), text) def test_view_downloadSubmission_w_token_auth(self): - view = BriefcaseViewset.as_view({'get': 'retrieve'}) + view = BriefcaseViewset.as_view({"get": "retrieve"}) 
self._publish_xml_form() self.maxDiff = None self._submit_transport_instance_w_attachment() - instanceId = u'5b2cc313-fc09-437e-8149-fcd32f695d41' + instanceId = "5b2cc313-fc09-437e-8149-fcd32f695d41" instance = Instance.objects.get(uuid=instanceId) - formId = u'%(formId)s[@version=null and @uiVersion=null]/' \ - u'%(formId)s[@key=uuid:%(instanceId)s]' % { - 'formId': self.xform.id_string, - 'instanceId': instanceId} - params = {'formId': formId} + formId = ( + "%(formId)s[@version=null and @uiVersion=null]/" + "%(formId)s[@key=uuid:%(instanceId)s]" + % {"formId": self.xform.id_string, "instanceId": instanceId} + ) + params = {"formId": formId} # use Token auth in self.extra request = self.factory.get( - self._download_submission_url, data=params, **self.extra) + self._download_submission_url, data=params, **self.extra + ) response = view(request, username=self.user.username) self.assertEqual(response.status_code, 200) text = "uuid:%s" % instanceId download_submission_path = os.path.join( - self.main_directory, 'fixtures', 'transportation', - 'view', 'downloadSubmission.xml') - with codecs.open(download_submission_path, encoding='utf-8') as f: + self.main_directory, + "fixtures", + "transportation", + "view", + "downloadSubmission.xml", + ) + with codecs.open(download_submission_path, encoding="utf-8") as f: text = f.read() - for var in ((u'{{submissionDate}}', - instance.date_created.isoformat()), - (u'{{form_id}}', str(self.xform.id)), - (u'{{media_id}}', str(self.attachment.id))): + for var in ( + ("{{submissionDate}}", instance.date_created.isoformat()), + ("{{form_id}}", str(self.xform.id)), + ("{{media_id}}", str(self.attachment.id)), + ): text = text.replace(*var) self.assertContains(response, instanceId, status_code=200) - self.assertMultiLineEqual(response.content.decode('utf-8'), text) + self.assertMultiLineEqual(response.content.decode("utf-8"), text) def test_view_downloadSubmission_w_xformid(self): - view = BriefcaseViewset.as_view({'get': 'retrieve'}) 
+ view = BriefcaseViewset.as_view({"get": "retrieve"}) self._publish_xml_form() self.maxDiff = None self._submit_transport_instance_w_attachment() - instanceId = u'5b2cc313-fc09-437e-8149-fcd32f695d41' + instanceId = "5b2cc313-fc09-437e-8149-fcd32f695d41" instance = Instance.objects.get(uuid=instanceId) - formId = u'%(formId)s[@version=null and @uiVersion=null]/' \ - u'%(formId)s[@key=uuid:%(instanceId)s]' % { - 'formId': self.xform.id_string, - 'instanceId': instanceId} - params = {'formId': formId} + formId = ( + "%(formId)s[@version=null and @uiVersion=null]/" + "%(formId)s[@key=uuid:%(instanceId)s]" + % {"formId": self.xform.id_string, "instanceId": instanceId} + ) + params = {"formId": formId} auth = DigestAuth(self.login_username, self.login_password) self._download_submission_url = reverse( - 'view-download-submission', - kwargs={'xform_pk': self.xform.pk}) - request = self.factory.get( - self._download_submission_url, data=params) + "view-download-submission", kwargs={"xform_pk": self.xform.pk} + ) + request = self.factory.get(self._download_submission_url, data=params) response = view(request, xform_pk=self.xform.pk) self.assertEqual(response.status_code, 401) request.META.update(auth(request.META, response)) response = view(request, xform_pk=self.xform.pk) text = "uuid:%s" % instanceId download_submission_path = os.path.join( - self.main_directory, 'fixtures', 'transportation', - 'view', 'downloadSubmission.xml') - with codecs.open(download_submission_path, encoding='utf-8') as f: + self.main_directory, + "fixtures", + "transportation", + "view", + "downloadSubmission.xml", + ) + with codecs.open(download_submission_path, encoding="utf-8") as f: text = f.read() - for var in ((u'{{submissionDate}}', - instance.date_created.isoformat()), - (u'{{form_id}}', str(self.xform.id)), - (u'{{media_id}}', str(self.attachment.id))): + for var in ( + ("{{submissionDate}}", instance.date_created.isoformat()), + ("{{form_id}}", str(self.xform.id)), + ("{{media_id}}", 
str(self.attachment.id)), + ): text = text.replace(*var) self.assertContains(response, instanceId, status_code=200) - self.assertMultiLineEqual(response.content.decode('utf-8'), text) + self.assertMultiLineEqual(response.content.decode("utf-8"), text) def test_view_downloadSubmission_w_projectid(self): - view = BriefcaseViewset.as_view({'get': 'retrieve'}) + view = BriefcaseViewset.as_view({"get": "retrieve"}) self._publish_xml_form() self.maxDiff = None self._submit_transport_instance_w_attachment() - instanceId = u'5b2cc313-fc09-437e-8149-fcd32f695d41' + instanceId = "5b2cc313-fc09-437e-8149-fcd32f695d41" instance = Instance.objects.get(uuid=instanceId) - formId = u'%(formId)s[@version=null and @uiVersion=null]/' \ - u'%(formId)s[@key=uuid:%(instanceId)s]' % { - 'formId': self.xform.id_string, - 'instanceId': instanceId} - params = {'formId': formId} + formId = ( + "%(formId)s[@version=null and @uiVersion=null]/" + "%(formId)s[@key=uuid:%(instanceId)s]" + % {"formId": self.xform.id_string, "instanceId": instanceId} + ) + params = {"formId": formId} auth = DigestAuth(self.login_username, self.login_password) self._download_submission_url = reverse( - 'view-download-submission', - kwargs={'project_pk': self.xform.project.pk}) - request = self.factory.get( - self._download_submission_url, data=params) + "view-download-submission", kwargs={"project_pk": self.xform.project.pk} + ) + request = self.factory.get(self._download_submission_url, data=params) response = view(request, project_pk=self.xform.project.pk) self.assertEqual(response.status_code, 401) request.META.update(auth(request.META, response)) response = view(request, project_pk=self.xform.project.pk) text = "uuid:%s" % instanceId download_submission_path = os.path.join( - self.main_directory, 'fixtures', 'transportation', - 'view', 'downloadSubmission.xml') - with codecs.open(download_submission_path, encoding='utf-8') as f: + self.main_directory, + "fixtures", + "transportation", + "view", + 
"downloadSubmission.xml", + ) + with codecs.open(download_submission_path, encoding="utf-8") as f: text = f.read() - for var in ((u'{{submissionDate}}', - instance.date_created.isoformat()), - (u'{{form_id}}', str(self.xform.id)), - (u'{{media_id}}', str(self.attachment.id))): + for var in ( + ("{{submissionDate}}", instance.date_created.isoformat()), + ("{{form_id}}", str(self.xform.id)), + ("{{media_id}}", str(self.attachment.id)), + ): text = text.replace(*var) self.assertContains(response, instanceId, status_code=200) - self.assertMultiLineEqual(response.content.decode('utf-8'), text) + self.assertMultiLineEqual(response.content.decode("utf-8"), text) def test_view_downloadSubmission_OtherUser(self): - view = BriefcaseViewset.as_view({'get': 'retrieve'}) + view = BriefcaseViewset.as_view({"get": "retrieve"}) self._publish_xml_form() self.maxDiff = None self._submit_transport_instance_w_attachment() - instanceId = u'5b2cc313-fc09-437e-8149-fcd32f695d41' - formId = u'%(formId)s[@version=null and @uiVersion=null]/' \ - u'%(formId)s[@key=uuid:%(instanceId)s]' % { - 'formId': self.xform.id_string, - 'instanceId': instanceId} - params = {'formId': formId} + instanceId = "5b2cc313-fc09-437e-8149-fcd32f695d41" + formId = ( + "%(formId)s[@version=null and @uiVersion=null]/" + "%(formId)s[@key=uuid:%(instanceId)s]" + % {"formId": self.xform.id_string, "instanceId": instanceId} + ) + params = {"formId": formId} # alice cannot view bob's downloadSubmission - alice_data = {'username': 'alice', 'email': 'alice@localhost.com'} + alice_data = {"username": "alice", "email": "alice@localhost.com"} self._create_user_profile(alice_data) - auth = DigestAuth('alice', 'bobbob') - request = self.factory.get( - self._download_submission_url, data=params) + auth = DigestAuth("alice", "bobbob") + request = self.factory.get(self._download_submission_url, data=params) response = view(request, username=self.user.username) self.assertEqual(response.status_code, 401) 
request.META.update(auth(request.META, response)) @@ -523,15 +585,15 @@ def test_view_downloadSubmission_OtherUser(self): self.assertEqual(response.status_code, 404) def test_publish_xml_form_OtherUser(self): - view = BriefcaseViewset.as_view({'post': 'create'}) + view = BriefcaseViewset.as_view({"post": "create"}) # deno cannot publish form to bob's account - alice_data = {'username': 'alice', 'email': 'alice@localhost.com'} + alice_data = {"username": "alice", "email": "alice@localhost.com"} self._create_user_profile(alice_data) count = XForm.objects.count() - with codecs.open(self.form_def_path, encoding='utf-8') as f: - params = {'form_def_file': f, 'dataFile': ''} - auth = DigestAuth('alice', 'bobbob') + with codecs.open(self.form_def_path, encoding="utf-8") as f: + params = {"form_def_file": f, "dataFile": ""} + auth = DigestAuth("alice", "bobbob") request = self.factory.post(self._form_upload_url, data=params) response = view(request, username=self.user.username) self.assertEqual(response.status_code, 401) @@ -541,13 +603,13 @@ def test_publish_xml_form_OtherUser(self): self.assertEqual(response.status_code, 403) def test_publish_xml_form_where_filename_is_not_id_string(self): - view = BriefcaseViewset.as_view({'post': 'create'}) + view = BriefcaseViewset.as_view({"post": "create"}) form_def_path = os.path.join( - self.main_directory, 'fixtures', 'transportation', - 'Transportation Form.xml') + self.main_directory, "fixtures", "transportation", "Transportation Form.xml" + ) count = XForm.objects.count() - with codecs.open(form_def_path, encoding='utf-8') as f: - params = {'form_def_file': f, 'dataFile': ''} + with codecs.open(form_def_path, encoding="utf-8") as f: + params = {"form_def_file": f, "dataFile": ""} auth = DigestAuth(self.login_username, self.login_password) request = self.factory.post(self._form_upload_url, data=params) response = view(request, username=self.user.username) @@ -555,15 +617,14 @@ def 
test_publish_xml_form_where_filename_is_not_id_string(self): request.META.update(auth(request.META, response)) response = view(request, username=self.user.username) self.assertEqual(XForm.objects.count(), count + 1) - self.assertContains( - response, "successfully published.", status_code=201) + self.assertContains(response, "successfully published.", status_code=201) def test_form_upload(self): - view = BriefcaseViewset.as_view({'post': 'create'}) + view = BriefcaseViewset.as_view({"post": "create"}) self._publish_xml_form() - with codecs.open(self.form_def_path, encoding='utf-8') as f: - params = {'form_def_file': f, 'dataFile': ''} + with codecs.open(self.form_def_path, encoding="utf-8") as f: + params = {"form_def_file": f, "dataFile": ""} auth = DigestAuth(self.login_username, self.login_password) request = self.factory.post(self._form_upload_url, data=params) response = view(request, username=self.user.username) @@ -573,12 +634,11 @@ def test_form_upload(self): self.assertEqual(response.status_code, 400) self.assertEqual( response.data, - {'message': u'Form with this id or SMS-keyword already exists.' 
- } + {"message": "Form with this id or SMS-keyword already exists."}, ) def test_upload_head_request(self): - view = BriefcaseViewset.as_view({'head': 'create'}) + view = BriefcaseViewset.as_view({"head": "create"}) auth = DigestAuth(self.login_username, self.login_password) request = self.factory.head(self._form_upload_url) @@ -587,28 +647,26 @@ def test_upload_head_request(self): request.META.update(auth(request.META, response)) response = view(request, username=self.user.username) self.assertEqual(response.status_code, 204) - self.assertTrue(response.has_header('X-OpenRosa-Version')) - self.assertTrue( - response.has_header('X-OpenRosa-Accept-Content-Length')) - self.assertTrue(response.has_header('Date')) + self.assertTrue(response.has_header("X-OpenRosa-Version")) + self.assertTrue(response.has_header("X-OpenRosa-Accept-Content-Length")) + self.assertTrue(response.has_header("Date")) def test_submission_with_instance_id_on_root_node(self): - view = XFormSubmissionViewSet.as_view({'post': 'create'}) + view = XFormSubmissionViewSet.as_view({"post": "create"}) self._publish_xml_form() - message = u"Successful submission." - instanceId = u'5b2cc313-fc09-437e-8149-fcd32f695d41' - self.assertRaises( - Instance.DoesNotExist, Instance.objects.get, uuid=instanceId) + message = "Successful submission." 
+ instanceId = "5b2cc313-fc09-437e-8149-fcd32f695d41" + self.assertRaises(Instance.DoesNotExist, Instance.objects.get, uuid=instanceId) submission_path = os.path.join( - self.main_directory, 'fixtures', 'transportation', - 'view', 'submission.xml') + self.main_directory, "fixtures", "transportation", "view", "submission.xml" + ) count = Instance.objects.count() - with codecs.open(submission_path, encoding='utf-8') as f: - post_data = {'xml_submission_file': f} + with codecs.open(submission_path, encoding="utf-8") as f: + post_data = {"xml_submission_file": f} request = self.factory.post(self._submission_list_url, post_data) response = view(request) self.assertEqual(response.status_code, 401) - auth = DigestAuth('bob', 'bobbob') + auth = DigestAuth("bob", "bobbob") request.META.update(auth(request.META, response)) response = view(request, username=self.user.username) self.assertContains(response, message, status_code=201) @@ -617,62 +675,67 @@ def test_submission_with_instance_id_on_root_node(self): def test_form_export_with_no_xlsform_returns_200(self): self._publish_xml_form() - self.view = XFormViewSet.as_view({'get': 'retrieve'}) + self.view = XFormViewSet.as_view({"get": "retrieve"}) xform = XForm.objects.get(id_string="transportation_2011_07_25") - request = self.factory.get('/', **self.extra) - response = self.view(request, pk=xform.pk, format='csv') + request = self.factory.get("/", **self.extra) + response = self.view(request, pk=xform.pk, format="csv") self.assertEqual(response.status_code, 200) - self.view = XFormViewSet.as_view({'get': 'form'}) - response = self.view(request, pk=xform.pk, format='xls') + self.view = XFormViewSet.as_view({"get": "form"}) + response = self.view(request, pk=xform.pk, format="xls") self.assertEqual(response.status_code, 404) - @mock.patch.object(BriefcaseViewset, 'get_object') + @patch.object(BriefcaseViewset, "get_object") def test_view_downloadSubmission_no_xmlns(self, mock_get_object): - view = 
BriefcaseViewset.as_view({'get': 'retrieve'}) + view = BriefcaseViewset.as_view({"get": "retrieve"}) self._publish_xml_form() self.maxDiff = None self._submit_transport_instance_w_attachment() - instanceId = u'5b2cc313-fc09-437e-8149-fcd32f695d41' + instanceId = "5b2cc313-fc09-437e-8149-fcd32f695d41" instance = Instance.objects.get(uuid=instanceId) - instance.xml = u'nonenoneuuid:5b2cc313-fc09-437e-8149-fcd32f695d41\n' # noqa + instance.xml = 'nonenoneuuid:5b2cc313-fc09-437e-8149-fcd32f695d41\n' # noqa mock_get_object.return_value = instance - formId = u'%(formId)s[@version=null and @uiVersion=null]/' \ - u'%(formId)s[@key=uuid:%(instanceId)s]' % { - 'formId': self.xform.id_string, - 'instanceId': instanceId} - params = {'formId': formId} + formId = ( + "%(formId)s[@version=null and @uiVersion=null]/" + "%(formId)s[@key=uuid:%(instanceId)s]" + % {"formId": self.xform.id_string, "instanceId": instanceId} + ) + params = {"formId": formId} auth = DigestAuth(self.login_username, self.login_password) - request = self.factory.get( - self._download_submission_url, data=params) + request = self.factory.get(self._download_submission_url, data=params) response = view(request, username=self.user.username) self.assertEqual(response.status_code, 401) request.META.update(auth(request.META, response)) response = view(request, username=self.user.username) text = "uuid:%s" % instanceId download_submission_path = os.path.join( - self.main_directory, 'fixtures', 'transportation', - 'view', 'downloadSubmission.xml') - with codecs.open(download_submission_path, encoding='utf-8') as f: + self.main_directory, + "fixtures", + "transportation", + "view", + "downloadSubmission.xml", + ) + with codecs.open(download_submission_path, encoding="utf-8") as f: text = f.read() - for var in ((u'{{submissionDate}}', - instance.date_created.isoformat()), - (u'{{form_id}}', str(self.xform.id)), - (u'{{media_id}}', str(self.attachment.id))): + for var in ( + ("{{submissionDate}}", 
instance.date_created.isoformat()), + ("{{form_id}}", str(self.xform.id)), + ("{{media_id}}", str(self.attachment.id)), + ): text = text.replace(*var) self.assertNotIn( 'transportation id="transportation_2011_07_25"' ' instanceID="uuid:5b2cc313-fc09-437e-8149-fcd32f695d41"' f' submissionDate="{ instance.date_created.isoformat() }" ' 'xlmns="http://opendatakit.org/submission"', - text) + text, + ) self.assertContains(response, instanceId, status_code=200) with override_settings(SUPPORT_BRIEFCASE_SUBMISSION_DATE=False): - request = self.factory.get( - self._download_submission_url, data=params) + request = self.factory.get(self._download_submission_url, data=params) response = view(request, username=self.user.username) self.assertEqual(response.status_code, 401) request.META.update(auth(request.META, response)) @@ -683,40 +746,46 @@ def test_view_downloadSubmission_no_xmlns(self, mock_get_object): ' id="transportation_2011_07_25"' ' instanceID="uuid:5b2cc313-fc09-437e-8149-fcd32f695d41"' f' submissionDate="{ instance.date_created.isoformat() }"', - response.content.decode('utf-8')) + response.content.decode("utf-8"), + ) - @mock.patch.object(BriefcaseViewset, 'get_object') + @patch.object(BriefcaseViewset, "get_object") def test_view_downloadSubmission_multiple_nodes(self, mock_get_object): - view = BriefcaseViewset.as_view({'get': 'retrieve'}) + view = BriefcaseViewset.as_view({"get": "retrieve"}) self._publish_xml_form() self.maxDiff = None self._submit_transport_instance_w_attachment() - instanceId = u'5b2cc313-fc09-437e-8149-fcd32f695d41' + instanceId = "5b2cc313-fc09-437e-8149-fcd32f695d41" instance = Instance.objects.get(uuid=instanceId) - instance.xml = u'nonenoneuuid:5b2cc313-fc09-437e-8149-fcd32f695d41\n' # noqa + instance.xml = "nonenoneuuid:5b2cc313-fc09-437e-8149-fcd32f695d41\n" # noqa mock_get_object.return_value = instance - formId = u'%(formId)s[@version=null and @uiVersion=null]/' \ - u'%(formId)s[@key=uuid:%(instanceId)s]' % { - 'formId': 
self.xform.id_string, - 'instanceId': instanceId} - params = {'formId': formId} + formId = ( + "%(formId)s[@version=null and @uiVersion=null]/" + "%(formId)s[@key=uuid:%(instanceId)s]" + % {"formId": self.xform.id_string, "instanceId": instanceId} + ) + params = {"formId": formId} auth = DigestAuth(self.login_username, self.login_password) - request = self.factory.get( - self._download_submission_url, data=params) + request = self.factory.get(self._download_submission_url, data=params) response = view(request, username=self.user.username) self.assertEqual(response.status_code, 401) request.META.update(auth(request.META, response)) response = view(request, username=self.user.username) text = "uuid:%s" % instanceId download_submission_path = os.path.join( - self.main_directory, 'fixtures', 'transportation', - 'view', 'downloadSubmission.xml') - with codecs.open(download_submission_path, encoding='utf-8') as f: + self.main_directory, + "fixtures", + "transportation", + "view", + "downloadSubmission.xml", + ) + with codecs.open(download_submission_path, encoding="utf-8") as f: text = f.read() - for var in ((u'{{submissionDate}}', - instance.date_created.isoformat()), - (u'{{form_id}}', str(self.xform.id)), - (u'{{media_id}}', str(self.attachment.id))): + for var in ( + ("{{submissionDate}}", instance.date_created.isoformat()), + ("{{form_id}}", str(self.xform.id)), + ("{{media_id}}", str(self.attachment.id)), + ): text = text.replace(*var) self.assertContains(response, instanceId, status_code=200) diff --git a/onadata/apps/api/tests/viewsets/test_charts_viewset.py b/onadata/apps/api/tests/viewsets/test_charts_viewset.py index b334d5a1a1..21648cec9e 100644 --- a/onadata/apps/api/tests/viewsets/test_charts_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_charts_viewset.py @@ -1,22 +1,26 @@ +# -*- coding: utf-8 -*- +""" +Test ChartsViewSet. 
+""" import json import os -import mock +from unittest.mock import patch -from django.utils import timezone from django.core.cache import cache +from django.db.utils import DataError from django.test.utils import override_settings -from rest_framework.test import APIClient -from rest_framework.test import APIRequestFactory -from rest_framework.test import force_authenticate +from django.utils import timezone + +from rest_framework.test import APIClient, APIRequestFactory, force_authenticate + from onadata.apps.api.viewsets.charts_viewset import ChartsViewSet from onadata.apps.api.viewsets.merged_xform_viewset import MergedXFormViewSet -from onadata.apps.main.tests.test_base import TestBase from onadata.apps.logger.models.instance import Instance -from django.db.utils import DataError +from onadata.apps.main.tests.test_base import TestBase +from onadata.libs.renderers.renderers import DecimalJSONRenderer +from onadata.libs.utils.cache_tools import XFORM_CHARTS from onadata.libs.utils.timing import calculate_duration from onadata.libs.utils.user_auth import get_user_default_project -from onadata.libs.utils.cache_tools import XFORM_CHARTS -from onadata.libs.renderers.renderers import DecimalJSONRenderer def raise_data_error(a): @@ -48,7 +52,6 @@ def raise_data_error(a): class TestChartsViewSet(TestBase): - def setUp(self): super(self.__class__, self).setUp() # publish tutorial form as it has all the different field types @@ -193,7 +196,7 @@ def test_get_on_date_field(self): self.assertEqual(response.data["field_name"], "date") self.assertEqual(response.data["data_type"], "time_based") - @mock.patch("onadata.libs.data.query._execute_query", side_effect=raise_data_error) + @patch("onadata.libs.data.query._execute_query", side_effect=raise_data_error) def test_get_on_date_field_with_invalid_data(self, mock_execute_query): data = {"field_name": "date"} request = self.factory.get("/charts", data) diff --git a/onadata/apps/api/tests/viewsets/test_xform_submission_viewset.py 
b/onadata/apps/api/tests/viewsets/test_xform_submission_viewset.py index 9f7cbfb7b8..04c4667931 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_submission_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_submission_viewset.py @@ -4,6 +4,7 @@ """ import os from builtins import open # pylint: disable=redefined-builtin +from unittest.mock import patch from django.conf import settings from django.contrib.auth.models import AnonymousUser @@ -11,7 +12,6 @@ from django.http import UnreadablePostError from django.test import TransactionTestCase -import mock import simplejson as json from django_digest.test import DigestAuth @@ -843,7 +843,7 @@ def test_floip_format_multiple_rows_instance(self): data_responses = [i[4] for i in json.loads(data)] self.assertTrue(any(i in data_responses for i in instance_json.values())) - @mock.patch( + @patch( "onadata.apps.api.viewsets.xform_submission_viewset.SubmissionSerializer" ) # noqa def test_post_submission_unreadable_post_error(self, MockSerializer): @@ -1232,7 +1232,7 @@ def test_post_submission_using_project_pk_while_authenticated(self): Instance.objects.filter(xform=self.xform).count(), count + 1 ) - @mock.patch.object(ServiceDefinition, "send") + @patch.object(ServiceDefinition, "send") def test_new_submission_sent_to_rapidpro(self, mock_send): """Submission created is sent to RapidPro""" rest_service = RestService.objects.create( @@ -1276,7 +1276,7 @@ def test_new_submission_sent_to_rapidpro(self, mock_send): instance = Instance.objects.all().order_by("-pk")[0] mock_send.assert_called_once_with(rest_service.service_url, instance) - @mock.patch.object(ServiceDefinition, "send") + @patch.object(ServiceDefinition, "send") def test_edit_submission_sent_to_rapidpro(self, mock_send): """Submission edited is sent to RapidPro""" rest_service = RestService.objects.create( diff --git a/onadata/apps/main/tests/test_form_show.py b/onadata/apps/main/tests/test_form_show.py index 403e591fce..7a5d586dc4 100644 --- 
a/onadata/apps/main/tests/test_form_show.py +++ b/onadata/apps/main/tests/test_form_show.py @@ -1,25 +1,37 @@ +# -*- coding: utf-8 -*- +""" +Test form views. +""" import os -from builtins import open from unittest import skip +from unittest.mock import patch -import mock from django.core.exceptions import MultipleObjectsReturned from django.core.files.base import ContentFile +from django.test.utils import override_settings from django.urls import reverse + from httmock import HTTMock -from django.test.utils import override_settings from onadata.apps.api.tests.mocked_data import enketo_urls_mock from onadata.apps.logger.models import XForm -from onadata.apps.logger.views import download_xlsform, download_jsonform, \ - download_xform, delete_xform +from onadata.apps.logger.views import ( + delete_xform, + download_jsonform, + download_xform, + download_xlsform, +) from onadata.apps.main.tests.test_base import TestBase -from onadata.apps.main.views import show, form_photos, update_xform, profile, \ - enketo_preview -from onadata.apps.viewer.views import export_list, map_view, data_export +from onadata.apps.main.views import ( + enketo_preview, + form_photos, + profile, + show, + update_xform, +) +from onadata.apps.viewer.views import data_export, export_list, map_view from onadata.libs.utils.logger_tools import publish_xml_form -from onadata.libs.utils.user_auth import get_user_default_project -from onadata.libs.utils.user_auth import http_auth_string +from onadata.libs.utils.user_auth import get_user_default_project, http_auth_string def raise_multiple_objects_returned_error(*args, **kwargs): @@ -27,15 +39,18 @@ def raise_multiple_objects_returned_error(*args, **kwargs): class TestFormShow(TestBase): + """ + Test form views. 
+ """ def setUp(self): TestBase.setUp(self) self._create_user_and_login() self._publish_transportation_form() - self.url = reverse(show, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string - }) + self.url = reverse( + show, + kwargs={"username": self.user.username, "id_string": self.xform.id_string}, + ) def test_show_form_name(self): response = self.client.get(self.url) @@ -59,135 +74,197 @@ def test_show_to_anon_if_public(self): def test_dl_xlsx_xlsform(self): self._publish_xlsx_file() - response = self.client.get(reverse(download_xlsform, kwargs={ - 'username': self.user.username, - 'id_string': 'exp_one' - })) + response = self.client.get( + reverse( + download_xlsform, + kwargs={"username": self.user.username, "id_string": "exp_one"}, + ) + ) self.assertEqual(response.status_code, 200) self.assertEqual( - response['Content-Disposition'], - "attachment; filename=exp_one.xlsx") + response["Content-Disposition"], "attachment; filename=exp_one.xlsx" + ) # test with unavailable id_string - response = self.client.get(reverse(download_xlsform, kwargs={ - 'username': self.user.username, - 'id_string': 'random_id_string' - })) + response = self.client.get( + reverse( + download_xlsform, + kwargs={ + "username": self.user.username, + "id_string": "random_id_string", + }, + ) + ) self.assertEqual(response.status_code, 404) def test_dl_xls_to_anon_if_public(self): self.xform.shared = True self.xform.save() - response = self.anon.get(reverse(download_xlsform, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string - })) + response = self.anon.get( + reverse( + download_xlsform, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + }, + ) + ) self.assertEqual(response.status_code, 200) def test_dl_xls_for_basic_auth(self): extra = { - 'HTTP_AUTHORIZATION': - http_auth_string(self.login_username, self.login_password) + "HTTP_AUTHORIZATION": http_auth_string( + self.login_username, 
self.login_password + ) } - response = self.anon.get(reverse(download_xlsform, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string - }), **extra) + response = self.anon.get( + reverse( + download_xlsform, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + }, + ), + **extra + ) self.assertEqual(response.status_code, 200) def test_dl_json_to_anon_if_public(self): self.xform.shared = True self.xform.save() - response = self.anon.get(reverse(download_jsonform, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string - })) + response = self.anon.get( + reverse( + download_jsonform, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + }, + ) + ) self.assertEqual(response.status_code, 200) # test with unavailable id_string - response = self.anon.get(reverse(download_jsonform, kwargs={ - 'username': self.user.username, - 'id_string': 'random_id_string' - })) + response = self.anon.get( + reverse( + download_jsonform, + kwargs={ + "username": self.user.username, + "id_string": "random_id_string", + }, + ) + ) self.assertEqual(response.status_code, 404) def test_dl_jsonp_to_anon_if_public(self): self.xform.shared = True self.xform.save() - callback = 'jsonpCallback' - response = self.anon.get(reverse(download_jsonform, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string - }), {'callback': callback}) - content = response.content.decode('utf-8') + callback = "jsonpCallback" + response = self.anon.get( + reverse( + download_jsonform, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + }, + ), + {"callback": callback}, + ) + content = response.content.decode("utf-8") self.assertEqual(response.status_code, 200) - self.assertEqual(content.startswith(callback + '('), True) - self.assertEqual(content.endswith(')'), True) + self.assertEqual(content.startswith(callback + "("), True) + 
self.assertEqual(content.endswith(")"), True) def test_dl_json_for_basic_auth(self): extra = { - 'HTTP_AUTHORIZATION': - http_auth_string(self.login_username, self.login_password) + "HTTP_AUTHORIZATION": http_auth_string( + self.login_username, self.login_password + ) } - response = self.anon.get(reverse(download_jsonform, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string - }), **extra) + response = self.anon.get( + reverse( + download_jsonform, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + }, + ), + **extra + ) self.assertEqual(response.status_code, 200) def test_dl_json_for_cors_options(self): - response = self.anon.options(reverse(download_jsonform, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string - })) - allowed_headers = ['Accept', 'Origin', 'X-Requested-With', - 'Authorization'] - control_headers = response['Access-Control-Allow-Headers'] - provided_headers = [h.strip() for h in control_headers.split(',')] + response = self.anon.options( + reverse( + download_jsonform, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + }, + ) + ) + allowed_headers = ["Accept", "Origin", "X-Requested-With", "Authorization"] + control_headers = response["Access-Control-Allow-Headers"] + provided_headers = [h.strip() for h in control_headers.split(",")] self.assertListEqual(allowed_headers, provided_headers) - self.assertEqual(response['Access-Control-Allow-Methods'], 'GET') - self.assertEqual(response['Access-Control-Allow-Origin'], '*') + self.assertEqual(response["Access-Control-Allow-Methods"], "GET") + self.assertEqual(response["Access-Control-Allow-Origin"], "*") def test_dl_xform_to_anon_if_public(self): self.xform.shared = True self.xform.save() - response = self.anon.get(reverse(download_xform, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string - })) + response = self.anon.get( + reverse( + download_xform, + 
kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + }, + ) + ) self.assertEqual(response.status_code, 200) def test_dl_xform_for_basic_auth(self): extra = { - 'HTTP_AUTHORIZATION': - http_auth_string(self.login_username, self.login_password) + "HTTP_AUTHORIZATION": http_auth_string( + self.login_username, self.login_password + ) } - response = self.anon.get(reverse(download_xform, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string - }), **extra) + response = self.anon.get( + reverse( + download_xform, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + }, + ), + **extra + ) self.assertEqual(response.status_code, 200) def test_dl_xform_for_authenticated_non_owner(self): - self._create_user_and_login('alice', 'alice') - response = self.client.get(reverse(download_xform, kwargs={ - 'username': 'bob', - 'id_string': self.xform.id_string - })) + self._create_user_and_login("alice", "alice") + response = self.client.get( + reverse( + download_xform, + kwargs={"username": "bob", "id_string": self.xform.id_string}, + ) + ) self.assertEqual(response.status_code, 200) # test with unavailable id_string - response = self.client.get(reverse(download_xform, kwargs={ - 'username': 'bob', - 'id_string': 'random_id_string' - })) + response = self.client.get( + reverse( + download_xform, + kwargs={"username": "bob", "id_string": "random_id_string"}, + ) + ) self.assertEqual(response.status_code, 404) def test_show_private_if_shared_but_not_data(self): self.xform.shared = True self.xform.save() response = self.anon.get(self.url) - self.assertContains(response, 'PRIVATE') + self.assertContains(response, "PRIVATE") def test_show_link_if_shared_and_data(self): self.xform.project.shared = True @@ -197,75 +274,111 @@ def test_show_link_if_shared_and_data(self): self.xform.save() self._submit_transport_instance() response = self.anon.get(self.url) - self.assertContains(response, reverse(export_list, 
kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': 'csv' - })) + self.assertContains( + response, + reverse( + export_list, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": "csv", + }, + ), + ) # assert contains .xlsx in url - self.assertContains(response, reverse(export_list, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': 'xlsx' - })) + self.assertContains( + response, + reverse( + export_list, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": "xlsx", + }, + ), + ) # assert shouldn't contain .xls in url - self.assertNotContains(response, reverse(export_list, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': 'xls' - })) + self.assertNotContains( + response, + reverse( + export_list, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": "xls", + }, + ), + ) def test_return_error_if_xform_not_found(self): - map_url = reverse(map_view, kwargs={ - 'username': self.user.username, - 'id_string': 'random_string' - }) + map_url = reverse( + map_view, + kwargs={"username": self.user.username, "id_string": "random_string"}, + ) response = self.client.get(map_url) self.assertEqual(response.status_code, 404) - map_url = reverse(data_export, kwargs={ - 'username': self.user.username, - 'id_string': 'random_string' - }) + map_url = reverse( + data_export, + kwargs={"username": self.user.username, "id_string": "random_string"}, + ) response = self.client.get(map_url) self.assertEqual(response.status_code, 404) def test_show_link_if_owner(self): self._submit_transport_instance() response = self.client.get(self.url) - self.assertContains(response, reverse(export_list, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': 'csv' - })) - 
self.assertContains(response, reverse(export_list, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string, - 'export_type': 'xlsx' - })) - self.assertNotContains(response, reverse(map_view, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string - })) + self.assertContains( + response, + reverse( + export_list, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": "csv", + }, + ), + ) + self.assertContains( + response, + reverse( + export_list, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + "export_type": "xlsx", + }, + ), + ) + self.assertNotContains( + response, + reverse( + map_view, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + }, + ), + ) # check that a form with geopoints has the map url count = XForm.objects.count() self._publish_xls_file( - os.path.join( - os.path.dirname(__file__), "fixtures", "gps", "gps.xlsx")) + os.path.join(os.path.dirname(__file__), "fixtures", "gps", "gps.xlsx") + ) self.assertEqual(XForm.objects.count(), count + 1) - self.xform = XForm.objects.latest('date_created') - - show_url = reverse(show, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string - }) - map_url = reverse(map_view, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string - }) + self.xform = XForm.objects.latest("date_created") + + show_url = reverse( + show, + kwargs={"username": self.user.username, "id_string": self.xform.id_string}, + ) + map_url = reverse( + map_view, + kwargs={"username": self.user.username, "id_string": self.xform.id_string}, + ) response = self.client.get(show_url) # check that map url doesnt show before we have submissions self.assertNotContains(response, map_url) @@ -273,8 +386,12 @@ def test_show_link_if_owner(self): # make a submission self._make_submission( os.path.join( - os.path.dirname(__file__), "fixtures", "gps", 
"instances", - "gps_1980-01-23_20-52-08.xml") + os.path.dirname(__file__), + "fixtures", + "gps", + "instances", + "gps_1980-01-23_20-52-08.xml", + ) ) self.assertEqual(self.response.status_code, 201) # get new show view @@ -283,57 +400,62 @@ def test_show_link_if_owner(self): def test_user_sees_edit_btn(self): response = self.client.get(self.url) - self.assertContains(response, 'edit') + self.assertContains(response, "edit") def test_user_sees_settings(self): response = self.client.get(self.url) - self.assertContains(response, 'Settings') + self.assertContains(response, "Settings") def test_anon_no_edit_btn(self): self.xform.shared = True self.xform.save() response = self.anon.get(self.url) - self.assertNotContains(response, 'edit') + self.assertNotContains(response, "edit") def test_anon_no_toggle_data_share_btn(self): self.xform.shared = True self.xform.save() response = self.anon.get(self.url) - self.assertNotContains(response, 'PUBLIC') - self.assertNotContains(response, 'PRIVATE') + self.assertNotContains(response, "PUBLIC") + self.assertNotContains(response, "PRIVATE") def test_show_add_sourc_doc_if_owner(self): response = self.client.get(self.url) - self.assertContains(response, 'Source document:') + self.assertContains(response, "Source document:") def test_show_add_supporting_docs_if_owner(self): response = self.client.get(self.url) - self.assertContains(response, 'Supporting document:') + self.assertContains(response, "Supporting document:") def test_show_add_supporting_media_if_owner(self): response = self.client.get(self.url) - self.assertContains(response, 'Media upload:') + self.assertContains(response, "Media upload:") def test_show_add_mapbox_layer_if_owner(self): response = self.client.get(self.url) - self.assertContains(response, 'JSONP url:') + self.assertContains(response, "JSONP url:") def test_hide_add_supporting_docs_if_not_owner(self): self.xform.shared = True self.xform.save() response = self.anon.get(self.url) - 
self.assertNotContains(response, 'Upload') + self.assertNotContains(response, "Upload") def test_load_photo_page(self): - response = self.client.get(reverse(form_photos, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string})) + response = self.client.get( + reverse( + form_photos, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + }, + ) + ) self.assertEqual(response.status_code, 200) def test_load_from_uuid(self): self.xform = XForm.objects.get(pk=self.xform.id) - response = self.client.get(reverse(show, kwargs={ - 'uuid': self.xform.uuid})) + response = self.client.get(reverse(show, kwargs={"uuid": self.xform.uuid})) self.assertRedirects(response, self.url) def test_xls_replace_markup(self): @@ -343,56 +465,60 @@ def test_xls_replace_markup(self): # when we have 0 submissions, update markup exists self.xform.shared = True self.xform.save() - dashboard_url = reverse(profile, kwargs={ - 'username': 'bob' - }) + dashboard_url = reverse(profile, kwargs={"username": "bob"}) response = self.client.get(dashboard_url) - self.assertContains( - response, 'href="#replace-transportation_2011_07_25"') + self.assertContains(response, 'href="#replace-transportation_2011_07_25"') # a non owner can't see the markup response = self.anon.get(self.url) - self.assertNotContains( - response, 'href="#replace-transportation_2011_07_25"') + self.assertNotContains(response, 'href="#replace-transportation_2011_07_25"') # when we have a submission, we cant update the xls form self._submit_transport_instance() response = self.client.get(dashboard_url) - self.assertNotContains( - response, 'href="#replace-transportation_2011_07_25"') + self.assertNotContains(response, 'href="#replace-transportation_2011_07_25"') def test_non_owner_cannot_replace_form(self): """ Test that a non owner cannot replace a shared xls form """ - kwargs = { - 'username': self.user.username, - 'id_string': self.xform.id_string - } + kwargs = {"username": 
self.user.username, "id_string": self.xform.id_string} self.xform.shared = True self.xform.save() - request = self.factory.post('/') + request = self.factory.post("/") # create and login another user - self._create_user_and_login('peter', 'peter') + self._create_user_and_login("peter", "peter") request.user = self.user response = update_xform(request, **kwargs) self.assertEqual(response.status_code, 302) def test_replace_xform(self): - xform_update_url = reverse(update_xform, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string - }) + xform_update_url = reverse( + update_xform, + kwargs={"username": self.user.username, "id_string": self.xform.id_string}, + ) count = XForm.objects.count() - xls_path = os.path.join(self.this_directory, "fixtures", - "transportation", "transportation_updated.xlsx") - with open(xls_path, 'rb') as xls_file: - post_data = {'xls_file': xls_file} + xls_path = os.path.join( + self.this_directory, + "fixtures", + "transportation", + "transportation_updated.xlsx", + ) + with open(xls_path, "rb") as xls_file: + post_data = {"xls_file": xls_file} self.client.post(xform_update_url, post_data) self.assertEqual(XForm.objects.count(), count) - self.xform = XForm.objects.order_by('id').reverse()[0] + self.xform = XForm.objects.order_by("id").reverse()[0] # look for the preferred_means question # which is only in the updated xls - is_updated_form = len([e.name for e in self.xform.survey_elements - if e.name == u'preferred_means']) > 0 + is_updated_form = ( + len( + [ + e.name + for e in self.xform.survey_elements + if e.name == "preferred_means" + ] + ) + > 0 + ) self.assertTrue(is_updated_form) def test_update_form_doesnt_truncate_to_50_chars(self): @@ -401,91 +527,111 @@ def test_update_form_doesnt_truncate_to_50_chars(self): self.this_directory, "fixtures", "transportation", - "transportation_with_long_id_string.xlsx") + "transportation_with_long_id_string.xlsx", + ) self._publish_xls_file_and_set_xform(xls_path) # Update 
the form - xform_update_url = reverse(update_xform, kwargs={ - 'username': self.user.username, - 'id_string': self.xform.id_string - }) + xform_update_url = reverse( + update_xform, + kwargs={"username": self.user.username, "id_string": self.xform.id_string}, + ) updated_xls_path = os.path.join( self.this_directory, "fixtures", "transportation", - "transportation_with_long_id_string_updated.xlsx") - with open(updated_xls_path, 'rb') as xls_file: - post_data = {'xls_file': xls_file} + "transportation_with_long_id_string_updated.xlsx", + ) + with open(updated_xls_path, "rb") as xls_file: + post_data = {"xls_file": xls_file} self.client.post(xform_update_url, post_data) # Count should stay the same self.assertEqual(XForm.objects.count(), count + 1) - self.xform = XForm.objects.order_by('id').reverse()[0] + self.xform = XForm.objects.order_by("id").reverse()[0] # look for the preferred_means question # which is only in the updated xls - is_updated_form = len([e.name for e in self.xform.survey_elements - if e.name == u'preferred_means']) > 0 + is_updated_form = ( + len( + [ + e.name + for e in self.xform.survey_elements + if e.name == "preferred_means" + ] + ) + > 0 + ) self.assertTrue(is_updated_form) def test_xform_delete(self): id_string = self.xform.id_string - form_exists = XForm.objects.filter( - user=self.user, id_string=id_string).count() == 1 + form_exists = ( + XForm.objects.filter(user=self.user, id_string=id_string).count() == 1 + ) self.assertTrue(form_exists) - xform_delete_url = reverse(delete_xform, kwargs={ - 'username': self.user.username, - 'id_string': id_string - }) + xform_delete_url = reverse( + delete_xform, + kwargs={"username": self.user.username, "id_string": id_string}, + ) self.client.post(xform_delete_url) - form_deleted = XForm.objects.filter( - user=self.user, id_string=id_string).count() == 0 + form_deleted = ( + XForm.objects.filter(user=self.user, id_string=id_string).count() == 0 + ) self.assertTrue(form_deleted) # test with 
unavailable id_string - xform_delete_url = reverse(delete_xform, kwargs={ - 'username': self.user.username, - 'id_string': 'random_id_string' - }) + xform_delete_url = reverse( + delete_xform, + kwargs={"username": self.user.username, "id_string": "random_id_string"}, + ) response = self.client.post(xform_delete_url) self.assertEqual(response.status_code, 404) - @mock.patch('onadata.apps.logger.views.get_object_or_404', - side_effect=raise_multiple_objects_returned_error) + @patch( + "onadata.apps.logger.views.get_object_or_404", + side_effect=raise_multiple_objects_returned_error, + ) def test_delete_xforms_with_same_id_string_in_same_account( - self, mock_get_object_or_404): + self, mock_get_object_or_404 + ): id_string = self.xform.id_string - xform_delete_url = reverse(delete_xform, kwargs={ - 'username': self.user.username, - 'id_string': id_string - }) + xform_delete_url = reverse( + delete_xform, + kwargs={"username": self.user.username, "id_string": id_string}, + ) response = self.client.post(xform_delete_url) - form_deleted = XForm.objects.filter( - user=self.user, id_string=id_string).count() == 0 + form_deleted = ( + XForm.objects.filter(user=self.user, id_string=id_string).count() == 0 + ) self.assertTrue(form_deleted) self.assertEqual(response.status_code, 302) def test_non_owner_cant_delete_xform(self): id_string = self.xform.id_string - form_exists = XForm.objects.filter( - user=self.user, id_string=id_string).count() == 1 + form_exists = ( + XForm.objects.filter(user=self.user, id_string=id_string).count() == 1 + ) self.assertTrue(form_exists) - xform_delete_url = reverse(delete_xform, kwargs={ - 'username': self.user.username, - 'id_string': id_string - }) + xform_delete_url = reverse( + delete_xform, + kwargs={"username": self.user.username, "id_string": id_string}, + ) # save current user before we re-assign bob = self.user - self._create_user_and_login('alice', 'alice') + self._create_user_and_login("alice", "alice") 
self.client.post(xform_delete_url) - form_deleted = XForm.objects.filter( - user=bob, id_string=id_string).count() == 0 + form_deleted = XForm.objects.filter(user=bob, id_string=id_string).count() == 0 self.assertFalse(form_deleted) @override_settings(TESTING_MODE=False) def test_enketo_preview(self): with HTTMock(enketo_urls_mock): url = reverse( - enketo_preview, kwargs={'username': self.user.username, - 'id_string': self.xform.id_string}) + enketo_preview, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + }, + ) response = self.client.get(url) self.assertEqual(response.status_code, 302) @@ -494,42 +640,65 @@ def test_enketo_preview_works_on_shared_forms(self): self.xform.shared = True self.xform.save() url = reverse( - enketo_preview, kwargs={'username': self.user.username, - 'id_string': self.xform.id_string}) + enketo_preview, + kwargs={ + "username": self.user.username, + "id_string": self.xform.id_string, + }, + ) response = self.anon.get(url) self.assertEqual(response.status_code, 302) def test_enketo_preview_with_unavailable_id_string(self): - response = self.client.get(reverse(enketo_preview, kwargs={ - 'username': self.user.username, - 'id_string': 'random_id_string' - })) + response = self.client.get( + reverse( + enketo_preview, + kwargs={ + "username": self.user.username, + "id_string": "random_id_string", + }, + ) + ) self.assertEqual(response.status_code, 404) # TODO PLD disabling this test - @skip('Insensitivity is not enforced upon creation of id_strings.') + @skip("Insensitivity is not enforced upon creation of id_strings.") def test_form_urls_case_insensitive(self): - url = reverse(show, kwargs={ - 'username': self.user.username.upper(), - 'id_string': self.xform.id_string.upper() - }) + url = reverse( + show, + kwargs={ + "username": self.user.username.upper(), + "id_string": self.xform.id_string.upper(), + }, + ) response = self.client.get(url) self.assertEqual(response.status_code, 200) def 
test_publish_xml_xlsform_download(self): count = XForm.objects.count() path = os.path.join( - self.this_directory, '..', '..', 'api', 'tests', 'fixtures', - 'forms', 'contributions', 'contributions.xml') + self.this_directory, + "..", + "..", + "api", + "tests", + "fixtures", + "forms", + "contributions", + "contributions.xml", + ) f = open(path) xml_file = ContentFile(f.read()) f.close() - xml_file.name = 'contributions.xml' + xml_file.name = "contributions.xml" project = get_user_default_project(self.user) self.xform = publish_xml_form(xml_file, self.user, project) self.assertTrue(XForm.objects.count() > count) - response = self.client.get(reverse(download_xlsform, kwargs={ - 'username': self.user.username, - 'id_string': 'contributions' - }), follow=True) - self.assertContains(response, 'No XLS file for your form ') + response = self.client.get( + reverse( + download_xlsform, + kwargs={"username": self.user.username, "id_string": "contributions"}, + ), + follow=True, + ) + self.assertContains(response, "No XLS file for your form ") diff --git a/onadata/libs/tests/utils/test_api_export_tools.py b/onadata/libs/tests/utils/test_api_export_tools.py index fe8f09ff46..39784138c9 100644 --- a/onadata/libs/tests/utils/test_api_export_tools.py +++ b/onadata/libs/tests/utils/test_api_export_tools.py @@ -2,27 +2,30 @@ """ Test api_export_tools module. 
""" +import datetime from collections import OrderedDict, defaultdict +from unittest.mock import patch -import mock -import datetime -from google.oauth2.credentials import Credentials -from celery.backends.rpc import BacklogLimitExceeded from django.http import Http404 from django.test.utils import override_settings + +from celery.backends.rpc import BacklogLimitExceeded +from google.oauth2.credentials import Credentials from kombu.exceptions import OperationalError from rest_framework.request import Request from onadata.apps.logger.models import XForm -from onadata.apps.main.tests.test_base import TestBase from onadata.apps.main.models import TokenStorageModel +from onadata.apps.main.tests.test_base import TestBase from onadata.apps.viewer.models.export import Export, ExportConnectionError from onadata.libs.exceptions import ServiceUnavailable from onadata.libs.utils.api_export_tools import ( - get_async_response, get_existing_file_format, process_async_export, - response_for_format, + _get_google_credential, + get_async_response, + get_existing_file_format, get_metadata_format, - _get_google_credential + process_async_export, + response_for_format, ) from onadata.libs.utils.async_status import SUCCESSFUL, status_msg @@ -38,7 +41,7 @@ class TestApiExportTools(TestBase): "client_id": "client-id", "client_secret": "client-secret", "scopes": ["https://www.googleapis.com/auth/drive.file"], - "expiry": datetime.datetime(2016, 8, 18, 12, 43, 30, 316792) + "expiry": datetime.datetime(2016, 8, 18, 12, 43, 30, 316792), } def _create_old_export(self, xform, export_type, options, filename=None): @@ -48,22 +51,23 @@ def _create_old_export(self, xform, export_type, options, filename=None): export_type=export_type, options=options, filename=filename, - internal_status=Export.SUCCESSFUL).save() + internal_status=Export.SUCCESSFUL, + ).save() # pylint: disable=attribute-defined-outside-init - self.export = Export.objects.filter( - xform=xform, export_type=export_type)[0] + 
self.export = Export.objects.filter(xform=xform, export_type=export_type)[0] def test_get_google_credentials(self): """ Test create_async_export deletes credential when invalid """ - request = self.factory.get('/') + request = self.factory.get("/") request.user = self.user request.query_params = {} request.data = {} credential = self.google_credential - t = TokenStorageModel(id=self.user, - credential=Credentials(**credential, token=None)) + t = TokenStorageModel( + id=self.user, credential=Credentials(**credential, token=None) + ) t.save() self.assertFalse(t.credential.valid) response = _get_google_credential(request) @@ -71,7 +75,7 @@ def test_get_google_credentials(self): self.assertEqual(response.status_code, 302) self.assertEqual( response.url[:71], - 'https://accounts.google.com/o/oauth2/auth?response_type=code&client_id=' + "https://accounts.google.com/o/oauth2/auth?response_type=code&client_id=", ) with self.assertRaises(TokenStorageModel.DoesNotExist): TokenStorageModel.objects.get(id=self.user) @@ -81,15 +85,17 @@ def test_get_google_credentials_valid(self): Test create_async_export does not get rid of valid credential """ - request = self.factory.get('/') + request = self.factory.get("/") request.user = self.user request.query_params = {} request.data = {} - self.google_credential['expiry'] = \ - datetime.datetime.utcnow() + datetime.timedelta(seconds=300) + self.google_credential[ + "expiry" + ] = datetime.datetime.utcnow() + datetime.timedelta(seconds=300) credential = self.google_credential - t = TokenStorageModel(id=self.user, - credential=Credentials(**credential, token="token")) + t = TokenStorageModel( + id=self.user, credential=Credentials(**credential, token="token") + ) t.save() self.assertTrue(t.credential.valid) credential = _get_google_credential(request) @@ -102,15 +108,14 @@ def test_process_async_export_creates_new_export(self): Test process_async_export creates a new export. 
""" self._publish_transportation_form_and_submit_instance() - request = self.factory.post('/') + request = self.factory.post("/") request.user = self.user export_type = "csv" options = defaultdict(dict) - resp = process_async_export( - request, self.xform, export_type, options=options) + resp = process_async_export(request, self.xform, export_type, options=options) - self.assertIn('job_uuid', resp) + self.assertIn("job_uuid", resp) # pylint: disable=invalid-name @override_settings(CELERY_TASK_ALWAYS_EAGER=True) @@ -122,24 +127,24 @@ def test_process_async_export_returns_existing_export(self): options = { "group_delimiter": "/", "remove_group_name": False, - "split_select_multiples": True + "split_select_multiples": True, } - request = Request(self.factory.post('/')) + request = Request(self.factory.post("/")) request.user = self.user export_type = "csv" self._create_old_export( - self.xform, export_type, options, filename="test_async_export") + self.xform, export_type, options, filename="test_async_export" + ) - resp = process_async_export( - request, self.xform, export_type, options=options) + resp = process_async_export(request, self.xform, export_type, options=options) - self.assertEqual(resp['job_status'], status_msg[SUCCESSFUL]) + self.assertEqual(resp["job_status"], status_msg[SUCCESSFUL]) self.assertIn("export_url", resp) # pylint: disable=invalid-name - @mock.patch('onadata.libs.utils.api_export_tools.AsyncResult') + @patch("onadata.libs.utils.api_export_tools.AsyncResult") @override_settings(CELERY_TASK_ALWAYS_EAGER=True) def test_get_async_response_export_does_not_exist(self, AsyncResult): """ @@ -150,19 +155,19 @@ class MockAsyncResult(object): # pylint: disable=R0903 """Mock AsyncResult""" def __init__(self): - self.state = 'SUCCESS' + self.state = "SUCCESS" self.result = 1 AsyncResult.return_value = MockAsyncResult() self._publish_transportation_form_and_submit_instance() - request = self.factory.post('/') + request = self.factory.post("/") 
request.user = self.user with self.assertRaises(Http404): - get_async_response('job_uuid', request, self.xform) + get_async_response("job_uuid", request, self.xform) # pylint: disable=invalid-name - @mock.patch('onadata.libs.utils.api_export_tools.AsyncResult') + @patch("onadata.libs.utils.api_export_tools.AsyncResult") @override_settings(CELERY_TASK_ALWAYS_EAGER=True) def test_get_async_response_export_backlog_limit(self, AsyncResult): """ @@ -182,11 +187,11 @@ def state(self): AsyncResult.return_value = MockAsyncResult() self._publish_transportation_form_and_submit_instance() - request = self.factory.post('/') + request = self.factory.post("/") request.user = self.user - result = get_async_response('job_uuid', request, self.xform) - self.assertEqual(result, {'job_status': 'PENDING'}) + result = get_async_response("job_uuid", request, self.xform) + self.assertEqual(result, {"job_status": "PENDING"}) def test_response_for_format(self): """ @@ -196,13 +201,12 @@ def test_response_for_format(self): xform = XForm.objects.filter().last() self.assertIsNotNone(xform) self.assertIsInstance(response_for_format(xform).data, dict) - self.assertIsInstance(response_for_format(xform, 'json').data, dict) - self.assertTrue( - hasattr(response_for_format(xform, 'xls').data, 'file')) + self.assertIsInstance(response_for_format(xform, "json").data, dict) + self.assertTrue(hasattr(response_for_format(xform, "xls").data, "file")) xform.xls.storage.delete(xform.xls.name) with self.assertRaises(Http404): - response_for_format(xform, 'xls') + response_for_format(xform, "xls") def test_get_metadata_format(self): """ @@ -210,16 +214,13 @@ def test_get_metadata_format(self): """ self._publish_xlsx_file() xform = XForm.objects.filter().last() - data_value = "xform_geojson {} {}".format( - xform.pk, xform.id_string) + data_value = "xform_geojson {} {}".format(xform.pk, xform.id_string) fmt = get_metadata_format(data_value) self.assertEqual("geojson", fmt) - data_value = "dataview_geojson {} 
{}".format( - xform.pk, xform.id_string) + data_value = "dataview_geojson {} {}".format(xform.pk, xform.id_string) fmt = get_metadata_format(data_value) self.assertEqual("geojson", fmt) - data_value = "xform {} {}".format( - xform.pk, xform.id_string) + data_value = "xform {} {}".format(xform.pk, xform.id_string) fmt = get_metadata_format(data_value) self.assertEqual(fmt, "csv") @@ -229,57 +230,57 @@ def test_get_existing_file_format(self): """ self._publish_xlsx_file() xform = XForm.objects.filter().last() - fmt = get_existing_file_format(xform.xls, 'xlsx') + fmt = get_existing_file_format(xform.xls, "xlsx") self.assertEqual("xlsx", fmt) # ensure it picks existing file extension regardless # of format passed in request params - fmt = get_existing_file_format(xform.xls, 'xls') + fmt = get_existing_file_format(xform.xls, "xls") self.assertEqual("xlsx", fmt) # pylint: disable=invalid-name - @mock.patch( - 'onadata.libs.utils.api_export_tools.viewer_task.create_async_export') + @patch("onadata.libs.utils.api_export_tools.viewer_task.create_async_export") def test_process_async_export_connection_error(self, mock_task): """ Test process_async_export creates a new export. """ mock_task.side_effect = ExportConnectionError self._publish_transportation_form_and_submit_instance() - request = self.factory.post('/') + request = self.factory.post("/") request.user = self.user export_type = "csv" options = defaultdict(dict) with self.assertRaises(ServiceUnavailable): - process_async_export( - request, self.xform, export_type, options=options) + process_async_export(request, self.xform, export_type, options=options) # pylint: disable=invalid-name @override_settings(CELERY_TASK_ALWAYS_EAGER=True) - @mock.patch('onadata.libs.utils.api_export_tools.AsyncResult') + @patch("onadata.libs.utils.api_export_tools.AsyncResult") def test_get_async_response_connection_error(self, AsyncResult): """ Test get_async_response connection error. 
""" AsyncResult.side_effect = OperationalError self._publish_transportation_form_and_submit_instance() - request = self.factory.post('/') + request = self.factory.post("/") request.user = self.user with self.assertRaises(ServiceUnavailable): - get_async_response('job_uuid', request, self.xform) + get_async_response("job_uuid", request, self.xform) - @mock.patch('onadata.libs.utils.api_export_tools.AsyncResult') + @patch("onadata.libs.utils.api_export_tools.AsyncResult") @override_settings(CELERY_TASK_ALWAYS_EAGER=True) def test_get_async_response_when_result_changes_in_subsequent_calls( - self, AsyncResult): + self, AsyncResult + ): """ Test get_async_response export does not exist. """ class MockAsyncResult(object): # pylint: disable=R0903 """Mock AsyncResult""" - res = [1, {'PENDING': 'PENDING'}] + + res = [1, {"PENDING": "PENDING"}] def __init__(self): self.state = "PENDING" @@ -291,8 +292,8 @@ def result(self): AsyncResult.return_value = MockAsyncResult() self._publish_transportation_form_and_submit_instance() - request = self.factory.post('/') + request = self.factory.post("/") request.user = self.user - result = get_async_response('job_uuid', request, self.xform) - self.assertEqual(result, {'job_status': 'PENDING', 'progress': '1'}) + result = get_async_response("job_uuid", request, self.xform) + self.assertEqual(result, {"job_status": "PENDING", "progress": "1"}) From ebcb1a18c63f43d62c9341e6207bee57ea41932f Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Wed, 27 Mar 2024 00:07:25 +0300 Subject: [PATCH 182/270] Django 4.1: update dependecies --- requirements/azure.in | 2 +- requirements/azure.pip | 4 ++-- requirements/s3.in | 2 +- requirements/s3.pip | 8 ++++---- requirements/ses.in | 2 +- requirements/ses.pip | 8 ++++---- 6 files changed, 13 insertions(+), 13 deletions(-) diff --git a/requirements/azure.in b/requirements/azure.in index fbae9b96a5..7c8f971e76 100644 --- a/requirements/azure.in +++ b/requirements/azure.in @@ -1,3 +1,3 @@ 
cryptography>=39.0.1 -django ==4.0,<5 +django ==4.1.13 django-storages[azure] diff --git a/requirements/azure.pip b/requirements/azure.pip index 91dfe77194..4996bf9ee8 100644 --- a/requirements/azure.pip +++ b/requirements/azure.pip @@ -4,7 +4,7 @@ # # pip-compile --output-file=requirements/azure.pip --strip-extras requirements/azure.in # -asgiref==3.7.2 +asgiref==3.8.1 # via django azure-core==1.30.1 # via @@ -22,7 +22,7 @@ cryptography==42.0.5 # via # -r requirements/azure.in # azure-storage-blob -django==4.0 +django==4.1.13 # via # -r requirements/azure.in # django-storages diff --git a/requirements/s3.in b/requirements/s3.in index 9dcb6b3af8..5232d5e4c0 100644 --- a/requirements/s3.in +++ b/requirements/s3.in @@ -1,3 +1,3 @@ boto3 -django ==4.0,<5 +django ==4.1.13 django-storages diff --git a/requirements/s3.pip b/requirements/s3.pip index 8e68f1a5fa..5dfa1a77c8 100644 --- a/requirements/s3.pip +++ b/requirements/s3.pip @@ -4,15 +4,15 @@ # # pip-compile --output-file=requirements/s3.pip --strip-extras requirements/s3.in # -asgiref==3.7.2 +asgiref==3.8.1 # via django -boto3==1.34.66 +boto3==1.34.71 # via -r requirements/s3.in -botocore==1.34.66 +botocore==1.34.71 # via # boto3 # s3transfer -django==4.0 +django==4.1.13 # via # -r requirements/s3.in # django-storages diff --git a/requirements/ses.in b/requirements/ses.in index d0a01aa09d..e62720a74b 100644 --- a/requirements/ses.in +++ b/requirements/ses.in @@ -1,3 +1,3 @@ boto -django ==4.0,<5 +django ==4.1.13 django-ses diff --git a/requirements/ses.pip b/requirements/ses.pip index 5162be5bb4..57f2c90dab 100644 --- a/requirements/ses.pip +++ b/requirements/ses.pip @@ -4,17 +4,17 @@ # # pip-compile --output-file=requirements/ses.pip --strip-extras requirements/ses.in # -asgiref==3.7.2 +asgiref==3.8.1 # via django boto==2.49.0 # via -r requirements/ses.in -boto3==1.34.66 +boto3==1.34.71 # via django-ses -botocore==1.34.66 +botocore==1.34.71 # via # boto3 # s3transfer -django==4.0 +django==4.1.13 # via # -r 
requirements/ses.in # django-ses From ee5f61d44ec72a57e345e967099fcb85ce95f426 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Wed, 27 Mar 2024 11:37:46 +0300 Subject: [PATCH 183/270] Django 4.2: update dependecies --- requirements/azure.in | 2 +- requirements/azure.pip | 2 +- requirements/base.in | 12 ++++++------ requirements/base.pip | 36 ++++++++++++++++++------------------ requirements/dev.pip | 36 ++++++++++++++++++------------------ requirements/s3.in | 2 +- requirements/s3.pip | 2 +- requirements/ses.in | 2 +- requirements/ses.pip | 2 +- setup.cfg | 7 ++++--- 10 files changed, 52 insertions(+), 51 deletions(-) diff --git a/requirements/azure.in b/requirements/azure.in index 7c8f971e76..88168553d4 100644 --- a/requirements/azure.in +++ b/requirements/azure.in @@ -1,3 +1,3 @@ cryptography>=39.0.1 -django ==4.1.13 +django>=4.2.11,<5 django-storages[azure] diff --git a/requirements/azure.pip b/requirements/azure.pip index 4996bf9ee8..eb6756a559 100644 --- a/requirements/azure.pip +++ b/requirements/azure.pip @@ -22,7 +22,7 @@ cryptography==42.0.5 # via # -r requirements/azure.in # azure-storage-blob -django==4.1.13 +django==4.2.11 # via # -r requirements/azure.in # django-storages diff --git a/requirements/base.in b/requirements/base.in index b9286956c0..0755924497 100644 --- a/requirements/base.in +++ b/requirements/base.in @@ -2,10 +2,10 @@ -e . 
# installed from Git --e git+https://github.com/onaio/python-digest.git@08267ca8afc1a52f91352ebb5385e8e6d074fc36#egg=python-digest --e git+https://github.com/onaio/django-digest.git@6bf61ec08502fd3545d4f2c0838b6cb15e7ffa92#egg=django-digest --e git+https://github.com/onaio/django-multidb-router.git@f711368180d58eef87eda54fadfd5f8355623d52#egg=django-multidb-router --e git+https://github.com/onaio/floip-py.git@3c980eb184069ae7c3c9136b18441978237cd41d#egg=pyfloip --e git+https://github.com/onaio/python-json2xlsclient.git@62b4645f7b4f2684421a13ce98da0331a9dd66a0#egg=python-json2xlsclient --e git+https://github.com/onaio/ona-oidc.git@pytz-deprecated#egg=ona-oidc +git+https://github.com/onaio/python-digest.git@08267ca8afc1a52f91352ebb5385e8e6d074fc36#egg=python-digest +git+https://github.com/onaio/django-digest.git@6bf61ec08502fd3545d4f2c0838b6cb15e7ffa92#egg=django-digest +git+https://github.com/onaio/django-multidb-router.git@f711368180d58eef87eda54fadfd5f8355623d52#egg=django-multidb-router +git+https://github.com/onaio/floip-py.git@3c980eb184069ae7c3c9136b18441978237cd41d#egg=pyfloip +git+https://github.com/onaio/python-json2xlsclient.git@62b4645f7b4f2684421a13ce98da0331a9dd66a0#egg=python-json2xlsclient +git+https://github.com/onaio/ona-oidc.git@pytz-deprecated#egg=ona-oidc -e git+https://github.com/onaio/savreaderwriter.git@fix-pep-440-issues#egg=savreaderwriter diff --git a/requirements/base.pip b/requirements/base.pip index 32bf9f4fff..d3f695e47b 100644 --- a/requirements/base.pip +++ b/requirements/base.pip @@ -4,18 +4,6 @@ # # pip-compile --output-file=requirements/base.pip --strip-extras requirements/base.in # --e git+https://github.com/onaio/django-digest.git@6bf61ec08502fd3545d4f2c0838b6cb15e7ffa92#egg=django-digest - # via -r requirements/base.in --e git+https://github.com/onaio/django-multidb-router.git@f711368180d58eef87eda54fadfd5f8355623d52#egg=django-multidb-router - # via -r requirements/base.in --e 
git+https://github.com/onaio/ona-oidc.git@pytz-deprecated#egg=ona-oidc - # via -r requirements/base.in --e git+https://github.com/onaio/floip-py.git@3c980eb184069ae7c3c9136b18441978237cd41d#egg=pyfloip - # via -r requirements/base.in --e git+https://github.com/onaio/python-digest.git@08267ca8afc1a52f91352ebb5385e8e6d074fc36#egg=python-digest - # via -r requirements/base.in --e git+https://github.com/onaio/python-json2xlsclient.git@62b4645f7b4f2684421a13ce98da0331a9dd66a0#egg=python-json2xlsclient - # via -r requirements/base.in -e git+https://github.com/onaio/savreaderwriter.git@fix-pep-440-issues#egg=savreaderwriter # via -r requirements/base.in alabaster==0.7.16 @@ -41,9 +29,9 @@ backoff==1.10.0 # via analytics-python billiard==4.2.0 # via celery -boto3==1.34.70 +boto3==1.34.71 # via dataflows-tabulator -botocore==1.34.70 +botocore==1.34.71 # via # boto3 # s3transfer @@ -100,7 +88,7 @@ deprecated==1.2.14 # via onadata dict2xml==1.7.5 # via onadata -django==4.1.13 +django==4.2.11 # via # django-activity-stream # django-cors-headers @@ -127,12 +115,16 @@ django-csp==3.8 # via onadata django-debug-toolbar==4.3.0 # via onadata -django-filter==23.5 +django-digest @ git+https://github.com/onaio/django-digest.git@6bf61ec08502fd3545d4f2c0838b6cb15e7ffa92 + # via -r requirements/base.in +django-filter==24.1 # via onadata django-guardian==2.4.0 # via # djangorestframework-guardian # onadata +django-multidb-router @ git+https://github.com/onaio/django-multidb-router.git@f711368180d58eef87eda54fadfd5f8355623d52 + # via -r requirements/base.in django-nose==1.4.7 # via onadata django-oauth-toolkit==2.3.0 @@ -253,6 +245,8 @@ oauthlib==3.2.2 # via # django-oauth-toolkit # requests-oauthlib +ona-oidc @ git+https://github.com/onaio/ona-oidc.git@pytz-deprecated + # via -r requirements/base.in openpyxl==3.0.9 # via # dataflows-tabulator @@ -270,14 +264,16 @@ prompt-toolkit==3.0.43 # via click-repl psycopg2-binary==2.9.9 # via onadata -pyasn1==0.5.1 +pyasn1==0.6.0 # via # 
pyasn1-modules # rsa -pyasn1-modules==0.3.0 +pyasn1-modules==0.4.0 # via google-auth pycparser==2.21 # via cffi +pyfloip @ git+https://github.com/onaio/floip-py.git@3c980eb184069ae7c3c9136b18441978237cd41d + # via -r requirements/base.in pygments==2.17.2 # via sphinx pyjwt==2.8.0 @@ -298,6 +294,10 @@ python-dateutil==2.9.0.post0 # fleming # onadata # tableschema +python-digest @ git+https://github.com/onaio/python-digest.git@08267ca8afc1a52f91352ebb5385e8e6d074fc36 + # via -r requirements/base.in +python-json2xlsclient @ git+https://github.com/onaio/python-json2xlsclient.git@62b4645f7b4f2684421a13ce98da0331a9dd66a0 + # via -r requirements/base.in python-memcached==1.62 # via onadata pytz==2024.1 diff --git a/requirements/dev.pip b/requirements/dev.pip index cb40371530..28137f7906 100644 --- a/requirements/dev.pip +++ b/requirements/dev.pip @@ -4,18 +4,6 @@ # # pip-compile --output-file=requirements/dev.pip --strip-extras requirements/dev.in # --e git+https://github.com/onaio/django-digest.git@6bf61ec08502fd3545d4f2c0838b6cb15e7ffa92#egg=django-digest - # via -r requirements/base.in --e git+https://github.com/onaio/django-multidb-router.git@f711368180d58eef87eda54fadfd5f8355623d52#egg=django-multidb-router - # via -r requirements/base.in --e git+https://github.com/onaio/ona-oidc.git@pytz-deprecated#egg=ona-oidc - # via -r requirements/base.in --e git+https://github.com/onaio/floip-py.git@3c980eb184069ae7c3c9136b18441978237cd41d#egg=pyfloip - # via -r requirements/base.in --e git+https://github.com/onaio/python-digest.git@08267ca8afc1a52f91352ebb5385e8e6d074fc36#egg=python-digest - # via -r requirements/base.in --e git+https://github.com/onaio/python-json2xlsclient.git@62b4645f7b4f2684421a13ce98da0331a9dd66a0#egg=python-json2xlsclient - # via -r requirements/base.in -e git+https://github.com/onaio/savreaderwriter.git@fix-pep-440-issues#egg=savreaderwriter # via -r requirements/base.in alabaster==0.7.16 @@ -49,9 +37,9 @@ backoff==1.10.0 # via analytics-python 
billiard==4.2.0 # via celery -boto3==1.34.70 +boto3==1.34.71 # via dataflows-tabulator -botocore==1.34.70 +botocore==1.34.71 # via # boto3 # s3transfer @@ -118,7 +106,7 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -django==4.1.13 +django==4.2.11 # via # django-activity-stream # django-cors-headers @@ -146,14 +134,18 @@ django-csp==3.8 # via onadata django-debug-toolbar==4.3.0 # via onadata +django-digest @ git+https://github.com/onaio/django-digest.git@6bf61ec08502fd3545d4f2c0838b6cb15e7ffa92 + # via -r requirements/base.in django-extensions==3.2.3 # via -r requirements/dev.in -django-filter==23.5 +django-filter==24.1 # via onadata django-guardian==2.4.0 # via # djangorestframework-guardian # onadata +django-multidb-router @ git+https://github.com/onaio/django-multidb-router.git@f711368180d58eef87eda54fadfd5f8355623d52 + # via -r requirements/base.in django-nose==1.4.7 # via onadata django-oauth-toolkit==2.3.0 @@ -319,6 +311,8 @@ oauthlib==3.2.2 # via # django-oauth-toolkit # requests-oauthlib +ona-oidc @ git+https://github.com/onaio/ona-oidc.git@pytz-deprecated + # via -r requirements/base.in openpyxl==3.0.9 # via # dataflows-tabulator @@ -360,11 +354,11 @@ ptyprocess==0.7.0 # via pexpect pure-eval==0.2.2 # via stack-data -pyasn1==0.5.1 +pyasn1==0.6.0 # via # pyasn1-modules # rsa -pyasn1-modules==0.3.0 +pyasn1-modules==0.4.0 # via google-auth pycodestyle==2.9.1 # via @@ -378,6 +372,8 @@ pyflakes==2.5.0 # via # flake8 # prospector +pyfloip @ git+https://github.com/onaio/floip-py.git@3c980eb184069ae7c3c9136b18441978237cd41d + # via -r requirements/base.in pygments==2.17.2 # via # ipython @@ -422,6 +418,10 @@ python-dateutil==2.9.0.post0 # fleming # onadata # tableschema +python-digest @ git+https://github.com/onaio/python-digest.git@08267ca8afc1a52f91352ebb5385e8e6d074fc36 + # via -r requirements/base.in +python-json2xlsclient @ git+https://github.com/onaio/python-json2xlsclient.git@62b4645f7b4f2684421a13ce98da0331a9dd66a0 + # via -r 
requirements/base.in python-memcached==1.62 # via onadata pytz==2024.1 diff --git a/requirements/s3.in b/requirements/s3.in index 5232d5e4c0..936c09c17a 100644 --- a/requirements/s3.in +++ b/requirements/s3.in @@ -1,3 +1,3 @@ boto3 -django ==4.1.13 +django>=4.2.11,<5 django-storages diff --git a/requirements/s3.pip b/requirements/s3.pip index 5dfa1a77c8..fe691e6de3 100644 --- a/requirements/s3.pip +++ b/requirements/s3.pip @@ -12,7 +12,7 @@ botocore==1.34.71 # via # boto3 # s3transfer -django==4.1.13 +django==4.2.11 # via # -r requirements/s3.in # django-storages diff --git a/requirements/ses.in b/requirements/ses.in index e62720a74b..3e46b26f84 100644 --- a/requirements/ses.in +++ b/requirements/ses.in @@ -1,3 +1,3 @@ boto -django ==4.1.13 +django>=4.2.11,<5 django-ses diff --git a/requirements/ses.pip b/requirements/ses.pip index 57f2c90dab..1ca64618da 100644 --- a/requirements/ses.pip +++ b/requirements/ses.pip @@ -14,7 +14,7 @@ botocore==1.34.71 # via # boto3 # s3transfer -django==4.1.13 +django==4.2.11 # via # -r requirements/ses.in # django-ses diff --git a/setup.cfg b/setup.cfg index 7f27898ffd..abb7e495ee 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,17 +1,18 @@ [metadata] name = onadata -version = 3.19.0 +version = 4.0.0 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst url = https://github.com/onaio/onadata author = Ona Systems Inc author_email = support@ona.io -license = Copyright (c) 2022 Ona Systems Inc All rights reserved +license = Copyright (c) 2024 Ona Systems Inc All rights reserved license_file = LICENSE classifiers = Development Status :: 5 - Production/Stable Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 project_urls = Documentation = https://api.ona.io/api Source = https://github.com/onaio/onadata @@ -26,7 +27,7 @@ tests_require = httmock requests-mock install_requires = - Django==4.1.13 + Django>=4.2.11,<5 django-guardian 
django-registration-redux django-templated-email From 5f28feabdfb9da0f9dd51753c463c5654749de47 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Tue, 2 Apr 2024 13:50:58 +0300 Subject: [PATCH 184/270] Django 4.2: Update __version__ --- onadata/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/onadata/__init__.py b/onadata/__init__.py index 82441273fd..8fc1ffb3e5 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "3.19.0" +__version__ = "4.0.0" # This will make sure the app is always imported when From 2d2ee8cc54a45010bbdb7f4d3a2502636580cd03 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Tue, 2 Apr 2024 13:57:15 +0300 Subject: [PATCH 185/270] Update pre-commit dependencies --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5feb00ad06..98b634345c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,12 +1,12 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.3.0 + rev: v4.5.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer - id: check-yaml - id: check-added-large-files - repo: https://github.com/psf/black - rev: 22.6.0 + rev: 24.3.0 hooks: - id: black From 800dd6d589a5a80ca13eddeece0817631089e121 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Fri, 5 Apr 2024 12:31:13 +0300 Subject: [PATCH 186/270] Flaky tests - remove string encoding checks --- .../libs/tests/utils/test_export_builder.py | 178 +++++++++--------- 1 file changed, 89 insertions(+), 89 deletions(-) diff --git a/onadata/libs/tests/utils/test_export_builder.py b/onadata/libs/tests/utils/test_export_builder.py index e4aeee65ef..589b684b84 100644 --- a/onadata/libs/tests/utils/test_export_builder.py +++ b/onadata/libs/tests/utils/test_export_builder.py @@ -75,7 +75,7 @@ class TestExportBuilder(TestBase): }, { 
"children/cartoons/name": "Flinstones", - "children/cartoons/why": "I like bam bam\u0107" + "children/cartoons/why": "I like bam bam\u0107", # throw in a unicode character }, ], @@ -130,7 +130,7 @@ class TestExportBuilder(TestBase): }, { "childrens_survey_with_a_very_lo/cartoons/name": "Flinstones", - "childrens_survey_with_a_very_lo/cartoons/why": "I like bam bam\u0107" + "childrens_survey_with_a_very_lo/cartoons/why": "I like bam bam\u0107", # throw in a unicode character }, ], @@ -613,12 +613,12 @@ def test_zipped_sav_export_with_date_field(self): with SavReader(os.path.join(temp_dir, "exp.sav"), returnHeader=True) as reader: rows = list(reader) self.assertTrue(len(rows) > 1) - self.assertEqual(rows[0][0], b"expense_date") - self.assertEqual(rows[1][0], b"2013-01-03") - self.assertEqual(rows[0][1], b"A.gdate") - self.assertEqual(rows[1][1], b"2017-06-13") - self.assertEqual(rows[0][5], b"@_submission_time") - self.assertEqual(rows[1][5], b"2016-11-21 03:43:43") + self.assertEqual(rows[0][0], "expense_date") + self.assertEqual(rows[1][0], "2013-01-03") + self.assertEqual(rows[0][1], "A.gdate") + self.assertEqual(rows[1][1], "2017-06-13") + self.assertEqual(rows[0][5], "@_submission_time") + self.assertEqual(rows[1][5], "2016-11-21 03:43:43") shutil.rmtree(temp_dir) @@ -666,23 +666,23 @@ def test_zipped_sav_export_dynamic_select_multiple(self): with SavReader(os.path.join(temp_dir, "exp.sav"), returnHeader=True) as reader: rows = list(reader) self.assertTrue(len(rows) > 1) - self.assertEqual(rows[0][0], b"sex") - self.assertEqual(rows[1][0], b"male") - self.assertEqual(rows[0][1], b"text") - self.assertEqual(rows[1][1], b"his") - self.assertEqual(rows[0][2], b"favorite_brand") - self.assertEqual(rows[1][2], b"Generic") - self.assertEqual(rows[0][3], b"name") - self.assertEqual(rows[1][3], b"Davis") - self.assertEqual(rows[0][4], b"brand_known") - self.assertEqual(rows[1][4], b"his Generic a") - self.assertEqual(rows[0][5], b"brand_known.$text") + 
self.assertEqual(rows[0][0], "sex") + self.assertEqual(rows[1][0], "male") + self.assertEqual(rows[0][1], "text") + self.assertEqual(rows[1][1], "his") + self.assertEqual(rows[0][2], "favorite_brand") + self.assertEqual(rows[1][2], "Generic") + self.assertEqual(rows[0][3], "name") + self.assertEqual(rows[1][3], "Davis") + self.assertEqual(rows[0][4], "brand_known") + self.assertEqual(rows[1][4], "his Generic a") + self.assertEqual(rows[0][5], "brand_known.$text") self.assertEqual(rows[1][5], 1.0) - self.assertEqual(rows[0][6], b"brand_known.$favorite_brand") + self.assertEqual(rows[0][6], "brand_known.$favorite_brand") self.assertEqual(rows[1][6], 1.0) - self.assertEqual(rows[0][7], b"brand_known.a") + self.assertEqual(rows[0][7], "brand_known.a") self.assertEqual(rows[1][7], 1.0) - self.assertEqual(rows[0][8], b"brand_known.b") + self.assertEqual(rows[0][8], "brand_known.b") self.assertEqual(rows[1][8], 0.0) shutil.rmtree(temp_dir) @@ -759,16 +759,16 @@ def test_zipped_sav_export_with_numeric_select_one_field(self): self.assertTrue(len(rows) > 1) # expensed 1 - self.assertEqual(rows[0][0], b"expensed") + self.assertEqual(rows[0][0], "expensed") self.assertEqual(rows[1][0], 1) # A/q1 1 - self.assertEqual(rows[0][1], b"A.q1") + self.assertEqual(rows[0][1], "A.q1") self.assertEqual(rows[1][1], 1) # _submission_time is a date string - self.assertEqual(rows[0][5], b"@_submission_time") - self.assertEqual(rows[1][5], b"2016-11-21 03:43:43") + self.assertEqual(rows[0][5], "@_submission_time") + self.assertEqual(rows[1][5], "2016-11-21 03:43:43") # pylint: disable=invalid-name def test_zipped_sav_export_with_duplicate_field_different_groups(self): @@ -951,35 +951,35 @@ def test_zipped_sav_export_with_numeric_select_multiple_field(self): rows = list(reader) self.assertTrue(len(rows) > 1) - self.assertEqual(rows[0][0], b"expensed") - self.assertEqual(rows[1][0], b"1") + self.assertEqual(rows[0][0], "expensed") + self.assertEqual(rows[1][0], "1") # expensed.1 is selected 
hence True, 1.00 or 1 in SPSS - self.assertEqual(rows[0][1], b"expensed.1") + self.assertEqual(rows[0][1], "expensed.1") self.assertEqual(rows[1][1], 1) # expensed.0 is not selected hence False, .00 or 0 in SPSS - self.assertEqual(rows[0][2], b"expensed.0") + self.assertEqual(rows[0][2], "expensed.0") self.assertEqual(rows[1][2], 0) - self.assertEqual(rows[0][3], b"A.q1") - self.assertEqual(rows[1][3], b"1") + self.assertEqual(rows[0][3], "A.q1") + self.assertEqual(rows[1][3], "1") # ensure you get a numeric value for multiple select with choice # filters - self.assertEqual(rows[0][6], b"A.q2") - self.assertEqual(rows[1][6], b"1") + self.assertEqual(rows[0][6], "A.q2") + self.assertEqual(rows[1][6], "1") # expensed.1 is selected hence True, 1.00 or 1 in SPSS - self.assertEqual(rows[0][4], b"A.q1.1") + self.assertEqual(rows[0][4], "A.q1.1") self.assertEqual(rows[1][4], 1) # expensed.0 is not selected hence False, .00 or 0 in SPSS - self.assertEqual(rows[0][5], b"A.q1.0") + self.assertEqual(rows[0][5], "A.q1.0") self.assertEqual(rows[1][5], 0) - self.assertEqual(rows[0][12], b"@_submission_time") - self.assertEqual(rows[1][12], b"2016-11-21 03:43:43") + self.assertEqual(rows[0][12], "@_submission_time") + self.assertEqual(rows[1][12], "2016-11-21 03:43:43") shutil.rmtree(temp_dir) @@ -1012,12 +1012,12 @@ def test_zipped_sav_export_with_zero_padded_select_multiple_field(self): with SavReader(os.path.join(temp_dir, "exp.sav"), returnHeader=True) as reader: rows = list(reader) self.assertTrue(len(rows) > 1) - self.assertEqual(rows[1][0], b"1") + self.assertEqual(rows[1][0], "1") # expensed.1 is selected hence True, 1.00 or 1 in SPSS self.assertEqual(rows[1][1], 1) # expensed.0 is not selected hence False, .00 or 0 in SPSS self.assertEqual(rows[1][2], 0) - self.assertEqual(rows[1][6], b"2016-11-21 03:43:43") + self.assertEqual(rows[1][6], "2016-11-21 03:43:43") shutil.rmtree(temp_dir) @@ -1053,12 +1053,12 @@ def 
test_zipped_sav_export_with_values_split_select_multiple(self): with SavReader(os.path.join(temp_dir, "exp.sav"), returnHeader=True) as reader: rows = list(reader) self.assertTrue(len(rows) > 1) - self.assertEqual(rows[1][0], b"2 09") + self.assertEqual(rows[1][0], "2 09") # expensed.1 is selected hence True, 1.00 or 1 in SPSS self.assertEqual(rows[1][1], 2) # expensed.0 is not selected hence False, .00 or 0 in SPSS - self.assertEqual(rows[1][2], b"09") - self.assertEqual(rows[1][6], b"2016-11-21 03:43:43") + self.assertEqual(rows[1][2], "09") + self.assertEqual(rows[1][6], "2016-11-21 03:43:43") shutil.rmtree(temp_dir) @@ -1165,10 +1165,10 @@ def test_zipped_sav_export_with_duplicate_column_name(self): rows = list(reader) # Check that columns are present - self.assertIn(b"Sport", rows[0]) + self.assertIn("Sport", rows[0]) # Check for sport in first 5 characters # because rows contains 'sport@d4b6' - self.assertIn(b"sport", [x[0:5] for x in rows[0]]) + self.assertIn("sport", [x[0:5] for x in rows[0]]) # pylint: disable=invalid-name def test_xlsx_export_works_with_unicode(self): @@ -2904,7 +2904,7 @@ def test_zipped_sav_has_submission_review_fields(self): with SavReader(os.path.join(temp_dir, "osm.sav"), returnHeader=True) as reader: rows = list(reader) expected_column_headers = [ - x.encode("utf-8") + x for x in [ "photo", "osm_road", @@ -2941,9 +2941,9 @@ def test_zipped_sav_has_submission_review_fields(self): ] ] self.assertEqual(sorted(rows[0]), sorted(expected_column_headers)) - self.assertEqual(rows[1][29], b"Rejected") - self.assertEqual(rows[1][30], b"Wrong Location") - self.assertEqual(rows[1][31], b"2021-05-25T02:27:19") + self.assertEqual(rows[1][29], "Rejected") + self.assertEqual(rows[1][30], "Wrong Location") + self.assertEqual(rows[1][31], "2021-05-25T02:27:19") # pylint: disable=invalid-name def test_zipped_csv_export_with_osm_data(self): @@ -3041,7 +3041,7 @@ def test_zipped_sav_export_with_osm_data(self): with SavReader(os.path.join(temp_dir, 
"osm.sav"), returnHeader=True) as reader: rows = list(reader) expected_column_headers = [ - x.encode("utf-8") + x for x in [ "photo", "osm_road", @@ -3075,12 +3075,12 @@ def test_zipped_sav_export_with_osm_data(self): ] ] self.assertEqual(sorted(rows[0]), sorted(expected_column_headers)) - self.assertEqual(rows[1][0], b"1424308569120.jpg") - self.assertEqual(rows[1][1], b"OSMWay234134797.osm") - self.assertEqual(rows[1][2], b"23.708174238006087") - self.assertEqual(rows[1][4], b"tertiary") - self.assertEqual(rows[1][6], b"Patuatuli Road") - self.assertEqual(rows[1][13], b"kol") + self.assertEqual(rows[1][0], "1424308569120.jpg") + self.assertEqual(rows[1][1], "OSMWay234134797.osm") + self.assertEqual(rows[1][2], "23.708174238006087") + self.assertEqual(rows[1][4], "tertiary") + self.assertEqual(rows[1][6], "Patuatuli Road") + self.assertEqual(rows[1][13], "kol") def test_show_choice_labels(self): """ @@ -3607,52 +3607,52 @@ def test_sav_export_with_duplicate_metadata(self, mock_uuid): expected_data = [ [ - b"gps", - b"@_gps_latitude", - b"@_gps_longitude", - b"@_gps_altitude", - b"@_gps_precision", - b"gps@52a9", - b"@_gps_latitude_52a9", - b"@_gps_longitude_52a9", - b"@_gps_altitude_52a9", - b"@_gps_precision_52a9", - b"instanceID", - b"@_id", - b"@_uuid", - b"@_submission_time", - b"@_index", - b"@_parent_table_name", - b"@_parent_index", - b"@_tags", - b"@_notes", - b"@_version", - b"@_duration", - b"@_submitted_by", + "gps", + "@_gps_latitude", + "@_gps_longitude", + "@_gps_altitude", + "@_gps_precision", + "gps@52a9", + "@_gps_latitude_52a9", + "@_gps_longitude_52a9", + "@_gps_altitude_52a9", + "@_gps_precision_52a9", + "instanceID", + "@_id", + "@_uuid", + "@_submission_time", + "@_index", + "@_parent_table_name", + "@_parent_index", + "@_tags", + "@_notes", + "@_version", + "@_duration", + "@_submitted_by", ], [ - b"4.0 36.1 5000 20", + "4.0 36.1 5000 20", 4.0, 36.1, 5000.0, 20.0, - b"1.0 36.1 2000 20", + "1.0 36.1 2000 20", 1.0, 36.1, 2000.0, 20.0, - b"", + 
"", None, - b"", - b"2016-11-21 03:42:43", + "", + "2016-11-21 03:42:43", 1.0, - b"", + "", -1.0, - b"", - b"", - b"", - b"", - b"", + "", + "", + "", + "", + "", ], ] with SavReader( From fb194995086cef8787211dcab35164f468adcd1f Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Fri, 5 Apr 2024 13:04:01 +0300 Subject: [PATCH 187/270] Add quote to ppa:deadsnakes/ppa - address error adding the PPA --- docker/onadata-uwsgi/Dockerfile.ubuntu | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/docker/onadata-uwsgi/Dockerfile.ubuntu b/docker/onadata-uwsgi/Dockerfile.ubuntu index b511802a4e..378936166d 100644 --- a/docker/onadata-uwsgi/Dockerfile.ubuntu +++ b/docker/onadata-uwsgi/Dockerfile.ubuntu @@ -44,10 +44,10 @@ ENV LC_CTYPE en_US.UTF-8 RUN dpkg-reconfigure locales # Add Deadsnake Repository -RUN add-apt-repository ppa:deadsnakes/ppa -y && apt-get update -q - # Install OnaData Dependencies -RUN apt-get install -y --no-install-recommends \ +RUN add-apt-repository 'ppa:deadsnakes/ppa' -y \ + && apt-get update -q \ + && apt-get install -y --no-install-recommends \ libproj-dev \ gdal-bin \ memcached \ @@ -111,5 +111,3 @@ EXPOSE 8000 USER onadata CMD ["/usr/local/bin/uwsgi", "--ini", "/uwsgi.ini"] - - From 7bad5206f29b6404eda98ccd383f743a6a816349 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Fri, 5 Apr 2024 13:57:47 +0300 Subject: [PATCH 188/270] Flaky tests - string encoding --- .../libs/tests/utils/test_export_builder.py | 60 +++++++++---------- 1 file changed, 30 insertions(+), 30 deletions(-) diff --git a/onadata/libs/tests/utils/test_export_builder.py b/onadata/libs/tests/utils/test_export_builder.py index 589b684b84..85b61b9baa 100644 --- a/onadata/libs/tests/utils/test_export_builder.py +++ b/onadata/libs/tests/utils/test_export_builder.py @@ -759,16 +759,16 @@ def test_zipped_sav_export_with_numeric_select_one_field(self): self.assertTrue(len(rows) > 1) # expensed 1 - self.assertEqual(rows[0][0], "expensed") + 
self.assertEqual(rows[0][0], b"expensed") self.assertEqual(rows[1][0], 1) # A/q1 1 - self.assertEqual(rows[0][1], "A.q1") + self.assertEqual(rows[0][1], b"A.q1") self.assertEqual(rows[1][1], 1) # _submission_time is a date string - self.assertEqual(rows[0][5], "@_submission_time") - self.assertEqual(rows[1][5], "2016-11-21 03:43:43") + self.assertEqual(rows[0][5], b"@_submission_time") + self.assertEqual(rows[1][5], b"2016-11-21 03:43:43") # pylint: disable=invalid-name def test_zipped_sav_export_with_duplicate_field_different_groups(self): @@ -951,35 +951,35 @@ def test_zipped_sav_export_with_numeric_select_multiple_field(self): rows = list(reader) self.assertTrue(len(rows) > 1) - self.assertEqual(rows[0][0], "expensed") - self.assertEqual(rows[1][0], "1") + self.assertEqual(rows[0][0], b"expensed") + self.assertEqual(rows[1][0], b"1") # expensed.1 is selected hence True, 1.00 or 1 in SPSS - self.assertEqual(rows[0][1], "expensed.1") + self.assertEqual(rows[0][1], b"expensed.1") self.assertEqual(rows[1][1], 1) # expensed.0 is not selected hence False, .00 or 0 in SPSS - self.assertEqual(rows[0][2], "expensed.0") + self.assertEqual(rows[0][2], b"expensed.0") self.assertEqual(rows[1][2], 0) - self.assertEqual(rows[0][3], "A.q1") - self.assertEqual(rows[1][3], "1") + self.assertEqual(rows[0][3], b"A.q1") + self.assertEqual(rows[1][3], b"1") # ensure you get a numeric value for multiple select with choice # filters - self.assertEqual(rows[0][6], "A.q2") - self.assertEqual(rows[1][6], "1") + self.assertEqual(rows[0][6], b"A.q2") + self.assertEqual(rows[1][6], b"1") # expensed.1 is selected hence True, 1.00 or 1 in SPSS - self.assertEqual(rows[0][4], "A.q1.1") + self.assertEqual(rows[0][4], b"A.q1.1") self.assertEqual(rows[1][4], 1) # expensed.0 is not selected hence False, .00 or 0 in SPSS - self.assertEqual(rows[0][5], "A.q1.0") + self.assertEqual(rows[0][5], b"A.q1.0") self.assertEqual(rows[1][5], 0) - self.assertEqual(rows[0][12], "@_submission_time") - 
self.assertEqual(rows[1][12], "2016-11-21 03:43:43") + self.assertEqual(rows[0][12], b"@_submission_time") + self.assertEqual(rows[1][12], b"2016-11-21 03:43:43") shutil.rmtree(temp_dir) @@ -1012,12 +1012,12 @@ def test_zipped_sav_export_with_zero_padded_select_multiple_field(self): with SavReader(os.path.join(temp_dir, "exp.sav"), returnHeader=True) as reader: rows = list(reader) self.assertTrue(len(rows) > 1) - self.assertEqual(rows[1][0], "1") + self.assertEqual(rows[1][0], b"1") # expensed.1 is selected hence True, 1.00 or 1 in SPSS self.assertEqual(rows[1][1], 1) # expensed.0 is not selected hence False, .00 or 0 in SPSS self.assertEqual(rows[1][2], 0) - self.assertEqual(rows[1][6], "2016-11-21 03:43:43") + self.assertEqual(rows[1][6], b"2016-11-21 03:43:43") shutil.rmtree(temp_dir) @@ -1053,12 +1053,12 @@ def test_zipped_sav_export_with_values_split_select_multiple(self): with SavReader(os.path.join(temp_dir, "exp.sav"), returnHeader=True) as reader: rows = list(reader) self.assertTrue(len(rows) > 1) - self.assertEqual(rows[1][0], "2 09") + self.assertEqual(rows[1][0], b"2 09") # expensed.1 is selected hence True, 1.00 or 1 in SPSS self.assertEqual(rows[1][1], 2) # expensed.0 is not selected hence False, .00 or 0 in SPSS - self.assertEqual(rows[1][2], "09") - self.assertEqual(rows[1][6], "2016-11-21 03:43:43") + self.assertEqual(rows[1][2], b"09") + self.assertEqual(rows[1][6], b"2016-11-21 03:43:43") shutil.rmtree(temp_dir) @@ -2941,9 +2941,9 @@ def test_zipped_sav_has_submission_review_fields(self): ] ] self.assertEqual(sorted(rows[0]), sorted(expected_column_headers)) - self.assertEqual(rows[1][29], "Rejected") - self.assertEqual(rows[1][30], "Wrong Location") - self.assertEqual(rows[1][31], "2021-05-25T02:27:19") + self.assertEqual(rows[1][29], b"Rejected") + self.assertEqual(rows[1][30], b"Wrong Location") + self.assertEqual(rows[1][31], b"2021-05-25T02:27:19") # pylint: disable=invalid-name def test_zipped_csv_export_with_osm_data(self): @@ -3075,12 
+3075,12 @@ def test_zipped_sav_export_with_osm_data(self): ] ] self.assertEqual(sorted(rows[0]), sorted(expected_column_headers)) - self.assertEqual(rows[1][0], "1424308569120.jpg") - self.assertEqual(rows[1][1], "OSMWay234134797.osm") - self.assertEqual(rows[1][2], "23.708174238006087") - self.assertEqual(rows[1][4], "tertiary") - self.assertEqual(rows[1][6], "Patuatuli Road") - self.assertEqual(rows[1][13], "kol") + self.assertEqual(rows[1][0], b"1424308569120.jpg") + self.assertEqual(rows[1][1], b"OSMWay234134797.osm") + self.assertEqual(rows[1][2], b"23.708174238006087") + self.assertEqual(rows[1][4], b"tertiary") + self.assertEqual(rows[1][6], b"Patuatuli Road") + self.assertEqual(rows[1][13], b"kol") def test_show_choice_labels(self): """ From 5a0e3a7b8755a878da99004dcf9a2dcf050e6eb1 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Fri, 5 Apr 2024 19:51:16 +0300 Subject: [PATCH 189/270] Flaky tests - string encoding --- .../libs/tests/utils/test_export_builder.py | 98 +++++++++---------- 1 file changed, 49 insertions(+), 49 deletions(-) diff --git a/onadata/libs/tests/utils/test_export_builder.py b/onadata/libs/tests/utils/test_export_builder.py index 85b61b9baa..7577a9381d 100644 --- a/onadata/libs/tests/utils/test_export_builder.py +++ b/onadata/libs/tests/utils/test_export_builder.py @@ -613,12 +613,12 @@ def test_zipped_sav_export_with_date_field(self): with SavReader(os.path.join(temp_dir, "exp.sav"), returnHeader=True) as reader: rows = list(reader) self.assertTrue(len(rows) > 1) - self.assertEqual(rows[0][0], "expense_date") - self.assertEqual(rows[1][0], "2013-01-03") - self.assertEqual(rows[0][1], "A.gdate") - self.assertEqual(rows[1][1], "2017-06-13") - self.assertEqual(rows[0][5], "@_submission_time") - self.assertEqual(rows[1][5], "2016-11-21 03:43:43") + self.assertEqual(rows[0][0], b"expense_date") + self.assertEqual(rows[1][0], b"2013-01-03") + self.assertEqual(rows[0][1], b"A.gdate") + self.assertEqual(rows[1][1], b"2017-06-13") + 
self.assertEqual(rows[0][5], b"@_submission_time") + self.assertEqual(rows[1][5], b"2016-11-21 03:43:43") shutil.rmtree(temp_dir) @@ -666,23 +666,23 @@ def test_zipped_sav_export_dynamic_select_multiple(self): with SavReader(os.path.join(temp_dir, "exp.sav"), returnHeader=True) as reader: rows = list(reader) self.assertTrue(len(rows) > 1) - self.assertEqual(rows[0][0], "sex") - self.assertEqual(rows[1][0], "male") - self.assertEqual(rows[0][1], "text") - self.assertEqual(rows[1][1], "his") - self.assertEqual(rows[0][2], "favorite_brand") - self.assertEqual(rows[1][2], "Generic") - self.assertEqual(rows[0][3], "name") - self.assertEqual(rows[1][3], "Davis") - self.assertEqual(rows[0][4], "brand_known") - self.assertEqual(rows[1][4], "his Generic a") - self.assertEqual(rows[0][5], "brand_known.$text") + self.assertEqual(rows[0][0], b"sex") + self.assertEqual(rows[1][0], b"male") + self.assertEqual(rows[0][1], b"text") + self.assertEqual(rows[1][1], b"his") + self.assertEqual(rows[0][2], b"favorite_brand") + self.assertEqual(rows[1][2], b"Generic") + self.assertEqual(rows[0][3], b"name") + self.assertEqual(rows[1][3], b"Davis") + self.assertEqual(rows[0][4], b"brand_known") + self.assertEqual(rows[1][4], b"his Generic a") + self.assertEqual(rows[0][5], b"brand_known.$text") self.assertEqual(rows[1][5], 1.0) - self.assertEqual(rows[0][6], "brand_known.$favorite_brand") + self.assertEqual(rows[0][6], b"brand_known.$favorite_brand") self.assertEqual(rows[1][6], 1.0) - self.assertEqual(rows[0][7], "brand_known.a") + self.assertEqual(rows[0][7], b"brand_known.a") self.assertEqual(rows[1][7], 1.0) - self.assertEqual(rows[0][8], "brand_known.b") + self.assertEqual(rows[0][8], b"brand_known.b") self.assertEqual(rows[1][8], 0.0) shutil.rmtree(temp_dir) @@ -1165,10 +1165,10 @@ def test_zipped_sav_export_with_duplicate_column_name(self): rows = list(reader) # Check that columns are present - self.assertIn("Sport", rows[0]) + self.assertIn(b"Sport", rows[0]) # Check for sport in 
first 5 characters # because rows contains 'sport@d4b6' - self.assertIn("sport", [x[0:5] for x in rows[0]]) + self.assertIn(b"sport", [x[0:5] for x in rows[0]]) # pylint: disable=invalid-name def test_xlsx_export_works_with_unicode(self): @@ -2904,7 +2904,7 @@ def test_zipped_sav_has_submission_review_fields(self): with SavReader(os.path.join(temp_dir, "osm.sav"), returnHeader=True) as reader: rows = list(reader) expected_column_headers = [ - x + x.encode("utf-8") for x in [ "photo", "osm_road", @@ -3041,7 +3041,7 @@ def test_zipped_sav_export_with_osm_data(self): with SavReader(os.path.join(temp_dir, "osm.sav"), returnHeader=True) as reader: rows = list(reader) expected_column_headers = [ - x + x.encode("8") for x in [ "photo", "osm_road", @@ -3607,36 +3607,36 @@ def test_sav_export_with_duplicate_metadata(self, mock_uuid): expected_data = [ [ - "gps", - "@_gps_latitude", - "@_gps_longitude", - "@_gps_altitude", - "@_gps_precision", - "gps@52a9", - "@_gps_latitude_52a9", - "@_gps_longitude_52a9", - "@_gps_altitude_52a9", - "@_gps_precision_52a9", - "instanceID", - "@_id", - "@_uuid", - "@_submission_time", - "@_index", - "@_parent_table_name", - "@_parent_index", - "@_tags", - "@_notes", - "@_version", - "@_duration", - "@_submitted_by", + b"gps", + b"@_gps_latitude", + b"@_gps_longitude", + b"@_gps_altitude", + b"@_gps_precision", + b"gps@52a9", + b"@_gps_latitude_52a9", + b"@_gps_longitude_52a9", + b"@_gps_altitude_52a9", + b"@_gps_precision_52a9", + b"instanceID", + b"@_id", + b"@_uuid", + b"@_submission_time", + b"@_index", + b"@_parent_table_name", + b"@_parent_index", + b"@_tags", + b"@_notes", + b"@_version", + b"@_duration", + b"@_submitted_by", ], [ - "4.0 36.1 5000 20", + b"4.0 36.1 5000 20", 4.0, 36.1, 5000.0, 20.0, - "1.0 36.1 2000 20", + b"1.0 36.1 2000 20", 1.0, 36.1, 2000.0, @@ -3644,7 +3644,7 @@ def test_sav_export_with_duplicate_metadata(self, mock_uuid): "", None, "", - "2016-11-21 03:42:43", + b"2016-11-21 03:42:43", 1.0, "", -1.0, From 
8cd8c57d4b2dc8d9ac103b7d66151f3f493cf168 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Fri, 5 Apr 2024 21:38:24 +0300 Subject: [PATCH 190/270] Flaky tests - string encoding --- .../libs/tests/utils/test_export_builder.py | 81 ++++++++++--------- 1 file changed, 42 insertions(+), 39 deletions(-) diff --git a/onadata/libs/tests/utils/test_export_builder.py b/onadata/libs/tests/utils/test_export_builder.py index 7577a9381d..7edf2723bd 100644 --- a/onadata/libs/tests/utils/test_export_builder.py +++ b/onadata/libs/tests/utils/test_export_builder.py @@ -51,6 +51,11 @@ def _logger_fixture_path(*args): ) +def _str_if_bytes(val): + """Returns val as string if it is of type bytes otherwise returns bytes""" + return str(val, "utf-8") if isinstance(val, bytes) else val + + class TestExportBuilder(TestBase): """Test onadata.libs.utils.export_builder functions.""" @@ -2904,46 +2909,44 @@ def test_zipped_sav_has_submission_review_fields(self): with SavReader(os.path.join(temp_dir, "osm.sav"), returnHeader=True) as reader: rows = list(reader) expected_column_headers = [ - x.encode("utf-8") - for x in [ - "photo", - "osm_road", - "osm_building", - "fav_color", - "form_completed", - "meta.instanceID", - "@_id", - "@_uuid", - "@_submission_time", - "@_index", - "@_parent_table_name", - "@_review_comment", - f"@{REVIEW_DATE}", - "@_review_status", - "@_parent_index", - "@_tags", - "@_notes", - "@_version", - "@_duration", - "@_submitted_by", - "osm_road_ctr_lat", - "osm_road_ctr_lon", - "osm_road_highway", - "osm_road_lanes", - "osm_road_name", - "osm_road_way_id", - "osm_building_building", - "osm_building_building_levels", - "osm_building_ctr_lat", - "osm_building_ctr_lon", - "osm_building_name", - "osm_building_way_id", - ] + "photo", + "osm_road", + "osm_building", + "fav_color", + "form_completed", + "meta.instanceID", + "@_id", + "@_uuid", + "@_submission_time", + "@_index", + "@_parent_table_name", + "@_review_comment", + f"@{REVIEW_DATE}", + "@_review_status", + 
"@_parent_index", + "@_tags", + "@_notes", + "@_version", + "@_duration", + "@_submitted_by", + "osm_road_ctr_lat", + "osm_road_ctr_lon", + "osm_road_highway", + "osm_road_lanes", + "osm_road_name", + "osm_road_way_id", + "osm_building_building", + "osm_building_building_levels", + "osm_building_ctr_lat", + "osm_building_ctr_lon", + "osm_building_name", + "osm_building_way_id", ] - self.assertEqual(sorted(rows[0]), sorted(expected_column_headers)) - self.assertEqual(rows[1][29], b"Rejected") - self.assertEqual(rows[1][30], b"Wrong Location") - self.assertEqual(rows[1][31], b"2021-05-25T02:27:19") + actual_headers = list(map(_str_if_bytes, rows[0])) + self.assertEqual(sorted(actual_headers), sorted(expected_column_headers)) + self.assertEqual(_str_if_bytes(rows[1][29]), "Rejected") + self.assertEqual(_str_if_bytes(rows[1][30]), "Wrong Location") + self.assertEqual(_str_if_bytes(rows[1][31]), "2021-05-25T02:27:19") # pylint: disable=invalid-name def test_zipped_csv_export_with_osm_data(self): From 7f47f200c2f45d6742fcd450be60b6479b62a6d3 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Fri, 5 Apr 2024 23:40:34 +0300 Subject: [PATCH 191/270] Flaky tests - string encoding --- .../libs/tests/utils/test_export_builder.py | 207 +++++++++--------- 1 file changed, 105 insertions(+), 102 deletions(-) diff --git a/onadata/libs/tests/utils/test_export_builder.py b/onadata/libs/tests/utils/test_export_builder.py index 7edf2723bd..f5c2d7ec8f 100644 --- a/onadata/libs/tests/utils/test_export_builder.py +++ b/onadata/libs/tests/utils/test_export_builder.py @@ -618,12 +618,12 @@ def test_zipped_sav_export_with_date_field(self): with SavReader(os.path.join(temp_dir, "exp.sav"), returnHeader=True) as reader: rows = list(reader) self.assertTrue(len(rows) > 1) - self.assertEqual(rows[0][0], b"expense_date") - self.assertEqual(rows[1][0], b"2013-01-03") - self.assertEqual(rows[0][1], b"A.gdate") - self.assertEqual(rows[1][1], b"2017-06-13") - self.assertEqual(rows[0][5], 
b"@_submission_time") - self.assertEqual(rows[1][5], b"2016-11-21 03:43:43") + self.assertEqual(_str_if_bytes(rows[0][0]), "expense_date") + self.assertEqual(_str_if_bytes(rows[1][0]), "2013-01-03") + self.assertEqual(_str_if_bytes(rows[0][1]), "A.gdate") + self.assertEqual(_str_if_bytes(rows[1][1]), "2017-06-13") + self.assertEqual(_str_if_bytes(rows[0][5]), "@_submission_time") + self.assertEqual(_str_if_bytes(rows[1][5]), "2016-11-21 03:43:43") shutil.rmtree(temp_dir) @@ -670,24 +670,26 @@ def test_zipped_sav_export_dynamic_select_multiple(self): with SavReader(os.path.join(temp_dir, "exp.sav"), returnHeader=True) as reader: rows = list(reader) + rows[0] = list(map(_str_if_bytes, rows[0])) + rows[1] = list(map(_str_if_bytes, rows[1])) self.assertTrue(len(rows) > 1) - self.assertEqual(rows[0][0], b"sex") - self.assertEqual(rows[1][0], b"male") - self.assertEqual(rows[0][1], b"text") - self.assertEqual(rows[1][1], b"his") - self.assertEqual(rows[0][2], b"favorite_brand") - self.assertEqual(rows[1][2], b"Generic") - self.assertEqual(rows[0][3], b"name") - self.assertEqual(rows[1][3], b"Davis") - self.assertEqual(rows[0][4], b"brand_known") - self.assertEqual(rows[1][4], b"his Generic a") - self.assertEqual(rows[0][5], b"brand_known.$text") + self.assertEqual(rows[0][0], "sex") + self.assertEqual(rows[1][0], "male") + self.assertEqual(rows[0][1], "text") + self.assertEqual(rows[1][1], "his") + self.assertEqual(rows[0][2], "favorite_brand") + self.assertEqual(rows[1][2], "Generic") + self.assertEqual(rows[0][3], "name") + self.assertEqual(rows[1][3], "Davis") + self.assertEqual(rows[0][4], "brand_known") + self.assertEqual(rows[1][4], "his Generic a") + self.assertEqual(rows[0][5], "brand_known.$text") self.assertEqual(rows[1][5], 1.0) - self.assertEqual(rows[0][6], b"brand_known.$favorite_brand") + self.assertEqual(rows[0][6], "brand_known.$favorite_brand") self.assertEqual(rows[1][6], 1.0) - self.assertEqual(rows[0][7], b"brand_known.a") + 
self.assertEqual(rows[0][7], "brand_known.a") self.assertEqual(rows[1][7], 1.0) - self.assertEqual(rows[0][8], b"brand_known.b") + self.assertEqual(rows[0][8], "brand_known.b") self.assertEqual(rows[1][8], 0.0) shutil.rmtree(temp_dir) @@ -764,16 +766,16 @@ def test_zipped_sav_export_with_numeric_select_one_field(self): self.assertTrue(len(rows) > 1) # expensed 1 - self.assertEqual(rows[0][0], b"expensed") - self.assertEqual(rows[1][0], 1) + self.assertEqual(_str_if_bytes(rows[0][0]), "expensed") + self.assertEqual(_str_if_bytes(rows[1][0]), 1) # A/q1 1 - self.assertEqual(rows[0][1], b"A.q1") + self.assertEqual(_str_if_bytes(rows[0][1]), "A.q1") self.assertEqual(rows[1][1], 1) # _submission_time is a date string - self.assertEqual(rows[0][5], b"@_submission_time") - self.assertEqual(rows[1][5], b"2016-11-21 03:43:43") + self.assertEqual(_str_if_bytes(rows[0][5]), "@_submission_time") + self.assertEqual(_str_if_bytes(rows[1][5]), "2016-11-21 03:43:43") # pylint: disable=invalid-name def test_zipped_sav_export_with_duplicate_field_different_groups(self): @@ -956,35 +958,35 @@ def test_zipped_sav_export_with_numeric_select_multiple_field(self): rows = list(reader) self.assertTrue(len(rows) > 1) - self.assertEqual(rows[0][0], b"expensed") - self.assertEqual(rows[1][0], b"1") + self.assertEqual(_str_if_bytes(rows[0][0]), "expensed") + self.assertEqual(_str_if_bytes(rows[1][0]), "1") # expensed.1 is selected hence True, 1.00 or 1 in SPSS - self.assertEqual(rows[0][1], b"expensed.1") + self.assertEqual(_str_if_bytes(rows[0][1]), "expensed.1") self.assertEqual(rows[1][1], 1) # expensed.0 is not selected hence False, .00 or 0 in SPSS - self.assertEqual(rows[0][2], b"expensed.0") + self.assertEqual(_str_if_bytes(rows[0][2]), "expensed.0") self.assertEqual(rows[1][2], 0) - self.assertEqual(rows[0][3], b"A.q1") - self.assertEqual(rows[1][3], b"1") + self.assertEqual(_str_if_bytes(rows[0][3]), "A.q1") + self.assertEqual(_str_if_bytes(rows[1][3]), "1") # ensure you get a numeric 
value for multiple select with choice # filters - self.assertEqual(rows[0][6], b"A.q2") - self.assertEqual(rows[1][6], b"1") + self.assertEqual(_str_if_bytes(rows[0][6]), "A.q2") + self.assertEqual(_str_if_bytes(rows[1][6]), "1") # expensed.1 is selected hence True, 1.00 or 1 in SPSS - self.assertEqual(rows[0][4], b"A.q1.1") + self.assertEqual(_str_if_bytes(rows[0][4]), "A.q1.1") self.assertEqual(rows[1][4], 1) # expensed.0 is not selected hence False, .00 or 0 in SPSS - self.assertEqual(rows[0][5], b"A.q1.0") + self.assertEqual(_str_if_bytes(rows[0][5]), "A.q1.0") self.assertEqual(rows[1][5], 0) - self.assertEqual(rows[0][12], b"@_submission_time") - self.assertEqual(rows[1][12], b"2016-11-21 03:43:43") + self.assertEqual(_str_if_bytes(rows[0][12]), "@_submission_time") + self.assertEqual(_str_if_bytes(rows[1][12]), "2016-11-21 03:43:43") shutil.rmtree(temp_dir) @@ -1170,10 +1172,10 @@ def test_zipped_sav_export_with_duplicate_column_name(self): rows = list(reader) # Check that columns are present - self.assertIn(b"Sport", rows[0]) + self.assertIn("Sport", _str_if_bytes(rows[0])) # Check for sport in first 5 characters # because rows contains 'sport@d4b6' - self.assertIn(b"sport", [x[0:5] for x in rows[0]]) + self.assertIn("sport", list(map(_str_if_bytes, [x[0:5] for x in rows[0]]))) # pylint: disable=invalid-name def test_xlsx_export_works_with_unicode(self): @@ -3044,46 +3046,45 @@ def test_zipped_sav_export_with_osm_data(self): with SavReader(os.path.join(temp_dir, "osm.sav"), returnHeader=True) as reader: rows = list(reader) expected_column_headers = [ - x.encode("8") - for x in [ - "photo", - "osm_road", - "osm_building", - "fav_color", - "form_completed", - "meta.instanceID", - "@_id", - "@_uuid", - "@_submission_time", - "@_index", - "@_parent_table_name", - "@_parent_index", - "@_tags", - "@_notes", - "@_version", - "@_duration", - "@_submitted_by", - "osm_road_ctr_lat", - "osm_road_ctr_lon", - "osm_road_highway", - "osm_road_lanes", - "osm_road_name", - 
"osm_road_way_id", - "osm_building_building", - "osm_building_building_levels", - "osm_building_ctr_lat", - "osm_building_ctr_lon", - "osm_building_name", - "osm_building_way_id", - ] + "photo", + "osm_road", + "osm_building", + "fav_color", + "form_completed", + "meta.instanceID", + "@_id", + "@_uuid", + "@_submission_time", + "@_index", + "@_parent_table_name", + "@_parent_index", + "@_tags", + "@_notes", + "@_version", + "@_duration", + "@_submitted_by", + "osm_road_ctr_lat", + "osm_road_ctr_lon", + "osm_road_highway", + "osm_road_lanes", + "osm_road_name", + "osm_road_way_id", + "osm_building_building", + "osm_building_building_levels", + "osm_building_ctr_lat", + "osm_building_ctr_lon", + "osm_building_name", + "osm_building_way_id", ] + rows[0] = list(map(_str_if_bytes, rows[0])) + rows[1] = list(map(_str_if_bytes, rows[1])) self.assertEqual(sorted(rows[0]), sorted(expected_column_headers)) - self.assertEqual(rows[1][0], b"1424308569120.jpg") - self.assertEqual(rows[1][1], b"OSMWay234134797.osm") - self.assertEqual(rows[1][2], b"23.708174238006087") - self.assertEqual(rows[1][4], b"tertiary") - self.assertEqual(rows[1][6], b"Patuatuli Road") - self.assertEqual(rows[1][13], b"kol") + self.assertEqual(rows[1][0], "1424308569120.jpg") + self.assertEqual(rows[1][1], "OSMWay234134797.osm") + self.assertEqual(rows[1][2], "23.708174238006087") + self.assertEqual(rows[1][4], "tertiary") + self.assertEqual(rows[1][6], "Patuatuli Road") + self.assertEqual(rows[1][13], "kol") def test_show_choice_labels(self): """ @@ -3610,36 +3611,36 @@ def test_sav_export_with_duplicate_metadata(self, mock_uuid): expected_data = [ [ - b"gps", - b"@_gps_latitude", - b"@_gps_longitude", - b"@_gps_altitude", - b"@_gps_precision", - b"gps@52a9", - b"@_gps_latitude_52a9", - b"@_gps_longitude_52a9", - b"@_gps_altitude_52a9", - b"@_gps_precision_52a9", - b"instanceID", - b"@_id", - b"@_uuid", - b"@_submission_time", - b"@_index", - b"@_parent_table_name", - b"@_parent_index", - b"@_tags", - 
b"@_notes", - b"@_version", - b"@_duration", - b"@_submitted_by", + "gps", + "@_gps_latitude", + "@_gps_longitude", + "@_gps_altitude", + "@_gps_precision", + "gps@52a9", + "@_gps_latitude_52a9", + "@_gps_longitude_52a9", + "@_gps_altitude_52a9", + "@_gps_precision_52a9", + "instanceID", + "@_id", + "@_uuid", + "@_submission_time", + "@_index", + "@_parent_table_name", + "@_parent_index", + "@_tags", + "@_notes", + "@_version", + "@_duration", + "@_submitted_by", ], [ - b"4.0 36.1 5000 20", + "4.0 36.1 5000 20", 4.0, 36.1, 5000.0, 20.0, - b"1.0 36.1 2000 20", + "1.0 36.1 2000 20", 1.0, 36.1, 2000.0, @@ -3647,7 +3648,7 @@ def test_sav_export_with_duplicate_metadata(self, mock_uuid): "", None, "", - b"2016-11-21 03:42:43", + "2016-11-21 03:42:43", 1.0, "", -1.0, @@ -3663,5 +3664,7 @@ def test_sav_export_with_duplicate_metadata(self, mock_uuid): ) as reader: rows = list(reader) self.assertEqual(len(rows), 2) + rows[0] = list(map(_str_if_bytes, rows[0])) + rows[1] = list(map(_str_if_bytes, rows[1])) self.assertEqual(expected_data, rows) shutil.rmtree(temp_dir) From efdfe80ba1af86dfd74a3ffe11d08b7707b6cf0e Mon Sep 17 00:00:00 2001 From: apiyo Date: Thu, 11 Apr 2024 18:18:43 +0300 Subject: [PATCH 192/270] When an odk token expires is nil, deactivate and replace --- .../api/tests/viewsets/test_connect_viewset.py | 18 ++++++++++++++++++ onadata/apps/api/viewsets/connect_viewset.py | 4 +++- 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/onadata/apps/api/tests/viewsets/test_connect_viewset.py b/onadata/apps/api/tests/viewsets/test_connect_viewset.py index d6aa51706b..9d6b358b00 100644 --- a/onadata/apps/api/tests/viewsets/test_connect_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_connect_viewset.py @@ -603,6 +603,24 @@ def test_retrieve_odk_token(self): self.assertEqual(response.data["odk_token"], odk_token) self.assertEqual(response.data["expires"], expires) + def test_deactivate_token_when_expires_is_None(self): + """ + Test that when a token's 
.expires field is nil, it will be deactivated + and a new one created in it's place + """ + view = ConnectViewSet.as_view({"post": "odk_token", "get": "odk_token"}) + + # Create an active tokens + token = ODKToken.objects.create(user=self.user) + ODKToken.objects.filter(pk=token.pk).update(expires=None) + + request = self.factory.get("/", **self.extra) + request.session = self.client.session + response = view(request) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(ODKToken.objects.filter(status=ODKToken.ACTIVE)), 1) + self.assertNotEqual(response.data["odk_token"], token.raw_key) + def test_deactivates_multiple_active_odk_token(self): """ Test that the viewset deactivates tokens when two or more are diff --git a/onadata/apps/api/viewsets/connect_viewset.py b/onadata/apps/api/viewsets/connect_viewset.py index a662f782b0..c2c586c61e 100644 --- a/onadata/apps/api/viewsets/connect_viewset.py +++ b/onadata/apps/api/viewsets/connect_viewset.py @@ -178,7 +178,9 @@ def odk_token(self, request, *args, **kwargs): user=user, status=ODKToken.ACTIVE ) - if not created and timezone.now() > token.expires: + if not token.expires or ( + not created and timezone.now() > token.expires + ): token.status = ODKToken.INACTIVE token.save() token = ODKToken.objects.create(user=user) From e2660963ec6fc4f2e6a200f3171efceae451b1ba Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Fri, 12 Apr 2024 10:08:27 +0300 Subject: [PATCH 193/270] Flatten select multiples in repeating sections during CSV import (#2578) * flatten select multiples in repeating sections * fix failing test --- ...csv_import_multiple_split_group_repeat.csv | 2 + ..._import_multiple_wo_split_group_repeat.csv | 2 + onadata/libs/tests/utils/test_csv_import.py | 104 ++++++++++++++++++ .../libs/tests/utils/test_export_builder.py | 2 +- onadata/libs/utils/csv_import.py | 14 ++- 5 files changed, 122 insertions(+), 2 deletions(-) create mode 100644 
onadata/libs/tests/utils/fixtures/csv_import_multiple_split_group_repeat.csv create mode 100644 onadata/libs/tests/utils/fixtures/csv_import_multiple_wo_split_group_repeat.csv diff --git a/onadata/libs/tests/utils/fixtures/csv_import_multiple_split_group_repeat.csv b/onadata/libs/tests/utils/fixtures/csv_import_multiple_split_group_repeat.csv new file mode 100644 index 0000000000..a97a56c2a9 --- /dev/null +++ b/onadata/libs/tests/utils/fixtures/csv_import_multiple_split_group_repeat.csv @@ -0,0 +1,2 @@ +name,age,grp1/grp2/browser_use[1]/grp3/grp4/grp5/year,grp1/grp2/browser_use[1]/grp3/grp4/grp5/browsers/firefox,grp1/grp2/browser_use[1]/grp3/grp4/grp5/browsers/chrome,grp1/grp2/browser_use[1]/grp3/grp4/grp5/browsers/ie,grp1/grp2/browser_use[1]/grp3/grp4/grp5/browsers/safari,grp1/grp2/browser_use[2]/grp3/grp4/grp5/year,grp1/grp2/browser_use[2]/grp3/grp4/grp5/browsers/firefox,grp1/grp2/browser_use[2]/grp3/grp4/grp5/browsers/chrome,grp1/grp2/browser_use[2]/grp3/grp4/grp5/browsers/ie,grp1/grp2/browser_use[2]/grp3/grp4/grp5/browsers/safari,meta/instanceID +Vic,30,2010,True,False,False,True,2011,True,True,False,False,uuid:13f5c2a8-0ba2-44e2-bd0a-9c6fa3991484 diff --git a/onadata/libs/tests/utils/fixtures/csv_import_multiple_wo_split_group_repeat.csv b/onadata/libs/tests/utils/fixtures/csv_import_multiple_wo_split_group_repeat.csv new file mode 100644 index 0000000000..8e3f030c29 --- /dev/null +++ b/onadata/libs/tests/utils/fixtures/csv_import_multiple_wo_split_group_repeat.csv @@ -0,0 +1,2 @@ +name,age,grp1/grp2/browser_use[1]/grp3/grp4/grp5/year,grp1/grp2/browser_use[1]/grp3/grp4/grp5/browsers,grp1/grp2/browser_use[2]/grp3/grp4/grp5/year,grp1/grp2/browser_use[2]/grp3/grp4/grp5/browsers,meta/instanceID +Vic,30,2010,firefox safari,2011,firefox chrome,uuid:13f5c2a8-0ba2-44e2-bd0a-9c6fa3991484 diff --git a/onadata/libs/tests/utils/test_csv_import.py b/onadata/libs/tests/utils/test_csv_import.py index 461b46bafa..d19a546d53 100644 --- 
a/onadata/libs/tests/utils/test_csv_import.py +++ b/onadata/libs/tests/utils/test_csv_import.py @@ -599,3 +599,107 @@ def test_get_columns_by_type(self): self.assertTrue( submission.json["section_B/year_established"].startswith("1890") ) + + def test_select_multiples_grouped_repeating_w_split(self): + """Select multiple choices within group within repeat with split""" + md_xform = """ + | survey | | | | + | | type | name | label | + | | text | name | Name | + | | integer | age | Age | + | | begin group | grp1 | Group 1 | + | | begin group | grp2 | Group 2 | + | | begin repeat | browser_use | Browser Use | + | | begin group | grp3 | Group 3 | + | | begin group | grp4 | Group 4 | + | | begin group | grp5 | Group 5 | + | | integer | year | Year | + | | select_multiple browsers | browsers | Browsers | + | | end group | | | + | | end group | | | + | | end group | | | + | | end repeat | | | + | | end group | | | + | | end group | | | + | choices | | | | + | | list_name | name | label | + | | browsers | firefox | Firefox | + | | browsers | chrome | Chrome | + | | browsers | ie | Internet Explorer | + | | browsers | safari | Safari |""" + xform = self._publish_markdown(md_xform, self.user, id_string="nested_split") + + with open( + os.path.join( + self.fixtures_dir, "csv_import_multiple_split_group_repeat.csv" + ), + "rb", + ) as csv_file: + csv_import.submit_csv(self.user.username, xform, csv_file) + self.assertEqual(Instance.objects.count(), 1) + submission = Instance.objects.first() + self.assertEqual( + submission.json["grp1/grp2/browser_use"], + [ + { + "grp1/grp2/browser_use/grp3/grp4/grp5/year": 2010, + "grp1/grp2/browser_use/grp3/grp4/grp5/browsers": "firefox safari", + }, + { + "grp1/grp2/browser_use/grp3/grp4/grp5/year": 2011, + "grp1/grp2/browser_use/grp3/grp4/grp5/browsers": "firefox chrome", + }, + ], + ) + + def test_select_multiples_grouped_repeating_wo_split(self): + """Select multiple choices within group within repeat without split""" + md_xform = """ + | 
survey | | | | + | | type | name | label | + | | text | name | Name | + | | integer | age | Age | + | | begin group | grp1 | Group 1 | + | | begin group | grp2 | Group 2 | + | | begin repeat | browser_use | Browser Use | + | | begin group | grp3 | Group 3 | + | | begin group | grp4 | Group 4 | + | | begin group | grp5 | Group 5 | + | | integer | year | Year | + | | select_multiple browsers | browsers | Browsers | + | | end group | | | + | | end group | | | + | | end group | | | + | | end repeat | | | + | | end group | | | + | | end group | | | + | choices | | | | + | | list_name | name | label | + | | browsers | firefox | Firefox | + | | browsers | chrome | Chrome | + | | browsers | ie | Internet Explorer | + | | browsers | safari | Safari |""" + xform = self._publish_markdown(md_xform, self.user, id_string="nested_split") + + with open( + os.path.join( + self.fixtures_dir, "csv_import_multiple_wo_split_group_repeat.csv" + ), + "rb", + ) as csv_file: + csv_import.submit_csv(self.user.username, xform, csv_file) + self.assertEqual(Instance.objects.count(), 1) + submission = Instance.objects.first() + self.assertEqual( + submission.json["grp1/grp2/browser_use"], + [ + { + "grp1/grp2/browser_use/grp3/grp4/grp5/year": 2010, + "grp1/grp2/browser_use/grp3/grp4/grp5/browsers": "firefox safari", + }, + { + "grp1/grp2/browser_use/grp3/grp4/grp5/year": 2011, + "grp1/grp2/browser_use/grp3/grp4/grp5/browsers": "firefox chrome", + }, + ], + ) diff --git a/onadata/libs/tests/utils/test_export_builder.py b/onadata/libs/tests/utils/test_export_builder.py index f5c2d7ec8f..ec1fe751ae 100644 --- a/onadata/libs/tests/utils/test_export_builder.py +++ b/onadata/libs/tests/utils/test_export_builder.py @@ -1172,7 +1172,7 @@ def test_zipped_sav_export_with_duplicate_column_name(self): rows = list(reader) # Check that columns are present - self.assertIn("Sport", _str_if_bytes(rows[0])) + self.assertIn("Sport", [_str_if_bytes(item) for item in rows[0]]) # Check for sport in first 5 
characters # because rows contains 'sport@d4b6' self.assertIn("sport", list(map(_str_if_bytes, [x[0:5] for x in rows[0]]))) diff --git a/onadata/libs/utils/csv_import.py b/onadata/libs/utils/csv_import.py index 4ce7f19f4e..fdd807646d 100644 --- a/onadata/libs/utils/csv_import.py +++ b/onadata/libs/utils/csv_import.py @@ -287,7 +287,7 @@ def flatten_split_select_multiples( for key, value in row.items(): if key in select_multiples and isinstance(value, dict): picked_choices = [ - k for k, v in value.items() if v in ["1", "TRUE"] or v == k + k for k, v in value.items() if v.upper() in ["1", "TRUE"] or v == k ] new_value = " ".join(picked_choices) row.update({key: new_value}) @@ -295,6 +295,18 @@ def flatten_split_select_multiples( # Handle cases where select_multiples are within a group new_value = flatten_split_select_multiples(value, select_multiples) row.update({key: new_value}) + elif isinstance(value, list): + # Handle case where we have repeat questions + new_value = [] + + for repeat_question in value: + flattened_question = flatten_split_select_multiples( + repeat_question, select_multiples + ) + new_value.append(flattened_question) + + row.update({key: new_value}) + return row From 9e10a18b38904f8a1272a57395d2dae730bab5bf Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Mon, 15 Apr 2024 18:20:38 +0300 Subject: [PATCH 194/270] bump version to 4.0.1 (#2586) --- CHANGES.rst | 21 +++++++++++++++++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 23 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 8790e85fc4..1300449e89 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,27 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v4.0.1(2024-04-15) +------------------ +- When an odk token expires is None, deactivate and replace + `PR #2583 ` + [@FrankApiyo] +- Flatten select multiples in repeating sections during CSV import + `PR #2578 ` + [@kelvin-muchiri] + +v4.0.0(2024-04-08) 
+------------------ +- Remove redundant Dockerfile used for development + `PR #2575 ` + [@kelvin-muchiri] +- Add user to organization asynchronously + `PR #2574 ` + [@kelvin-muchiri] +- Upgrade to Django 4.2 + `PR #2572 ` + [@ukanga] + v3.19.0(2024-03-26) ------------------- - Security Remediations diff --git a/onadata/__init__.py b/onadata/__init__.py index 8fc1ffb3e5..bfb9f379c4 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "4.0.0" +__version__ = "4.0.1" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index abb7e495ee..e55af82d42 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 4.0.0 +version = 4.0.1 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 64fd3c4eab79ba00791d3060cad7c2d4cbea3b44 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Fri, 19 Apr 2024 15:01:00 +0300 Subject: [PATCH 195/270] fix API docs not copied when building image (#2589) --- docker-compose.yml | 2 -- docker/onadata-uwsgi/Dockerfile.ubuntu | 5 +---- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index a1b8585c30..f619f154cb 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -3,7 +3,6 @@ services: build: context: . dockerfile: ./docker/onadata-uwsgi/Dockerfile.ubuntu - target: runserver depends_on: - database - cache @@ -19,7 +18,6 @@ services: build: context: . 
dockerfile: ./docker/onadata-uwsgi/Dockerfile.ubuntu - target: runserver depends_on: - database - cache diff --git a/docker/onadata-uwsgi/Dockerfile.ubuntu b/docker/onadata-uwsgi/Dockerfile.ubuntu index 378936166d..f225883897 100644 --- a/docker/onadata-uwsgi/Dockerfile.ubuntu +++ b/docker/onadata-uwsgi/Dockerfile.ubuntu @@ -13,7 +13,7 @@ RUN mkdir -m 0600 ~/.ssh && ssh-keyscan github.com >> ~/.ssh/known_hosts # hadolint ignore=DL3013 RUN --mount=type=ssh if [ -n "$optional_packages" ]; then pip install ${optional_packages} ; fi -FROM ubuntu:focal-20240123 as base +FROM ubuntu:focal-20240123 ARG release_version=v3.18.2 @@ -100,12 +100,9 @@ RUN python3.10 -m pip install --no-cache-dir -U pip && \ python3.10 -m pip install setuptools==65.5.1 && \ python3.10 -m pip install --no-cache-dir pyyaml uwsgitop -FROM base as docs # Compile API Docs RUN make -C docs html -FROM base as runserver - EXPOSE 8000 USER onadata From fc1a1ff8d3fd3ea77e1f3c7aeb34120e999c243e Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Tue, 23 Apr 2024 09:59:11 +0300 Subject: [PATCH 196/270] Add xform to attachment model (#2587) * FA|KM|DU: Add xform to attachment model Co-authored-by: Frankline Apiyo Co-authored-by: Kelvin Muchiri * Fix failing tests for attachment viewset Co-authored-by: Kip * Return empty list on XFormFilter/attachments when no form selector has been specified. 
* Add submitted user to Attachment model * Handle form filter when a single record via pk is requested * add migration to populate attachements xform * fix typo * fix typo * refactor code for optimisation * fix AttributeError: 'dict' object has no attribute 'pk' * fix typo --------- Co-authored-by: Frankline Apiyo Co-authored-by: Kelvin Muchiri Co-authored-by: Kip --- onadata/apps/api/permissions.py | 2 +- .../tests/viewsets/test_attachment_viewset.py | 48 +- .../api/tests/viewsets/test_export_viewset.py | 366 +++++----- .../api/tests/viewsets/test_media_viewset.py | 2 +- .../viewsets/test_merged_xform_viewset.py | 679 +++++++++--------- .../viewsets/test_messaging_stats_viewset.py | 13 +- .../api/tests/viewsets/test_widget_viewset.py | 14 +- onadata/apps/api/viewsets/media_viewset.py | 4 +- .../0013_add_xform_to_logger_attachment.py | 36 + .../0014_populate_attachment_xform.py | 37 + onadata/apps/logger/models/attachment.py | 13 + onadata/apps/logger/models/instance.py | 4 + .../messaging/tests/test_messaging_viewset.py | 371 +++++----- .../viewsets/test_restservicesviewset.py | 8 +- onadata/libs/filters.py | 65 +- onadata/libs/permissions.py | 23 +- .../libs/serializers/attachment_serializer.py | 12 +- onadata/libs/utils/logger_tools.py | 18 +- 18 files changed, 957 insertions(+), 758 deletions(-) create mode 100644 onadata/apps/logger/migrations/0013_add_xform_to_logger_attachment.py create mode 100644 onadata/apps/logger/migrations/0014_populate_attachment_xform.py diff --git a/onadata/apps/api/permissions.py b/onadata/apps/api/permissions.py index efe64ad4cc..07485f9903 100644 --- a/onadata/apps/api/permissions.py +++ b/onadata/apps/api/permissions.py @@ -370,7 +370,7 @@ def has_object_permission(self, request, view, obj): model_cls = XForm user = request.user - return self._has_object_permission(request, model_cls, user, obj.instance.xform) + return self._has_object_permission(request, model_cls, user, obj.xform) class 
ConnectViewsetPermissions(IsAuthenticated): diff --git a/onadata/apps/api/tests/viewsets/test_attachment_viewset.py b/onadata/apps/api/tests/viewsets/test_attachment_viewset.py index 4026d6a500..64d7a9562f 100644 --- a/onadata/apps/api/tests/viewsets/test_attachment_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_attachment_viewset.py @@ -89,10 +89,11 @@ def test_attachment_pagination(self): extension="JPG", name=filename, media_file=media_file, + xform=self.xform, ) # not using pagination params - request = self.factory.get("/", **self.extra) + request = self.factory.get("/", data={"xform": self.xform.pk}, **self.extra) response = self.list_view(request) self.assertNotEqual(response.get("Cache-Control"), None) self.assertEqual(response.status_code, 200) @@ -100,7 +101,9 @@ def test_attachment_pagination(self): self.assertEqual(len(response.data), 2) # valid page and page_size - request = self.factory.get("/", data={"page": 1, "page_size": 1}, **self.extra) + request = self.factory.get( + "/", data={"xform": self.xform.pk, "page": 1, "page_size": 1}, **self.extra + ) response = self.list_view(request) self.assertNotEqual(response.get("Cache-Control"), None) self.assertEqual(response.status_code, 200) @@ -108,12 +111,16 @@ def test_attachment_pagination(self): self.assertEqual(len(response.data), 1) # invalid page type - request = self.factory.get("/", data={"page": "invalid"}, **self.extra) + request = self.factory.get( + "/", data={"xform": self.xform.pk, "page": "invalid"}, **self.extra + ) response = self.list_view(request) self.assertEqual(response.status_code, 404) # invalid page size type - request = self.factory.get("/", data={"page_size": "invalid"}, **self.extra) + request = self.factory.get( + "/", data={"xform": self.xform.pk, "page_size": "invalid"}, **self.extra + ) response = self.list_view(request) self.assertEqual(response.status_code, 200) self.assertTrue(isinstance(response.data, list)) @@ -121,13 +128,17 @@ def 
test_attachment_pagination(self): # invalid page and page_size types request = self.factory.get( - "/", data={"page": "invalid", "page_size": "invalid"}, **self.extra + "/", + data={"xform": self.xform.pk, "page": "invalid", "page_size": "invalid"}, + **self.extra, ) response = self.list_view(request) self.assertEqual(response.status_code, 404) # invalid page size - request = self.factory.get("/", data={"page": 4, "page_size": 1}, **self.extra) + request = self.factory.get( + "/", data={"xform": self.xform.pk, "page": 4, "page_size": 1}, **self.extra + ) response = self.list_view(request) self.assertEqual(response.status_code, 404) @@ -170,7 +181,7 @@ def test_retrieve_and_list_views_with_anonymous_user(self): def test_list_view(self): self._submit_transport_instance_w_attachment() - request = self.factory.get("/", **self.extra) + request = self.factory.get("/", data={"xform": self.xform.pk}, **self.extra) response = self.list_view(request) self.assertNotEqual(response.get("Cache-Control"), None) self.assertEqual(response.status_code, 200) @@ -181,7 +192,7 @@ def test_list_view(self): self.attachment.instance.deleted_at = timezone.now() self.attachment.instance.save() - request = self.factory.get("/", **self.extra) + request = self.factory.get("/", data={"xform": self.xform.pk}, **self.extra) response = self.list_view(request) self.assertTrue(isinstance(response.data, list)) self.assertEqual(len(response.data), 0) @@ -189,7 +200,7 @@ def test_list_view(self): def test_data_list_with_xform_in_delete_async(self): self._submit_transport_instance_w_attachment() - request = self.factory.get("/", **self.extra) + request = self.factory.get("/", data={"xform": self.xform.pk}, **self.extra) response = self.list_view(request) self.assertNotEqual(response.get("Cache-Control"), None) self.assertEqual(response.status_code, 200) @@ -198,7 +209,7 @@ def test_data_list_with_xform_in_delete_async(self): self.xform.deleted_at = timezone.now() self.xform.save() - request = 
self.factory.get("/", **self.extra) + request = self.factory.get("/", data={"xform": self.xform.pk}, **self.extra) response = self.list_view(request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), initial_count - 1) @@ -276,6 +287,7 @@ def test_list_view_filter_by_attachment_type(self): extension="MP4", name=filename, media_file=media_file, + xform=self.xform, ) Attachment.objects.create( @@ -284,6 +296,7 @@ def test_list_view_filter_by_attachment_type(self): extension="PDF", name=filename, media_file=media_file, + xform=self.xform, ) Attachment.objects.create( instance=self.xform.instances.first(), @@ -291,6 +304,7 @@ def test_list_view_filter_by_attachment_type(self): extension="TXT", name=filename, media_file=media_file, + xform=self.xform, ) Attachment.objects.create( instance=self.xform.instances.first(), @@ -298,6 +312,7 @@ def test_list_view_filter_by_attachment_type(self): extension="MP3", name=filename, media_file=media_file, + xform=self.xform, ) Attachment.objects.create( instance=self.xform.instances.first(), @@ -305,12 +320,13 @@ def test_list_view_filter_by_attachment_type(self): extension="GEOJSON", name=geojson_filename, media_file=geojson_media_file, + xform=self.xform, ) - data = {} + data = {"xform": self.xform.pk} request = self.factory.get("/", data, **self.extra) response = self.list_view(request) - self.assertNotEqual(response.get("Cache-Control"), None) self.assertEqual(response.status_code, 200) + self.assertNotEqual(response.get("Cache-Control"), None) self.assertTrue(isinstance(response.data, list)) self.assertEqual(len(response.data), 6) @@ -318,8 +334,8 @@ def test_list_view_filter_by_attachment_type(self): data["type"] = "image" request = self.factory.get("/", data, **self.extra) response = self.list_view(request) - self.assertNotEqual(response.get("Cache-Control"), None) self.assertEqual(response.status_code, 200) + self.assertNotEqual(response.get("Cache-Control"), None) 
self.assertTrue(isinstance(response.data, list)) self.assertEqual(len(response.data), 1) self.assertEqual(response.data[0]["mimetype"], "image/jpeg") @@ -328,8 +344,8 @@ def test_list_view_filter_by_attachment_type(self): data["type"] = "audio" request = self.factory.get("/", data, **self.extra) response = self.list_view(request) - self.assertNotEqual(response.get("Cache-Control"), None) self.assertEqual(response.status_code, 200) + self.assertNotEqual(response.get("Cache-Control"), None) self.assertTrue(isinstance(response.data, list)) self.assertEqual(len(response.data), 1) self.assertEqual(response.data[0]["mimetype"], "audio/mp3") @@ -338,8 +354,8 @@ def test_list_view_filter_by_attachment_type(self): data["type"] = "video" request = self.factory.get("/", data, **self.extra) response = self.list_view(request) - self.assertNotEqual(response.get("Cache-Control"), None) self.assertEqual(response.status_code, 200) + self.assertNotEqual(response.get("Cache-Control"), None) self.assertTrue(isinstance(response.data, list)) self.assertEqual(len(response.data), 1) self.assertEqual(response.data[0]["mimetype"], "video/mp4") @@ -348,8 +364,8 @@ def test_list_view_filter_by_attachment_type(self): data["type"] = "document" request = self.factory.get("/", data, **self.extra) response = self.list_view(request) - self.assertNotEqual(response.get("Cache-Control"), None) self.assertEqual(response.status_code, 200) + self.assertNotEqual(response.get("Cache-Control"), None) self.assertTrue(isinstance(response.data, list)) self.assertEqual(len(response.data), 3) self.assertEqual(response.data[0]["mimetype"], "application/pdf") diff --git a/onadata/apps/api/tests/viewsets/test_export_viewset.py b/onadata/apps/api/tests/viewsets/test_export_viewset.py index 24399b48cc..f9005f5fe6 100644 --- a/onadata/apps/api/tests/viewsets/test_export_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_export_viewset.py @@ -18,8 +18,7 @@ from onadata.apps.main.models import MetaData, UserProfile 
from onadata.apps.main.tests.test_base import TestBase from onadata.apps.viewer.models.export import Export -from onadata.libs.permissions import ( - DataEntryMinorRole, ReadOnlyRole, EditorMinorRole) +from onadata.libs.permissions import DataEntryMinorRole, ReadOnlyRole, EditorMinorRole from onadata.libs.utils.export_tools import generate_export @@ -31,9 +30,8 @@ class TestExportViewSet(TestBase): def setUp(self): super(TestExportViewSet, self).setUp() self.factory = APIRequestFactory() - self.formats = ['csv', 'csvzip', 'kml', 'osm', 'savzip', 'xls', - 'xlsx', 'zip'] - self.view = ExportViewSet.as_view({'get': 'retrieve'}) + self.formats = ["csv", "csvzip", "kml", "osm", "savzip", "xls", "xlsx", "zip"] + self.view = ExportViewSet.as_view({"get": "retrieve"}) def test_export_response(self): """ @@ -42,17 +40,17 @@ def test_export_response(self): self._create_user_and_login() self._publish_transportation_form() temp_dir = settings.MEDIA_ROOT - dummy_export_file = NamedTemporaryFile(suffix='.xlsx', dir=temp_dir) + dummy_export_file = NamedTemporaryFile(suffix=".xlsx", dir=temp_dir) filename = os.path.basename(dummy_export_file.name) filedir = os.path.dirname(dummy_export_file.name) - export = Export.objects.create(xform=self.xform, - filename=filename, - filedir=filedir) + export = Export.objects.create( + xform=self.xform, filename=filename, filedir=filedir + ) export.save() - request = self.factory.get('/export') + request = self.factory.get("/export") force_authenticate(request, user=self.user) response = self.view(request, pk=export.pk) - self.assertIn(filename, response.get('Content-Disposition')) + self.assertIn(filename, response.get("Content-Disposition")) def test_export_formats_present(self): """ @@ -70,7 +68,7 @@ def test_export_non_existent_file(self): self._create_user_and_login() non_existent_pk = 1525266252676 for ext in self.formats: - request = self.factory.get('/export') + request = self.factory.get("/export") force_authenticate(request, 
user=self.user) response = self.view(request, pk=non_existent_pk, format=ext) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -80,8 +78,8 @@ def test_export_list(self): Test ExportViewSet list endpoint. """ self._create_user_and_login() - view = ExportViewSet.as_view({'get': 'list'}) - request = self.factory.get('/export') + view = ExportViewSet.as_view({"get": "list"}) + request = self.factory.get("/export") force_authenticate(request, user=self.user) response = view(request) self.assertFalse(bool(response.data)) @@ -96,38 +94,46 @@ def test_export_list_public(self): self.xform.shared_data = True self.xform.save() temp_dir = settings.MEDIA_ROOT - dummy_export_file = NamedTemporaryFile(suffix='.xlsx', dir=temp_dir) + dummy_export_file = NamedTemporaryFile(suffix=".xlsx", dir=temp_dir) filename = os.path.basename(dummy_export_file.name) filedir = os.path.dirname(dummy_export_file.name) - export = Export.objects.create(xform=self.xform, - filename=filename, - filedir=filedir) + export = Export.objects.create( + xform=self.xform, filename=filename, filedir=filedir + ) export.save() - view = ExportViewSet.as_view({'get': 'list'}) - request = self.factory.get('/export') + view = ExportViewSet.as_view({"get": "list"}) + + # Should be empty list when no xform filter is provided + request = self.factory.get("/export") force_authenticate(request, user=self.user) response = view(request) - self.assertTrue(bool(response.data)) + self.assertEqual(response.data, []) + + # Should not be empty list when xform filter is provided + request = self.factory.get("/export", data={"xform": self.xform.pk}) + force_authenticate(request, user=self.user) + response = view(request) + self.assertNotEqual(response.data, []) self.assertEqual(status.HTTP_200_OK, response.status_code) def test_export_list_public_form(self): """ Test ExportViewSet list endpoint for a single public form. 
""" - user_mosh = self._create_user('mosh', 'mosh') + user_mosh = self._create_user("mosh", "mosh") self._publish_transportation_form() self.xform.shared_data = True self.xform.save() temp_dir = settings.MEDIA_ROOT - dummy_export_file = NamedTemporaryFile(suffix='.xlsx', dir=temp_dir) + dummy_export_file = NamedTemporaryFile(suffix=".xlsx", dir=temp_dir) filename = os.path.basename(dummy_export_file.name) filedir = os.path.dirname(dummy_export_file.name) - export = Export.objects.create(xform=self.xform, - filename=filename, - filedir=filedir) + export = Export.objects.create( + xform=self.xform, filename=filename, filedir=filedir + ) export.save() - view = ExportViewSet.as_view({'get': 'list'}) - request = self.factory.get('/export', {'xform': self.xform.pk}) + view = ExportViewSet.as_view({"get": "list"}) + request = self.factory.get("/export", {"xform": self.xform.pk}) force_authenticate(request, user=user_mosh) response = view(request) self.assertTrue(bool(response.data)) @@ -141,11 +147,10 @@ def test_export_public_project(self): self._publish_transportation_form() self.xform.shared_data = True self.xform.save() - export = generate_export(Export.CSV_EXPORT, - self.xform, - None, - {"extension": "csv"}) - request = self.factory.get('/export') + export = generate_export( + Export.CSV_EXPORT, self.xform, None, {"extension": "csv"} + ) + request = self.factory.get("/export") response = self.view(request, pk=export.pk) self.assertEqual(status.HTTP_200_OK, response.status_code) @@ -158,11 +163,10 @@ def test_export_public_authenticated(self): self._publish_transportation_form() self.xform.shared_data = True self.xform.save() - export = generate_export(Export.CSV_EXPORT, - self.xform, - None, - {"extension": "csv"}) - request = self.factory.get('/export') + export = generate_export( + Export.CSV_EXPORT, self.xform, None, {"extension": "csv"} + ) + request = self.factory.get("/export") force_authenticate(request, user=self.user) response = self.view(request, 
pk=export.pk) self.assertEqual(status.HTTP_200_OK, response.status_code) @@ -177,23 +181,21 @@ def test_export_public_not_owner_authenticated(self): self.xform.shared_data = True self.xform.shared = True self.xform.save() - test_user = self._create_user('not_bob', 'pass') - request = self.factory.get('/export') + test_user = self._create_user("not_bob", "pass") + request = self.factory.get("/export") force_authenticate(request, user=test_user) # csv export - export = generate_export(Export.CSV_EXPORT, - self.xform, - None, - {"extension": "csv"}) - export.options = {"query": {"_submitted_by": 'not_bob'}} + export = generate_export( + Export.CSV_EXPORT, self.xform, None, {"extension": "csv"} + ) + export.options = {"query": {"_submitted_by": "not_bob"}} export.save() response = self.view(request, pk=export.pk) self.assertEqual(status.HTTP_200_OK, response.status_code) # sav export - export = generate_export(Export.SAV_ZIP_EXPORT, - self.xform, - None, - {"extension": "zip"}) + export = generate_export( + Export.SAV_ZIP_EXPORT, self.xform, None, {"extension": "zip"} + ) response = self.view(request, pk=export.pk) self.assertEqual(status.HTTP_200_OK, response.status_code) @@ -206,11 +208,10 @@ def test_export_non_public_export(self): self._publish_transportation_form() self.xform.shared_data = False self.xform.save() - export = generate_export(Export.CSV_EXPORT, - self.xform, - None, - {"extension": "csv"}) - request = self.factory.get('/export') + export = generate_export( + Export.CSV_EXPORT, self.xform, None, {"extension": "csv"} + ) + request = self.factory.get("/export") response = self.view(request, pk=export.pk) self.assertEqual(status.HTTP_404_NOT_FOUND, response.status_code) @@ -221,19 +222,19 @@ def test_export_list_on_user(self): self._create_user_and_login() self._publish_transportation_form() temp_dir = settings.MEDIA_ROOT - dummy_export_file = NamedTemporaryFile(suffix='.xlsx', dir=temp_dir) + dummy_export_file = NamedTemporaryFile(suffix=".xlsx", 
dir=temp_dir) filename = os.path.basename(dummy_export_file.name) filedir = os.path.dirname(dummy_export_file.name) - exports = [Export.objects.create(xform=self.xform, - filename=filename, - filedir=filedir)] + exports = [ + Export.objects.create(xform=self.xform, filename=filename, filedir=filedir) + ] exports[0].save() - view = ExportViewSet.as_view({'get': 'list'}) - request = self.factory.get('/export', data={'xform': self.xform.id}) + view = ExportViewSet.as_view({"get": "list"}) + request = self.factory.get("/export", data={"xform": self.xform.id}) force_authenticate(request, user=self.user) response = view(request) self.assertEqual(len(exports), len(response.data)) - self.assertEqual(exports[0].id, response.data[0].get('id')) + self.assertEqual(exports[0].id, response.data[0].get("id")) self.assertEqual(status.HTTP_200_OK, response.status_code) def test_export_list_on_with_different_users(self): @@ -243,16 +244,16 @@ def test_export_list_on_with_different_users(self): self._create_user_and_login() self._publish_transportation_form() temp_dir = settings.MEDIA_ROOT - dummy_export_file = NamedTemporaryFile(suffix='.xlsx', dir=temp_dir) + dummy_export_file = NamedTemporaryFile(suffix=".xlsx", dir=temp_dir) filename = os.path.basename(dummy_export_file.name) filedir = os.path.dirname(dummy_export_file.name) - export = Export.objects.create(xform=self.xform, - filename=filename, - filedir=filedir) + export = Export.objects.create( + xform=self.xform, filename=filename, filedir=filedir + ) export.save() - view = ExportViewSet.as_view({'get': 'list'}) - request = self.factory.get('/export', data={'xform': self.xform.id}) - self._create_user_and_login(username='mary', password='password1') + view = ExportViewSet.as_view({"get": "list"}) + request = self.factory.get("/export", data={"xform": self.xform.id}) + self._create_user_and_login(username="mary", password="password1") force_authenticate(request, user=self.user) response = view(request) 
self.assertFalse(bool(response.data)) @@ -277,17 +278,17 @@ def test_export_delete(self): bob = self.user export = Export.objects.create(xform=xform) export.save() - view = ExportViewSet.as_view({'delete': 'destroy'}) + view = ExportViewSet.as_view({"delete": "destroy"}) # mary has no access hence cannot delete - self._create_user_and_login(username='mary', password='password1') - request = self.factory.delete('/export') + self._create_user_and_login(username="mary", password="password1") + request = self.factory.delete("/export") force_authenticate(request, user=self.user) response = view(request, pk=export.pk) self.assertEqual(status.HTTP_404_NOT_FOUND, response.status_code) # bob has access hence can delete - request = self.factory.delete('/export') + request = self.factory.delete("/export") force_authenticate(request, user=bob) response = view(request, pk=export.pk) self.assertEqual(status.HTTP_204_NO_CONTENT, response.status_code) @@ -302,16 +303,24 @@ def test_export_list_with_meta_perms(self): for survey in self.surveys: self._make_submission( os.path.join( - settings.PROJECT_ROOT, 'apps', - 'main', 'tests', 'fixtures', 'transportation', - 'instances', survey, survey + '.xml'), - forced_submission_time=parse_datetime( - '2013-02-18 15:54:01Z')) - - alice = self._create_user('alice', 'alice', True) - - MetaData.xform_meta_permission(self.xform, - data_value="editor|dataentry-minor") + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "transportation", + "instances", + survey, + survey + ".xml", + ), + forced_submission_time=parse_datetime("2013-02-18 15:54:01Z"), + ) + + alice = self._create_user("alice", "alice", True) + + MetaData.xform_meta_permission( + self.xform, data_value="editor|dataentry-minor" + ) DataEntryMinorRole.add(alice, self.xform) @@ -319,36 +328,30 @@ def test_export_list_with_meta_perms(self): i.user = alice i.save() - view = XFormViewSet.as_view({ - 'get': 'retrieve' - }) + view = XFormViewSet.as_view({"get": 
"retrieve"}) - alices_extra = { - 'HTTP_AUTHORIZATION': 'Token %s' % alice.auth_token.key - } + alices_extra = {"HTTP_AUTHORIZATION": "Token %s" % alice.auth_token.key} # Alice creates an export with her own submissions - request = self.factory.get('/', **alices_extra) - response = view(request, pk=self.xform.pk, format='csv') + request = self.factory.get("/", **alices_extra) + response = view(request, pk=self.xform.pk, format="csv") self.assertEqual(response.status_code, 200) exports = Export.objects.filter(xform=self.xform) - view = ExportViewSet.as_view({'get': 'list'}) - request = self.factory.get('/export', - data={'xform': self.xform.id}) + view = ExportViewSet.as_view({"get": "list"}) + request = self.factory.get("/export", data={"xform": self.xform.id}) force_authenticate(request, user=alice) response = view(request) self.assertEqual(len(exports), len(response.data)) # Mary should not have access to the export with Alice's # submissions. - self._create_user_and_login(username='mary', password='password1') - self.assertEqual(self.user.username, 'mary') + self._create_user_and_login(username="mary", password="password1") + self.assertEqual(self.user.username, "mary") # Mary should only view their own submissions. 
DataEntryMinorRole.add(self.user, self.xform) - request = self.factory.get('/export', - data={'xform': self.xform.id}) + request = self.factory.get("/export", data={"xform": self.xform.id}) force_authenticate(request, user=self.user) response = view(request) self.assertFalse(bool(response.data), response.data) @@ -364,16 +367,24 @@ def test_export_async_with_meta_perms(self): for survey in self.surveys: self._make_submission( os.path.join( - settings.PROJECT_ROOT, 'apps', - 'main', 'tests', 'fixtures', 'transportation', - 'instances', survey, survey + '.xml'), - forced_submission_time=parse_datetime( - '2013-02-18 15:54:01Z')) - - alice = self._create_user('alice', 'alice', True) - - MetaData.xform_meta_permission(self.xform, - data_value="editor|dataentry-minor") + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "transportation", + "instances", + survey, + survey + ".xml", + ), + forced_submission_time=parse_datetime("2013-02-18 15:54:01Z"), + ) + + alice = self._create_user("alice", "alice", True) + + MetaData.xform_meta_permission( + self.xform, data_value="editor|dataentry-minor" + ) DataEntryMinorRole.add(alice, self.xform) @@ -381,37 +392,34 @@ def test_export_async_with_meta_perms(self): i.user = alice i.save() - view = XFormViewSet.as_view({ - 'get': 'export_async', - }) + view = XFormViewSet.as_view( + { + "get": "export_async", + } + ) - alices_extra = { - 'HTTP_AUTHORIZATION': 'Token %s' % alice.auth_token.key - } + alices_extra = {"HTTP_AUTHORIZATION": "Token %s" % alice.auth_token.key} # Alice creates an export with her own submissions - request = self.factory.get('/', data={"format": 'csv'}, - **alices_extra) + request = self.factory.get("/", data={"format": "csv"}, **alices_extra) response = view(request, pk=self.xform.pk) self.assertEqual(response.status_code, 202) exports = Export.objects.filter(xform=self.xform) - view = ExportViewSet.as_view({'get': 'list'}) - request = self.factory.get('/export', - data={'xform': 
self.xform.id}) + view = ExportViewSet.as_view({"get": "list"}) + request = self.factory.get("/export", data={"xform": self.xform.id}) force_authenticate(request, user=alice) response = view(request) self.assertEqual(len(exports), len(response.data)) # Mary should not have access to the export with Alice's # submissions. - self._create_user_and_login(username='mary', password='password1') - self.assertEqual(self.user.username, 'mary') + self._create_user_and_login(username="mary", password="password1") + self.assertEqual(self.user.username, "mary") # Mary should only view their own submissions. DataEntryMinorRole.add(self.user, self.xform) - request = self.factory.get('/export', - data={'xform': self.xform.id}) + request = self.factory.get("/export", data={"xform": self.xform.id}) force_authenticate(request, user=self.user) response = view(request) self.assertFalse(bool(response.data), response.data) @@ -427,37 +435,43 @@ def test_export_readonly_with_meta_perms(self): for survey in self.surveys: self._make_submission( os.path.join( - settings.PROJECT_ROOT, 'apps', - 'main', 'tests', 'fixtures', 'transportation', - 'instances', survey, survey + '.xml'), - forced_submission_time=parse_datetime( - '2013-02-18 15:54:01Z')) - - alice = self._create_user('alice', 'alice', True) - - MetaData.xform_meta_permission(self.xform, - data_value="editor|dataentry-minor") + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "transportation", + "instances", + survey, + survey + ".xml", + ), + forced_submission_time=parse_datetime("2013-02-18 15:54:01Z"), + ) + + alice = self._create_user("alice", "alice", True) + + MetaData.xform_meta_permission( + self.xform, data_value="editor|dataentry-minor" + ) ReadOnlyRole.add(alice, self.xform) - export_view = XFormViewSet.as_view({ - 'get': 'export_async', - }) + export_view = XFormViewSet.as_view( + { + "get": "export_async", + } + ) - alices_extra = { - 'HTTP_AUTHORIZATION': 'Token %s' % alice.auth_token.key - } + 
alices_extra = {"HTTP_AUTHORIZATION": "Token %s" % alice.auth_token.key} # Alice creates an export with her own submissions - request = self.factory.get('/', data={"format": 'csv'}, - **alices_extra) + request = self.factory.get("/", data={"format": "csv"}, **alices_extra) response = export_view(request, pk=self.xform.pk) self.assertEqual(response.status_code, 202) exports = Export.objects.filter(xform=self.xform) - view = ExportViewSet.as_view({'get': 'list'}) - request = self.factory.get('/export', - data={'xform': self.xform.id}) + view = ExportViewSet.as_view({"get": "list"}) + request = self.factory.get("/export", data={"xform": self.xform.id}) force_authenticate(request, user=alice) response = view(request) self.assertEqual(len(exports), len(response.data)) @@ -465,13 +479,12 @@ def test_export_readonly_with_meta_perms(self): # Mary should not have access to the export with Alice's # submissions. - self._create_user_and_login(username='mary', password='password1') - self.assertEqual(self.user.username, 'mary') + self._create_user_and_login(username="mary", password="password1") + self.assertEqual(self.user.username, "mary") # Mary should only view their own submissions. 
DataEntryMinorRole.add(self.user, self.xform) - request = self.factory.get('/export', - data={'xform': self.xform.id}) + request = self.factory.get("/export", data={"xform": self.xform.id}) force_authenticate(request, user=self.user) response = view(request) self.assertFalse(bool(response.data), response.data) @@ -483,24 +496,21 @@ def test_export_readonly_with_meta_perms(self): i.save() # Mary creates an export with her own submissions - request = self.factory.get('/', data={"format": 'csv'}) + request = self.factory.get("/", data={"format": "csv"}) force_authenticate(request, user=self.user) response = export_view(request, pk=self.xform.pk) self.assertEqual(response.status_code, 202) - request = self.factory.get('/export', - data={'xform': self.xform.id}) + request = self.factory.get("/export", data={"xform": self.xform.id}) force_authenticate(request, user=self.user) response = view(request) self.assertTrue(bool(response.data), response.data) self.assertEqual(status.HTTP_200_OK, response.status_code) self.assertEqual(len(response.data), 1) - self.assertEqual( - Export.objects.filter(xform=self.xform).count(), 2) + self.assertEqual(Export.objects.filter(xform=self.xform).count(), 2) # Alice does not have access to the submitter only export - request = self.factory.get('/export', - data={'xform': self.xform.id}) + request = self.factory.get("/export", data={"xform": self.xform.id}) force_authenticate(request, user=alice) response = view(request) self.assertEqual(len(exports), len(response.data)) @@ -513,18 +523,16 @@ def test_export_retrieval_authentication(self): self._create_user_and_login() self._publish_transportation_form() temp_dir = settings.MEDIA_ROOT - dummy_export_file = NamedTemporaryFile(suffix='.xlsx', dir=temp_dir) + dummy_export_file = NamedTemporaryFile(suffix=".xlsx", dir=temp_dir) filename = os.path.basename(dummy_export_file.name) filedir = os.path.dirname(dummy_export_file.name) - export = Export.objects.create(xform=self.xform, - 
filename=filename, - filedir=filedir) + export = Export.objects.create( + xform=self.xform, filename=filename, filedir=filedir + ) export.save() - extra = { - 'HTTP_AUTHORIZATION': f'Token {self.user.auth_token.key}' - } + extra = {"HTTP_AUTHORIZATION": f"Token {self.user.auth_token.key}"} - request = self.factory.get('/export', **extra) + request = self.factory.get("/export", **extra) response = self.view(request, pk=export.pk) self.assertEqual(response.status_code, 200) @@ -537,41 +545,39 @@ def test_export_failure_reason_returned(self): Export.objects.create( xform=self.xform, internal_status=Export.FAILED, - error_message="Something unexpected happened") + error_message="Something unexpected happened", + ) extra = { - 'HTTP_AUTHORIZATION': f'Token {self.user.auth_token.key}', + "HTTP_AUTHORIZATION": f"Token {self.user.auth_token.key}", } - view = ExportViewSet.as_view({'get': 'list'}) - request = self.factory.get( - '/export', {'xform': self.xform.pk}, **extra) + view = ExportViewSet.as_view({"get": "list"}) + request = self.factory.get("/export", {"xform": self.xform.pk}, **extra) force_authenticate(request) response = view(request) self.assertEqual(response.status_code, 200) - self.assertIn('error_message', response.data[0].keys()) + self.assertIn("error_message", response.data[0].keys()) self.assertEqual( - response.data[0]['error_message'], - 'Something unexpected happened') + response.data[0]["error_message"], "Something unexpected happened" + ) def test_export_are_downloadable_to_all_users_when_public_form(self): self._create_user_and_login() self._publish_transportation_form() temp_dir = settings.MEDIA_ROOT - dummy_export_file = NamedTemporaryFile(suffix='.xlsx', dir=temp_dir) + dummy_export_file = NamedTemporaryFile(suffix=".xlsx", dir=temp_dir) filename = os.path.basename(dummy_export_file.name) filedir = os.path.dirname(dummy_export_file.name) - export = Export.objects.create(xform=self.xform, - filename=filename, - filedir=filedir) + export = 
Export.objects.create( + xform=self.xform, filename=filename, filedir=filedir + ) export.save() - user_alice = self._create_user('alice', 'alice') + user_alice = self._create_user("alice", "alice") # create user profile and set require_auth to false for tests _ = UserProfile.objects.get_or_create(user=user_alice) - alices_extra = { - 'HTTP_AUTHORIZATION': 'Token %s' % user_alice.auth_token.key - } + alices_extra = {"HTTP_AUTHORIZATION": "Token %s" % user_alice.auth_token.key} EditorMinorRole.add(user_alice, self.xform) # Form permissions are ignored when downloading Export; @@ -584,11 +590,11 @@ def test_export_are_downloadable_to_all_users_when_public_form(self): self.xform.save() # Anonymous user - request = self.factory.get('/export') + request = self.factory.get("/export") response = self.view(request, pk=export.pk) self.assertEqual(response.status_code, 200) # Alice user; With editor role - request = self.factory.get('/export', **alices_extra) + request = self.factory.get("/export", **alices_extra) response = self.view(request, pk=export.pk) self.assertEqual(response.status_code, 200) diff --git a/onadata/apps/api/tests/viewsets/test_media_viewset.py b/onadata/apps/api/tests/viewsets/test_media_viewset.py index 37f3623e38..f36be593ab 100644 --- a/onadata/apps/api/tests/viewsets/test_media_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_media_viewset.py @@ -34,7 +34,7 @@ class TestMediaViewSet(TestAbstractViewSet, TestBase): """ def setUp(self): - super(TestMediaViewSet, self).setUp() + super().setUp() self.retrieve_view = MediaViewSet.as_view({"get": "retrieve"}) self._publish_xls_form_to_project() diff --git a/onadata/apps/api/tests/viewsets/test_merged_xform_viewset.py b/onadata/apps/api/tests/viewsets/test_merged_xform_viewset.py index b5e2607405..05b2441714 100644 --- a/onadata/apps/api/tests/viewsets/test_merged_xform_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_merged_xform_viewset.py @@ -13,8 +13,7 @@ from django.conf import settings from 
django.core.files.base import File -from onadata.apps.api.tests.viewsets.test_abstract_viewset import \ - TestAbstractViewSet +from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet from onadata.apps.api.viewsets.charts_viewset import ChartsViewSet from onadata.apps.api.viewsets.attachment_viewset import AttachmentViewSet from onadata.apps.api.viewsets.data_viewset import DataViewSet @@ -27,8 +26,7 @@ from onadata.apps.logger.models.instance import FormIsMergedDatasetError from onadata.apps.logger.models.open_data import get_or_create_opendata from onadata.apps.restservice.models import RestService -from onadata.apps.restservice.viewsets.restservices_viewset import \ - RestServicesViewSet +from onadata.apps.restservice.viewsets.restservices_viewset import RestServicesViewSet from onadata.libs.utils.export_tools import get_osm_data_kwargs from onadata.libs.utils.user_auth import get_user_default_project from onadata.libs.serializers.attachment_serializer import AttachmentSerializer @@ -73,22 +71,20 @@ def streaming_data(response): """ Iterates through a streaming response to return a json list object """ - return json.loads(u''.join( - [i.decode('utf-8') for i in response.streaming_content])) + return json.loads("".join([i.decode("utf-8") for i in response.streaming_content])) def _add_attachments_to_instances(instance): attachment_file_path = os.path.join( - settings.PROJECT_ROOT, - "libs", - "tests", - "utils", - "fixtures", - "test-image.png" + settings.PROJECT_ROOT, "libs", "tests", "utils", "fixtures", "test-image.png" ) with open(attachment_file_path, "rb") as file: - Attachment.objects.create(instance=instance, media_file=File( - file, attachment_file_path)) + Attachment.objects.create( + instance=instance, + media_file=File(file, attachment_file_path), + xform=instance.xform, + user=instance.user, + ) def _make_submissions_merged_datasets(merged_xform): @@ -107,51 +103,51 @@ class TestMergedXFormViewSet(TestAbstractViewSet): 
"""Test merged dataset functionality.""" def _create_merged_dataset(self, geo=False): - view = MergedXFormViewSet.as_view({ - 'post': 'create', - }) + view = MergedXFormViewSet.as_view( + { + "post": "create", + } + ) # pylint: disable=attribute-defined-outside-init self.project = get_user_default_project(self.user) - xform1 = self._publish_markdown(MD, self.user, id_string='a') - xform2 = self._publish_markdown(MD, self.user, id_string='b') + xform1 = self._publish_markdown(MD, self.user, id_string="a") + xform2 = self._publish_markdown(MD, self.user, id_string="b") if geo: xform2.instances_with_geopoints = True - xform2.save(update_fields=['instances_with_geopoints']) + xform2.save(update_fields=["instances_with_geopoints"]) data = { - 'xforms': [ + "xforms": [ "http://testserver/api/v1/forms/%s" % xform1.pk, "http://testserver/api/v1/forms/%s" % xform2.pk, ], - 'name': - 'Merged Dataset', - 'project': - f"http://testserver/api/v1/projects/{self.project.pk}", + "name": "Merged Dataset", + "project": f"http://testserver/api/v1/projects/{self.project.pk}", } # anonymous user - request = self.factory.post('/', data=data) + request = self.factory.post("/", data=data) response = view(request) self.assertEqual(response.status_code, 401) - request = self.factory.post('/', data=data, **self.extra) + request = self.factory.post("/", data=data, **self.extra) response = view(request) self.assertEqual(response.status_code, 201) - self.assertIn('id', response.data) - self.assertIn('title', response.data) - self.assertIn('xforms', response.data) + self.assertIn("id", response.data) + self.assertIn("title", response.data) + self.assertIn("xforms", response.data) expected_xforms_data = { - 'id': xform1.pk, - 'title': xform1.title, - 'id_string': xform1.id_string, - 'url': "http://testserver/api/v1/forms/%s" % xform1.pk, - 'num_of_submissions': xform1.num_of_submissions, - 'owner': xform1.user.username, - 'project_id': self.project.pk, - 'project_name': self.project.name + "id": 
xform1.pk, + "title": xform1.title, + "id_string": xform1.id_string, + "url": "http://testserver/api/v1/forms/%s" % xform1.pk, + "num_of_submissions": xform1.num_of_submissions, + "owner": xform1.user.username, + "project_id": self.project.pk, + "project_name": self.project.name, } - self.assertEqual(response.data['xforms'][0], expected_xforms_data) - self.assertIsNotNone(response.data['uuid']) - self.assertEqual(len(response.data['uuid']), 32) + self.assertEqual(response.data["xforms"][0], expected_xforms_data) + self.assertIsNotNone(response.data["uuid"]) + self.assertEqual(len(response.data["uuid"]), 32) return response.data @@ -161,10 +157,12 @@ def test_create_merged_dataset(self): def test_merged_datasets_list(self): """Test list endpoint of a merged dataset""" - view = MergedXFormViewSet.as_view({ - 'get': 'list', - }) - request = self.factory.get('/') + view = MergedXFormViewSet.as_view( + { + "get": "list", + } + ) + request = self.factory.get("/") # Empty list when there are no merged datasets response = view(request) @@ -182,31 +180,28 @@ def test_merged_datasets_list(self): self.assertEqual([], response.data) # A list containing the merged datasets for user bob - request = self.factory.get('/', **self.extra) + request = self.factory.get("/", **self.extra) response = view(request) self.assertEqual(response.status_code, 200) self.assertIsInstance(response.data, list) self.assertIn(merged_dataset, response.data) # merged dataset included in api/forms endpoint - request = self.factory.get('/', **self.extra) - view = XFormViewSet.as_view({'get': 'list'}) + request = self.factory.get("/", **self.extra) + view = XFormViewSet.as_view({"get": "list"}) response = view(request) self.assertEqual(response.status_code, 200) self.assertIsInstance(response.data, list) self.assertEqual(len(response.data), 3) - self.assertIn(merged_dataset['id'], - [d['formid'] for d in response.data]) - data = [ - _ for _ in response.data if _['formid'] == merged_dataset['id'] - ][0] - 
self.assertIn('is_merged_dataset', data) - self.assertTrue(data['is_merged_dataset']) + self.assertIn(merged_dataset["id"], [d["formid"] for d in response.data]) + data = [_ for _ in response.data if _["formid"] == merged_dataset["id"]][0] + self.assertIn("is_merged_dataset", data) + self.assertTrue(data["is_merged_dataset"]) def test_merged_datasets_retrieve(self): """Test retrieving a specific merged dataset""" merged_dataset = self._create_merged_dataset(geo=True) - merged_xform = MergedXForm.objects.get(pk=merged_dataset['id']) + merged_xform = MergedXForm.objects.get(pk=merged_dataset["id"]) # make submission to form b form_b = merged_xform.xforms.all()[1] @@ -216,59 +211,63 @@ def test_merged_datasets_retrieve(self): form_b.refresh_from_db() form_b.last_submission_time = instance.date_created form_b.save() - view = MergedXFormViewSet.as_view({'get': 'retrieve'}) - request = self.factory.get('/') + view = MergedXFormViewSet.as_view({"get": "retrieve"}) + request = self.factory.get("/") # status_code is 404 when the pk doesn't exist - response = view(request, pk=(1000 * merged_dataset['id'])) + response = view(request, pk=(1000 * merged_dataset["id"])) self.assertEqual(response.status_code, 404) # status_code is 404 when: pk exists, user is not authenticated - response = view(request, pk=merged_dataset['id']) + response = view(request, pk=merged_dataset["id"]) self.assertEqual(response.status_code, 404) # status_code is 200 when: pk exists, user is authenticated - request = self.factory.get('/', **self.extra) - response = view(request, pk=merged_dataset['id']) + request = self.factory.get("/", **self.extra) + response = view(request, pk=merged_dataset["id"]) self.assertEqual(response.status_code, 200) # data has expected fields - self.assertIn('id', response.data) - self.assertIn('title', response.data) - self.assertIn('xforms', response.data) - self.assertEqual(response.data['num_of_submissions'], 1) - self.assertEqual(response.data['last_submission_time'], - 
form_b.last_submission_time.isoformat()) + self.assertIn("id", response.data) + self.assertIn("title", response.data) + self.assertIn("xforms", response.data) + self.assertEqual(response.data["num_of_submissions"], 1) + self.assertEqual( + response.data["last_submission_time"], + form_b.last_submission_time.isoformat(), + ) # merged dataset should be available at api/forms/[pk] endpoint - request = self.factory.get('/', **self.extra) - view = XFormViewSet.as_view({'get': 'retrieve'}) - response = view(request, pk=merged_dataset['id']) - self.assertEqual(response.status_code, 200) - self.assertEqual(merged_dataset['id'], response.data['formid']) - self.assertIn('is_merged_dataset', response.data) - self.assertTrue(response.data['is_merged_dataset']) - self.assertTrue(response.data['instances_with_geopoints']) - self.assertEqual(response.data['num_of_submissions'], 1) - self.assertEqual(response.data['last_submission_time'], - form_b.last_submission_time.isoformat()) + request = self.factory.get("/", **self.extra) + view = XFormViewSet.as_view({"get": "retrieve"}) + response = view(request, pk=merged_dataset["id"]) + self.assertEqual(response.status_code, 200) + self.assertEqual(merged_dataset["id"], response.data["formid"]) + self.assertIn("is_merged_dataset", response.data) + self.assertTrue(response.data["is_merged_dataset"]) + self.assertTrue(response.data["instances_with_geopoints"]) + self.assertEqual(response.data["num_of_submissions"], 1) + self.assertEqual( + response.data["last_submission_time"], + form_b.last_submission_time.isoformat(), + ) def test_merged_datasets_form_json(self): """Test retrieving the XLSForm JSON of a merged dataset""" # create a merged dataset merged_dataset = self._create_merged_dataset() - view = MergedXFormViewSet.as_view({'get': 'form'}) - request = self.factory.get('/', **self.extra) - response = view(request, pk=merged_dataset['id'], format='json') + view = MergedXFormViewSet.as_view({"get": "form"}) + request = 
self.factory.get("/", **self.extra) + response = view(request, pk=merged_dataset["id"], format="json") self.assertEqual(response.status_code, 200) response.render() - self.assertEqual('application/json', response['Content-Type']) + self.assertEqual("application/json", response["Content-Type"]) data = json.loads(response.content) self.assertIsInstance(data, dict) - for key in ['children', 'id_string', 'name', 'default_language']: + for key in ["children", "id_string", "name", "default_language"]: self.assertIn(key, data) def test_merged_datasets_form_xml(self): @@ -276,92 +275,102 @@ def test_merged_datasets_form_xml(self): # create a merged dataset merged_dataset = self._create_merged_dataset() - view = MergedXFormViewSet.as_view({'get': 'form'}) - request = self.factory.get('/', **self.extra) - response = view(request, pk=merged_dataset['id'], format='xml') + view = MergedXFormViewSet.as_view({"get": "form"}) + request = self.factory.get("/", **self.extra) + response = view(request, pk=merged_dataset["id"], format="xml") self.assertEqual(response.status_code, 200) response.render() - self.assertEqual('text/xml; charset=utf-8', response['Content-Type']) + self.assertEqual("text/xml; charset=utf-8", response["Content-Type"]) def test_merged_datasets_data(self): """Test retrieving data of a merged dataset""" merged_dataset = self._create_merged_dataset() - request = self.factory.get('/', **self.extra) - view = MergedXFormViewSet.as_view({'get': 'data'}) - merged_xform = MergedXForm.objects.get(pk=merged_dataset['id']) - detail_view = MergedXFormViewSet.as_view({ - 'get': 'retrieve', - }) - xform_detail_view = XFormViewSet.as_view({ - 'get': 'retrieve', - }) - - response = view(request, pk=merged_dataset['id']) + request = self.factory.get("/", **self.extra) + view = MergedXFormViewSet.as_view({"get": "data"}) + merged_xform = MergedXForm.objects.get(pk=merged_dataset["id"]) + detail_view = MergedXFormViewSet.as_view( + { + "get": "retrieve", + } + ) + 
xform_detail_view = XFormViewSet.as_view( + { + "get": "retrieve", + } + ) + + response = view(request, pk=merged_dataset["id"]) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 0) # check num_of_submissions - response = detail_view(request, pk=merged_dataset['id']) + response = detail_view(request, pk=merged_dataset["id"]) self.assertEqual(response.status_code, 200) - self.assertEqual(response.data['num_of_submissions'], 0) + self.assertEqual(response.data["num_of_submissions"], 0) # make submission to form a form_a = merged_xform.xforms.all()[0] xml = 'orange' Instance(xform=form_a, xml=xml).save() - response = view(request, pk=merged_dataset['id']) + response = view(request, pk=merged_dataset["id"]) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 1) - fruit = [d['fruit'] for d in response.data] - expected_fruit = ['orange'] + fruit = [d["fruit"] for d in response.data] + expected_fruit = ["orange"] self.assertEqual(fruit, expected_fruit) # check num_of_submissions - response = detail_view(request, pk=merged_dataset['id']) + response = detail_view(request, pk=merged_dataset["id"]) self.assertEqual(response.status_code, 200) - self.assertEqual(response.data['num_of_submissions'], 1) + self.assertEqual(response.data["num_of_submissions"], 1) # make submission to form b form_b = merged_xform.xforms.all()[1] xml = 'mango' last_submission = Instance(xform=form_b, xml=xml) last_submission.save() - response = view(request, pk=merged_dataset['id']) + response = view(request, pk=merged_dataset["id"]) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 2) - fruit = [d['fruit'] for d in response.data] - expected_fruit = ['orange', 'mango'] + fruit = [d["fruit"] for d in response.data] + expected_fruit = ["orange", "mango"] self.assertEqual(fruit, expected_fruit) # check num_of_submissions /merged-datasets/[pk] - response = detail_view(request, pk=merged_dataset['id']) + response = 
detail_view(request, pk=merged_dataset["id"]) self.assertEqual(response.status_code, 200) - self.assertEqual(response.data['num_of_submissions'], 2) + self.assertEqual(response.data["num_of_submissions"], 2) # check last_submission_time - self.assertEqual(response.data['last_submission_time'], - last_submission.date_created.isoformat()) + self.assertEqual( + response.data["last_submission_time"], + last_submission.date_created.isoformat(), + ) # check num_of_submissions /forms/[pk] - response = xform_detail_view(request, pk=merged_dataset['id']) + response = xform_detail_view(request, pk=merged_dataset["id"]) self.assertEqual(response.status_code, 200) - self.assertEqual(response.data['num_of_submissions'], 2) + self.assertEqual(response.data["num_of_submissions"], 2) # check last_submission_time - self.assertEqual(response.data['last_submission_time'], - last_submission.date_created.isoformat()) + self.assertEqual( + response.data["last_submission_time"], + last_submission.date_created.isoformat(), + ) def test_md_data_viewset(self): """Test retrieving data of a merged dataset at the /data endpoint""" merged_dataset = self._create_merged_dataset() - merged_xform = MergedXForm.objects.get(pk=merged_dataset['id']) - request = self.factory.get('/', **self.extra) - data_view = DataViewSet.as_view({ - 'get': 'list', - }) + merged_xform = MergedXForm.objects.get(pk=merged_dataset["id"]) + request = self.factory.get("/", **self.extra) + data_view = DataViewSet.as_view( + { + "get": "list", + } + ) # make submission to form a form_a = merged_xform.xforms.all()[0] @@ -369,12 +378,12 @@ def test_md_data_viewset(self): Instance(xform=form_a, xml=xml).save() # DataViewSet /data/[pk] endpoint - response = data_view(request, pk=merged_dataset['id']) + response = data_view(request, pk=merged_dataset["id"]) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 1) - fruit = [d['fruit'] for d in response.data] - expected_fruit = ['orange'] + fruit = 
[d["fruit"] for d in response.data] + expected_fruit = ["orange"] self.assertEqual(fruit, expected_fruit) # make submission to form b @@ -383,56 +392,56 @@ def test_md_data_viewset(self): Instance(xform=form_b, xml=xml).save() # DataViewSet /data/[pk] endpoint - response = data_view(request, pk=merged_dataset['id']) + response = data_view(request, pk=merged_dataset["id"]) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 2) - dataid = response.data[0]['_id'] + dataid = response.data[0]["_id"] - fruit = [d['fruit'] for d in response.data] - expected_fruit = ['orange', 'mango'] + fruit = [d["fruit"] for d in response.data] + expected_fruit = ["orange", "mango"] self.assertEqual(fruit, expected_fruit) # DataViewSet /data/[pk]/[dataid] endpoint - data_view = DataViewSet.as_view({ - 'get': 'retrieve', - }) - response = data_view(request, pk=merged_dataset['id'], dataid=dataid) + data_view = DataViewSet.as_view( + { + "get": "retrieve", + } + ) + response = data_view(request, pk=merged_dataset["id"], dataid=dataid) self.assertEqual(response.status_code, 200) - self.assertEqual(response.data['fruit'], 'orange') + self.assertEqual(response.data["fruit"], "orange") def test_deleted_forms(self): """Test retrieving data of a merged dataset with no forms linked.""" merged_dataset = self._create_merged_dataset() - merged_xform = MergedXForm.objects.get(pk=merged_dataset['id']) + merged_xform = MergedXForm.objects.get(pk=merged_dataset["id"]) merged_xform.xforms.all().delete() request = self.factory.get( - '/', - data={ - 'sort': '{"_submission_time":1}', - 'limit': '10' - }, - **self.extra) - data_view = DataViewSet.as_view({ - 'get': 'list', - }) + "/", data={"sort": '{"_submission_time":1}', "limit": "10"}, **self.extra + ) + data_view = DataViewSet.as_view( + { + "get": "list", + } + ) # DataViewSet /data/[pk] endpoint - response = data_view(request, pk=merged_dataset['id']) + response = data_view(request, pk=merged_dataset["id"]) 
self.assertEqual(response.status_code, 200, response.data) self.assertEqual(response.data, []) - data = {'field_name': 'fruit'} - view = ChartsViewSet.as_view({'get': 'retrieve'}) + data = {"field_name": "fruit"} + view = ChartsViewSet.as_view({"get": "retrieve"}) - request = self.factory.get('/charts', data, **self.extra) - response = view(request, pk=merged_dataset['id'], format='html') + request = self.factory.get("/charts", data, **self.extra) + response = view(request, pk=merged_dataset["id"], format="html") self.assertEqual(response.status_code, 200) - self.assertEqual(response.data['data'].__len__(), 0) + self.assertEqual(response.data["data"].__len__(), 0) def test_md_geojson_response(self): """Test geojson response of a merged dataset""" merged_dataset = self._create_merged_dataset() - merged_xform = MergedXForm.objects.get(pk=merged_dataset['id']) + merged_xform = MergedXForm.objects.get(pk=merged_dataset["id"]) _make_submissions_merged_datasets(merged_xform) @@ -444,125 +453,148 @@ def test_md_geojson_response(self): instance.deleted_by = self.user instance.save() - view = MergedXFormViewSet.as_view({'get': 'data'}) + view = MergedXFormViewSet.as_view({"get": "data"}) - request = self.factory.get('/', **self.extra) - response = view(request, pk=merged_dataset['id'], format='geojson') + request = self.factory.get("/", **self.extra) + response = view(request, pk=merged_dataset["id"], format="geojson") self.assertEqual(response.status_code, 200) # we get correct content type headers = dict(response.items()) self.assertEqual(headers["Content-Type"], "application/geo+json") - del response.data['features'][0]['properties']['xform'] - del response.data['features'][1]['properties']['xform'] - del response.data['features'][0]['properties']['id'] - del response.data['features'][1]['properties']['id'] + del response.data["features"][0]["properties"]["xform"] + del response.data["features"][1]["properties"]["xform"] + del 
response.data["features"][0]["properties"]["id"] + del response.data["features"][1]["properties"]["id"] self.assertEqual( - {'type': 'FeatureCollection', - 'features': - [{'type': 'Feature', 'geometry': None, 'properties': {}}, - {'type': 'Feature', 'geometry': None, 'properties': {}}]}, - response.data + { + "type": "FeatureCollection", + "features": [ + {"type": "Feature", "geometry": None, "properties": {}}, + {"type": "Feature", "geometry": None, "properties": {}}, + ], + }, + response.data, ) # pagination works ok! - request = self.factory.get('/?page=1&page_size=1', **self.extra) - response = view(request, pk=merged_dataset['id'], format='geojson') + request = self.factory.get("/?page=1&page_size=1", **self.extra) + response = view(request, pk=merged_dataset["id"], format="geojson") self.assertEqual(response.status_code, 200) - del response.data['features'][0]['properties']['xform'] - del response.data['features'][0]['properties']['id'] + del response.data["features"][0]["properties"]["xform"] + del response.data["features"][0]["properties"]["id"] self.assertEqual( - {'type': 'FeatureCollection', - 'features': - [{'type': 'Feature', 'geometry': None, 'properties': {}}]}, - response.data + { + "type": "FeatureCollection", + "features": [{"type": "Feature", "geometry": None, "properties": {}}], + }, + response.data, ) - request = self.factory.get('/?page=2&page_size=1', **self.extra) - response = view(request, pk=merged_dataset['id'], format='geojson') + request = self.factory.get("/?page=2&page_size=1", **self.extra) + response = view(request, pk=merged_dataset["id"], format="geojson") self.assertEqual(response.status_code, 200) - del response.data['features'][0]['properties']['xform'] - del response.data['features'][0]['properties']['id'] + del response.data["features"][0]["properties"]["xform"] + del response.data["features"][0]["properties"]["id"] self.assertEqual( - {'type': 'FeatureCollection', - 'features': - [{'type': 'Feature', 'geometry': None, 
'properties': {}}]}, - response.data + { + "type": "FeatureCollection", + "features": [{"type": "Feature", "geometry": None, "properties": {}}], + }, + response.data, ) # fields argument is applied correctly - request = self.factory.get('/?page=1&page_size=1&fields=fruit', **self.extra) - response = view(request, pk=merged_dataset['id'], format='geojson') + request = self.factory.get("/?page=1&page_size=1&fields=fruit", **self.extra) + response = view(request, pk=merged_dataset["id"], format="geojson") self.assertEqual(response.status_code, 200) - del response.data['features'][0]['properties']['xform'] - del response.data['features'][0]['properties']['id'] + del response.data["features"][0]["properties"]["xform"] + del response.data["features"][0]["properties"]["id"] self.assertEqual( - {'type': 'FeatureCollection', - 'features': - [{'type': 'Feature', 'geometry': None, - 'properties': {'fruit': 'orange'}}]}, - response.data + { + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": None, + "properties": {"fruit": "orange"}, + } + ], + }, + response.data, ) # Invalid page error when we reqeust for a non-existent page - request = self.factory.get('/?page=10&page_size=1&fields=fruit', **self.extra) - response = view(request, pk=merged_dataset['id'], format='geojson') + request = self.factory.get("/?page=10&page_size=1&fields=fruit", **self.extra) + response = view(request, pk=merged_dataset["id"], format="geojson") self.assertEqual(response.status_code, 404) - self.assertEqual( - {'detail': 'Invalid page.'}, - response.data - ) + self.assertEqual({"detail": "Invalid page."}, response.data) def test_md_csv_export(self): """Test CSV export of a merged dataset""" merged_dataset = self._create_merged_dataset() - merged_xform = MergedXForm.objects.get(pk=merged_dataset['id']) + merged_xform = MergedXForm.objects.get(pk=merged_dataset["id"]) _make_submissions_merged_datasets(merged_xform) # merged dataset should be available at api/forms/[pk] 
endpoint - request = self.factory.get('/', **self.extra) - view = XFormViewSet.as_view({'get': 'retrieve'}) - response = view(request, pk=merged_dataset['id'], format='csv') + request = self.factory.get("/", **self.extra) + view = XFormViewSet.as_view({"get": "retrieve"}) + response = view(request, pk=merged_dataset["id"], format="csv") self.assertEqual(response.status_code, 200) - csv_file_obj = StringIO(''.join( - [c.decode('utf-8') for c in response.streaming_content])) + csv_file_obj = StringIO( + "".join([c.decode("utf-8") for c in response.streaming_content]) + ) csv_reader = csv.reader(csv_file_obj) # jump over headers first headers = next(csv_reader) - self.assertEqual(headers, [ - 'fruit', 'meta/instanceID', '_id', '_uuid', '_submission_time', - '_date_modified', '_tags', '_notes', '_version', '_duration', - '_submitted_by', '_total_media', '_media_count', - '_media_all_received']) + self.assertEqual( + headers, + [ + "fruit", + "meta/instanceID", + "_id", + "_uuid", + "_submission_time", + "_date_modified", + "_tags", + "_notes", + "_version", + "_duration", + "_submitted_by", + "_total_media", + "_media_count", + "_media_all_received", + ], + ) row1 = next(csv_reader) - self.assertEqual(row1[0], 'orange') + self.assertEqual(row1[0], "orange") row2 = next(csv_reader) - self.assertEqual(row2[0], 'mango') + self.assertEqual(row2[0], "mango") def test_get_osm_data_kwargs(self): """ Test get_osm_data_kwargs returns correct kwargs for a merged dataset. 
""" merged_dataset = self._create_merged_dataset() - merged_xform = MergedXForm.objects.get(pk=merged_dataset['id']) - pks = [_ for _ in merged_xform.xforms.values_list('id', flat=True)] + merged_xform = MergedXForm.objects.get(pk=merged_dataset["id"]) + pks = [_ for _ in merged_xform.xforms.values_list("id", flat=True)] kwargs = get_osm_data_kwargs(merged_xform) - self.assertEqual(kwargs, { - 'instance__deleted_at__isnull': True, - 'instance__xform_id__in': pks - }) + self.assertEqual( + kwargs, + {"instance__deleted_at__isnull": True, "instance__xform_id__in": pks}, + ) xform = merged_xform.xforms.all()[0] kwargs = get_osm_data_kwargs(xform) - self.assertEqual(kwargs, { - 'instance__deleted_at__isnull': True, - 'instance__xform_id': xform.pk - }) + self.assertEqual( + kwargs, + {"instance__deleted_at__isnull": True, "instance__xform_id": xform.pk}, + ) # pylint: disable=invalid-name def test_merged_with_attachment_endpoint(self): merged_dataset = self._create_merged_dataset() - merged_xform = MergedXForm.objects.get(pk=merged_dataset['id']) + merged_xform = MergedXForm.objects.get(pk=merged_dataset["id"]) _make_submissions_merged_datasets(merged_xform) # Attachment viewset works ok for filtered datasets @@ -571,41 +603,40 @@ def test_merged_with_attachment_endpoint(self): for instance in all_instances: _add_attachments_to_instances(instance) request = self.factory.get( - "/?merged_xform=" + str(merged_xform.pk), - **self.extra) + "/?merged_xform=" + str(merged_xform.pk), **self.extra + ) response = attachment_list_view(request) serialized_attachments = AttachmentSerializer( - Attachment.objects.filter( - instance__xform__in=merged_xform.xforms.all()), - many=True, context={'request': request}).data - self.assertEqual( - response.data, - serialized_attachments) + Attachment.objects.filter(instance__xform__in=merged_xform.xforms.all()), + many=True, + context={"request": request}, + ).data + self.assertEqual(response.data, serialized_attachments) def 
test_merged_dataset_charts(self): """Test /charts endpoint for a merged dataset works""" merged_dataset = self._create_merged_dataset() - merged_xform = MergedXForm.objects.get(pk=merged_dataset['id']) + merged_xform = MergedXForm.objects.get(pk=merged_dataset["id"]) _make_submissions_merged_datasets(merged_xform) - data = {'field_name': 'fruit'} - view = ChartsViewSet.as_view({'get': 'retrieve'}) + data = {"field_name": "fruit"} + view = ChartsViewSet.as_view({"get": "retrieve"}) - request = self.factory.get('/charts', data, **self.extra) - response = view(request, pk=merged_dataset['id'], format='html') + request = self.factory.get("/charts", data, **self.extra) + response = view(request, pk=merged_dataset["id"], format="html") self.assertEqual(response.status_code, 200) - self.assertNotEqual(response.get('Cache-Control'), None) - self.assertEqual(response.data['field_type'], 'select one') - self.assertEqual(response.data['field_name'], 'fruit') - self.assertEqual(response.data['data_type'], 'categorized') - self.assertEqual(response.data['data'][0]['fruit'], 'Mango') - self.assertEqual(response.data['data'][1]['fruit'], 'Orange') + self.assertNotEqual(response.get("Cache-Control"), None) + self.assertEqual(response.data["field_type"], "select one") + self.assertEqual(response.data["field_name"], "fruit") + self.assertEqual(response.data["data_type"], "categorized") + self.assertEqual(response.data["data"][0]["fruit"], "Mango") + self.assertEqual(response.data["data"][1]["fruit"], "Orange") def test_submissions_not_allowed(self): """Test submissions to a merged form is not allowed""" merged_dataset = self._create_merged_dataset() - merged_xform = XForm.objects.get(pk=merged_dataset['id']) + merged_xform = XForm.objects.get(pk=merged_dataset["id"]) # make submission to form a xml = 'orange' @@ -615,24 +646,23 @@ def test_submissions_not_allowed(self): def test_openrosa_form_list(self): """Test merged dataset form is not included in /formList""" merged_dataset = 
self._create_merged_dataset() - merged_xform = XForm.objects.get(pk=merged_dataset['id']) + merged_xform = XForm.objects.get(pk=merged_dataset["id"]) view = XFormListViewSet.as_view({"get": "list"}) - request = self.factory.get('/') + request = self.factory.get("/") response = view(request, username=self.user.username) self.assertEqual(response.status_code, 200) - self.assertNotIn(merged_xform.id_string, - [_['formID'] for _ in response.data]) + self.assertNotIn(merged_xform.id_string, [_["formID"] for _ in response.data]) def test_open_data(self): """Test OpenDataViewSet data endpoint""" merged_dataset = self._create_merged_dataset() - merged_xform = MergedXForm.objects.get(pk=merged_dataset['id']) + merged_xform = MergedXForm.objects.get(pk=merged_dataset["id"]) _make_submissions_merged_datasets(merged_xform) - xform = XForm.objects.get(pk=merged_dataset['id']) - view = OpenDataViewSet.as_view({'get': 'data'}) + xform = XForm.objects.get(pk=merged_dataset["id"]) + view = OpenDataViewSet.as_view({"get": "data"}) _open_data = get_or_create_opendata(xform)[0] uuid = _open_data.uuid - request = self.factory.get('/', **self.extra) + request = self.factory.get("/", **self.extra) response = view(request, uuid=uuid) self.assertEqual(response.status_code, 200) # cast generator response to list so that we can get the response count @@ -644,21 +674,20 @@ def test_filtered_dataset(self): the linked forms. 
""" merged_dataset = self._create_merged_dataset() - xform = XForm.objects.get(pk=merged_dataset['id']) + xform = XForm.objects.get(pk=merged_dataset["id"]) _make_submissions_merged_datasets(xform.mergedxform) self.assertTrue(xform.is_merged_dataset) data = { - 'name': "My DataView", - 'xform': 'http://testserver/api/v1/forms/%s' % xform.pk, - 'project': - 'http://testserver/api/v1/projects/%s' % xform.project.pk, + "name": "My DataView", + "xform": "http://testserver/api/v1/forms/%s" % xform.pk, + "project": "http://testserver/api/v1/projects/%s" % xform.project.pk, # ensure there's an attachment column(photo) in you dataview - 'columns': '["fruit"]' + "columns": '["fruit"]', } - view = DataViewViewSet.as_view({'get': 'data'}) + view = DataViewViewSet.as_view({"get": "data"}) self._create_dataview(data=data, project=xform.project, xform=xform) - request = self.factory.get('/', **self.extra) + request = self.factory.get("/", **self.extra) response = view(request, pk=self.data_view.pk) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 2) @@ -669,15 +698,15 @@ def test_rest_service(self): """ count = RestService.objects.count() merged_dataset = self._create_merged_dataset() - xform = XForm.objects.get(pk=merged_dataset['id']) - view = RestServicesViewSet.as_view({'post': 'create'}) + xform = XForm.objects.get(pk=merged_dataset["id"]) + view = RestServicesViewSet.as_view({"post": "create"}) post_data = { "name": "generic_json", "service_url": "http://crunch.goodbot.ai", - "xform": xform.pk + "xform": xform.pk, } - request = self.factory.post('/', data=post_data, **self.extra) + request = self.factory.post("/", data=post_data, **self.extra) response = view(request) self.assertEqual(response.status_code, 201) @@ -693,70 +722,71 @@ def test_md_has_deleted_xforms(self): """ Test creating a merged dataset that includes a soft deleted form. 
""" - view = MergedXFormViewSet.as_view({ - 'post': 'create', - }) + view = MergedXFormViewSet.as_view( + { + "post": "create", + } + ) # pylint: disable=attribute-defined-outside-init self.project = get_user_default_project(self.user) - xform1 = self._publish_markdown(MD, self.user, id_string='a') - xform2 = self._publish_markdown(MD, self.user, id_string='b') + xform1 = self._publish_markdown(MD, self.user, id_string="a") + xform2 = self._publish_markdown(MD, self.user, id_string="b") xform2.soft_delete() data = { - 'xforms': [ + "xforms": [ "http://testserver/api/v1/forms/%s" % xform1.pk, "http://testserver/api/v1/forms/%s" % xform2.pk, ], - 'name': - 'Merged Dataset', - 'project': - f"http://testserver/api/v1/projects/{self.project.pk}", + "name": "Merged Dataset", + "project": f"http://testserver/api/v1/projects/{self.project.pk}", } - request = self.factory.post('/', data=data, **self.extra) + request = self.factory.post("/", data=data, **self.extra) response = view(request) self.assertEqual(response.status_code, 400) self.assertEqual( - response.data, - {'xforms': [u'Invalid hyperlink - Object does not exist.']}) + response.data, {"xforms": ["Invalid hyperlink - Object does not exist."]} + ) def test_md_has_no_matching_fields(self): """ Test creating a merged dataset that has no matching fields. 
""" - view = MergedXFormViewSet.as_view({ - 'post': 'create', - }) + view = MergedXFormViewSet.as_view( + { + "post": "create", + } + ) # pylint: disable=attribute-defined-outside-init self.project = get_user_default_project(self.user) - xform1 = self._publish_markdown(MD, self.user, id_string='a') - xform2 = self._publish_markdown(NOT_MATCHING, self.user, id_string='b') + xform1 = self._publish_markdown(MD, self.user, id_string="a") + xform2 = self._publish_markdown(NOT_MATCHING, self.user, id_string="b") data = { - 'xforms': [ + "xforms": [ "http://testserver/api/v1/forms/%s" % xform1.pk, "http://testserver/api/v1/forms/%s" % xform2.pk, ], - 'name': - 'Merged Dataset', - 'project': - f"http://testserver/api/v1/projects/{self.project.pk}", + "name": "Merged Dataset", + "project": f"http://testserver/api/v1/projects/{self.project.pk}", } - request = self.factory.post('/', data=data, **self.extra) + request = self.factory.post("/", data=data, **self.extra) response = view(request) self.assertEqual(response.status_code, 400) - self.assertEqual(response.data, - {'xforms': [u'No matching fields in xforms.']}) + self.assertEqual(response.data, {"xforms": ["No matching fields in xforms."]}) def test_md_data_viewset_deleted_form(self): """Test retrieving data of a merged dataset with one form deleted""" merged_dataset = self._create_merged_dataset() - merged_xform = MergedXForm.objects.get(pk=merged_dataset['id']) - request = self.factory.get('/', **self.extra) - data_view = DataViewSet.as_view({ - 'get': 'list', - }) + merged_xform = MergedXForm.objects.get(pk=merged_dataset["id"]) + request = self.factory.get("/", **self.extra) + data_view = DataViewSet.as_view( + { + "get": "list", + } + ) # make submission to form a form_a = merged_xform.xforms.all()[0] @@ -764,12 +794,12 @@ def test_md_data_viewset_deleted_form(self): Instance(xform=form_a, xml=xml).save() # DataViewSet /data/[pk] endpoint - response = data_view(request, pk=merged_dataset['id']) + response = 
data_view(request, pk=merged_dataset["id"]) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 1) - fruit = [d['fruit'] for d in response.data] - expected_fruit = ['orange'] + fruit = [d["fruit"] for d in response.data] + expected_fruit = ["orange"] self.assertEqual(fruit, expected_fruit) # make submission to form b @@ -778,18 +808,18 @@ def test_md_data_viewset_deleted_form(self): Instance(xform=form_b, xml=xml).save() # DataViewSet /data/[pk] endpoint - response = data_view(request, pk=merged_dataset['id']) + response = data_view(request, pk=merged_dataset["id"]) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 2) - dataid = response.data[0]['_id'] + dataid = response.data[0]["_id"] - fruit = [d['fruit'] for d in response.data] - expected_fruit = ['orange', 'mango'] + fruit = [d["fruit"] for d in response.data] + expected_fruit = ["orange", "mango"] self.assertEqual(fruit, expected_fruit) # DataViewSet /data/[pk] endpoint, form_a deleted form_a.soft_delete() - response = data_view(request, pk=merged_dataset['id'], dataid=dataid) + response = data_view(request, pk=merged_dataset["id"], dataid=dataid) self.assertEqual(response.status_code, 404) def test_xform_has_uncommon_reference(self): @@ -797,41 +827,40 @@ def test_xform_has_uncommon_reference(self): Test creating a merged dataset that has matching fields but with uncommon reference variable. 
""" - view = MergedXFormViewSet.as_view({ - 'post': 'create', - }) + view = MergedXFormViewSet.as_view( + { + "post": "create", + } + ) # pylint: disable=attribute-defined-outside-init self.project = get_user_default_project(self.user) - xform1 = self._publish_markdown(MD, self.user, id_string='a') - xform2 = self._publish_markdown( - REFERENCE_ISSUE, self.user, id_string='b') + xform1 = self._publish_markdown(MD, self.user, id_string="a") + xform2 = self._publish_markdown(REFERENCE_ISSUE, self.user, id_string="b") data = { - 'xforms': [ + "xforms": [ "http://testserver/api/v1/forms/%s" % xform2.pk, "http://testserver/api/v1/forms/%s" % xform1.pk, ], - 'name': - 'Merged Dataset', - 'project': - f"http://testserver/api/v1/projects/{self.project.pk}", + "name": "Merged Dataset", + "project": f"http://testserver/api/v1/projects/{self.project.pk}", } - request = self.factory.post('/', data=data, **self.extra) + request = self.factory.post("/", data=data, **self.extra) response = view(request) self.assertEqual(response.status_code, 400) error_message = ( "There has been a problem trying to replace ${tunda} with the " "XPath to the survey element named 'tunda'. There is no survey " - "element with this name.") - self.assertIn('xforms', response.data) - self.assertIn(error_message, response.data['xforms']) + "element with this name." 
+ ) + self.assertIn("xforms", response.data) + self.assertIn(error_message, response.data["xforms"]) def test_merged_datasets_deleted_parent_retrieve(self): - """Test retrieving a specific merged dataset when the parent is deleted - """ + """Test retrieving a specific merged dataset when the parent is deleted""" merged_dataset = self._create_merged_dataset(geo=True) - merged_xform = MergedXForm.objects.get(pk=merged_dataset['id']) + merged_xform = MergedXForm.objects.get(pk=merged_dataset["id"]) # make submission to form b form_b = merged_xform.xforms.all()[1] @@ -841,12 +870,12 @@ def test_merged_datasets_deleted_parent_retrieve(self): form_b.refresh_from_db() form_b.last_submission_time = instance.date_created form_b.save() - view = MergedXFormViewSet.as_view({'get': 'retrieve'}) + view = MergedXFormViewSet.as_view({"get": "retrieve"}) # status_code is 200 when: pk exists, user is authenticated - request = self.factory.get('/', **self.extra) - response = view(request, pk=merged_dataset['id']) + request = self.factory.get("/", **self.extra) + response = view(request, pk=merged_dataset["id"]) self.assertEqual(response.status_code, 200) # delete parents @@ -854,12 +883,12 @@ def test_merged_datasets_deleted_parent_retrieve(self): merged_xform.refresh_from_db() # merged dataset should be available at api/forms/[pk] endpoint - request = self.factory.get('/', **self.extra) - view = XFormViewSet.as_view({'get': 'retrieve'}) - response = view(request, pk=merged_dataset['id']) + request = self.factory.get("/", **self.extra) + view = XFormViewSet.as_view({"get": "retrieve"}) + response = view(request, pk=merged_dataset["id"]) self.assertEqual(response.status_code, 200) - self.assertEqual(merged_dataset['id'], response.data['formid']) - self.assertTrue(response.data['is_merged_dataset']) - self.assertTrue(response.data['instances_with_geopoints']) + self.assertEqual(merged_dataset["id"], response.data["formid"]) + self.assertTrue(response.data["is_merged_dataset"]) + 
self.assertTrue(response.data["instances_with_geopoints"]) # deleted parents, 0 submissions - self.assertEqual(response.data['num_of_submissions'], 0) + self.assertEqual(response.data["num_of_submissions"], 0) diff --git a/onadata/apps/api/tests/viewsets/test_messaging_stats_viewset.py b/onadata/apps/api/tests/viewsets/test_messaging_stats_viewset.py index 1dac6fba1b..98a99fd5e8 100644 --- a/onadata/apps/api/tests/viewsets/test_messaging_stats_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_messaging_stats_viewset.py @@ -1,8 +1,9 @@ """ Module containing test for the MessagingStatsViewset (api/v1/stats/messaging) """ + import json -from datetime import date +from datetime import datetime, timezone from django.test import RequestFactory from onadata.apps.api.viewsets.messaging_stats_viewset import MessagingStatsViewSet @@ -34,7 +35,7 @@ def test_filters(self): "target_type": "xform", "target_id": self.xform.id, "group_by": "day", - "timestamp__day": date.today().day, + "timestamp__day": datetime.now().day, # .astimezone(timezone.utc).day, }, **self.extra, ) @@ -49,7 +50,7 @@ def test_filters(self): returned_data, [ { - "group": str(date.today()), + "group": str(datetime.now().astimezone(timezone.utc).date()), "submission_created": self.xform.instances.count(), } ], @@ -61,7 +62,7 @@ def test_filters(self): "target_type": "xform", "target_id": self.xform.id, "group_by": "day", - "timestamp__day": date.today().day + 1, + "timestamp__day": datetime.now().astimezone(timezone.utc).day + 1, }, **self.extra, ) @@ -98,7 +99,7 @@ def test_filters(self): returned_data, [ { - "group": str(date.today()), + "group": str(datetime.now().astimezone(timezone.utc).date()), "submission_created": self.xform.instances.count(), } ], @@ -153,7 +154,7 @@ def test_expected_responses(self): returned_data, [ { - "group": str(date.today()), + "group": str(datetime.now().astimezone(timezone.utc).date()), "submission_created": self.xform.instances.count(), } ], diff --git 
a/onadata/apps/api/tests/viewsets/test_widget_viewset.py b/onadata/apps/api/tests/viewsets/test_widget_viewset.py index 93ec7cf33a..0ed0605530 100644 --- a/onadata/apps/api/tests/viewsets/test_widget_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_widget_viewset.py @@ -217,11 +217,17 @@ def test_list_widgets(self): } ) + # empty - no xform filter request = self.factory.get("/", **self.extra) response = view(request) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 0) + # not empty - xform filter + request = self.factory.get("/", data={"xform": self.xform.pk}, **self.extra) + response = view(request) self.assertEqual(response.status_code, 200) - self.assertEqual(len(response.data), 2) + self.assertEqual(len(response.data), 1) def test_widget_permission_create(self): @@ -313,7 +319,7 @@ def test_widget_permission_list(self): ) request = self.factory.get("/", **self.extra) - response = view(request) + response = view(request, formid=self.xform.pk) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 0) @@ -322,8 +328,7 @@ def test_widget_permission_list(self): ReadOnlyRole.add(self.user, self.xform) request = self.factory.get("/", **self.extra) - response = view(request) - + response = view(request, formid=self.xform.pk) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 1) @@ -498,7 +503,6 @@ def test_widget_data_public_form(self): request = self.factory.get("/", **self.extra) response = view(request, formid=self.xform.pk) - self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 1) diff --git a/onadata/apps/api/viewsets/media_viewset.py b/onadata/apps/api/viewsets/media_viewset.py index 11827bbb51..87f77b6050 100644 --- a/onadata/apps/api/viewsets/media_viewset.py +++ b/onadata/apps/api/viewsets/media_viewset.py @@ -34,9 +34,7 @@ class MediaViewSet( ): """A view to redirect to actual attachments url""" - queryset = Attachment.objects.filter( - 
instance__deleted_at__isnull=True, deleted_at__isnull=True - ) + queryset = Attachment.objects.filter(deleted_at__isnull=True) filter_backends = (filters.AttachmentFilter, filters.AttachmentTypeFilter) lookup_field = "pk" permission_classes = (AttachmentObjectPermissions,) diff --git a/onadata/apps/logger/migrations/0013_add_xform_to_logger_attachment.py b/onadata/apps/logger/migrations/0013_add_xform_to_logger_attachment.py new file mode 100644 index 0000000000..19131cce2c --- /dev/null +++ b/onadata/apps/logger/migrations/0013_add_xform_to_logger_attachment.py @@ -0,0 +1,36 @@ +# Generated by Django 4.1 on 2024-04-15 14:00 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import onadata.apps.logger.models.instance + + +class Migration(migrations.Migration): + + dependencies = [ + ("logger", "0012_add_instance_history_uuid_and_checksum_idx"), + ] + + operations = [ + migrations.AddField( + model_name="attachment", + name="xform", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="xform_attachments", + to="logger.xform", + ), + ), + migrations.AddField( + model_name="attachment", + name="user", + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, + ), + ), + ] diff --git a/onadata/apps/logger/migrations/0014_populate_attachment_xform.py b/onadata/apps/logger/migrations/0014_populate_attachment_xform.py new file mode 100644 index 0000000000..563b5f8c96 --- /dev/null +++ b/onadata/apps/logger/migrations/0014_populate_attachment_xform.py @@ -0,0 +1,37 @@ +# Generated by Django 4.2.11 on 2024-04-22 06:42 + +from django.db import migrations + + +def populate_attachment_xform(apps, schema_editor): + """Populate xform field for Attachments""" + Attachment = apps.get_model("logger", "Attachment") + queryset = Attachment.objects.filter(xform__isnull=True).values( + "pk", 
"instance__xform", "instance__user" + ) + count = queryset.count() + print("Start populating attachment xform...") + print(f"Found {count} records") + + for attachment in queryset.iterator(chunk_size=100): + # We do not want to trigger Model.save or any signal + # Queryset.update is a workaround to achieve this. + # Model.save and the post/pre signals may contain + # some side-effects which we are not interested in + Attachment.objects.filter(pk=attachment["pk"]).update( + xform=attachment["instance__xform"], + user=attachment["instance__user"], + ) + count -= 1 + print(f"{count} remaining") + + print("Done populating attachment xform!") + + +class Migration(migrations.Migration): + + dependencies = [ + ("logger", "0013_add_xform_to_logger_attachment"), + ] + + operations = [migrations.RunPython(populate_attachment_xform)] diff --git a/onadata/apps/logger/models/attachment.py b/onadata/apps/logger/models/attachment.py index 5a58a186b3..e44f7d18a7 100644 --- a/onadata/apps/logger/models/attachment.py +++ b/onadata/apps/logger/models/attachment.py @@ -50,6 +50,13 @@ class Attachment(models.Model): OSM = "osm" + xform = models.ForeignKey( + "logger.XForm", + related_name="xform_attachments", + on_delete=models.CASCADE, + null=True, + blank=True, + ) instance = models.ForeignKey( "logger.Instance", related_name="attachments", on_delete=models.CASCADE ) @@ -69,6 +76,12 @@ class Attachment(models.Model): null=True, on_delete=models.SET_NULL, ) + # submitted_by user + user = models.ForeignKey( + get_user_model(), + null=True, + on_delete=models.SET_NULL, + ) class Meta: app_label = "logger" diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index 5b87600ebf..e84aafe019 100644 --- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -823,6 +823,10 @@ def post_save_submission(sender, instance=None, created=False, **kwargs): """ if instance.deleted_at is not None: 
_update_xform_submission_count_delete(instance) + # mark attachments also as deleted. + instance.attachments.filter(deleted_at__isnull=True).update( + deleted_at=instance.deleted_at, deleted_by=instance.deleted_by + ) if ( hasattr(settings, "ASYNC_POST_SUBMISSION_PROCESSING_ENABLED") diff --git a/onadata/apps/messaging/tests/test_messaging_viewset.py b/onadata/apps/messaging/tests/test_messaging_viewset.py index f0ffd719c9..3b807b6444 100644 --- a/onadata/apps/messaging/tests/test_messaging_viewset.py +++ b/onadata/apps/messaging/tests/test_messaging_viewset.py @@ -31,21 +31,20 @@ def _create_message(self, user=None): """ if not user: user = _create_user() - assign_perm('auth.change_user', user, user) - view = MessagingViewSet.as_view({'post': 'create'}) + assign_perm("auth.change_user", user, user) + view = MessagingViewSet.as_view({"post": "create"}) data = { "message": "Hello World!", "target_id": user.pk, - "target_type": 'user', + "target_type": "user", } # yapf: disable - request = self.factory.post('/messaging', data) + request = self.factory.post("/messaging", data) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 201, response.data) self.assertDictContainsSubset(data, response.data) # ensure that id and timestamp are returned - self.assertTrue('id' and 'timestamp' in - [text(x) for x in list(response.data)]) + self.assertTrue("id" and "timestamp" in [text(x) for x in list(response.data)]) return response.data def test_create_message(self): @@ -60,17 +59,17 @@ def test_target_does_not_exist(self): target that does not exist. 
""" user = _create_user() - view = MessagingViewSet.as_view({'post': 'create'}) + view = MessagingViewSet.as_view({"post": "create"}) data = { "message": "Hello World!", "target_id": 1000000000, - "target_type": 'user', + "target_type": "user", } # yapf: disable - request = self.factory.post('/messaging', data) + request = self.factory.post("/messaging", data) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 400, response.data) - self.assertEqual(response.data['target_id'], 'target_id not found') + self.assertEqual(response.data["target_id"], "target_id not found") def test_delete_message(self): """ @@ -78,12 +77,12 @@ def test_delete_message(self): """ user = _create_user() message_data = self._create_message(user) - view = MessagingViewSet.as_view({'delete': 'destroy'}) - request = self.factory.delete('/messaging/%s' % message_data['id']) + view = MessagingViewSet.as_view({"delete": "destroy"}) + request = self.factory.delete("/messaging/%s" % message_data["id"]) force_authenticate(request, user=user) - response = view(request=request, pk=message_data['id']) + response = view(request=request, pk=message_data["id"]) self.assertEqual(response.status_code, 204) - self.assertFalse(Action.objects.filter(pk=message_data['id']).exists()) + self.assertFalse(Action.objects.filter(pk=message_data["id"]).exists()) def test_list_messages(self): """ @@ -91,61 +90,60 @@ def test_list_messages(self): """ user = _create_user() message_data = self._create_message(user) - target_id = message_data['target_id'] - view = MessagingViewSet.as_view({'get': 'list'}) + target_id = message_data["target_id"] + view = MessagingViewSet.as_view({"get": "list"}) # return data only when a target_type is provided request = self.factory.get( - '/messaging', {'target_type': 'user', - 'target_id': target_id}) + "/messaging", {"target_type": "user", "target_id": target_id} + ) force_authenticate(request, user=user) response = 
view(request=request) self.assertEqual(response.status_code, 200) - message_data.pop('target_id') - message_data.pop('target_type') + message_data.pop("target_id") + message_data.pop("target_type") self.assertEqual(len(response.data), 1) self.assertEqual(dict(response.data[0]), message_data) # returns empty list when a target type does not have any records request = self.factory.get( - '/messaging', {'target_type': 'xform', - 'target_id': target_id}) + "/messaging", {"target_type": "xform", "target_id": target_id} + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(response.data, []) # return status 400 if both target_type and target_id are misssing - request = self.factory.get('/messaging') + request = self.factory.get("/messaging") force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 400) # returns 400 status when a target_id is missing - request = self.factory.get('/messaging', {'target_type': 'user'}) + request = self.factory.get("/messaging", {"target_type": "user"}) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 400) - self.assertEqual(response.data, - {u'detail': u"Parameter 'target_id' is missing."}) + self.assertEqual(response.data, {"detail": "Parameter 'target_id' is missing."}) # returns 400 status when a target_type is missing - request = self.factory.get('/messaging', {'target_id': target_id}) + request = self.factory.get("/messaging", {"target_id": target_id}) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 400) - self.assertEqual(response.data, - {u'detail': u"Parameter 'target_type' is missing."}) + self.assertEqual( + response.data, {"detail": "Parameter 'target_type' is missing."} + ) # returns 400 status when a target type is not known request = self.factory.get( - '/messaging', 
{'target_type': 'xyz', - 'target_id': target_id}) + "/messaging", {"target_type": "xyz", "target_id": target_id} + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 400) - self.assertEqual(response.data, - {u'detail': u'Unknown target_type xyz'}) + self.assertEqual(response.data, {"detail": "Unknown target_type xyz"}) def test_retrieve_message(self): """ @@ -153,13 +151,13 @@ def test_retrieve_message(self): """ user = _create_user() message_data = self._create_message(user) - view = MessagingViewSet.as_view({'get': 'retrieve'}) - request = self.factory.get('/messaging/{}'.format(message_data['id'])) + view = MessagingViewSet.as_view({"get": "retrieve"}) + request = self.factory.get("/messaging/{}".format(message_data["id"])) force_authenticate(request, user=user) - response = view(request=request, pk=message_data['id']) + response = view(request=request, pk=message_data["id"]) self.assertEqual(response.status_code, 200) - message_data.pop('target_id') - message_data.pop('target_type') + message_data.pop("target_id") + message_data.pop("target_type") self.assertDictEqual(response.data, message_data) def test_authentication_required(self): @@ -167,51 +165,47 @@ def test_authentication_required(self): Test that authentication is required at all endpoints. """ # Test that the list endpoint requires authentication - view1 = MessagingViewSet.as_view({'get': 'list'}) - request1 = self.factory.get('/messaging', - {'target_type': 'xform', - 'target_id': 1}) + view1 = MessagingViewSet.as_view({"get": "list"}) + request1 = self.factory.get( + "/messaging", {"target_type": "xform", "target_id": 1} + ) response1 = view1(request=request1) self.assertEqual(response1.status_code, 401) - self.assertEqual(response1.data, { - u'detail': - u"Authentication credentials were not provided." 
- }) + self.assertEqual( + response1.data, {"detail": "Authentication credentials were not provided."} + ) # Test that retrieve requires authentication - view2 = MessagingViewSet.as_view({'get': 'retrieve'}) - request2 = self.factory.get('/messaging/1') + view2 = MessagingViewSet.as_view({"get": "retrieve"}) + request2 = self.factory.get("/messaging/1") response2 = view2(request=request2, pk=1) self.assertEqual(response2.status_code, 401) - self.assertEqual(response2.data, { - u'detail': - u"Authentication credentials were not provided." - }) + self.assertEqual( + response2.data, {"detail": "Authentication credentials were not provided."} + ) # Test that delete requires authentication - view3 = MessagingViewSet.as_view({'delete': 'destroy'}) - request3 = self.factory.delete('/messaging/5') + view3 = MessagingViewSet.as_view({"delete": "destroy"}) + request3 = self.factory.delete("/messaging/5") response3 = view3(request=request3, pk=5) self.assertEqual(response3.status_code, 401) - self.assertEqual(response3.data, { - u'detail': - u"Authentication credentials were not provided." - }) + self.assertEqual( + response3.data, {"detail": "Authentication credentials were not provided."} + ) # Test that create requires authentication - view4 = MessagingViewSet.as_view({'post': 'create'}) + view4 = MessagingViewSet.as_view({"post": "create"}) data = { "message": "Hello World!", "target_id": 1, - "target_type": 'user', + "target_type": "user", } # yapf: disable - request4 = self.factory.post('/messaging', data) + request4 = self.factory.post("/messaging", data) response4 = view4(request=request4) self.assertEqual(response4.status_code, 401) - self.assertEqual(response4.data, { - u'detail': - u"Authentication credentials were not provided." 
- }) + self.assertEqual( + response4.data, {"detail": "Authentication credentials were not provided."} + ) def test_create_permissions(self): """ @@ -221,19 +215,19 @@ def test_create_permissions(self): data = { "message": "Hello World!", "target_id": user.pk, - "target_type": 'user', + "target_type": "user", } # yapf: disable - view = MessagingViewSet.as_view({'post': 'create'}) + view = MessagingViewSet.as_view({"post": "create"}) - request = self.factory.post('/messaging', data) + request = self.factory.post("/messaging", data) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 403) - self.assertIn(u'You do not have permission', response.data['detail']) + self.assertIn("You do not have permission", response.data["detail"]) # assign add_user permissions - assign_perm('auth.change_user', user, user) - request = self.factory.post('/messaging', data) + assign_perm("auth.change_user", user, user) + request = self.factory.post("/messaging", data) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 201) @@ -243,17 +237,17 @@ def test_retrieve_permissions(self): Test that correct permissions are required when retrieving a message """ user = _create_user() - other_user = _create_user('anotheruser') + other_user = _create_user("anotheruser") message_data = self._create_message(user) - view = MessagingViewSet.as_view({'get': 'retrieve'}) - request = self.factory.get('/messaging/{}'.format(message_data['id'])) + view = MessagingViewSet.as_view({"get": "retrieve"}) + request = self.factory.get("/messaging/{}".format(message_data["id"])) force_authenticate(request, user=other_user) - response = view(request=request, pk=message_data['id']) + response = view(request=request, pk=message_data["id"]) self.assertEqual(response.status_code, 403) - request = self.factory.get('/messaging/{}'.format(message_data['id'])) + request = 
self.factory.get("/messaging/{}".format(message_data["id"])) force_authenticate(request, user=user) - response = view(request=request, pk=message_data['id']) + response = view(request=request, pk=message_data["id"]) self.assertEqual(response.status_code, 200) def test_retrieve_pagination(self): @@ -263,10 +257,10 @@ def test_retrieve_pagination(self): self._create_message(user) count += 1 - view = MessagingViewSet.as_view({'get': 'list'}) + view = MessagingViewSet.as_view({"get": "list"}) request = self.factory.get( - '/messaging', data={ - "target_type": "user", "target_id": user.pk}) + "/messaging", data={"target_type": "user", "target_id": user.pk} + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200, response.data) @@ -274,39 +268,43 @@ def test_retrieve_pagination(self): # Test that the pagination query params paginate the responses request = self.factory.get( - '/messaging', data={ - "target_type": "user", - "target_id": user.pk, "page_size": 2}) + "/messaging", + data={"target_type": "user", "target_id": user.pk, "page_size": 2}, + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200, response.data) self.assertEqual(len(response.data), 2) - self.assertIn('Link', response) + self.assertIn("Link", response) self.assertEqual( - response['Link'], - (f'; rel="next",' - ' ; rel="last"')) + response["Link"], + ( + f"; rel="next",' + " ; rel="last"' + ), + ) # Test the retrieval threshold is respected with override_settings(MESSAGE_RETRIEVAL_THRESHOLD=2): request = self.factory.get( - '/messaging', data={ - "target_type": "user", "target_id": user.pk - } + "/messaging", data={"target_type": "user", "target_id": user.pk} ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200, response.data) self.assertEqual(len(response.data), 2) - self.assertIn('Link', response) + 
self.assertIn("Link", response) self.assertEqual( - response['Link'], - (f'; rel="next",' - ' ; rel="last"')) + response["Link"], + ( + f"; rel="next",' + " ; rel="last"' + ), + ) @override_settings(USE_TZ=False) def test_messaging_timestamp_filter(self): @@ -317,265 +315,286 @@ def test_messaging_timestamp_filter(self): message_one = self._create_message(user) message_two = self._create_message(user) - view = MessagingViewSet.as_view({'get': 'list'}) - message_one_timestamp = message_one['timestamp'] + view = MessagingViewSet.as_view({"get": "list"}) + message_one_timestamp = message_one["timestamp"] target_id = user.id request = self.factory.get( - f'/messaging?timestamp={message_one_timestamp}&' - f'target_type=user&target_id={target_id}') + f"/messaging?timestamp={message_one_timestamp}&" + f"target_type=user&target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 1) - self.assertEqual( - response.data[0].get('id'), message_one['id']) + self.assertEqual(response.data[0].get("id"), message_one["id"]) # Test able to filter using gt & gte lookups request = self.factory.get( - f'/messaging?timestamp__gt={message_one_timestamp}&' - f'target_type=user&target_id={target_id}') + f"/messaging?timestamp__gt={message_one_timestamp}&" + f"target_type=user&target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 1) - self.assertEqual( - response.data[0].get('id'), message_two['id']) + self.assertEqual(response.data[0].get("id"), message_two["id"]) request = self.factory.get( - f'/messaging?timestamp__gte={message_one_timestamp}&' - f'target_type=user&target_id={target_id}') + f"/messaging?timestamp__gte={message_one_timestamp}&" + f"target_type=user&target_id={target_id}" + ) force_authenticate(request, user=user) response = 
view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 2) # Test able to filter using lt & lte lookups - message_two_timestamp = message_two['timestamp'] + message_two_timestamp = message_two["timestamp"] request = self.factory.get( - f'/messaging?timestamp__lt={message_two_timestamp}&' - f'target_type=user&target_id={target_id}') + f"/messaging?timestamp__lt={message_two_timestamp}&" + f"target_type=user&target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 1) - self.assertEqual( - response.data[0].get('id'), message_one['id']) + self.assertEqual(response.data[0].get("id"), message_one["id"]) - message_two_timestamp = message_two['timestamp'] + message_two_timestamp = message_two["timestamp"] request = self.factory.get( - f'/messaging?timestamp__lte={message_two_timestamp}&' - f'target_type=user&target_id={target_id}') + f"/messaging?timestamp__lte={message_two_timestamp}&" + f"target_type=user&target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 2) # Test able to use day filters - day = Action.objects.get( - id=message_one['id']).timestamp.day + day = Action.objects.get(id=message_one["id"]).timestamp.day request = self.factory.get( - f'/messaging?timestamp__day={day}&' - f'target_type=user&target_id={target_id}') + f"/messaging?timestamp__day={day}&" + f"target_type=user&target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 2) request = self.factory.get( - f'/messaging?timestamp__day__gt={day}&' - f'target_type=user&target_id={target_id}') + f"/messaging?timestamp__day__gt={day}&" + f"target_type=user&target_id={target_id}" + ) 
force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 0) request = self.factory.get( - f'/messaging?timestamp__day__gte={day}&' - f'target_type=user&target_id={target_id}') + f"/messaging?timestamp__day__gte={day}&" + f"target_type=user&target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 2) request = self.factory.get( - f'/messaging?timestamp__day__lt={day}&' - f'target_type=user&target_id={target_id}') + f"/messaging?timestamp__day__lt={day}&" + f"target_type=user&target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 0) request = self.factory.get( - f'/messaging?timestamp__day__lte={day}&' - f'target_type=user&target_id={target_id}') + f"/messaging?timestamp__day__lte={day}&" + f"target_type=user&target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 2) # Test able to use month filters - month = Action.objects.get( - id=message_one['id']).timestamp.month + month = Action.objects.get(id=message_one["id"]).timestamp.month request = self.factory.get( - f'/messaging?timestamp__month={month}&' - f'target_type=user&target_id={target_id}') + f"/messaging?timestamp__month={month}&" + f"target_type=user&target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 2) request = self.factory.get( - f'/messaging?timestamp__month__gt={month}&' - f'target_type=user&target_id={target_id}') + f"/messaging?timestamp__month__gt={month}&" + f"target_type=user&target_id={target_id}" + ) 
force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 0) request = self.factory.get( - f'/messaging?timestamp__month__gte={month}&' - f'target_type=user&target_id={target_id}') + f"/messaging?timestamp__month__gte={month}&" + f"target_type=user&target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 2) request = self.factory.get( - f'/messaging?timestamp__month__lt={month}&' - f'target_type=user&target_id={target_id}') + f"/messaging?timestamp__month__lt={month}&" + f"target_type=user&target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 0) request = self.factory.get( - f'/messaging?timestamp__month__lte={month}&' - f'target_type=user&target_id={target_id}') + f"/messaging?timestamp__month__lte={month}&" + f"target_type=user&target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 2) # Test able to use year filters - year = Action.objects.get( - id=message_one['id']).timestamp.year + year = Action.objects.get(id=message_one["id"]).timestamp.year request = self.factory.get( - f'/messaging?timestamp__year={year}&' - f'target_type=user&target_id={target_id}') + f"/messaging?timestamp__year={year}&" + f"target_type=user&target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 2) request = self.factory.get( - f'/messaging?timestamp__year__gt={year}&' - f'target_type=user&target_id={target_id}') + f"/messaging?timestamp__year__gt={year}&" + f"target_type=user&target_id={target_id}" + ) 
force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 0) request = self.factory.get( - f'/messaging?timestamp__year__gte={year}&' - f'target_type=user&target_id={target_id}') + f"/messaging?timestamp__year__gte={year}&" + f"target_type=user&target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 2) request = self.factory.get( - f'/messaging?timestamp__year__lt={year}&' - f'target_type=user&target_id={target_id}') + f"/messaging?timestamp__year__lt={year}&" + f"target_type=user&target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 0) request = self.factory.get( - f'/messaging?timestamp__year__lte={year}&' - f'target_type=user&target_id={target_id}') + f"/messaging?timestamp__year__lte={year}&" + f"target_type=user&target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 2) # Test able to use hour & minute filters - hour = Action.objects.get( - id=message_one['id']).timestamp.hour - minute = Action.objects.get( - id=message_one['id']).timestamp.minute + hour = Action.objects.get(id=message_one["id"]).timestamp.hour + minute = Action.objects.get(id=message_one["id"]).timestamp.minute request = self.factory.get( - f'/messaging?timestamp__hour={hour}&target_type=user&' - f'target_id={target_id}') + f"/messaging?timestamp__hour={hour}&target_type=user&" + f"target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 2) request = self.factory.get( - 
f'/messaging?timestamp__hour__lt={hour}&target_type=user&' - f'target_id={target_id}') + f"/messaging?timestamp__hour__lt={hour}&target_type=user&" + f"target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 0) request = self.factory.get( - f'/messaging?timestamp__hour__gt={hour}&target_type=user&' - f'target_id={target_id}') + f"/messaging?timestamp__hour__gt={hour}&target_type=user&" + f"target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 0) request = self.factory.get( - f'/messaging?timestamp__hour__lte={hour}&target_type=user&' - f'target_id={target_id}') + f"/messaging?timestamp__hour__lte={hour}&target_type=user&" + f"target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 2) request = self.factory.get( - f'/messaging?timestamp__hour__gte={hour}&target_type=user&' - f'target_id={target_id}') + f"/messaging?timestamp__hour__gte={hour}&target_type=user&" + f"target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 2) request = self.factory.get( - f'/messaging?timestamp__minute__gt={minute}&target_type=user&' - f'target_id={target_id}') + f"/messaging?timestamp__minute__gt={minute}&target_type=user&" + f"target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 0) request = self.factory.get( - f'/messaging?timestamp__minute__lt={minute}&target_type=user&' - f'target_id={target_id}') + f"/messaging?timestamp__minute__lt={minute}&target_type=user&" + 
f"target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 0) request = self.factory.get( - f'/messaging?timestamp__minute__gte={minute}&target_type=user&' - f'target_id={target_id}') + f"/messaging?timestamp__minute__gte={minute}&target_type=user&" + f"target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 2) request = self.factory.get( - f'/messaging?timestamp__minute__lte={minute}&target_type=user&' - f'target_id={target_id}') + f"/messaging?timestamp__minute__lte={minute}&target_type=user&" + f"target_id={target_id}" + ) force_authenticate(request, user=user) response = view(request=request) self.assertEqual(response.status_code, 200) diff --git a/onadata/apps/restservice/tests/viewsets/test_restservicesviewset.py b/onadata/apps/restservice/tests/viewsets/test_restservicesviewset.py index af2052e068..a4e46ecc9b 100644 --- a/onadata/apps/restservice/tests/viewsets/test_restservicesviewset.py +++ b/onadata/apps/restservice/tests/viewsets/test_restservicesviewset.py @@ -108,7 +108,7 @@ def test_retrieve_textit_services(self): _id = response_data.get("id") - request = self.factory.get("/", **self.extra) + request = self.factory.get("/", data={"xform": self.xform.pk}, **self.extra) response = self.view(request, pk=_id) expected_dict = { "name": "textit", @@ -239,18 +239,18 @@ def test_delete(self): def test_retrieve(self): """Test retrieving a service via API.""" rest = RestService( - name="testservice", service_url="http://serviec.io", xform=self.xform + name="testservice", service_url="http://service.io", xform=self.xform ) rest.save() - request = self.factory.get("/", **self.extra) + request = self.factory.get("/", data={"xform": self.xform.pk}, **self.extra) response = self.view(request, pk=rest.pk) data = { "id": 
rest.pk, "xform": self.xform.pk, "name": "testservice", - "service_url": "http://serviec.io", + "service_url": "http://service.io", "active": True, "inactive_reason": "", } diff --git a/onadata/libs/filters.py b/onadata/libs/filters.py index e210ad658f..22726b4a7d 100644 --- a/onadata/libs/filters.py +++ b/onadata/libs/filters.py @@ -332,9 +332,11 @@ def _add_instance_prefix_to_dataview_filter_kwargs(self, filter_kwargs): return prefixed_filter_kwargs - def _xform_filter(self, request, view, keyword): + def _xform_filter(self, request, view, keyword, queryset=None): """Use XForm permissions""" xform = request.query_params.get("xform") + if xform is None and "xform" in request.data: + xform = request.data.get("xform") dataview = request.query_params.get("dataview") merged_xform = request.query_params.get("merged_xform") filename = request.query_params.get("filename") @@ -344,17 +346,19 @@ def _xform_filter(self, request, view, keyword): if dataview: int_or_parse_error( dataview, - "Invalid value for dataview ID. It must be a positive integer." + "Invalid value for dataview ID. It must be a positive integer.", ) self.dataview = get_object_or_404(DataView, pk=dataview) # filter with fitlered dataset query dataview_kwargs = self._add_instance_prefix_to_dataview_filter_kwargs( - get_filter_kwargs(self.dataview.query)) + get_filter_kwargs(self.dataview.query) + ) xform_qs = XForm.objects.filter(pk=self.dataview.xform.pk) elif merged_xform: int_or_parse_error( merged_xform, - "Invalid value for Merged Dataset ID. It must be a positive integer.") + "Invalid value for Merged Dataset ID. 
It must be a positive integer.", + ) self.merged_xform = get_object_or_404(MergedXForm, pk=merged_xform) xform_qs = self.merged_xform.xforms.all() elif xform: @@ -365,26 +369,41 @@ def _xform_filter(self, request, view, keyword): xform_qs = XForm.objects.filter(pk=self.xform.pk) public_forms = XForm.objects.filter(pk=self.xform.pk, shared_data=True) elif filename: - attachment_id = view.kwargs.get("pk") - attachment = get_object_or_404(Attachment, pk=attachment_id) - self.xform = attachment.instance.xform + attachment = get_object_or_404(Attachment, pk=view.kwargs.get("pk")) + self.xform = ( + attachment.instance.xform + if attachment.xform is None + else attachment.xform + ) xform_qs = XForm.objects.filter(pk=self.xform.pk) public_forms = XForm.objects.filter(pk=self.xform.pk, shared_data=True) else: - xform_qs = XForm.objects.all() + if queryset is not None and "pk" in view.kwargs: + xform_ids = list( + set( + queryset.filter(pk=view.kwargs.get("pk")).values_list( + f"{keyword}", flat=True + ) + ) + ) + xform_qs = XForm.objects.filter(pk__in=xform_ids) + elif queryset is not None and "formid" in view.kwargs: + xform_qs = XForm.objects.filter( + pk=view.kwargs.get("formid"), deleted_at__isnull=True + ) + else: + # No form filter supplied - return empty list. 
+ xform_qs = XForm.objects.none() xform_qs = xform_qs.filter(deleted_at=None) if request.user.is_anonymous: xforms = xform_qs.filter(shared_data=True) else: xforms = super().filter_queryset(request, xform_qs, view) | public_forms - return { - **{f"{keyword}__in": xforms}, - **dataview_kwargs - } + return {**{f"{keyword}__in": xforms}, **dataview_kwargs} def _xform_filter_queryset(self, request, queryset, view, keyword): - kwarg = self._xform_filter(request, view, keyword) + kwarg = self._xform_filter(request, view, keyword, queryset) return queryset.filter(**kwarg) @@ -495,7 +514,7 @@ def filter_queryset(self, request, queryset, view): # generate queries xform_content_type = ContentType.objects.get_for_model(XForm) - xform_kwarg = self._xform_filter(request, view, keyword) + xform_kwarg = self._xform_filter(request, view, keyword, queryset) xform_kwarg["content_type"] = xform_content_type project_content_type = ContentType.objects.get_for_model(Project) @@ -530,16 +549,18 @@ class AttachmentFilter(XFormPermissionFilterMixin, ObjectPermissionsFilter): """Attachment filter.""" def filter_queryset(self, request, queryset, view): - queryset = self._xform_filter_queryset( - request, queryset, view, "instance__xform" - ) + queryset = self._xform_filter_queryset(request, queryset, view, "xform") + xform = getattr(self, "xform", None) # Ensure queryset is filtered by XForm meta permissions - xform_ids = set(queryset.values_list("instance__xform", flat=True)) - for xform_id in xform_ids: - xform = XForm.objects.get(id=xform_id) - user = request.user + if xform is None: + xform_ids = list(set(queryset.values_list("xform", flat=True))) + if xform_ids: + # only the first form xform_ids[0] + xform = XForm.objects.get(pk=xform_ids[0]) + + if xform is not None: queryset = exclude_items_from_queryset_using_xform_meta_perms( - xform, user, queryset + xform, request.user, queryset ) instance_id = request.query_params.get("instance") diff --git a/onadata/libs/permissions.py 
b/onadata/libs/permissions.py index 0913f918fe..00d174a2b7 100644 --- a/onadata/libs/permissions.py +++ b/onadata/libs/permissions.py @@ -12,7 +12,6 @@ import six from guardian.shortcuts import assign_perm, get_perms, get_users_with_perms, remove_perm -from onadata.apps.logger.models.attachment import Attachment from onadata.apps.logger.models.project import ( Project, ProjectGroupObjectPermission, @@ -571,15 +570,17 @@ def get_object_users_with_permissions( except UserProfile.DoesNotExist: profile = UserProfile.objects.create(user=user) - result.append({ - "user": user.username if username else user, - "first_name": user.first_name, - "last_name": user.last_name, - "role": get_role(permissions, obj), - "is_org": is_organization(profile), - "gravatar": profile.gravatar, - "metadata": profile.metadata, - }) + result.append( + { + "user": user.username if username else user, + "first_name": user.first_name, + "last_name": user.last_name, + "role": get_role(permissions, obj), + "is_org": is_organization(profile), + "gravatar": profile.gravatar, + "metadata": profile.metadata, + } + ) return result @@ -615,8 +616,6 @@ def exclude_items_from_queryset_using_xform_meta_perms(xform, user, queryset): ): return queryset if user.has_perm(CAN_VIEW_XFORM_DATA, xform): - if queryset.model is Attachment: - return queryset.exclude(~Q(instance__user=user), instance__xform=xform) return queryset.exclude(~Q(user=user), xform=xform) return queryset.none() diff --git a/onadata/libs/serializers/attachment_serializer.py b/onadata/libs/serializers/attachment_serializer.py index 174fc4ca6f..5b0c72acd6 100644 --- a/onadata/libs/serializers/attachment_serializer.py +++ b/onadata/libs/serializers/attachment_serializer.py @@ -52,7 +52,7 @@ class AttachmentSerializer(serializers.HyperlinkedModelSerializer): download_url = serializers.SerializerMethodField() small_download_url = serializers.SerializerMethodField() medium_download_url = serializers.SerializerMethodField() - xform = 
serializers.ReadOnlyField(source="instance.xform.pk") + xform = serializers.SerializerMethodField() instance = serializers.PrimaryKeyRelatedField(queryset=Instance.objects.all()) filename = serializers.ReadOnlyField(source="media_file.name") @@ -71,6 +71,16 @@ class Meta: ) model = Attachment + @check_obj + def get_xform(self, obj): + """ + Return xform_id - old forms xform id is in submission instance xform_id + """ + if obj.xform is None: + return obj.instance.xform_id + + return obj.xform_id + @check_obj def get_download_url(self, obj): """ diff --git a/onadata/libs/utils/logger_tools.py b/onadata/libs/utils/logger_tools.py index 412af0ac03..e90d745c8a 100644 --- a/onadata/libs/utils/logger_tools.py +++ b/onadata/libs/utils/logger_tools.py @@ -114,9 +114,11 @@ def create_xform_version(xform: XForm, user: User) -> XFormVersion: versioned_xform = XFormVersion.objects.create( xform=xform, xls=xform.xls, - json=xform.json - if isinstance(xform.json, str) - else json.dumps(xform.json), + json=( + xform.json + if isinstance(xform.json, str) + else json.dumps(xform.json) + ), version=xform.version, created_by=user, xml=xform.xml, @@ -421,17 +423,21 @@ def save_attachments(xform, instance, media_files, remove_deleted_media=False): if len(filename) > 100: raise AttachmentNameError(filename) media_in_submission = filename in instance.get_expected_media() or [ - instance.xml.decode("utf-8").find(filename) != -1 - if isinstance(instance.xml, bytes) - else instance.xml.find(filename) != -1 + ( + instance.xml.decode("utf-8").find(filename) != -1 + if isinstance(instance.xml, bytes) + else instance.xml.find(filename) != -1 + ) ] if media_in_submission: Attachment.objects.get_or_create( + xform=xform, instance=instance, media_file=f, mimetype=content_type, name=filename, extension=extension, + user=instance.user, ) if remove_deleted_media: instance.soft_delete_attachments() From 625bac3d2925c79bb059262aa6f4acadbea3fc9e Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Fri, 3 
May 2024 10:30:33 +0300 Subject: [PATCH 197/270] tag release v4.1.0 (#2590) --- CHANGES.rst | 9 +++++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 1300449e89..4bc31351cf 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,15 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v4.1.0(2024-05-03) +------------------ +- Fix API docs not created when building image + `PR #2589 ` + [@kelvin-muchiri] +- Add xform to attachment model + `PR #2587 ` + [@ukanga] + v4.0.1(2024-04-15) ------------------ - When an odk token expires is None, deactivate and replace diff --git a/onadata/__init__.py b/onadata/__init__.py index bfb9f379c4..b5b0237a59 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "4.0.1" +__version__ = "4.1.0" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index e55af82d42..edfc236e5f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 4.0.1 +version = 4.1.0 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From cc67ff9e9a9bb84233e9705fd6e6a7c56044b4e6 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Tue, 7 May 2024 16:24:58 +0300 Subject: [PATCH 198/270] Add support for entities (#2504) * add entities models add EntityList, RegistrationForm, FollowUpForms models add tests upgrade pyxform * add support for publishing xlsform w/entities * fix typo in docstring * handle replacing a form with entities definition * create entities FollowUpForm on publish On publish, we check if a form references a dataset that is an EntityList and create a FollowUpForm object * add test case * add fields version,json to RegistrationForm * add model Entity * save registration form json when creating 
xform * refactor code * update docstring * delete whitespace * add field version to model Entity * update doc string * update doc string * disable registration form if form is replaced with no entities definition * refactor code * refactor code * recreate migrations * add test cases * fix typo * disable previous RegistrationForm if XLSForm entities list_name changes * add test case * refactor tests * enhance test case * suppress lint error suppress imported but unused error * refactor code * fix cyclic-import lint error * add code comment * rename related name follow_up_lis to follow_up_forms * create entity on submission * create followup form MetaData on publish * fix lint error fix reimported / Reimport 'MetaData' * add is_active field to model FollowUpForm * add support for downloading entity list dataset * refactor code * address lint errors address wrong-import-order and line-too-long * add endpoint api/v1/entity-lists * add test case * suppress lint error suppress lint error too-many-ancestors * fix failing tests * add test cases * add test case * fix failing test * save Entity json properties as alias when creating an Entity, replace the fieldname with alias defined in the XLSForm * address lint error address pylint: too-many-lines * update comment * suppress lint error too-many-lines * add endpoint to GET Entities add endpoint /api/v1/entity-lists//entities * optimize performance * reduce migrations * add code comments * remove unrelated changes in migration * use raw string for entity-list url * add attribute to get dataset properties from an EntityList object * export EntityList dataset with the appropriate headers * refactor code * fix lint error fix error variable 'tagName' in function should be lowercase * replace xml.dom.minidom.parseString with defusedxml.minidom.parseString avoid XML attacks by using defusedxml equivalent https://pypi.org/project/defusedxml/#defusedxml-sax * fix bug 'types.UnionType' object has no attribute 'properties' * address 
failing test * fix failing tets * fix failing tests * always create new export for entity list dataset * refactor code * add model GenericExport add model GenericExport that will store data for Entity datasets exports and any other generic export * address lint errors * address lint error Redefining built-in 'object' * fix bug entity dataset export not created * rename methods * fix bug invalid method call * update entity dataset hash on new submission * suppress lint error import-outside-toplevel * fix pylint error fix pylint error cyclic-import / Cyclic import (onadata.apps.logger.models.entity -> onadata.apps.logger.models.registration_form -> onadata.apps.logger.models.entity_list) * suppress lint error suppress error pylint: invalid-name / Variable name "Entity" doesn't conform to '(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$' pattern (col * ignore flake8 warning ignore warning N806 / variable 'Entity' in function should be lowercase * optimize xform manifest hash recalculation for entity datasets * fix lint warning invalid-name * suppress lint warninng * update docstrings * add tests * update docstring * fix typo in comment * enhance docstring * handle some edge cases * fix typo * update code comment * add task to persist cached entities data to database * suppress lint warning invalid-name * suppress lint warning invalid-name * add num_entities in api/v1/entity-list response add num_entities in the response that returns how many entities an EntityList dataset has * refactor code * add EntityList property queried_num_entities * rename EntityList property current_last_update_time * update code comment * update docstring * exclude db query from EntityList last entity update time * fix failing tests * fix failing test * include which EntityList a form contributes to when get list of forms and a single form * add to form list and form responses the EntityLists form consumes Entities * fix failing test * readd attribute is_instance_json_regenerated * fix failing tests 
* add support to filter EntityLists by project id * add fields to forms exposed on endpoint /api/v1/projects * fix error NotImplementedError: .filter_queryset() must be overridden. * add created_at, updated_at to list of EntityLists endpoint * add _id to response at endpoint /api/v1/entity-lists//entities * fix failing test * remove EntityList num of entities cache increment when committing the cache increment to the DB, we cannot guarantee the operation is going to atomic. We have to clear the key from cache and at that same moment, the cache value can be in the process of being updated * fix migration conflict * fix failing tests * fix failing tests * fix failing tests * fix failing tests * fix failing tests * refactor to publish forms in markdown * make use of existing base model * squash entities migrations * create XFormversion when publish markdown during test runs * refactor TestBase * refactor tests * refactor tests * fix failing tests * fix failing tests * fix method resolution order error * fix failing tests * refactor code * remove superflous code * add attributes login_username, login_password to TestAbstractViewSet * fix failing tests * add documentation for Entities * address grammartical error * update entities documentation * remove unused model methods * ignore deleted Entities when getting Entities * refactor code * refactor code * refactor code * refactor code * refactor code * remove entity migrations * re-make entities migration * move entities endpoints to v2 * update entities documenatation * address failing test * add a reverse_code argument for migration 0014_populate_attachment_xform This resolves the error django.db.migrations.exceptions.IrreversibleError when unapplying the migration * resolve typo * add doc string --- docs/entities.md | 147 + onadata/apps/api/tasks.py | 2 +- .../api/tests/views/test_user_permissions.py | 283 +- .../tests/viewsets/test_abstract_viewset.py | 32 +- .../tests/viewsets/test_briefcase_viewset.py | 21 +- 
.../viewsets/test_entity_list_viewset.py | 389 ++ .../apps/api/tests/viewsets/test_ona_api.py | 18 +- .../tests/viewsets/test_project_viewset.py | 70 +- .../tests/viewsets/test_xform_list_viewset.py | 42 + .../viewsets/test_xform_submission_viewset.py | 70 +- .../api/tests/viewsets/test_xform_viewset.py | 3207 +++++++++-------- onadata/apps/api/tools.py | 16 +- onadata/apps/api/urls/v2_urls.py | 2 + .../apps/api/viewsets/entity_list_viewset.py | 84 + onadata/apps/logger/apps.py | 20 + .../0014_populate_attachment_xform.py | 15 +- ..._followupform_registrationform_and_more.py | 200 + onadata/apps/logger/models/__init__.py | 4 + onadata/apps/logger/models/entity.py | 50 + onadata/apps/logger/models/entity_list.py | 60 + onadata/apps/logger/models/follow_up_form.py | 40 + .../apps/logger/models/project_invitation.py | 6 +- .../apps/logger/models/registration_form.py | 69 + onadata/apps/logger/signals.py | 71 +- .../apps/logger/tests/models/test_entity.py | 84 + .../logger/tests/models/test_entity_list.py | 111 + .../tests/models/test_follow_up_form.py | 68 + .../tests/models/test_registration_form.py | 179 + onadata/apps/logger/tests/test_parsing.py | 2 +- onadata/apps/logger/xform_instance_parser.py | 30 +- .../entities/instances/trees_registration.xml | 28 + .../instances/trees_registration_2.xml | 17 + onadata/apps/main/tests/test_base.py | 103 +- onadata/apps/main/tests/test_process.py | 3 +- .../viewer/migrations/0003_genericexport.py | 37 + onadata/apps/viewer/models/data_dictionary.py | 148 +- onadata/apps/viewer/models/export.py | 238 +- onadata/apps/viewer/models/tests/__init__.py | 0 .../models/tests/test_data_dictionary.py | 271 ++ onadata/libs/filters.py | 15 + onadata/libs/models/__init__.py | 1 + onadata/libs/models/base_model.py | 2 + onadata/libs/pagination.py | 5 + onadata/libs/serializers/entity_serializer.py | 129 + .../libs/serializers/project_serializer.py | 42 +- onadata/libs/serializers/xform_serializer.py | 68 +- 
.../libs/tests/models/test_share_project.py | 1 + .../serializers/test_xform_serializer.py | 40 + onadata/libs/tests/utils/test_csv_builder.py | 63 + onadata/libs/tests/utils/test_export_tools.py | 31 +- onadata/libs/tests/utils/test_logger_tools.py | 64 +- onadata/libs/utils/api_export_tools.py | 49 +- onadata/libs/utils/csv_builder.py | 116 +- onadata/libs/utils/export_builder.py | 4 +- onadata/libs/utils/export_tools.py | 146 +- onadata/libs/utils/logger_tools.py | 80 +- 56 files changed, 5240 insertions(+), 1853 deletions(-) create mode 100644 docs/entities.md create mode 100644 onadata/apps/api/tests/viewsets/test_entity_list_viewset.py create mode 100644 onadata/apps/api/viewsets/entity_list_viewset.py create mode 100644 onadata/apps/logger/apps.py create mode 100644 onadata/apps/logger/migrations/0015_entity_entitylist_followupform_registrationform_and_more.py create mode 100644 onadata/apps/logger/models/entity.py create mode 100644 onadata/apps/logger/models/entity_list.py create mode 100644 onadata/apps/logger/models/follow_up_form.py create mode 100644 onadata/apps/logger/models/registration_form.py create mode 100644 onadata/apps/logger/tests/models/test_entity.py create mode 100644 onadata/apps/logger/tests/models/test_entity_list.py create mode 100644 onadata/apps/logger/tests/models/test_follow_up_form.py create mode 100644 onadata/apps/logger/tests/models/test_registration_form.py create mode 100644 onadata/apps/main/tests/fixtures/entities/instances/trees_registration.xml create mode 100644 onadata/apps/main/tests/fixtures/entities/instances/trees_registration_2.xml create mode 100644 onadata/apps/viewer/migrations/0003_genericexport.py create mode 100644 onadata/apps/viewer/models/tests/__init__.py create mode 100644 onadata/apps/viewer/models/tests/test_data_dictionary.py create mode 100644 onadata/libs/serializers/entity_serializer.py diff --git a/docs/entities.md b/docs/entities.md new file mode 100644 index 0000000000..093c4cbdb2 --- /dev/null 
+++ b/docs/entities.md @@ -0,0 +1,147 @@ +# Entities + +Entities allow you to share information between forms, enabling the collection of longitudinal data, management of cases over time, and support for other complex workflows. + +The following endpoints provides access to Entities related data: Where: + +- _Entity_ - Each item that gets managed by an ODK workflow. Entities are automatically created from submissions receieved from a form that contains entity definitions. +- _EntityList_ - a dataset that contains Entities of the same type. + +## Get EntityLists + +`GET /api/v2/entity-lists` + +**Example** + +`curl -X GET https://api.ona.io/api/v2/entity-lists` + +**Response** + +``` +[ + { + "url":"http://testserver/api/v2/entity-lists/9", + "id":9, + "name":"trees", + "project":"http://testserver/api/v1/projects/9", + "public":false, + "date_created":"2024-04-17T11:26:24.630117Z", + "date_modified":"2024-04-17T11:26:25.050823Z", + "num_registration_forms":1, + "num_follow_up_forms":1, + "num_entities":1 + } +] + +``` + +To get EntityLists for a specific project + +`GET /api/v2/entity-lists?project=` + +**Example** + +`curl -X GET https://api.ona.io/api/v2/entity-lists?project=9` + +**Response** + +``` +[ + { + "url":"http://testserver/api/v2/entity-lists/9", + "id":9, + "name":"trees", + "project":"http://testserver/api/v1/projects/9", + "public":false, + "date_created":"2024-04-17T11:26:24.630117Z", + "date_modified":"2024-04-17T11:26:25.050823Z", + "num_registration_forms":1, + "num_follow_up_forms":1, + "num_entities":1 + } +] + +``` + +## Get a single EntityList + +`GET /api/v2/entity-lists/` + +**Example** + +`curl -X GET https://api.ona.io/api/v2/entity-lists/1` + +**Response** + +``` +{ + "id":16, + "name":"trees", + "project":"http://testserver/api/v1/projects/13", + "public":false, + "date_created":"2024-04-17T11:43:08.530848Z", + "date_modified":"2024-04-17T11:43:09.030105Z", + "num_registration_forms":1, + "num_follow_up_forms":1, + "num_entities":1, + 
"registration_forms":[ + { + "title":"Trees registration", + "xform":"http://testserver/api/v1/forms/15", + "id_string":"trees_registration", + "save_to":[ + "geometry", + "species", + "circumference_cm" + ] + } + ], + "follow_up_forms":[ + { + "title":"Trees follow-up", + "xform":"http://testserver/api/v1/forms/16", + "id_string":"trees_follow_up" + } + ] +} +``` + +## Get Entities + +`GET api/v2/entity-lists//entities` + +**Example** + +`curl -X GET https://api.ona.io/api/v2/entity-lists/1/entities` + +**Response** + +``` +[ + { + "_id":3, + "species":"purpleheart", + "_version":"2022110901", + "geometry":"-1.286905 36.772845 0 0", + "formhub/uuid":"d156a2dce4c34751af57f21ef5c4e6cc", + "meta/instanceID":"uuid:9d3f042e-cfec-4d2a-8b5b-212e3b04802b", + "_xform_id_string":"trees_registration", + "circumference_cm":300, + "meta/entity/label":"300cm purpleheart", + "meta/instanceName":"300cm purpleheart" + }, + { + "_id":4, + "species":"wallaba", + "_version":"2022110901", + "geometry":"-1.305796 36.791849 0 0", + "formhub/uuid":"d156a2dce4c34751af57f21ef5c4e6cc", + "intake_notes":"Looks malnourished", + "meta/instanceID":"uuid:648e4106-2224-4bd7-8bf9-859102fc6fae", + "_xform_id_string":"trees_registration", + "circumference_cm":100, + "meta/entity/label":"100cm wallaba", + "meta/instanceName":"100cm wallaba" + } +] +``` diff --git a/onadata/apps/api/tasks.py b/onadata/apps/api/tasks.py index 2b1a815f1b..ddbaca238a 100644 --- a/onadata/apps/api/tasks.py +++ b/onadata/apps/api/tasks.py @@ -20,6 +20,7 @@ from onadata.apps.api import tools from onadata.apps.api.models.organization_profile import OrganizationProfile from onadata.apps.logger.models import Instance, ProjectInvitation, XForm, Project +from onadata.celeryapp import app from onadata.libs.utils.email import send_generic_email from onadata.libs.utils.model_tools import queryset_iterator from onadata.libs.utils.cache_tools import ( @@ -28,7 +29,6 @@ ) from onadata.libs.models.share_project import ShareProject from 
onadata.libs.utils.email import ProjectInvitationEmail -from onadata.celeryapp import app logger = logging.getLogger(__name__) diff --git a/onadata/apps/api/tests/views/test_user_permissions.py b/onadata/apps/api/tests/views/test_user_permissions.py index b0bfc8892f..90fb0924a1 100644 --- a/onadata/apps/api/tests/views/test_user_permissions.py +++ b/onadata/apps/api/tests/views/test_user_permissions.py @@ -7,8 +7,10 @@ from httmock import HTTMock from rest_framework.renderers import JSONRenderer -from onadata.apps.api.tests.viewsets.test_abstract_viewset import \ - TestAbstractViewSet, enketo_urls_mock +from onadata.apps.api.tests.viewsets.test_abstract_viewset import ( + TestAbstractViewSet, + enketo_urls_mock, +) from onadata.apps.api.viewsets.data_viewset import DataViewSet from onadata.apps.api.viewsets.xform_viewset import XFormViewSet from onadata.apps.api.viewsets.project_viewset import ProjectViewSet @@ -22,99 +24,97 @@ def setUp(self): super(self.__class__, self).setUp() def test_can_add_xform_to_other_account(self): - view = XFormViewSet.as_view({ - 'post': 'create' - }) + view = XFormViewSet.as_view({"post": "create"}) data = { - 'owner': 'http://testserver/api/v1/users/bob', - 'public': False, - 'public_data': False, - 'description': u'', - 'downloadable': True, - 'allows_sms': False, - 'encrypted': False, - 'sms_id_string': u'transportation_2011_07_25', - 'id_string': u'transportation_2011_07_25', - 'title': u'transportation_2011_07_25', - 'bamboo_dataset': u'' + "owner": "http://testserver/api/v1/users/bob", + "public": False, + "public_data": False, + "description": "", + "downloadable": True, + "allows_sms": False, + "encrypted": False, + "sms_id_string": "transportation_2011_07_25", + "id_string": "transportation_2011_07_25", + "title": "transportation_2011_07_25", + "bamboo_dataset": "", } path = os.path.join( - settings.PROJECT_ROOT, "apps", "main", "tests", "fixtures", - "transportation", "transportation.xlsx") + settings.PROJECT_ROOT, + "apps", 
+ "main", + "tests", + "fixtures", + "transportation", + "transportation.xlsx", + ) bob = self.user - alice_data = {'username': 'alice', 'email': 'alice@localhost.com'} + alice_data = {"username": "alice", "email": "alice@localhost.com"} self._login_user_and_profile(extra_post_data=alice_data) - with open(path, 'rb') as xls_file: - post_data = {'xls_file': xls_file, 'owner': 'bob'} - request = self.factory.post('/', data=post_data, **self.extra) + with open(path, "rb") as xls_file: + post_data = {"xls_file": xls_file, "owner": "bob"} + request = self.factory.post("/", data=post_data, **self.extra) response = view(request) self.assertEqual(response.status_code, 403) - with open(path, 'rb') as xls_file: - post_data = {'xls_file': xls_file, 'owner': 'bob'} - request = self.factory.post('/', data=post_data, **self.extra) + with open(path, "rb") as xls_file: + post_data = {"xls_file": xls_file, "owner": "bob"} + request = self.factory.post("/", data=post_data, **self.extra) role.ManagerRole.add(self.user, bob.profile) with HTTMock(enketo_urls_mock): response = view(request) self.assertEqual(response.status_code, 201) xform = bob.xforms.all()[0] - data.update({ - 'url': - 'http://testserver/api/v1/forms/%s' % xform.pk - }) + data.update({"url": "http://testserver/api/v1/forms/%s" % xform.pk}) self.assertDictContainsSubset(data, response.data) def test_manager_can_update_xform(self): self._publish_xls_form_to_project() - alice_data = {'username': 'alice', 'email': 'alice@localhost.com'} + alice_data = {"username": "alice", "email": "alice@localhost.com"} self._login_user_and_profile(extra_post_data=alice_data) - view = XFormViewSet.as_view({ - 'put': 'update' - }) - description = 'DESCRIPTION' - request = self.factory.get('/', **self.extra) - xfs = XFormSerializer(instance=self.xform, - context={'request': request}) + view = XFormViewSet.as_view({"put": "update"}) + description = "DESCRIPTION" + request = self.factory.get("/", **self.extra) + xfs = 
XFormSerializer(instance=self.xform, context={"request": request}) data = json.loads(JSONRenderer().render(xfs.data)) - data.update({'public': True, 'description': description}) + data.update({"public": True, "description": description}) self.assertFalse(self.xform.shared) # Remove key:value pairs where the value is None. # More info: https://code.djangoproject.com/ticket/30024 - data.pop('enketo_preview_url') - data.pop('last_submission_time') + data.pop("enketo_preview_url") + data.pop("last_submission_time") + data.pop("contributes_entities_to") + data.pop("consumes_entities_from") - request = self.factory.put('/', data=data, **self.extra) + request = self.factory.put("/", data=data, **self.extra) response = view(request, pk=self.xform.id) self.assertEqual(response.status_code, 404) self.assertFalse(self.xform.shared) role.ManagerRole.add(self.user, self.xform) - request = self.factory.put('/', data=data, **self.extra) + request = self.factory.put("/", data=data, **self.extra) response = view(request, pk=self.xform.id) self.xform.refresh_from_db() self.assertTrue(self.xform.shared) self.assertEqual(self.xform.description, description) - self.assertEqual(response.data['public'], True) - self.assertEqual(response.data['description'], description) + self.assertEqual(response.data["public"], True) + self.assertEqual(response.data["description"], description) def test_manager_can_update_xform_tags(self): self._publish_xls_form_to_project() - alice_data = {'username': 'alice', 'email': 'alice@localhost.com'} + alice_data = {"username": "alice", "email": "alice@localhost.com"} self._login_user_and_profile(extra_post_data=alice_data) - view = XFormViewSet.as_view({ - 'get': 'labels', - 'post': 'labels', - 'delete': 'labels' - }) + view = XFormViewSet.as_view( + {"get": "labels", "post": "labels", "delete": "labels"} + ) formid = self.xform.pk - request = self.factory.get('/', **self.extra) + request = self.factory.get("/", **self.extra) response = view(request, 
pk=formid) self.assertEqual(response.status_code, 404) @@ -123,31 +123,27 @@ def test_manager_can_update_xform_tags(self): self.assertEqual(response.data, []) # add tag "hello" - request = self.factory.post('/', data={"tags": "hello"}, **self.extra) + request = self.factory.post("/", data={"tags": "hello"}, **self.extra) response = view(request, pk=formid) self.assertEqual(response.status_code, 201) - self.assertEqual(response.data, [u'hello']) + self.assertEqual(response.data, ["hello"]) # remove tag "hello" - request = self.factory.delete('/', data={"tags": "hello"}, - **self.extra) - response = view(request, pk=formid, label='hello') + request = self.factory.delete("/", data={"tags": "hello"}, **self.extra) + response = view(request, pk=formid, label="hello") self.assertEqual(response.status_code, 200) self.assertEqual(response.data, []) def test_readonly_role(self): self._publish_xls_form_to_project() self._make_submissions() - view = XFormViewSet.as_view({ - 'get': 'retrieve', - 'put': 'update' - }) - data_view = DataViewSet.as_view({'get': 'list'}) - alice_data = {'username': 'alice', 'email': 'alice@localhost.com'} + view = XFormViewSet.as_view({"get": "retrieve", "put": "update"}) + data_view = DataViewSet.as_view({"get": "list"}) + alice_data = {"username": "alice", "email": "alice@localhost.com"} self._login_user_and_profile(extra_post_data=alice_data) formid = self.xform.pk - request = self.factory.get('/', **self.extra) + request = self.factory.get("/", **self.extra) response = view(request, pk=formid) self.assertEqual(response.status_code, 404) @@ -161,8 +157,8 @@ def test_readonly_role(self): response = data_view(request, pk=formid) self.assertEqual(response.status_code, 200) - data = {'public': True, 'description': "Some description"} - request = self.factory.put('/', data=data, **self.extra) + data = {"public": True, "description": "Some description"} + request = self.factory.put("/", data=data, **self.extra) response = view(request, pk=formid) 
self.assertEqual(response.status_code, 403) @@ -171,31 +167,40 @@ def test_readonly_role_submission_when_requires_auth(self): self.user.profile.require_auth = True self.user.profile.save() - alice_data = {'username': 'alice', 'email': 'alice@localhost.com', - 'password1': 'alice', 'password2': 'alice'} + alice_data = { + "username": "alice", + "email": "alice@localhost.com", + "password1": "alice", + "password2": "alice", + } self._login_user_and_profile(extra_post_data=alice_data) role.ReadOnlyRole.add(self.user, self.xform) - paths = [os.path.join( - self.main_directory, 'fixtures', 'transportation', - 'instances', s, s + '.xml') for s in self.surveys] - auth = DigestAuth('alice', 'alice') - self._make_submission(paths[0], username='bob', auth=auth) + paths = [ + os.path.join( + self.main_directory, + "fixtures", + "transportation", + "instances", + s, + s + ".xml", + ) + for s in self.surveys + ] + auth = DigestAuth("alice", "alice") + self._make_submission(paths[0], username="bob", auth=auth) self.assertEqual(self.response.status_code, 403) def test_data_entry_role(self): self._publish_xls_form_to_project() self._make_submissions() - view = XFormViewSet.as_view({ - 'get': 'retrieve', - 'put': 'update' - }) - data_view = DataViewSet.as_view({'get': 'list'}) - alice_data = {'username': 'alice', 'email': 'alice@localhost.com'} + view = XFormViewSet.as_view({"get": "retrieve", "put": "update"}) + data_view = DataViewSet.as_view({"get": "list"}) + alice_data = {"username": "alice", "email": "alice@localhost.com"} self._login_user_and_profile(extra_post_data=alice_data) formid = self.xform.pk - request = self.factory.get('/', **self.extra) + request = self.factory.get("/", **self.extra) response = view(request, pk=formid) self.assertEqual(response.status_code, 404) @@ -209,8 +214,8 @@ def test_data_entry_role(self): response = data_view(request, pk=formid) self.assertEqual(response.status_code, 200) - data = {'public': True, 'description': "Some description"} - 
request = self.factory.put('/', data=data, **self.extra) + data = {"public": True, "description": "Some description"} + request = self.factory.put("/", data=data, **self.extra) response = view(request, pk=formid) self.assertEqual(response.status_code, 403) @@ -219,31 +224,40 @@ def test_data_entry_role_submission_when_requires_auth(self): self.user.profile.require_auth = True self.user.profile.save() - alice_data = {'username': 'alice', 'email': 'alice@localhost.com', - 'password1': 'alice', 'password2': 'alice'} + alice_data = { + "username": "alice", + "email": "alice@localhost.com", + "password1": "alice", + "password2": "alice", + } self._login_user_and_profile(extra_post_data=alice_data) role.DataEntryRole.add(self.user, self.xform) - paths = [os.path.join( - self.main_directory, 'fixtures', 'transportation', - 'instances', s, s + '.xml') for s in self.surveys] - auth = DigestAuth('alice', 'alice') - self._make_submission(paths[0], username='bob', auth=auth) + paths = [ + os.path.join( + self.main_directory, + "fixtures", + "transportation", + "instances", + s, + s + ".xml", + ) + for s in self.surveys + ] + auth = DigestAuth("alice", "alice") + self._make_submission(paths[0], username="bob", auth=auth) self.assertEqual(self.response.status_code, 201) def test_editor_role(self): self._publish_xls_form_to_project() self._make_submissions() - view = XFormViewSet.as_view({ - 'get': 'retrieve', - 'patch': 'update' - }) - data_view = DataViewSet.as_view({'get': 'list'}) - alice_data = {'username': 'alice', 'email': 'alice@localhost.com'} + view = XFormViewSet.as_view({"get": "retrieve", "patch": "update"}) + data_view = DataViewSet.as_view({"get": "list"}) + alice_data = {"username": "alice", "email": "alice@localhost.com"} self._login_user_and_profile(extra_post_data=alice_data) formid = self.xform.pk - request = self.factory.get('/', **self.extra) + request = self.factory.get("/", **self.extra) response = view(request, pk=formid) 
self.assertEqual(response.status_code, 404) @@ -259,41 +273,52 @@ def test_editor_role(self): def test_editor_role_submission_when_requires_auth(self): self._publish_xls_form_to_project() - paths = [os.path.join( - self.main_directory, 'fixtures', 'transportation', - 'instances_w_uuid', s, s + '.xml') for s in [ - 'transport_2011-07-25_19-05-36', - 'transport_2011-07-25_19-05-36-edited']] + paths = [ + os.path.join( + self.main_directory, + "fixtures", + "transportation", + "instances_w_uuid", + s, + s + ".xml", + ) + for s in [ + "transport_2011-07-25_19-05-36", + "transport_2011-07-25_19-05-36-edited", + ] + ] self._make_submission(paths[0]) self.user.profile.require_auth = True self.user.profile.save() - alice_data = {'username': 'alice', 'email': 'alice@localhost.com', - 'password1': 'alice', 'password2': 'alice'} + alice_data = { + "username": "alice", + "email": "alice@localhost.com", + "password1": "alice", + "password2": "alice", + } self._login_user_and_profile(extra_post_data=alice_data) - auth = DigestAuth('alice', 'alice') - self._make_submission(paths[1], username='bob', auth=auth) + auth = DigestAuth("alice", "alice") + self._make_submission(paths[1], username="bob", auth=auth) self.assertEqual(self.response.status_code, 403) role.EditorRole.add(self.user, self.xform) - self._make_submission(paths[1], username='bob', auth=auth) + self._make_submission(paths[1], username="bob", auth=auth) self.assertEqual(self.response.status_code, 201) def test_owner_role(self): self._publish_xls_form_to_project() self._make_submissions() - view = XFormViewSet.as_view({ - 'get': 'retrieve', - 'put': 'update', - 'delete': 'destroy' - }) - data_view = DataViewSet.as_view({'get': 'list'}) - alice_data = {'username': 'alice', 'email': 'alice@localhost.com'} + view = XFormViewSet.as_view( + {"get": "retrieve", "put": "update", "delete": "destroy"} + ) + data_view = DataViewSet.as_view({"get": "list"}) + alice_data = {"username": "alice", "email": "alice@localhost.com"} 
self._login_user_and_profile(extra_post_data=alice_data) formid = self.xform.pk - request = self.factory.get('/', **self.extra) + request = self.factory.get("/", **self.extra) response = view(request, pk=formid) self.assertEqual(response.status_code, 404) @@ -307,50 +332,46 @@ def test_owner_role(self): response = data_view(request, pk=formid) self.assertEqual(response.status_code, 200) - xfs = XFormSerializer(instance=self.xform, - context={'request': request}) + xfs = XFormSerializer(instance=self.xform, context={"request": request}) data = json.loads(JSONRenderer().render(xfs.data)) - data.update({'public': True, 'description': "Some description"}) + data.update({"public": True, "description": "Some description"}) # Remove key:value pairs where the value is None. # More info: https://code.djangoproject.com/ticket/30024 - data.pop('enketo_preview_url') - data.pop('last_submission_time') + data.pop("enketo_preview_url") + data.pop("last_submission_time") + data.pop("contributes_entities_to") + data.pop("consumes_entities_from") - request = self.factory.put('/', data=data, **self.extra) + request = self.factory.put("/", data=data, **self.extra) response = view(request, pk=formid) self.assertEqual(response.status_code, 200) - request = self.factory.delete('/', **self.extra) + request = self.factory.delete("/", **self.extra) response = view(request, pk=formid) self.assertEqual(response.status_code, 204) def test_org_creator_permissions(self): self._org_create() - self.assertTrue(role.OwnerRole.user_has_role( - self.user, self.organization)) + self.assertTrue(role.OwnerRole.user_has_role(self.user, self.organization)) def test_form_inherits_permision_from_project(self): self._publish_xls_form_to_project() self._make_submissions() - project_view = ProjectViewSet.as_view({ - 'get': 'retrieve' - }) + project_view = ProjectViewSet.as_view({"get": "retrieve"}) - xform_view = XFormViewSet.as_view({ - 'get': 'retrieve' - }) + xform_view = XFormViewSet.as_view({"get": 
"retrieve"}) - data_view = DataViewSet.as_view({'get': 'list'}) + data_view = DataViewSet.as_view({"get": "list"}) - alice_data = {'username': 'alice', 'email': 'alice@localhost.com'} + alice_data = {"username": "alice", "email": "alice@localhost.com"} self._login_user_and_profile(extra_post_data=alice_data) formid = self.xform.pk project_id = self.project.pk - request = self.factory.get('/', **self.extra) + request = self.factory.get("/", **self.extra) response = xform_view(request, pk=formid) self.assertEqual(response.status_code, 404) diff --git a/onadata/apps/api/tests/viewsets/test_abstract_viewset.py b/onadata/apps/api/tests/viewsets/test_abstract_viewset.py index bb635d5993..4164a29f77 100644 --- a/onadata/apps/api/tests/viewsets/test_abstract_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_abstract_viewset.py @@ -29,18 +29,21 @@ from onadata.apps.api.viewsets.project_viewset import ProjectViewSet from onadata.apps.api.viewsets.team_viewset import TeamViewSet from onadata.apps.api.viewsets.widget_viewset import WidgetViewSet -from onadata.apps.logger.models import Attachment, Instance, Project, XForm +from onadata.apps.logger.models import ( + Attachment, + Instance, + Project, + XForm, +) from onadata.apps.logger.models.data_view import DataView from onadata.apps.logger.models.widget import Widget from onadata.apps.logger.views import submission from onadata.apps.logger.xform_instance_parser import clean_and_parse_xml from onadata.apps.main import tests as main_tests +from onadata.apps.main.tests.test_base import TestBase from onadata.apps.main.models import MetaData, UserProfile -from onadata.apps.viewer.models import DataDictionary from onadata.libs.serializers.project_serializer import ProjectSerializer -from onadata.libs.test_utils.pyxform_test_case import PyxformMarkdown from onadata.libs.utils.common_tools import merge_dicts -from onadata.libs.utils.user_auth import get_user_default_project # pylint: disable=invalid-name User = get_user_model() 
@@ -95,7 +98,7 @@ def get_mocked_response_for_file(file_object, filename, status_code=200): # pylint: disable=too-many-instance-attributes -class TestAbstractViewSet(PyxformMarkdown, TestCase): +class TestAbstractViewSet(TestBase, TestCase): """ Base test class for API viewsets. """ @@ -186,6 +189,8 @@ def _login_user_and_profile(self, extra_post_data=None): ) ) self.extra = {"HTTP_AUTHORIZATION": f"Token {self.user.auth_token}"} + self.login_username = self.profile_data["username"] + self.login_password = self.profile_data["password1"] def _org_create(self, org_data=None): org_data = {} if org_data is None else org_data @@ -682,20 +687,3 @@ def _get_request_session_with_auth(self, view, auth, extra=None): request.session = self.client.session return request - - def _publish_markdown(self, md, user, project=None, **kwargs): - kwargs["name"] = "data" - survey = self.md_to_pyxform_survey(md, kwargs=kwargs) - survey["sms_keyword"] = survey["id_string"] - if not project or not hasattr(self, "project"): - project = get_user_default_project(user) - xform = DataDictionary( - created_by=user, - user=user, - xml=survey.to_xml(), - json=survey.to_json(), - project=project, - ) - xform.save() - - return xform diff --git a/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py b/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py index b43bd6b517..1b91199eba 100644 --- a/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_briefcase_viewset.py @@ -14,9 +14,8 @@ from django.utils import timezone from django_digest.test import DigestAuth -from rest_framework.test import APIRequestFactory -from onadata.apps.api.tests.viewsets import test_abstract_viewset +from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet from onadata.apps.api.viewsets.briefcase_viewset import ( BriefcaseViewset, _query_optimization_fence, @@ -33,18 +32,14 @@ def ordered_instances(xform): return 
Instance.objects.filter(xform=xform).order_by("id") -class TestBriefcaseViewSet(test_abstract_viewset.TestAbstractViewSet): +class TestBriefcaseViewSet(TestAbstractViewSet): """ Test BriefcaseViewset """ def setUp(self): - super(test_abstract_viewset.TestAbstractViewSet, self).setUp() - self.factory = APIRequestFactory() - self._login_user_and_profile() - self.login_username = "bob" - self.login_password = "bobbob" - self.maxDiff = None + super().setUp() + self.form_def_path = os.path.join( self.main_directory, "fixtures", "transportation", "transportation.xml" ) @@ -395,10 +390,10 @@ def get_last_index(xform, last_index=None): ) with codecs.open(submission_list_path, encoding="utf-8") as f: expected_submission_list = f.read() - last_expected_submission_list = ( - expected_submission_list - ) = expected_submission_list.replace( - "{{resumptionCursor}}", "%s" % last_index + last_expected_submission_list = expected_submission_list = ( + expected_submission_list.replace( + "{{resumptionCursor}}", "%s" % last_index + ) ) self.assertContains(response, expected_submission_list) last_index += 2 diff --git a/onadata/apps/api/tests/viewsets/test_entity_list_viewset.py b/onadata/apps/api/tests/viewsets/test_entity_list_viewset.py new file mode 100644 index 0000000000..ce6333733a --- /dev/null +++ b/onadata/apps/api/tests/viewsets/test_entity_list_viewset.py @@ -0,0 +1,389 @@ +"""Tests for module onadata.apps.api.viewsets.entity_list_viewset""" + +import json +import os +import sys + +from django.conf import settings +from django.test import override_settings +from django.utils import timezone + +from onadata.apps.api.viewsets.entity_list_viewset import EntityListViewSet +from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet +from onadata.apps.logger.models import Entity, EntityList, Project +from onadata.libs.models.share_project import ShareProject + + +class GetEntityListsTestCase(TestAbstractViewSet): + """Tests for GET all EntityLists""" + 
+ def setUp(self): + super().setUp() + + self.view = EntityListViewSet.as_view({"get": "list"}) + + @override_settings(TIME_ZONE="UTC") + def test_get_all(self): + """GET all EntityLists works""" + # Publish registration form and create "trees" EntityList dataset + self._publish_registration_form(self.user) + # Publish follow up form for "trees" dataset + self._publish_follow_up_form(self.user) + # Make submission on tree_registration form + submission_path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "entities", + "instances", + "trees_registration.xml", + ) + self._make_submission(submission_path) + # Create more EntityLists explicitly + EntityList.objects.create(name="immunization", project=self.project) + EntityList.objects.create(name="savings", project=self.project) + qs = EntityList.objects.all().order_by("pk") + first = qs[0] + second = qs[1] + third = qs[2] + # Make request + request = self.factory.get("/", **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 200) + self.assertIsNotNone(response.get("Cache-Control")) + expected_data = [ + { + "url": f"http://testserver/api/v2/entity-lists/{first.pk}", + "id": first.pk, + "name": "trees", + "project": f"http://testserver/api/v1/projects/{self.project.pk}", + "public": False, + "date_created": first.date_created.isoformat().replace("+00:00", "Z"), + "date_modified": first.date_modified.isoformat().replace("+00:00", "Z"), + "num_registration_forms": 1, + "num_follow_up_forms": 1, + "num_entities": 1, + }, + { + "url": f"http://testserver/api/v2/entity-lists/{second.pk}", + "id": second.pk, + "name": "immunization", + "project": f"http://testserver/api/v1/projects/{self.project.pk}", + "public": False, + "date_created": second.date_created.isoformat().replace("+00:00", "Z"), + "date_modified": second.date_modified.isoformat().replace( + "+00:00", "Z" + ), + "num_registration_forms": 0, + "num_follow_up_forms": 0, + "num_entities": 0, 
+ }, + { + "url": f"http://testserver/api/v2/entity-lists/{third.pk}", + "id": third.pk, + "name": "savings", + "project": f"http://testserver/api/v1/projects/{self.project.pk}", + "public": False, + "date_created": third.date_created.isoformat().replace("+00:00", "Z"), + "date_modified": third.date_modified.isoformat().replace("+00:00", "Z"), + "num_registration_forms": 0, + "num_follow_up_forms": 0, + "num_entities": 0, + }, + ] + self.assertEqual(response.data, expected_data) + + def test_anonymous_user(self): + """Anonymous user can only view EntityLists under public projects""" + # Create public project + public_project = Project.objects.create( + name="public", + shared=True, + created_by=self.user, + organization=self.user, + ) + # Create private project + private_project = Project.objects.create( + name="private", + shared=False, + created_by=self.user, + organization=self.user, + ) + # Create EntityList explicitly + EntityList.objects.create(name="immunization", project=public_project) + EntityList.objects.create(name="savings", project=private_project) + # Make request as anonymous user + request = self.factory.get("/") + response = self.view(request) + self.assertEqual(response.status_code, 200) + self.assertIsNotNone(response.get("Cache-Control")) + self.assertEqual(len(response.data), 1) + first = EntityList.objects.all()[0] + self.assertEqual(response.data[0]["id"], first.pk) + # Logged in user is able to view all + request = self.factory.get("/", **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 200) + self.assertIsNotNone(response.get("Cache-Control")) + self.assertEqual(len(response.data), 2) + + def test_pagination(self): + """Pagination works""" + self._project_create() + EntityList.objects.create(name="dataset_1", project=self.project) + EntityList.objects.create(name="dataset_2", project=self.project) + request = self.factory.get("/", data={"page": 1, "page_size": 1}, **self.extra) + response = 
self.view(request) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 1) + + def test_filtering_by_project(self): + """Filter by project id works""" + self._project_create() + project_2 = Project.objects.create( + name="Other project", + created_by=self.user, + organization=self.user, + ) + EntityList.objects.create(name="dataset_1", project=self.project) + EntityList.objects.create(name="dataset_2", project=project_2) + request = self.factory.get("/", data={"project": project_2.pk}, **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 1) + self.assertEqual(response.data[0]["name"], "dataset_2") + + +@override_settings(TIME_ZONE="UTC") +class GetSingleEntityListTestCase(TestAbstractViewSet): + """Tests for GET single EntityList""" + + def setUp(self): + super().setUp() + + self.view = EntityListViewSet.as_view({"get": "retrieve"}) + # Publish registration form and create "trees" EntityList dataset + self._publish_registration_form(self.user) + # Publish follow up form for "trees" dataset + self._publish_follow_up_form(self.user) + self.entity_list = EntityList.objects.first() + # Make submission on tree_registration form + submission_path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "entities", + "instances", + "trees_registration.xml", + ) + self._make_submission(submission_path) + + def test_get_entity_list(self): + """Returns a single EntityList""" + registration_form = self.entity_list.registration_forms.first() + follow_up_form = self.entity_list.follow_up_forms.first() + request = self.factory.get("/", **self.extra) + response = self.view(request, pk=self.entity_list.pk) + self.assertEqual(response.status_code, 200) + self.assertIsNotNone(response.get("Cache-Control")) + self.entity_list.refresh_from_db() + date_created = self.entity_list.date_created.isoformat().replace("+00:00", "Z") + date_modified = 
self.entity_list.date_modified.isoformat().replace( + "+00:00", "Z" + ) + expected_data = { + "id": self.entity_list.pk, + "name": "trees", + "project": f"http://testserver/api/v1/projects/{self.project.pk}", + "public": False, + "date_created": date_created, + "date_modified": date_modified, + "num_registration_forms": 1, + "num_follow_up_forms": 1, + "num_entities": 1, + "registration_forms": [ + { + "title": "Trees registration", + "xform": f"http://testserver/api/v1/forms/{registration_form.xform.pk}", + "id_string": "trees_registration", + "save_to": [ + "geometry", + "species", + "circumference_cm", + ], + }, + ], + "follow_up_forms": [ + { + "title": "Trees follow-up", + "xform": f"http://testserver/api/v1/forms/{follow_up_form.xform.pk}", + "id_string": "trees_follow_up", + } + ], + } + self.assertEqual(json.dumps(response.data), json.dumps(expected_data)) + + def test_anonymous_user(self): + """Anonymous user cannot view a private EntityList""" + # Anonymous user cannot view private EntityList + request = self.factory.get("/") + response = self.view(request, pk=self.entity_list.pk) + self.assertEqual(response.status_code, 404) + # Anonymous user can view public EntityList + self.project.shared = True + self.project.save() + request = self.factory.get("/") + response = self.view(request, pk=self.entity_list.pk) + self.assertEqual(response.status_code, 200) + + def test_does_not_exist(self): + """Invalid EntityList is handled""" + request = self.factory.get("/", **self.extra) + response = self.view(request, pk=sys.maxsize) + self.assertEqual(response.status_code, 404) + + def test_shared_project(self): + """A user can view a project shared with them""" + alice_data = { + "username": "alice", + "email": "aclie@example.com", + "password1": "password12345", + "password2": "password12345", + "first_name": "Alice", + "last_name": "Hughes", + } + alice_profile = self._create_user_profile(alice_data) + # Share project with Alice + ShareProject(self.project, 
"alice", "readonly-no-download") + extra = {"HTTP_AUTHORIZATION": f"Token {alice_profile.user.auth_token}"} + request = self.factory.get("/", **extra) + response = self.view(request, pk=self.entity_list.pk) + self.assertEqual(response.status_code, 200) + + +class GetEntitiesTestCase(TestAbstractViewSet): + """Tests for GET Entities""" + + def setUp(self): + super().setUp() + + self.view = EntityListViewSet.as_view({"get": "entities"}) + # Publish registration form and create "trees" EntityList dataset + self._publish_registration_form(self.user) + # Publish follow up form for "trees" dataset + self._publish_follow_up_form(self.user) + # Make submissions which will then create Entities + paths = [ + os.path.join( + self.main_directory, + "fixtures", + "entities", + "instances", + "trees_registration.xml", + ), + os.path.join( + self.main_directory, + "fixtures", + "entities", + "instances", + "trees_registration_2.xml", + ), + ] + + for path in paths: + self._make_submission(path) + + self.entity_list = EntityList.objects.first() + entity_qs = Entity.objects.all().order_by("pk") + self.expected_data = [ + { + "formhub/uuid": "d156a2dce4c34751af57f21ef5c4e6cc", + "geometry": "-1.286905 36.772845 0 0", + "species": "purpleheart", + "circumference_cm": 300, + "meta/instanceID": "uuid:9d3f042e-cfec-4d2a-8b5b-212e3b04802b", + "meta/instanceName": "300cm purpleheart", + "meta/entity/label": "300cm purpleheart", + "_xform_id_string": "trees_registration", + "_version": "2022110901", + "_id": entity_qs[0].pk, + }, + { + "formhub/uuid": "d156a2dce4c34751af57f21ef5c4e6cc", + "geometry": "-1.305796 36.791849 0 0", + "species": "wallaba", + "circumference_cm": 100, + "intake_notes": "Looks malnourished", + "meta/instanceID": "uuid:648e4106-2224-4bd7-8bf9-859102fc6fae", + "meta/instanceName": "100cm wallaba", + "meta/entity/label": "100cm wallaba", + "_xform_id_string": "trees_registration", + "_version": "2022110901", + "_id": entity_qs[1].pk, + }, + ] + + def 
test_get_all(self): + """All Entities are returned""" + request = self.factory.get("/", **self.extra) + response = self.view(request, pk=self.entity_list.pk) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.data, self.expected_data) + self.assertIsNotNone(response.get("Cache-Control")) + + def test_anonymous_user(self): + """Anonymous user cannot view Entities for a private EntityList""" + # Anonymous user cannot view private EntityList + request = self.factory.get("/") + response = self.view(request, pk=self.entity_list.pk) + self.assertEqual(response.status_code, 404) + # Anonymous user can view public EntityList + self.project.shared = True + self.project.save() + request = self.factory.get("/") + response = self.view(request, pk=self.entity_list.pk) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.data, self.expected_data) + + def test_shared_project(self): + """A user can view Entities for a project shared with them""" + alice_data = { + "username": "alice", + "email": "aclie@example.com", + "password1": "password12345", + "password2": "password12345", + "first_name": "Alice", + "last_name": "Hughes", + } + alice_profile = self._create_user_profile(alice_data) + # Share project with Alice + ShareProject(self.project, "alice", "readonly-no-download") + extra = {"HTTP_AUTHORIZATION": f"Token {alice_profile.user.auth_token}"} + request = self.factory.get("/", **extra) + response = self.view(request, pk=self.entity_list.pk) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.data, self.expected_data) + + def test_pagination(self): + """Pagination works""" + request = self.factory.get("/", data={"page": 1, "page_size": 1}, **self.extra) + response = self.view(request, pk=self.entity_list.pk) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 1) + self.assertEqual(response.data[0]["meta/entity/label"], "300cm purpleheart") + + def test_deleted_ignored(self): + 
"""Deleted Entities are ignored""" + entity_qs = Entity.objects.all().order_by("pk") + entity_first = entity_qs.first() + entity_first.deleted_at = timezone.now() + entity_first.save() + request = self.factory.get("/", **self.extra) + response = self.view(request, pk=self.entity_list.pk) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.data, [self.expected_data[-1]]) + self.assertIsNotNone(response.get("Cache-Control")) diff --git a/onadata/apps/api/tests/viewsets/test_ona_api.py b/onadata/apps/api/tests/viewsets/test_ona_api.py index e1b218ee55..43ad23c5c7 100644 --- a/onadata/apps/api/tests/viewsets/test_ona_api.py +++ b/onadata/apps/api/tests/viewsets/test_ona_api.py @@ -1,33 +1,31 @@ from django.urls import resolve -from onadata.apps.api.tests.viewsets.test_abstract_viewset import \ - TestAbstractViewSet +from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet from onadata.apps.api.urls.v1_urls import router as v1_router from onadata.apps.api.urls.v2_urls import router as v2_router class TestOnaApi(TestAbstractViewSet): - def test_number_of_v1_viewsets(self): - ''' + """ Counts the number of v1 viewsets for the api django app - ''' + """ view = v1_router.get_api_root_view() - path = '/api/v1/' + path = "/api/v1/" request = self.factory.get(path) request.resolver_match = resolve(path) response = view(request) self.assertEqual(len(response.data), 30) def test_number_of_v2_viewsets(self): - ''' + """ Counts the number of v2 viewsets for the api django app - ''' + """ view = v2_router.get_api_root_view() - path = '/api/v2/' + path = "/api/v2/" request = self.factory.get(path) request.resolver_match = resolve(path) response = view(request) - self.assertEqual(len(response.data), 1) + self.assertEqual(len(response.data), 2) diff --git a/onadata/apps/api/tests/viewsets/test_project_viewset.py b/onadata/apps/api/tests/viewsets/test_project_viewset.py index 1695ac9360..cce5afb707 100644 --- 
a/onadata/apps/api/tests/viewsets/test_project_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_project_viewset.py @@ -142,16 +142,78 @@ def test_publish_xlsform_using_url_upload(self, mock_requests): 1, ) + @override_settings(TIME_ZONE="UTC") def test_projects_list(self): - self._project_create() + self._publish_xls_form_to_project() + self.project.refresh_from_db() request = self.factory.get("/", **self.extra) request.user = self.user response = self.view(request) self.assertNotEqual(response.get("Cache-Control"), None) self.assertEqual(response.status_code, 200) - serializer = BaseProjectSerializer(self.project, context={"request": request}) - - self.assertEqual(response.data, [serializer.data]) + expected_data = [ + OrderedDict( + [ + ("url", f"http://testserver/api/v1/projects/{self.project.pk}"), + ("projectid", self.project.pk), + ("owner", "http://testserver/api/v1/users/bob"), + ("created_by", "http://testserver/api/v1/users/bob"), + ( + "metadata", + { + "category": "governance", + "location": "Naivasha, Kenya", + "description": "Some description", + }, + ), + ("starred", False), + ( + "users", + [ + { + "is_org": False, + "metadata": {}, + "first_name": "Bob", + "last_name": "erama", + "user": "bob", + "role": "owner", + } + ], + ), + ( + "forms", + [ + OrderedDict( + [ + ("name", "transportation_2011_07_25"), + ("formid", self.xform.pk), + ("id_string", "transportation_2011_07_25"), + ("is_merged_dataset", False), + ("contributes_entities_to", None), + ("consumes_entities_from", []), + ] + ) + ], + ), + ("public", False), + ("tags", []), + ("num_datasets", 1), + ("last_submission_date", None), + ("teams", []), + ("name", "demo"), + ( + "date_created", + self.project.date_created.isoformat().replace("+00:00", "Z"), + ), + ( + "date_modified", + self.project.date_modified.isoformat().replace("+00:00", "Z"), + ), + ("deleted_at", None), + ] + ) + ] + self.assertEqual(response.data, expected_data) self.assertIn("created_by", list(response.data[0])) def 
test_projects_list_with_pagination(self): diff --git a/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py index 9869903b09..4d3bb0eafc 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py @@ -20,6 +20,7 @@ PreviewXFormListViewSet, XFormListViewSet, ) +from onadata.apps.logger.models.entity_list import EntityList from onadata.apps.main.models import MetaData from onadata.libs.permissions import DataEntryRole, OwnerRole, ReadOnlyRole @@ -964,6 +965,47 @@ def test_retrieve_xform_media_linked_xform(self): response["Content-Disposition"], "attachment; filename=transportation.csv" ) + def test_retrieve_xform_media_entity_list_dataset(self): + """EntityList dataset is returned""" + # Publish registration form and create "trees" Entitylist dataset + self._publish_registration_form(self.user) + # Make submission to trees_registration form + submission_path = os.path.join( + self.main_directory, + "fixtures", + "entities", + "instances", + "trees_registration.xml", + ) + self._make_submission(submission_path) + entity_list = EntityList.objects.get(name="trees") + metadata = MetaData.objects.create( + content_object=self.xform, + data_type="media", + data_value=f"entity_list {entity_list.pk} {entity_list.name}", + ) + self.view = XFormListViewSet.as_view({"get": "media", "head": "media"}) + request = self.factory.get("/") + response = self.view( + request, pk=self.xform.pk, metadata=metadata.pk, format="csv" + ) + self.assertEqual(response.status_code, 401) + + request = self.factory.head("/") + response = self.view( + request, pk=self.xform.pk, metadata=metadata.pk, format="csv" + ) + auth = DigestAuth("bob", "bobbob") + request = self.factory.get("/") + request.META.update(auth(request.META, response)) + response = self.view( + request, pk=self.xform.pk, metadata=metadata.pk, format="csv" + ) + 
self.assertEqual(response.status_code, 200) + self.assertEqual( + response["Content-Disposition"], "attachment; filename=trees.csv" + ) + def test_retrieve_xform_manifest_linked_form(self): # for linked forms check if manifest media download url for csv # has a group_delimiter param diff --git a/onadata/apps/api/tests/viewsets/test_xform_submission_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_submission_viewset.py index 04c4667931..bb5500f1a6 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_submission_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_submission_viewset.py @@ -20,7 +20,7 @@ add_uuid_to_submission_xml, ) from onadata.apps.api.viewsets.xform_submission_viewset import XFormSubmissionViewSet -from onadata.apps.logger.models import Attachment, Instance, XForm +from onadata.apps.logger.models import Attachment, Instance, XForm, Entity from onadata.apps.restservice.models import RestService from onadata.apps.restservice.services.textit import ServiceDefinition from onadata.libs.permissions import DataEntryRole @@ -1320,3 +1320,71 @@ def test_edit_submission_sent_to_rapidpro(self, mock_send): new_uuid = "6b2cc313-fc09-437e-8139-fcd32f695d41" instance = Instance.objects.get(uuid=new_uuid) mock_send.assert_called_once_with(rest_service.service_url, instance) + + def test_create_entity(self): + """An Entity is created for if the form is a RegistrationForm""" + self.xform = self._publish_registration_form(self.user) + submission_path = os.path.join( + self.main_directory, + "fixtures", + "entities", + "instances", + "trees_registration.xml", + ) + + with open(submission_path, "rb") as sf: + data = {"xml_submission_file": sf} + request = self.factory.post("/submission", data) + response = self.view(request) + self.assertEqual(response.status_code, 401) + auth = DigestAuth("bob", "bobbob") + request.META.update(auth(request.META, response)) + response = self.view(request, username=self.user.username) + self.assertContains(response, 
"Successful submission", status_code=201) + self.assertEqual(Instance.objects.count(), 1) + self.assertEqual(Entity.objects.count(), 1) + instance = Instance.objects.first() + entity = Entity.objects.first() + self.assertEqual(entity.registration_form.xform, self.xform) + self.assertEqual(entity.xml, instance.xml) + expected_json = { + "formhub/uuid": "d156a2dce4c34751af57f21ef5c4e6cc", + "geometry": "-1.286905 36.772845 0 0", + "species": "purpleheart", + "circumference_cm": 300, + "meta/instanceID": "uuid:9d3f042e-cfec-4d2a-8b5b-212e3b04802b", + "meta/instanceName": "300cm purpleheart", + "meta/entity/label": "300cm purpleheart", + "_xform_id_string": "trees_registration", + "_version": "2022110901", + "_id": entity.pk, + } + self.assertEqual(entity.json, expected_json) + self.assertEqual(entity.uuid, "dbee4c32-a922-451c-9df7-42f40bf78f48") + + def test_registration_form_inactive(self): + """When the RegistrationForm is inactive, Entity should not be created""" + self.xform = self._publish_registration_form(self.user) + registration_form = self.xform.registration_forms.first() + # deactivate registration form + registration_form.is_active = False + registration_form.save() + submission_path = os.path.join( + self.main_directory, + "fixtures", + "entities", + "instances", + "trees_registration.xml", + ) + + with open(submission_path, "rb") as sf: + data = {"xml_submission_file": sf} + request = self.factory.post("/submission", data) + response = self.view(request) + self.assertEqual(response.status_code, 401) + auth = DigestAuth("bob", "bobbob") + request.META.update(auth(request.META, response)) + response = self.view(request, username=self.user.username) + self.assertContains(response, "Successful submission", status_code=201) + self.assertEqual(Instance.objects.count(), 1) + self.assertEqual(Entity.objects.count(), 0) diff --git a/onadata/apps/api/tests/viewsets/test_xform_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_viewset.py index 
d125165ef9..a3996d96ed 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_viewset.py @@ -15,7 +15,8 @@ from http.client import BadStatusLine from io import StringIO from unittest.mock import Mock, patch -from xml.dom import Node, minidom +from xml.dom import Node +from defusedxml import minidom from django.conf import settings from django.contrib.contenttypes.models import ContentType @@ -54,7 +55,13 @@ ) from onadata.apps.api.viewsets.project_viewset import ProjectViewSet from onadata.apps.api.viewsets.xform_viewset import XFormViewSet -from onadata.apps.logger.models import Attachment, Instance, Project, XForm +from onadata.apps.logger.models import ( + Attachment, + Instance, + Project, + XForm, + EntityList, +) from onadata.apps.logger.models.xform_version import XFormVersion from onadata.apps.logger.views import delete_xform from onadata.apps.logger.xform_instance_parser import XLSFormError @@ -138,21 +145,17 @@ def _make_submission_over_date_range(self, start, days=1): curr_time += timedelta(days=days) -class TestXFormViewSet(XFormViewSetBaseTestCase): - """Test XFormViewSet""" +class PublishXLSFormTestCase(XFormViewSetBaseTestCase): + """Tests for publishing an XLSForm""" def setUp(self): - super(TestXFormViewSet, self).setUp() - self.view = XFormViewSet.as_view( - { - "get": "list", - } - ) + super().setUp() + + self.view = XFormViewSet.as_view({"post": "create"}) def test_form_publishing_arabic(self): with HTTMock(enketo_mock): xforms = XForm.objects.count() - view = XFormViewSet.as_view({"post": "create"}) path = os.path.join( settings.PROJECT_ROOT, "apps", @@ -164,209 +167,231 @@ def test_form_publishing_arabic(self): with open(path, "rb") as xls_file: post_data = {"xls_file": xls_file} request = self.factory.post("/", data=post_data, **self.extra) - response = view(request) + response = self.view(request) self.assertEqual(xforms + 1, XForm.objects.count()) 
self.assertEqual(response.status_code, 201) - @patch("onadata.apps.api.viewsets.xform_viewset.send_message") - def test_replace_form_with_external_choices(self, mock_send_message): - with HTTMock(enketo_mock): - xls_file_path = os.path.join( + def test_publish_xlsform(self): + with HTTMock(enketo_urls_mock): + data = { + "owner": "http://testserver/api/v1/users/bob", + "public": False, + "public_data": False, + "description": "", + "downloadable": True, + "allows_sms": False, + "encrypted": False, + "sms_id_string": "transportation_2011_07_25", + "id_string": "transportation_2011_07_25", + "title": "transportation_2011_07_25", + "bamboo_dataset": "", + } + path = os.path.join( settings.PROJECT_ROOT, "apps", - "logger", + "main", + "tests", "fixtures", - "external_choice_form_v1.xlsx", + "transportation", + "transportation.xlsx", ) - self._publish_xls_form_to_project(xlsform_path=xls_file_path) - self.assertIsNotNone(self.xform.version) - form_id = self.xform.pk + with open(path, "rb") as xls_file: + post_data = {"xls_file": xls_file} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 201) + xform = self.user.xforms.get(id_string="transportation_2011_07_25") + data.update({"url": "http://testserver/api/v1/forms/%s" % xform.pk}) - self.view = XFormViewSet.as_view( - { - "get": "retrieve", - } - ) + self.assertDictContainsSubset(data, response.data) + self.assertTrue(OwnerRole.user_has_role(self.user, xform)) + self.assertEqual("owner", response.data["users"][0]["role"]) - request = self.factory.get("/", **self.extra) - response = self.view(request, pk=self.xform.id) - self.assertEqual(response.status_code, 200) - self.assertNotEqual(response.get("Cache-Control"), None) + # pylint: disable=no-member + self.assertIsNotNone( + MetaData.objects.get(object_id=xform.id, data_type="enketo_url") + ) + self.assertIsNotNone( + MetaData.objects.get( + object_id=xform.id, 
data_type="enketo_preview_url" + ) + ) - view = XFormViewSet.as_view( - { - "patch": "partial_update", - } - ) + # Ensure XFormVersion object is created on XForm publish + versions_count = XFormVersion.objects.filter(xform=xform).count() + self.assertEqual(versions_count, 1) - xls_file_path = os.path.join( + def test_publish_xlsforms_with_same_id_string(self): + with HTTMock(enketo_urls_mock): + counter = XForm.objects.count() + data = { + "owner": "http://testserver/api/v1/users/bob", + "public": False, + "public_data": False, + "description": "", + "downloadable": True, + "allows_sms": False, + "encrypted": False, + "sms_id_string": "transportation_2011_07_25", + "id_string": "transportation_2011_07_25", + "title": "transportation_2011_07_25", + "bamboo_dataset": "", + } + path = os.path.join( settings.PROJECT_ROOT, "apps", - "logger", + "main", + "tests", "fixtures", - "external_choice_form_v2.xlsx", + "transportation", + "transportation.xlsx", ) - with open(xls_file_path, "rb") as xls_file: + with open(path, "rb") as xls_file: post_data = {"xls_file": xls_file} - request = self.factory.patch("/", data=post_data, **self.extra) - response = view(request, pk=form_id) - self.assertEqual(response.status_code, 200) - # send message upon form update - self.assertTrue(mock_send_message.called) - mock_send_message.assert_called_with( - instance_id=self.xform.id, - target_id=self.xform.id, - target_type=XFORM, - user=request.user, - message_verb=FORM_UPDATED, - ) - - def test_form_publishing_using_invalid_text_xls_form(self): - view = ProjectViewSet.as_view({"post": "forms"}) - self._project_create() - project_id = self.project.pk + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 201) + xform = self.user.xforms.all()[0] + data.update( + { + "url": "http://testserver/api/v1/forms/%s" % xform.pk, + "has_id_string_changed": False, + } + ) + self.assertDictContainsSubset(data, 
response.data) + self.assertTrue(OwnerRole.user_has_role(self.user, xform)) + self.assertEqual("owner", response.data["users"][0]["role"]) - invalid_post_data = { - "downloadable": ["True"], - "text_xls_form": ["invalid data"], - } - request = self.factory.post("/", data=invalid_post_data, **self.extra) - response = view(request, pk=project_id) - self.assertEqual(response.status_code, 400) + # pylint: disable=no-member + self.assertIsNotNone( + MetaData.objects.get(object_id=xform.id, data_type="enketo_url") + ) + self.assertIsNotNone( + MetaData.objects.get( + object_id=xform.id, data_type="enketo_preview_url" + ) + ) - def test_form_publishing_using_text_xls_form(self): - view = ProjectViewSet.as_view({"post": "forms"}) - self._project_create() - project_id = self.project.pk + self.assertEqual(counter + 1, XForm.objects.count()) + path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "transportation", + "transportation_copy.xlsx", + ) - pre_count = XForm.objects.count() - valid_post_data = { - "downloadable": ["True"], - "text_xls_form": [ - ( - "survey\r\n," - "required,type,name,label,calculation\r\n," - "true,text,What_is_your_name,What is your name\r\n," - ",calculate,__version__,,'vbP67kPMwnY8aTFcFHgWMN'\r\n" - "settings\r\n," - "form_title,version,id_string\r\n," - "Demo to Jonathan,vbP67kPMwnY8aTFcFHgWMN," - "afPkTij9pVg8T8c35h3SvS\r\n" + with open(path, "rb") as xls_file: + post_data = {"xls_file": xls_file} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 201) + xform = self.user.xforms.get(id_string="Transportation_2011_07_25_1") + data.update( + { + "url": "http://testserver/api/v1/forms/%s" % xform.pk, + "id_string": "Transportation_2011_07_25_1", + "title": "Transportation_2011_07_25", + "sms_id_string": "Transportation_2011_07_25", + "has_id_string_changed": True, + } ) - ], - } - request = self.factory.post("/", 
data=valid_post_data, **self.extra) - response = view(request, pk=project_id) - self.assertEqual(response.status_code, 201) - self.assertEqual(XForm.objects.count(), pre_count + 1) + self.assertDictContainsSubset(data, response.data) + self.assertTrue(OwnerRole.user_has_role(self.user, xform)) + self.assertEqual("owner", response.data["users"][0]["role"]) - updated_post_data = { - "downloadable": ["True"], - "text_xls_form": [ - ( - "survey\r\n," - "required,type,name,label,calculation\r\n," - "true,text,What_is_your_name,What is your name\r\n," - "true,integer,What_is_your_age,What is your age\r\n," - ",calculate,__version__,,'vB9EtM9inCMPC4qpPcuX3h'\r\n" - "settings\r\n," - "form_title,version,id_string\r\n," - "Demo to Jonathan,vB9EtM9inCMPC4qpPcuX3h," - "afPkTij9pVg8T8c35h3SvS\r\n" + # pylint: disable=no-member + self.assertIsNotNone( + MetaData.objects.get(object_id=xform.id, data_type="enketo_url") + ) + self.assertIsNotNone( + MetaData.objects.get( + object_id=xform.id, data_type="enketo_preview_url" + ) ) - ], - } - xform = XForm.objects.last() - view = XFormViewSet.as_view( - { - "patch": "partial_update", - } - ) - request = self.factory.patch("/", data=updated_post_data, **self.extra) - response = view(request, pk=xform.id) - self.assertEqual(response.status_code, 200) + xform = XForm.objects.get(id_string="transportation_2011_07_25") + self.assertIsInstance(xform, XForm) + self.assertEqual(counter + 2, XForm.objects.count()) - def test_instances_with_geopoints_true_for_instances_with_geopoints(self): + # pylint: disable=invalid-name + @patch("onadata.apps.main.forms.requests") + def test_publish_xlsform_using_url_upload(self, mock_requests): with HTTMock(enketo_mock): - xls_file_path = os.path.join( + xls_url = "https://ona.io/examples/forms/tutorial/form.xlsx" + pre_count = XForm.objects.count() + path = os.path.join( settings.PROJECT_ROOT, "apps", - "logger", + "main", + "tests", "fixtures", - "tutorial", - "tutorial.xlsx", + "transportation", + 
"transportation_different_id_string.xlsx", ) - self._publish_xls_form_to_project(xlsform_path=xls_file_path) + with open(path, "rb") as xls_file: + mock_response = get_mocked_response_for_file( + xls_file, "transportation_different_id_string.xlsx", 200 + ) + mock_requests.head.return_value = mock_response + mock_requests.get.return_value = mock_response - xml_submission_file_path = os.path.join( + post_data = {"xls_url": xls_url} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + + mock_requests.get.assert_called_with(xls_url) + xls_file.close() + + self.assertEqual(response.status_code, 201) + self.assertEqual(XForm.objects.count(), pre_count + 1) + + # pylint: disable=invalid-name + @patch("onadata.apps.main.forms.requests") + def test_publish_xlsform_using_url_with_no_extension(self, mock_requests): + with HTTMock(enketo_mock, xls_url_no_extension_mock): + xls_url = "https://ona.io/examples/forms/tutorial/form" + pre_count = XForm.objects.count() + path = os.path.join( settings.PROJECT_ROOT, "apps", - "logger", + "main", + "tests", "fixtures", - "tutorial", - "instances", - "tutorial_2012-06-27_11-27-53.xml", - ) - - self._make_submission(xml_submission_file_path) - self.xform.refresh_from_db() - - view = XFormViewSet.as_view( - { - "get": "retrieve", - } + "transportation", + "transportation_different_id_string.xlsx", ) - formid = self.xform.pk - request = self.factory.get("/", **self.extra) - response = view(request, pk=formid) - self.assertEqual(response.status_code, 200) - self.assertTrue(response.data.get("instances_with_geopoints")) - Instance.objects.get(xform__id=formid).delete() - request = self.factory.get("/", **self.extra) - response = view(request, pk=formid) - self.assertEqual(response.status_code, 200) - self.assertFalse(response.data.get("instances_with_geopoints")) + with open(path, "rb") as xls_file: + mock_response = get_mocked_response_for_file( + xls_file, "transportation_version.xlsx", 200 + ) 
+ mock_requests.head.return_value = mock_response + mock_requests.get.return_value = mock_response - def test_form_list(self): - with HTTMock(enketo_mock): - self._publish_xls_form_to_project() - request = self.factory.get("/", **self.extra) - response = self.view(request) - self.assertNotEqual(response.get("Cache-Control"), None) + post_data = {"xls_url": xls_url} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) - @override_settings(STREAM_DATA=True) - def test_form_list_stream(self): - view = XFormViewSet.as_view( - { - "get": "list", - } - ) - with HTTMock(enketo_mock): - self._publish_xls_form_to_project() - request = self.factory.get("/", **self.extra) - response = view(request) - self.assertTrue(response.streaming) - streaming_data = json.loads( - "".join([i.decode("utf-8") for i in response.streaming_content]) - ) - self.assertIsInstance(streaming_data, list) - self.assertNotEqual(response.get("Cache-Control"), None) - self.assertEqual(response.status_code, 200) + self.assertEqual(response.status_code, 201, response.data) + self.assertEqual(XForm.objects.count(), pre_count + 1) - def test_form_list_with_pagination(self): - view = XFormViewSet.as_view( - { - "get": "list", - } - ) - with HTTMock(enketo_mock): - self._publish_xls_form_to_project() - form_path = os.path.join( + # pylint: disable=invalid-name + @patch("onadata.apps.main.forms.requests") + def test_publish_xlsform_using_url_content_disposition_attr_jumbled_v1( + self, mock_requests + ): + with HTTMock( + enketo_mock, xls_url_no_extension_mock_content_disposition_attr_jumbled_v1 + ): + xls_url = "https://ona.io/examples/forms/tutorial/form" + pre_count = XForm.objects.count() + path = os.path.join( settings.PROJECT_ROOT, "apps", "main", @@ -375,33 +400,32 @@ def test_form_list_with_pagination(self): "transportation", "transportation_different_id_string.xlsx", ) - self._publish_xls_form_to_project(xlsform_path=form_path) - # no page param no 
pagination - request = self.factory.get("/", **self.extra) - response = view(request) - self.assertNotEqual(response.get("Cache-Control"), None) - self.assertEqual(response.status_code, 200) - self.assertTrue(len(response.data), 2) - # test pagination - request = self.factory.get( - "/", data={"page": 1, "page_size": 1}, **self.extra - ) - response = view(request) - self.assertEqual(response.status_code, 200) - # check that only one form is returned - self.assertEqual(len(response.data), 1) + with open(path, "rb") as xls_file: + mock_response = get_mocked_response_for_file( + xls_file, "transportation_different_id_string.xlsx", 200 + ) + mock_requests.head.return_value = mock_response + mock_requests.get.return_value = mock_response - @override_settings(STREAM_DATA=True) - def test_form_list_stream_with_pagination(self): - view = XFormViewSet.as_view( - { - "get": "list", - } - ) - with HTTMock(enketo_mock): - self._publish_xls_form_to_project() - form_path = os.path.join( + post_data = {"xls_url": xls_url} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + + self.assertEqual(response.status_code, 201) + self.assertEqual(XForm.objects.count(), pre_count + 1) + + # pylint: disable=invalid-name + @patch("onadata.apps.main.forms.requests") + def test_publish_xlsform_using_url_content_disposition_attr_jumbled_v2( + self, mock_requests + ): + with HTTMock( + enketo_mock, xls_url_no_extension_mock_content_disposition_attr_jumbled_v2 + ): + xls_url = "https://ona.io/examples/forms/tutorial/form" + pre_count = XForm.objects.count() + path = os.path.join( settings.PROJECT_ROOT, "apps", "main", @@ -410,314 +434,585 @@ def test_form_list_stream_with_pagination(self): "transportation", "transportation_different_id_string.xlsx", ) - self._publish_xls_form_to_project(xlsform_path=form_path) - # no page param no pagination - request = self.factory.get("/", **self.extra) - response = view(request) - 
self.assertTrue(response.streaming) - streaming_data = json.loads( - "".join([i.decode("utf-8") for i in response.streaming_content]) - ) - self.assertNotEqual(response.get("Cache-Control"), None) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(streaming_data), 2) - # test pagination - request = self.factory.get( - "/", data={"page": 1, "page_size": 1}, **self.extra - ) - response = view(request) - self.assertTrue(response.streaming) - streaming_data = json.loads( - "".join([i.decode("utf-8") for i in response.streaming_content]) - ) - self.assertEqual(response.status_code, 200) - # check that only one form is returned - self.assertEqual(len(streaming_data), 1) + with open(path, "rb") as xls_file: + mock_response = get_mocked_response_for_file( + xls_file, "transportation_different_id_string.xlsx", 200 + ) + mock_requests.head.return_value = mock_response + mock_requests.get.return_value = mock_response - def test_form_list_without_enketo_connection(self): - self._publish_xls_form_to_project() - request = self.factory.get("/", **self.extra) - response = self.view(request) - self.assertNotEqual(response.get("Cache-Control"), None) - self.assertEqual(response.status_code, 200) + post_data = {"xls_url": xls_url} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) - def test_form_list_anon(self): + self.assertEqual(response.status_code, 201) + self.assertEqual(XForm.objects.count(), pre_count + 1) + + # pylint: disable=invalid-name + @patch("onadata.apps.main.forms.requests") + def test_publish_csvform_using_url_upload(self, mock_requests): with HTTMock(enketo_mock): - self._publish_xls_form_to_project() - request = self.factory.get("/") - response = self.view(request) - self.assertEqual(response.status_code, 200) - self.assertNotEqual(response.get("Cache-Control"), None) - self.assertEqual(response.data, []) + csv_url = "https://ona.io/examples/forms/tutorial/form.csv" + pre_count = 
XForm.objects.count() + path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "api", + "tests", + "fixtures", + "text_and_integer.csv", + ) - def test_public_form_list(self): + with open(path, "rb") as csv_file: + mock_response = get_mocked_response_for_file( + csv_file, "text_and_integer.csv", 200 + ) + mock_requests.head.return_value = mock_response + mock_requests.get.return_value = mock_response + + post_data = {"csv_url": csv_url} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + + mock_requests.get.assert_called_with(csv_url) + csv_file.close() + + self.assertEqual(response.status_code, 201) + self.assertEqual(XForm.objects.count(), pre_count + 1) + + # pylint: disable=invalid-name + def test_publish_select_external_xlsform(self): with HTTMock(enketo_urls_mock): - self._publish_xls_form_to_project() - self.view = XFormViewSet.as_view( - { - "get": "retrieve", - } + path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "api", + "tests", + "fixtures", + "select_one_external.xlsx", ) - request = self.factory.get("/", **self.extra) - response = self.view(request, pk="public") - self.assertEqual(response.status_code, 200) - self.assertNotEqual(response.get("Cache-Control"), None) - self.assertEqual(response.data, []) - - # public shared form - self.xform.shared = True - self.xform.save() - response = self.view(request, pk="public") - self.assertNotEqual(response.get("Cache-Control"), None) - self.assertEqual(response.status_code, 200) - self.form_data["public"] = True - # pylint: disable=no-member - resultset = MetaData.objects.filter( - Q(object_id=self.xform.pk), - Q(data_type="enketo_url") - | Q(data_type="enketo_preview_url") - | Q(data_type="enketo_single_submit_url"), + with open(path, "rb") as xls_file: + # pylint: disable=no-member + meta_count = MetaData.objects.count() + post_data = {"xls_file": xls_file} + request = self.factory.post("/", data=post_data, **self.extra) + response = 
self.view(request) + xform = self.user.xforms.all()[0] + self.assertEqual(response.status_code, 201) + self.assertEqual(meta_count + 4, MetaData.objects.count()) + metadata = MetaData.objects.get( + object_id=xform.id, data_value="itemsets.csv" + ) + self.assertIsNotNone(metadata) + self.assertTrue(OwnerRole.user_has_role(self.user, xform)) + self.assertEqual("owner", response.data["users"][0]["role"], self.user) + + def test_publish_csv_with_universal_newline_xlsform(self): + with HTTMock(enketo_mock): + path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "api", + "tests", + "fixtures", + "universal_newline.csv", ) - url = resultset.get(data_type="enketo_url") - preview_url = resultset.get(data_type="enketo_preview_url") - single_submit_url = resultset.get(data_type="enketo_single_submit_url") - self.form_data["metadata"] = [ - OrderedDict( - [ - ("id", url.pk), - ("xform", self.xform.pk), - ("data_value", "https://enketo.ona.io/::YY8M"), - ("data_type", "enketo_url"), - ("data_file", None), - ("extra_data", {}), - ("data_file_type", None), - ("media_url", None), - ("file_hash", None), - ("url", "http://testserver/api/v1/metadata/%s" % url.pk), - ("date_created", url.date_created), - ] - ), - OrderedDict( - [ - ("id", preview_url.pk), - ("xform", self.xform.pk), - ("data_value", "https://enketo.ona.io/preview/::YY8M"), - ("data_type", "enketo_preview_url"), - ("data_file", None), - ("extra_data", {}), - ("data_file_type", None), - ("media_url", None), - ("file_hash", None), - ( - "url", - "http://testserver/api/v1/metadata/%s" % preview_url.pk, - ), - ("date_created", preview_url.date_created), - ] - ), - OrderedDict( - [ - ("id", single_submit_url.pk), - ("xform", self.xform.pk), - ("data_value", "http://enketo.ona.io/single/::XZqoZ94y"), - ("data_type", "enketo_single_submit_url"), - ("data_file", None), - ("extra_data", {}), - ("data_file_type", None), - ("media_url", None), - ("file_hash", None), - ( - "url", - "http://testserver/api/v1/metadata/%s" - % 
single_submit_url.pk, - ), - ("date_created", single_submit_url.date_created), - ] - ), - ] - del self.form_data["date_modified"] - del response.data[0]["date_modified"] + with open(path, encoding="utf-8") as xls_file: + post_data = {"xls_file": xls_file} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 201, response.data) + + def test_publish_xlsform_anon(self): + path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "transportation", + "transportation.xlsx", + ) + username = "Anon" + error_msg = "User with username %s does not exist." % username + with open(path, "rb") as xls_file: + post_data = {"xls_file": xls_file, "owner": username} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 400) + self.assertEqual(response.get("Cache-Control"), None) + self.assertEqual(response.data.get("message"), error_msg) + + def test_publish_invalid_xls_form(self): + path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "transportation", + "transportation.bad_id.xlsx", + ) + with open(path, "rb") as xls_file: + post_data = {"xls_file": xls_file} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 400) + self.assertEqual(response.get("Cache-Control"), None) + error_msg = ( + "In strict mode, the XForm ID must be " + "a valid slug and contain no spaces." + " Please ensure that you have set an" + " id_string in the settings sheet or " + "have modified the filename to not " + "contain any spaces." 
+ ) + self.assertEqual(response.data.get("text"), error_msg) + + path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "transportation", + "transportation_ampersand_in_title.xlsx", + ) + with open(path, "rb") as xls_file: + post_data = {"xls_file": xls_file} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 400) + self.assertEqual(response.get("Cache-Control"), None) + error_msg = ( + "Title shouldn't have any invalid xml characters " "('>' '&' '<')" + ) + self.assertEqual(response.data.get("text"), error_msg) + + def test_publish_invalid_xls_form_no_choices(self): + path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "transportation", + "transportation.no_choices.xlsx", + ) + with open(path, "rb") as xls_file: + post_data = {"xls_file": xls_file} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 400) + self.assertEqual(response.get("Cache-Control"), None) + error_msg = ( + "There should be a choices sheet in this xlsform. " + "Please ensure that the choices sheet has the mandatory columns " + "'list_name', 'name', and 'label'." 
+ ) + self.assertEqual(response.data.get("text"), error_msg) + + def test_upload_xml_form_file(self): + with HTTMock(enketo_mock): + path = os.path.join( + os.path.dirname(__file__), "..", "fixtures", "forms", "contributions" + ) + form_path = os.path.join(path, "contributions.xml") + xforms = XForm.objects.count() + + with open(form_path, encoding="utf-8") as xml_file: + post_data = {"xml_file": xml_file} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + self.assertEqual(xforms + 1, XForm.objects.count()) + self.assertEqual(response.status_code, 201) + + instances_path = os.path.join(path, "instances") + for uuid in os.listdir(instances_path): + s_path = os.path.join(instances_path, uuid, "submission.xml") + self._make_submission(s_path) + xform = XForm.objects.last() + self.assertEqual(xform.instances.count(), 6) + + def test_form_publishing_floip(self): + with HTTMock(enketo_mock): + xforms = XForm.objects.count() + path = os.path.join( + os.path.dirname(__file__), + "../", + "fixtures", + "flow-results-example-1.json", + ) + with open(path, "rb") as xls_file: + post_data = {"floip_file": xls_file} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 201, response.data) + self.assertEqual(xforms + 1, XForm.objects.count()) + + def test_external_choice_integer_name_xlsform(self): + """Test that names with integers are converted to strings""" + with HTTMock(enketo_urls_mock): + path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "api", + "tests", + "fixtures", + "integer_name_test.xlsx", + ) + with open(path, "rb") as xls_file: + # pylint: disable=no-member + meta_count = MetaData.objects.count() + post_data = {"xls_file": xls_file} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + xform = self.user.xforms.all()[0] + self.assertEqual(response.status_code, 201) + 
self.assertEqual(meta_count + 4, MetaData.objects.count()) + metadata = MetaData.objects.get( + object_id=xform.id, data_value="itemsets.csv" + ) + self.assertIsNotNone(metadata) + + csv_reader = csv.reader(codecs.iterdecode(metadata.data_file, "utf-8")) + expected_data = [ + ["list_name", "name", "label", "state", "county"], + ["states", "1", "Texas", "", ""], + ["states", "2", "Washington", "", ""], + ["counties", "b1", "King", "2", ""], + ["counties", "b2", "Pierce", "2", ""], + ["counties", "b3", "King", "1", ""], + ["counties", "b4", "Cameron", "1", ""], + ["cities", "dumont", "Dumont", "1", "b3"], + ["cities", "finney", "Finney", "1", "b3"], + ["cities", "brownsville", "brownsville", "1", "b4"], + ["cities", "harlingen", "harlingen", "1", "b4"], + ["cities", "seattle", "Seattle", "2", "b3"], + ["cities", "redmond", "Redmond", "2", "b3"], + ["cities", "tacoma", "Tacoma", "2", "b2"], + ["cities", "puyallup", "Puyallup", "2", "b2"], + ] + for index, row in enumerate(csv_reader): + self.assertEqual(row, expected_data[index]) + + +class TestXFormViewSet(XFormViewSetBaseTestCase): + """Test XFormViewSet""" + + def setUp(self): + super(TestXFormViewSet, self).setUp() + self.view = XFormViewSet.as_view( + { + "get": "list", + } + ) + + @patch("onadata.apps.api.viewsets.xform_viewset.send_message") + def test_replace_form_with_external_choices(self, mock_send_message): + with HTTMock(enketo_mock): + xls_file_path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "logger", + "fixtures", + "external_choice_form_v1.xlsx", + ) + self._publish_xls_form_to_project(xlsform_path=xls_file_path) + + self.assertIsNotNone(self.xform.version) + form_id = self.xform.pk + + self.view = XFormViewSet.as_view( + { + "get": "retrieve", + } + ) + + request = self.factory.get("/", **self.extra) + response = self.view(request, pk=self.xform.id) + self.assertEqual(response.status_code, 200) + self.assertNotEqual(response.get("Cache-Control"), None) + + view = XFormViewSet.as_view( + { + 
"patch": "partial_update", + } + ) + + xls_file_path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "logger", + "fixtures", + "external_choice_form_v2.xlsx", + ) + with open(xls_file_path, "rb") as xls_file: + post_data = {"xls_file": xls_file} + request = self.factory.patch("/", data=post_data, **self.extra) + response = view(request, pk=form_id) + self.assertEqual(response.status_code, 200) + # send message upon form update + self.assertTrue(mock_send_message.called) + mock_send_message.assert_called_with( + instance_id=self.xform.id, + target_id=self.xform.id, + target_type=XFORM, + user=request.user, + message_verb=FORM_UPDATED, + ) + + def test_form_publishing_using_invalid_text_xls_form(self): + view = ProjectViewSet.as_view({"post": "forms"}) + self._project_create() + project_id = self.project.pk + + invalid_post_data = { + "downloadable": ["True"], + "text_xls_form": ["invalid data"], + } + request = self.factory.post("/", data=invalid_post_data, **self.extra) + response = view(request, pk=project_id) + self.assertEqual(response.status_code, 400) + + def test_form_publishing_using_text_xls_form(self): + view = ProjectViewSet.as_view({"post": "forms"}) + self._project_create() + project_id = self.project.pk + + pre_count = XForm.objects.count() + valid_post_data = { + "downloadable": ["True"], + "text_xls_form": [ + ( + "survey\r\n," + "required,type,name,label,calculation\r\n," + "true,text,What_is_your_name,What is your name\r\n," + ",calculate,__version__,,'vbP67kPMwnY8aTFcFHgWMN'\r\n" + "settings\r\n," + "form_title,version,id_string\r\n," + "Demo to Jonathan,vbP67kPMwnY8aTFcFHgWMN," + "afPkTij9pVg8T8c35h3SvS\r\n" + ) + ], + } + + request = self.factory.post("/", data=valid_post_data, **self.extra) + response = view(request, pk=project_id) + self.assertEqual(response.status_code, 201) + self.assertEqual(XForm.objects.count(), pre_count + 1) + + updated_post_data = { + "downloadable": ["True"], + "text_xls_form": [ + ( + "survey\r\n," + 
"required,type,name,label,calculation\r\n," + "true,text,What_is_your_name,What is your name\r\n," + "true,integer,What_is_your_age,What is your age\r\n," + ",calculate,__version__,,'vB9EtM9inCMPC4qpPcuX3h'\r\n" + "settings\r\n," + "form_title,version,id_string\r\n," + "Demo to Jonathan,vB9EtM9inCMPC4qpPcuX3h," + "afPkTij9pVg8T8c35h3SvS\r\n" + ) + ], + } + + xform = XForm.objects.last() + view = XFormViewSet.as_view( + { + "patch": "partial_update", + } + ) + request = self.factory.patch("/", data=updated_post_data, **self.extra) + response = view(request, pk=xform.id) + self.assertEqual(response.status_code, 200) - del self.form_data["last_updated_at"] - del response.data[0]["last_updated_at"] + def test_instances_with_geopoints_true_for_instances_with_geopoints(self): + with HTTMock(enketo_mock): + xls_file_path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "logger", + "fixtures", + "tutorial", + "tutorial.xlsx", + ) - self.form_data.pop("has_id_string_changed") - self.form_data["metadata"].sort(key=lambda x: x["id"]) - response.data[0]["metadata"].sort(key=lambda x: x["id"]) - self.assertEqual(response.data, [self.form_data]) + self._publish_xls_form_to_project(xlsform_path=xls_file_path) - # public shared form data - self.xform.shared_data = True - self.xform.shared = False - self.xform.save() - response = self.view(request, pk="public") - self.assertEqual(response.status_code, 200) - self.assertNotEqual(response.get("Cache-Control"), None) - self.assertEqual(response.data, []) + xml_submission_file_path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "logger", + "fixtures", + "tutorial", + "instances", + "tutorial_2012-06-27_11-27-53.xml", + ) - def test_form_list_other_user_access(self): - with HTTMock(enketo_urls_mock): - """Test that a different user has no access to bob's form""" - self._publish_xls_form_to_project() - request = self.factory.get("/", **self.extra) - response = self.view(request) - 
self.assertNotEqual(response.get("Cache-Control"), None) - self.assertEqual(response.status_code, 200) + self._make_submission(xml_submission_file_path) + self.xform.refresh_from_db() - # pylint: disable=no-member - resultset = MetaData.objects.filter( - Q(object_id=self.xform.pk), - Q(data_type="enketo_url") | Q(data_type="enketo_preview_url"), - ) - url = resultset.get(data_type="enketo_url") - preview_url = resultset.get(data_type="enketo_preview_url") - self.form_data["metadata"] = [ - { - "id": preview_url.pk, - "xform": self.xform.pk, - "data_value": "https://enketo.ona.io/preview/::YY8M", - "data_type": "enketo_preview_url", - "data_file": None, - "data_file_type": None, - "url": "http://testserver/api/v1/metadata/%s" % preview_url.pk, - "file_hash": None, - "media_url": None, - "date_created": preview_url.date_created, - }, + view = XFormViewSet.as_view( { - "id": url.pk, - "xform": self.xform.pk, - "data_value": "https://enketo.ona.io/::YY8M", - "data_type": "enketo_url", - "data_file": None, - "data_file_type": None, - "url": "http://testserver/api/v1/metadata/%s" % url.pk, - "file_hash": None, - "media_url": None, - "date_created": url.date_created, - }, - ] - - self.assertEqual(response.data.sort(), [self.form_data].sort()) + "get": "retrieve", + } + ) + formid = self.xform.pk + request = self.factory.get("/", **self.extra) + response = view(request, pk=formid) + self.assertEqual(response.status_code, 200) + self.assertTrue(response.data.get("instances_with_geopoints")) - # test with different user - previous_user = self.user - alice_data = {"username": "alice", "email": "alice@localhost.com"} - self._login_user_and_profile(extra_post_data=alice_data) - self.assertEqual(self.user.username, "alice") - self.assertNotEqual(previous_user, self.user) + Instance.objects.get(xform__id=formid).delete() request = self.factory.get("/", **self.extra) - response = self.view(request) + response = view(request, pk=formid) self.assertEqual(response.status_code, 200) - 
self.assertNotEqual(response.get("Cache-Control"), None) - # should be empty - self.assertEqual(response.data, []) + self.assertFalse(response.data.get("instances_with_geopoints")) - def test_form_list_filter_by_user(self): - with HTTMock(enketo_urls_mock): - # publish bob's form + def test_form_list(self): + with HTTMock(enketo_mock): self._publish_xls_form_to_project() - - previous_user = self.user - alice_data = {"username": "alice", "email": "alice@localhost.com"} - self._login_user_and_profile(extra_post_data=alice_data) - self.assertEqual(self.user.username, "alice") - self.assertNotEqual(previous_user, self.user) - - ReadOnlyRole.add(self.user, self.xform) - view = XFormViewSet.as_view({"get": "retrieve"}) - safe_delete("{}{}".format(XFORM_PERMISSIONS_CACHE, self.xform.pk)) request = self.factory.get("/", **self.extra) - response = view(request, pk=self.xform.pk) - bobs_form_data = response.data - form_users = [(u["role"], u["user"]) for u in bobs_form_data["users"]] - self.assertEqual(len(form_users), 2) - self.assertIn(("owner", "bob"), form_users) - self.assertIn(("readonly", "alice"), form_users) + response = self.view(request) + self.assertNotEqual(response.get("Cache-Control"), None) - # publish alice's form + @override_settings(STREAM_DATA=True) + def test_form_list_stream(self): + view = XFormViewSet.as_view( + { + "get": "list", + } + ) + with HTTMock(enketo_mock): self._publish_xls_form_to_project() - request = self.factory.get("/", **self.extra) - response = self.view(request) + response = view(request) + self.assertTrue(response.streaming) + streaming_data = json.loads( + "".join([i.decode("utf-8") for i in response.streaming_content]) + ) + self.assertIsInstance(streaming_data, list) self.assertNotEqual(response.get("Cache-Control"), None) self.assertEqual(response.status_code, 200) - self.form_data.pop("has_id_string_changed") - response_data = sorted(response.data, key=lambda x: x["formid"]) - for k in ["submission_count_for_today", 
"metadata", "form_versions"]: - bobs_form_data.pop(k) - self.form_data.pop(k) - expected_data = [OrderedDict(bobs_form_data), OrderedDict(self.form_data)] - - self.assertTrue(len(response_data), 2) - - # remove date modified and last updated at - for indx in [0, 1]: - response_data[indx].pop("date_modified") - expected_data[indx].pop("date_modified") - response_data[indx].pop("last_updated_at") - expected_data[indx].pop("last_updated_at") - - response_users = sorted( - response_data[0].pop("users"), key=lambda x: x["user"] - ) - expected_users = sorted( - expected_data[0].pop("users"), key=lambda x: x["user"] + def test_form_list_with_pagination(self): + view = XFormViewSet.as_view( + { + "get": "list", + } + ) + with HTTMock(enketo_mock): + self._publish_xls_form_to_project() + form_path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "transportation", + "transportation_different_id_string.xlsx", ) - self.assertEqual(response_data[0], expected_data[0]) - self.assertEqual(response_users, expected_users) - - self.assertEqual(response_data[1], expected_data[1]) - self.assertEqual(response_users, expected_users) - - # apply filter, see only bob's forms - request = self.factory.get("/", data={"owner": "bob"}, **self.extra) - response = self.view(request) + self._publish_xls_form_to_project(xlsform_path=form_path) + # no page param no pagination + request = self.factory.get("/", **self.extra) + response = view(request) self.assertNotEqual(response.get("Cache-Control"), None) self.assertEqual(response.status_code, 200) - self.assertEqual(response.data, [bobs_form_data]) + self.assertTrue(len(response.data), 2) - # apply filter, see only bob's forms, case insensitive - request = self.factory.get("/", data={"owner": "BoB"}, **self.extra) - response = self.view(request) - self.assertNotEqual(response.get("Cache-Control"), None) + # test pagination + request = self.factory.get( + "/", data={"page": 1, "page_size": 1}, **self.extra + ) 
+ response = view(request) self.assertEqual(response.status_code, 200) - self.assertEqual(response.data, [bobs_form_data]) + # check that only one form is returned + self.assertEqual(len(response.data), 1) - # apply filter, see only alice's forms - request = self.factory.get("/", data={"owner": "alice"}, **self.extra) - response = self.view(request) + @override_settings(STREAM_DATA=True) + def test_form_list_stream_with_pagination(self): + view = XFormViewSet.as_view( + { + "get": "list", + } + ) + with HTTMock(enketo_mock): + self._publish_xls_form_to_project() + form_path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "transportation", + "transportation_different_id_string.xlsx", + ) + self._publish_xls_form_to_project(xlsform_path=form_path) + # no page param no pagination + request = self.factory.get("/", **self.extra) + response = view(request) + self.assertTrue(response.streaming) + streaming_data = json.loads( + "".join([i.decode("utf-8") for i in response.streaming_content]) + ) self.assertNotEqual(response.get("Cache-Control"), None) self.assertEqual(response.status_code, 200) + self.assertEqual(len(streaming_data), 2) - # remove date-modified - response.data[0].pop("date_modified") - self.form_data.pop("date_modified") - - # remove last updated at - response.data[0].pop("last_updated_at") - self.form_data.pop("last_updated_at") + # test pagination + request = self.factory.get( + "/", data={"page": 1, "page_size": 1}, **self.extra + ) + response = view(request) + self.assertTrue(response.streaming) + streaming_data = json.loads( + "".join([i.decode("utf-8") for i in response.streaming_content]) + ) + self.assertEqual(response.status_code, 200) + # check that only one form is returned + self.assertEqual(len(streaming_data), 1) - self.assertEqual(response.data, [self.form_data]) + def test_form_list_without_enketo_connection(self): + self._publish_xls_form_to_project() + request = self.factory.get("/", **self.extra) + 
response = self.view(request) + self.assertNotEqual(response.get("Cache-Control"), None) + self.assertEqual(response.status_code, 200) - # apply filter, see a non existent user - request = self.factory.get("/", data={"owner": "noone"}, **self.extra) + def test_form_list_anon(self): + with HTTMock(enketo_mock): + self._publish_xls_form_to_project() + request = self.factory.get("/") response = self.view(request) self.assertEqual(response.status_code, 200) self.assertNotEqual(response.get("Cache-Control"), None) self.assertEqual(response.data, []) - def test_form_get(self): + def test_public_form_list(self): with HTTMock(enketo_urls_mock): - view = XFormViewSet.as_view({"get": "retrieve"}) self._publish_xls_form_to_project() - formid = self.xform.pk + self.view = XFormViewSet.as_view( + { + "get": "retrieve", + } + ) request = self.factory.get("/", **self.extra) - response = view(request, pk=formid) + response = self.view(request, pk="public") + self.assertEqual(response.status_code, 200) + self.assertNotEqual(response.get("Cache-Control"), None) + self.assertEqual(response.data, []) + + # public shared form + self.xform.shared = True + self.xform.save() + response = self.view(request, pk="public") self.assertNotEqual(response.get("Cache-Control"), None) self.assertEqual(response.status_code, 200) + self.form_data["public"] = True # pylint: disable=no-member resultset = MetaData.objects.filter( Q(object_id=self.xform.pk), @@ -728,7 +1023,6 @@ def test_form_get(self): url = resultset.get(data_type="enketo_url") preview_url = resultset.get(data_type="enketo_preview_url") single_submit_url = resultset.get(data_type="enketo_single_submit_url") - self.form_data["metadata"] = [ OrderedDict( [ @@ -780,857 +1074,626 @@ def test_form_get(self): % single_submit_url.pk, ), ("date_created", single_submit_url.date_created), - ] - ), - ] - - self.form_data["metadata"] = sorted( - self.form_data["metadata"], key=lambda x: x["id"] - ) - response.data["metadata"] = sorted( - 
response.data["metadata"], key=lambda x: x["id"] - ) - - # remove date modified - self.form_data.pop("date_modified") - response.data.pop("date_modified") - # remove last updated at - self.form_data.pop("last_updated_at") - response.data.pop("last_updated_at") - - self.form_data.pop("has_id_string_changed") - - self.assertEqual(response.data, self.form_data) - - def test_form_format(self): - with HTTMock(enketo_mock): - self._publish_xls_form_to_project() - view = XFormViewSet.as_view({"get": "form"}) - formid = self.xform.pk - data = { - "name": "data", - "title": "transportation_2011_07_25", - "default_language": "default", - "id_string": "transportation_2011_07_25", - "type": "survey", - } - request = self.factory.get("/", **self.extra) - - # test for unsupported format - response = view(request, pk=formid, format="csvzip") - self.assertEqual(response.status_code, 400) - - # test for supported formats - - # JSON format - response = view(request, pk=formid, format="json") - self.assertEqual(response.status_code, 200) - self.assertNotEqual(response.get("Cache-Control"), None) - self.assertDictContainsSubset(data, response.data) - - # test correct file name - self.assertEqual( - response.get("Content-Disposition"), - "attachment; filename=" + self.xform.id_string + "." + "json", - ) - - # XML format - response = view(request, pk=formid, format="xml") - self.assertEqual(response.status_code, 200) - self.assertNotEqual(response.get("Cache-Control"), None) - response_doc = minidom.parseString(response.data) - - # test correct file name - self.assertEqual( - response.get("Content-Disposition"), - "attachment; filename=" + self.xform.id_string + "." 
+ "xml", - ) - - # XLS format - response = view(request, pk=formid, format="xlsx") - self.assertEqual(response.status_code, 200) - self.assertNotEqual(response.get("Cache-Control"), None) - - # test correct file name - self.assertEqual( - response.get("Content-Disposition"), - "attachment; filename=" + self.xform.id_string + "." + "xlsx", - ) - - xml_path = os.path.join( - settings.PROJECT_ROOT, - "apps", - "main", - "tests", - "fixtures", - "transportation", - "transportation.xml", - ) - with open(xml_path, "rb") as xml_file: - expected_doc = minidom.parse(xml_file) - - model_node = [ - n - for n in response_doc.getElementsByTagName("h:head")[0].childNodes - if n.nodeType == Node.ELEMENT_NODE and n.tagName == "model" - ][0] - - # check for UUID and remove - uuid_nodes = [ - node - for node in model_node.childNodes - if node.nodeType == Node.ELEMENT_NODE - and node.getAttribute("nodeset") == "/data/formhub/uuid" - ] - self.assertEqual(len(uuid_nodes), 1) - uuid_node = uuid_nodes[0] - uuid_node.setAttribute("calculate", "''") - - # check content without UUID - response_xml = response_doc.toxml().replace( - self.xform.version, "201411120717" - ) - self.assertEqual(response_xml, expected_doc.toxml()) - - def test_existing_form_format(self): - with HTTMock(enketo_mock): - self._publish_xls_form_to_project() - view = XFormViewSet.as_view({"get": "form"}) - formid = self.xform.pk - request = self.factory.get("/", **self.extra) - # get existing form format - exsting_format = get_existing_file_format(self.xform.xls, "xls") - - # XLSX format - response = view(request, pk=formid, format="xlsx") - self.assertEqual(response.status_code, 200) - self.assertNotEqual(response.get("Cache-Control"), None) - - # test correct content disposition - # ensure it still maintains the existing form extension - self.assertEqual( - response.get("Content-Disposition"), - "attachment; filename=" + self.xform.id_string + "." 
+ exsting_format, - ) - - # XLS format - response = view(request, pk=formid, format="xls") - self.assertEqual(response.status_code, 200) - self.assertNotEqual(response.get("Cache-Control"), None) - - # test correct content disposition - # ensure it still maintains the existing form extension - self.assertEqual( - response.get("Content-Disposition"), - "attachment; filename=" + self.xform.id_string + "." + exsting_format, - ) - - def test_form_tags(self): - with HTTMock(enketo_mock): - self._publish_xls_form_to_project() - view = XFormViewSet.as_view( - {"get": "labels", "post": "labels", "delete": "labels"} - ) - list_view = XFormViewSet.as_view( - { - "get": "list", - } - ) - formid = self.xform.pk - - # no tags - request = self.factory.get("/", **self.extra) - response = view(request, pk=formid) - self.assertEqual(response.data, []) - - # add tag "hello" - request = self.factory.post("/", data={"tags": "hello"}, **self.extra) - response = view(request, pk=formid) - self.assertEqual(response.status_code, 201) - self.assertEqual(response.data, ["hello"]) - - # check filter by tag - request = self.factory.get("/", data={"tags": "hello"}, **self.extra) - self.form_data = XFormBaseSerializer( - self.xform, context={"request": request} - ).data - response = list_view(request) - self.assertNotEqual(response.get("Cache-Control"), None) - self.assertEqual(response.status_code, 200) - response_data = dict(response.data[0]) - response_data.pop("date_modified") - response_data.pop("last_updated_at") - self.form_data.pop("date_modified") - self.form_data.pop("last_updated_at") - self.assertEqual(response_data, self.form_data) - - request = self.factory.get("/", data={"tags": "goodbye"}, **self.extra) - response = list_view(request, pk=formid) - self.assertEqual(response.status_code, 200) - self.assertNotEqual(response.get("Cache-Control"), None) - self.assertEqual(response.data, []) - - # remove tag "hello" - request = self.factory.delete("/", data={"tags": "hello"}, 
**self.extra) - response = view(request, pk=formid, label="hello") - self.assertEqual(response.status_code, 200) - self.assertEqual(response.get("Cache-Control"), None) - self.assertEqual(response.data, []) - - def test_enketo_url_no_account(self): - with HTTMock(enketo_mock): - self._publish_xls_form_to_project() - view = XFormViewSet.as_view({"get": "enketo"}) - formid = self.xform.pk - # no tags - request = self.factory.get("/", **self.extra) - with HTTMock(enketo_error_mock): - response = view(request, pk=formid) - data = { - "message": "Enketo error: no account exists for this OpenRosa server" - } - - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(response.data, data) - - def test_enketo_url_error500(self): - with HTTMock(enketo_mock): - self._publish_xls_form_to_project() - view = XFormViewSet.as_view({"get": "enketo"}) - formid = self.xform.pk - # no tags - request = self.factory.get("/", **self.extra) - with HTTMock(enketo_error500_mock): - response = view(request, pk=formid) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + ] + ), + ] + del self.form_data["date_modified"] + del response.data[0]["date_modified"] - def test_enketo_url_error502(self): - with HTTMock(enketo_mock): - self._publish_xls_form_to_project() - view = XFormViewSet.as_view({"get": "enketo"}) - formid = self.xform.pk - # no tags - request = self.factory.get("/", **self.extra) - with HTTMock(enketo_error502_mock): - response = view(request, pk=formid) - data = { - "message": "Enketo error: Sorry, we cannot load your form right " - "now. Please try again later." 
- } - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(response.data, data) + del self.form_data["last_updated_at"] + del response.data[0]["last_updated_at"] - @override_settings(TESTING_MODE=False) - def test_enketo_url(self): - """Test functionality to expose enketo urls.""" + self.form_data.pop("has_id_string_changed") + self.form_data["metadata"].sort(key=lambda x: x["id"]) + response.data[0]["metadata"].sort(key=lambda x: x["id"]) + self.assertEqual(response.data, [self.form_data]) + + # public shared form data + self.xform.shared_data = True + self.xform.shared = False + self.xform.save() + response = self.view(request, pk="public") + self.assertEqual(response.status_code, 200) + self.assertNotEqual(response.get("Cache-Control"), None) + self.assertEqual(response.data, []) + + def test_form_list_other_user_access(self): with HTTMock(enketo_urls_mock): + """Test that a different user has no access to bob's form""" self._publish_xls_form_to_project() - view = XFormViewSet.as_view({"get": "enketo"}) - formid = self.xform.pk - # no tags request = self.factory.get("/", **self.extra) - response = view(request, pk=formid) - url = "https://enketo.ona.io/::YY8M" - preview_url = "https://enketo.ona.io/preview/::YY8M" - single_url = "http://enketo.ona.io/single/::XZqoZ94y" - data = { - "enketo_url": url, - "enketo_preview_url": preview_url, - "single_submit_url": single_url, - } - self.assertEqual(response.data, data) + response = self.view(request) + self.assertNotEqual(response.get("Cache-Control"), None) + self.assertEqual(response.status_code, 200) - alice_data = {"username": "alice", "email": "alice@localhost.com"} - alice_profile = self._create_user_profile(alice_data) - credentials = { - "HTTP_AUTHORIZATION": ("Token %s" % alice_profile.user.auth_token) - } - request = self.factory.get("/", **credentials) - response = view(request, pk=formid) - # Alice has no permissions to the form hence no access to web form - 
self.assertEqual(response.status_code, 404) + # pylint: disable=no-member + resultset = MetaData.objects.filter( + Q(object_id=self.xform.pk), + Q(data_type="enketo_url") | Q(data_type="enketo_preview_url"), + ) + url = resultset.get(data_type="enketo_url") + preview_url = resultset.get(data_type="enketo_preview_url") + self.form_data["metadata"] = [ + { + "id": preview_url.pk, + "xform": self.xform.pk, + "data_value": "https://enketo.ona.io/preview/::YY8M", + "data_type": "enketo_preview_url", + "data_file": None, + "data_file_type": None, + "url": "http://testserver/api/v1/metadata/%s" % preview_url.pk, + "file_hash": None, + "media_url": None, + "date_created": preview_url.date_created, + }, + { + "id": url.pk, + "xform": self.xform.pk, + "data_value": "https://enketo.ona.io/::YY8M", + "data_type": "enketo_url", + "data_file": None, + "data_file_type": None, + "url": "http://testserver/api/v1/metadata/%s" % url.pk, + "file_hash": None, + "media_url": None, + "date_created": url.date_created, + }, + ] - # Give Alice read-only permissions to the form - ReadOnlyRole.add(alice_profile.user, self.xform) - response = view(request, pk=formid) - # Alice with read-only access should not have access to web form - self.assertEqual(response.status_code, 404) + self.assertEqual(response.data.sort(), [self.form_data].sort()) - # Give Alice data-entry permissions - DataEntryRole.add(alice_profile.user, self.xform) - response = view(request, pk=formid) - # Alice with data-entry access should have access to web form + # test with different user + previous_user = self.user + alice_data = {"username": "alice", "email": "alice@localhost.com"} + self._login_user_and_profile(extra_post_data=alice_data) + self.assertEqual(self.user.username, "alice") + self.assertNotEqual(previous_user, self.user) + request = self.factory.get("/", **self.extra) + response = self.view(request) self.assertEqual(response.status_code, 200) - self.assertEqual(response.data, data) + 
self.assertNotEqual(response.get("Cache-Control"), None) + # should be empty + self.assertEqual(response.data, []) - def test_get_single_submit_url(self): + def test_form_list_filter_by_user(self): with HTTMock(enketo_urls_mock): + # publish bob's form self._publish_xls_form_to_project() - view = XFormViewSet.as_view({"get": "enketo"}) - formid = self.xform.pk - get_data = {"survey_type": "single"} - request = self.factory.get("/", data=get_data, **self.extra) - response = view(request, pk=formid) - submit_url = "http://enketo.ona.io/single/::XZqoZ94y" - self.assertEqual(response.data["single_submit_url"], submit_url) - - def test_enketo_url_with_default_form_params(self): - with HTTMock(enketo_mock_with_form_defaults): - self._publish_xls_form_to_project() - view = XFormViewSet.as_view({"get": "enketo"}) - formid = self.xform.pk - - get_data = {"num": "1"} - request = self.factory.get("/", data=get_data, **self.extra) - response = view(request, pk=formid) - url = "https://dmfrm.enketo.org/webform?d[%2Fnum]=1" - self.assertEqual(response.data["enketo_url"], url) - - def test_handle_memory_error_on_form_replacement(self): - with HTTMock(enketo_mock): - self._publish_xls_form_to_project() - form_id = self.xform.pk - with patch("onadata.apps.api.tools.QuickConverter.publish") as mock_func: - mock_func.side_effect = MemoryError() - view = XFormViewSet.as_view( - { - "patch": "partial_update", - } - ) + previous_user = self.user + alice_data = {"username": "alice", "email": "alice@localhost.com"} + self._login_user_and_profile(extra_post_data=alice_data) + self.assertEqual(self.user.username, "alice") + self.assertNotEqual(previous_user, self.user) - path = os.path.join( - settings.PROJECT_ROOT, - "apps", - "main", - "tests", - "fixtures", - "transportation", - "transportation_version.xlsx", - ) - with open(path, "rb") as xls_file: - post_data = {"xls_file": xls_file} - request = self.factory.patch("/", data=post_data, **self.extra) - response = view(request, 
pk=form_id) - self.assertEqual(response.status_code, 400) - self.assertEqual( - response.data, - { - "text": ( - "An error occurred while publishing the " - "form. Please try again." - ), - "type": "alert-error", - }, - ) + ReadOnlyRole.add(self.user, self.xform) + view = XFormViewSet.as_view({"get": "retrieve"}) + safe_delete("{}{}".format(XFORM_PERMISSIONS_CACHE, self.xform.pk)) + request = self.factory.get("/", **self.extra) + response = view(request, pk=self.xform.pk) + bobs_form_data = response.data + form_users = [(u["role"], u["user"]) for u in bobs_form_data["users"]] + self.assertEqual(len(form_users), 2) + self.assertIn(("owner", "bob"), form_users) + self.assertIn(("readonly", "alice"), form_users) - def test_enketo_urls_remain_the_same_after_form_replacement(self): - with HTTMock(enketo_mock): + # publish alice's form self._publish_xls_form_to_project() - self.assertIsNotNone(self.xform.version) - version = self.xform.version - form_id = self.xform.pk - id_string = self.xform.id_string - - self.view = XFormViewSet.as_view( - { - "get": "retrieve", - } - ) - request = self.factory.get("/", **self.extra) - response = self.view(request, pk=self.xform.id) - self.assertEqual(response.status_code, 200) + response = self.view(request) self.assertNotEqual(response.get("Cache-Control"), None) + self.assertEqual(response.status_code, 200) - enketo_url = response.data.get("enketo_url") - enketo_preview_url = response.data.get("enketo_preview_url") - - view = XFormViewSet.as_view( - { - "patch": "partial_update", - } - ) - - path = os.path.join( - settings.PROJECT_ROOT, - "apps", - "main", - "tests", - "fixtures", - "transportation", - "transportation_version.xlsx", - ) - with open(path, "rb") as xls_file: - post_data = {"xls_file": xls_file} - request = self.factory.patch("/", data=post_data, **self.extra) - response = view(request, pk=form_id) - self.assertEqual( - response.data.get("enketo_preview_url"), enketo_preview_url - ) - 
self.assertEqual(response.data.get("enketo_url"), enketo_url) - self.assertEqual(response.status_code, 200) - - self.xform.refresh_from_db() - - # diff versions - self.assertNotEqual(version, self.xform.version) - self.assertEqual(form_id, self.xform.pk) - self.assertEqual(id_string, self.xform.id_string) + self.form_data.pop("has_id_string_changed") + response_data = sorted(response.data, key=lambda x: x["formid"]) + for k in ["submission_count_for_today", "metadata", "form_versions"]: + bobs_form_data.pop(k) + self.form_data.pop(k) + expected_data = [OrderedDict(bobs_form_data), OrderedDict(self.form_data)] - def test_xform_hash_changes_after_form_replacement(self): - with HTTMock(enketo_mock): - self._publish_xls_form_to_project() + self.assertTrue(len(response_data), 2) - self.assertIsNotNone(self.xform.version) - form_id = self.xform.pk - xform_old_hash = self.xform.hash + # remove date modified and last updated at + for indx in [0, 1]: + response_data[indx].pop("date_modified") + expected_data[indx].pop("date_modified") + response_data[indx].pop("last_updated_at") + expected_data[indx].pop("last_updated_at") - view = XFormViewSet.as_view( - { - "patch": "partial_update", - } + response_users = sorted( + response_data[0].pop("users"), key=lambda x: x["user"] ) - - path = os.path.join( - settings.PROJECT_ROOT, - "apps", - "main", - "tests", - "fixtures", - "transportation", - "transportation_version.xlsx", + expected_users = sorted( + expected_data[0].pop("users"), key=lambda x: x["user"] ) - with open(path, "rb") as xls_file: - post_data = {"xls_file": xls_file} - request = self.factory.patch("/", data=post_data, **self.extra) - response = view(request, pk=form_id) - self.assertEqual(response.status_code, 200) + self.assertEqual(response_data[0], expected_data[0]) + self.assertEqual(response_users, expected_users) - self.xform.refresh_from_db() - self.assertNotEqual(xform_old_hash, self.xform.hash) + self.assertEqual(response_data[1], expected_data[1]) + 
self.assertEqual(response_users, expected_users) - def test_hash_changes_after_update_xform_xls_file(self): - with HTTMock(enketo_mock): - self._publish_xls_form_to_project() + # apply filter, see only bob's forms + request = self.factory.get("/", data={"owner": "bob"}, **self.extra) + response = self.view(request) + self.assertNotEqual(response.get("Cache-Control"), None) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.data, [bobs_form_data]) - xform_old_hash = self.xform.hash - form_id = self.xform.pk + # apply filter, see only bob's forms, case insensitive + request = self.factory.get("/", data={"owner": "BoB"}, **self.extra) + response = self.view(request) + self.assertNotEqual(response.get("Cache-Control"), None) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.data, [bobs_form_data]) - view = XFormViewSet.as_view( - { - "patch": "partial_update", - } - ) + # apply filter, see only alice's forms + request = self.factory.get("/", data={"owner": "alice"}, **self.extra) + response = self.view(request) + self.assertNotEqual(response.get("Cache-Control"), None) + self.assertEqual(response.status_code, 200) - path = os.path.join( - settings.PROJECT_ROOT, - "apps", - "main", - "tests", - "fixtures", - "transportation", - "transportation_version.xlsx", - ) - with open(path, "rb") as xls_file: - post_data = {"xls_file": xls_file} - request = self.factory.patch("/", data=post_data, **self.extra) - response = view(request, pk=form_id) - self.assertEqual(response.status_code, 200) + # remove date-modified + response.data[0].pop("date_modified") + self.form_data.pop("date_modified") - self.xform.refresh_from_db() - self.assertNotEqual(xform_old_hash, self.xform.hash) + # remove last updated at + response.data[0].pop("last_updated_at") + self.form_data.pop("last_updated_at") - def test_login_enketo_no_redirect(self): - with HTTMock(enketo_urls_mock): - self._publish_xls_form_to_project() - view = 
XFormViewSet.as_view({"get": "login"}) - formid = self.xform.pk - request = self.factory.get("/") - response = view(request, pk=formid) - self.assertEqual( - response.content.decode("utf-8"), - "Authentication failure, cannot redirect", - ) + self.assertEqual(response.data, [self.form_data]) - @override_settings( - ENKETO_CLIENT_LOGIN_URL={ - "*": "http://test.ona.io/login", - "stage-testserver": "http://gh.ij.kl/login", - } - ) - @override_settings(ALLOWED_HOSTS=["*"]) - def test_login_enketo_no_jwt_but_with_return_url(self): + # apply filter, see a non existent user + request = self.factory.get("/", data={"owner": "noone"}, **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 200) + self.assertNotEqual(response.get("Cache-Control"), None) + self.assertEqual(response.data, []) + + def test_form_get(self): with HTTMock(enketo_urls_mock): + view = XFormViewSet.as_view({"get": "retrieve"}) self._publish_xls_form_to_project() - - view = XFormViewSet.as_view({"get": "login"}) - formid = self.xform.pk - url = "https://enketo.ona.io/::YY8M" - query_data = {"return": url} - request = self.factory.get("/", data=query_data) - - # user is redirected to default login page "*" + request = self.factory.get("/", **self.extra) response = view(request, pk=formid) - self.assertTrue(response.url.startswith("http://test.ona.io/login")) - self.assertEqual(response.status_code, 302) + self.assertNotEqual(response.get("Cache-Control"), None) + self.assertEqual(response.status_code, 200) + # pylint: disable=no-member + resultset = MetaData.objects.filter( + Q(object_id=self.xform.pk), + Q(data_type="enketo_url") + | Q(data_type="enketo_preview_url") + | Q(data_type="enketo_single_submit_url"), + ) + url = resultset.get(data_type="enketo_url") + preview_url = resultset.get(data_type="enketo_preview_url") + single_submit_url = resultset.get(data_type="enketo_single_submit_url") - # user is redirected to login page for "stage-testserver" - 
request.META["HTTP_HOST"] = "stage-testserver" - response = view(request, pk=formid) - self.assertTrue(response.url.startswith("http://gh.ij.kl/login")) - self.assertEqual(response.status_code, 302) + self.form_data["metadata"] = [ + OrderedDict( + [ + ("id", url.pk), + ("xform", self.xform.pk), + ("data_value", "https://enketo.ona.io/::YY8M"), + ("data_type", "enketo_url"), + ("data_file", None), + ("extra_data", {}), + ("data_file_type", None), + ("media_url", None), + ("file_hash", None), + ("url", "http://testserver/api/v1/metadata/%s" % url.pk), + ("date_created", url.date_created), + ] + ), + OrderedDict( + [ + ("id", preview_url.pk), + ("xform", self.xform.pk), + ("data_value", "https://enketo.ona.io/preview/::YY8M"), + ("data_type", "enketo_preview_url"), + ("data_file", None), + ("extra_data", {}), + ("data_file_type", None), + ("media_url", None), + ("file_hash", None), + ( + "url", + "http://testserver/api/v1/metadata/%s" % preview_url.pk, + ), + ("date_created", preview_url.date_created), + ] + ), + OrderedDict( + [ + ("id", single_submit_url.pk), + ("xform", self.xform.pk), + ("data_value", "http://enketo.ona.io/single/::XZqoZ94y"), + ("data_type", "enketo_single_submit_url"), + ("data_file", None), + ("extra_data", {}), + ("data_file_type", None), + ("media_url", None), + ("file_hash", None), + ( + "url", + "http://testserver/api/v1/metadata/%s" + % single_submit_url.pk, + ), + ("date_created", single_submit_url.date_created), + ] + ), + ] - @override_settings(JWT_SECRET_KEY=JWT_SECRET_KEY, JWT_ALGORITHM=JWT_ALGORITHM) - def test_login_enketo_online_url_bad_token(self): - with HTTMock(enketo_urls_mock): - self._publish_xls_form_to_project() - view = XFormViewSet.as_view({"get": "login"}) - formid = self.xform.pk - temp_token = "abc" + self.form_data["metadata"] = sorted( + self.form_data["metadata"], key=lambda x: x["id"] + ) + response.data["metadata"] = sorted( + response.data["metadata"], key=lambda x: x["id"] + ) - # do not store temp token + # 
remove date modified + self.form_data.pop("date_modified") + response.data.pop("date_modified") + # remove last updated at + self.form_data.pop("last_updated_at") + response.data.pop("last_updated_at") - url = "https://enketo.ona.io/::YY8M?jwt=%s" % temp_token - query_data = {"return": url} - request = self.factory.get("/", data=query_data) - response = view(request, pk=formid) + self.form_data.pop("has_id_string_changed") - self.assertEqual(response.status_code, 401) - self.assertEqual( - response.data.get("detail"), "JWT DecodeError: Not enough segments" - ) + self.assertEqual(response.data, self.form_data) - @override_settings(JWT_SECRET_KEY=JWT_SECRET_KEY, JWT_ALGORITHM=JWT_ALGORITHM) - def test_login_enketo_offline_url_using_jwt(self): - with HTTMock(enketo_urls_mock): + def test_form_format(self): + with HTTMock(enketo_mock): self._publish_xls_form_to_project() - view = XFormViewSet.as_view({"get": "login"}) + view = XFormViewSet.as_view({"get": "form"}) formid = self.xform.pk - - payload = { - "api-token": self.user.auth_token.key, + data = { + "name": "data", + "title": "transportation_2011_07_25", + "default_language": "default", + "id_string": "transportation_2011_07_25", + "type": "survey", } + request = self.factory.get("/", **self.extra) - encoded_payload = jwt.encode( - payload, JWT_SECRET_KEY, algorithm=JWT_ALGORITHM - ) - - return_url = "https://enketo.ona.io/_/#YY8M" - url = "https://enketo.ona.io/_/?jwt=%s#YY8M" % encoded_payload - - query_data = {"return": url} - request = self.factory.get("/", data=query_data) - response = view(request, pk=formid) - self.assertEqual(response.status_code, 302) - self.assertEqual(response.get("Location"), return_url) + # test for unsupported format + response = view(request, pk=formid, format="csvzip") + self.assertEqual(response.status_code, 400) - @patch("onadata.libs.authentication.EnketoTokenAuthentication.authenticate") - def test_enketo_cookie_authentication_with_invalid_jwt(self, mock_jwt_decode): - 
mock_jwt_decode.side_effect = jwt.DecodeError( - "JWT DecodeError: Not enough segments" - ) + # test for supported formats - with HTTMock(enketo_urls_mock): - with self.assertRaises(jwt.DecodeError): - self._publish_xls_form_to_project() - self.assertTrue(mock_jwt_decode.called) + # JSON format + response = view(request, pk=formid, format="json") + self.assertEqual(response.status_code, 200) + self.assertNotEqual(response.get("Cache-Control"), None) + self.assertDictContainsSubset(data, response.data) - @override_settings(JWT_SECRET_KEY=JWT_SECRET_KEY, JWT_ALGORITHM=JWT_ALGORITHM) - def test_login_enketo_online_url_using_jwt(self): - with HTTMock(enketo_urls_mock): - self._publish_xls_form_to_project() - view = XFormViewSet.as_view({"get": "login"}) - formid = self.xform.pk + # test correct file name + self.assertEqual( + response.get("Content-Disposition"), + "attachment; filename=" + self.xform.id_string + "." + "json", + ) - payload = { - "api-token": self.user.auth_token.key, - } + # XML format + response = view(request, pk=formid, format="xml") + self.assertEqual(response.status_code, 200) + self.assertNotEqual(response.get("Cache-Control"), None) + response_doc = minidom.parseString(response.data) - encoded_payload = jwt.encode( - payload, JWT_SECRET_KEY, algorithm=JWT_ALGORITHM + # test correct file name + self.assertEqual( + response.get("Content-Disposition"), + "attachment; filename=" + self.xform.id_string + "." 
+ "xml", ) - return_url = "https://enketo.ona.io/::YY8M" - url = "%s?jwt=%s" % (return_url, encoded_payload) - query_data = {"return": url} - request = self.factory.get("/", data=query_data) - response = view(request, pk=formid) - self.assertEqual(response.status_code, 302) - self.assertEqual(response.get("Location"), return_url) - cookies = response.cookies - uid_cookie = cookies.get(settings.ENKETO_META_UID_COOKIE)._value - username_cookie = cookies.get(settings.ENKETO_META_USERNAME_COOKIE)._value - # example cookie: bob:1jlVih:i2KvHoAtsQOlYB71CJeNuVUlEY0 - self.assertEqual(username_cookie.split(":")[0], "bob") - self.assertEqual(uid_cookie.split(":")[0], "bob") + # XLS format + response = view(request, pk=formid, format="xlsx") + self.assertEqual(response.status_code, 200) + self.assertNotEqual(response.get("Cache-Control"), None) - def test_publish_xlsform(self): - with HTTMock(enketo_urls_mock): - view = XFormViewSet.as_view({"post": "create"}) - data = { - "owner": "http://testserver/api/v1/users/bob", - "public": False, - "public_data": False, - "description": "", - "downloadable": True, - "allows_sms": False, - "encrypted": False, - "sms_id_string": "transportation_2011_07_25", - "id_string": "transportation_2011_07_25", - "title": "transportation_2011_07_25", - "bamboo_dataset": "", - } - path = os.path.join( + # test correct file name + self.assertEqual( + response.get("Content-Disposition"), + "attachment; filename=" + self.xform.id_string + "." 
+ "xlsx", + ) + + xml_path = os.path.join( settings.PROJECT_ROOT, "apps", "main", "tests", "fixtures", "transportation", - "transportation.xlsx", + "transportation.xml", ) + with open(xml_path, "rb") as xml_file: + expected_doc = minidom.parse(xml_file) - with open(path, "rb") as xls_file: - post_data = {"xls_file": xls_file} - request = self.factory.post("/", data=post_data, **self.extra) - response = view(request) - self.assertEqual(response.status_code, 201) - xform = self.user.xforms.get(id_string="transportation_2011_07_25") - data.update({"url": "http://testserver/api/v1/forms/%s" % xform.pk}) + model_node = [ + n + for n in response_doc.getElementsByTagName("h:head")[0].childNodes + if n.nodeType == Node.ELEMENT_NODE and n.tagName == "model" + ][0] - self.assertDictContainsSubset(data, response.data) - self.assertTrue(OwnerRole.user_has_role(self.user, xform)) - self.assertEqual("owner", response.data["users"][0]["role"]) + # check for UUID and remove + uuid_nodes = [ + node + for node in model_node.childNodes + if node.nodeType == Node.ELEMENT_NODE + and node.getAttribute("nodeset") == "/data/formhub/uuid" + ] + self.assertEqual(len(uuid_nodes), 1) + uuid_node = uuid_nodes[0] + uuid_node.setAttribute("calculate", "''") - # pylint: disable=no-member - self.assertIsNotNone( - MetaData.objects.get(object_id=xform.id, data_type="enketo_url") - ) - self.assertIsNotNone( - MetaData.objects.get( - object_id=xform.id, data_type="enketo_preview_url" - ) - ) + # check content without UUID + response_xml = response_doc.toxml().replace( + self.xform.version, "201411120717" + ) + self.assertEqual(response_xml, expected_doc.toxml()) - # Ensure XFormVersion object is created on XForm publish - versions_count = XFormVersion.objects.filter(xform=xform).count() - self.assertEqual(versions_count, 1) + def test_existing_form_format(self): + with HTTMock(enketo_mock): + self._publish_xls_form_to_project() + view = XFormViewSet.as_view({"get": "form"}) + formid = self.xform.pk + 
request = self.factory.get("/", **self.extra) + # get existing form format + exsting_format = get_existing_file_format(self.xform.xls, "xls") - def test_publish_xlsforms_with_same_id_string(self): - with HTTMock(enketo_urls_mock): - counter = XForm.objects.count() - view = XFormViewSet.as_view({"post": "create"}) - data = { - "owner": "http://testserver/api/v1/users/bob", - "public": False, - "public_data": False, - "description": "", - "downloadable": True, - "allows_sms": False, - "encrypted": False, - "sms_id_string": "transportation_2011_07_25", - "id_string": "transportation_2011_07_25", - "title": "transportation_2011_07_25", - "bamboo_dataset": "", - } - path = os.path.join( - settings.PROJECT_ROOT, - "apps", - "main", - "tests", - "fixtures", - "transportation", - "transportation.xlsx", + # XLSX format + response = view(request, pk=formid, format="xlsx") + self.assertEqual(response.status_code, 200) + self.assertNotEqual(response.get("Cache-Control"), None) + + # test correct content disposition + # ensure it still maintains the existing form extension + self.assertEqual( + response.get("Content-Disposition"), + "attachment; filename=" + self.xform.id_string + "." 
+ exsting_format, ) - with open(path, "rb") as xls_file: - post_data = {"xls_file": xls_file} - request = self.factory.post("/", data=post_data, **self.extra) - response = view(request) - self.assertEqual(response.status_code, 201) - xform = self.user.xforms.all()[0] - data.update( - { - "url": "http://testserver/api/v1/forms/%s" % xform.pk, - "has_id_string_changed": False, - } - ) - self.assertDictContainsSubset(data, response.data) - self.assertTrue(OwnerRole.user_has_role(self.user, xform)) - self.assertEqual("owner", response.data["users"][0]["role"]) - # pylint: disable=no-member - self.assertIsNotNone( - MetaData.objects.get(object_id=xform.id, data_type="enketo_url") - ) - self.assertIsNotNone( - MetaData.objects.get( - object_id=xform.id, data_type="enketo_preview_url" - ) - ) + # XLS format + response = view(request, pk=formid, format="xls") + self.assertEqual(response.status_code, 200) + self.assertNotEqual(response.get("Cache-Control"), None) - self.assertEqual(counter + 1, XForm.objects.count()) - path = os.path.join( - settings.PROJECT_ROOT, - "apps", - "main", - "tests", - "fixtures", - "transportation", - "transportation_copy.xlsx", + # test correct content disposition + # ensure it still maintains the existing form extension + self.assertEqual( + response.get("Content-Disposition"), + "attachment; filename=" + self.xform.id_string + "." 
+ exsting_format, + ) + + def test_form_tags(self): + with HTTMock(enketo_mock): + self._publish_xls_form_to_project() + view = XFormViewSet.as_view( + {"get": "labels", "post": "labels", "delete": "labels"} + ) + list_view = XFormViewSet.as_view( + { + "get": "list", + } ) + formid = self.xform.pk + + # no tags + request = self.factory.get("/", **self.extra) + response = view(request, pk=formid) + self.assertEqual(response.data, []) + + # add tag "hello" + request = self.factory.post("/", data={"tags": "hello"}, **self.extra) + response = view(request, pk=formid) + self.assertEqual(response.status_code, 201) + self.assertEqual(response.data, ["hello"]) + + # check filter by tag + request = self.factory.get("/", data={"tags": "hello"}, **self.extra) + self.form_data = XFormBaseSerializer( + self.xform, context={"request": request} + ).data + response = list_view(request) + self.assertNotEqual(response.get("Cache-Control"), None) + self.assertEqual(response.status_code, 200) + response_data = dict(response.data[0]) + response_data.pop("date_modified") + response_data.pop("last_updated_at") + self.form_data.pop("date_modified") + self.form_data.pop("last_updated_at") + self.assertEqual(response_data, self.form_data) + + request = self.factory.get("/", data={"tags": "goodbye"}, **self.extra) + response = list_view(request, pk=formid) + self.assertEqual(response.status_code, 200) + self.assertNotEqual(response.get("Cache-Control"), None) + self.assertEqual(response.data, []) - with open(path, "rb") as xls_file: - post_data = {"xls_file": xls_file} - request = self.factory.post("/", data=post_data, **self.extra) - response = view(request) - self.assertEqual(response.status_code, 201) - xform = self.user.xforms.get(id_string="Transportation_2011_07_25_1") - data.update( - { - "url": "http://testserver/api/v1/forms/%s" % xform.pk, - "id_string": "Transportation_2011_07_25_1", - "title": "Transportation_2011_07_25", - "sms_id_string": "Transportation_2011_07_25", - 
"has_id_string_changed": True, - } - ) + # remove tag "hello" + request = self.factory.delete("/", data={"tags": "hello"}, **self.extra) + response = view(request, pk=formid, label="hello") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.get("Cache-Control"), None) + self.assertEqual(response.data, []) - self.assertDictContainsSubset(data, response.data) - self.assertTrue(OwnerRole.user_has_role(self.user, xform)) - self.assertEqual("owner", response.data["users"][0]["role"]) + def test_enketo_url_no_account(self): + with HTTMock(enketo_mock): + self._publish_xls_form_to_project() + view = XFormViewSet.as_view({"get": "enketo"}) + formid = self.xform.pk + # no tags + request = self.factory.get("/", **self.extra) + with HTTMock(enketo_error_mock): + response = view(request, pk=formid) + data = { + "message": "Enketo error: no account exists for this OpenRosa server" + } - # pylint: disable=no-member - self.assertIsNotNone( - MetaData.objects.get(object_id=xform.id, data_type="enketo_url") - ) - self.assertIsNotNone( - MetaData.objects.get( - object_id=xform.id, data_type="enketo_preview_url" - ) - ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(response.data, data) - xform = XForm.objects.get(id_string="transportation_2011_07_25") - self.assertIsInstance(xform, XForm) - self.assertEqual(counter + 2, XForm.objects.count()) + def test_enketo_url_error500(self): + with HTTMock(enketo_mock): + self._publish_xls_form_to_project() + view = XFormViewSet.as_view({"get": "enketo"}) + formid = self.xform.pk + # no tags + request = self.factory.get("/", **self.extra) + with HTTMock(enketo_error500_mock): + response = view(request, pk=formid) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - # pylint: disable=invalid-name - @patch("onadata.apps.main.forms.requests") - def test_publish_xlsform_using_url_upload(self, mock_requests): + def test_enketo_url_error502(self): with 
HTTMock(enketo_mock): - view = XFormViewSet.as_view({"post": "create"}) + self._publish_xls_form_to_project() + view = XFormViewSet.as_view({"get": "enketo"}) + formid = self.xform.pk + # no tags + request = self.factory.get("/", **self.extra) + with HTTMock(enketo_error502_mock): + response = view(request, pk=formid) + data = { + "message": "Enketo error: Sorry, we cannot load your form right " + "now. Please try again later." + } + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(response.data, data) - xls_url = "https://ona.io/examples/forms/tutorial/form.xlsx" - pre_count = XForm.objects.count() - path = os.path.join( - settings.PROJECT_ROOT, - "apps", - "main", - "tests", - "fixtures", - "transportation", - "transportation_different_id_string.xlsx", - ) + @override_settings(TESTING_MODE=False) + def test_enketo_url(self): + """Test functionality to expose enketo urls.""" + with HTTMock(enketo_urls_mock): + self._publish_xls_form_to_project() + view = XFormViewSet.as_view({"get": "enketo"}) + formid = self.xform.pk + # no tags + request = self.factory.get("/", **self.extra) + response = view(request, pk=formid) + url = "https://enketo.ona.io/::YY8M" + preview_url = "https://enketo.ona.io/preview/::YY8M" + single_url = "http://enketo.ona.io/single/::XZqoZ94y" + data = { + "enketo_url": url, + "enketo_preview_url": preview_url, + "single_submit_url": single_url, + } + self.assertEqual(response.data, data) - with open(path, "rb") as xls_file: - mock_response = get_mocked_response_for_file( - xls_file, "transportation_different_id_string.xlsx", 200 - ) - mock_requests.head.return_value = mock_response - mock_requests.get.return_value = mock_response + alice_data = {"username": "alice", "email": "alice@localhost.com"} + alice_profile = self._create_user_profile(alice_data) + credentials = { + "HTTP_AUTHORIZATION": ("Token %s" % alice_profile.user.auth_token) + } + request = self.factory.get("/", **credentials) + response = 
view(request, pk=formid) + # Alice has no permissions to the form hence no access to web form + self.assertEqual(response.status_code, 404) - post_data = {"xls_url": xls_url} - request = self.factory.post("/", data=post_data, **self.extra) - response = view(request) + # Give Alice read-only permissions to the form + ReadOnlyRole.add(alice_profile.user, self.xform) + response = view(request, pk=formid) + # Alice with read-only access should not have access to web form + self.assertEqual(response.status_code, 404) - mock_requests.get.assert_called_with(xls_url) - xls_file.close() + # Give Alice data-entry permissions + DataEntryRole.add(alice_profile.user, self.xform) + response = view(request, pk=formid) + # Alice with data-entry access should have access to web form + self.assertEqual(response.status_code, 200) + self.assertEqual(response.data, data) - self.assertEqual(response.status_code, 201) - self.assertEqual(XForm.objects.count(), pre_count + 1) + def test_get_single_submit_url(self): + with HTTMock(enketo_urls_mock): + self._publish_xls_form_to_project() + view = XFormViewSet.as_view({"get": "enketo"}) + formid = self.xform.pk + get_data = {"survey_type": "single"} + request = self.factory.get("/", data=get_data, **self.extra) + response = view(request, pk=formid) + submit_url = "http://enketo.ona.io/single/::XZqoZ94y" + self.assertEqual(response.data["single_submit_url"], submit_url) - # pylint: disable=invalid-name - @patch("onadata.apps.main.forms.requests") - def test_publish_xlsform_using_url_with_no_extension(self, mock_requests): - with HTTMock(enketo_mock, xls_url_no_extension_mock): - view = XFormViewSet.as_view({"post": "create"}) + def test_enketo_url_with_default_form_params(self): + with HTTMock(enketo_mock_with_form_defaults): + self._publish_xls_form_to_project() + view = XFormViewSet.as_view({"get": "enketo"}) + formid = self.xform.pk - xls_url = "https://ona.io/examples/forms/tutorial/form" - pre_count = XForm.objects.count() - path = 
os.path.join( - settings.PROJECT_ROOT, - "apps", - "main", - "tests", - "fixtures", - "transportation", - "transportation_different_id_string.xlsx", - ) + get_data = {"num": "1"} + request = self.factory.get("/", data=get_data, **self.extra) + response = view(request, pk=formid) + url = "https://dmfrm.enketo.org/webform?d[%2Fnum]=1" + self.assertEqual(response.data["enketo_url"], url) - with open(path, "rb") as xls_file: - mock_response = get_mocked_response_for_file( - xls_file, "transportation_version.xlsx", 200 + def test_handle_memory_error_on_form_replacement(self): + with HTTMock(enketo_mock): + self._publish_xls_form_to_project() + form_id = self.xform.pk + + with patch("onadata.apps.api.tools.QuickConverter.publish") as mock_func: + mock_func.side_effect = MemoryError() + view = XFormViewSet.as_view( + { + "patch": "partial_update", + } ) - mock_requests.head.return_value = mock_response - mock_requests.get.return_value = mock_response - post_data = {"xls_url": xls_url} - request = self.factory.post("/", data=post_data, **self.extra) - response = view(request) + path = os.path.join( + settings.PROJECT_ROOT, + "apps", + "main", + "tests", + "fixtures", + "transportation", + "transportation_version.xlsx", + ) + with open(path, "rb") as xls_file: + post_data = {"xls_file": xls_file} + request = self.factory.patch("/", data=post_data, **self.extra) + response = view(request, pk=form_id) + self.assertEqual(response.status_code, 400) + self.assertEqual( + response.data, + { + "text": ( + "An error occurred while publishing the " + "form. Please try again." 
+ ), + "type": "alert-error", + }, + ) - self.assertEqual(response.status_code, 201, response.data) - self.assertEqual(XForm.objects.count(), pre_count + 1) + def test_enketo_urls_remain_the_same_after_form_replacement(self): + with HTTMock(enketo_mock): + self._publish_xls_form_to_project() - # pylint: disable=invalid-name - @patch("onadata.apps.main.forms.requests") - def test_publish_xlsform_using_url_content_disposition_attr_jumbled_v1( - self, mock_requests - ): - with HTTMock( - enketo_mock, xls_url_no_extension_mock_content_disposition_attr_jumbled_v1 - ): - view = XFormViewSet.as_view({"post": "create"}) + self.assertIsNotNone(self.xform.version) + version = self.xform.version + form_id = self.xform.pk + id_string = self.xform.id_string - xls_url = "https://ona.io/examples/forms/tutorial/form" - pre_count = XForm.objects.count() - path = os.path.join( - settings.PROJECT_ROOT, - "apps", - "main", - "tests", - "fixtures", - "transportation", - "transportation_different_id_string.xlsx", + self.view = XFormViewSet.as_view( + { + "get": "retrieve", + } ) - with open(path, "rb") as xls_file: - mock_response = get_mocked_response_for_file( - xls_file, "transportation_different_id_string.xlsx", 200 - ) - mock_requests.head.return_value = mock_response - mock_requests.get.return_value = mock_response - - post_data = {"xls_url": xls_url} - request = self.factory.post("/", data=post_data, **self.extra) - response = view(request) + request = self.factory.get("/", **self.extra) + response = self.view(request, pk=self.xform.id) + self.assertEqual(response.status_code, 200) + self.assertNotEqual(response.get("Cache-Control"), None) - self.assertEqual(response.status_code, 201) - self.assertEqual(XForm.objects.count(), pre_count + 1) + enketo_url = response.data.get("enketo_url") + enketo_preview_url = response.data.get("enketo_preview_url") - # pylint: disable=invalid-name - @patch("onadata.apps.main.forms.requests") - def 
test_publish_xlsform_using_url_content_disposition_attr_jumbled_v2( - self, mock_requests - ): - with HTTMock( - enketo_mock, xls_url_no_extension_mock_content_disposition_attr_jumbled_v2 - ): - view = XFormViewSet.as_view({"post": "create"}) + view = XFormViewSet.as_view( + { + "patch": "partial_update", + } + ) - xls_url = "https://ona.io/examples/forms/tutorial/form" - pre_count = XForm.objects.count() path = os.path.join( settings.PROJECT_ROOT, "apps", @@ -1638,169 +1701,212 @@ def test_publish_xlsform_using_url_content_disposition_attr_jumbled_v2( "tests", "fixtures", "transportation", - "transportation_different_id_string.xlsx", + "transportation_version.xlsx", ) - with open(path, "rb") as xls_file: - mock_response = get_mocked_response_for_file( - xls_file, "transportation_different_id_string.xlsx", 200 + post_data = {"xls_file": xls_file} + request = self.factory.patch("/", data=post_data, **self.extra) + response = view(request, pk=form_id) + self.assertEqual( + response.data.get("enketo_preview_url"), enketo_preview_url ) - mock_requests.head.return_value = mock_response - mock_requests.get.return_value = mock_response + self.assertEqual(response.data.get("enketo_url"), enketo_url) + self.assertEqual(response.status_code, 200) - post_data = {"xls_url": xls_url} - request = self.factory.post("/", data=post_data, **self.extra) - response = view(request) + self.xform.refresh_from_db() - self.assertEqual(response.status_code, 201) - self.assertEqual(XForm.objects.count(), pre_count + 1) + # diff versions + self.assertNotEqual(version, self.xform.version) + self.assertEqual(form_id, self.xform.pk) + self.assertEqual(id_string, self.xform.id_string) - # pylint: disable=invalid-name - @patch("onadata.apps.main.forms.requests") - def test_publish_csvform_using_url_upload(self, mock_requests): + def test_xform_hash_changes_after_form_replacement(self): with HTTMock(enketo_mock): - view = XFormViewSet.as_view({"post": "create"}) + 
self._publish_xls_form_to_project() + + self.assertIsNotNone(self.xform.version) + form_id = self.xform.pk + xform_old_hash = self.xform.hash + + view = XFormViewSet.as_view( + { + "patch": "partial_update", + } + ) - csv_url = "https://ona.io/examples/forms/tutorial/form.csv" - pre_count = XForm.objects.count() path = os.path.join( settings.PROJECT_ROOT, "apps", - "api", + "main", "tests", "fixtures", - "text_and_integer.csv", + "transportation", + "transportation_version.xlsx", ) + with open(path, "rb") as xls_file: + post_data = {"xls_file": xls_file} + request = self.factory.patch("/", data=post_data, **self.extra) + response = view(request, pk=form_id) + self.assertEqual(response.status_code, 200) - with open(path, "rb") as csv_file: - mock_response = get_mocked_response_for_file( - csv_file, "text_and_integer.csv", 200 - ) - mock_requests.head.return_value = mock_response - mock_requests.get.return_value = mock_response + self.xform.refresh_from_db() + self.assertNotEqual(xform_old_hash, self.xform.hash) - post_data = {"csv_url": csv_url} - request = self.factory.post("/", data=post_data, **self.extra) - response = view(request) + def test_hash_changes_after_update_xform_xls_file(self): + with HTTMock(enketo_mock): + self._publish_xls_form_to_project() - mock_requests.get.assert_called_with(csv_url) - csv_file.close() + xform_old_hash = self.xform.hash + form_id = self.xform.pk - self.assertEqual(response.status_code, 201) - self.assertEqual(XForm.objects.count(), pre_count + 1) + view = XFormViewSet.as_view( + { + "patch": "partial_update", + } + ) - # pylint: disable=invalid-name - def test_publish_select_external_xlsform(self): - with HTTMock(enketo_urls_mock): - view = XFormViewSet.as_view({"post": "create"}) path = os.path.join( settings.PROJECT_ROOT, "apps", - "api", + "main", "tests", "fixtures", - "select_one_external.xlsx", + "transportation", + "transportation_version.xlsx", ) with open(path, "rb") as xls_file: - # pylint: disable=no-member - 
meta_count = MetaData.objects.count() post_data = {"xls_file": xls_file} - request = self.factory.post("/", data=post_data, **self.extra) - response = view(request) - xform = self.user.xforms.all()[0] - self.assertEqual(response.status_code, 201) - self.assertEqual(meta_count + 4, MetaData.objects.count()) - metadata = MetaData.objects.get( - object_id=xform.id, data_value="itemsets.csv" - ) - self.assertIsNotNone(metadata) - self.assertTrue(OwnerRole.user_has_role(self.user, xform)) - self.assertEqual("owner", response.data["users"][0]["role"], self.user) + request = self.factory.patch("/", data=post_data, **self.extra) + response = view(request, pk=form_id) + self.assertEqual(response.status_code, 200) - def test_publish_csv_with_universal_newline_xlsform(self): - with HTTMock(enketo_mock): - view = XFormViewSet.as_view({"post": "create"}) - path = os.path.join( - settings.PROJECT_ROOT, - "apps", - "api", - "tests", - "fixtures", - "universal_newline.csv", + self.xform.refresh_from_db() + self.assertNotEqual(xform_old_hash, self.xform.hash) + + def test_login_enketo_no_redirect(self): + with HTTMock(enketo_urls_mock): + self._publish_xls_form_to_project() + view = XFormViewSet.as_view({"get": "login"}) + formid = self.xform.pk + request = self.factory.get("/") + response = view(request, pk=formid) + self.assertEqual( + response.content.decode("utf-8"), + "Authentication failure, cannot redirect", ) - with open(path, encoding="utf-8") as xls_file: - post_data = {"xls_file": xls_file} - request = self.factory.post("/", data=post_data, **self.extra) - response = view(request) - self.assertEqual(response.status_code, 201, response.data) - def test_publish_xlsform_anon(self): - view = XFormViewSet.as_view({"post": "create"}) - path = os.path.join( - settings.PROJECT_ROOT, - "apps", - "main", - "tests", - "fixtures", - "transportation", - "transportation.xlsx", - ) - username = "Anon" - error_msg = "User with username %s does not exist." 
% username - with open(path, "rb") as xls_file: - post_data = {"xls_file": xls_file, "owner": username} - request = self.factory.post("/", data=post_data, **self.extra) - response = view(request) - self.assertEqual(response.status_code, 400) - self.assertEqual(response.get("Cache-Control"), None) - self.assertEqual(response.data.get("message"), error_msg) + @override_settings( + ENKETO_CLIENT_LOGIN_URL={ + "*": "http://test.ona.io/login", + "stage-testserver": "http://gh.ij.kl/login", + } + ) + @override_settings(ALLOWED_HOSTS=["*"]) + def test_login_enketo_no_jwt_but_with_return_url(self): + with HTTMock(enketo_urls_mock): + self._publish_xls_form_to_project() - def test_publish_invalid_xls_form(self): - view = XFormViewSet.as_view({"post": "create"}) - path = os.path.join( - settings.PROJECT_ROOT, - "apps", - "main", - "tests", - "fixtures", - "transportation", - "transportation.bad_id.xlsx", - ) - with open(path, "rb") as xls_file: - post_data = {"xls_file": xls_file} - request = self.factory.post("/", data=post_data, **self.extra) - response = view(request) - self.assertEqual(response.status_code, 400) - self.assertEqual(response.get("Cache-Control"), None) - error_msg = ( - "In strict mode, the XForm ID must be " - "a valid slug and contain no spaces." - " Please ensure that you have set an" - " id_string in the settings sheet or " - "have modified the filename to not " - "contain any spaces." 
+ view = XFormViewSet.as_view({"get": "login"}) + + formid = self.xform.pk + url = "https://enketo.ona.io/::YY8M" + query_data = {"return": url} + request = self.factory.get("/", data=query_data) + + # user is redirected to default login page "*" + response = view(request, pk=formid) + self.assertTrue(response.url.startswith("http://test.ona.io/login")) + self.assertEqual(response.status_code, 302) + + # user is redirected to login page for "stage-testserver" + request.META["HTTP_HOST"] = "stage-testserver" + response = view(request, pk=formid) + self.assertTrue(response.url.startswith("http://gh.ij.kl/login")) + self.assertEqual(response.status_code, 302) + + @override_settings(JWT_SECRET_KEY=JWT_SECRET_KEY, JWT_ALGORITHM=JWT_ALGORITHM) + def test_login_enketo_online_url_bad_token(self): + with HTTMock(enketo_urls_mock): + self._publish_xls_form_to_project() + view = XFormViewSet.as_view({"get": "login"}) + formid = self.xform.pk + temp_token = "abc" + + # do not store temp token + + url = "https://enketo.ona.io/::YY8M?jwt=%s" % temp_token + query_data = {"return": url} + request = self.factory.get("/", data=query_data) + response = view(request, pk=formid) + + self.assertEqual(response.status_code, 401) + self.assertEqual( + response.data.get("detail"), "JWT DecodeError: Not enough segments" ) - self.assertEqual(response.data.get("text"), error_msg) - path = os.path.join( - settings.PROJECT_ROOT, - "apps", - "main", - "tests", - "fixtures", - "transportation", - "transportation_ampersand_in_title.xlsx", + @override_settings(JWT_SECRET_KEY=JWT_SECRET_KEY, JWT_ALGORITHM=JWT_ALGORITHM) + def test_login_enketo_offline_url_using_jwt(self): + with HTTMock(enketo_urls_mock): + self._publish_xls_form_to_project() + view = XFormViewSet.as_view({"get": "login"}) + formid = self.xform.pk + + payload = { + "api-token": self.user.auth_token.key, + } + + encoded_payload = jwt.encode( + payload, JWT_SECRET_KEY, algorithm=JWT_ALGORITHM + ) + + return_url = 
"https://enketo.ona.io/_/#YY8M" + url = "https://enketo.ona.io/_/?jwt=%s#YY8M" % encoded_payload + + query_data = {"return": url} + request = self.factory.get("/", data=query_data) + response = view(request, pk=formid) + self.assertEqual(response.status_code, 302) + self.assertEqual(response.get("Location"), return_url) + + @patch("onadata.libs.authentication.EnketoTokenAuthentication.authenticate") + def test_enketo_cookie_authentication_with_invalid_jwt(self, mock_jwt_decode): + mock_jwt_decode.side_effect = jwt.DecodeError( + "JWT DecodeError: Not enough segments" ) - with open(path, "rb") as xls_file: - post_data = {"xls_file": xls_file} - request = self.factory.post("/", data=post_data, **self.extra) - response = view(request) - self.assertEqual(response.status_code, 400) - self.assertEqual(response.get("Cache-Control"), None) - error_msg = ( - "Title shouldn't have any invalid xml characters " "('>' '&' '<')" + + with HTTMock(enketo_urls_mock): + with self.assertRaises(jwt.DecodeError): + self._publish_xls_form_to_project() + self.assertTrue(mock_jwt_decode.called) + + @override_settings(JWT_SECRET_KEY=JWT_SECRET_KEY, JWT_ALGORITHM=JWT_ALGORITHM) + def test_login_enketo_online_url_using_jwt(self): + with HTTMock(enketo_urls_mock): + self._publish_xls_form_to_project() + view = XFormViewSet.as_view({"get": "login"}) + formid = self.xform.pk + + payload = { + "api-token": self.user.auth_token.key, + } + + encoded_payload = jwt.encode( + payload, JWT_SECRET_KEY, algorithm=JWT_ALGORITHM ) - self.assertEqual(response.data.get("text"), error_msg) + + return_url = "https://enketo.ona.io/::YY8M" + url = "%s?jwt=%s" % (return_url, encoded_payload) + query_data = {"return": url} + request = self.factory.get("/", data=query_data) + response = view(request, pk=formid) + self.assertEqual(response.status_code, 302) + self.assertEqual(response.get("Location"), return_url) + cookies = response.cookies + uid_cookie = cookies.get(settings.ENKETO_META_UID_COOKIE)._value + 
username_cookie = cookies.get(settings.ENKETO_META_USERNAME_COOKIE)._value + # example cookie: bob:1jlVih:i2KvHoAtsQOlYB71CJeNuVUlEY0 + self.assertEqual(username_cookie.split(":")[0], "bob") + self.assertEqual(uid_cookie.split(":")[0], "bob") @patch("onadata.apps.api.viewsets.xform_viewset.XFormViewSet.list") def test_return_400_on_xlsform_error_on_list_action(self, mock_set_title): @@ -1814,30 +1920,6 @@ def test_return_400_on_xlsform_error_on_list_action(self, mock_set_title): self.assertEqual(response.status_code, 400) self.assertEqual(response.content.decode("utf-8"), error_msg) - def test_publish_invalid_xls_form_no_choices(self): - view = XFormViewSet.as_view({"post": "create"}) - path = os.path.join( - settings.PROJECT_ROOT, - "apps", - "main", - "tests", - "fixtures", - "transportation", - "transportation.no_choices.xlsx", - ) - with open(path, "rb") as xls_file: - post_data = {"xls_file": xls_file} - request = self.factory.post("/", data=post_data, **self.extra) - response = view(request) - self.assertEqual(response.status_code, 400) - self.assertEqual(response.get("Cache-Control"), None) - error_msg = ( - "There should be a choices sheet in this xlsform. " - "Please ensure that the choices sheet has the mandatory columns " - "'list_name', 'name', and 'label'." 
- ) - self.assertEqual(response.data.get("text"), error_msg) - def test_partial_update(self): with HTTMock(enketo_mock): self._publish_xls_form_to_project() @@ -4974,30 +5056,6 @@ def test_csv_export_cache(self): # reused options, should generate new with new submission self.assertEqual(count + 3, Export.objects.all().count()) - def test_upload_xml_form_file(self): - with HTTMock(enketo_mock): - path = os.path.join( - os.path.dirname(__file__), "..", "fixtures", "forms", "contributions" - ) - form_path = os.path.join(path, "contributions.xml") - - xforms = XForm.objects.count() - view = XFormViewSet.as_view({"post": "create"}) - - with open(form_path, encoding="utf-8") as xml_file: - post_data = {"xml_file": xml_file} - request = self.factory.post("/", data=post_data, **self.extra) - response = view(request) - self.assertEqual(xforms + 1, XForm.objects.count()) - self.assertEqual(response.status_code, 201) - - instances_path = os.path.join(path, "instances") - for uuid in os.listdir(instances_path): - s_path = os.path.join(instances_path, uuid, "submission.xml") - self._make_submission(s_path) - xform = XForm.objects.last() - self.assertEqual(xform.instances.count(), 6) - def test_created_by_field_on_cloned_forms(self): """ Test that the created by field is not empty for cloned forms @@ -5020,23 +5078,6 @@ def test_created_by_field_on_cloned_forms(self): cloned_form = XForm.objects.last() self.assertEqual(cloned_form.created_by.username, "alice") - def test_form_publishing_floip(self): - with HTTMock(enketo_mock): - xforms = XForm.objects.count() - view = XFormViewSet.as_view({"post": "create"}) - path = os.path.join( - os.path.dirname(__file__), - "../", - "fixtures", - "flow-results-example-1.json", - ) - with open(path, "rb") as xls_file: - post_data = {"floip_file": xls_file} - request = self.factory.post("/", data=post_data, **self.extra) - response = view(request) - self.assertEqual(response.status_code, 201, response.data) - self.assertEqual(xforms + 1, 
XForm.objects.count()) - def test_xlsx_import(self): """Ensure XLSX imports work as expected and dates are formatted correctly""" with HTTMock(enketo_mock): @@ -5091,53 +5132,6 @@ def test_xls_import(self): self.assertEqual(response.data.get("additions"), 9) self.assertEqual(response.data.get("updates"), 0) - def test_external_choice_integer_name_xlsform(self): - """Test that names with integers are converted to strings""" - with HTTMock(enketo_urls_mock): - view = XFormViewSet.as_view({"post": "create"}) - path = os.path.join( - settings.PROJECT_ROOT, - "apps", - "api", - "tests", - "fixtures", - "integer_name_test.xlsx", - ) - with open(path, "rb") as xls_file: - # pylint: disable=no-member - meta_count = MetaData.objects.count() - post_data = {"xls_file": xls_file} - request = self.factory.post("/", data=post_data, **self.extra) - response = view(request) - xform = self.user.xforms.all()[0] - self.assertEqual(response.status_code, 201) - self.assertEqual(meta_count + 4, MetaData.objects.count()) - metadata = MetaData.objects.get( - object_id=xform.id, data_value="itemsets.csv" - ) - self.assertIsNotNone(metadata) - - csv_reader = csv.reader(codecs.iterdecode(metadata.data_file, "utf-8")) - expected_data = [ - ["list_name", "name", "label", "state", "county"], - ["states", "1", "Texas", "", ""], - ["states", "2", "Washington", "", ""], - ["counties", "b1", "King", "2", ""], - ["counties", "b2", "Pierce", "2", ""], - ["counties", "b3", "King", "1", ""], - ["counties", "b4", "Cameron", "1", ""], - ["cities", "dumont", "Dumont", "1", "b3"], - ["cities", "finney", "Finney", "1", "b3"], - ["cities", "brownsville", "brownsville", "1", "b4"], - ["cities", "harlingen", "harlingen", "1", "b4"], - ["cities", "seattle", "Seattle", "2", "b3"], - ["cities", "redmond", "Redmond", "2", "b3"], - ["cities", "tacoma", "Tacoma", "2", "b2"], - ["cities", "puyallup", "Puyallup", "2", "b2"], - ] - for index, row in enumerate(csv_reader): - self.assertEqual(row, expected_data[index]) - 
def test_csv_xls_import_errors(self): with HTTMock(enketo_mock): xls_path = os.path.join( @@ -5166,6 +5160,307 @@ def test_csv_xls_import_errors(self): self.assertEqual(response.status_code, 400) self.assertEqual(response.data.get("error"), "csv_file not a csv file") + @override_settings(TIME_ZONE="UTC") + def test_get_single_registration_form(self): + """Response a for an XForm contributing entities is correct""" + # Publish registration form + xform = self._publish_registration_form(self.user) + view = XFormViewSet.as_view({"get": "retrieve"}) + request = self.factory.get("/", **self.extra) + response = view(request, pk=xform.pk) + self.assertEqual(response.status_code, 200) + entity_list = EntityList.objects.get(name="trees") + expected_data = { + "url": f"http://testserver/api/v1/forms/{xform.pk}", + "formid": xform.pk, + "metadata": [], + "owner": "http://testserver/api/v1/users/bob", + "created_by": "http://testserver/api/v1/users/bob", + "public": False, + "public_data": False, + "public_key": "", + "require_auth": False, + "submission_count_for_today": 0, + "tags": [], + "title": xform.title, + "users": [ + { + "is_org": False, + "metadata": {}, + "first_name": "Bob", + "last_name": "erama", + "user": "bob", + "role": "owner", + } + ], + "enketo_url": None, + "enketo_preview_url": None, + "enketo_single_submit_url": None, + "num_of_submissions": 0, + "last_submission_time": None, + "form_versions": [], + "data_views": [], + "xls_available": False, + "contributes_entities_to": { + "id": entity_list.pk, + "name": "trees", + "is_active": True, + }, + "consumes_entities_from": [], + "description": "", + "downloadable": True, + "allows_sms": False, + "encrypted": False, + "sms_id_string": xform.sms_id_string, + "id_string": xform.id_string, + "date_created": xform.date_created.isoformat().replace("+00:00", "Z"), + "date_modified": xform.date_modified.isoformat().replace("+00:00", "Z"), + "uuid": xform.uuid, + "bamboo_dataset": "", + "instances_with_geopoints": 
False, + "instances_with_osm": False, + "version": xform.version, + "has_hxl_support": False, + "last_updated_at": xform.last_updated_at.isoformat().replace("+00:00", "Z"), + "hash": xform.hash, + "is_merged_dataset": False, + "is_instance_json_regenerated": False, + "project": f"http://testserver/api/v1/projects/{xform.project.pk}", + } + self.assertEqual(json.dumps(response.data), json.dumps(expected_data)) + + @override_settings(TIME_ZONE="UTC") + def test_get_list_registration_form(self): + """Getting a list of registration forms is correct""" + # Publish registration form + xform = self._publish_registration_form(self.user) + view = XFormViewSet.as_view({"get": "list"}) + request = self.factory.get("/", **self.extra) + response = view(request) + self.assertEqual(response.status_code, 200) + entity_list = EntityList.objects.get(name="trees") + expected_data = [ + { + "url": f"http://testserver/api/v1/forms/{xform.pk}", + "formid": xform.pk, + "owner": "http://testserver/api/v1/users/bob", + "created_by": "http://testserver/api/v1/users/bob", + "public": False, + "public_data": False, + "public_key": "", + "require_auth": False, + "tags": [], + "title": xform.title, + "users": [ + { + "is_org": False, + "metadata": {}, + "first_name": "Bob", + "last_name": "erama", + "user": "bob", + "role": "owner", + } + ], + "enketo_url": None, + "enketo_preview_url": None, + "enketo_single_submit_url": None, + "num_of_submissions": 0, + "last_submission_time": None, + "data_views": [], + "xls_available": False, + "contributes_entities_to": { + "id": entity_list.pk, + "name": "trees", + "is_active": True, + }, + "consumes_entities_from": [], + "description": "", + "downloadable": True, + "allows_sms": False, + "encrypted": False, + "sms_id_string": xform.sms_id_string, + "id_string": xform.id_string, + "date_created": xform.date_created.isoformat().replace("+00:00", "Z"), + "date_modified": xform.date_modified.isoformat().replace("+00:00", "Z"), + "uuid": xform.uuid, + 
"bamboo_dataset": "", + "instances_with_geopoints": False, + "instances_with_osm": False, + "version": xform.version, + "has_hxl_support": False, + "last_updated_at": xform.last_updated_at.isoformat().replace( + "+00:00", "Z" + ), + "hash": xform.hash, + "is_merged_dataset": False, + "is_instance_json_regenerated": False, + "project": f"http://testserver/api/v1/projects/{xform.project.pk}", + } + ] + self.assertEqual(json.dumps(response.data), json.dumps(expected_data)) + + @override_settings(TIME_ZONE="UTC") + def test_get_single_follow_up_form(self): + """Response a for an XForm consuming entities is correct""" + self._project_create() + entity_list = EntityList.objects.create(name="trees", project=self.project) + xform = self._publish_follow_up_form(self.user, self.project) + view = XFormViewSet.as_view({"get": "retrieve"}) + request = self.factory.get("/", **self.extra) + response = view(request, pk=xform.pk) + self.assertEqual(response.status_code, 200) + metadata = MetaData.objects.order_by("-pk").first() + expected_data = { + "url": f"http://testserver/api/v1/forms/{xform.pk}", + "formid": xform.pk, + "metadata": [ + OrderedDict( + [ + ("id", metadata.pk), + ("xform", xform.pk), + ("data_value", f"entity_list {entity_list.pk} trees"), + ("data_type", "media"), + ("data_file", None), + ("extra_data", {}), + ("data_file_type", None), + ("media_url", None), + ("file_hash", None), + ("url", f"http://testserver/api/v1/metadata/{metadata.pk}"), + ("date_created", metadata.date_created), + ] + ) + ], + "owner": "http://testserver/api/v1/users/bob", + "created_by": "http://testserver/api/v1/users/bob", + "public": False, + "public_data": False, + "public_key": "", + "require_auth": False, + "submission_count_for_today": 0, + "tags": [], + "title": xform.title, + "users": [ + { + "is_org": False, + "metadata": {}, + "first_name": "Bob", + "last_name": "erama", + "user": "bob", + "role": "owner", + } + ], + "enketo_url": None, + "enketo_preview_url": None, + 
"enketo_single_submit_url": None, + "num_of_submissions": 0, + "last_submission_time": None, + "form_versions": [], + "data_views": [], + "xls_available": False, + "contributes_entities_to": None, + "consumes_entities_from": [ + { + "id": entity_list.pk, + "name": "trees", + "is_active": True, + } + ], + "description": "", + "downloadable": True, + "allows_sms": False, + "encrypted": False, + "sms_id_string": xform.sms_id_string, + "id_string": xform.id_string, + "date_created": xform.date_created.isoformat().replace("+00:00", "Z"), + "date_modified": xform.date_modified.isoformat().replace("+00:00", "Z"), + "uuid": xform.uuid, + "bamboo_dataset": "", + "instances_with_geopoints": False, + "instances_with_osm": False, + "version": xform.version, + "has_hxl_support": False, + "last_updated_at": xform.last_updated_at.isoformat().replace("+00:00", "Z"), + "hash": xform.hash, + "is_merged_dataset": False, + "is_instance_json_regenerated": False, + "project": f"http://testserver/api/v1/projects/{xform.project.pk}", + } + self.assertEqual(response.data, expected_data) + + @override_settings(TIME_ZONE="UTC") + def test_get_list_follow_up_form(self): + """Getting a list of follow up forms is correct""" + # Publish registration form + self._project_create() + entity_list = EntityList.objects.create(name="trees", project=self.project) + xform = self._publish_follow_up_form(self.user, self.project) + view = XFormViewSet.as_view({"get": "list"}) + request = self.factory.get("/", **self.extra) + response = view(request) + self.assertEqual(response.status_code, 200) + entity_list = EntityList.objects.get(name="trees") + expected_data = [ + { + "url": f"http://testserver/api/v1/forms/{xform.pk}", + "formid": xform.pk, + "owner": "http://testserver/api/v1/users/bob", + "created_by": "http://testserver/api/v1/users/bob", + "public": False, + "public_data": False, + "public_key": "", + "require_auth": False, + "tags": [], + "title": xform.title, + "users": [ + { + "is_org": False, + 
"metadata": {}, + "first_name": "Bob", + "last_name": "erama", + "user": "bob", + "role": "owner", + } + ], + "enketo_url": None, + "enketo_preview_url": None, + "enketo_single_submit_url": None, + "num_of_submissions": 0, + "last_submission_time": None, + "data_views": [], + "xls_available": False, + "contributes_entities_to": None, + "consumes_entities_from": [ + { + "id": entity_list.pk, + "name": "trees", + "is_active": True, + } + ], + "description": "", + "downloadable": True, + "allows_sms": False, + "encrypted": False, + "sms_id_string": xform.sms_id_string, + "id_string": xform.id_string, + "date_created": xform.date_created.isoformat().replace("+00:00", "Z"), + "date_modified": xform.date_modified.isoformat().replace("+00:00", "Z"), + "uuid": xform.uuid, + "bamboo_dataset": "", + "instances_with_geopoints": False, + "instances_with_osm": False, + "version": xform.version, + "has_hxl_support": False, + "last_updated_at": xform.last_updated_at.isoformat().replace( + "+00:00", "Z" + ), + "hash": xform.hash, + "is_merged_dataset": False, + "is_instance_json_regenerated": False, + "project": f"http://testserver/api/v1/projects/{xform.project.pk}", + } + ] + self.assertEqual(json.dumps(response.data), json.dumps(expected_data)) + class ExportAsyncTestCase(XFormViewSetBaseTestCase): """Tests for exporting form data asynchronously""" diff --git a/onadata/apps/api/tools.py b/onadata/apps/api/tools.py index 9aaf21bcb0..599fd73e72 100644 --- a/onadata/apps/api/tools.py +++ b/onadata/apps/api/tools.py @@ -36,7 +36,13 @@ get_organization_members_team, ) from onadata.apps.api.models.team import Team -from onadata.apps.logger.models import DataView, Instance, Project, XForm +from onadata.apps.logger.models import ( + DataView, + Instance, + Project, + XForm, + EntityList, +) from onadata.apps.main.forms import QuickConverter from onadata.apps.main.models.meta_data import MetaData from onadata.apps.main.models.user_profile import UserProfile @@ -61,6 +67,7 @@ from 
onadata.libs.utils.api_export_tools import ( custom_response_handler, get_metadata_format, + get_entity_list_export_response, ) from onadata.libs.utils.cache_tools import ( PROJ_BASE_FORMS_CACHE, @@ -540,6 +547,8 @@ def get_data_value_objects(value): model = DataView elif value.startswith("xform"): model = XForm + elif value.startswith("entity_list"): + model = EntityList if model: parts = value.split() @@ -573,9 +582,12 @@ def get_data_value_objects(value): except ValidationError: obj, filename = get_data_value_objects(metadata.data_value) if obj: + if isinstance(obj, EntityList): + return get_entity_list_export_response(request, obj, filename) + + export_type = get_metadata_format(metadata.data_value) dataview = obj if isinstance(obj, DataView) else False xform = obj.xform if isinstance(obj, DataView) else obj - export_type = get_metadata_format(metadata.data_value) return custom_response_handler( request, diff --git a/onadata/apps/api/urls/v2_urls.py b/onadata/apps/api/urls/v2_urls.py index 0751997480..48e86af846 100644 --- a/onadata/apps/api/urls/v2_urls.py +++ b/onadata/apps/api/urls/v2_urls.py @@ -2,9 +2,11 @@ """ Custom rest_framework Router V2 """ +from onadata.apps.api.viewsets.entity_list_viewset import EntityListViewSet from onadata.apps.api.viewsets.v2.tableau_viewset import TableauViewSet from .v1_urls import MultiLookupRouter router = MultiLookupRouter(trailing_slash=False) +router.register(r"entity-lists", EntityListViewSet, basename="entity_list") router.register(r"open-data", TableauViewSet, basename="open-data") diff --git a/onadata/apps/api/viewsets/entity_list_viewset.py b/onadata/apps/api/viewsets/entity_list_viewset.py new file mode 100644 index 0000000000..9b863eb366 --- /dev/null +++ b/onadata/apps/api/viewsets/entity_list_viewset.py @@ -0,0 +1,84 @@ +from rest_framework.decorators import action +from rest_framework.permissions import AllowAny +from rest_framework.response import Response +from rest_framework.viewsets import 
ReadOnlyModelViewSet + + +from onadata.apps.api.tools import get_baseviewset_class +from onadata.apps.logger.models import Entity, EntityList +from onadata.libs.filters import EntityListProjectFilter +from onadata.libs.mixins.cache_control_mixin import CacheControlMixin +from onadata.libs.mixins.etags_mixin import ETagsMixin +from onadata.libs.pagination import StandardPageNumberPagination +from onadata.libs.serializers.entity_serializer import ( + EntitySerializer, + EntityListSerializer, + EntityListDetailSerializer, +) + + +BaseViewset = get_baseviewset_class() + +# pylint: disable=too-many-ancestors + + +class EntityListViewSet( + CacheControlMixin, + ETagsMixin, + BaseViewset, + ReadOnlyModelViewSet, +): + queryset = EntityList.objects.all().order_by("pk") + serializer_class = EntityListSerializer + permission_classes = (AllowAny,) + pagination_class = StandardPageNumberPagination + filter_backends = (EntityListProjectFilter,) + + def get_queryset(self): + queryset = super().get_queryset() + + if self.request and self.request.user.is_anonymous: + queryset = queryset.filter(project__shared=True) + + if self.action == "retrieve": + # Prefetch related objects to be rendered for performance + # optimization + return queryset.prefetch_related( + "registration_forms", + "follow_up_forms", + ) + + return queryset + + def get_serializer_class(self): + """Override get_serializer_class""" + if self.action == "retrieve": + return EntityListDetailSerializer + + if self.action == "entities": + return EntitySerializer + + return super().get_serializer_class() + + @action(methods=["GET"], detail=True) + def entities(self, request, *args, **kwargs): + """Returns a list of Entities for a single EntityList""" + entity_list = self.get_object() + entities_qs = ( + Entity.objects.filter( + registration_form__entity_list=entity_list, + deleted_at__isnull=True, + ) + # To improve performance, we specify only the column(s) + # we are interested in using .only + 
.only("json").order_by("pk") + ) + queryset = self.filter_queryset(entities_qs) + page = self.paginate_queryset(queryset) + + if page is not None: + serializer = self.get_serializer(page, many=True) + return self.get_paginated_response(serializer.data) + + serializer = self.get_serializer(queryset, many=True) + return Response(serializer.data) diff --git a/onadata/apps/logger/apps.py b/onadata/apps/logger/apps.py new file mode 100644 index 0000000000..2d6a0d4165 --- /dev/null +++ b/onadata/apps/logger/apps.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +""" +Loggger AppsConfig module +""" +from __future__ import unicode_literals + +from django.apps import AppConfig + + +class LoggerConfig(AppConfig): + """ + Logger AppsConfig class. + """ + + name = "onadata.apps.logger" + verbose_name = "Logger" + + def ready(self): + # pylint: disable=import-outside-toplevel,unused-import + from onadata.apps.logger import signals # noqa diff --git a/onadata/apps/logger/migrations/0014_populate_attachment_xform.py b/onadata/apps/logger/migrations/0014_populate_attachment_xform.py index 563b5f8c96..ba010bb481 100644 --- a/onadata/apps/logger/migrations/0014_populate_attachment_xform.py +++ b/onadata/apps/logger/migrations/0014_populate_attachment_xform.py @@ -28,10 +28,23 @@ def populate_attachment_xform(apps, schema_editor): print("Done populating attachment xform!") +def reverse_populate_attachment_xform(apps, schema_editor): + """Reverse populate xform field when migrations are unapplied""" + Attachment = apps.get_model("logger", "Attachment") + queryset = Attachment.objects.filter(xform__isnull=False).values("pk") + + for attachment in queryset.iterator(chunk_size=100): + Attachment.objects.filter(pk=attachment["pk"]).update(xform=None, user=None) + + class Migration(migrations.Migration): dependencies = [ ("logger", "0013_add_xform_to_logger_attachment"), ] - operations = [migrations.RunPython(populate_attachment_xform)] + operations = [ + migrations.RunPython( + 
populate_attachment_xform, reverse_populate_attachment_xform + ) + ] diff --git a/onadata/apps/logger/migrations/0015_entity_entitylist_followupform_registrationform_and_more.py b/onadata/apps/logger/migrations/0015_entity_entitylist_followupform_registrationform_and_more.py new file mode 100644 index 0000000000..431b762729 --- /dev/null +++ b/onadata/apps/logger/migrations/0015_entity_entitylist_followupform_registrationform_and_more.py @@ -0,0 +1,200 @@ +# Generated by Django 4.2.11 on 2024-04-23 08:43 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ("logger", "0014_populate_attachment_xform"), + ] + + operations = [ + migrations.CreateModel( + name="Entity", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("date_created", models.DateTimeField(auto_now_add=True)), + ("date_modified", models.DateTimeField(auto_now=True)), + ("xml", models.TextField()), + ("json", models.JSONField(default=dict)), + ("version", models.CharField(max_length=255, null=True)), + ("uuid", models.CharField(db_index=True, default="", max_length=249)), + ("deleted_at", models.DateTimeField(blank=True, null=True)), + ], + options={ + "abstract": False, + }, + ), + migrations.CreateModel( + name="EntityList", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("date_created", models.DateTimeField(auto_now_add=True)), + ("date_modified", models.DateTimeField(auto_now=True)), + ( + "name", + models.CharField( + help_text="The name that the follow-up form will reference", + max_length=255, + ), + ), + ("num_entities", models.IntegerField(default=0)), + ( + "last_entity_update_time", + models.DateTimeField(blank=True, null=True), + ), + 
], + options={ + "abstract": False, + }, + ), + migrations.CreateModel( + name="FollowUpForm", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("date_created", models.DateTimeField(auto_now_add=True)), + ("date_modified", models.DateTimeField(auto_now=True)), + ("is_active", models.BooleanField(default=True)), + ], + options={ + "abstract": False, + }, + ), + migrations.CreateModel( + name="RegistrationForm", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("date_created", models.DateTimeField(auto_now_add=True)), + ("date_modified", models.DateTimeField(auto_now=True)), + ("is_active", models.BooleanField(default=True)), + ], + options={ + "abstract": False, + }, + ), + migrations.AddField( + model_name="registrationform", + name="entity_list", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="registration_forms", + to="logger.entitylist", + ), + ), + migrations.AddField( + model_name="registrationform", + name="xform", + field=models.ForeignKey( + help_text="XForm that creates entities", + on_delete=django.db.models.deletion.CASCADE, + related_name="registration_forms", + to="logger.xform", + ), + ), + migrations.AddField( + model_name="followupform", + name="entity_list", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="follow_up_forms", + to="logger.entitylist", + ), + ), + migrations.AddField( + model_name="followupform", + name="xform", + field=models.ForeignKey( + help_text="XForm that consumes entities", + on_delete=django.db.models.deletion.CASCADE, + related_name="follow_up_forms", + to="logger.xform", + ), + ), + migrations.AddField( + model_name="entitylist", + name="project", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="entity_lists", + 
to="logger.project", + ), + ), + migrations.AddField( + model_name="entity", + name="deleted_by", + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="deleted_entities", + to=settings.AUTH_USER_MODEL, + ), + ), + migrations.AddField( + model_name="entity", + name="instance", + field=models.OneToOneField( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="entity", + to="logger.instance", + ), + ), + migrations.AddField( + model_name="entity", + name="registration_form", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="entities", + to="logger.registrationform", + ), + ), + migrations.AlterUniqueTogether( + name="registrationform", + unique_together={("entity_list", "xform")}, + ), + migrations.AlterUniqueTogether( + name="followupform", + unique_together={("entity_list", "xform")}, + ), + migrations.AlterUniqueTogether( + name="entitylist", + unique_together={("name", "project")}, + ), + ] diff --git a/onadata/apps/logger/models/__init__.py b/onadata/apps/logger/models/__init__.py index 3f76ae73f4..8a39d078e7 100644 --- a/onadata/apps/logger/models/__init__.py +++ b/onadata/apps/logger/models/__init__.py @@ -17,3 +17,7 @@ from onadata.apps.logger.models.xform_version import XFormVersion # noqa from onadata.apps.logger.xform_instance_parser import InstanceParseError # noqa from onadata.apps.logger.models.project_invitation import ProjectInvitation # noqa +from onadata.apps.logger.models.entity_list import EntityList # noqa +from onadata.apps.logger.models.registration_form import RegistrationForm # noqa +from onadata.apps.logger.models.follow_up_form import FollowUpForm # noqa +from onadata.apps.logger.models.entity import Entity # noqa diff --git a/onadata/apps/logger/models/entity.py b/onadata/apps/logger/models/entity.py new file mode 100644 index 0000000000..22f5a52644 --- /dev/null +++ b/onadata/apps/logger/models/entity.py @@ -0,0 
+1,50 @@ +""" +Entity model +""" + +from django.contrib.auth import get_user_model +from django.db import models + +from onadata.apps.logger.models.instance import Instance +from onadata.apps.logger.models.registration_form import RegistrationForm +from onadata.apps.logger.xform_instance_parser import get_entity_uuid_from_xml +from onadata.libs.models import BaseModel + +User = get_user_model() + + +class Entity(BaseModel): + """An entity created by a registration form""" + + registration_form = models.ForeignKey( + RegistrationForm, + on_delete=models.CASCADE, + related_name="entities", + ) + instance = models.OneToOneField( + Instance, + on_delete=models.SET_NULL, + related_name="entity", + null=True, + blank=True, + ) + xml = models.TextField() + json = models.JSONField(default=dict) + version = models.CharField(max_length=255, null=True) + uuid = models.CharField(max_length=249, default="", db_index=True) + deleted_at = models.DateTimeField(null=True, blank=True) + deleted_by = models.ForeignKey( + User, related_name="deleted_entities", null=True, on_delete=models.SET_NULL + ) + + def __str__(self) -> str: + return f"{self.pk}|{self.registration_form}" + + def save(self, *args, **kwargs) -> None: + if self.xml: + self.uuid = get_entity_uuid_from_xml(self.xml) + + super().save(*args, **kwargs) + + class Meta(BaseModel.Meta): + app_label = "logger" diff --git a/onadata/apps/logger/models/entity_list.py b/onadata/apps/logger/models/entity_list.py new file mode 100644 index 0000000000..568f6de8de --- /dev/null +++ b/onadata/apps/logger/models/entity_list.py @@ -0,0 +1,60 @@ +""" +EntityList model +""" + +from django.contrib.contenttypes.fields import GenericRelation +from django.db import models +from django.utils.translation import gettext_lazy as _ + +from onadata.apps.logger.models.project import Project +from onadata.libs.models import BaseModel + + +class EntityList(BaseModel): + """The dataset where each entity will be save to + + Entities of the same type 
are organized in entity lists + """ + + name = models.CharField( + max_length=255, + help_text=_("The name that the follow-up form will reference"), + ) + project = models.ForeignKey( + Project, + related_name="entity_lists", + on_delete=models.CASCADE, + ) + num_entities = models.IntegerField(default=0) + last_entity_update_time = models.DateTimeField(blank=True, null=True) + exports = GenericRelation("viewer.GenericExport") + + class Meta(BaseModel.Meta): + app_label = "logger" + unique_together = ( + "name", + "project", + ) + + def __str__(self): + return f"{self.name}|{self.project}" + + @property + def properties(self) -> list[str]: + """All dataset properties + + Multiple forms can define matching or different properties for the same + dataset + + Returns: + list: properties defined by all forms creating Entities for + the dataset + """ + registration_forms_qs = self.registration_forms.filter(is_active=True) + dataset_properties = set() + + for form in registration_forms_qs: + form_properties = set(form.get_save_to().keys()) + dataset_properties.update(form_properties) + + return list(dataset_properties) diff --git a/onadata/apps/logger/models/follow_up_form.py b/onadata/apps/logger/models/follow_up_form.py new file mode 100644 index 0000000000..272900731a --- /dev/null +++ b/onadata/apps/logger/models/follow_up_form.py @@ -0,0 +1,40 @@ +""" +FollowUpForm model +""" + +from django.db import models +from django.utils.translation import gettext_lazy as _ + +from onadata.apps.logger.models.entity_list import EntityList +from onadata.apps.logger.models.xform import XForm +from onadata.libs.models import BaseModel + + +class FollowUpForm(BaseModel): + """Forms that consumes entities from an entity list + + No changes are made to any entities + """ + + class Meta(BaseModel.Meta): + app_label = "logger" + unique_together = ( + "entity_list", + "xform", + ) + + entity_list = models.ForeignKey( + EntityList, + related_name="follow_up_forms", + 
on_delete=models.CASCADE, + ) + xform = models.ForeignKey( + XForm, + related_name="follow_up_forms", + on_delete=models.CASCADE, + help_text=_("XForm that consumes entities"), + ) + is_active = models.BooleanField(default=True) + + def __str__(self): + return f"{self.xform}|{self.entity_list.name}" diff --git a/onadata/apps/logger/models/project_invitation.py b/onadata/apps/logger/models/project_invitation.py index 08003b81a2..845a60663a 100644 --- a/onadata/apps/logger/models/project_invitation.py +++ b/onadata/apps/logger/models/project_invitation.py @@ -1,20 +1,20 @@ """ ProjectInvitation class """ + from django.db import models from django.contrib.auth import get_user_model from django.utils.translation import gettext_lazy as _ from django.utils import timezone from onadata.apps.logger.models.project import Project -from onadata.libs.models.base_model import BaseModel User = get_user_model() -class ProjectInvitation(BaseModel): +class ProjectInvitation(models.Model): """ProjectInvitation model class""" - class Meta(BaseModel.Meta): + class Meta: app_label = "logger" class Status(models.IntegerChoices): diff --git a/onadata/apps/logger/models/registration_form.py b/onadata/apps/logger/models/registration_form.py new file mode 100644 index 0000000000..2d38a183b6 --- /dev/null +++ b/onadata/apps/logger/models/registration_form.py @@ -0,0 +1,69 @@ +""" +RegistrationForm model +""" + +import json + +from django.db import models +from django.utils.translation import gettext_lazy as _ + +from onadata.apps.logger.models.entity_list import EntityList +from onadata.apps.logger.models.xform import XForm +from onadata.apps.logger.models.xform_version import XFormVersion +from onadata.libs.models import BaseModel + + +class RegistrationForm(BaseModel): + """Form that creates entities in an entity list""" + + entity_list = models.ForeignKey( + EntityList, + related_name="registration_forms", + on_delete=models.CASCADE, + ) + xform = models.ForeignKey( + XForm, + 
related_name="registration_forms", + on_delete=models.CASCADE, + help_text=_("XForm that creates entities"), + ) + is_active = models.BooleanField(default=True) + + class Meta(BaseModel.Meta): + app_label = "logger" + unique_together = ( + "entity_list", + "xform", + ) + + def __str__(self): + return f"{self.xform}|{self.entity_list.name}" + + def get_save_to(self, version: str | None = None) -> dict[str, str]: + """Maps the save_to alias to the original field + + Args: + version (str | None): XFormVersion's version to use to get properties + + Returns: + dict: properties used to create entities mapped to their + original names + """ + if version: + xform_version = XFormVersion.objects.get(version=version, xform=self.xform) + xform_json = json.loads(xform_version.json) + + else: + xform_json = self.xform.json + + result = {} + fields = xform_json.get("children", []) + entity_properties = filter( + lambda field: "bind" in field and "entities:saveto" in field["bind"], fields + ) + + for field in entity_properties: + alias = field["bind"]["entities:saveto"] + result[alias] = field["name"] + + return result diff --git a/onadata/apps/logger/signals.py b/onadata/apps/logger/signals.py index 316c0a998d..fcfc6131cc 100644 --- a/onadata/apps/logger/signals.py +++ b/onadata/apps/logger/signals.py @@ -5,53 +5,30 @@ from django.db.models.signals import post_save from django.dispatch import receiver -from onadata.apps.logger.models import MergedXForm -from onadata.apps.logger.models import XForm -from onadata.apps.logger.models.xform import clear_project_cache -from onadata.libs.permissions import OwnerRole -from onadata.libs.utils.cache_tools import ( - IS_ORG, - safe_delete, -) -from onadata.libs.utils.project_utils import set_project_perms_to_xform +from onadata.apps.logger.models import Entity, Instance, RegistrationForm +from onadata.libs.utils.logger_tools import create_entity as create_new_entity # pylint: disable=unused-argument -@receiver( - post_save, 
sender=MergedXForm, dispatch_uid="set_project_perms_to_merged_xform" -) -def set_project_object_permissions(sender, instance=None, created=False, **kwargs): - """Apply project permission to the merged form.""" - if created: - OwnerRole.add(instance.user, instance) - OwnerRole.add(instance.user, instance.xform_ptr) - - if instance.created_by and instance.user != instance.created_by: - OwnerRole.add(instance.created_by, instance) - OwnerRole.add(instance.created_by, instance.xform_ptr) - - set_project_perms_to_xform(instance, instance.project) - set_project_perms_to_xform(instance.xform_ptr, instance.project) - - -# pylint: disable=unused-argument -def set_xform_object_permissions(sender, instance=None, created=False, **kwargs): - """Apply project permissions to the user that created the form.""" - # clear cache - project = instance.project - project.refresh_from_db() - clear_project_cache(project.pk) - safe_delete(f"{IS_ORG}{instance.pk}") - - if created: - OwnerRole.add(instance.user, instance) - - if instance.created_by and instance.user != instance.created_by: - OwnerRole.add(instance.created_by, instance) - - set_project_perms_to_xform(instance, project) - - -post_save.connect( - set_xform_object_permissions, sender=XForm, dispatch_uid="xform_object_permissions" -) +@receiver(post_save, sender=Instance, dispatch_uid="create_entity") +def create_entity(sender, instance=Instance | None, created=False, **kwargs): + """Create an Entity if an Instance's form is also RegistrationForm""" + if created and instance: + if RegistrationForm.objects.filter( + xform=instance.xform, is_active=True + ).exists(): + registration_form = RegistrationForm.objects.filter( + xform=instance.xform, is_active=True + ).first() + create_new_entity(instance, registration_form) + + +@receiver(post_save, sender=Entity, dispatch_uid="update_entity_json") +def update_entity_json(sender, instance=Entity | None, created=False, **kwargs): + """Update and Entity json on creation""" + if created and 
instance: + json = instance.json + json["_id"] = instance.pk + # Queryset.update ensures the model's save is not called and + # the pre_save and post_save signals aren't sent + Entity.objects.filter(pk=instance.pk).update(json=json) diff --git a/onadata/apps/logger/tests/models/test_entity.py b/onadata/apps/logger/tests/models/test_entity.py new file mode 100644 index 0000000000..791805c7cb --- /dev/null +++ b/onadata/apps/logger/tests/models/test_entity.py @@ -0,0 +1,84 @@ +"""Tests for module onadata.apps.logger.models.entity""" + +import pytz +from datetime import datetime + +from onadata.apps.logger.models import Entity +from onadata.apps.logger.models.instance import Instance +from onadata.apps.main.tests.test_base import TestBase + + +class EntityTestCase(TestBase): + """Tests for model Entity""" + + def setUp(self): + super().setUp() + # Mute signal that creates Entity when Instance is saved + self._mute_post_save_signals([(Instance, "create_entity")]) + self.mocked_now = datetime(2023, 11, 8, 13, 17, 0, tzinfo=pytz.utc) + self.xform = self._publish_registration_form(self.user) + + def test_creation(self): + """We can create an Entity""" + reg_form = self.xform.registration_forms.first() + entity_json = { + "formhub/uuid": "d156a2dce4c34751af57f21ef5c4e6cc", + "geometry": "-1.286905 36.772845 0 0", + "species": "purpleheart", + "circumference_cm": 300, + "meta/instanceID": "uuid:9d3f042e-cfec-4d2a-8b5b-212e3b04802b", + "meta/instanceName": "300cm purpleheart", + "meta/entity/label": "300cm purpleheart", + "_xform_id_string": "trees_registration", + "_version": "2022110901", + } + xml = ( + '' + '' + "d156a2dce4c34751af57f21ef5c4e6cc" + "-1.286905 36.772845 0 0" + "purpleheart" + "300" + "" + "" + "uuid:9d3f042e-cfec-4d2a-8b5b-212e3b04802b" + "300cm purpleheart" + '' + "" + "" + "" + "" + ) + instance = Instance.objects.create( + xml=xml, + user=self.user, + xform=self.xform, + version=self.xform.version, + ) + instance.json = instance.get_full_dict() + 
instance.save() + instance.refresh_from_db() + entity = Entity.objects.create( + registration_form=reg_form, + json={**entity_json}, + version=self.xform.version, + xml=xml, + instance=instance, + ) + self.assertEqual(entity.registration_form, reg_form) + self.assertEqual(entity.json, {**entity_json, "_id": entity.pk}) + self.assertEqual(entity.version, self.xform.version) + self.assertEqual(entity.xml, xml) + self.assertEqual(entity.instance, instance) + self.assertEqual(entity.uuid, "dbee4c32-a922-451c-9df7-42f40bf78f48") + self.assertEqual(f"{entity}", f"{entity.pk}|{reg_form}") + + def test_optional_fields(self): + """Defaults for optional fields are correct""" + reg_form = self.xform.registration_forms.first() + entity = Entity.objects.create(registration_form=reg_form) + self.assertIsNone(entity.version) + self.assertEqual(entity.json, {"_id": entity.pk}) + self.assertIsNone(entity.instance) + self.assertEqual(entity.xml, "") diff --git a/onadata/apps/logger/tests/models/test_entity_list.py b/onadata/apps/logger/tests/models/test_entity_list.py new file mode 100644 index 0000000000..35e6c4d21a --- /dev/null +++ b/onadata/apps/logger/tests/models/test_entity_list.py @@ -0,0 +1,111 @@ +"""Tests for module onadata.apps.logger.models.entity_list""" + +import pytz +import os +from datetime import datetime +from unittest.mock import patch + +from django.db.utils import IntegrityError, DataError + +from onadata.apps.main.tests.test_base import TestBase +from onadata.apps.logger.models import EntityList, Project +from onadata.libs.utils.user_auth import get_user_default_project + + +class EntityListTestCase(TestBase): + """Tests for model EntityList""" + + def setUp(self) -> None: + super().setUp() + + self.project = get_user_default_project(self.user) + self.mocked_now = datetime(2023, 11, 8, 13, 17, 0, tzinfo=pytz.utc) + self.fixture_dir = os.path.join(self.this_directory, "fixtures", "entities") + + @patch("django.utils.timezone.now") + def test_creation(self, 
mock_now): + """We can create an EntityList""" + mock_now.return_value = self.mocked_now + entity_list = EntityList.objects.create( + name="trees", + project=self.project, + num_entities=2, + last_entity_update_time=self.mocked_now, + ) + self.assertEqual(EntityList.objects.count(), 1) + self.assertEqual(f"{entity_list}", f"trees|{self.project}") + self.assertEqual(entity_list.name, "trees") + self.assertEqual(entity_list.project, self.project) + self.assertEqual(entity_list.date_created, self.mocked_now) + self.assertEqual(entity_list.date_modified, self.mocked_now) + self.assertEqual(entity_list.num_entities, 2) + self.assertEqual(entity_list.last_entity_update_time, self.mocked_now) + + def test_name_project_unique_together(self): + """No duplicate name and project allowed""" + + EntityList.objects.create(name="trees", project=self.project) + + with self.assertRaises(IntegrityError): + EntityList.objects.create(name="trees", project=self.project) + + # We can create existing name, new project + project = Project.objects.create( + name="Project X", + organization=self.user, + created_by=self.user, + ) + EntityList.objects.create(name="trees", project=project) + # We can create new name, existing project + EntityList.objects.create(name="immunization", project=self.project) + + def test_max_name_length(self): + """Field `name` should not exceed 255 characters""" + # 256 characters fails + invalid_name = "w" * 256 + self.assertEqual(len(invalid_name), 256) + + with self.assertRaises(DataError): + EntityList.objects.create(name=invalid_name, project=self.project) + + # 255 characters succeeds + EntityList.objects.create(name=invalid_name[:-1], project=self.project) + + def test_properties(self): + """Returns the correct dataset properties""" + # Publish XLSForm and implicity create EntityList + self._publish_registration_form(self.user) + height_md = """ + | survey | + | | type | name | label | save_to | + | | geopoint | location | Tree location | geometry | + | | 
select_one species | species | Tree species | species | + | | integer | height | Tree height in m | height_m | + | | text | intake_notes | Intake notes | | + | choices | | | | | + | | list_name | name | label | | + | | species | wallaba | Wallaba | | + | | species | mora | Mora | | + | | species | purpleheart | Purpleheart | | + | | species | greenheart | Greenheart | | + | settings | | | | | + | | form_title | form_id | version | instance_name | + | | Trees registration | trees_registration_height | 2022110901 | concat(${height}, "m ", ${species})| + | entities | | | | | + | | list_name | label | | | + | | trees | concat(${height}, "m ", ${species}) | | |""" + self._publish_markdown( + height_md, self.user, self.project, id_string="trees_registration_height" + ) + entity_list = EntityList.objects.first() + # The properties should be from all forms creating Entities for the dataset + self.assertCountEqual( + entity_list.properties, + ["geometry", "species", "circumference_cm", "height_m"], + ) + + def test_defaults(self): + """Defaults for optional fields are correct""" + entity_list = EntityList.objects.create(name="trees", project=self.project) + self.assertEqual(entity_list.num_entities, 0) + self.assertIsNone(entity_list.last_entity_update_time) diff --git a/onadata/apps/logger/tests/models/test_follow_up_form.py b/onadata/apps/logger/tests/models/test_follow_up_form.py new file mode 100644 index 0000000000..927ce060c0 --- /dev/null +++ b/onadata/apps/logger/tests/models/test_follow_up_form.py @@ -0,0 +1,68 @@ +"""Tests for module onadata.apps.logger.models.follow_up_form""" + +import pytz +from datetime import datetime +from unittest.mock import patch + +from django.db.utils import IntegrityError + +from onadata.apps.main.tests.test_base import TestBase +from onadata.apps.logger.models import FollowUpForm, EntityList + + +class FollowUpFormTestCase(TestBase): + """Tests for model FollowUpForm""" + + def setUp(self): + super().setUp() + + self.mocked_now = 
datetime(2023, 11, 8, 13, 17, 0, tzinfo=pytz.utc) + self.xform = self._publish_follow_up_form(self.user) + self.entity_list = EntityList.objects.create(name="trees", project=self.project) + + @patch("django.utils.timezone.now") + def test_creation(self, mock_now): + """We can create a FollowUpForm""" + mock_now.return_value = self.mocked_now + form = FollowUpForm.objects.create( + entity_list=self.entity_list, + xform=self.xform, + is_active=True, + ) + self.assertEqual(FollowUpForm.objects.count(), 1) + self.assertEqual(f"{form}", f"{form.xform}|trees") + self.assertEqual(form.xform, self.xform) + self.assertTrue(form.is_active) + self.assertEqual(form.entity_list, self.entity_list) + self.assertEqual(form.date_created, self.mocked_now) + self.assertEqual(form.date_modified, self.mocked_now) + + def test_related_name(self): + """Related names for foreign keys work""" + FollowUpForm.objects.create( + entity_list=self.entity_list, + xform=self.xform, + ) + self.assertEqual(self.entity_list.follow_up_forms.count(), 1) + self.assertEqual(self.xform.follow_up_forms.count(), 1) + + def test_no_duplicate_entity_list_xform(self): + """No duplicates allowed for existing entity_list and xform""" + FollowUpForm.objects.create( + entity_list=self.entity_list, + xform=self.xform, + ) + + with self.assertRaises(IntegrityError): + FollowUpForm.objects.create( + entity_list=self.entity_list, + xform=self.xform, + ) + + def test_optional_fields(self): + """Defaults for optional fields correct""" + form = FollowUpForm.objects.create( + entity_list=self.entity_list, + xform=self.xform, + ) + self.assertTrue(form.is_active) diff --git a/onadata/apps/logger/tests/models/test_registration_form.py b/onadata/apps/logger/tests/models/test_registration_form.py new file mode 100644 index 0000000000..402f9c0afe --- /dev/null +++ b/onadata/apps/logger/tests/models/test_registration_form.py @@ -0,0 +1,179 @@ +"""Tests for module onadata.apps.logger.models.registration_form""" + +import json 
+import pytz +from datetime import datetime +from unittest.mock import patch + +from django.db.utils import IntegrityError + +from onadata.apps.main.tests.test_base import TestBase +from onadata.apps.logger.models import RegistrationForm, EntityList, XFormVersion +from onadata.apps.viewer.models import DataDictionary + + +class RegistrationFormTestCase(TestBase): + """Tests for model RegistrationForm""" + + def setUp(self): + super().setUp() + + self.mocked_now = datetime(2023, 11, 8, 13, 17, 0, tzinfo=pytz.utc) + + @patch("django.utils.timezone.now") + def test_creation(self, mock_now): + """We can create a RegistrationForm""" + mock_now.return_value = self.mocked_now + self._mute_post_save_signals( + [(DataDictionary, "create_registration_form_datadictionary")] + ) + self.xform = self._publish_registration_form(self.user) + entity_list = EntityList.objects.create(name="trees", project=self.project) + reg_form = RegistrationForm.objects.create( + entity_list=entity_list, + xform=self.xform, + is_active=True, + ) + self.assertEqual(RegistrationForm.objects.count(), 1) + self.assertEqual(f"{reg_form}", f"{reg_form.xform}|trees") + self.assertEqual(reg_form.xform, self.xform) + self.assertEqual(reg_form.entity_list, entity_list) + self.assertEqual(reg_form.date_created, self.mocked_now) + self.assertEqual(reg_form.date_modified, self.mocked_now) + self.assertTrue(reg_form.is_active) + # Related names are correct + self.assertEqual(entity_list.registration_forms.count(), 1) + self.assertEqual(self.xform.registration_forms.count(), 1) + + def test_get_save_to(self): + """Method `get_save_to` works correctly""" + self._mute_post_save_signals( + [(DataDictionary, "create_registration_form_datadictionary")] + ) + self.xform = self._publish_registration_form(self.user) + entity_list = EntityList.objects.create(name="trees", project=self.project) + form = RegistrationForm.objects.create( + entity_list=entity_list, + xform=self.xform, + ) + self.assertEqual( + 
form.get_save_to(), + { + "geometry": "location", + "species": "species", + "circumference_cm": "circumference", + }, + ) + # Passing version argument works + x_version_json = { + "name": "data", + "type": "survey", + "title": "Trees registration", + "version": "x", + "children": [ + { + "bind": {"required": "yes", "entities:saveto": "location"}, + "name": "location", + "type": "geopoint", + "label": "Tree location", + }, + { + "bind": {"required": "yes", "entities:saveto": "species"}, + "name": "species", + "type": "select one", + "label": "Tree species", + "children": [ + {"name": "wallaba", "label": "Wallaba"}, + {"name": "mora", "label": "Mora"}, + {"name": "purpleheart", "label": "Purpleheart"}, + {"name": "greenheart", "label": "Greenheart"}, + ], + "list_name": "species", + }, + { + "bind": {"required": "yes", "entities:saveto": "circumference"}, + "name": "circumference", + "type": "integer", + "label": "Tree circumference in cm", + }, + {"name": "intake_notes", "type": "text", "label": "Intake notes"}, + { + "name": "meta", + "type": "group", + "control": {"bodyless": "true"}, + "children": [ + { + "bind": {"readonly": "true()", "jr:preload": "uid"}, + "name": "instanceID", + "type": "calculate", + }, + { + "bind": { + "calculate": 'concat(${circumference}, "cm ", ${species})' + }, + "name": "instanceName", + "type": "calculate", + }, + { + "name": "entity", + "type": "entity", + "parameters": { + "label": 'concat(${circumference}, "cm ", ${species})', + "create": "1", + "dataset": "trees", + }, + }, + ], + }, + ], + "id_string": "trees_registration", + "sms_keyword": "trees_registration", + "entity_related": "true", + "default_language": "default", + } + XFormVersion.objects.create( + xform=self.xform, + version="x", + xls=self.xform.xls, + xml=self.xform.xml, + json=json.dumps(x_version_json), + ) + self.assertEqual( + form.get_save_to("x"), + { + "location": "location", + "species": "species", + "circumference": "circumference", + }, + ) + + def 
test_entity_list_xform_unique(self): + """No duplicates allowed for existing entity_list and xform""" + self._mute_post_save_signals( + [(DataDictionary, "create_registration_form_datadictionary")] + ) + self.xform = self._publish_registration_form(self.user) + entity_list = EntityList.objects.create(name="trees", project=self.project) + RegistrationForm.objects.create( + entity_list=entity_list, + xform=self.xform, + ) + + with self.assertRaises(IntegrityError): + RegistrationForm.objects.create( + entity_list=entity_list, + xform=self.xform, + ) + + def test_optional_fields(self): + """Defaults for optional fields correct""" + self._mute_post_save_signals( + [(DataDictionary, "create_registration_form_datadictionary")] + ) + self.xform = self._publish_registration_form(self.user) + entity_list = EntityList.objects.create(name="trees", project=self.project) + reg_form = RegistrationForm.objects.create( + entity_list=entity_list, + xform=self.xform, + ) + self.assertTrue(reg_form.is_active) diff --git a/onadata/apps/logger/tests/test_parsing.py b/onadata/apps/logger/tests/test_parsing.py index 48075fb213..31f8289e4f 100644 --- a/onadata/apps/logger/tests/test_parsing.py +++ b/onadata/apps/logger/tests/test_parsing.py @@ -2,7 +2,7 @@ import json import os import re -from xml.dom import minidom +from defusedxml import minidom from onadata.apps.main.tests.test_base import TestBase from onadata.apps.logger.xform_instance_parser import ( diff --git a/onadata/apps/logger/xform_instance_parser.py b/onadata/apps/logger/xform_instance_parser.py index b782f4e257..1733054632 100644 --- a/onadata/apps/logger/xform_instance_parser.py +++ b/onadata/apps/logger/xform_instance_parser.py @@ -4,7 +4,8 @@ """ import logging import re -from xml.dom import minidom, Node +from xml.dom import Node +from defusedxml import minidom import dateutil.parser @@ -94,6 +95,9 @@ def get_meta_from_xml(xml_str, meta_name): uuid_tag = uuid_tags[0] + if meta_name == "entity": + return uuid_tag + 
return uuid_tag.firstChild.nodeValue.strip() if uuid_tag.firstChild else None @@ -167,9 +171,9 @@ def clean_and_parse_xml(xml_string): Returns an XML object via minidom.parseString(xml_string) """ - clean_xml_str = xml_string.strip() - clean_xml_str = re.sub(r">\s+<", "><", smart_str(clean_xml_str)) - xml_obj = minidom.parseString(smart_str(clean_xml_str)) + clean_xml_str = re.sub(r">\s+<", "><", smart_str(xml_string.strip())) + xml_obj = minidom.parseString(clean_xml_str) + return xml_obj @@ -324,11 +328,11 @@ def _get_all_attributes(node): """ if hasattr(node, "hasAttributes") and node.hasAttributes(): for key in node.attributes.keys(): - yield key, node.getAttribute(key) + yield key, node.getAttribute(key), node.tagName for child in node.childNodes: - for pair in _get_all_attributes(child): - yield pair + for result in _get_all_attributes(child): + yield result class XFormInstanceParser: @@ -386,9 +390,13 @@ def _set_attributes(self): # pylint: disable=attribute-defined-outside-init self._attributes = {} all_attributes = list(_get_all_attributes(self._root_node)) - for key, value in all_attributes: + for key, value, node_name in all_attributes: # Since enketo forms may have the template attribute in # multiple xml tags, overriding and log when this occurs + if node_name == "entity": + # We ignore attributes for the entity node + continue + if key in self._attributes: logger = logging.getLogger("console_logger") logger.debug( @@ -447,3 +455,9 @@ def parse_xform_instance(xml_str, data_dictionary): """ parser = XFormInstanceParser(xml_str, data_dictionary) return parser.get_flat_dict_with_attributes() + + +def get_entity_uuid_from_xml(xml): + """Returns the uuid for the XML submission's entity""" + entity_node = get_meta_from_xml(xml, "entity") + return entity_node.getAttribute("id") diff --git a/onadata/apps/main/tests/fixtures/entities/instances/trees_registration.xml b/onadata/apps/main/tests/fixtures/entities/instances/trees_registration.xml new file mode 
100644 index 0000000000..c684aa01de --- /dev/null +++ b/onadata/apps/main/tests/fixtures/entities/instances/trees_registration.xml @@ -0,0 +1,28 @@ + + + \n + + \n + d156a2dce4c34751af57f21ef5c4e6cc + \n + + \n + -1.286905 36.772845 0 0 + \n + purpleheart + \n + 300 + \n + + \n + + \n + uuid:9d3f042e-cfec-4d2a-8b5b-212e3b04802b + \n + 300cm purpleheart + + + + + \n + \ No newline at end of file diff --git a/onadata/apps/main/tests/fixtures/entities/instances/trees_registration_2.xml b/onadata/apps/main/tests/fixtures/entities/instances/trees_registration_2.xml new file mode 100644 index 0000000000..a606bcc6d3 --- /dev/null +++ b/onadata/apps/main/tests/fixtures/entities/instances/trees_registration_2.xml @@ -0,0 +1,17 @@ + + + + d156a2dce4c34751af57f21ef5c4e6cc + + -1.305796 36.791849 0 0 + wallaba + 100 + Looks malnourished + + uuid:648e4106-2224-4bd7-8bf9-859102fc6fae + 100cm wallaba + + + + + \ No newline at end of file diff --git a/onadata/apps/main/tests/test_base.py b/onadata/apps/main/tests/test_base.py index b9d1d63c4b..7eac49240d 100644 --- a/onadata/apps/main/tests/test_base.py +++ b/onadata/apps/main/tests/test_base.py @@ -16,6 +16,7 @@ from django.conf import settings from django.contrib.auth import authenticate, get_user_model from django.core.files.uploadedfile import InMemoryUploadedFile +from django.db.models import signals from django.test import RequestFactory, TransactionTestCase from django.test.client import Client from django.utils import timezone @@ -27,7 +28,7 @@ from six.moves.urllib.request import urlopen from onadata.apps.api.viewsets.xform_viewset import XFormViewSet -from onadata.apps.logger.models import Attachment, Instance, XForm +from onadata.apps.logger.models import Instance, XForm, XFormVersion from onadata.apps.logger.views import submission from onadata.apps.logger.xform_instance_parser import clean_and_parse_xml from onadata.apps.main.models import UserProfile @@ -246,7 +247,7 @@ def _make_submission( self.factory = 
APIRequestFactory() if auth is None: - auth = DigestAuth("bob", "bob") + auth = DigestAuth(self.login_username, self.login_password) tmp_file = None @@ -271,7 +272,9 @@ def _make_submission( url = f"/{url_prefix}submission" request = self.factory.post(url, post_data) - request.user = authenticate(username=auth.username, password=auth.password) + request.user = authenticate( + request, username=auth.username, password=auth.password + ) # pylint: disable=attribute-defined-outside-init self.response = submission(request, username=username) @@ -304,10 +307,12 @@ def _make_submission_w_attachment(self, path, attachment_path): data["media_file"] = open(attachment_path, "rb") url = f"/{self.user.username}/submission" - auth = DigestAuth("bob", "bob") + auth = DigestAuth(self.login_username, self.login_password) self.factory = APIRequestFactory() request = self.factory.post(url, data) - request.user = authenticate(username="bob", password="bob") + request.user = authenticate( + request, username=auth.username, password=auth.password + ) # pylint: disable=attribute-defined-outside-init self.response = submission(request, username=self.user.username) @@ -360,8 +365,8 @@ def _internet_on(self, url="http://74.125.113.99"): def _set_auth_headers(self, username, password): return { - "HTTP_AUTHORIZATION": "Basic " + - base64.b64encode(f"{username}:{password}".encode("utf-8")).decode( + "HTTP_AUTHORIZATION": "Basic " + + base64.b64encode(f"{username}:{password}".encode("utf-8")).decode( "utf-8" ), } @@ -401,8 +406,11 @@ def _publish_submit_geojson(self, has_empty_geoms=False, only_geopoints=False): "tests", "fixtures", "geolocation", - ("GeoLocationFormNoPolylineOrPolygon.xlsx" - if only_geopoints else "GeoLocationForm.xlsx"), + ( + "GeoLocationFormNoPolylineOrPolygon.xlsx" + if only_geopoints + else "GeoLocationForm.xlsx" + ), ) self._publish_xls_file_and_set_xform(path) @@ -453,18 +461,29 @@ def _publish_markdown(self, md_xlsform, user, project=None, **kwargs): kwargs["name"] = 
"data" survey = self.md_to_pyxform_survey(md_xlsform, kwargs=kwargs) survey["sms_keyword"] = survey["id_string"] + if not project or not hasattr(self, "project"): project = get_user_default_project(user) - xform = DataDictionary( + self.project = project + + data_dict = DataDictionary( created_by=user, user=user, xml=survey.to_xml(), - json=survey.to_json(), + json=survey.to_json_dict(), project=project, + version=survey.get("version"), + ) + data_dict.save() + latest_form = XForm.objects.all().order_by("-pk").first() + XFormVersion.objects.create( + xform=latest_form, + version=survey.get("version"), + xml=data_dict.xml, + json=survey.to_json(), ) - xform.save() - return xform + return data_dict def _test_csv_response(self, response, csv_file_path): headers = dict(response.items()) @@ -493,3 +512,61 @@ def _test_csv_files(self, csv_file, csv_file_path): if None in row: row.pop(None) self.assertDictContainsSubset(row, data[index]) + + def _mute_post_save_signals(self, target_signals: list[tuple]): + """Disable post_save signals""" + + for signal in target_signals: + model, dispatch_uid = signal + signals.post_save.disconnect(sender=model, dispatch_uid=dispatch_uid) + + def _publish_registration_form(self, user, project=None): + md = """ + | survey | + | | type | name | label | save_to | + | | geopoint | location | Tree location | geometry | + | | select_one species | species | Tree species | species | + | | integer | circumference | Tree circumference in cm | circumference_cm | + | | text | intake_notes | Intake notes | | + | choices | | | | | + | | list_name | name | label | | + | | species | wallaba | Wallaba | | + | | species | mora | Mora | | + | | species | purpleheart | Purpleheart | | + | | species | greenheart | Greenheart | | + | settings | | | | | + | | form_title | form_id | version | instance_name | + | | Trees registration | trees_registration | 2022110901 | concat(${circumference}, "cm ", ${species})| + | entities | | | | | + | | list_name | label | | 
| + | | trees | concat(${circumference}, "cm ", ${species})| | |""" + self._publish_markdown( + md, + user, + project, + id_string="trees_registration", + title="Trees registration", + ) + latest_form = XForm.objects.all().order_by("-pk").first() + + return latest_form + + def _publish_follow_up_form(self, user, project=None): + md = """ + | survey | + | | type | name | label | required | + | | select_one_from_file trees.csv | tree | Select the tree you are visiting | yes | + | settings| | | | | + | | form_title | form_id | version | | + | | Trees follow-up | trees_follow_up | 2022111801 | | + """ + self._publish_markdown( + md, + user, + project, + id_string="trees_follow_up", + title="Trees follow-up", + ) + latest_form = XForm.objects.all().order_by("-pk").first() + + return latest_form diff --git a/onadata/apps/main/tests/test_process.py b/onadata/apps/main/tests/test_process.py index 0dc53e8f6b..6414e0c219 100644 --- a/onadata/apps/main/tests/test_process.py +++ b/onadata/apps/main/tests/test_process.py @@ -11,7 +11,8 @@ from hashlib import md5 from io import BytesIO from unittest.mock import patch -from xml.dom import Node, minidom +from xml.dom import Node +from defusedxml import minidom from django.conf import settings from django.core.files.uploadedfile import UploadedFile diff --git a/onadata/apps/viewer/migrations/0003_genericexport.py b/onadata/apps/viewer/migrations/0003_genericexport.py new file mode 100644 index 0000000000..05d8ce2991 --- /dev/null +++ b/onadata/apps/viewer/migrations/0003_genericexport.py @@ -0,0 +1,37 @@ +# Generated by Django 3.2.23 on 2024-01-22 12:29 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('contenttypes', '0002_remove_content_type_name'), + ('viewer', '0002_alter_export_export_type'), + ] + + operations = [ + migrations.CreateModel( + name='GenericExport', + fields=[ + ('id', models.AutoField(auto_created=True, 
primary_key=True, serialize=False, verbose_name='ID')), + ('export_type', models.CharField(choices=[('xlsx', 'Excel'), ('csv', 'CSV'), ('zip', 'ZIP'), ('kml', 'kml'), ('csv_zip', 'CSV ZIP'), ('sav_zip', 'SAV ZIP'), ('sav', 'SAV'), ('external', 'Excel'), ('osm', 'osm'), ('gsheets', 'Google Sheets'), ('geojson', 'geojson')], default='xlsx', max_length=10)), + ('created_on', models.DateTimeField(auto_now_add=True)), + ('filename', models.CharField(blank=True, max_length=255, null=True)), + ('filedir', models.CharField(blank=True, max_length=255, null=True)), + ('task_id', models.CharField(blank=True, max_length=255, null=True)), + ('time_of_last_submission', models.DateTimeField(default=None, null=True)), + ('internal_status', models.SmallIntegerField(default=0)), + ('export_url', models.URLField(default=None, null=True)), + ('options', models.JSONField(default=dict)), + ('error_message', models.CharField(blank=True, max_length=255, null=True)), + ('object_id', models.PositiveIntegerField()), + ('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')), + ], + options={ + 'abstract': False, + 'unique_together': {('content_type', 'object_id', 'filename')}, + }, + ), + ] diff --git a/onadata/apps/viewer/models/data_dictionary.py b/onadata/apps/viewer/models/data_dictionary.py index d71486ccd3..9c8348f188 100644 --- a/onadata/apps/viewer/models/data_dictionary.py +++ b/onadata/apps/viewer/models/data_dictionary.py @@ -2,9 +2,11 @@ """ DataDictionary model. 
def create_registration_form(sender, instance=None, created=False, **kwargs):
    """Create a RegistrationForm for a form that defines entities.

    The target EntityList is created if it does not exist; otherwise the
    existing EntityList is reused. Active RegistrationForms that contribute
    to a different EntityList are deactivated, so only the dataset named by
    the latest form version receives entities.
    """
    instance_json = instance.json

    # `json` may be stored as a serialized string or as a dict.
    if isinstance(instance_json, str):
        instance_json = json.loads(instance_json)

    if not instance_json.get("entity_related"):
        return

    children = instance_json.get("children", [])
    meta_list = filter(lambda child: child.get("name") == "meta", children)

    for meta in meta_list:
        for child in meta.get("children", []):
            if child.get("name") == "entity":
                parameters = child.get("parameters", {})
                dataset = parameters.get("dataset")
                entity_list, _ = EntityList.objects.get_or_create(
                    name=dataset, project=instance.project
                )
                (
                    registration_form,
                    registration_form_created,
                ) = RegistrationForm.objects.get_or_create(
                    entity_list=entity_list,
                    xform=instance,
                )

                if registration_form_created:
                    # RegistrationForms contributing to any previous
                    # EntityList should be disabled. Filtering on is_active
                    # avoids redundant saves on rows that are already
                    # inactive (the original exclude(entity_list=...,
                    # is_active=True) re-saved those as a no-op).
                    for form in instance.registration_forms.filter(
                        is_active=True
                    ).exclude(entity_list=entity_list):
                        form.is_active = False
                        form.save()
                else:
                    # If previously disabled, enable it
                    registration_form.is_active = True
                    registration_form.save()

                return


post_save.connect(
    create_registration_form,
    sender=DataDictionary,
    dispatch_uid="create_registration_form_datadictionary",
)


def create_follow_up_form(sender, instance=None, created=False, **kwargs):
    """Create a FollowUpForm for a form that consumes entities.

    If the form references a dataset that is an EntityList (via a
    select_one itemset), a FollowUpForm linking the two is created along
    with the MetaData entry attaching the dataset as a media file.
    FollowUpForms whose EntityLists are no longer referenced by the
    current form version are deactivated.
    """
    instance_json = instance.json

    if isinstance(instance_json, str):
        instance_json = json.loads(instance_json)

    children = instance_json.get("children", [])
    active_entity_datasets: list[str] = []
    # Resolve the concrete XForm so MetaData is attached to the XForm
    # content type rather than the DataDictionary proxy.
    xform = XForm.objects.get(pk=instance.pk)

    for child in children:
        # .get avoids a KeyError on children without a "type" key.
        if child.get("type") == "select one" and "itemset" in child:
            dataset_name = child["itemset"].split(".")[0]

            try:
                entity_list = EntityList.objects.get(
                    name=dataset_name, project=instance.project
                )

            except EntityList.DoesNotExist:
                # No EntityList dataset was found with the specified
                # name, we simply do nothing
                continue

            active_entity_datasets.append(entity_list.name)
            # Renamed from `created` to avoid shadowing the signal's
            # `created` argument.
            follow_up_form, form_created = FollowUpForm.objects.get_or_create(
                entity_list=entity_list, xform=instance
            )

            if not form_created and not follow_up_form.is_active:
                # If previously deactivated, re-activate
                follow_up_form.is_active = True
                follow_up_form.save()

            content_type = ContentType.objects.get_for_model(xform)
            MetaData.objects.get_or_create(
                object_id=xform.pk,
                content_type=content_type,
                data_type="media",
                data_value=f"entity_list {entity_list.pk} {entity_list.name}",
            )

    # Deactivate the XForm's FollowUpForms whose EntityList are not
    # referenced by the updated XForm version
    inactive_follow_up_forms = FollowUpForm.objects.filter(xform=xform).exclude(
        entity_list__name__in=active_entity_datasets
    )
    inactive_follow_up_forms.update(is_active=False)


post_save.connect(
    create_follow_up_form,
    sender=DataDictionary,
    dispatch_uid="create_follow_up_datadictionary",
)


def disable_registration_form(sender, instance=None, created=False, **kwargs):
    """Disable registration forms if form no longer contains entities definitions"""
    instance_json = instance.json

    if isinstance(instance_json, str):
        instance_json = json.loads(instance_json)

    if not instance_json.get("entity_related"):
        # The form no longer defines entities, so deactivate any active
        # registration forms attached to it. (The previous comment said
        # the opposite of what this branch does.)
        for registration_form in instance.registration_forms.filter(is_active=True):
            registration_form.is_active = False
            registration_form.save()


post_save.connect(
    disable_registration_form,
    sender=DataDictionary,
    dispatch_uid="disable_registration_form_datadictionary",
)
of export files a user can keep MAX_EXPORTS = 10 - # Required fields - xform = models.ForeignKey("logger.XForm", on_delete=models.CASCADE) export_type = models.CharField( max_length=10, choices=EXPORT_TYPES, default=XLSX_EXPORT ) @@ -157,39 +153,6 @@ class Export(models.Model): options = JSONField(default=dict, null=False) error_message = models.CharField(max_length=255, null=True, blank=True) - class Meta: - app_label = "viewer" - unique_together = (("xform", "filename"),) - - def __str__(self): - return f"{self.export_type} - {self.xform} ({self.filename})" - - def save(self, *args, **kwargs): # pylint: disable=arguments-differ - if not self.pk and self.xform: - # if new, check if we've hit our limit for exports for this form, - # if so, delete oldest - num_existing_exports = Export.objects.filter( - xform=self.xform, export_type=self.export_type - ).count() - - if num_existing_exports >= self.MAX_EXPORTS: - Export._delete_oldest_export(self.xform, self.export_type) - - # update time_of_last_submission with - # xform.time_of_last_submission_update - # pylint: disable=no-member - self.time_of_last_submission = self.xform.time_of_last_submission_update() - if self.filename: - self.internal_status = Export.SUCCESSFUL - super().save(*args, **kwargs) - - @classmethod - def _delete_oldest_export(cls, xform, export_type): - oldest_export = Export.objects.filter( - xform=xform, export_type=export_type - ).order_by("created_on")[0] - oldest_export.delete() - @property def is_pending(self): """ @@ -218,23 +181,6 @@ def status(self): return Export.PENDING - def set_filename(self, filename): - """ - Set the filename of an export and mark internal_status as - Export.SUCCESSFUL. 
- """ - self.filename = filename - self.internal_status = Export.SUCCESSFUL - self._update_filedir() - - def _update_filedir(self): - if not self.filename: - raise AssertionError() - # pylint: disable=no-member - self.filedir = os.path.join( - self.xform.user.username, "exports", self.xform.id_string, self.export_type - ) - @property def filepath(self): """ @@ -266,6 +212,67 @@ def full_filepath(self): return tmp.name return None + class Meta: + abstract = True + + +class Export(ExportBaseModel): + """ + Class representing a data export from an XForm + """ + + xform = models.ForeignKey("logger.XForm", on_delete=models.CASCADE) + + class Meta(ExportBaseModel.Meta): + app_label = "viewer" + unique_together = (("xform", "filename"),) + + def __str__(self): + return f"{self.export_type} - {self.xform} ({self.filename})" + + def save(self, *args, **kwargs): # pylint: disable=arguments-differ + if not self.pk and self.xform: + # if new, check if we've hit our limit for exports for this form, + # if so, delete oldest + num_existing_exports = Export.objects.filter( + xform=self.xform, export_type=self.export_type + ).count() + + if num_existing_exports >= self.MAX_EXPORTS: + Export._delete_oldest_export(self.xform, self.export_type) + + # update time_of_last_submission with + # xform.time_of_last_submission_update + # pylint: disable=no-member + self.time_of_last_submission = self.xform.time_of_last_submission_update() + if self.filename: + self.internal_status = Export.SUCCESSFUL + super().save(*args, **kwargs) + + @classmethod + def _delete_oldest_export(cls, xform, export_type): + oldest_export = Export.objects.filter( + xform=xform, export_type=export_type + ).order_by("created_on")[0] + oldest_export.delete() + + def set_filename(self, filename): + """ + Set the filename of an export and mark internal_status as + Export.SUCCESSFUL. 
+ """ + self.filename = filename + self.internal_status = Export.SUCCESSFUL + self._update_filedir() + + def _update_filedir(self): + if not self.filename: + raise AssertionError() + # pylint: disable=no-member + self.filedir = os.path.join( + self.xform.user.username, "exports", self.xform.id_string, self.export_type + ) + @classmethod def exports_outdated(cls, xform, export_type, options=None): """ @@ -308,3 +315,120 @@ def is_filename_unique(cls, xform, filename): post_delete.connect(export_delete_callback, sender=Export) + + +class GenericExport(ExportBaseModel): + content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) + object_id = models.PositiveIntegerField() + content_object = GenericForeignKey() + + class Meta(ExportBaseModel.Meta): + app_label = "viewer" + unique_together = (("content_type", "object_id", "filename"),) + + @classmethod + def get_object_content_type(cls, instance): + return ContentType.objects.get_for_model(instance) + + @classmethod + def exports_outdated(cls, instance, export_type, options=None): + """ + Return True if export is outdated or there is no export matching the + export_type with the specified options. 
+ """ + if options is None: + options = {} + + instance_ct = cls.get_object_content_type(instance) + export_options = get_export_options_query_kwargs(options) + # Get the most recent Export + try: + latest_export = cls.objects.filter( + content_type=instance_ct, + object_id=instance.id, + export_type=export_type, + internal_status__in=[cls.SUCCESSFUL, cls.PENDING], + **export_options, + ).latest("created_on") + except cls.DoesNotExist: + return True + + export_last_submission_time = latest_export.time_of_last_submission + + if latest_export.time_of_last_submission is not None: + if ( + instance_ct.model == "xform" + and instance.time_of_last_submission_update() is not None + ): + return ( + export_last_submission_time + < instance.time_of_last_submission_update() + ) + + if ( + instance_ct.model == "entity_list" + and instance.last_entity_update_time is not None + ): + return export_last_submission_time < instance.last_entity_update_time + + # Return true if we can't determine the status, to force auto-generation + return True + + @classmethod + def is_filename_unique(cls, instance, filename): + """ + Return True if the filename is unique, False otherwise + """ + instance_ct = cls.get_object_content_type(instance) + + return ( + cls.objects.filter( + content_type=instance_ct, object_id=instance.id, filename=filename + ).count() + == 0 + ) + + @classmethod + def _delete_oldest_export(cls, instance, export_type): + instance_ct = cls.get_object_content_type(instance) + oldest_export = cls.objects.filter( + content_type=instance_ct, object_id=instance.id, export_type=export_type + ).order_by("created_on")[0] + oldest_export.delete() + + def save(self, *args, **kwargs): # pylint: disable=arguments-differ + if not self.pk: + # if new, check if we've hit our limit for exports for this instance, + # if so, delete oldest + instance_ct = ContentType.objects.get_for_model(self.content_object) + num_existing_exports = GenericExport.objects.filter( + content_type=instance_ct, + 
object_id=self.content_object.id, + export_type=self.export_type, + ).count() + + if num_existing_exports >= self.MAX_EXPORTS: + GenericExport._delete_oldest_export( + self.content_object, self.export_type + ) + + # update time_of_last_submission with + # xform.time_of_last_submission_update + # pylint: disable=no-member + if instance_ct.model == "xform": + self.time_of_last_submission = ( + self.content_object.time_of_last_submission_update() + ) + + elif instance_ct.model == "entity_list": + self.time_of_last_submission = ( + self.content_object.last_entity_update_time + ) + + if self.filename: + self.internal_status = self.SUCCESSFUL + + super().save(*args, **kwargs) + + +post_delete.connect(export_delete_callback, sender=GenericExport) diff --git a/onadata/apps/viewer/models/tests/__init__.py b/onadata/apps/viewer/models/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/onadata/apps/viewer/models/tests/test_data_dictionary.py b/onadata/apps/viewer/models/tests/test_data_dictionary.py new file mode 100644 index 0000000000..b9528e9b3f --- /dev/null +++ b/onadata/apps/viewer/models/tests/test_data_dictionary.py @@ -0,0 +1,271 @@ +"""Tests for onadata.apps.viewer.models.data_dictionary""" + +import json + +from onadata.apps.main.tests.test_base import TestBase +from onadata.apps.logger.models.entity_list import EntityList +from onadata.apps.logger.models.xform import XForm +from onadata.apps.logger.models.follow_up_form import FollowUpForm +from onadata.apps.logger.models.registration_form import RegistrationForm +from onadata.apps.main.models.meta_data import MetaData +from onadata.libs.utils.user_auth import get_user_default_project + + +class DataDictionaryTestCase(TestBase): + """Tests for model DataDictionary""" + + def setUp(self): + super().setUp() + + self.project = get_user_default_project(self.user) + self.registration_form = """ + | survey | + | | type | name | label | save_to | + | | geopoint | location | Tree location | 
geometry | + | | select_one species | species | Tree species | species | + | | integer | circumference | Tree circumference in cm | circumference_cm | + | | text | intake_notes | Intake notes | | + | choices | | | | | + | | list_name | name | label | | + | | species | wallaba | Wallaba | | + | | species | mora | Mora | | + | | species | purpleheart | Purpleheart | | + | | species | greenheart | Greenheart | | + | settings | | | | | + | | form_title | form_id | version | instance_name | + | | Trees registration | trees_registration | 2022110901 | concat(${circumference}, "cm ", ${species})| + | entities | | | | | + | | list_name | label | | | + | | trees | concat(${circumference}, "cm ", ${species})| | |""" + self.follow_up_form = """ + | survey | + | | type | name | label | required | + | | select_one_from_file trees.csv | tree | Select the tree you are visiting | yes | + | settings| | | | | + | | form_title | form_id | version | | + | | Trees follow-up | trees_follow_up | 2022111801 | | + """ + + def _replace_form(self, markdown, data_dict): + survey = self.md_to_pyxform_survey(markdown, kwargs={"name": "data"}) + data_dict.xml = survey.to_xml() + data_dict.json = json.loads(survey.to_json()) + data_dict.save() + + def test_create_registration_form(self): + """Registration form created successfully""" + self._publish_markdown(self.registration_form, self.user) + self.assertEqual(XForm.objects.count(), 1) + self.assertTrue(EntityList.objects.filter(name="trees").exists()) + self.assertEqual(RegistrationForm.objects.count(), 1) + entity_list = EntityList.objects.first() + reg_form = RegistrationForm.objects.first() + latest_form = XForm.objects.all().order_by("-pk").first() + self.assertEqual(entity_list.name, "trees") + self.assertEqual(reg_form.xform, latest_form) + self.assertEqual( + reg_form.get_save_to(), + { + "geometry": "location", + "species": "species", + "circumference_cm": "circumference", + }, + ) + self.assertEqual(reg_form.entity_list, entity_list) + 
self.assertTrue(reg_form.is_active) + + def test_create_follow_up_form(self): + """Follow up form created successfully""" + # Simulate existing trees dataset + EntityList.objects.create(name="trees", project=self.project) + self._publish_markdown(self.follow_up_form, self.user) + self.assertEqual(XForm.objects.count(), 1) + latest_form = XForm.objects.all().order_by("-pk").first() + entity_list = EntityList.objects.first() + self.assertTrue( + FollowUpForm.objects.filter( + entity_list__name="trees", xform=latest_form + ).exists() + ) + self.assertTrue( + MetaData.objects.filter( + object_id=latest_form.pk, + data_type="media", + data_value=f"entity_list {entity_list.pk} trees", + ).exists() + ) + + def test_follow_up_form_list_not_found(self): + """Entity list not found when publishing followup form""" + self._publish_markdown(self.follow_up_form, self.user) + self.assertEqual(XForm.objects.count(), 1) + self.assertEqual(FollowUpForm.objects.count(), 0) + + def test_replace_form_entities_save_to(self): + """Replacing entity properties works""" + data_dict = self._publish_markdown(self.registration_form, self.user) + registration_form = RegistrationForm.objects.first() + self.assertEqual( + registration_form.get_save_to(), + { + "geometry": "location", + "species": "species", + "circumference_cm": "circumference", + }, + ) + md = """ + | survey | + | | type | name | label | save_to | + | | geopoint | location | Tree location | location | + | | select_one species | species | Tree species | species | + | | integer | circumference | Tree circumference in cm | | + | | text | intake_notes | Intake notes | | + | choices | | | | | + | | list_name | name | label | | + | | species | wallaba | Wallaba | | + | | species | mora | Mora | | + | | species | purpleheart | Purpleheart | | + | | species | greenheart | Greenheart | | + | settings | | | | | + | | form_title | form_id | version | instance_name | + | | Trees registration | trees_registration | 2022110901 | 
concat(${circumference}, "cm ", ${species})| + | entities | | | | | + | | list_name | label | | | + | | trees | concat(${circumference}, "cm ", ${species})| | |""" + self._replace_form(md, data_dict) + registration_form.refresh_from_db() + self.assertEqual( + registration_form.get_save_to(), + { + "location": "location", + "species": "species", + }, + ) + + def test_replace_form_entities_list_name(self): + """Replacing entities list_name works""" + data_dict = self._publish_markdown(self.registration_form, self.user) + # name changed entities list_name to `trees_registration` + md = """ + | survey | + | | type | name | label | save_to | + | | geopoint | location | Tree location | geometry | + | | select_one species | species | Tree species | species | + | | integer | circumference | Tree circumference in cm | circumference_cm | + | | text | intake_notes | Intake notes | | + | choices | | | | | + | | list_name | name | label | | + | | species | wallaba | Wallaba | | + | | species | mora | Mora | | + | | species | purpleheart | Purpleheart | | + | | species | greenheart | Greenheart | | + | settings | | | | | + | | form_title | form_id | version | instance_name | + | | Trees registration | trees_registration | 2022110901 | concat(${circumference}, "cm ", ${species})| + | entities | | | | | + | | list_name | label | | | + | | trees_registration | concat(${circumference}, "cm ", ${species})| | |""" + self._replace_form(md, data_dict) + # A new EntityList is created + self.assertTrue(EntityList.objects.filter(name="trees_registration").exists()) + # A new RegistrationForm referencing the new entity list is + # created for the XForm + latest_form = XForm.objects.all().order_by("-pk").first() + self.assertTrue( + RegistrationForm.objects.filter( + entity_list__name="trees_registration", xform=latest_form + ).exists() + ) + # RegistrationForm contributing to the previous EntityList + # should be disabled + registration_forms = RegistrationForm.objects.all().order_by("pk") 
+ prev_registration_form = registration_forms[0] + new_registration_form = registration_forms[1] + self.assertFalse(prev_registration_form.is_active) + self.assertTrue(new_registration_form.is_active) + + def test_replace_form_remove_entities(self): + """Removing entities definition disables registration form""" + data_dict = self._publish_markdown(self.registration_form, self.user) + md = """ + | survey | + | | type | name | label | | + | | geopoint | location | Tree location | | + | | select_one species | species | Tree species | | + | | integer | circumference | Tree circumference in cm | | + | | text | intake_notes | Intake notes | | + | choices | | | | | + | | list_name | name | label | | + | | species | wallaba | Wallaba | | + | | species | mora | Mora | | + | | species | purpleheart | Purpleheart | | + | | species | greenheart | Greenheart | | + | settings | | | | | + | | form_title | form_id | version | instance_name | + | | Trees registration | trees_registration | 2022110901 | concat(${circumference}, "cm ", ${species})|""" + self._replace_form(md, data_dict) + registration_form = RegistrationForm.objects.first() + self.assertFalse(registration_form.is_active) + + def test_registration_form_reactivated(self): + """Existing RegistrationForm if disabled is activated""" + data_dict = self._publish_markdown(self.registration_form, self.user) + registration_form = RegistrationForm.objects.first() + # Disable registration form + registration_form.is_active = False + registration_form.save() + registration_form.refresh_from_db() + self.assertFalse(registration_form.is_active) + # Replace + self._replace_form(self.registration_form, data_dict) + registration_form.refresh_from_db() + self.assertTrue(registration_form.is_active) + + def test_followup_form_remove_dataset(self): + """FollowUpForm is deactivated if entity dataset reference removed""" + # Simulate existing trees dataset + EntityList.objects.create(name="trees", project=self.project) + data_dict = 
self._publish_markdown(self.follow_up_form, self.user) + follow_up_form = FollowUpForm.objects.filter(entity_list__name="trees").first() + self.assertTrue(follow_up_form.is_active) + # Replace + md = """ + | survey | + | | type | name | label | + | | text | tree | What is the name of the tree? | + | | integer | circumference | Tree circumeference in cm | + | settings| | | | + | | form_title | form_id | version | + | | Trees follow-up | trees_follow_up | 2022111801 | + """ + self._replace_form(md, data_dict) + follow_up_form.refresh_from_db() + self.assertFalse(follow_up_form.is_active) + + def test_reactivate_followup_form(self): + """FollowUpForm is re-activated if previously activated + + If entity dataset is referenced again, deactivate FollowUpForm + is re-activated + """ + # Simulate existing deactivate FollowUpForm + md = """ + | survey | + | | type | name | label | + | | text | tree | What is the name of the tree? | + | | integer | circumference | Tree circumeference in cm | + | settings| | | | + | | form_title | form_id | version | + | | Trees follow-up | trees_follow_up| 2022111801 | + """ + data_dict = self._publish_markdown(md, self.user) + entity_list = EntityList.objects.create(name="trees", project=self.project) + xform = XForm.objects.first() + form = FollowUpForm.objects.create( + entity_list=entity_list, xform=xform, is_active=False + ) + self.assertFalse(form.is_active) + # Replace + self._replace_form(self.follow_up_form, data_dict) + form.refresh_from_db() + self.assertTrue(form.is_active) diff --git a/onadata/libs/filters.py b/onadata/libs/filters.py index 22726b4a7d..f203601853 100644 --- a/onadata/libs/filters.py +++ b/onadata/libs/filters.py @@ -769,3 +769,18 @@ def filter_queryset(self, request, queryset, view): return queryset.filter(shared=True) return queryset + + +# pylint: disable=too-few-public-methods +class EntityListProjectFilter(filters.BaseFilterBackend): + """EntityList `project` filter.""" + + # pylint: disable=unused-argument 
+ def filter_queryset(self, request, queryset, view): + """Filter by project id""" + project_id = request.query_params.get("project") + + if project_id: + return queryset.filter(project__pk=project_id) + + return queryset diff --git a/onadata/libs/models/__init__.py b/onadata/libs/models/__init__.py index e69de29bb2..69c059af27 100644 --- a/onadata/libs/models/__init__.py +++ b/onadata/libs/models/__init__.py @@ -0,0 +1 @@ +from .base_model import BaseModel # noqa diff --git a/onadata/libs/models/base_model.py b/onadata/libs/models/base_model.py index 42d8b02576..240fdd4dd2 100644 --- a/onadata/libs/models/base_model.py +++ b/onadata/libs/models/base_model.py @@ -2,6 +2,8 @@ class BaseModel(models.Model): + date_created = models.DateTimeField(auto_now_add=True) + date_modified = models.DateTimeField(auto_now=True) class Meta: abstract = True diff --git a/onadata/libs/pagination.py b/onadata/libs/pagination.py index d103a09c19..ef3a3b26b6 100644 --- a/onadata/libs/pagination.py +++ b/onadata/libs/pagination.py @@ -15,6 +15,7 @@ replace_query_param, ) from rest_framework.request import Request +from rest_framework.response import Response class StandardPageNumberPagination(PageNumberPagination): @@ -73,6 +74,10 @@ def generate_link_header(self, request: Request, queryset: QuerySet): return {"Link": ", ".join(links)} + def get_paginated_response(self, data): + """Override to remove the OrderedDict response""" + return Response(data) + class CountOverridablePaginator(Paginator): """Count override Paginator diff --git a/onadata/libs/serializers/entity_serializer.py b/onadata/libs/serializers/entity_serializer.py new file mode 100644 index 0000000000..ee1d37729e --- /dev/null +++ b/onadata/libs/serializers/entity_serializer.py @@ -0,0 +1,129 @@ +from rest_framework import serializers + +from onadata.apps.logger.models import ( + Entity, + EntityList, + FollowUpForm, + Project, + RegistrationForm, + XForm, +) + + +class 
class EntityListSerializer(serializers.HyperlinkedModelSerializer):
    """Default Serializer for EntityList"""

    url = serializers.HyperlinkedIdentityField(
        view_name="entity_list-detail", lookup_field="pk"
    )
    project = serializers.HyperlinkedRelatedField(
        view_name="project-detail",
        lookup_field="pk",
        queryset=Project.objects.all(),
    )
    public = serializers.BooleanField(source="project.shared")
    num_registration_forms = serializers.SerializerMethodField()
    num_follow_up_forms = serializers.SerializerMethodField()

    class Meta:
        model = EntityList
        fields = (
            "url",
            "id",
            "name",
            "project",
            "public",
            "date_created",
            "date_modified",
            "num_registration_forms",
            "num_follow_up_forms",
            "num_entities",
        )

    def get_num_registration_forms(self, obj: EntityList) -> int:
        """Returns number of RegistrationForms for EntityList object"""
        return obj.registration_forms.count()

    def get_num_follow_up_forms(self, obj: EntityList) -> int:
        """Returns number of FollowUpForms consuming Entities from dataset"""
        return obj.follow_up_forms.count()


class RegistrationFormInlineSerializer(serializers.HyperlinkedModelSerializer):
    """Inline serializer for RegistrationForm"""

    title = serializers.CharField(source="xform.title")
    xform = serializers.HyperlinkedRelatedField(
        view_name="xform-detail",
        lookup_field="pk",
        queryset=XForm.objects.all(),
    )
    id_string = serializers.CharField(source="xform.id_string")
    save_to = serializers.SerializerMethodField()

    class Meta:
        model = RegistrationForm
        fields = (
            "title",
            "xform",
            "id_string",
            "save_to",
        )

    def get_save_to(self, obj: RegistrationForm) -> list[str]:
        """Returns the save_to fields defined in the XLSForm"""
        # Iterating the mapping yields its keys.
        return list(obj.get_save_to())


class FollowUpFormInlineSerializer(serializers.HyperlinkedModelSerializer):
    """Inline serializer for FollowUpForm"""

    title = serializers.CharField(source="xform.title")
    xform = serializers.HyperlinkedRelatedField(
        view_name="xform-detail",
        lookup_field="pk",
        queryset=XForm.objects.all(),
    )
    id_string = serializers.CharField(source="xform.id_string")

    class Meta:
        model = FollowUpForm
        fields = (
            "title",
            "xform",
            "id_string",
        )


class EntityListDetailSerializer(EntityListSerializer):
    """Serializer for EntityList detail"""

    registration_forms = RegistrationFormInlineSerializer(many=True, read_only=True)
    follow_up_forms = FollowUpFormInlineSerializer(many=True, read_only=True)

    class Meta:
        model = EntityList
        fields = (
            "id",
            "name",
            "project",
            "public",
            "date_created",
            "date_modified",
            "num_registration_forms",
            "num_follow_up_forms",
            "num_entities",
            "registration_forms",
            "follow_up_forms",
        )


class EntitySerializer(serializers.ModelSerializer):
    """Serializer for Entity"""

    class Meta:
        model = Entity
        fields = ("json",)

    def to_representation(self, instance):
        # An Entity is represented by its raw JSON payload.
        return instance.json
"""Return the EntityLIst that the form consumes Entities""" + queryset = obj.follow_up_forms.all() + + if not queryset: + return [] + + return list( + map( + lambda follow_up_form: { + "id": follow_up_form.entity_list.pk, + "name": follow_up_form.entity_list.name, + "is_active": follow_up_form.is_active, + }, + queryset, + ) + ) # pylint: disable=too-few-public-methods,missing-class-docstring class Meta: model = XForm - fields = ("name", "formid", "id_string", "is_merged_dataset") + fields = ( + "name", + "formid", + "id_string", + "is_merged_dataset", + "contributes_entities_to", + "consumes_entities_from", + ) # pylint: disable=too-few-public-methods diff --git a/onadata/libs/serializers/xform_serializer.py b/onadata/libs/serializers/xform_serializer.py index 819c5c19f9..082c5d7d6b 100644 --- a/onadata/libs/serializers/xform_serializer.py +++ b/onadata/libs/serializers/xform_serializer.py @@ -23,7 +23,13 @@ from rest_framework.reverse import reverse from onadata.libs.utils.api_export_tools import get_metadata_format -from onadata.apps.logger.models import DataView, Instance, XForm, XFormVersion +from onadata.apps.logger.models import ( + DataView, + EntityList, + Instance, + XForm, + XFormVersion, +) from onadata.apps.main.models.meta_data import MetaData from onadata.apps.main.models.user_profile import UserProfile from onadata.libs.exceptions import EnketoError @@ -348,6 +354,37 @@ def get_last_submission_time(self, obj): obj.last_submission_time.isoformat() if obj.last_submission_time else None ) + def get_contributes_entities_to(self, obj: XForm): + """Return the EntityList that the form contributes Entities to""" + registration_form = obj.registration_forms.first() + + if registration_form is None: + return None + + return { + "id": registration_form.entity_list.pk, + "name": registration_form.entity_list.name, + "is_active": registration_form.is_active, + } + + def get_consumes_entities_from(self, obj: XForm): + """Return the EntityLIst that the form 
consumes Entities""" + queryset = obj.follow_up_forms.all() + + if not queryset: + return [] + + return list( + map( + lambda follow_up_form: { + "id": follow_up_form.entity_list.pk, + "name": follow_up_form.entity_list.name, + "is_active": follow_up_form.is_active, + }, + queryset, + ) + ) + class XFormBaseSerializer(XFormMixin, serializers.HyperlinkedModelSerializer): """XForm base serializer.""" @@ -385,6 +422,8 @@ class XFormBaseSerializer(XFormMixin, serializers.HyperlinkedModelSerializer): last_submission_time = serializers.SerializerMethodField() data_views = serializers.SerializerMethodField() xls_available = serializers.SerializerMethodField() + contributes_entities_to = serializers.SerializerMethodField() + consumes_entities_from = serializers.SerializerMethodField() # pylint: disable=too-few-public-methods,missing-class-docstring class Meta: @@ -454,6 +493,8 @@ class XFormSerializer(XFormMixin, serializers.HyperlinkedModelSerializer): form_versions = serializers.SerializerMethodField() data_views = serializers.SerializerMethodField() xls_available = serializers.SerializerMethodField() + contributes_entities_to = serializers.SerializerMethodField() + consumes_entities_from = serializers.SerializerMethodField() class Meta: model = XForm @@ -647,6 +688,14 @@ def get_url(self, obj): return url + def _generate_hash(self, data) -> str: + md5_hash = hashlib.new( + "md5", + data, + usedforsecurity=False, + ).hexdigest() + return f"md5:{md5_hash}" + @check_obj def get_hash(self, obj): """ @@ -664,6 +713,14 @@ def get_hash(self, obj): xform = None if dataset_type == "xform": xform = XForm.objects.filter(pk=pk).only("last_submission_time").first() + + elif dataset_type == "entity_list": + entity_list = EntityList.objects.filter(pk=pk).first() + + if entity_list.last_entity_update_time is not None: + hsh = self._generate_hash( + entity_list.last_entity_update_time.isoformat().encode("utf-8") + ) else: data_view = ( DataView.objects.filter(pk=pk) @@ -674,12 +731,9 @@ 
def get_hash(self, obj): xform = data_view.xform if xform and xform.last_submission_time: - md5_hash = hashlib.new( - "md5", - xform.last_submission_time.isoformat().encode("utf-8"), - usedforsecurity=False, - ).hexdigest() - hsh = f"md5:{md5_hash}" + hsh = self._generate_hash( + xform.last_submission_time.isoformat().encode("utf-8") + ) return f"{hsh or 'md5:'}" diff --git a/onadata/libs/tests/models/test_share_project.py b/onadata/libs/tests/models/test_share_project.py index 3f2f9d7f16..c869852618 100644 --- a/onadata/libs/tests/models/test_share_project.py +++ b/onadata/libs/tests/models/test_share_project.py @@ -54,6 +54,7 @@ def test_share(self, mock_safe_delete, mock_propagate): """ instance = ShareProject(self.project, self.alice, "manager") instance.save() + self.alice.refresh_from_db() self.assertTrue(ManagerRole.user_has_role(self.alice, self.project)) self.assertTrue(ManagerRole.user_has_role(self.alice, self.xform)) self.assertTrue(ManagerRole.user_has_role(self.alice, self.dataview_form)) diff --git a/onadata/libs/tests/serializers/test_xform_serializer.py b/onadata/libs/tests/serializers/test_xform_serializer.py index 36d8b9f90f..f8dc3713f7 100644 --- a/onadata/libs/tests/serializers/test_xform_serializer.py +++ b/onadata/libs/tests/serializers/test_xform_serializer.py @@ -2,10 +2,13 @@ """ Test onadata.libs.serializers.xform_serializer """ +import os + from unittest.mock import MagicMock from django.test import TestCase +from onadata.apps.logger.models import XForm, Entity from onadata.apps.main.tests.test_base import TestBase from onadata.libs.serializers.xform_serializer import XFormManifestSerializer @@ -63,3 +66,40 @@ def test_get_hash(self): obj.data_value = "an image upload.png" obj.data_file = "data file" self.assertEqual(serializer.get_hash(obj), obj.file_hash) + + def test_entity_dataset_hash(self): + """Hash for entity dataset changes after new Entity created""" + serializer = XFormManifestSerializer() + self._create_user_and_login() + # 
Publish registration form + self._publish_registration_form(self.user) + # Publish follow up form + self._publish_follow_up_form(self.user) + follow_up_xform = XForm.objects.order_by("pk").reverse()[0] + entity_list = self.project.entity_lists.first() + # Make submission to create new Entity + submission_path = os.path.join( + self.this_directory, + "fixtures", + "entities", + "instances", + "trees_registration.xml", + ) + self._make_submission(submission_path) + metadata = follow_up_xform.metadata_set.get( + data_type="media", + data_value=f"entity_list {entity_list.pk} {entity_list.name}", + ) + old_hash = serializer.get_hash(metadata) + # Make another submission + submission_path = os.path.join( + self.this_directory, + "fixtures", + "entities", + "instances", + "trees_registration_2.xml", + ) + self._make_submission(submission_path) + new_hash = serializer.get_hash(metadata) + self.assertEqual(Entity.objects.count(), 2) + self.assertNotEqual(old_hash, new_hash) diff --git a/onadata/libs/tests/utils/test_csv_builder.py b/onadata/libs/tests/utils/test_csv_builder.py index b9452bc5a8..5e810c4bdf 100644 --- a/onadata/libs/tests/utils/test_csv_builder.py +++ b/onadata/libs/tests/utils/test_csv_builder.py @@ -10,6 +10,7 @@ from django.test.utils import override_settings from django.utils.dateparse import parse_datetime +from onadata.apps.logger.models.entity_list import EntityList from onadata.apps.logger.models.xform import XForm from onadata.apps.logger.xform_instance_parser import xform_instance_to_dict from onadata.apps.main.tests.test_base import TestBase @@ -2025,3 +2026,65 @@ def test_select_multiples_grouped_repeating_wo_split(self): self.assertEqual(row, expected_row) csv_file.close() + + def test_entity_list_dataset(self): + """Export for an EntityList dataset is correct""" + # Publish registration form + md = """ + | survey | + | | type | name | label | save_to | + | | geopoint | location | Tree location | geometry | + | | select_one species | species | 
Tree species | species | + | | integer | circumference | Tree circumference in cm | circumference_cm | + | | text | intake_notes | Intake notes | | + | choices | | | | | + | | list_name | name | label | | + | | species | wallaba | Wallaba | | + | | species | mora | Mora | | + | | species | purpleheart | Purpleheart | | + | | species | greenheart | Greenheart | | + | settings | | | | | + | | form_title | form_id | version | instance_name | + | | Trees registration | trees_registration | 2022110901 | concat(${circumference}, "cm ", ${species})| + | entities | | | | | + | | list_name | label | | | + | | trees | concat(${circumference}, "cm ", ${species})| | |""" + xform = self._publish_markdown(md, self.user) + entity_list = EntityList.objects.first() + cursor = [ + { + "name": 1, + "label": "300cm purpleheart", + "geometry": "-1.286905 36.772845 0 0", + "species": "purpleheart", + "circumference_cm": 300, + } + ] + builder = CSVDataFrameBuilder( + self.user.username, + xform.id_string, + include_images=False, + entity_list=entity_list, + ) + temp_file = NamedTemporaryFile(suffix=".csv", delete=False) + builder.export_to(temp_file.name, cursor) + csv_file = open(temp_file.name, "r") + csv_reader = csv.reader(csv_file) + header = next(csv_reader) + expected_header = [ + "name", + "label", + "geometry", + "species", + "circumference_cm", + ] + self.assertCountEqual(header, expected_header) + expected_row = [ + "1", + "300cm purpleheart", + "-1.286905 36.772845 0 0", + "purpleheart", + "300", + ] + row = next(csv_reader) + self.assertCountEqual(row, expected_row) diff --git a/onadata/libs/tests/utils/test_export_tools.py b/onadata/libs/tests/utils/test_export_tools.py index c3138bc13b..cebdf15a00 100644 --- a/onadata/libs/tests/utils/test_export_tools.py +++ b/onadata/libs/tests/utils/test_export_tools.py @@ -24,9 +24,8 @@ from onadata.apps.api import tests as api_tests from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet from 
onadata.apps.api.viewsets.data_viewset import DataViewSet -from onadata.apps.logger.models import Attachment, Instance, XForm -from onadata.apps.main.tests.test_base import TestBase -from onadata.apps.viewer.models.export import Export +from onadata.apps.logger.models import Attachment, Instance, XForm, EntityList +from onadata.apps.viewer.models.export import Export, GenericExport from onadata.apps.viewer.models.parsed_instance import query_fields_data from onadata.libs.serializers.merged_xform_serializer import MergedXFormSerializer from onadata.libs.serializers.xform_serializer import XFormSerializer @@ -38,6 +37,7 @@ from onadata.libs.utils.export_tools import ( check_pending_export, generate_attachments_zip_export, + generate_entity_list_export, generate_export, generate_geojson_export, generate_kml_export, @@ -54,7 +54,7 @@ def _logger_fixture_path(*args): return os.path.join(settings.PROJECT_ROOT, "libs", "tests", "fixtures", *args) -class TestExportTools(TestBase, TestAbstractViewSet): +class TestExportTools(TestAbstractViewSet): """ Test export_tools functions. 
""" @@ -1002,3 +1002,26 @@ def test_generate_filtered_attachments_zip_export(self): for a in Attachment.objects.all(): self.assertTrue(os.path.exists(os.path.join(temp_dir, a.media_file.name))) shutil.rmtree(temp_dir) + + +class GenerateExportTestCase(TestAbstractViewSet): + """Tests for method `generate_export`""" + + def test_generate_export_entity_list(self): + """Generate export for EntityList dataset works""" + # Publish registration form and create "trees" Entitylist dataset + self._publish_registration_form(self.user) + # Make submission to trees_registration form + submission_path = os.path.join( + self.main_directory, + "fixtures", + "entities", + "instances", + "trees_registration.xml", + ) + self._make_submission(submission_path) + entity_list = EntityList.objects.get(name="trees") + export = generate_entity_list_export(entity_list) + self.assertIsNotNone(export) + self.assertTrue(export.is_successful) + self.assertEqual(GenericExport.objects.count(), 1) diff --git a/onadata/libs/tests/utils/test_logger_tools.py b/onadata/libs/tests/utils/test_logger_tools.py index ff0a74fd2b..192c963385 100644 --- a/onadata/libs/tests/utils/test_logger_tools.py +++ b/onadata/libs/tests/utils/test_logger_tools.py @@ -14,12 +14,13 @@ from defusedxml.ElementTree import ParseError from onadata.apps.logger.import_tools import django_file -from onadata.apps.logger.models import Instance +from onadata.apps.logger.models import Instance, Entity, RegistrationForm from onadata.apps.logger.xform_instance_parser import AttachmentNameError from onadata.apps.main.tests.test_base import TestBase from onadata.libs.test_utils.pyxform_test_case import PyxformTestCase from onadata.libs.utils.common_tags import MEDIA_ALL_RECEIVED, MEDIA_COUNT, TOTAL_MEDIA from onadata.libs.utils.logger_tools import ( + create_entity, create_instance, generate_content_disposition_header, get_first_record, @@ -648,3 +649,64 @@ def test_handle_parse_error(self): req, ) 
self.assertContains(ret[0].content.decode(), "Improperly formatted XML.") + + +class CreateEntityTestCase(TestBase): + """Tests for method `create_entity`""" + + def setUp(self): + super().setUp() + # Mute signal that creates Entity when Instance is saved + self._mute_post_save_signals([(Instance, "create_entity")]) + self.xform = self._publish_registration_form(self.user) + self.xml = ( + '' + '' + "d156a2dce4c34751af57f21ef5c4e6cc" + "-1.286905 36.772845 0 0" + "purpleheart" + "300" + "" + "" + "uuid:9d3f042e-cfec-4d2a-8b5b-212e3b04802b" + "300cm purpleheart" + '' + "" + "" + "" + "" + ) + self.instance = Instance.objects.create( + xml=self.xml, + user=self.user, + xform=self.xform, + version=self.xform.version, + ) + self.registration_form = RegistrationForm.objects.first() + + def test_entity_created(self): + """Entity is created successfully""" + create_entity(self.instance, self.registration_form) + entity = Entity.objects.first() + self.assertEqual(entity.registration_form, self.registration_form) + self.assertEqual(entity.instance, self.instance) + self.assertEqual(entity.xml, self.xml) + expected_json = { + "formhub/uuid": "d156a2dce4c34751af57f21ef5c4e6cc", + "geometry": "-1.286905 36.772845 0 0", + "species": "purpleheart", + "circumference_cm": 300, + "meta/instanceID": "uuid:9d3f042e-cfec-4d2a-8b5b-212e3b04802b", + "meta/instanceName": "300cm purpleheart", + "meta/entity/label": "300cm purpleheart", + "_xform_id_string": "trees_registration", + "_version": "2022110901", + "_id": entity.pk, + } + self.assertCountEqual(entity.json, expected_json) + self.assertEqual(entity.uuid, "dbee4c32-a922-451c-9df7-42f40bf78f48") + entity_list = self.registration_form.entity_list + entity_list.refresh_from_db() + self.assertEqual(entity_list.num_entities, 1) + self.assertEqual(entity_list.last_entity_update_time, entity.date_modified) diff --git a/onadata/libs/utils/api_export_tools.py b/onadata/libs/utils/api_export_tools.py index a8bad4bcac..144970e5be 100644 --- 
a/onadata/libs/utils/api_export_tools.py +++ b/onadata/libs/utils/api_export_tools.py @@ -56,7 +56,9 @@ from onadata.libs.utils.common_tools import report_exception from onadata.libs.utils.export_tools import ( check_pending_export, + get_latest_generic_export, generate_attachments_zip_export, + generate_entity_list_export, generate_export, generate_external_export, generate_geojson_export, @@ -252,7 +254,7 @@ def _generate_new_export( # noqa: C0901 "username": xform.user.username, "id_string": xform.id_string, "host": request.get_host(), - "sort": request.query_params.get('sort') + "sort": request.query_params.get("sort"), } if query: options["query"] = query @@ -670,3 +672,48 @@ def _get_google_credential(request): ) return HttpResponseRedirect(authorization_url) return credential + + +def get_entity_list_export_response(request, entity_list, filename): + """Returns an EntityList dataset export""" + + # Check if we need to re-generate, + def _new_export(): + return generate_entity_list_export(entity_list) + + if should_create_new_export( + entity_list, Export.CSV_EXPORT, {}, request=request, is_generic=True + ): + export = _new_export() + else: + export = get_latest_generic_export(entity_list, Export.CSV_EXPORT, {}) + + if not export.filename and not export.error_message: + export = _new_export() + + # Log export + audit = {"entity_list": entity_list.name, "export_type": Export.CSV_EXPORT} + log.audit_log( + log.Actions.EXPORT_DOWNLOADED, + request.user, + entity_list.project.user, + _(f"Downloaded {Export.CSV_EXPORT.upper()} export on '{entity_list.name}'."), + audit, + request, + ) + + if export.filename is None and export.error_message: + raise exceptions.ParseError(export.error_message) + + # get extension from file_path, exporter could modify to + # xlsx if it exceeds limits + __, ext = os.path.splitext(export.filename) + ext = ext[1:] + + return response_with_mimetype_and_name( + Export.EXPORT_MIMES[ext], + filename, + extension=ext, + show_date=False, + 
file_path=export.filepath, + ) diff --git a/onadata/libs/utils/csv_builder.py b/onadata/libs/utils/csv_builder.py index d3aa644544..639aa0a881 100644 --- a/onadata/libs/utils/csv_builder.py +++ b/onadata/libs/utils/csv_builder.py @@ -13,6 +13,7 @@ from pyxform.section import RepeatingSection, Section, GroupedSection from six import iteritems +from onadata.apps.logger.models import EntityList from onadata.apps.logger.models import OsmData from onadata.apps.logger.models.xform import XForm, question_types_to_exclude from onadata.apps.viewer.models.data_dictionary import DataDictionary @@ -251,6 +252,7 @@ def __init__( include_reviews=False, language=None, host=None, + entity_list: EntityList | None = None, ): self.username = username self.id_string = id_string @@ -266,6 +268,8 @@ def __init__( self.extra_columns = self.ADDITIONAL_COLUMNS + getattr( settings, "EXTRA_COLUMNS", [] ) + self.entity_list = entity_list + self.include_images = include_images if include_reviews: self.extra_columns = self.extra_columns + [ @@ -274,15 +278,19 @@ def __init__( REVIEW_DATE, ] - if xform: - self.xform = xform + if entity_list is None: + if xform: + self.xform = xform + else: + self.xform = XForm.objects.get( + id_string=self.id_string, user__username=self.username + ) else: - self.xform = XForm.objects.get( - id_string=self.id_string, user__username=self.username - ) + self.xform = None + self.include_images = False + self.include_labels = include_labels self.include_labels_only = include_labels_only - self.include_images = include_images self.include_hxl = include_hxl self.win_excel_utf8 = win_excel_utf8 self.total_records = total_records @@ -304,11 +312,16 @@ def __init__( self._setup() def _setup(self): - self.data_dictionary = self.xform - self.select_multiples = self._collect_select_multiples( - self.data_dictionary, self.language - ) - self.gps_fields = self._collect_gps_fields(self.data_dictionary) + self.data_dictionary = None + self.select_multiples = {} + self.gps_fields 
= [] + + if self.entity_list is None: + self.data_dictionary = self.xform + self.select_multiples = self._collect_select_multiples( + self.data_dictionary, self.language + ) + self.gps_fields = self._collect_gps_fields(self.data_dictionary) @classmethod def _fields_to_select(cls, data_dictionary): @@ -515,6 +528,7 @@ def __init__( include_reviews=False, language=None, host=None, + entity_list: EntityList | None = None, ): super().__init__( username, @@ -539,6 +553,7 @@ def __init__( include_reviews, language, host, + entity_list, ) self.ordered_columns = OrderedDict() @@ -813,45 +828,56 @@ def _format_for_dataframe(self, cursor): def export_to(self, path, cursor, dataview=None): """Export a CSV formated to the given ``path``.""" - self.ordered_columns = OrderedDict() - self._build_ordered_columns(self.data_dictionary.survey, self.ordered_columns) - # creator copy of iterator cursor - cursor, ordered_col_cursor = tee(cursor) - self._update_ordered_columns_from_data(ordered_col_cursor) - # Unpack xform columns and data - data = self._format_for_dataframe(cursor) - - if dataview: - columns = list( - chain.from_iterable( - [ - [xpath] if cols is None else cols - for (xpath, cols) in iteritems(self.ordered_columns) - if [c for c in dataview.columns if xpath.startswith(c)] - ] - ) + columns = [] + columns_with_hxl = None + + if self.entity_list is None: + self.ordered_columns = OrderedDict() + self._build_ordered_columns( + self.data_dictionary.survey, self.ordered_columns ) - else: - columns = list( - chain.from_iterable( - [ - [xpath] if cols is None else cols - for (xpath, cols) in iteritems(self.ordered_columns) - ] + # creator copy of iterator cursor + cursor, ordered_col_cursor = tee(cursor) + self._update_ordered_columns_from_data(ordered_col_cursor) + # Unpack xform columns and data + data = self._format_for_dataframe(cursor) + + if dataview: + columns = list( + chain.from_iterable( + [ + [xpath] if cols is None else cols + for (xpath, cols) in 
iteritems(self.ordered_columns) + if [c for c in dataview.columns if xpath.startswith(c)] + ] + ) + ) + else: + columns = list( + chain.from_iterable( + [ + [xpath] if cols is None else cols + for (xpath, cols) in iteritems(self.ordered_columns) + ] + ) ) - ) - # add extra columns - columns += list(self.extra_columns) + # add extra columns + columns += list(self.extra_columns) - for field in self.data_dictionary.get_survey_elements_of_type("osm"): - columns += OsmData.get_tag_keys( - self.xform, field.get_abbreviated_xpath(), include_prefix=True - ) + for field in self.data_dictionary.get_survey_elements_of_type("osm"): + columns += OsmData.get_tag_keys( + self.xform, field.get_abbreviated_xpath(), include_prefix=True + ) - columns_with_hxl = self.include_hxl and get_columns_with_hxl( - self.data_dictionary.survey_elements - ) + columns_with_hxl = self.include_hxl and get_columns_with_hxl( + self.data_dictionary.survey_elements + ) + + else: + columns = ["name", "label"] + self.entity_list.properties + # Unpack xform columns and data + data = self._format_for_dataframe(cursor) write_to_csv( path, diff --git a/onadata/libs/utils/export_builder.py b/onadata/libs/utils/export_builder.py index 12519f6857..dae69ab941 100644 --- a/onadata/libs/utils/export_builder.py +++ b/onadata/libs/utils/export_builder.py @@ -1137,13 +1137,14 @@ def to_flat_csv_export( end = kwargs.get("end") dataview = kwargs.get("dataview") xform = kwargs.get("xform") - options = kwargs.get("options") + options = kwargs.get("options", {}) total_records = kwargs.get("total_records") host = options.get("host") if options else None win_excel_utf8 = options.get("win_excel_utf8") if options else False index_tags = options.get(REPEAT_INDEX_TAGS, self.REPEAT_INDEX_TAGS) show_choice_labels = options.get("show_choice_labels", False) language = options.get("language") + entity_list = kwargs.get("entity_list") csv_builder = CSVDataFrameBuilder( username, @@ -1168,6 +1169,7 @@ def to_flat_csv_export( 
include_reviews=self.INCLUDE_REVIEWS, language=language, host=host, + entity_list=entity_list, ) csv_builder.export_to(path, data, dataview=dataview) diff --git a/onadata/libs/utils/export_tools.py b/onadata/libs/utils/export_tools.py index 13f57bf78d..3899003f28 100644 --- a/onadata/libs/utils/export_tools.py +++ b/onadata/libs/utils/export_tools.py @@ -11,8 +11,11 @@ import re import sys from datetime import datetime, timedelta -from django.http import HttpRequest +from typing import Iterator + +import six +from django.http import HttpRequest from django.conf import settings from django.contrib.auth import get_user_model from django.core.files.base import File @@ -23,7 +26,6 @@ from django.utils import timezone from django.utils.translation import gettext as _ -import six from json2xlsclient.client import Client from multidb.pinning import use_master @@ -36,10 +38,21 @@ except ImportError: SPSSIOError = Exception -from onadata.apps.logger.models import Attachment, Instance, OsmData, XForm +from onadata.apps.logger.models import ( + Attachment, + Instance, + OsmData, + XForm, + EntityList, + Entity, +) from onadata.apps.logger.models.data_view import DataView from onadata.apps.main.models.meta_data import MetaData -from onadata.apps.viewer.models.export import Export, get_export_options_query_kwargs +from onadata.apps.viewer.models.export import ( + Export, + GenericExport, + get_export_options_query_kwargs, +) from onadata.apps.viewer.models.parsed_instance import ( query_data, query_count, @@ -54,6 +67,7 @@ retry, str_to_bool, ) +from onadata.libs.utils.export_builder import ExportBuilder from onadata.libs.utils.model_tools import get_columns_with_hxl, queryset_iterator from onadata.libs.utils.osm import get_combined_osm from onadata.libs.utils.viewer_tools import create_attachments_zipfile, image_urls @@ -109,6 +123,31 @@ def get_or_create_export(export_id, xform, export_type, options): return create_export_object(xform, export_type, options) +def 
get_entity_list_dataset(entity_list: EntityList) -> Iterator[dict]: + """Get entity data for a an EntityList dataset + + Args: + entity_list (EntityList): The EntityList whose data + will be returned + + Returns: + An iterator of dicts which represent the json data for + Entities belonging to the dataset + """ + entities = Entity.objects.filter(registration_form__entity_list=entity_list) + dataset_properties = entity_list.properties + + for entity in queryset_iterator(entities): + data = { + "name": entity.uuid, + "label": entity.json.get("meta/entity/label", ""), + } + for prop in dataset_properties: + data[prop] = entity.json.get(prop, "") + + yield data + + # pylint: disable=too-many-locals, too-many-branches, too-many-statements @retry(MAX_RETRIES) def generate_export(export_type, xform, export_id=None, options=None): # noqa C901 @@ -133,6 +172,10 @@ def generate_export(export_type, xform, export_id=None, options=None): # noqa C """ username = xform.user.username id_string = xform.id_string + + if options is None: + options = {} + end = options.get("end") extension = options.get("extension", export_type) filter_query = options.get("query") @@ -190,9 +233,6 @@ def generate_export(export_type, xform, export_id=None, options=None): # noqa C if isinstance(records, QuerySet): records = records.iterator() - # pylint: disable=import-outside-toplevel - from onadata.libs.utils.export_builder import ExportBuilder - export_builder = ExportBuilder() export_builder.TRUNCATE_GROUP_TITLE = ( # noqa True if export_type == Export.SAV_ZIP_EXPORT else remove_group_name @@ -348,7 +388,9 @@ def check_pending_export( return export -def should_create_new_export(xform, export_type, options, request=None): +def should_create_new_export( + instance, export_type, options, request=None, is_generic=False +): """ Function that determines whether to create a new export. 
param: xform @@ -373,18 +415,31 @@ def should_create_new_export(xform, export_type, options, request=None): return True export_options_kwargs = get_export_options_query_kwargs(options) - export_query = Export.objects.filter( - xform=xform, export_type=export_type, **export_options_kwargs - ) + + if is_generic: + object_ct = GenericExport.get_object_content_type(instance) + export_query = GenericExport.objects.filter( + content_type=object_ct, + object_id=instance.id, + export_type=export_type, + **export_options_kwargs, + ) + else: + export_query = Export.objects.filter( + xform=instance, export_type=export_type, **export_options_kwargs + ) + if options.get(EXPORT_QUERY_KEY) is None: export_query = export_query.exclude(options__has_key=EXPORT_QUERY_KEY) - if export_query.count() == 0 or Export.exports_outdated( - xform, export_type, options=options - ): - return True + if is_generic: + return export_query.count() == 0 or bool( + GenericExport.exports_outdated(instance, export_type, options=options) + ) - return False + return export_query.count() == 0 or bool( + Export.exports_outdated(instance, export_type, options=options) + ) def newest_export_for(xform, export_type, options): @@ -1045,3 +1100,62 @@ def get_repeat_index_tags(index_tags): ) return index_tags + + +def generate_entity_list_export(entity_list: EntityList) -> GenericExport: + """Generates a CSV for an EntityList dataset""" + username = entity_list.project.organization.username + records = get_entity_list_dataset(entity_list) + export_builder = ExportBuilder() + extension = Export.CSV_EXPORT + temp_file = NamedTemporaryFile(suffix=("." + extension)) + export_builder.to_flat_csv_export( + temp_file.name, records, username, None, None, entity_list=entity_list + ) + # Generate filename + basename = f'{entity_list.name}_{datetime.now().strftime("%Y_%m_%d_%H_%M_%S_%f")}' + filename = basename + "." 
+ extension + + # Check filename is unique + while not GenericExport.is_filename_unique(entity_list, filename): + filename = increment_index_in_filename(filename) + + file_path = os.path.join( + username, "exports", entity_list.name, Export.CSV_EXPORT, filename + ) + # seek to the beginning as required by storage classes + temp_file.seek(0) + export_filename = default_storage.save(file_path, File(temp_file, file_path)) + temp_file.close() + dir_name, basename = os.path.split(export_filename) + # Create export object + export = GenericExport.objects.create( + content_object=entity_list, + export_type=Export.CSV_EXPORT, + filedir=dir_name, + filename=basename, + internal_status=Export.SUCCESSFUL, + ) + return export + + +def get_latest_generic_export( + instance, export_type, options=None +) -> GenericExport | None: + """Retrieve the latest GenericExport""" + export_options_kwargs = get_export_options_query_kwargs(options) + object_content_type = GenericExport.get_object_content_type(instance) + export_query = GenericExport.objects.filter( + content_type=object_content_type, + object_id=instance.id, + export_type=export_type, + **export_options_kwargs, + ) + + try: + latest_export = export_query.latest("created_on") + + except GenericExport.DoesNotExist: + return None + + return latest_export diff --git a/onadata/libs/utils/logger_tools.py b/onadata/libs/utils/logger_tools.py index e90d745c8a..e62add6179 100644 --- a/onadata/libs/utils/logger_tools.py +++ b/onadata/libs/utils/logger_tools.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +# pylint: disable=too-many-lines """ logger_tools - Logger app utility functions. 
""" @@ -11,7 +12,7 @@ from datetime import datetime from hashlib import sha256 from http.client import BadStatusLine -from typing import NoReturn +from typing import NoReturn, Any from wsgiref.util import FileWrapper from xml.dom import Node from xml.parsers.expat import ExpatError @@ -25,7 +26,7 @@ ) from django.core.files.storage import get_storage_class from django.db import DataError, IntegrityError, transaction -from django.db.models import Q +from django.db.models import Q, F from django.http import ( HttpResponse, HttpResponseNotFound, @@ -46,7 +47,14 @@ from pyxform.xform2json import create_survey_element_from_xml from rest_framework.response import Response -from onadata.apps.logger.models import Attachment, Instance, XForm, XFormVersion +from onadata.apps.logger.models import ( + Attachment, + Entity, + Instance, + RegistrationForm, + XForm, + XFormVersion, +) from onadata.apps.logger.models.instance import ( FormInactiveError, FormIsMergedDatasetError, @@ -962,3 +970,69 @@ def __init__(self, xml_file, user): def publish_xform(self): """Publish an XForm XML file.""" return publish_xml_form(self.xml_file, self.user, self.project) + + +def create_entity(instance: Instance, registration_form: RegistrationForm) -> Entity: + """Create an Entity + + Args: + instance (Instance): Submission from which the Entity is created from + registration_form (RegistrationForm): RegistrationForm creating the + Entity + + Returns: + Entity: A newly created Entity + """ + instance_json: dict[str, Any] = instance.get_dict() + # Getting a mapping of save_to field to the field name + mapped_properties = registration_form.get_save_to(instance.version) + # Field names with an alias defined + target_fields = list(mapped_properties.values()) + + def convert_to_alias(field_name: str) -> str: + """Convert field name to it's alias""" + alias_field_name = field_name + # We split along / to take care of group questions + parts = field_name.split("/") + # Replace field parts with alias + 
for part in parts: + if part in target_fields: + for alias, field in mapped_properties.items(): + if field == part: + alias_field_name = alias_field_name.replace(field, alias) + break + + return alias_field_name + + def parse_instance_json(data: dict[str, Any]) -> None: + """Parse the original json, replacing field names with their alias + + The data keys are modified in place + """ + for field_name in list(data): + if isinstance(data[field_name], list): + # Handle repeat question + for child_data in data[field_name]: + parse_instance_json(child_data) + + else: + if field_name in target_fields: + alias_field_name = convert_to_alias(field_name) + + if alias_field_name != field_name: + data[alias_field_name] = data[field_name] + del data[field_name] + + parse_instance_json(instance_json) + entity = Entity.objects.create( + registration_form=registration_form, + xml=instance.xml, + json=instance_json, + instance=instance, + ) + entity_list = registration_form.entity_list + entity_list.last_entity_update_time = entity.date_modified + entity_list.num_entities = F("num_entities") + 1 + entity_list.save() + + return entity From 07ebcb7732299d025718463944f074e143a894ce Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Mon, 20 May 2024 09:55:52 +0300 Subject: [PATCH 199/270] Set permissions for merged dataset asynchronously (#2595) * apply mergedxform permissions asynchronously * add test case * add test case * refactor code --- onadata/apps/logger/models/merged_xform.py | 13 +++-- onadata/apps/logger/models/tests/__init__.py | 0 .../logger/models/tests/test_merged_xform.py | 51 +++++++++++++++++++ .../libs/tests/utils/test_project_utils.py | 47 +++++++++++++++-- onadata/libs/utils/project_utils.py | 22 +++++--- 5 files changed, 120 insertions(+), 13 deletions(-) create mode 100644 onadata/apps/logger/models/tests/__init__.py create mode 100644 onadata/apps/logger/models/tests/test_merged_xform.py diff --git a/onadata/apps/logger/models/merged_xform.py 
b/onadata/apps/logger/models/merged_xform.py index 188a37bc9b..5ce9765588 100644 --- a/onadata/apps/logger/models/merged_xform.py +++ b/onadata/apps/logger/models/merged_xform.py @@ -2,7 +2,7 @@ """ MergedXForm model - stores info on forms to merge. """ -from django.db import models +from django.db import models, transaction from django.db.models.signals import post_save from onadata.apps.logger.models.xform import XForm @@ -39,10 +39,15 @@ def set_object_permissions(sender, instance=None, created=False, **kwargs): OwnerRole.add(instance.created_by, instance) OwnerRole.add(instance.created_by, instance.xform_ptr) - from onadata.libs.utils.project_utils import set_project_perms_to_xform + from onadata.libs.utils.project_utils import ( + set_project_perms_to_xform_async, + ) - set_project_perms_to_xform(instance, instance.project) - set_project_perms_to_xform(instance.xform_ptr, instance.project) + transaction.on_commit( + lambda: set_project_perms_to_xform_async.delay( + instance.pk, instance.project.pk + ) + ) post_save.connect( diff --git a/onadata/apps/logger/models/tests/__init__.py b/onadata/apps/logger/models/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/onadata/apps/logger/models/tests/test_merged_xform.py b/onadata/apps/logger/models/tests/test_merged_xform.py new file mode 100644 index 0000000000..3a206c2347 --- /dev/null +++ b/onadata/apps/logger/models/tests/test_merged_xform.py @@ -0,0 +1,51 @@ +"""Tests for module onadata.apps.logger.models.merged_xform""" + +from pyxform.builder import create_survey_element_from_dict +from unittest.mock import call, patch + +from onadata.apps.main.tests.test_base import TestBase +from onadata.apps.logger.models.merged_xform import MergedXForm +from onadata.apps.logger.models.xform import XForm + + +class MergedXFormTestCase(TestBase): + @patch("onadata.libs.utils.project_utils.set_project_perms_to_xform_async.delay") + def test_perms_applied_async_on_create(self, mock_set_perms): + 
"""Permissions are applied asynchronously on create""" + md = """ + | survey | + | | type | name | label | + | | select one fruits | fruit | Fruit | + + | choices | + | | list name | name | label | + | | fruits | orange | Orange | + | | fruits | mango | Mango | + """ + self._publish_markdown(md, self.user, id_string="a") + self._publish_markdown(md, self.user, id_string="b") + xf1 = XForm.objects.get(id_string="a") + xf2 = XForm.objects.get(id_string="b") + survey = create_survey_element_from_dict(xf1.json_dict()) + survey["id_string"] = "c" + survey["sms_keyword"] = survey["id_string"] + survey["title"] = "Merged XForm" + merged_xf = MergedXForm.objects.create( + id_string=survey["id_string"], + sms_id_string=survey["id_string"], + title=survey["title"], + user=self.user, + created_by=self.user, + is_merged_dataset=True, + project=self.project, + xml=survey.to_xml(), + json=survey.to_json(), + ) + merged_xf.xforms.add(xf1) + merged_xf.xforms.add(xf2) + calls = [ + call(xf1.pk, self.project.pk), + call(xf2.pk, self.project.pk), + call(merged_xf.pk, self.project.pk), + ] + mock_set_perms.assert_has_calls(calls, any_order=True) diff --git a/onadata/libs/tests/utils/test_project_utils.py b/onadata/libs/tests/utils/test_project_utils.py index 84696778c9..9711c69675 100644 --- a/onadata/libs/tests/utils/test_project_utils.py +++ b/onadata/libs/tests/utils/test_project_utils.py @@ -2,14 +2,14 @@ """ Test onadata.libs.utils.project_utils """ -from unittest.mock import MagicMock, patch +from unittest.mock import call, MagicMock, patch from django.test.utils import override_settings - +from pyxform.builder import create_survey_element_from_dict from kombu.exceptions import OperationalError from requests import Response -from onadata.apps.logger.models import Project +from onadata.apps.logger.models import MergedXForm, Project, XForm from onadata.apps.main.tests.test_base import TestBase from onadata.libs.permissions import DataEntryRole from onadata.libs.utils.project_utils 
import ( @@ -73,6 +73,47 @@ def test_set_project_perms_to_xform_async(self, mock): self.assertEqual(args[0], self.xform) self.assertEqual(args[1], self.project) + @patch("onadata.libs.utils.project_utils.set_project_perms_to_xform") + def test_set_project_perms_to_xform_async_mergedxform(self, mock): + """Permissions for a MergedXForm are set""" + md = """ + | survey | + | | type | name | label | + | | select one fruits | fruit | Fruit | + + | choices | + | | list name | name | label | + | | fruits | orange | Orange | + | | fruits | mango | Mango | + """ + self._publish_markdown(md, self.user, id_string="a") + self._publish_markdown(md, self.user, id_string="b") + xf1 = XForm.objects.get(id_string="a") + xf2 = XForm.objects.get(id_string="b") + survey = create_survey_element_from_dict(xf1.json_dict()) + survey["id_string"] = "c" + survey["sms_keyword"] = survey["id_string"] + survey["title"] = "Merged XForm" + merged_xf = MergedXForm.objects.create( + id_string=survey["id_string"], + sms_id_string=survey["id_string"], + title=survey["title"], + user=self.user, + created_by=self.user, + is_merged_dataset=True, + project=self.project, + xml=survey.to_xml(), + json=survey.to_json(), + ) + merged_xf.xforms.add(xf1) + merged_xf.xforms.add(xf2) + set_project_perms_to_xform_async.delay(merged_xf.pk, self.project.pk) + expected_calls = [ + call(merged_xf.xform_ptr, self.project), + call(merged_xf, self.project), + ] + mock.assert_has_calls(expected_calls, any_order=True) + def test_assign_change_asset_permission(self): """ Test that the `assign_change_asset_permission` function calls diff --git a/onadata/libs/utils/project_utils.py b/onadata/libs/utils/project_utils.py index cb0f9dc0cd..a0cf5693c1 100644 --- a/onadata/libs/utils/project_utils.py +++ b/onadata/libs/utils/project_utils.py @@ -21,12 +21,18 @@ from onadata.apps.logger.models.project import Project from onadata.apps.logger.models.xform import XForm from onadata.celeryapp import app -from 
onadata.libs.permissions import (ROLES, OwnerRole, - get_object_users_with_permissions, - get_role, is_organization) -from onadata.libs.utils.common_tags import (API_TOKEN, - ONADATA_KOBOCAT_AUTH_HEADER, - OWNER_TEAM_NAME) +from onadata.libs.permissions import ( + ROLES, + OwnerRole, + get_object_users_with_permissions, + get_role, + is_organization, +) +from onadata.libs.utils.common_tags import ( + API_TOKEN, + ONADATA_KOBOCAT_AUTH_HEADER, + OWNER_TEAM_NAME, +) from onadata.libs.utils.common_tools import report_exception @@ -124,6 +130,10 @@ def _set_project_perms(): else: set_project_perms_to_xform(xform, project) + # Set MergedXForm permissions if XForm is also a MergedXForm + if hasattr(xform, "mergedxform"): + set_project_perms_to_xform(xform.mergedxform, project) + try: if getattr(settings, "SLAVE_DATABASES", []): From 1761cb821a4a865035ed6f2cdbb6dadac78955ab Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Mon, 22 Apr 2024 09:54:57 +0300 Subject: [PATCH 200/270] Exclude media folders from Docker image --- .dockerignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.dockerignore b/.dockerignore index 7425c30a8b..c5e8d87aeb 100644 --- a/.dockerignore +++ b/.dockerignore @@ -17,3 +17,6 @@ **/*tests **/*test **/*__pycache__ +onadata/media +onadata/test_data_media +onadata/test_media From d721e499949c85457b0490494d4a8494a8d2e661 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Mon, 22 Apr 2024 09:56:22 +0300 Subject: [PATCH 201/270] Docker: Use Ubuntu 22.04 and pyenv install Python --- docker/onadata-uwsgi/Dockerfile.ubuntu | 143 +++++++++++++++++-------- 1 file changed, 98 insertions(+), 45 deletions(-) diff --git a/docker/onadata-uwsgi/Dockerfile.ubuntu b/docker/onadata-uwsgi/Dockerfile.ubuntu index f225883897..dcf2ebe0c1 100644 --- a/docker/onadata-uwsgi/Dockerfile.ubuntu +++ b/docker/onadata-uwsgi/Dockerfile.ubuntu @@ -1,22 +1,8 @@ -FROM python:3.10 as intermediate - -ENV DEBIAN_FRONTEND noninteractive -ENV PYTHONUNBUFFERED 1 +FROM 
python:3.10.14-bookworm as base +ARG release_version=v4.0.1 ARG optional_packages -# Download public key for github.com -RUN mkdir -m 0600 ~/.ssh && ssh-keyscan github.com >> ~/.ssh/known_hosts - -# Install optional requirements -# Read more on the ssh argument here: https://docs.docker.com/develop/develop-images/build_enhancements/#using-ssh-to-access-private-data-in-builds -# hadolint ignore=DL3013 -RUN --mount=type=ssh if [ -n "$optional_packages" ]; then pip install ${optional_packages} ; fi - -FROM ubuntu:focal-20240123 - -ARG release_version=v3.18.2 - # Silence configuration prompts ENV DEBIAN_FRONTEND noninteractive @@ -31,10 +17,15 @@ RUN apt-get update -q && \ binutils \ build-essential \ locales \ - netcat \ + netcat-traditional \ gcc \ pkg-config \ git \ + openssh-client \ + libreadline-dev \ + libbz2-dev \ + libffi-dev \ + curl \ automake # Generate and set en_US.UTF-8 locale @@ -43,20 +34,13 @@ ENV LC_ALL en_US.UTF-8 ENV LC_CTYPE en_US.UTF-8 RUN dpkg-reconfigure locales -# Add Deadsnake Repository + # Install OnaData Dependencies -RUN add-apt-repository 'ppa:deadsnakes/ppa' -y \ - && apt-get update -q \ - && apt-get install -y --no-install-recommends \ +RUN apt-get install -y --no-install-recommends \ libproj-dev \ gdal-bin \ memcached \ libmemcached-dev \ - supervisor \ - python3.10 \ - python3.10-dev \ - python3-pip \ - python3-setuptools \ libssl-dev \ libpq-dev \ gfortran \ @@ -64,13 +48,9 @@ RUN add-apt-repository 'ppa:deadsnakes/ppa' -y \ libjpeg-dev \ libxml2-dev \ libxslt1-dev \ - libpython3.10-dev \ zlib1g-dev \ ghostscript \ - python3-celery \ - python3-sphinx \ libtool \ - openjdk-11-jre-headless \ libpcre3 \ libpcre3-dev \ && apt-get autoremove -y \ @@ -78,33 +58,106 @@ RUN add-apt-repository 'ppa:deadsnakes/ppa' -y \ && rm -rf /var/lib/apt/lists/* # Create OnaData user and add to tty group -RUN useradd -G tty -m onadata +RUN useradd -G tty -m onadata \ + && mkdir -p /srv/onadata \ + && chown -R onadata:onadata /srv/onadata -# Clone 
Repository and Change owner -RUN mkdir -p /srv/onadata +USER onadata -COPY ../../ /srv/onadata/ +# Download public key for github.com +RUN mkdir ~/.ssh && ssh-keyscan github.com >> ~/.ssh/known_hosts + +RUN curl https://pyenv.run | bash + +ENV HOME /home/onadata +ENV PYENV_ROOT="$HOME/.pyenv" +ENV PATH $PYENV_ROOT/versions/3.10.14/bin:$PYENV_ROOT/shims:$PYENV_ROOT/bin:$PATH + +RUN pyenv install 3.10.14 && pyenv global 3.10.14 && pyenv rehash -RUN chown -R onadata:onadata /srv/onadata +# FROM base as requirements + +COPY ../../ /srv/onadata/ # Install service requirements WORKDIR /srv/onadata -COPY --from=intermediate /usr/local/lib/python3.10/site-packages/ /usr/local/lib/python3.10/dist-packages/ +USER root + +# ENV GIT_SSH_COMMAND="ssh -vvv" +RUN mkdir -p /root/.ssh && ssh-keyscan github.com >> /root/.ssh/known_hosts +RUN echo $(ssh-add -l) && echo $SSH_AUTH_SOCK +RUN --mount=type=ssh echo $(ssh-add -l) && echo $SSH_AUTH_SOCK # hadolint ignore=DL3013 -RUN python3.10 -m pip install --no-cache-dir -U pip && \ - python3.10 -m pip install --no-cache-dir -r requirements/base.pip && \ - python3.10 -m pip install --no-cache-dir -r requirements/s3.pip && \ - python3.10 -m pip install --no-cache-dir -r requirements/ses.pip && \ - python3.10 -m pip install --no-cache-dir -r requirements/azure.pip && \ - python3.10 -m pip install setuptools==65.5.1 && \ - python3.10 -m pip install --no-cache-dir pyyaml uwsgitop +RUN --mount=type=ssh if [ -n "$optional_packages" ]; then /home/onadata/.pyenv/versions/3.10.14/bin/pip install --no-cache-dir ${optional_packages} ; fi +RUN chown -R onadata:onadata /home/onadata/.pyenv/versions/3.10.14/lib/python3.10/site-packages + +USER onadata + +# hadolint ignore=DL3013 +RUN python -m pip install --no-cache-dir -U pip && \ + python -m pip install --no-cache-dir -r requirements/base.pip && \ + python -m pip install --no-cache-dir -r requirements/s3.pip && \ + python -m pip install --no-cache-dir -r requirements/ses.pip && \ + python -m pip 
install --no-cache-dir -r requirements/azure.pip && \ + python -m pip install --no-cache-dir setuptools==69.2.0 && \ + python -m pip install --no-cache-dir pyyaml==6.0.1 uwsgitop==0.12 + +FROM base as docs +ENV PYENV_ROOT="$HOME/.pyenv" +ENV PATH $PYENV_ROOT/versions/3.10.14/bin:$PYENV_ROOT/shims:$PYENV_ROOT/bin:$PATH +COPY --from=base /home/onadata/.pyenv/ /home/onadata/.pyenv/ +COPY --from=base /srv/onadata/ /srv/onadata/ +USER root +RUN chown -R onadata:onadata /srv/onadata/ +USER onadata # Compile API Docs RUN make -C docs html -EXPOSE 8000 + +FROM ubuntu:jammy-20240405 as runtime + +ENV DEBIAN_FRONTEND noninteractive + +# Install prerequisite packages +RUN apt-get update -q && \ + apt-get install -y --no-install-recommends locales netcat + +# # Generate and set en_US.UTF-8 locale +RUN locale-gen en_US.UTF-8 +ENV LC_ALL en_US.UTF-8 +ENV LC_CTYPE en_US.UTF-8 +RUN dpkg-reconfigure locales + + +# # Install OnaData Dependencies +RUN apt-get install -y --no-install-recommends \ + gdal-bin \ + git-core \ + openjdk-11-jre-headless \ + libxml2-dev \ + libxslt1-dev \ + && apt-get autoremove -y \ + && apt-get clean -y \ + && rm -rf /var/lib/apt/lists/* + +# Create OnaData user and add to tty group +RUN useradd -G tty -m onadata \ + && mkdir -p /srv/onadata \ + && chown -R onadata:onadata /srv/onadata + +COPY --from=base /home/onadata/.pyenv/ /home/onadata/.pyenv/ +COPY --from=docs /srv/onadata/ /srv/onadata/ + +RUN chown -R onadata:onadata /srv/onadata/ USER onadata +WORKDIR /srv/onadata + +ENV HOME /home/onadata +ENV PYTHON_VERSION 3.10.14 +ENV PYENV_ROOT="$HOME/.pyenv" +ENV PATH $PYENV_ROOT/versions/3.10.14/bin:$PYENV_ROOT/shims:$PYENV_ROOT/bin:$PATH -CMD ["/usr/local/bin/uwsgi", "--ini", "/uwsgi.ini"] +CMD ["uwsgi", "--ini", "uwsgi.ini"] From 90a7b79c4e31e714f49531aac1c1aaef333b39cf Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Mon, 6 May 2024 14:16:27 +0300 Subject: [PATCH 202/270] Docker: Split docs generation Minimises runtime installed dependencies --- 
.dockerignore | 1 + docker/onadata-uwsgi/Dockerfile.ubuntu | 16 +++- requirements/azure.pip | 12 +-- requirements/base.pip | 77 ++++++------------ requirements/dev.pip | 104 +++++++++---------------- requirements/docs.in | 1 + requirements/docs.pip | 48 ++++++++++++ requirements/s3.pip | 8 +- requirements/ses.pip | 8 +- setup.cfg | 4 +- 10 files changed, 135 insertions(+), 144 deletions(-) create mode 100644 requirements/docs.in create mode 100644 requirements/docs.pip diff --git a/.dockerignore b/.dockerignore index c5e8d87aeb..c3e4984f4e 100644 --- a/.dockerignore +++ b/.dockerignore @@ -20,3 +20,4 @@ onadata/media onadata/test_data_media onadata/test_media +docs/_build diff --git a/docker/onadata-uwsgi/Dockerfile.ubuntu b/docker/onadata-uwsgi/Dockerfile.ubuntu index dcf2ebe0c1..5a1eae5df4 100644 --- a/docker/onadata-uwsgi/Dockerfile.ubuntu +++ b/docker/onadata-uwsgi/Dockerfile.ubuntu @@ -29,9 +29,12 @@ RUN apt-get update -q && \ automake # Generate and set en_US.UTF-8 locale -RUN locale-gen en_US.UTF-8 +RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && \ + locale-gen en_US.UTF-8 ENV LC_ALL en_US.UTF-8 ENV LC_CTYPE en_US.UTF-8 +ENV LANG en_US.UTF-8 +ENV LANGUAGE en_US:en RUN dpkg-reconfigure locales @@ -101,19 +104,24 @@ RUN python -m pip install --no-cache-dir -U pip && \ python -m pip install --no-cache-dir -r requirements/ses.pip && \ python -m pip install --no-cache-dir -r requirements/azure.pip && \ python -m pip install --no-cache-dir setuptools==69.2.0 && \ - python -m pip install --no-cache-dir pyyaml==6.0.1 uwsgitop==0.12 + python -m pip install --no-cache-dir pyyaml==6.0.1 uwsgitop==0.12 supervisor==4.2.5 FROM base as docs + ENV PYENV_ROOT="$HOME/.pyenv" ENV PATH $PYENV_ROOT/versions/3.10.14/bin:$PYENV_ROOT/shims:$PYENV_ROOT/bin:$PATH COPY --from=base /home/onadata/.pyenv/ /home/onadata/.pyenv/ COPY --from=base /srv/onadata/ /srv/onadata/ + USER root + RUN chown -R onadata:onadata /srv/onadata/ + USER onadata -# Compile API Docs -RUN make -C docs 
html +# install sphinx and build API docs. +RUN python -m pip install --no-cache-dir -r requirements/docs.pip && \ + make -C docs html FROM ubuntu:jammy-20240405 as runtime diff --git a/requirements/azure.pip b/requirements/azure.pip index eb6756a559..0b78e9199a 100644 --- a/requirements/azure.pip +++ b/requirements/azure.pip @@ -18,7 +18,7 @@ cffi==1.16.0 # via cryptography charset-normalizer==3.3.2 # via requests -cryptography==42.0.5 +cryptography==42.0.6 # via # -r requirements/azure.in # azure-storage-blob @@ -26,13 +26,13 @@ django==4.2.11 # via # -r requirements/azure.in # django-storages -django-storages==1.14.2 +django-storages==1.14.3 # via -r requirements/azure.in -idna==3.6 +idna==3.7 # via requests isodate==0.6.1 # via azure-storage-blob -pycparser==2.21 +pycparser==2.22 # via cffi requests==2.31.0 # via azure-core @@ -40,9 +40,9 @@ six==1.16.0 # via # azure-core # isodate -sqlparse==0.4.4 +sqlparse==0.5.0 # via django -typing-extensions==4.10.0 +typing-extensions==4.11.0 # via # azure-core # azure-storage-blob diff --git a/requirements/base.pip b/requirements/base.pip index d3f695e47b..bfc9741ddf 100644 --- a/requirements/base.pip +++ b/requirements/base.pip @@ -6,8 +6,6 @@ # -e git+https://github.com/onaio/savreaderwriter.git@fix-pep-440-issues#egg=savreaderwriter # via -r requirements/base.in -alabaster==0.7.16 - # via sphinx amqp==5.2.0 # via kombu analytics-python==1.4.post1 @@ -23,15 +21,13 @@ attrs==23.2.0 # jsonlines # jsonschema # referencing -babel==2.14.0 - # via sphinx backoff==1.10.0 # via analytics-python billiard==4.2.0 # via celery -boto3==1.34.71 +boto3==1.34.98 # via dataflows-tabulator -botocore==1.34.71 +botocore==1.34.98 # via # boto3 # s3transfer @@ -39,7 +35,7 @@ cached-property==1.5.2 # via tableschema cachetools==5.3.3 # via google-auth -celery==5.3.6 +celery==5.4.0 # via onadata certifi==2024.2.2 # via @@ -68,7 +64,7 @@ click-plugins==1.1.1 # via celery click-repl==0.3.0 # via celery -cryptography==42.0.5 +cryptography==42.0.6 
# via # jwcrypto # onadata @@ -159,7 +155,7 @@ djangorestframework-gis==1.0 # via onadata djangorestframework-guardian==0.3.0 # via onadata -djangorestframework-jsonapi==6.1.0 +djangorestframework-jsonapi==7.0.0 # via onadata djangorestframework-jsonp==1.0.2 # via onadata @@ -167,8 +163,6 @@ djangorestframework-xml==2.0.0 # via onadata dnspython==2.6.1 # via pymongo -docutils==0.19 - # via sphinx dpath==2.1.6 # via onadata elaphe3==0.2.0 @@ -193,29 +187,25 @@ hiredis==2.3.2 # via redis httplib2==0.22.0 # via onadata -idna==3.6 +idna==3.7 # via requests ijson==3.2.3 # via dataflows-tabulator -imagesize==1.4.1 - # via sphinx inflection==0.5.1 # via djangorestframework-jsonapi isodate==0.6.1 # via tableschema -jinja2==3.1.3 - # via sphinx jmespath==1.0.1 # via # boto3 # botocore jsonlines==4.0.0 # via dataflows-tabulator -jsonpickle==3.0.3 +jsonpickle==3.0.4 # via onadata jsonpointer==2.4 # via datapackage -jsonschema==4.21.1 +jsonschema==4.22.0 # via # datapackage # tableschema @@ -223,16 +213,14 @@ jsonschema-specifications==2023.12.1 # via jsonschema jwcrypto==1.5.6 # via django-oauth-toolkit -kombu==5.3.5 +kombu==5.3.7 # via celery linear-tsv==1.1.0 # via dataflows-tabulator -lxml==5.1.0 +lxml==5.2.1 # via onadata markdown==3.6 # via onadata -markupsafe==2.1.5 - # via jinja2 modilabs-python-utils==0.1.5 # via onadata monotonic==1.6 @@ -252,11 +240,9 @@ openpyxl==3.0.9 # dataflows-tabulator # onadata # pyxform -packaging==24.0 - # via sphinx -paho-mqtt==2.0.0 +paho-mqtt==2.1.0 # via onadata -pillow==10.2.0 +pillow==10.3.0 # via # elaphe3 # onadata @@ -270,19 +256,17 @@ pyasn1==0.6.0 # rsa pyasn1-modules==0.4.0 # via google-auth -pycparser==2.21 +pycparser==2.22 # via cffi pyfloip @ git+https://github.com/onaio/floip-py.git@3c980eb184069ae7c3c9136b18441978237cd41d # via -r requirements/base.in -pygments==2.17.2 - # via sphinx pyjwt==2.8.0 # via # ona-oidc # onadata pylibmc==1.6.3 # via onadata -pymongo==4.6.2 +pymongo==4.7.1 # via onadata pyparsing==3.1.2 # via 
httplib2 @@ -310,11 +294,11 @@ pyxform==1.12.2 # pyfloip recaptcha-client==1.0.6 # via onadata -redis==5.0.3 +redis==5.0.4 # via # django-redis # onadata -referencing==0.34.0 +referencing==0.35.1 # via # jsonschema # jsonschema-specifications @@ -328,7 +312,6 @@ requests==2.31.0 # onadata # python-json2xlsclient # requests-oauthlib - # sphinx # tableschema requests-oauthlib==2.0.0 # via google-auth-oauthlib @@ -342,7 +325,7 @@ rsa==4.9 # via google-auth s3transfer==0.10.1 # via boto3 -sentry-sdk==1.43.0 +sentry-sdk==1.45.0 # via onadata simplejson==3.19.2 # via onadata @@ -356,31 +339,15 @@ six==1.16.0 # linear-tsv # python-dateutil # tableschema -snowballstemmer==2.2.0 - # via sphinx -sphinx==6.2.1 - # via onadata -sphinxcontrib-applehelp==1.0.8 - # via sphinx -sphinxcontrib-devhelp==1.0.6 - # via sphinx -sphinxcontrib-htmlhelp==2.0.5 - # via sphinx -sphinxcontrib-jsmath==1.0.1 - # via sphinx -sphinxcontrib-qthelp==1.0.7 - # via sphinx -sphinxcontrib-serializinghtml==1.1.10 - # via sphinx -sqlalchemy==2.0.29 +sqlalchemy==2.0.30 # via dataflows-tabulator -sqlparse==0.4.4 +sqlparse==0.5.0 # via # django # django-debug-toolbar -tableschema==1.20.10 +tableschema==1.20.11 # via datapackage -typing-extensions==4.10.0 +typing-extensions==4.11.0 # via # jwcrypto # sqlalchemy @@ -399,7 +366,7 @@ urllib3==2.2.1 # botocore # requests # sentry-sdk -uwsgi==2.0.24 +uwsgi==2.0.25.1 # via onadata vine==5.1.0 # via diff --git a/requirements/dev.pip b/requirements/dev.pip index 28137f7906..e944503f2f 100644 --- a/requirements/dev.pip +++ b/requirements/dev.pip @@ -6,8 +6,6 @@ # -e git+https://github.com/onaio/savreaderwriter.git@fix-pep-440-issues#egg=savreaderwriter # via -r requirements/base.in -alabaster==0.7.16 - # via sphinx amqp==5.2.0 # via kombu analytics-python==1.4.post1 @@ -31,15 +29,13 @@ attrs==23.2.0 # jsonlines # jsonschema # referencing -babel==2.14.0 - # via sphinx backoff==1.10.0 # via analytics-python billiard==4.2.0 # via celery -boto3==1.34.71 +boto3==1.34.98 # 
via dataflows-tabulator -botocore==1.34.71 +botocore==1.34.98 # via # boto3 # s3transfer @@ -47,7 +43,7 @@ cached-property==1.5.2 # via tableschema cachetools==5.3.3 # via google-auth -celery==5.3.6 +celery==5.4.0 # via onadata certifi==2024.2.2 # via @@ -78,7 +74,7 @@ click-plugins==1.1.1 # via celery click-repl==0.3.0 # via celery -cryptography==42.0.5 +cryptography==42.0.6 # via # jwcrypto # onadata @@ -180,7 +176,7 @@ djangorestframework-gis==1.0 # via onadata djangorestframework-guardian==0.3.0 # via onadata -djangorestframework-jsonapi==6.1.0 +djangorestframework-jsonapi==7.0.0 # via onadata djangorestframework-jsonp==1.0.2 # via onadata @@ -188,8 +184,6 @@ djangorestframework-xml==2.0.0 # via onadata dnspython==2.6.1 # via pymongo -docutils==0.19 - # via sphinx dodgy==0.2.1 # via prospector dpath==2.1.6 @@ -200,7 +194,7 @@ et-xmlfile==1.1.0 # via openpyxl executing==2.0.1 # via stack-data -filelock==3.13.3 +filelock==3.14.0 # via virtualenv flake8==5.0.4 # via @@ -218,7 +212,7 @@ geojson==3.1.0 # via onadata gitdb==4.0.11 # via gitpython -gitpython==3.1.42 +gitpython==3.1.43 # via prospector google-auth==2.29.0 # via @@ -234,21 +228,19 @@ httmock==1.4.0 # via -r requirements/dev.in httplib2==0.22.0 # via onadata -identify==2.5.35 +identify==2.5.36 # via pre-commit -idna==3.6 +idna==3.7 # via requests ijson==3.2.3 # via dataflows-tabulator -imagesize==1.4.1 - # via sphinx importlib-metadata==7.1.0 # via yapf inflection==0.5.1 # via djangorestframework-jsonapi ipdb==0.13.13 # via -r requirements/dev.in -ipython==8.22.2 +ipython==8.24.0 # via ipdb isodate==0.6.1 # via tableschema @@ -258,19 +250,17 @@ isort==5.13.2 # pylint jedi==0.19.1 # via ipython -jinja2==3.1.3 - # via sphinx jmespath==1.0.1 # via # boto3 # botocore jsonlines==4.0.0 # via dataflows-tabulator -jsonpickle==3.0.3 +jsonpickle==3.0.4 # via onadata jsonpointer==2.4 # via datapackage -jsonschema==4.21.1 +jsonschema==4.22.0 # via # datapackage # tableschema @@ -278,19 +268,17 @@ 
jsonschema-specifications==2023.12.1 # via jsonschema jwcrypto==1.5.6 # via django-oauth-toolkit -kombu==5.3.5 +kombu==5.3.7 # via celery lazy-object-proxy==1.10.0 # via astroid linear-tsv==1.1.0 # via dataflows-tabulator -lxml==5.1.0 +lxml==5.2.1 # via onadata markdown==3.6 # via onadata -markupsafe==2.1.5 - # via jinja2 -matplotlib-inline==0.1.6 +matplotlib-inline==0.1.7 # via ipython mccabe==0.7.0 # via @@ -322,20 +310,19 @@ packaging==24.0 # via # prospector # requirements-detector - # sphinx -paho-mqtt==2.0.0 +paho-mqtt==2.1.0 # via onadata -parso==0.8.3 +parso==0.8.4 # via jedi pep8-naming==0.10.0 # via prospector pexpect==4.9.0 # via ipython -pillow==10.2.0 +pillow==10.3.0 # via # elaphe3 # onadata -platformdirs==4.2.0 +platformdirs==4.2.1 # via # pylint # virtualenv @@ -364,7 +351,7 @@ pycodestyle==2.9.1 # via # flake8 # prospector -pycparser==2.21 +pycparser==2.22 # via cffi pydocstyle==6.3.0 # via prospector @@ -374,10 +361,8 @@ pyflakes==2.5.0 # prospector pyfloip @ git+https://github.com/onaio/floip-py.git@3c980eb184069ae7c3c9136b18441978237cd41d # via -r requirements/base.in -pygments==2.17.2 - # via - # ipython - # sphinx +pygments==2.18.0 + # via ipython pyjwt==2.8.0 # via # ona-oidc @@ -406,7 +391,7 @@ pylint-plugin-utils==0.7 # pylint-celery # pylint-django # pylint-flask -pymongo==4.6.2 +pymongo==4.7.1 # via onadata pyparsing==3.1.2 # via httplib2 @@ -438,11 +423,11 @@ pyyaml==6.0.1 # prospector recaptcha-client==1.0.6 # via onadata -redis==5.0.3 +redis==5.0.4 # via # django-redis # onadata -referencing==0.34.0 +referencing==0.35.1 # via # jsonschema # jsonschema-specifications @@ -458,9 +443,8 @@ requests==2.31.0 # python-json2xlsclient # requests-mock # requests-oauthlib - # sphinx # tableschema -requests-mock==1.11.0 +requests-mock==1.12.1 # via -r requirements/dev.in requests-oauthlib==2.0.0 # via google-auth-oauthlib @@ -478,7 +462,7 @@ s3transfer==0.10.1 # via boto3 semver==3.0.2 # via requirements-detector -sentry-sdk==1.43.0 
+sentry-sdk==1.45.0 # via onadata setoptconf-tmp==0.3.1 # via prospector @@ -494,37 +478,20 @@ six==1.16.0 # isodate # linear-tsv # python-dateutil - # requests-mock # tableschema smmap==5.0.1 # via gitdb snowballstemmer==2.2.0 - # via - # pydocstyle - # sphinx -sphinx==6.2.1 - # via onadata -sphinxcontrib-applehelp==1.0.8 - # via sphinx -sphinxcontrib-devhelp==1.0.6 - # via sphinx -sphinxcontrib-htmlhelp==2.0.5 - # via sphinx -sphinxcontrib-jsmath==1.0.1 - # via sphinx -sphinxcontrib-qthelp==1.0.7 - # via sphinx -sphinxcontrib-serializinghtml==1.1.10 - # via sphinx -sqlalchemy==2.0.29 + # via pydocstyle +sqlalchemy==2.0.30 # via dataflows-tabulator -sqlparse==0.4.4 +sqlparse==0.5.0 # via # django # django-debug-toolbar stack-data==0.6.3 # via ipython -tableschema==1.20.10 +tableschema==1.20.11 # via datapackage tblib==3.0.0 # via -r requirements/dev.in @@ -536,12 +503,13 @@ tomli==2.0.1 # via yapf tomlkit==0.12.4 # via pylint -traitlets==5.14.2 +traitlets==5.14.3 # via # ipython # matplotlib-inline -typing-extensions==4.10.0 +typing-extensions==4.11.0 # via + # ipython # jwcrypto # sqlalchemy tzdata==2024.1 @@ -559,14 +527,14 @@ urllib3==2.2.1 # botocore # requests # sentry-sdk -uwsgi==2.0.24 +uwsgi==2.0.25.1 # via onadata vine==5.1.0 # via # amqp # celery # kombu -virtualenv==20.25.1 +virtualenv==20.26.1 # via pre-commit wcwidth==0.2.13 # via prompt-toolkit diff --git a/requirements/docs.in b/requirements/docs.in new file mode 100644 index 0000000000..d0d294c97e --- /dev/null +++ b/requirements/docs.in @@ -0,0 +1 @@ +sphinx>=6.2,<7 diff --git a/requirements/docs.pip b/requirements/docs.pip new file mode 100644 index 0000000000..981a9f8d7c --- /dev/null +++ b/requirements/docs.pip @@ -0,0 +1,48 @@ +# +# This file is autogenerated by pip-compile with Python 3.11 +# by the following command: +# +# pip-compile --output-file=requirements/docs.pip --strip-extras requirements/docs.in +# +alabaster==0.7.16 + # via sphinx +babel==2.15.0 + # via sphinx +certifi==2024.2.2 + 
# via requests +charset-normalizer==3.3.2 + # via requests +docutils==0.19 + # via sphinx +idna==3.7 + # via requests +imagesize==1.4.1 + # via sphinx +jinja2==3.1.4 + # via sphinx +markupsafe==2.1.5 + # via jinja2 +packaging==24.0 + # via sphinx +pygments==2.18.0 + # via sphinx +requests==2.31.0 + # via sphinx +snowballstemmer==2.2.0 + # via sphinx +sphinx==6.2.1 + # via -r requirements/docs.in +sphinxcontrib-applehelp==1.0.8 + # via sphinx +sphinxcontrib-devhelp==1.0.6 + # via sphinx +sphinxcontrib-htmlhelp==2.0.5 + # via sphinx +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-qthelp==1.0.7 + # via sphinx +sphinxcontrib-serializinghtml==1.1.10 + # via sphinx +urllib3==2.2.1 + # via requests diff --git a/requirements/s3.pip b/requirements/s3.pip index fe691e6de3..60197bf2f6 100644 --- a/requirements/s3.pip +++ b/requirements/s3.pip @@ -6,9 +6,9 @@ # asgiref==3.8.1 # via django -boto3==1.34.71 +boto3==1.34.98 # via -r requirements/s3.in -botocore==1.34.71 +botocore==1.34.98 # via # boto3 # s3transfer @@ -16,7 +16,7 @@ django==4.2.11 # via # -r requirements/s3.in # django-storages -django-storages==1.14.2 +django-storages==1.14.3 # via -r requirements/s3.in jmespath==1.0.1 # via @@ -28,7 +28,7 @@ s3transfer==0.10.1 # via boto3 six==1.16.0 # via python-dateutil -sqlparse==0.4.4 +sqlparse==0.5.0 # via django urllib3==2.2.1 # via botocore diff --git a/requirements/ses.pip b/requirements/ses.pip index 1ca64618da..d3f3d8f86a 100644 --- a/requirements/ses.pip +++ b/requirements/ses.pip @@ -8,9 +8,9 @@ asgiref==3.8.1 # via django boto==2.49.0 # via -r requirements/ses.in -boto3==1.34.71 +boto3==1.34.98 # via django-ses -botocore==1.34.71 +botocore==1.34.98 # via # boto3 # s3transfer @@ -18,7 +18,7 @@ django==4.2.11 # via # -r requirements/ses.in # django-ses -django-ses==3.5.2 +django-ses==4.0.0 # via -r requirements/ses.in jmespath==1.0.1 # via @@ -32,7 +32,7 @@ s3transfer==0.10.1 # via boto3 six==1.16.0 # via python-dateutil -sqlparse==0.4.4 +sqlparse==0.5.0 # 
via django urllib3==2.2.1 # via botocore diff --git a/setup.cfg b/setup.cfg index edfc236e5f..0db2a0fa58 100644 --- a/setup.cfg +++ b/setup.cfg @@ -74,8 +74,6 @@ install_requires = python-memcached #XML Instance API utility xmltodict - #docs - sphinx>=6.2,<7 Markdown #others unicodecsv @@ -92,7 +90,7 @@ install_requires = simplejson uwsgi django-activity-stream - sentry-sdk + sentry-sdk>=1.43.0,<2 paho-mqtt cryptography>=39.0.1 #Monitoring From d6a7721a69ea29e9c813d693e6d6ef552dd25624 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Tue, 7 May 2024 03:17:01 +0300 Subject: [PATCH 203/270] Update CHANGES.rst --- CHANGES.rst | 36 ++++++++++++++++++++---------------- 1 file changed, 20 insertions(+), 16 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 4bc31351cf..6640c285a4 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,10 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +Next release +------------- +- Rebuild docker image with pyenv and on Ubuntu 22.04 runtime image. 
+ v4.1.0(2024-05-03) ------------------ - Fix API docs not created when building image @@ -15,22 +19,22 @@ v4.1.0(2024-05-03) v4.0.1(2024-04-15) ------------------ - When an odk token expires is None, deactivate and replace - `PR #2583 ` + `PR #2583 ` [@FrankApiyo] - Flatten select multiples in repeating sections during CSV import - `PR #2578 ` + `PR #2578 ` [@kelvin-muchiri] v4.0.0(2024-04-08) ------------------ - Remove redundant Dockerfile used for development - `PR #2575 ` + `PR #2575 ` [@kelvin-muchiri] - Add user to organization asynchronously - `PR #2574 ` + `PR #2574 ` [@kelvin-muchiri] - Upgrade to Django 4.2 - `PR #2572 ` + `PR #2572 ` [@ukanga] v3.19.0(2024-03-26) @@ -98,13 +102,13 @@ v3.17.1(2023-12-11) v3.17.0(2023-11-24) ------------------- -- Create Composite Index for xform_id and id fields +- Create Composite Index for xform_id and id fields `PR #2519 ` [@KipSigei] - Filter out soft-deleted xforms from project forms endpoint `PR #2515 ` [@KipSigei] -- Check length of paginated instances in briefcase viewset +- Check length of paginated instances in briefcase viewset `PR #2517 ` [@KipSigei] @@ -113,7 +117,7 @@ v3.16.0(2023-11-23) - Add custom template tag: settings_value `PR #2510 ` [@FrankApiyo] -- Enhancement: Handle Statement Timeout in Briefcase Viewset +- Enhancement: Handle Statement Timeout in Briefcase Viewset `PR #2508 ` [@KipSigei] - Trigger database call to correctly capture OperationalError @@ -274,7 +278,7 @@ v3.10.0(2023-07-03) v3.9.2(2023-06-19) ------------------ -- Fix open-data endpoint timeout bug +- Fix open-data endpoint timeout bug `PR #2435 ` [@kelvin-muchiri] @@ -461,7 +465,7 @@ v3.6.0(2022-10-31) v3.5.0(2022-10-06) ------------------ -- Fix org members permissions +- Fix org members permissions `PR #2323 `_ [@KipSigei] - Add pagination to projects endpoint @@ -576,19 +580,19 @@ v3.1.0(2022-07-08) v3.0.4(2022-06-14) ------------------ -- Add geojson simplestyle-spec support +- Add geojson simplestyle-spec support `PR #2255 `_ 
[@KipSigei] -- Fix data type of filtered /data JSON response +- Fix data type of filtered /data JSON response `PR #2256 `_ [@ukanga] -- Use xlsx file object instead of absolute path +- Use xlsx file object instead of absolute path `PR #2257 `_ [@KipSigei] -- Add netcat to allow liveness/readiness probes that make use of open port checks. +- Add netcat to allow liveness/readiness probes that make use of open port checks. `PR #2259 `_ [@ukanga] -- Fix netcat package include in Dockerfile +- Fix netcat package include in Dockerfile `PR #2260 `_ [@ukanga] @@ -889,7 +893,7 @@ v2.5.6(2021-06-02) v2.5.5(2021-05-17) ------------------ -- Add documentation for the messaging endpoint +- Add documentation for the messaging endpoint `PR #2026 `_ [@DavisRayM] - Fix submission deletion endpoint error From cd1efafcd886175694532eaaa845323a612af23c Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Tue, 7 May 2024 03:31:34 +0300 Subject: [PATCH 204/270] Build multi-platform image --- .github/workflows/ecr-image-build.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ecr-image-build.yml b/.github/workflows/ecr-image-build.yml index a1241825f9..abe21917bd 100644 --- a/.github/workflows/ecr-image-build.yml +++ b/.github/workflows/ecr-image-build.yml @@ -64,7 +64,7 @@ jobs: with: context: . file: ./docker/onadata-uwsgi/Dockerfile.ubuntu - platforms: linux/amd64 + platforms: linux/amd64,linux/arm64 cache-from: type=registry,ref=${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ env.version }} cache-to: type=inline ssh: | @@ -82,7 +82,7 @@ jobs: with: context: . 
file: ./docker/onadata-uwsgi/Dockerfile.alpine - platforms: linux/amd64 + platforms: linux/amd64,linux/arm64 cache-from: type=registry,ref=${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ env.version }} cache-to: type=inline ssh: | From b980a04dd234e967870f2dfa9676e97b596e7eb7 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Tue, 7 May 2024 03:57:58 +0300 Subject: [PATCH 205/270] Upgrade ecr image build github actions version --- .github/workflows/ecr-image-build.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ecr-image-build.yml b/.github/workflows/ecr-image-build.yml index abe21917bd..d153f2d0d3 100644 --- a/.github/workflows/ecr-image-build.yml +++ b/.github/workflows/ecr-image-build.yml @@ -20,16 +20,16 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v1 + uses: aws-actions/configure-aws-credentials@v4 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} @@ -37,7 +37,7 @@ jobs: - name: Login to Amazon ECR id: login-ecr - uses: aws-actions/amazon-ecr-login@v1 + uses: aws-actions/amazon-ecr-login@v2 - name: Setup SSH Agent and add Github to known hosts env: @@ -60,7 +60,7 @@ jobs: - name: (Ubuntu) Build and push id: docker-build-ubuntu - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v5 with: context: . file: ./docker/onadata-uwsgi/Dockerfile.ubuntu @@ -77,7 +77,7 @@ jobs: - name: (Alpine) Build and push id: docker-build-alpine - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v5 if: github.event.inputs.buildAlpine with: context: . 
@@ -108,7 +108,7 @@ jobs: output: 'trivy-results.sarif' - name: Upload Trivy scan result to Github security lab - uses: github/codeql-action/upload-sarif@v2 + uses: github/codeql-action/upload-sarif@v3 with: sarif_file: 'trivy-results.sarif' @@ -129,7 +129,7 @@ jobs: echo "SUMMARY=$summary" >> $GITHUB_ENV - name: Send Slack Notification - uses: slackapi/slack-github-action@v1.23.0 + uses: slackapi/slack-github-action@v1.26.0 with: payload: | { From ac2e4a6153d005b8e8a8eb93f8f3fda99b7c1108 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Tue, 7 May 2024 14:40:54 +0300 Subject: [PATCH 206/270] Build the platform specific images in parallel --- .github/workflows/ecr-image-build.yml | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ecr-image-build.yml b/.github/workflows/ecr-image-build.yml index d153f2d0d3..4b65966eb5 100644 --- a/.github/workflows/ecr-image-build.yml +++ b/.github/workflows/ecr-image-build.yml @@ -18,7 +18,17 @@ on: jobs: build: runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + paltform: + - linux/arm64 steps: + - name: Prepare + run: | + platform=${{ matrix.platform }} + echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV + - name: Checkout uses: actions/checkout@v4 @@ -64,7 +74,7 @@ jobs: with: context: . file: ./docker/onadata-uwsgi/Dockerfile.ubuntu - platforms: linux/amd64,linux/arm64 + platforms: ${{ matrix.platform }} cache-from: type=registry,ref=${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ env.version }} cache-to: type=inline ssh: | @@ -82,7 +92,7 @@ jobs: with: context: . 
file: ./docker/onadata-uwsgi/Dockerfile.alpine - platforms: linux/amd64,linux/arm64 + platforms: ${{ matrix.platform }} cache-from: type=registry,ref=${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ env.version }} cache-to: type=inline ssh: | From ee5e896051102287ac5e0ffea2c8707b57d86aa2 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Mon, 13 May 2024 09:40:34 +0300 Subject: [PATCH 207/270] Set platform tag --- .github/workflows/ecr-image-build.yml | 23 ++++++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ecr-image-build.yml b/.github/workflows/ecr-image-build.yml index 4b65966eb5..d74c1f10b6 100644 --- a/.github/workflows/ecr-image-build.yml +++ b/.github/workflows/ecr-image-build.yml @@ -21,7 +21,8 @@ jobs: strategy: fail-fast: false matrix: - paltform: + platform: + - linux/amd64 - linux/arm64 steps: - name: Prepare @@ -82,8 +83,24 @@ jobs: build-args: | optional_packages=PyYAML django-redis ${{ secrets.ECR_OPTIONAL_PACKAGES }} push: true - tags: | - ${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ env.version || github.ref_name }} + labels: | + "${{ matrix.platform }}" + provenance: false + outputs: type=image,name=${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ env.version || github.ref_name }},push-by-digest=false,name-canonical=true,push=true + - + name: Export digest + run: | + mkdir -p /tmp/digests + digest="${{ steps.docker-build-ubuntu.outputs.digest }}" + touch "/tmp/digests/${digest#sha256:}" + - + name: Upload digest + uses: actions/upload-artifact@v4 + with: + name: digests-${{ env.PLATFORM_PAIR }} + path: /tmp/digests/* + if-no-files-found: error + retention-days: 1 - name: (Alpine) Build and push id: docker-build-alpine From ea41bd988f59bb8d83076c1bc9b1636c4d99e320 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Wed, 15 May 2024 19:55:23 +0300 Subject: [PATCH 208/270] Use onaio/python-deps image for docker builds --- docker/onadata-uwsgi/Dockerfile.ubuntu | 97 
++++---------------------- 1 file changed, 14 insertions(+), 83 deletions(-) diff --git a/docker/onadata-uwsgi/Dockerfile.ubuntu b/docker/onadata-uwsgi/Dockerfile.ubuntu index 5a1eae5df4..a51b765200 100644 --- a/docker/onadata-uwsgi/Dockerfile.ubuntu +++ b/docker/onadata-uwsgi/Dockerfile.ubuntu @@ -1,4 +1,4 @@ -FROM python:3.10.14-bookworm as base +FROM onaio/python-deps:3.10.14 as base ARG release_version=v4.0.1 ARG optional_packages @@ -10,92 +10,23 @@ ENV PYTHONUNBUFFERED 1 ENV DJANGO_SETTINGS_MODULE onadata.settings.docker -# Install prerequisite packages -RUN apt-get update -q && \ - apt-get install -y --no-install-recommends \ - software-properties-common \ - binutils \ - build-essential \ - locales \ - netcat-traditional \ - gcc \ - pkg-config \ - git \ - openssh-client \ - libreadline-dev \ - libbz2-dev \ - libffi-dev \ - curl \ - automake - -# Generate and set en_US.UTF-8 locale -RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && \ - locale-gen en_US.UTF-8 -ENV LC_ALL en_US.UTF-8 -ENV LC_CTYPE en_US.UTF-8 -ENV LANG en_US.UTF-8 -ENV LANGUAGE en_US:en -RUN dpkg-reconfigure locales - - -# Install OnaData Dependencies -RUN apt-get install -y --no-install-recommends \ - libproj-dev \ - gdal-bin \ - memcached \ - libmemcached-dev \ - libssl-dev \ - libpq-dev \ - gfortran \ - libatlas-base-dev \ - libjpeg-dev \ - libxml2-dev \ - libxslt1-dev \ - zlib1g-dev \ - ghostscript \ - libtool \ - libpcre3 \ - libpcre3-dev \ - && apt-get autoremove -y \ - && apt-get clean -y \ - && rm -rf /var/lib/apt/lists/* - -# Create OnaData user and add to tty group -RUN useradd -G tty -m onadata \ - && mkdir -p /srv/onadata \ - && chown -R onadata:onadata /srv/onadata - -USER onadata - -# Download public key for github.com -RUN mkdir ~/.ssh && ssh-keyscan github.com >> ~/.ssh/known_hosts - -RUN curl https://pyenv.run | bash - -ENV HOME /home/onadata -ENV PYENV_ROOT="$HOME/.pyenv" -ENV PATH $PYENV_ROOT/versions/3.10.14/bin:$PYENV_ROOT/shims:$PYENV_ROOT/bin:$PATH +USER root -RUN 
pyenv install 3.10.14 && pyenv global 3.10.14 && pyenv rehash +RUN mkdir -p /root/.ssh && ssh-keyscan github.com >> /root/.ssh/known_hosts -# FROM base as requirements +RUN mkdir -p /srv/onadata \ + && chown -R appuser:appuser /srv/onadata COPY ../../ /srv/onadata/ # Install service requirements WORKDIR /srv/onadata -USER root - -# ENV GIT_SSH_COMMAND="ssh -vvv" -RUN mkdir -p /root/.ssh && ssh-keyscan github.com >> /root/.ssh/known_hosts -RUN echo $(ssh-add -l) && echo $SSH_AUTH_SOCK -RUN --mount=type=ssh echo $(ssh-add -l) && echo $SSH_AUTH_SOCK # hadolint ignore=DL3013 -RUN --mount=type=ssh if [ -n "$optional_packages" ]; then /home/onadata/.pyenv/versions/3.10.14/bin/pip install --no-cache-dir ${optional_packages} ; fi -RUN chown -R onadata:onadata /home/onadata/.pyenv/versions/3.10.14/lib/python3.10/site-packages +RUN --mount=type=ssh if [ -n "$optional_packages" ]; then /home/appuser/.pyenv/versions/3.10.14/bin/pip install --no-cache-dir ${optional_packages} ; fi +RUN chown -R appuser:appuser /home/appuser/.pyenv -USER onadata +USER appuser # hadolint ignore=DL3013 RUN python -m pip install --no-cache-dir -U pip && \ @@ -103,21 +34,20 @@ RUN python -m pip install --no-cache-dir -U pip && \ python -m pip install --no-cache-dir -r requirements/s3.pip && \ python -m pip install --no-cache-dir -r requirements/ses.pip && \ python -m pip install --no-cache-dir -r requirements/azure.pip && \ - python -m pip install --no-cache-dir setuptools==69.2.0 && \ python -m pip install --no-cache-dir pyyaml==6.0.1 uwsgitop==0.12 supervisor==4.2.5 FROM base as docs ENV PYENV_ROOT="$HOME/.pyenv" ENV PATH $PYENV_ROOT/versions/3.10.14/bin:$PYENV_ROOT/shims:$PYENV_ROOT/bin:$PATH -COPY --from=base /home/onadata/.pyenv/ /home/onadata/.pyenv/ +COPY --from=base /home/appuser/.pyenv/ /home/appuser/.pyenv/ COPY --from=base /srv/onadata/ /srv/onadata/ USER root -RUN chown -R onadata:onadata /srv/onadata/ +RUN chown -R appuser:appuser /srv/onadata/ -USER onadata +USER appuser # install 
sphinx and build API docs. RUN python -m pip install --no-cache-dir -r requirements/docs.pip && \ @@ -155,10 +85,11 @@ RUN useradd -G tty -m onadata \ && mkdir -p /srv/onadata \ && chown -R onadata:onadata /srv/onadata -COPY --from=base /home/onadata/.pyenv/ /home/onadata/.pyenv/ +COPY --from=base /home/appuser/.pyenv/ /home/onadata/.pyenv/ COPY --from=docs /srv/onadata/ /srv/onadata/ +COPY --from=base /usr/local/lib/ /usr/lib/ -RUN chown -R onadata:onadata /srv/onadata/ +RUN chown -R onadata:onadata /srv/onadata /home/onadata/.pyenv USER onadata WORKDIR /srv/onadata From f7f531d8cf6a563f9a7cd610b7fc07d251850b19 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Fri, 17 May 2024 12:55:04 +0300 Subject: [PATCH 209/270] Upload manifest to ECR for amd64 and arm64 images --- .github/workflows/ecr-image-build.yml | 61 +++++++++++++++++++++++++-- 1 file changed, 57 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ecr-image-build.yml b/.github/workflows/ecr-image-build.yml index d74c1f10b6..cf8ec5ae86 100644 --- a/.github/workflows/ecr-image-build.yml +++ b/.github/workflows/ecr-image-build.yml @@ -8,6 +8,8 @@ on: branches: - "main" - "*-rc" + tags: + - "v*" workflow_dispatch: buildAlpine: description: Whether to build an Alpine based image @@ -50,6 +52,14 @@ jobs: id: login-ecr uses: aws-actions/amazon-ecr-login@v2 + - name: Docker meta + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ steps.login-ecr.outputs.registry }}/onaio/onadata + tags: | + type=ref,event=branch + - name: Setup SSH Agent and add Github to known hosts env: SSH_AUTH_SOCK: /tmp/ssh-agent.sock @@ -76,17 +86,16 @@ jobs: context: . 
file: ./docker/onadata-uwsgi/Dockerfile.ubuntu platforms: ${{ matrix.platform }} - cache-from: type=registry,ref=${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ env.version }} + cache-from: type=registry,ref=${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ env.version || github.ref_name }} cache-to: type=inline ssh: | default=/tmp/ssh-agent.sock build-args: | optional_packages=PyYAML django-redis ${{ secrets.ECR_OPTIONAL_PACKAGES }} push: true - labels: | - "${{ matrix.platform }}" + labels: ${{ steps.meta.outputs.labels }} provenance: false - outputs: type=image,name=${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ env.version || github.ref_name }},push-by-digest=false,name-canonical=true,push=true + outputs: type=image,name=${{ steps.login-ecr.outputs.registry }}/onaio/onadata,push-by-digest=true,name-canonical=true,push=true - name: Export digest run: | @@ -181,3 +190,47 @@ jobs: env: SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK + + merge: + runs-on: ubuntu-latest + needs: + - build + steps: + - name: Download digests + uses: actions/download-artifact@v4 + with: + path: /tmp/digests + pattern: digests-* + merge-multiple: true + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: eu-central-1 + + - name: Login to Amazon ECR + id: login-ecr + uses: aws-actions/amazon-ecr-login@v2 + + - name: Docker meta + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ steps.login-ecr.outputs.registry }}/onaio/onadata + tags: | + type=ref,event=branch + + - name: Create manifest list and push + working-directory: /tmp/digests + run: | + docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) 
| join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ + $(printf '${{ steps.login-ecr.outputs.registry }}/onaio/onadata@sha256:%s ' *) + + - name: Inspect image + run: | + docker buildx imagetools inspect ${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ steps.meta.outputs.version }} From 8e63de2103a14392c1ffab576a43a392941f7d37 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Fri, 17 May 2024 17:55:57 +0300 Subject: [PATCH 210/270] Use appuser from base image Supervisor and other apps are installed have this hard coded in the executable file --- docker/onadata-uwsgi/Dockerfile.ubuntu | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/docker/onadata-uwsgi/Dockerfile.ubuntu b/docker/onadata-uwsgi/Dockerfile.ubuntu index a51b765200..0d6abd6e15 100644 --- a/docker/onadata-uwsgi/Dockerfile.ubuntu +++ b/docker/onadata-uwsgi/Dockerfile.ubuntu @@ -12,10 +12,10 @@ ENV DJANGO_SETTINGS_MODULE onadata.settings.docker USER root -RUN mkdir -p /root/.ssh && ssh-keyscan github.com >> /root/.ssh/known_hosts - -RUN mkdir -p /srv/onadata \ - && chown -R appuser:appuser /srv/onadata +RUN mkdir -p /root/.ssh && \ + ssh-keyscan github.com >> /root/.ssh/known_hosts && \ + mkdir -p /srv/onadata && \ + chown -R appuser:appuser /srv/onadata COPY ../../ /srv/onadata/ @@ -69,7 +69,7 @@ ENV LC_CTYPE en_US.UTF-8 RUN dpkg-reconfigure locales -# # Install OnaData Dependencies +# Install OnaData runtime dependencies RUN apt-get install -y --no-install-recommends \ gdal-bin \ git-core \ @@ -81,20 +81,20 @@ RUN apt-get install -y --no-install-recommends \ && rm -rf /var/lib/apt/lists/* # Create OnaData user and add to tty group -RUN useradd -G tty -m onadata \ - && mkdir -p /srv/onadata \ - && chown -R onadata:onadata /srv/onadata +RUN useradd -G tty -m appuser && \ + mkdir -p /srv/onadata && \ + chown -R appuser:appuser /srv/onadata -COPY --from=base /home/appuser/.pyenv/ /home/onadata/.pyenv/ +COPY --from=base /home/appuser/.pyenv/ 
/home/appuser/.pyenv/ COPY --from=docs /srv/onadata/ /srv/onadata/ COPY --from=base /usr/local/lib/ /usr/lib/ -RUN chown -R onadata:onadata /srv/onadata /home/onadata/.pyenv +RUN chown -R appuser:appuser /srv/onadata /home/appuser/.pyenv -USER onadata +USER appuser WORKDIR /srv/onadata -ENV HOME /home/onadata +ENV HOME /home/appuser ENV PYTHON_VERSION 3.10.14 ENV PYENV_ROOT="$HOME/.pyenv" ENV PATH $PYENV_ROOT/versions/3.10.14/bin:$PYENV_ROOT/shims:$PYENV_ROOT/bin:$PATH From 868df6a881a9f316fb70cc68ddab5133095c8dae Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Mon, 20 May 2024 10:24:42 +0300 Subject: [PATCH 211/270] remove unused arg in Dockefile --- docker/onadata-uwsgi/Dockerfile.ubuntu | 1 - 1 file changed, 1 deletion(-) diff --git a/docker/onadata-uwsgi/Dockerfile.ubuntu b/docker/onadata-uwsgi/Dockerfile.ubuntu index 0d6abd6e15..c35d1dd2a7 100644 --- a/docker/onadata-uwsgi/Dockerfile.ubuntu +++ b/docker/onadata-uwsgi/Dockerfile.ubuntu @@ -1,6 +1,5 @@ FROM onaio/python-deps:3.10.14 as base -ARG release_version=v4.0.1 ARG optional_packages # Silence configuration prompts From d4b16841e0e19f4e7fb69b1c266935c33f6415c0 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Tue, 21 May 2024 11:10:33 +0300 Subject: [PATCH 212/270] Move trivy scan after docker image tags have been set --- .github/workflows/ecr-image-build.yml | 93 +++++++++++++-------------- 1 file changed, 46 insertions(+), 47 deletions(-) diff --git a/.github/workflows/ecr-image-build.yml b/.github/workflows/ecr-image-build.yml index cf8ec5ae86..66bd4c80f8 100644 --- a/.github/workflows/ecr-image-build.yml +++ b/.github/workflows/ecr-image-build.yml @@ -135,11 +135,54 @@ jobs: - name: (Alpine) Image digest if: github.event.inputs.buildAlpine run: echo ${{ steps.docker-build-alpine.outputs.digest }} + merge: + runs-on: ubuntu-latest + needs: + - build + steps: + - name: Download digests + uses: actions/download-artifact@v4 + with: + path: /tmp/digests + pattern: digests-* + merge-multiple: true + 
+ - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: eu-central-1 + + - name: Login to Amazon ECR + id: login-ecr + uses: aws-actions/amazon-ecr-login@v2 + + - name: Docker meta + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ steps.login-ecr.outputs.registry }}/onaio/onadata + tags: | + type=ref,event=branch + + - name: Create manifest list and push + working-directory: /tmp/digests + run: | + docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ + $(printf '${{ steps.login-ecr.outputs.registry }}/onaio/onadata@sha256:%s ' *) + + - name: Inspect image + run: | + docker buildx imagetools inspect ${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ steps.meta.outputs.version }} - name: Run Trivy vulnerability scanner uses: aquasecurity/trivy-action@master with: - image-ref: ${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ env.version || github.ref_name }} + image-ref: ${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ steps.meta.outputs.version }} format: 'sarif' output: 'trivy-results.sarif' @@ -151,7 +194,7 @@ jobs: - name: Run Trivy vulnerability scanner for Slack uses: aquasecurity/trivy-action@master with: - image-ref: ${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ env.version || github.ref_name }} + image-ref: ${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ steps.meta.outputs.version }} format: json output: 'trivy-results.json' @@ -169,7 +212,7 @@ jobs: with: payload: | { - "text": "Trivy scan results for ${{ env.version || github.ref_name }}", + "text": "Trivy scan results for ${{ steps.meta.outputs.version }}", "blocks": [ { "type": "section", @@ -190,47 +233,3 @@ jobs: env: SLACK_WEBHOOK_URL: ${{ 
secrets.SLACK_WEBHOOK_URL }} SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK - - merge: - runs-on: ubuntu-latest - needs: - - build - steps: - - name: Download digests - uses: actions/download-artifact@v4 - with: - path: /tmp/digests - pattern: digests-* - merge-multiple: true - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: eu-central-1 - - - name: Login to Amazon ECR - id: login-ecr - uses: aws-actions/amazon-ecr-login@v2 - - - name: Docker meta - id: meta - uses: docker/metadata-action@v5 - with: - images: ${{ steps.login-ecr.outputs.registry }}/onaio/onadata - tags: | - type=ref,event=branch - - - name: Create manifest list and push - working-directory: /tmp/digests - run: | - docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ - $(printf '${{ steps.login-ecr.outputs.registry }}/onaio/onadata@sha256:%s ' *) - - - name: Inspect image - run: | - docker buildx imagetools inspect ${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ steps.meta.outputs.version }} From be4f34f5fc7f71b88b37c7ba2ccf62fdcde85380 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Wed, 22 May 2024 14:21:34 +0300 Subject: [PATCH 213/270] Fix merged dataset permissions not applied on share (#2598) * fix merged dataset permissions not applied on share * add testcase * update comment * update docstring * add decorator for flaky test * add flaky decorator to test * mark flaky test --- .../api/tests/viewsets/test_data_viewset.py | 2 +- .../api/tests/viewsets/test_xform_viewset.py | 2 + .../logger/tests/test_briefcase_client.py | 2 + onadata/libs/models/share_project.py | 7 +++ .../libs/tests/models/test_share_project.py | 46 +++++++++++++++++-- 5 files changed, 53 insertions(+), 6 deletions(-) 
diff --git a/onadata/apps/api/tests/viewsets/test_data_viewset.py b/onadata/apps/api/tests/viewsets/test_data_viewset.py index 223925aae3..d30b708283 100644 --- a/onadata/apps/api/tests/viewsets/test_data_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_data_viewset.py @@ -3721,7 +3721,7 @@ def setUp(self): self.logger = logging.getLogger("console_logger") # pylint: disable=invalid-name,too-many-locals - @flaky(max_runs=5) + @flaky(max_runs=8) def test_data_retrieve_instance_osm_format(self): """Test /data endpoint OSM format.""" filenames = [ diff --git a/onadata/apps/api/tests/viewsets/test_xform_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_viewset.py index a3996d96ed..a5969e631c 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_viewset.py @@ -710,6 +710,7 @@ def setUp(self): ) @patch("onadata.apps.api.viewsets.xform_viewset.send_message") + @flaky def test_replace_form_with_external_choices(self, mock_send_message): with HTTMock(enketo_mock): xls_file_path = os.path.join( @@ -2283,6 +2284,7 @@ def test_form_clone_shared_forms(self): self.assertEqual(response.status_code, 201) self.assertEqual(count + 1, XForm.objects.count()) + @flaky def test_return_error_on_clone_duplicate(self): with HTTMock(enketo_mock): self._publish_xls_form_to_project() diff --git a/onadata/apps/logger/tests/test_briefcase_client.py b/onadata/apps/logger/tests/test_briefcase_client.py index 82d30fd855..ead683b0d9 100644 --- a/onadata/apps/logger/tests/test_briefcase_client.py +++ b/onadata/apps/logger/tests/test_briefcase_client.py @@ -16,6 +16,7 @@ import requests import requests_mock from django_digest.test import Client as DigestClient +from flaky import flaky from six.moves.urllib.parse import urljoin from onadata.apps.logger.models import Instance, XForm @@ -168,6 +169,7 @@ def _download_submissions(self): mocker.head(requests_mock.ANY, content=submission_list) 
self.briefcase_client.download_instances(self.xform.id_string) + @flaky(max_runs=8) def test_download_xform_xml(self): """ Download xform via briefcase api diff --git a/onadata/libs/models/share_project.py b/onadata/libs/models/share_project.py index d179d15972..88490069b3 100644 --- a/onadata/libs/models/share_project.py +++ b/onadata/libs/models/share_project.py @@ -30,6 +30,9 @@ def remove_xform_permissions(project, user, role): for xform in project.xform_set.all(): # pylint: disable=protected-access role._remove_obj_permissions(user, xform) + # Removed MergedXForm permissions if XForm is also a MergedXForm + if hasattr(xform, "mergedxform"): + role._remove_obj_permissions(user, xform.mergedxform) def remove_dataview_permissions(project, user, role): @@ -85,6 +88,10 @@ def save(self, **kwargs): role = ROLES.get(meta_perm[1]) role.add(self.user, xform) + # Set MergedXForm permissions if XForm is also a MergedXForm + if hasattr(xform, "mergedxform"): + role.add(self.user, xform.mergedxform) + for dataview in self.project.dataview_set.all(): if dataview.matches_parent: role.add(self.user, dataview.xform) diff --git a/onadata/libs/tests/models/test_share_project.py b/onadata/libs/tests/models/test_share_project.py index c869852618..f60d299a24 100644 --- a/onadata/libs/tests/models/test_share_project.py +++ b/onadata/libs/tests/models/test_share_project.py @@ -1,9 +1,11 @@ """Tests for module onadata.libs.models.share_project""" from unittest.mock import patch, call +from pyxform.builder import create_survey_element_from_dict from onadata.apps.logger.models.data_view import DataView from onadata.apps.logger.models.project import Project +from onadata.apps.logger.models.merged_xform import MergedXForm from onadata.apps.logger.models.xform import XForm from onadata.apps.main.tests.test_base import TestBase from onadata.libs.models.share_project import ShareProject @@ -35,7 +37,7 @@ def setUp(self): project = Project.objects.create( name="Demo", organization=self.user, 
created_by=self.user ) - self._publish_markdown(md_xform, self.user, project) + self._publish_markdown(md_xform, self.user, project, id_string="a") self.dataview_form = XForm.objects.all().order_by("-pk")[0] DataView.objects.create( name="Demo", @@ -44,13 +46,34 @@ def setUp(self): matches_parent=True, columns=[], ) + # MergedXForm + self._publish_markdown(md_xform, self.user, project, id_string="b") + xf1 = XForm.objects.get(id_string="a") + xf2 = XForm.objects.get(id_string="b") + survey = create_survey_element_from_dict(xf1.json_dict()) + survey["id_string"] = "c" + survey["sms_keyword"] = survey["id_string"] + survey["title"] = "Merged XForm" + self.merged_xf = MergedXForm.objects.create( + id_string=survey["id_string"], + sms_id_string=survey["id_string"], + title=survey["title"], + user=self.user, + created_by=self.user, + is_merged_dataset=True, + project=self.project, + xml=survey.to_xml(), + json=survey.to_json(), + ) + self.merged_xf.xforms.add(xf1) + self.merged_xf.xforms.add(xf2) self.alice = self._create_user("alice", "Yuao8(-)") @patch("onadata.libs.models.share_project.safe_delete") def test_share(self, mock_safe_delete, mock_propagate): """A project is shared with a user - Permissions assigned to project, xform and dataview + Permissions assigned to project, xform, mergedxform and dataview """ instance = ShareProject(self.project, self.alice, "manager") instance.save() @@ -58,6 +81,8 @@ def test_share(self, mock_safe_delete, mock_propagate): self.assertTrue(ManagerRole.user_has_role(self.alice, self.project)) self.assertTrue(ManagerRole.user_has_role(self.alice, self.xform)) self.assertTrue(ManagerRole.user_has_role(self.alice, self.dataview_form)) + self.assertTrue(ManagerRole.user_has_role(self.alice, self.merged_xf)) + self.assertTrue(ManagerRole.user_has_role(self.alice, self.merged_xf.xform_ptr)) mock_propagate.assert_called_once_with(args=[self.project.pk]) # Cache is invalidated mock_safe_delete.assert_has_calls( @@ -69,21 +94,32 @@ def 
test_share(self, mock_safe_delete, mock_propagate): @patch("onadata.libs.models.share_project.safe_delete") def test_remove(self, mock_safe_delete, mock_propagate): - """A user is removed from a project""" - # Add user + """A user is removed from a project + + Permissions removed from project, xform, mergedxform and dataview + """ + # Simulate share project ManagerRole.add(self.alice, self.project) ManagerRole.add(self.alice, self.xform) ManagerRole.add(self.alice, self.dataview_form) - + ManagerRole.add(self.alice, self.merged_xf) + ManagerRole.add(self.alice, self.merged_xf.xform_ptr) + # Confirm project shared self.assertTrue(ManagerRole.user_has_role(self.alice, self.project)) self.assertTrue(ManagerRole.user_has_role(self.alice, self.xform)) self.assertTrue(ManagerRole.user_has_role(self.alice, self.dataview_form)) + self.assertTrue(ManagerRole.user_has_role(self.alice, self.merged_xf)) + self.assertTrue(ManagerRole.user_has_role(self.alice, self.merged_xf.xform_ptr)) # Remove user instance = ShareProject(self.project, self.alice, "manager", True) instance.save() self.assertFalse(ManagerRole.user_has_role(self.alice, self.project)) self.assertFalse(ManagerRole.user_has_role(self.alice, self.xform)) self.assertFalse(ManagerRole.user_has_role(self.alice, self.dataview_form)) + self.assertFalse(ManagerRole.user_has_role(self.alice, self.merged_xf)) + self.assertFalse( + ManagerRole.user_has_role(self.alice, self.merged_xf.xform_ptr) + ) mock_propagate.assert_called_once_with(args=[self.project.pk]) # Cache is invalidated mock_safe_delete.assert_has_calls( From c1ce64deda096aefeffe89204b84bbac8cb5ea22 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Thu, 23 May 2024 17:10:33 +0300 Subject: [PATCH 214/270] fix exception thrown when deleting export (#2600) fix argument of type 'NoneType' is not iterable --- .../api/tests/viewsets/test_export_viewset.py | 26 ++++++++++++++++++- onadata/libs/filters.py | 4 ++- 2 files changed, 28 insertions(+), 2 deletions(-) diff 
--git a/onadata/apps/api/tests/viewsets/test_export_viewset.py b/onadata/apps/api/tests/viewsets/test_export_viewset.py index f9005f5fe6..6334c66e5b 100644 --- a/onadata/apps/api/tests/viewsets/test_export_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_export_viewset.py @@ -2,7 +2,6 @@ """ test_export_viewset module """ - import os from tempfile import NamedTemporaryFile @@ -293,6 +292,31 @@ def test_export_delete(self): response = view(request, pk=export.pk) self.assertEqual(status.HTTP_204_NO_CONTENT, response.status_code) + def test_export_delete_null_body(self): + """Null request body is handled""" + markdown_xlsform = """ + | survey | + | | type | name | label | + | | select one fruits | fruit | Fruit | + + | choices | + | | list name | name | label | + | | fruits | orange | Orange | + | | fruits | mango | Mango | + """ + self._create_user_and_login() + xform = self._publish_markdown(markdown_xlsform, self.user) + bob = self.user + export = Export.objects.create(xform=xform) + export.save() + view = ExportViewSet.as_view({"delete": "destroy"}) + request = self.factory.delete( + "/export", data=None, content_type="application/json" + ) + force_authenticate(request, user=bob) + response = view(request, pk=export.pk) + self.assertEqual(status.HTTP_204_NO_CONTENT, response.status_code) + def test_export_list_with_meta_perms(self): """ Test export list for forms with meta permissions. 
diff --git a/onadata/libs/filters.py b/onadata/libs/filters.py index f203601853..6eb34653c4 100644 --- a/onadata/libs/filters.py +++ b/onadata/libs/filters.py @@ -335,8 +335,10 @@ def _add_instance_prefix_to_dataview_filter_kwargs(self, filter_kwargs): def _xform_filter(self, request, view, keyword, queryset=None): """Use XForm permissions""" xform = request.query_params.get("xform") - if xform is None and "xform" in request.data: + + if xform is None and request.data is not None and "xform" in request.data: xform = request.data.get("xform") + dataview = request.query_params.get("dataview") merged_xform = request.query_params.get("merged_xform") filename = request.query_params.get("filename") From 76f8408da5de6489d20b9b70d0dbd88744772690 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Fri, 24 May 2024 15:59:24 +0300 Subject: [PATCH 215/270] Optimize attachment xform migration (#2599) * convert python code into SQL query Run migration as SQL query to optimize performance when number of records huge * update reverse query for migration --- .../0014_populate_attachment_xform.py | 41 ++----------------- 1 file changed, 4 insertions(+), 37 deletions(-) diff --git a/onadata/apps/logger/migrations/0014_populate_attachment_xform.py b/onadata/apps/logger/migrations/0014_populate_attachment_xform.py index ba010bb481..7f34fc3f20 100644 --- a/onadata/apps/logger/migrations/0014_populate_attachment_xform.py +++ b/onadata/apps/logger/migrations/0014_populate_attachment_xform.py @@ -3,40 +3,6 @@ from django.db import migrations -def populate_attachment_xform(apps, schema_editor): - """Populate xform field for Attachments""" - Attachment = apps.get_model("logger", "Attachment") - queryset = Attachment.objects.filter(xform__isnull=True).values( - "pk", "instance__xform", "instance__user" - ) - count = queryset.count() - print("Start populating attachment xform...") - print(f"Found {count} records") - - for attachment in queryset.iterator(chunk_size=100): - # We do not want to 
trigger Model.save or any signal - # Queryset.update is a workaround to achieve this. - # Model.save and the post/pre signals may contain - # some side-effects which we are not interested in - Attachment.objects.filter(pk=attachment["pk"]).update( - xform=attachment["instance__xform"], - user=attachment["instance__user"], - ) - count -= 1 - print(f"{count} remaining") - - print("Done populating attachment xform!") - - -def reverse_populate_attachment_xform(apps, schema_editor): - """Reverse populate xform field when migrations are unapplied""" - Attachment = apps.get_model("logger", "Attachment") - queryset = Attachment.objects.filter(xform__isnull=False).values("pk") - - for attachment in queryset.iterator(chunk_size=100): - Attachment.objects.filter(pk=attachment["pk"]).update(xform=None, user=None) - - class Migration(migrations.Migration): dependencies = [ @@ -44,7 +10,8 @@ class Migration(migrations.Migration): ] operations = [ - migrations.RunPython( - populate_attachment_xform, reverse_populate_attachment_xform - ) + migrations.RunSQL( + sql="WITH logger_attachment_instance AS (SELECT logger_attachment.id, logger_instance.xform_id, logger_instance.user_id FROM logger_attachment INNER JOIN logger_instance ON logger_attachment.instance_id = logger_instance.id INNER JOIN logger_xform T4 ON logger_instance.xform_id = T4.id WHERE logger_attachment.xform_id IS NULL AND T4.deleted_at IS NULL) UPDATE logger_attachment SET xform_id = logger_attachment_instance.xform_id, user_id = logger_attachment_instance.user_id FROM logger_attachment_instance WHERE logger_attachment.id = logger_attachment_instance.id;", + reverse_sql="WITH logger_attachment_xform AS (SELECT logger_attachment.id FROM logger_attachment INNER JOIN logger_xform T4 ON logger_attachment.xform_id = T4.id WHERE logger_attachment.xform_id IS NOT NULL AND T4.deleted_at IS NULL) UPDATE logger_attachment SET xform_id = NULL, user_id = NULL FROM logger_attachment_xform WHERE logger_attachment.id = 
logger_attachment_xform.id;", + ), ] From b77f86e1c8ff2906d40393f27ade2646eaba981a Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Mon, 3 Jun 2024 13:31:21 +0300 Subject: [PATCH 216/270] Tag release v4.2.0 Signed-off-by: Kipchirchir Sigei --- CHANGES.rst | 21 +++++++++++++++++++-- onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 21 insertions(+), 4 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 6640c285a4..62ba3f4286 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,9 +3,26 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` -Next release -------------- +v4.1.0(2024-06-03) +------------------ +- Add support for Entities + `PR #2504 ` + [@kelvin-muchiri] +- Optimize attachment xform migration + `PR #2599 ` + [@kelvin-muchiri] +- Fix exception thrown when deleting export + `PR #2600 ` + [@kelvin-muchiri] +- Fix merged dataset permissions not applied on share + `PR #2598 ` + [@kelvin-muchiri] - Rebuild docker image with pyenv and on Ubuntu 22.04 runtime image. 
+ `PR #2597 ` + [@ukanga] +- Set permissions for merged dataset asynchronously + `PR #2600 ` + [@kelvin-muchiri] v4.1.0(2024-05-03) ------------------ diff --git a/onadata/__init__.py b/onadata/__init__.py index b5b0237a59..94769c0d94 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "4.1.0" +__version__ = "4.2.0" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index 0db2a0fa58..ba9e026036 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 4.1.0 +version = 4.2.0 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 48eac23edf3628d18ecea213ef761339e51285cc Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Tue, 4 Jun 2024 15:29:20 +0300 Subject: [PATCH 217/270] Update metadata action to capture tags Signed-off-by: Kipchirchir Sigei --- .github/workflows/ecr-image-build.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/ecr-image-build.yml b/.github/workflows/ecr-image-build.yml index 66bd4c80f8..9f6918814c 100644 --- a/.github/workflows/ecr-image-build.yml +++ b/.github/workflows/ecr-image-build.yml @@ -59,6 +59,7 @@ jobs: images: ${{ steps.login-ecr.outputs.registry }}/onaio/onadata tags: | type=ref,event=branch + type=ref,event=tag - name: Setup SSH Agent and add Github to known hosts env: @@ -168,6 +169,7 @@ jobs: images: ${{ steps.login-ecr.outputs.registry }}/onaio/onadata tags: | type=ref,event=branch + type=ref,event=tag - name: Create manifest list and push working-directory: /tmp/digests From b1bd0a3d4afd3ab5d3d0a57dfb2e50cc438a77d0 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Tue, 4 Jun 2024 15:59:12 +0300 Subject: [PATCH 218/270] Tag release v4.2.1 Signed-off-by: Kipchirchir Sigei --- CHANGES.rst | 8 +++++++- onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files 
changed, 9 insertions(+), 3 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 62ba3f4286..80dcacd803 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,7 +3,13 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` -v4.1.0(2024-06-03) +v4.2.1(2024-06-04) +------------------ +- Update Metadata action to capture tags + `PR #2610 ` + [@KipSigei] + +v4.2.0(2024-06-03) ------------------ - Add support for Entities `PR #2504 ` diff --git a/onadata/__init__.py b/onadata/__init__.py index 94769c0d94..cbab3c1023 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "4.2.0" +__version__ = "4.2.1" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index ba9e026036..ffbf9af758 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 4.2.0 +version = 4.2.1 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 32663a1b7f2fb4ab9463193eb5f42a9faab19391 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Wed, 5 Jun 2024 17:33:48 +0300 Subject: [PATCH 219/270] Add support for merged dataset geojson format on endpoint `/api/v1/data/` (#2608) * add merged dataset geojson format on endpoint /api/v1/data/ * update docs * update docs * update docs * update docs * update docs * update docs * update docs * update docs * mark flaky test * set max_runs for flaky test * update flaky test max run * fix typo * enhance test case * add disclaimer for merged datasets docs * update docs * update docs * update docs * update docs * update docs * update docs * update docs * update docs * update docs * update docs * update docs * update docs * update flaky test max_runs * update flaky test max_runs --- docs/merged-datasets.rst | 38 +++++++++------ .../api/tests/viewsets/test_data_viewset.py | 47 ++++++++++++++++++- 
.../api/tests/viewsets/test_xform_viewset.py | 3 +- onadata/apps/api/viewsets/data_viewset.py | 15 +++--- .../logger/models/tests/test_merged_xform.py | 37 ++------------- .../logger/tests/test_briefcase_client.py | 2 +- onadata/apps/main/tests/test_base.py | 46 +++++++++++++++++- .../libs/tests/models/test_share_project.py | 26 +--------- .../libs/tests/utils/test_project_utils.py | 37 ++------------- 9 files changed, 135 insertions(+), 116 deletions(-) diff --git a/docs/merged-datasets.rst b/docs/merged-datasets.rst index 50efd3b4e1..b0eb279d19 100644 --- a/docs/merged-datasets.rst +++ b/docs/merged-datasets.rst @@ -1,6 +1,15 @@ Merged Datasets *************** +.. warning:: **Disclaimer: Experimental Feature** + + This feature is experimental. As a result, users may encounter bugs, glitches, or unexpected behavior. While we have taken steps to ensure a stable experience, some functionality may not work as intended. + + Your feedback is invaluable in helping us improve this feature. Please report any issues or provide suggestions to help us enhance the final version. + + Use this feature at your own discretion and be prepared for potential interruptions or performance inconsistencies. + + This endpoint provides access to data from multiple forms. Merged datasets should have the same functionality as the forms endpoint with the difference being: - They do not accept submissions directly, submissions to individual forms will be reflected in merged datasets.. @@ -138,28 +147,26 @@ Response Retrieving Data from a Merged Dataset -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Returns the data from both forms. The key `_xform_id_string` can be used to -differentiate data from linked forms. +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Returns the data from all linked forms. .. raw:: html -
-    GET /api/v1/merged-datasets/{pk}/data
-    GET /api/v1/data/{pk}
-    
+
GET /api/v1/merged-datasets/{pk}/data
-:: - curl -X GET "https://api.ona.io/api/v1/merged-datasets/1/data" - curl -X GET "https://api.ona.io/api/v1/data/1" +Example +------- +:: + curl -X GET "https://api.ona.io/api/v1/merged-datasets/1/data" -Example Response ----------------- -:: +Response +-------- +:: [ {"date": "2015-05-19", "gender": "male", "age": 32, "name": "Kendy", "_xform_id_string": "form_a"}, @@ -167,6 +174,9 @@ Example Response {"date": "2015-05-19", "gender": "male", "age": 21, "name": "Tom", "_xform_id_string": "form_c"} ] + +For data pagination and advanced filtering options, use endpoint `/api/v1/data/{pk} `_ + How data in parent forms differs from and affects the merged xform ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -174,6 +184,6 @@ A merged dataset combines data from multiple forms into one form. It creates a n A merged dataset: - Does not allow submissions or data edits, this can only be done on the individual forms. - - Data deleted from the individual forms will also not be present in the mereged dataset. + - Data deleted from the individual forms will also not be present in the merged dataset. - Form replacement is not supported. - It has it's own form structure, which is not replaceable the same way you could replace an individual form when changing certain aspects of a form. 
diff --git a/onadata/apps/api/tests/viewsets/test_data_viewset.py b/onadata/apps/api/tests/viewsets/test_data_viewset.py index d30b708283..096e59c111 100644 --- a/onadata/apps/api/tests/viewsets/test_data_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_data_viewset.py @@ -3707,6 +3707,51 @@ def test_data_paginated_past_threshold(self): '; rel="last"', ) + def test_merged_dataset(self): + """Data for merged dataset is returned""" + merged_xf = self._create_merged_dataset(make_submissions=True) + view = DataViewSet.as_view({"get": "list"}) + request = self.factory.get("/", **self.extra) + response = view(request, pk=merged_xf.pk) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 2) + + def test_merged_dataset_geojson(self): + """Merged dataset geojson works""" + merged_xf = self._create_merged_dataset(make_submissions=True) + view = DataViewSet.as_view({"get": "list"}) + request = self.factory.get("/", **self.extra) + response = view(request, pk=merged_xf.pk, format="geojson") + self.assertEqual(response.status_code, 200) + # we get correct content type + headers = dict(response.items()) + self.assertEqual(headers["Content-Type"], "application/geo+json") + instance_qs = Instance.objects.all().order_by("pk") + self.assertEqual( + { + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": None, + "properties": { + "id": instance_qs[0].pk, + "xform": instance_qs[0].xform.pk, + }, + }, + { + "type": "Feature", + "geometry": None, + "properties": { + "id": instance_qs[1].pk, + "xform": instance_qs[1].xform.pk, + }, + }, + ], + }, + response.data, + ) + class TestOSM(TestAbstractViewSet): """ @@ -3721,7 +3766,7 @@ def setUp(self): self.logger = logging.getLogger("console_logger") # pylint: disable=invalid-name,too-many-locals - @flaky(max_runs=8) + @flaky(max_runs=10) def test_data_retrieve_instance_osm_format(self): """Test /data endpoint OSM format.""" filenames = [ diff --git 
a/onadata/apps/api/tests/viewsets/test_xform_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_viewset.py index a5969e631c..65d4a5dce7 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_viewset.py @@ -2284,7 +2284,7 @@ def test_form_clone_shared_forms(self): self.assertEqual(response.status_code, 201) self.assertEqual(count + 1, XForm.objects.count()) - @flaky + @flaky(max_runs=8) def test_return_error_on_clone_duplicate(self): with HTTMock(enketo_mock): self._publish_xls_form_to_project() @@ -3567,6 +3567,7 @@ def test_failed_form_publishing_after_maximum_retries( self.assertEqual(response.status_code, 202) self.assertEqual(response.data, error_message) + @flaky(max_runs=3) def test_survey_preview_endpoint(self): view = XFormViewSet.as_view({"post": "survey_preview", "get": "survey_preview"}) diff --git a/onadata/apps/api/viewsets/data_viewset.py b/onadata/apps/api/viewsets/data_viewset.py index 693f9ef86a..99617d5407 100644 --- a/onadata/apps/api/viewsets/data_viewset.py +++ b/onadata/apps/api/viewsets/data_viewset.py @@ -648,17 +648,20 @@ def list(self, request, *args, **kwargs): return super().list(request, *args, **kwargs) if export_type == "geojson": - # raise 404 if all instances dont have geoms - if not xform.instances_with_geopoints and not ( - xform.polygon_xpaths() or xform.geotrace_xpaths() - ): - raise Http404(_("Not Found")) + if not is_merged_dataset: + # raise 404 if all instances dont have geoms + if not xform.instances_with_geopoints and not ( + xform.polygon_xpaths() or xform.geotrace_xpaths() + ): + raise Http404(_("Not Found")) # add pagination when fetching geojson features page = self.paginate_queryset(self.object_list) serializer = self.get_serializer(page, many=True) - return Response(serializer.data) + return Response( + serializer.data, headers={"Content-Type": "application/geo+json"} + ) return custom_response_handler(request, xform, query, export_type) diff --git 
a/onadata/apps/logger/models/tests/test_merged_xform.py b/onadata/apps/logger/models/tests/test_merged_xform.py index 3a206c2347..1ac9db9cc1 100644 --- a/onadata/apps/logger/models/tests/test_merged_xform.py +++ b/onadata/apps/logger/models/tests/test_merged_xform.py @@ -1,48 +1,17 @@ """Tests for module onadata.apps.logger.models.merged_xform""" -from pyxform.builder import create_survey_element_from_dict from unittest.mock import call, patch from onadata.apps.main.tests.test_base import TestBase -from onadata.apps.logger.models.merged_xform import MergedXForm -from onadata.apps.logger.models.xform import XForm class MergedXFormTestCase(TestBase): @patch("onadata.libs.utils.project_utils.set_project_perms_to_xform_async.delay") def test_perms_applied_async_on_create(self, mock_set_perms): """Permissions are applied asynchronously on create""" - md = """ - | survey | - | | type | name | label | - | | select one fruits | fruit | Fruit | - - | choices | - | | list name | name | label | - | | fruits | orange | Orange | - | | fruits | mango | Mango | - """ - self._publish_markdown(md, self.user, id_string="a") - self._publish_markdown(md, self.user, id_string="b") - xf1 = XForm.objects.get(id_string="a") - xf2 = XForm.objects.get(id_string="b") - survey = create_survey_element_from_dict(xf1.json_dict()) - survey["id_string"] = "c" - survey["sms_keyword"] = survey["id_string"] - survey["title"] = "Merged XForm" - merged_xf = MergedXForm.objects.create( - id_string=survey["id_string"], - sms_id_string=survey["id_string"], - title=survey["title"], - user=self.user, - created_by=self.user, - is_merged_dataset=True, - project=self.project, - xml=survey.to_xml(), - json=survey.to_json(), - ) - merged_xf.xforms.add(xf1) - merged_xf.xforms.add(xf2) + merged_xf = self._create_merged_dataset() + xf1 = merged_xf.xforms.get(id_string="a") + xf2 = merged_xf.xforms.get(id_string="b") calls = [ call(xf1.pk, self.project.pk), call(xf2.pk, self.project.pk), diff --git 
a/onadata/apps/logger/tests/test_briefcase_client.py b/onadata/apps/logger/tests/test_briefcase_client.py index ead683b0d9..1e80ea8272 100644 --- a/onadata/apps/logger/tests/test_briefcase_client.py +++ b/onadata/apps/logger/tests/test_briefcase_client.py @@ -169,7 +169,7 @@ def _download_submissions(self): mocker.head(requests_mock.ANY, content=submission_list) self.briefcase_client.download_instances(self.xform.id_string) - @flaky(max_runs=8) + @flaky(max_runs=10) def test_download_xform_xml(self): """ Download xform via briefcase api diff --git a/onadata/apps/main/tests/test_base.py b/onadata/apps/main/tests/test_base.py index 7eac49240d..a24c38e622 100644 --- a/onadata/apps/main/tests/test_base.py +++ b/onadata/apps/main/tests/test_base.py @@ -13,6 +13,8 @@ from io import StringIO from tempfile import NamedTemporaryFile +from pyxform.builder import create_survey_element_from_dict + from django.conf import settings from django.contrib.auth import authenticate, get_user_model from django.core.files.uploadedfile import InMemoryUploadedFile @@ -28,7 +30,7 @@ from six.moves.urllib.request import urlopen from onadata.apps.api.viewsets.xform_viewset import XFormViewSet -from onadata.apps.logger.models import Instance, XForm, XFormVersion +from onadata.apps.logger.models import Instance, MergedXForm, XForm, XFormVersion from onadata.apps.logger.views import submission from onadata.apps.logger.xform_instance_parser import clean_and_parse_xml from onadata.apps.main.models import UserProfile @@ -570,3 +572,45 @@ def _publish_follow_up_form(self, user, project=None): latest_form = XForm.objects.all().order_by("-pk").first() return latest_form + + def _create_merged_dataset(self, make_submissions=False): + md = """ + | survey | + | | type | name | label | + | | select one fruits | fruit | Fruit | + | choices | + | | list name | name | label | + | | fruits | orange | Orange | + | | fruits | mango | Mango | + """ + self._publish_markdown(md, self.user, id_string="a") + 
self._publish_markdown(md, self.user, id_string="b") + xf1 = XForm.objects.get(id_string="a") + xf2 = XForm.objects.get(id_string="b") + survey = create_survey_element_from_dict(xf1.json_dict()) + survey["id_string"] = "c" + survey["sms_keyword"] = survey["id_string"] + survey["title"] = "Merged XForm" + merged_xf = MergedXForm.objects.create( + id_string=survey["id_string"], + sms_id_string=survey["id_string"], + title=survey["title"], + user=self.user, + created_by=self.user, + is_merged_dataset=True, + project=self.project, + xml=survey.to_xml(), + json=survey.to_json(), + ) + merged_xf.xforms.add(xf1) + merged_xf.xforms.add(xf2) + + if make_submissions: + # Make submission for form a + xml = 'orange' + Instance(xform=xf1, xml=xml).save() + # Make submission for form b + xml = 'mango' + Instance(xform=xf2, xml=xml).save() + + return merged_xf diff --git a/onadata/libs/tests/models/test_share_project.py b/onadata/libs/tests/models/test_share_project.py index f60d299a24..5a29e06839 100644 --- a/onadata/libs/tests/models/test_share_project.py +++ b/onadata/libs/tests/models/test_share_project.py @@ -1,11 +1,9 @@ """Tests for module onadata.libs.models.share_project""" from unittest.mock import patch, call -from pyxform.builder import create_survey_element_from_dict from onadata.apps.logger.models.data_view import DataView from onadata.apps.logger.models.project import Project -from onadata.apps.logger.models.merged_xform import MergedXForm from onadata.apps.logger.models.xform import XForm from onadata.apps.main.tests.test_base import TestBase from onadata.libs.models.share_project import ShareProject @@ -37,7 +35,7 @@ def setUp(self): project = Project.objects.create( name="Demo", organization=self.user, created_by=self.user ) - self._publish_markdown(md_xform, self.user, project, id_string="a") + self._publish_markdown(md_xform, self.user, project) self.dataview_form = XForm.objects.all().order_by("-pk")[0] DataView.objects.create( name="Demo", @@ -46,27 +44,7 @@ 
def setUp(self): matches_parent=True, columns=[], ) - # MergedXForm - self._publish_markdown(md_xform, self.user, project, id_string="b") - xf1 = XForm.objects.get(id_string="a") - xf2 = XForm.objects.get(id_string="b") - survey = create_survey_element_from_dict(xf1.json_dict()) - survey["id_string"] = "c" - survey["sms_keyword"] = survey["id_string"] - survey["title"] = "Merged XForm" - self.merged_xf = MergedXForm.objects.create( - id_string=survey["id_string"], - sms_id_string=survey["id_string"], - title=survey["title"], - user=self.user, - created_by=self.user, - is_merged_dataset=True, - project=self.project, - xml=survey.to_xml(), - json=survey.to_json(), - ) - self.merged_xf.xforms.add(xf1) - self.merged_xf.xforms.add(xf2) + self.merged_xf = self._create_merged_dataset() self.alice = self._create_user("alice", "Yuao8(-)") @patch("onadata.libs.models.share_project.safe_delete") diff --git a/onadata/libs/tests/utils/test_project_utils.py b/onadata/libs/tests/utils/test_project_utils.py index 9711c69675..133698367b 100644 --- a/onadata/libs/tests/utils/test_project_utils.py +++ b/onadata/libs/tests/utils/test_project_utils.py @@ -5,11 +5,10 @@ from unittest.mock import call, MagicMock, patch from django.test.utils import override_settings -from pyxform.builder import create_survey_element_from_dict from kombu.exceptions import OperationalError from requests import Response -from onadata.apps.logger.models import MergedXForm, Project, XForm +from onadata.apps.logger.models import Project from onadata.apps.main.tests.test_base import TestBase from onadata.libs.permissions import DataEntryRole from onadata.libs.utils.project_utils import ( @@ -75,38 +74,8 @@ def test_set_project_perms_to_xform_async(self, mock): @patch("onadata.libs.utils.project_utils.set_project_perms_to_xform") def test_set_project_perms_to_xform_async_mergedxform(self, mock): - """Permissions for a MergedXForm are set""" - md = """ - | survey | - | | type | name | label | - | | select one 
fruits | fruit | Fruit | - - | choices | - | | list name | name | label | - | | fruits | orange | Orange | - | | fruits | mango | Mango | - """ - self._publish_markdown(md, self.user, id_string="a") - self._publish_markdown(md, self.user, id_string="b") - xf1 = XForm.objects.get(id_string="a") - xf2 = XForm.objects.get(id_string="b") - survey = create_survey_element_from_dict(xf1.json_dict()) - survey["id_string"] = "c" - survey["sms_keyword"] = survey["id_string"] - survey["title"] = "Merged XForm" - merged_xf = MergedXForm.objects.create( - id_string=survey["id_string"], - sms_id_string=survey["id_string"], - title=survey["title"], - user=self.user, - created_by=self.user, - is_merged_dataset=True, - project=self.project, - xml=survey.to_xml(), - json=survey.to_json(), - ) - merged_xf.xforms.add(xf1) - merged_xf.xforms.add(xf2) + """set_project_perms_to_xform_async sets permissions for a MergedXForm""" + merged_xf = self._create_merged_dataset() set_project_perms_to_xform_async.delay(merged_xf.pk, self.project.pk) expected_calls = [ call(merged_xf.xform_ptr, self.project), From 515b37cb7df42b2cc23171002c5f9f8004681d92 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 6 Jun 2024 16:00:17 +0300 Subject: [PATCH 220/270] Tag release v4.2.2 Signed-off-by: Kipchirchir Sigei --- CHANGES.rst | 6 ++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 80dcacd803..18b7371cd1 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,12 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v4.2.2(2024-06-06) +------------------ +- Add support for merged dataset geojson format on endpoint /api/v1/data/ + `PR #2608 ` + [@kelvin-muchiri] + v4.2.1(2024-06-04) ------------------ - Update Metadata action to capture tags diff --git a/onadata/__init__.py b/onadata/__init__.py index cbab3c1023..1e81ec7589 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py 
@@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "4.2.1" +__version__ = "4.2.2" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index ffbf9af758..72cd5cf995 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 4.2.1 +version = 4.2.2 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From a5098aae341b6b89e68cf78542725ced68dfd12e Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Tue, 21 May 2024 09:54:00 +0300 Subject: [PATCH 221/270] Fix ValueError for User instance primary key in password validation Signed-off-by: Kipchirchir Sigei --- onadata/libs/utils/validators.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/onadata/libs/utils/validators.py b/onadata/libs/utils/validators.py index f05f33d7a9..89d00a238b 100644 --- a/onadata/libs/utils/validators.py +++ b/onadata/libs/utils/validators.py @@ -12,7 +12,7 @@ def __init__(self, history_limt=5): self.history_limit = history_limt def validate(self, password, user=None): - if user and user.is_active: + if user and user.pk and user.is_active: if user.check_password(password): raise ValidationError(self.message) From 44f6549f5555bc644f83761463d6f4164330b462 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 19 Jun 2024 14:10:15 +0300 Subject: [PATCH 222/270] Add tests Signed-off-by: Kipchirchir Sigei --- .../tests/utils/test_password_validator.py | 57 +++++++++++++++++++ 1 file changed, 57 insertions(+) create mode 100644 onadata/libs/tests/utils/test_password_validator.py diff --git a/onadata/libs/tests/utils/test_password_validator.py b/onadata/libs/tests/utils/test_password_validator.py new file mode 100644 index 0000000000..6119e96f57 --- /dev/null +++ b/onadata/libs/tests/utils/test_password_validator.py @@ -0,0 +1,57 @@ +from django.test import TestCase +from django.contrib.auth.models 
import User +from django.core.exceptions import ValidationError + +from onadata.apps.main.models.password_history import PasswordHistory +from onadata.libs.utils.validators import PreviousPasswordValidator + +class PreviousPasswordValidatorTestCase(TestCase): + def test_validator_does_not_raise_valueerror_missing_pk(self): + # Create a validator instance + validator = PreviousPasswordValidator() + + # Create a user instance without saving it to the database + user = User(username='testuser') + + # Call the validate method and ensure it does not raise a ValueError + try: + validator.validate('somepassword', user=user) + except ValueError: + self.fail("PreviousPasswordValidator raised ValueError unexpectedly!") + + def test_validator_raises_validationerror_for_reused_password(self): + # Create and save a user to the database + user = User.objects.create(username='testuser') + user.set_password('oldpassword') + user.save() + + # Add the old password to password history + PasswordHistory.objects.create(user=user, hashed_password=user.password) + + # Create a validator instance + validator = PreviousPasswordValidator() + + # Try using an old password + with self.assertRaises(ValidationError) as cm: + validator.validate('oldpassword', user=user) + + self.assertEqual( + str(cm.exception.message), "You cannot use a previously used password.") + + def test_validator_allows_new_password(self): + # Create and save a user to the database + user = User.objects.create(username='testuser') + user.set_password('oldpassword') + user.save() + + # Add the old password to password history + PasswordHistory.objects.create(user=user, hashed_password=user.password) + + # Create a validator instance + validator = PreviousPasswordValidator() + + # Try using a new password + try: + validator.validate('newpassword@123', user=user) + except ValidationError: + self.fail("PreviousPasswordValidator raised ValidationError unexpectedly!") From 301447c245cb65a172ac2b7b1ef066dfb7399b94 Mon Sep 17 
00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 19 Jun 2024 15:06:34 +0300 Subject: [PATCH 223/270] Add docstrings to testcases Signed-off-by: Kipchirchir Sigei --- .../tests/utils/test_password_validator.py | 34 ++++++++++++------- 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/onadata/libs/tests/utils/test_password_validator.py b/onadata/libs/tests/utils/test_password_validator.py index 6119e96f57..af9203b86c 100644 --- a/onadata/libs/tests/utils/test_password_validator.py +++ b/onadata/libs/tests/utils/test_password_validator.py @@ -5,24 +5,32 @@ from onadata.apps.main.models.password_history import PasswordHistory from onadata.libs.utils.validators import PreviousPasswordValidator + class PreviousPasswordValidatorTestCase(TestCase): - def test_validator_does_not_raise_valueerror_missing_pk(self): + """ + Test case for the PreviousPasswordValidator class. + Ensures correct behavior of password validation. + """ + + def test_missing_pk(self): + """Validator does not raise ValueError for missing pk""" # Create a validator instance validator = PreviousPasswordValidator() # Create a user instance without saving it to the database - user = User(username='testuser') + user = User(username="testuser") # Call the validate method and ensure it does not raise a ValueError try: - validator.validate('somepassword', user=user) + validator.validate("somepassword", user=user) except ValueError: self.fail("PreviousPasswordValidator raised ValueError unexpectedly!") - def test_validator_raises_validationerror_for_reused_password(self): + def test_reused_password(self): + """Test ValidationError exception thrown on reused password""" # Create and save a user to the database - user = User.objects.create(username='testuser') - user.set_password('oldpassword') + user = User.objects.create(username="testuser") + user.set_password("oldpassword") user.save() # Add the old password to password history @@ -33,15 +41,17 @@ def 
test_validator_raises_validationerror_for_reused_password(self): # Try using an old password with self.assertRaises(ValidationError) as cm: - validator.validate('oldpassword', user=user) + validator.validate("oldpassword", user=user) self.assertEqual( - str(cm.exception.message), "You cannot use a previously used password.") + str(cm.exception.message), "You cannot use a previously used password." + ) - def test_validator_allows_new_password(self): + def test_allows_new_password(self): + """Test validator allows new password not used before""" # Create and save a user to the database - user = User.objects.create(username='testuser') - user.set_password('oldpassword') + user = User.objects.create(username="testuser") + user.set_password("oldpassword") user.save() # Add the old password to password history @@ -52,6 +62,6 @@ def test_validator_allows_new_password(self): # Try using a new password try: - validator.validate('newpassword@123', user=user) + validator.validate("newpassword@123", user=user) except ValidationError: self.fail("PreviousPasswordValidator raised ValidationError unexpectedly!") From c043d0c5b58ee8049502c547d10e0315a6a717bd Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 20 Jun 2024 09:49:10 +0300 Subject: [PATCH 224/270] Add timestamp filtering documentation on messaging endpoint Signed-off-by: Kipchirchir Sigei --- docs/messaging.rst | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/docs/messaging.rst b/docs/messaging.rst index 3b3df8d3c0..fcfefe1e45 100644 --- a/docs/messaging.rst +++ b/docs/messaging.rst @@ -156,3 +156,44 @@ Sample response with link header "timestamp": "2021-02-26T03:32:57.799647-05:00" } ] + +Query events of a target using timestamp +---------------------------------------- + +Filter messages using the timestamp field. To filter messages by their timestamp, the `timestamp` parameter is appended to the messaging endpoint URL. 
+This allows for precise retrieval of events that occurred within specific time frames or ranges. + +Example +^^^^^^^^ +:: + + curl -X GET https://api.ona.io/api/v1/messaging?target_type=xform&target_id=1&verb=submission_edited×tamp__lte=2024-06-20T00:00 + +Where: + +- ``target_type=xform`` - The target object type is xform +- ``target_id=1`` - The target object unique id, in this case the xform ID +- ``verb=submission_edited`` - The action occured on the xform, in this case a submission in the xform has been edited +- ``timestamp__lte=2024-06-20T00:00`` - Filters the events/messages to include only those that occurred on or before June 20, 2024, at 00:00 (midnight). + +Timestamp field Filters Options + +============================= =================================== +Filter Description +============================= =================================== +**timestamp__year** Exact year e.g. 2024 +**timestamp__year__lt** Year Less than +**timestamp__year__lte** Year Less than or Equal to +**timestamp__year__gt** Year Greater than +**timestamp__year__gte** Year Greater than or Equal to +**timestamp__month** Exact month e.g. 10 +**timestamp__month__lt** Month Less than +**timestamp__month__lte** Month Less than or Equal to +**timestamp__month__gt** Month Greater than +**timestamp__month__gte** Month Greater than or Equal to +**timestamp__day** Exact day e.g. 
12 +**timestamp__day__lt** Day Less than +**timestamp__day__lte** Day Less than or Equal to +**timestamp__day__gt** Day Greater than +**timestamp__day__gte** Day Greater than or Equal to +============================= =================================== From e0d1932bad1ea571548e455a77e7a6fc4bda68c9 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Mon, 24 Jun 2024 15:21:52 +0300 Subject: [PATCH 225/270] Add Entity updates (#2592) * upgrade pyxform version * downgrade pyxform to previous version * upgrade pyxform * upgrade package openpyxl * downgrade pyxform to v2.0.0 * upgrade pyxform to v2.0.3 * fix import error fix ImportError: cannot import name 'NSMAP' from 'pyxform.utils' * fix error ValueError: seek of closed file * fix lint warning fix warning line too long * fix Entities not created on submission * fix failing tests * fix failing test * fix failing tests * fix failing test * resolve flaky test * remove code added for debugging * fix failing tests * remove tets cases tests for entities are catered for in onadata/apps/viewer/models/tests/test_data_dictionary.py * fix failing tests * update entity from form * add test case * move test case move onadata.apps.api.tests.viewset.test_xform_submission_viewset.TestXFormSubmissionViewSet.test_create_entity to onadata.apps.logger.tests.models.test_instance.TestInstance.test_create_entity * add history when updating entity from instance * rename method names * add test in test case * move test case move onadata.apps.api.tests.viewsets.test_xform_submission_viewset.TestXFormSubmissionViewSet.test_registration_form_inactive to onadata.apps.logger.tests.models.test_instance.TestInstance.test_registration_form_inactive * handle conditions under which an Entity is updated * rename variable * fix lint error redefined-outer-name * fix lint error redefined-outer-name * Refactor code * fix failing test * refactor test case * fix failing tests * fix failing test * add test case for model EntityHistory * fix failing test * 
refactor tests * refactor tests * refactor tests * refactor tests * refactor test * update compose api command * update CI postgres image * do not mute Instance signals in tests * unmute create_registration_form_datadictionary signal * update Entities via API * refactor code * add tests * update test * delete Entities via API * refactor code * refactor tests * update entities docs * add test * update flaky test max_runs * update docs * update docs * decrement dataset num_entities on delete Entity * update EntityList last_entity_update_time when Entity is updated * update method name * fix url not found fix url api/v2/entity-lists//entities/ not found * DRY tests * update docs * update EntityList last_entity_update_time on Entity delete * add test * add field uuid to Entity response * fix failing tests * add Entity id, uuid during serialization * add url to detail view in list of Entities response * add EntityList object level permissions * set project permissions to EntityList asynchronously * add tests * unassign entitylists permissions when removing user from project * fix lint error invalid-name * fix lint warning ungrouped-imports * fix lint warning redefined-builtin * fix failing tests * handle more edge cases for update Entity from form * DRY code * update doc string * add date_created, date_modified to Entities API response * rename Entity label key * use chunked iterator in Python migration * add OwnerRole to creator of form * prefetch related data for EntityList list * add tests --- docker-compose.yml | 2 +- docs/entities.md | 176 ++++- onadata/apps/api/permissions.py | 10 + .../tests/fixtures/Transportation Form.xml | 326 ++++---- .../tests/viewsets/test_abstract_viewset.py | 16 + .../api/tests/viewsets/test_data_viewset.py | 4 +- .../viewsets/test_entity_list_viewset.py | 695 ++++++++++++++---- .../tests/viewsets/test_xform_list_viewset.py | 9 - .../viewsets/test_xform_submission_viewset.py | 70 +- .../api/tests/viewsets/test_xform_viewset.py | 2 +- 
onadata/apps/api/urls/tests/test_v2_urls.py | 68 ++ .../apps/api/viewsets/entity_list_viewset.py | 89 ++- .../migrations/0016_add_entity_entity_list.py | 24 + .../0017_populate_entity_entity_list.py | 17 + ...ntitylistgroupobjectpermission_and_more.py | 218 ++++++ onadata/apps/logger/models/__init__.py | 2 +- onadata/apps/logger/models/entity.py | 74 +- onadata/apps/logger/models/entity_list.py | 15 + onadata/apps/logger/signals.py | 71 +- onadata/apps/logger/tasks.py | 27 + .../apps/logger/tests/models/test_entity.py | 170 +++-- .../logger/tests/models/test_entity_list.py | 17 + .../apps/logger/tests/models/test_instance.py | 592 ++++++++++++++- .../tests/models/test_registration_form.py | 39 +- onadata/apps/logger/tests/test_publish_xls.py | 77 +- onadata/apps/logger/tests/test_tasks.py | 63 ++ .../entities/instances/trees_registration.xml | 28 - .../instances/trees_registration_2.xml | 17 - .../fixtures/transportation/headers.json | 1 + .../fixtures/transportation/headers_csv.json | 1 + .../transportation/transportation.xml | 326 ++++---- .../transportation/transportation_export.xlsx | Bin 6411 -> 6957 bytes onadata/apps/main/tests/test_base.py | 32 +- onadata/apps/main/tests/test_process.py | 10 +- onadata/apps/viewer/models/data_dictionary.py | 21 +- onadata/libs/filters.py | 13 +- onadata/libs/models/share_project.py | 12 + onadata/libs/permissions.py | 19 + onadata/libs/serializers/entity_serializer.py | 86 ++- onadata/libs/serializers/xform_serializer.py | 4 +- onadata/libs/test_utils/pyxform_test_case.py | 2 +- .../libs/tests/models/test_share_project.py | 23 +- .../test_merged_xform_serializer.py | 494 +++++++------ .../serializers/test_xform_serializer.py | 55 +- onadata/libs/tests/utils/test_csv_builder.py | 2 - onadata/libs/tests/utils/test_export_tools.py | 21 +- onadata/libs/tests/utils/test_logger_tools.py | 51 +- .../libs/tests/utils/test_project_utils.py | 48 +- onadata/libs/utils/export_tools.py | 4 +- onadata/libs/utils/logger_tools.py | 97 ++- 
onadata/libs/utils/project_utils.py | 48 +- onadata/libs/utils/user_auth.py | 18 +- requirements/base.pip | 4 +- requirements/dev.pip | 4 +- setup.cfg | 2 +- 55 files changed, 3046 insertions(+), 1270 deletions(-) create mode 100644 onadata/apps/api/urls/tests/test_v2_urls.py create mode 100644 onadata/apps/logger/migrations/0016_add_entity_entity_list.py create mode 100644 onadata/apps/logger/migrations/0017_populate_entity_entity_list.py create mode 100644 onadata/apps/logger/migrations/0018_entityhistory_entitylistgroupobjectpermission_and_more.py create mode 100644 onadata/apps/logger/tasks.py create mode 100644 onadata/apps/logger/tests/test_tasks.py delete mode 100644 onadata/apps/main/tests/fixtures/entities/instances/trees_registration.xml delete mode 100644 onadata/apps/main/tests/fixtures/entities/instances/trees_registration_2.xml diff --git a/docker-compose.yml b/docker-compose.yml index f619f154cb..7801883d5e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -12,7 +12,7 @@ services: - ./:/srv/onadata ports: - 8000:8000 - command: /usr/local/bin/uwsgi --ini /srv/onadata/uwsgi.ini + command: python3.10 manage.py runserver --settings=onadata.settings.docker celery: build: diff --git a/docs/entities.md b/docs/entities.md index 093c4cbdb2..13edbcd1a7 100644 --- a/docs/entities.md +++ b/docs/entities.md @@ -4,20 +4,29 @@ Entities allow you to share information between forms, enabling the collection o The following endpoints provides access to Entities related data: Where: -- _Entity_ - Each item that gets managed by an ODK workflow. Entities are automatically created from submissions receieved from a form that contains entity definitions. -- _EntityList_ - a dataset that contains Entities of the same type. +- `Entity` - Each item that gets managed by an ODK workflow. Entities are automatically created from submissions receieved from a form that contains entity definitions. +- `EntityList` - a dataset that contains Entities of the same type. 
+- `entity_list_id` - An EntityList's unique identifier +- `entity_id` - An Entity's unique identifier ## Get EntityLists `GET /api/v2/entity-lists` -**Example** +This endpoint is used to get all EntityList datasets the user permission to view. + +**Request** `curl -X GET https://api.ona.io/api/v2/entity-lists` **Response** +Status: `200 OK` + +Body: + ``` + [ { "url":"http://testserver/api/v2/entity-lists/9", @@ -39,12 +48,16 @@ To get EntityLists for a specific project `GET /api/v2/entity-lists?project=` -**Example** +**Request** `curl -X GET https://api.ona.io/api/v2/entity-lists?project=9` **Response** +Status: `200 OK` + +Body: + ``` [ { @@ -67,12 +80,18 @@ To get EntityLists for a specific project `GET /api/v2/entity-lists/` -**Example** +This endpoint is used to get a single EntityList. + +**Request** `curl -X GET https://api.ona.io/api/v2/entity-lists/1` **Response** +Status: `200 OK` + +Body: + ``` { "id":16, @@ -110,38 +129,141 @@ To get EntityLists for a specific project `GET api/v2/entity-lists//entities` -**Example** +This endpoint is used to get Entities belonging to a single EntityList dataset. 
+ +**Request** `curl -X GET https://api.ona.io/api/v2/entity-lists/1/entities` **Response** +Status: `200 OK` + +Body: + ``` [ { - "_id":3, - "species":"purpleheart", - "_version":"2022110901", - "geometry":"-1.286905 36.772845 0 0", - "formhub/uuid":"d156a2dce4c34751af57f21ef5c4e6cc", - "meta/instanceID":"uuid:9d3f042e-cfec-4d2a-8b5b-212e3b04802b", - "_xform_id_string":"trees_registration", - "circumference_cm":300, - "meta/entity/label":"300cm purpleheart", - "meta/instanceName":"300cm purpleheart" + "url":"http://testserver/api/v2/entity-lists/1/entities/3", + "id":3, + "uuid": "dbee4c32-a922-451c-9df7-42f40bf78f48", + "date_created": "2024-06-20T07:37:20.416054Z", + "data": { + "species":"purpleheart", + "geometry":"-1.286905 36.772845 0 0", + "circumference_cm":300, + "label":"300cm purpleheart", + } }, { - "_id":4, - "species":"wallaba", - "_version":"2022110901", - "geometry":"-1.305796 36.791849 0 0", - "formhub/uuid":"d156a2dce4c34751af57f21ef5c4e6cc", - "intake_notes":"Looks malnourished", - "meta/instanceID":"uuid:648e4106-2224-4bd7-8bf9-859102fc6fae", - "_xform_id_string":"trees_registration", - "circumference_cm":100, - "meta/entity/label":"100cm wallaba", - "meta/instanceName":"100cm wallaba" + "url":"http://testserver/api/v2/entity-lists/1/entities/4", + "id":4, + "uuid": "517185b4-bc06-450c-a6ce-44605dec5480", + "date_created": "2024-06-20T07:38:20.416054Z", + "data": { + "species":"wallaba", + "geometry":"-1.305796 36.791849 0 0", + "intake_notes":"Looks malnourished", + "circumference_cm":100, + "label":"100cm wallaba", + } } ] ``` + +## Get a single Entity + +`GET api/v2/entity-lists//entities/` + +This endpoint is used to get a single Entity. 
+ +**Request** + +`curl -X GET https://api.ona.io/api/v2/entity-lists/1/entities/3` + +**Response** + +Status: `200 OK` + +Body: + +``` +{ + "id":3, + "uuid": "dbee4c32-a922-451c-9df7-42f40bf78f48", + "date_created": "2024-06-20T07:37:20.416054Z", + "date_modified: "2024-06-20T07:37:20.416054Z", + "data": { + "species":"purpleheart", + "geometry":"-1.286905 36.772845 0 0", + "circumference_cm":300, + "label":"300cm purpleheart", + } +} +``` + +## Update Entity + +`PATCH api/v2/entity-lists//entities/` + +This endpoint is used to update the label or the properties (passed as JSON in the request body) of an Entity. + +You only need to include the properties you wish to update. To unset the value of any property, you can set it to empty string ("") or null. + +A property must exist in the EntityList dataset. + +The label must be a non-empty string. + +**Request** + +```sh +curl -X PATCH https://api.ona.io/api/v2/entity-lists/1/entities/1 \ +-H "Authorization: Token ACCESS_TOKEN" \ +-H "Content-Type: application/json" \ +-d '{ + "label": "30cm mora", + "data": { + "geometry": "-1.286805 36.772845 0 0", + "species": "mora", + "circumference_cm": 30 + } + }' +``` + +**Response** + +Status: `200 OK` + +Body: + +``` +{ + "id": 1, + "uuid": "dbee4c32-a922-451c-9df7-42f40bf78f48", + "date_created": "2024-06-20T07:37:20.416054Z", + "date_modified: "2024-06-20T08:37:20.416054Z", + "data": { + "geometry": "-1.286805 36.772845 0 0", + "species": "mora", + "circumference_cm": 30, + "label": "30cm mora", + } +} +``` + +## Delete an Entity + +`DELETE api/v2/entity-lists//entities/` + +The endpoint is used to delete an Entity + +**Request** + +```sh +curl -X DELETE https://api.ona.io/api/v2/entity-lists/1/entities/1 \ +-H "Authorization: Token ACCESS_TOKEN" +``` + +**Response** + +Status: `204 No Content` diff --git a/onadata/apps/api/permissions.py b/onadata/apps/api/permissions.py index 07485f9903..195a98e5a4 100644 --- a/onadata/apps/api/permissions.py +++ 
b/onadata/apps/api/permissions.py @@ -558,3 +558,13 @@ def has_permission(self, request, view): return False return True + + +class EntityListPermission(DjangoObjectPermissionsAllowAnon): + """Permission for EntityList""" + + def has_permission(self, request, view): + if request.user.is_anonymous: + return True + + return super().has_permission(request, view) diff --git a/onadata/apps/api/tests/fixtures/Transportation Form.xml b/onadata/apps/api/tests/fixtures/Transportation Form.xml index f55eb6b98e..c4454ddbce 100644 --- a/onadata/apps/api/tests/fixtures/Transportation Form.xml +++ b/onadata/apps/api/tests/fixtures/Transportation Form.xml @@ -1,5 +1,5 @@ - + transportation_2011_07_25 @@ -39,6 +39,9 @@ + + + @@ -47,19 +50,84 @@ + + + + ambulance + + + + bicycle + + + + boat_canoe + + + + bus + + + + donkey_mule_cart + + + + keke_pepe + + + + lorry + + + + motorbike + + + + taxi + + + + other + + + + + + + + daily + + + + weekly + + + + other + + + + dont_know + + + + - - - - - - - - - - + + + + + + + + + + + - + @@ -68,46 +136,10 @@ @@ -117,198 +149,100 @@ - - - daily - - - - weekly - - - - other - - - - dont_know - + + + - - - daily - - - - weekly - - - - other - - - - dont_know - + + + - - - daily - - - - weekly - - - - other - - - - dont_know - + + + - - - daily - - - - weekly - - - - other - - - - dont_know - + + + - - - daily - - - - weekly - - - - other - - - - dont_know - + + + - - - daily - - - - weekly - - - - other - - - - dont_know - + + + - - - daily - - - - weekly - - - - other - - - - dont_know - + + + - - - daily - - - - weekly - - - - other - - - - dont_know - + + + - - - daily - - - - weekly - - - - other - - - - dont_know - + + + + + + + + + + + + diff --git a/onadata/apps/api/tests/viewsets/test_abstract_viewset.py b/onadata/apps/api/tests/viewsets/test_abstract_viewset.py index 4164a29f77..81ac952be4 100644 --- a/onadata/apps/api/tests/viewsets/test_abstract_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_abstract_viewset.py @@ -31,6 
+31,8 @@ from onadata.apps.api.viewsets.widget_viewset import WidgetViewSet from onadata.apps.logger.models import ( Attachment, + Entity, + EntityList, Instance, Project, XForm, @@ -687,3 +689,17 @@ def _get_request_session_with_auth(self, view, auth, extra=None): request.session = self.client.session return request + + def _create_entity(self): + self._publish_registration_form(self.user) + self.entity_list = EntityList.objects.get(name="trees") + self.entity = Entity.objects.create( + entity_list=self.entity_list, + json={ + "geometry": "-1.286905 36.772845 0 0", + "species": "purpleheart", + "circumference_cm": 300, + "label": "300cm purpleheart", + }, + uuid="dbee4c32-a922-451c-9df7-42f40bf78f48", + ) diff --git a/onadata/apps/api/tests/viewsets/test_data_viewset.py b/onadata/apps/api/tests/viewsets/test_data_viewset.py index 096e59c111..1897434b1c 100644 --- a/onadata/apps/api/tests/viewsets/test_data_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_data_viewset.py @@ -3881,6 +3881,7 @@ def test_csv_export(self): "transport/loop_over_transport_types_frequency/lorry/frequency_to_referral_facility", "transport/loop_over_transport_types_frequency/motorbike/frequency_to_referral_facility", "transport/loop_over_transport_types_frequency/taxi/frequency_to_referral_facility", + "transport/loop_over_transport_types_frequency/other/frequency_to_referral_facility", "image1", "meta/instanceID", "_id", @@ -3936,6 +3937,7 @@ def test_default_ordering(self): "transport/loop_over_transport_types_frequency/lorry/frequency_to_referral_facility", "transport/loop_over_transport_types_frequency/motorbike/frequency_to_referral_facility", "transport/loop_over_transport_types_frequency/taxi/frequency_to_referral_facility", + "transport/loop_over_transport_types_frequency/other/frequency_to_referral_facility", "image1", "meta/instanceID", "_id", @@ -3955,4 +3957,4 @@ def test_default_ordering(self): # csv records should be ordered by id in descending order for instance in 
instances: row = next(csv_reader) - self.assertEqual(str(instance.id), row[22]) + self.assertEqual(str(instance.id), row[23]) diff --git a/onadata/apps/api/tests/viewsets/test_entity_list_viewset.py b/onadata/apps/api/tests/viewsets/test_entity_list_viewset.py index ce6333733a..44820a2565 100644 --- a/onadata/apps/api/tests/viewsets/test_entity_list_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_entity_list_viewset.py @@ -1,58 +1,60 @@ """Tests for module onadata.apps.api.viewsets.entity_list_viewset""" import json -import os import sys +from datetime import datetime + +from unittest.mock import patch -from django.conf import settings from django.test import override_settings from django.utils import timezone from onadata.apps.api.viewsets.entity_list_viewset import EntityListViewSet from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet -from onadata.apps.logger.models import Entity, EntityList, Project +from onadata.apps.logger.models import Entity, EntityHistory, EntityList, Project from onadata.libs.models.share_project import ShareProject +from onadata.libs.permissions import ROLES, OwnerRole -class GetEntityListsTestCase(TestAbstractViewSet): +class GetEntityListArrayTestCase(TestAbstractViewSet): """Tests for GET all EntityLists""" def setUp(self): super().setUp() self.view = EntityListViewSet.as_view({"get": "list"}) + self._publish_registration_form(self.user) + self._publish_follow_up_form(self.user) + self.trees_entity_list = EntityList.objects.get(name="trees") + OwnerRole.add(self.user, self.trees_entity_list) + # Create more EntityLists explicitly + self._create_entity_list("immunization") + self._create_entity_list("savings") + + def _create_entity_list(self, name, project=None): + if project is None: + project = self.project + + entity_list = EntityList.objects.create(name=name, project=project) + OwnerRole.add(self.user, entity_list) @override_settings(TIME_ZONE="UTC") def test_get_all(self): - """GET all 
EntityLists works""" - # Publish registration form and create "trees" EntityList dataset - self._publish_registration_form(self.user) - # Publish follow up form for "trees" dataset - self._publish_follow_up_form(self.user) - # Make submission on tree_registration form - submission_path = os.path.join( - settings.PROJECT_ROOT, - "apps", - "main", - "tests", - "fixtures", - "entities", - "instances", - "trees_registration.xml", + """Getting all EntityLists works""" + Entity.objects.create( + entity_list=self.trees_entity_list, + json={ + "species": "purpleheart", + "geometry": "-1.286905 36.772845 0 0", + "circumference_cm": 300, + "label": "300cm purpleheart", + }, + uuid="dbee4c32-a922-451c-9df7-42f40bf78f48", ) - self._make_submission(submission_path) - # Create more EntityLists explicitly - EntityList.objects.create(name="immunization", project=self.project) - EntityList.objects.create(name="savings", project=self.project) qs = EntityList.objects.all().order_by("pk") first = qs[0] second = qs[1] third = qs[2] - # Make request - request = self.factory.get("/", **self.extra) - response = self.view(request) - self.assertEqual(response.status_code, 200) - self.assertIsNotNone(response.get("Cache-Control")) expected_data = [ { "url": f"http://testserver/api/v2/entity-lists/{first.pk}", @@ -93,67 +95,77 @@ def test_get_all(self): "num_entities": 0, }, ] + + request = self.factory.get("/", **self.extra) + response = self.view(request) + + self.assertEqual(response.status_code, 200) + self.assertIsNotNone(response.get("Cache-Control")) self.assertEqual(response.data, expected_data) def test_anonymous_user(self): """Anonymous user can only view EntityLists under public projects""" - # Create public project public_project = Project.objects.create( name="public", shared=True, created_by=self.user, organization=self.user, ) - # Create private project - private_project = Project.objects.create( - name="private", - shared=False, - created_by=self.user, - 
organization=self.user, + entity_list = EntityList.objects.create( + name="public_entity_list", project=public_project ) - # Create EntityList explicitly - EntityList.objects.create(name="immunization", project=public_project) - EntityList.objects.create(name="savings", project=private_project) - # Make request as anonymous user request = self.factory.get("/") response = self.view(request) self.assertEqual(response.status_code, 200) - self.assertIsNotNone(response.get("Cache-Control")) self.assertEqual(len(response.data), 1) - first = EntityList.objects.all()[0] - self.assertEqual(response.data[0]["id"], first.pk) - # Logged in user is able to view all - request = self.factory.get("/", **self.extra) - response = self.view(request) - self.assertEqual(response.status_code, 200) - self.assertIsNotNone(response.get("Cache-Control")) - self.assertEqual(len(response.data), 2) + self.assertEqual(response.data[0]["id"], entity_list.pk) def test_pagination(self): """Pagination works""" - self._project_create() - EntityList.objects.create(name="dataset_1", project=self.project) - EntityList.objects.create(name="dataset_2", project=self.project) request = self.factory.get("/", data={"page": 1, "page_size": 1}, **self.extra) response = self.view(request) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 1) - def test_filtering_by_project(self): - """Filter by project id works""" - self._project_create() + def test_filter_by_project(self): + """Filtering by `project` query param works""" project_2 = Project.objects.create( name="Other project", created_by=self.user, organization=self.user, ) - EntityList.objects.create(name="dataset_1", project=self.project) - EntityList.objects.create(name="dataset_2", project=project_2) + self._create_entity_list("census", project_2) request = self.factory.get("/", data={"project": project_2.pk}, **self.extra) response = self.view(request) self.assertEqual(response.status_code, 200) 
self.assertEqual(len(response.data), 1) - self.assertEqual(response.data[0]["name"], "dataset_2") + self.assertEqual(response.data[0]["name"], "census") + + def test_object_permissions(self): + """Results limited to objects user has view level permissions""" + alice_data = { + "username": "alice", + "email": "aclie@example.com", + "password1": "password12345", + "password2": "password12345", + "first_name": "Alice", + "last_name": "Hughes", + } + alice_profile = self._create_user_profile(alice_data) + extra = {"HTTP_AUTHORIZATION": f"Token {alice_profile.user.auth_token}"} + + for role in ROLES: + ShareProject(self.project, "alice", role).save() + request = self.factory.get("/", **extra) + response = self.view(request) + + if role in ["owner", "manager"]: + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 3) + + else: + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 0) @override_settings(TIME_ZONE="UTC") @@ -169,18 +181,19 @@ def setUp(self): # Publish follow up form for "trees" dataset self._publish_follow_up_form(self.user) self.entity_list = EntityList.objects.first() - # Make submission on tree_registration form - submission_path = os.path.join( - settings.PROJECT_ROOT, - "apps", - "main", - "tests", - "fixtures", - "entities", - "instances", - "trees_registration.xml", + # Create Entity for trees EntityList + trees_entity_list = EntityList.objects.get(name="trees") + OwnerRole.add(self.user, trees_entity_list) + Entity.objects.create( + entity_list=trees_entity_list, + json={ + "species": "purpleheart", + "geometry": "-1.286905 36.772845 0 0", + "circumference_cm": 300, + "label": "300cm purpleheart", + }, + uuid="dbee4c32-a922-451c-9df7-42f40bf78f48", ) - self._make_submission(submission_path) def test_get_entity_list(self): """Returns a single EntityList""" @@ -240,14 +253,14 @@ def test_anonymous_user(self): response = self.view(request, pk=self.entity_list.pk) 
self.assertEqual(response.status_code, 200) - def test_does_not_exist(self): + def test_invalid_entity_list(self): """Invalid EntityList is handled""" request = self.factory.get("/", **self.extra) response = self.view(request, pk=sys.maxsize) self.assertEqual(response.status_code, 404) - def test_shared_project(self): - """A user can view a project shared with them""" + def test_object_permissions(self): + """User must have object view level permissions""" alice_data = { "username": "alice", "email": "aclie@example.com", @@ -257,14 +270,21 @@ def test_shared_project(self): "last_name": "Hughes", } alice_profile = self._create_user_profile(alice_data) - # Share project with Alice - ShareProject(self.project, "alice", "readonly-no-download") extra = {"HTTP_AUTHORIZATION": f"Token {alice_profile.user.auth_token}"} - request = self.factory.get("/", **extra) - response = self.view(request, pk=self.entity_list.pk) - self.assertEqual(response.status_code, 200) + + for role in ROLES: + ShareProject(self.project, "alice", role).save() + request = self.factory.get("/", **extra) + response = self.view(request, pk=self.entity_list.pk) + + if role in ["owner", "manager"]: + self.assertEqual(response.status_code, 200) + + else: + self.assertEqual(response.status_code, 404) +@override_settings(TIME_ZONE="UTC") class GetEntitiesTestCase(TestAbstractViewSet): """Tests for GET Entities""" @@ -276,54 +296,61 @@ def setUp(self): self._publish_registration_form(self.user) # Publish follow up form for "trees" dataset self._publish_follow_up_form(self.user) - # Make submissions which will then create Entities - paths = [ - os.path.join( - self.main_directory, - "fixtures", - "entities", - "instances", - "trees_registration.xml", - ), - os.path.join( - self.main_directory, - "fixtures", - "entities", - "instances", - "trees_registration_2.xml", - ), - ] - - for path in paths: - self._make_submission(path) - - self.entity_list = EntityList.objects.first() - entity_qs = 
Entity.objects.all().order_by("pk") - self.expected_data = [ - { - "formhub/uuid": "d156a2dce4c34751af57f21ef5c4e6cc", + # Create Entity for trees EntityList + self.entity_list = EntityList.objects.get(name="trees") + OwnerRole.add(self.user, self.entity_list) + Entity.objects.create( + entity_list=self.entity_list, + json={ "geometry": "-1.286905 36.772845 0 0", "species": "purpleheart", "circumference_cm": 300, - "meta/instanceID": "uuid:9d3f042e-cfec-4d2a-8b5b-212e3b04802b", - "meta/instanceName": "300cm purpleheart", - "meta/entity/label": "300cm purpleheart", - "_xform_id_string": "trees_registration", - "_version": "2022110901", - "_id": entity_qs[0].pk, + "label": "300cm purpleheart", }, - { - "formhub/uuid": "d156a2dce4c34751af57f21ef5c4e6cc", + uuid="dbee4c32-a922-451c-9df7-42f40bf78f48", + ), + Entity.objects.create( + entity_list=self.entity_list, + json={ "geometry": "-1.305796 36.791849 0 0", "species": "wallaba", "circumference_cm": 100, "intake_notes": "Looks malnourished", - "meta/instanceID": "uuid:648e4106-2224-4bd7-8bf9-859102fc6fae", - "meta/instanceName": "100cm wallaba", - "meta/entity/label": "100cm wallaba", - "_xform_id_string": "trees_registration", - "_version": "2022110901", - "_id": entity_qs[1].pk, + "label": "100cm wallaba", + }, + uuid="517185b4-bc06-450c-a6ce-44605dec5480", + ) + entity_qs = Entity.objects.all().order_by("pk") + pk = self.entity_list.pk + self.expected_data = [ + { + "url": f"http://testserver/api/v2/entity-lists/{pk}/entities/{entity_qs[0].pk}", + "id": entity_qs[0].pk, + "uuid": "dbee4c32-a922-451c-9df7-42f40bf78f48", + "date_created": entity_qs[0] + .date_created.isoformat() + .replace("+00:00", "Z"), + "data": { + "geometry": "-1.286905 36.772845 0 0", + "species": "purpleheart", + "circumference_cm": 300, + "label": "300cm purpleheart", + }, + }, + { + "url": f"http://testserver/api/v2/entity-lists/{pk}/entities/{entity_qs[1].pk}", + "id": entity_qs[1].pk, + "uuid": "517185b4-bc06-450c-a6ce-44605dec5480", + 
"date_created": entity_qs[1] + .date_created.isoformat() + .replace("+00:00", "Z"), + "data": { + "geometry": "-1.305796 36.791849 0 0", + "species": "wallaba", + "circumference_cm": 100, + "intake_notes": "Looks malnourished", + "label": "100cm wallaba", + }, }, ] @@ -337,11 +364,11 @@ def test_get_all(self): def test_anonymous_user(self): """Anonymous user cannot view Entities for a private EntityList""" - # Anonymous user cannot view private EntityList + # Private EntityList request = self.factory.get("/") response = self.view(request, pk=self.entity_list.pk) self.assertEqual(response.status_code, 404) - # Anonymous user can view public EntityList + # Public EntityList self.project.shared = True self.project.save() request = self.factory.get("/") @@ -349,8 +376,8 @@ def test_anonymous_user(self): self.assertEqual(response.status_code, 200) self.assertEqual(response.data, self.expected_data) - def test_shared_project(self): - """A user can view Entities for a project shared with them""" + def test_object_permissions(self): + """User must have EntityList view level permissions""" alice_data = { "username": "alice", "email": "aclie@example.com", @@ -360,13 +387,19 @@ def test_shared_project(self): "last_name": "Hughes", } alice_profile = self._create_user_profile(alice_data) - # Share project with Alice - ShareProject(self.project, "alice", "readonly-no-download") extra = {"HTTP_AUTHORIZATION": f"Token {alice_profile.user.auth_token}"} - request = self.factory.get("/", **extra) - response = self.view(request, pk=self.entity_list.pk) - self.assertEqual(response.status_code, 200) - self.assertEqual(response.data, self.expected_data) + + for role in ROLES: + ShareProject(self.project, "alice", role).save() + request = self.factory.get("/", **extra) + response = self.view(request, pk=self.entity_list.pk) + + if role in ["owner", "manager"]: + self.assertEqual(response.status_code, 200) + self.assertEqual(response.data, self.expected_data) + + else: + 
self.assertEqual(response.status_code, 404) def test_pagination(self): """Pagination works""" @@ -374,7 +407,9 @@ def test_pagination(self): response = self.view(request, pk=self.entity_list.pk) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 1) - self.assertEqual(response.data[0]["meta/entity/label"], "300cm purpleheart") + self.assertEqual( + response.data[0]["uuid"], "dbee4c32-a922-451c-9df7-42f40bf78f48" + ) def test_deleted_ignored(self): """Deleted Entities are ignored""" @@ -387,3 +422,409 @@ def test_deleted_ignored(self): self.assertEqual(response.status_code, 200) self.assertEqual(response.data, [self.expected_data[-1]]) self.assertIsNotNone(response.get("Cache-Control")) + + def test_invalid_entity_list(self): + """Invalid EntityList is handled""" + request = self.factory.get("/", **self.extra) + response = self.view(request, pk=sys.maxsize) + self.assertEqual(response.status_code, 404) + + +@override_settings(TIME_ZONE="UTC") +class GetSingleEntityTestCase(TestAbstractViewSet): + """Tests for getting a single Entity""" + + def setUp(self): + super().setUp() + + self.view = EntityListViewSet.as_view({"get": "entities"}) + self._create_entity() + OwnerRole.add(self.user, self.entity_list) + + def test_get_entity(self): + """Getting a single Entity works""" + request = self.factory.get("/", **self.extra) + response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) + self.assertEqual(response.status_code, 200) + self.assertEqual( + response.data, + { + "id": self.entity.pk, + "uuid": "dbee4c32-a922-451c-9df7-42f40bf78f48", + "date_created": self.entity.date_created.isoformat().replace( + "+00:00", "Z" + ), + "date_modified": self.entity.date_modified.isoformat().replace( + "+00:00", "Z" + ), + "data": { + "geometry": "-1.286905 36.772845 0 0", + "species": "purpleheart", + "circumference_cm": 300, + "label": "300cm purpleheart", + }, + }, + ) + + def test_invalid_entity(self): + """Invalid Entity is 
handled""" + request = self.factory.get("/", **self.extra) + response = self.view(request, pk=self.entity_list.pk, entity_pk=sys.maxsize) + self.assertEqual(response.status_code, 404) + + def test_invalid_entity_list(self): + """Invalid EntityList is handled""" + request = self.factory.get("/", **self.extra) + response = self.view(request, pk=sys.maxsize, entity_pk=self.entity.pk) + self.assertEqual(response.status_code, 404) + + def test_entity_already_deleted(self): + """Deleted Entity cannot be retrieved""" + self.entity.deleted_at = timezone.now() + self.entity.save() + request = self.factory.get("/", **self.extra) + response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) + self.assertEqual(response.status_code, 404) + + def test_anonymous_user(self): + """Anonymous user cannot get a private Entity""" + # Anonymous user cannot get private Entity + request = self.factory.get("/") + response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) + self.assertEqual(response.status_code, 404) + # Anonymous user can get public Entity + self.project.shared = True + self.project.save() + request = self.factory.get("/") + response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) + self.assertEqual(response.status_code, 200) + + def test_object_permissions(self): + """User must have EntityList view level permissions""" + alice_data = { + "username": "alice", + "email": "aclie@example.com", + "password1": "password12345", + "password2": "password12345", + "first_name": "Alice", + "last_name": "Hughes", + } + alice_profile = self._create_user_profile(alice_data) + extra = {"HTTP_AUTHORIZATION": f"Token {alice_profile.user.auth_token}"} + + for role in ROLES: + ShareProject(self.project, "alice", role).save() + request = self.factory.get("/", **extra) + response = self.view( + request, pk=self.entity_list.pk, entity_pk=self.entity.pk + ) + + if role in ["owner", "manager"]: + 
self.assertEqual(response.status_code, 200) + + else: + self.assertEqual(response.status_code, 404) + + +@override_settings(TIME_ZONE="UTC") +class UpdateEntityTestCase(TestAbstractViewSet): + """Tests for updating a single Entity""" + + def setUp(self): + super().setUp() + + self.view = EntityListViewSet.as_view( + {"put": "entities", "patch": "entities"}, + ) + self._create_entity() + OwnerRole.add(self.user, self.entity_list) + + @patch("django.utils.timezone.now") + def test_updating_entity(self, mock_now): + """Updating an Entity works""" + mock_date = datetime(2024, 6, 12, 12, 34, 0, tzinfo=timezone.utc) + mock_now.return_value = mock_date + data = { + "label": "30cm mora", + "data": { + "geometry": "-1.286805 36.772845 0 0", + "species": "mora", + "circumference_cm": 30, + }, + } + request = self.factory.put("/", data=data, format="json", **self.extra) + response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) + self.entity.refresh_from_db() + self.entity_list.refresh_from_db() + self.assertEqual(response.status_code, 200) + expected_json = { + "geometry": "-1.286805 36.772845 0 0", + "species": "mora", + "circumference_cm": 30, + "label": "30cm mora", + } + + self.assertDictEqual( + response.data, + { + "id": self.entity.pk, + "uuid": "dbee4c32-a922-451c-9df7-42f40bf78f48", + "date_created": self.entity.date_created.isoformat().replace( + "+00:00", "Z" + ), + "date_modified": self.entity.date_modified.isoformat().replace( + "+00:00", "Z" + ), + "data": expected_json, + }, + ) + self.assertDictEqual(self.entity.json, expected_json) + self.assertEqual(self.entity_list.last_entity_update_time, mock_date) + self.assertEqual(EntityHistory.objects.count(), 1) + history = EntityHistory.objects.first() + self.assertEqual(history.entity, self.entity) + self.assertIsNone(history.registration_form) + self.assertIsNone(history.instance) + self.assertIsNone(history.xml) + self.assertIsNone(history.form_version) + self.assertDictEqual(history.json, 
expected_json) + self.assertEqual(history.created_by, self.user) + + def test_invalid_entity(self): + """Invalid Entity is handled""" + request = self.factory.put("/", data={}, format="json", **self.extra) + response = self.view(request, pk=self.entity_list.pk, entity_pk=sys.maxsize) + self.assertEqual(response.status_code, 404) + + def test_patch_label(self): + """Patching label only works""" + data = {"label": "Patched label"} + request = self.factory.patch("/", data=data, format="json", **self.extra) + response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) + self.entity.refresh_from_db() + self.assertEqual(response.status_code, 200) + expected_data = { + "id": self.entity.pk, + "uuid": self.entity.uuid, + "date_created": self.entity.date_created.isoformat().replace("+00:00", "Z"), + "date_modified": self.entity.date_modified.isoformat().replace( + "+00:00", "Z" + ), + "data": { + **self.entity.json, + "label": "Patched label", + }, + } + self.assertDictEqual(response.data, expected_data) + + def test_patch_data(self): + """Patch data only works""" + data = {"data": {"species": "mora"}} + request = self.factory.patch("/", data=data, format="json", **self.extra) + response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) + self.entity.refresh_from_db() + self.assertEqual(response.status_code, 200) + expected_data = { + "id": self.entity.pk, + "uuid": self.entity.uuid, + "date_created": self.entity.date_created.isoformat().replace("+00:00", "Z"), + "date_modified": self.entity.date_modified.isoformat().replace( + "+00:00", "Z" + ), + "data": { + **self.entity.json, + "species": "mora", + }, + } + self.assertDictEqual(response.data, expected_data) + + def test_label_empty(self): + """Label must be a non-empty string""" + # Empty string + data = {"label": ""} + request = self.factory.patch("/", data=data, format="json", **self.extra) + response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) + 
self.assertEqual(response.status_code, 400) + # Null + data = {"label": None} + request = self.factory.patch("/", data=data, format="json", **self.extra) + response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) + self.assertEqual(response.status_code, 400) + + def test_unset_property(self): + """Unsetting a property value works""" + data = {"data": {"species": ""}} + request = self.factory.patch("/", data=data, format="json", **self.extra) + response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) + self.entity.refresh_from_db() + self.assertEqual(response.status_code, 200) + expected_data = { + "id": self.entity.pk, + "uuid": "dbee4c32-a922-451c-9df7-42f40bf78f48", + "date_created": self.entity.date_created.isoformat().replace("+00:00", "Z"), + "date_modified": self.entity.date_modified.isoformat().replace( + "+00:00", "Z" + ), + "data": { + "geometry": "-1.286905 36.772845 0 0", + "circumference_cm": 300, + "label": "300cm purpleheart", + }, + } + self.assertDictEqual(response.data, expected_data) + + def test_invalid_property(self): + """A property that does not exist in the EntityList fails""" + data = {"data": {"foo": "bar"}} + + self.assertTrue("foo" not in self.entity_list.properties) + + request = self.factory.patch("/", data=data, format="json", **self.extra) + response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) + self.assertEqual(response.status_code, 400) + + def test_anonymous_user(self): + """Anonymous user cannot update Entity""" + # Anonymous user cannot update private Entity + request = self.factory.patch("/", data={}, format="json") + response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) + self.assertEqual(response.status_code, 404) + # Anonymous user cannot update public Entity + self.project.shared = True + self.project.save() + request = self.factory.patch("/", data={}, format="json") + response = self.view(request, pk=self.entity_list.pk, 
entity_pk=self.entity.pk) + self.assertEqual(response.status_code, 401) + + def test_object_permissions(self): + """User must have update level permissions""" + data = {"data": {"species": "mora"}} + alice_data = { + "username": "alice", + "email": "aclie@example.com", + "password1": "password12345", + "password2": "password12345", + "first_name": "Alice", + "last_name": "Hughes", + } + alice_profile = self._create_user_profile(alice_data) + extra = {"HTTP_AUTHORIZATION": f"Token {alice_profile.user.auth_token}"} + + for role in ROLES: + ShareProject(self.project, "alice", role).save() + request = self.factory.patch("/", data=data, format="json", **extra) + response = self.view( + request, pk=self.entity_list.pk, entity_pk=self.entity.pk + ) + + if role not in ["owner", "manager"]: + self.assertEqual(response.status_code, 404) + + else: + self.assertEqual(response.status_code, 200) + + def test_deleted_entity(self): + """Deleted Entity cannot be updated""" + self.entity.deleted_at = timezone.now() + self.entity.save() + request = self.factory.patch("/", data={}, format="json", **self.extra) + response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) + self.assertEqual(response.status_code, 404) + + def test_invalid_entity_list(self): + """Invalid EntityList is handled""" + request = self.factory.patch("/", data={}, format="json", **self.extra) + response = self.view(request, pk=sys.maxsize, entity_pk=self.entity.pk) + self.assertEqual(response.status_code, 404) + + +class DeleteEntityTestCase(TestAbstractViewSet): + """Tests for delete Entity""" + + def setUp(self): + super().setUp() + + self.view = EntityListViewSet.as_view({"delete": "entities"}) + self._create_entity() + OwnerRole.add(self.user, self.entity_list) + + @patch("django.utils.timezone.now") + def test_delete(self, mock_now): + """Delete Entity works""" + self.entity_list.refresh_from_db() + self.assertEqual(self.entity_list.num_entities, 1) + date = datetime(2024, 6, 11, 14, 9, 
0, tzinfo=timezone.utc) + mock_now.return_value = date + request = self.factory.delete("/", **self.extra) + response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) + self.entity.refresh_from_db() + self.assertEqual(response.status_code, 204) + self.assertEqual(self.entity.deleted_at, date) + self.assertEqual(self.entity.deleted_by, self.user) + self.entity_list.refresh_from_db() + self.assertEqual(self.entity_list.num_entities, 0) + self.assertEqual(self.entity_list.last_entity_update_time, date) + + def test_invalid_entity(self): + """Invalid Entity is handled""" + request = self.factory.delete("/", **self.extra) + response = self.view(request, pk=self.entity_list.pk, entity_pk=sys.maxsize) + self.assertEqual(response.status_code, 404) + + def test_invalid_entity_list(self): + """Invalid EntityList is handled""" + request = self.factory.delete("/", **self.extra) + response = self.view(request, pk=sys.maxsize, entity_pk=self.entity.pk) + self.assertEqual(response.status_code, 404) + + def test_entity_already_deleted(self): + """Deleted Entity cannot be deleted""" + self.entity.deleted_at = timezone.now() + self.entity.save() + request = self.factory.delete("/", **self.extra) + response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) + self.assertEqual(response.status_code, 404) + + def test_anonymous_user(self): + """Anonymous user cannot delete Entity""" + # Anonymous user cannot delete private Entity + request = self.factory.delete("/") + response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) + self.assertEqual(response.status_code, 404) + # Anonymous user cannot delete public Entity + self.project.shared = True + self.project.save() + response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) + self.assertEqual(response.status_code, 401) + + def test_object_permissions(self): + """User must have delete level permissions""" + alice_data = { + "username": "alice", + 
"email": "aclie@example.com", + "password1": "password12345", + "password2": "password12345", + "first_name": "Alice", + "last_name": "Hughes", + } + alice_profile = self._create_user_profile(alice_data) + extra = {"HTTP_AUTHORIZATION": f"Token {alice_profile.user.auth_token}"} + + def restore_entity(): + self.entity.deleted_at = None + self.entity.deleted_by = None + self.entity.save() + + for role in ROLES: + restore_entity() + ShareProject(self.project, "alice", role).save() + request = self.factory.delete("/", **extra) + response = self.view( + request, pk=self.entity_list.pk, entity_pk=self.entity.pk + ) + + if role not in ["owner", "manager"]: + self.assertEqual(response.status_code, 404) + + else: + self.assertEqual(response.status_code, 204) diff --git a/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py index 4d3bb0eafc..6ad86fed90 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py @@ -969,15 +969,6 @@ def test_retrieve_xform_media_entity_list_dataset(self): """EntityList dataset is returned""" # Publish registration form and create "trees" Entitylist dataset self._publish_registration_form(self.user) - # Make submission to trees_registration form - submission_path = os.path.join( - self.main_directory, - "fixtures", - "entities", - "instances", - "trees_registration.xml", - ) - self._make_submission(submission_path) entity_list = EntityList.objects.get(name="trees") metadata = MetaData.objects.create( content_object=self.xform, diff --git a/onadata/apps/api/tests/viewsets/test_xform_submission_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_submission_viewset.py index bb5500f1a6..04c4667931 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_submission_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_submission_viewset.py @@ -20,7 +20,7 @@ add_uuid_to_submission_xml, ) from 
onadata.apps.api.viewsets.xform_submission_viewset import XFormSubmissionViewSet -from onadata.apps.logger.models import Attachment, Instance, XForm, Entity +from onadata.apps.logger.models import Attachment, Instance, XForm from onadata.apps.restservice.models import RestService from onadata.apps.restservice.services.textit import ServiceDefinition from onadata.libs.permissions import DataEntryRole @@ -1320,71 +1320,3 @@ def test_edit_submission_sent_to_rapidpro(self, mock_send): new_uuid = "6b2cc313-fc09-437e-8139-fcd32f695d41" instance = Instance.objects.get(uuid=new_uuid) mock_send.assert_called_once_with(rest_service.service_url, instance) - - def test_create_entity(self): - """An Entity is created for if the form is a RegistrationForm""" - self.xform = self._publish_registration_form(self.user) - submission_path = os.path.join( - self.main_directory, - "fixtures", - "entities", - "instances", - "trees_registration.xml", - ) - - with open(submission_path, "rb") as sf: - data = {"xml_submission_file": sf} - request = self.factory.post("/submission", data) - response = self.view(request) - self.assertEqual(response.status_code, 401) - auth = DigestAuth("bob", "bobbob") - request.META.update(auth(request.META, response)) - response = self.view(request, username=self.user.username) - self.assertContains(response, "Successful submission", status_code=201) - self.assertEqual(Instance.objects.count(), 1) - self.assertEqual(Entity.objects.count(), 1) - instance = Instance.objects.first() - entity = Entity.objects.first() - self.assertEqual(entity.registration_form.xform, self.xform) - self.assertEqual(entity.xml, instance.xml) - expected_json = { - "formhub/uuid": "d156a2dce4c34751af57f21ef5c4e6cc", - "geometry": "-1.286905 36.772845 0 0", - "species": "purpleheart", - "circumference_cm": 300, - "meta/instanceID": "uuid:9d3f042e-cfec-4d2a-8b5b-212e3b04802b", - "meta/instanceName": "300cm purpleheart", - "meta/entity/label": "300cm purpleheart", - "_xform_id_string": 
"trees_registration", - "_version": "2022110901", - "_id": entity.pk, - } - self.assertEqual(entity.json, expected_json) - self.assertEqual(entity.uuid, "dbee4c32-a922-451c-9df7-42f40bf78f48") - - def test_registration_form_inactive(self): - """When the RegistrationForm is inactive, Entity should not be created""" - self.xform = self._publish_registration_form(self.user) - registration_form = self.xform.registration_forms.first() - # deactivate registration form - registration_form.is_active = False - registration_form.save() - submission_path = os.path.join( - self.main_directory, - "fixtures", - "entities", - "instances", - "trees_registration.xml", - ) - - with open(submission_path, "rb") as sf: - data = {"xml_submission_file": sf} - request = self.factory.post("/submission", data) - response = self.view(request) - self.assertEqual(response.status_code, 401) - auth = DigestAuth("bob", "bobbob") - request.META.update(auth(request.META, response)) - response = self.view(request, username=self.user.username) - self.assertContains(response, "Successful submission", status_code=201) - self.assertEqual(Instance.objects.count(), 1) - self.assertEqual(Entity.objects.count(), 0) diff --git a/onadata/apps/api/tests/viewsets/test_xform_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_viewset.py index 65d4a5dce7..2fcee5a6a2 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_viewset.py @@ -3567,7 +3567,7 @@ def test_failed_form_publishing_after_maximum_retries( self.assertEqual(response.status_code, 202) self.assertEqual(response.data, error_message) - @flaky(max_runs=3) + @flaky(max_runs=8) def test_survey_preview_endpoint(self): view = XFormViewSet.as_view({"post": "survey_preview", "get": "survey_preview"}) diff --git a/onadata/apps/api/urls/tests/test_v2_urls.py b/onadata/apps/api/urls/tests/test_v2_urls.py new file mode 100644 index 0000000000..e75dc410d2 --- /dev/null +++ 
b/onadata/apps/api/urls/tests/test_v2_urls.py @@ -0,0 +1,68 @@ +from onadata.apps.api.tests.viewsets.test_abstract_viewset import ( + TestAbstractViewSet as TestBase, +) + +from rest_framework.test import APIClient + + +class TestAbstractViewSet(TestBase): + """Base class for test cases""" + + def setUp(self): + super().setUp() + + self.client = APIClient() + self.client.credentials(HTTP_AUTHORIZATION=f"Token {self.user.auth_token}") + + +class EntityListTestCase(TestAbstractViewSet): + """Entity list, create tests""" + + def setUp(self): + super().setUp() + + self._create_entity() + self.url = f"/api/v2/entity-lists/{self.entity_list.pk}/entities" + + def test_get(self): + """GET list of Entities""" + response = self.client.get(self.url) + self.assertEqual(response.status_code, 200) + + def test_create(self): + """POST Entity""" + response = self.client.post(self.url) + self.assertEqual(response.status_code, 405) + + +class EntityDetailTestCase(TestAbstractViewSet): + """Entity retrieve, update, partial_update, destroy tests""" + + def setUp(self): + super().setUp() + + self._create_entity() + self.data = {"data": {"species": "mora"}} + self.url = ( + f"/api/v2/entity-lists/{self.entity_list.pk}/entities/{self.entity.pk}" + ) + + def test_get(self): + """GET Entity""" + response = self.client.get(self.url) + self.assertEqual(response.status_code, 200) + + def test_patch(self): + """PATCH Entity""" + response = self.client.patch(self.url, data=self.data, format="json") + self.assertEqual(response.status_code, 200) + + def test_put(self): + """PUT Entity""" + response = self.client.put(self.url, data=self.data, format="json") + self.assertEqual(response.status_code, 200) + + def test_delete(self): + """DELETE Entity""" + response = self.client.delete(self.url) + self.assertEqual(response.status_code, 204) diff --git a/onadata/apps/api/viewsets/entity_list_viewset.py b/onadata/apps/api/viewsets/entity_list_viewset.py index 9b863eb366..7812f98e86 100644 --- 
a/onadata/apps/api/viewsets/entity_list_viewset.py +++ b/onadata/apps/api/viewsets/entity_list_viewset.py @@ -1,16 +1,20 @@ +from django.shortcuts import get_object_or_404 + +from rest_framework import status from rest_framework.decorators import action -from rest_framework.permissions import AllowAny from rest_framework.response import Response from rest_framework.viewsets import ReadOnlyModelViewSet +from onadata.apps.api.permissions import EntityListPermission from onadata.apps.api.tools import get_baseviewset_class from onadata.apps.logger.models import Entity, EntityList -from onadata.libs.filters import EntityListProjectFilter +from onadata.libs.filters import AnonUserEntityListFilter, EntityListProjectFilter from onadata.libs.mixins.cache_control_mixin import CacheControlMixin from onadata.libs.mixins.etags_mixin import ETagsMixin from onadata.libs.pagination import StandardPageNumberPagination from onadata.libs.serializers.entity_serializer import ( + EntityArraySerializer, EntitySerializer, EntityListSerializer, EntityListDetailSerializer, @@ -28,27 +32,18 @@ class EntityListViewSet( BaseViewset, ReadOnlyModelViewSet, ): - queryset = EntityList.objects.all().order_by("pk") + queryset = ( + EntityList.objects.all() + .order_by("pk") + .prefetch_related( + "registration_forms", + "follow_up_forms", + ) + ) serializer_class = EntityListSerializer - permission_classes = (AllowAny,) + permission_classes = (EntityListPermission,) pagination_class = StandardPageNumberPagination - filter_backends = (EntityListProjectFilter,) - - def get_queryset(self): - queryset = super().get_queryset() - - if self.request and self.request.user.is_anonymous: - queryset = queryset.filter(project__shared=True) - - if self.action == "retrieve": - # Prefetch related objects to be rendered for performance - # optimization - return queryset.prefetch_related( - "registration_forms", - "follow_up_forms", - ) - - return queryset + filter_backends = (AnonUserEntityListFilter, 
EntityListProjectFilter) def get_serializer_class(self): """Override get_serializer_class""" @@ -56,29 +51,67 @@ def get_serializer_class(self): return EntityListDetailSerializer if self.action == "entities": + if self.kwargs.get("entity_pk") is None: + return EntityArraySerializer + return EntitySerializer return super().get_serializer_class() - @action(methods=["GET"], detail=True) + def get_serializer_context(self): + """Override get_serializer_context""" + context = super().get_serializer_context() + + if self.action == "entities": + context.update({"entity_list": self.get_object()}) + + return context + + @action( + methods=["GET", "PUT", "PATCH", "DELETE"], + detail=True, + url_path="entities(?:/(?P[^/.]+))?", + ) def entities(self, request, *args, **kwargs): - """Returns a list of Entities for a single EntityList""" + """Provides `list`, `retrieve` `update` and `destroy` actions for Entities""" entity_list = self.get_object() - entities_qs = ( + entity_pk = kwargs.get("entity_pk") + + if entity_pk: + method = request.method.upper() + entity = get_object_or_404(Entity, pk=entity_pk, deleted_at__isnull=True) + + if method == "DELETE": + entity.soft_delete(request.user) + + return Response(status=status.HTTP_204_NO_CONTENT) + + if method in ["PUT", "PATCH"]: + serializer = self.get_serializer(entity, data=request.data) + serializer.is_valid(raise_exception=True) + serializer.save() + + return Response(serializer.data) + + serializer = self.get_serializer(entity) + + return Response(serializer.data) + + entity_qs = ( Entity.objects.filter( - registration_form__entity_list=entity_list, + entity_list=entity_list, deleted_at__isnull=True, ) # To improve performance, we specify only the column(s) # we are interested in using .only .only("json").order_by("pk") ) - queryset = self.filter_queryset(entities_qs) - page = self.paginate_queryset(queryset) + page = self.paginate_queryset(entity_qs) if page is not None: serializer = self.get_serializer(page, many=True) 
return self.get_paginated_response(serializer.data) - serializer = self.get_serializer(queryset, many=True) + serializer = self.get_serializer(entity_qs, many=True) + return Response(serializer.data) diff --git a/onadata/apps/logger/migrations/0016_add_entity_entity_list.py b/onadata/apps/logger/migrations/0016_add_entity_entity_list.py new file mode 100644 index 0000000000..f71a4a45a4 --- /dev/null +++ b/onadata/apps/logger/migrations/0016_add_entity_entity_list.py @@ -0,0 +1,24 @@ +# Generated by Django 4.2.11 on 2024-05-30 11:09 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("logger", "0015_entity_entitylist_followupform_registrationform_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="entity", + name="entity_list", + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="entities", + to="logger.entitylist", + ), + ), + ] diff --git a/onadata/apps/logger/migrations/0017_populate_entity_entity_list.py b/onadata/apps/logger/migrations/0017_populate_entity_entity_list.py new file mode 100644 index 0000000000..4cd9de6911 --- /dev/null +++ b/onadata/apps/logger/migrations/0017_populate_entity_entity_list.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.11 on 2024-05-30 11:12 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("logger", "0016_add_entity_entity_list"), + ] + + operations = [ + migrations.RunSQL( + sql="WITH logger_entity_entity_list AS (SELECT logger_entity.id, logger_registrationform.entity_list_id FROM logger_entity INNER JOIN logger_registrationform ON logger_entity.registration_form_id = logger_registrationform.id WHERE logger_entity.entity_list_id IS NULL) UPDATE logger_entity SET entity_list_id = logger_entity_entity_list.entity_list_id FROM logger_entity_entity_list WHERE logger_entity.id = logger_entity_entity_list.id;", + 
reverse_sql="UPDATE logger_entity SET entity_list_id = NULL WHERE entity_list_id IS NOT NULL;", + ) + ] diff --git a/onadata/apps/logger/migrations/0018_entityhistory_entitylistgroupobjectpermission_and_more.py b/onadata/apps/logger/migrations/0018_entityhistory_entitylistgroupobjectpermission_and_more.py new file mode 100644 index 0000000000..484b073e67 --- /dev/null +++ b/onadata/apps/logger/migrations/0018_entityhistory_entitylistgroupobjectpermission_and_more.py @@ -0,0 +1,218 @@ +# Generated by Django 4.2.11 on 2024-06-19 08:30 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion + + +def rename_entity_label_key(apps, schema_editor): + Entity = apps.get_model("logger", "Entity") + entity_qs = Entity.objects.all().iterator(chunk_size=100) + + for entity in entity_qs: + old_key = "meta/entity/label" + + if entity.json.get(old_key): + updated_json = {**entity.json} + updated_json["label"] = entity.json[old_key] + del updated_json[old_key] + entity.json = updated_json + entity.save(update_fields=["json"]) + + +class Migration(migrations.Migration): + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ("auth", "0012_alter_user_first_name_max_length"), + ("logger", "0017_populate_entity_entity_list"), + ] + + operations = [ + migrations.CreateModel( + name="EntityHistory", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("date_created", models.DateTimeField(auto_now_add=True)), + ("date_modified", models.DateTimeField(auto_now=True)), + ("xml", models.TextField(blank=True, null=True)), + ("json", models.JSONField(default=dict)), + ( + "form_version", + models.CharField(blank=True, max_length=255, null=True), + ), + ], + options={ + "abstract": False, + }, + ), + migrations.CreateModel( + name="EntityListGroupObjectPermission", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + 
primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ], + options={ + "abstract": False, + }, + ), + migrations.CreateModel( + name="EntityListUserObjectPermission", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ], + options={ + "abstract": False, + }, + ), + migrations.RemoveField( + model_name="entity", + name="instance", + ), + migrations.RemoveField( + model_name="entity", + name="registration_form", + ), + migrations.RemoveField( + model_name="entity", + name="version", + ), + migrations.RemoveField( + model_name="entity", + name="xml", + ), + migrations.AlterField( + model_name="entity", + name="deleted_by", + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, + ), + ), + migrations.AlterField( + model_name="entity", + name="entity_list", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="entities", + to="logger.entitylist", + ), + ), + migrations.AddField( + model_name="entitylistuserobjectpermission", + name="content_object", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="logger.entitylist" + ), + ), + migrations.AddField( + model_name="entitylistuserobjectpermission", + name="permission", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="auth.permission" + ), + ), + migrations.AddField( + model_name="entitylistuserobjectpermission", + name="user", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), + ), + migrations.AddField( + model_name="entitylistgroupobjectpermission", + name="content_object", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="logger.entitylist" + ), + ), + migrations.AddField( + model_name="entitylistgroupobjectpermission", + name="group", + field=models.ForeignKey( + 
on_delete=django.db.models.deletion.CASCADE, to="auth.group" + ), + ), + migrations.AddField( + model_name="entitylistgroupobjectpermission", + name="permission", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="auth.permission" + ), + ), + migrations.AddField( + model_name="entityhistory", + name="created_by", + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, + ), + ), + migrations.AddField( + model_name="entityhistory", + name="entity", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="history", + to="logger.entity", + ), + ), + migrations.AddField( + model_name="entityhistory", + name="instance", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="entity_history", + to="logger.instance", + ), + ), + migrations.AddField( + model_name="entityhistory", + name="registration_form", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="entity_history", + to="logger.registrationform", + ), + ), + migrations.AlterUniqueTogether( + name="entitylistuserobjectpermission", + unique_together={("user", "permission", "content_object")}, + ), + migrations.AlterUniqueTogether( + name="entitylistgroupobjectpermission", + unique_together={("group", "permission", "content_object")}, + ), + migrations.RunPython( + rename_entity_label_key, reverse_code=migrations.RunPython.noop + ), + ] diff --git a/onadata/apps/logger/models/__init__.py b/onadata/apps/logger/models/__init__.py index 8a39d078e7..debac49c25 100644 --- a/onadata/apps/logger/models/__init__.py +++ b/onadata/apps/logger/models/__init__.py @@ -20,4 +20,4 @@ from onadata.apps.logger.models.entity_list import EntityList # noqa from onadata.apps.logger.models.registration_form import RegistrationForm # noqa from onadata.apps.logger.models.follow_up_form import 
FollowUpForm # noqa -from onadata.apps.logger.models.entity import Entity # noqa +from onadata.apps.logger.models.entity import Entity, EntityHistory # noqa diff --git a/onadata/apps/logger/models/entity.py b/onadata/apps/logger/models/entity.py index 22f5a52644..300592ff96 100644 --- a/onadata/apps/logger/models/entity.py +++ b/onadata/apps/logger/models/entity.py @@ -3,11 +3,12 @@ """ from django.contrib.auth import get_user_model -from django.db import models +from django.db import models, transaction +from django.utils import timezone +from onadata.apps.logger.models.entity_list import EntityList from onadata.apps.logger.models.instance import Instance from onadata.apps.logger.models.registration_form import RegistrationForm -from onadata.apps.logger.xform_instance_parser import get_entity_uuid_from_xml from onadata.libs.models import BaseModel User = get_user_model() @@ -16,35 +17,64 @@ class Entity(BaseModel): """An entity created by a registration form""" - registration_form = models.ForeignKey( - RegistrationForm, - on_delete=models.CASCADE, + entity_list = models.ForeignKey( + EntityList, related_name="entities", + on_delete=models.CASCADE, ) - instance = models.OneToOneField( - Instance, - on_delete=models.SET_NULL, - related_name="entity", - null=True, - blank=True, - ) - xml = models.TextField() json = models.JSONField(default=dict) - version = models.CharField(max_length=255, null=True) uuid = models.CharField(max_length=249, default="", db_index=True) deleted_at = models.DateTimeField(null=True, blank=True) - deleted_by = models.ForeignKey( - User, related_name="deleted_entities", null=True, on_delete=models.SET_NULL - ) + deleted_by = models.ForeignKey(User, null=True, on_delete=models.SET_NULL) def __str__(self) -> str: - return f"{self.pk}|{self.registration_form}" + return f"{self.pk}|{self.entity_list}" + + @transaction.atomic() + def soft_delete(self, deleted_by=None): + """Soft delete Entity""" + if self.deleted_at is None: + deletion_time = 
timezone.now() + self.deleted_at = deletion_time + self.deleted_by = deleted_by + self.save(update_fields=["deleted_at", "deleted_by"]) + self.entity_list.num_entities = models.F("num_entities") - 1 + self.entity_list.last_entity_update_time = deletion_time + self.entity_list.save() + + class Meta(BaseModel.Meta): + app_label = "logger" + - def save(self, *args, **kwargs) -> None: - if self.xml: - self.uuid = get_entity_uuid_from_xml(self.xml) +class EntityHistory(BaseModel): + """Maintains a history of Entity updates - super().save(*args, **kwargs) + An Entity can be created/updated from a form or via API + """ class Meta(BaseModel.Meta): app_label = "logger" + + entity = models.ForeignKey( + Entity, + related_name="history", + on_delete=models.CASCADE, + ) + registration_form = models.ForeignKey( + RegistrationForm, + on_delete=models.CASCADE, + related_name="entity_history", + null=True, + blank=True, + ) + instance = models.ForeignKey( + Instance, + on_delete=models.SET_NULL, + related_name="entity_history", + null=True, + blank=True, + ) + xml = models.TextField(blank=True, null=True) + json = models.JSONField(default=dict) + form_version = models.CharField(max_length=255, null=True, blank=True) + created_by = models.ForeignKey(User, null=True, on_delete=models.SET_NULL) diff --git a/onadata/apps/logger/models/entity_list.py b/onadata/apps/logger/models/entity_list.py index 568f6de8de..d26daef08c 100644 --- a/onadata/apps/logger/models/entity_list.py +++ b/onadata/apps/logger/models/entity_list.py @@ -6,6 +6,8 @@ from django.db import models from django.utils.translation import gettext_lazy as _ +from guardian.models import UserObjectPermissionBase, GroupObjectPermissionBase + from onadata.apps.logger.models.project import Project from onadata.libs.models import BaseModel @@ -58,3 +60,16 @@ def properties(self) -> list[str]: dataset_properties.update(form_properties) return list(dataset_properties) + + +class 
EntityListUserObjectPermission(UserObjectPermissionBase): + """Guardian model to create direct foreign keys.""" + + content_object = models.ForeignKey(EntityList, on_delete=models.CASCADE) + + +# pylint: disable=too-few-public-methods +class EntityListGroupObjectPermission(GroupObjectPermissionBase): + """Guardian model to create direct foreign keys.""" + + content_object = models.ForeignKey(EntityList, on_delete=models.CASCADE) diff --git a/onadata/apps/logger/signals.py b/onadata/apps/logger/signals.py index fcfc6131cc..7848ac60b3 100644 --- a/onadata/apps/logger/signals.py +++ b/onadata/apps/logger/signals.py @@ -2,33 +2,68 @@ """ logger signals module """ +from django.db import transaction +from django.db.models import F from django.db.models.signals import post_save from django.dispatch import receiver -from onadata.apps.logger.models import Entity, Instance, RegistrationForm -from onadata.libs.utils.logger_tools import create_entity as create_new_entity +from onadata.apps.logger.models import Entity, EntityList, Instance, RegistrationForm +from onadata.apps.logger.xform_instance_parser import get_meta_from_xml +from onadata.apps.logger.tasks import set_entity_list_perms_async +from onadata.libs.utils.logger_tools import ( + create_entity_from_instance, + update_entity_from_instance, +) # pylint: disable=unused-argument -@receiver(post_save, sender=Instance, dispatch_uid="create_entity") -def create_entity(sender, instance=Instance | None, created=False, **kwargs): - """Create an Entity if an Instance's form is also RegistrationForm""" - if created and instance: +@receiver(post_save, sender=Instance, dispatch_uid="create_or_update_entity") +def create_or_update_entity(sender, instance, created=False, **kwargs): + """Create or update an Entity after Instance saved""" + if instance: if RegistrationForm.objects.filter( xform=instance.xform, is_active=True ).exists(): + entity_node = get_meta_from_xml(instance.xml, "entity") registration_form = 
RegistrationForm.objects.filter( xform=instance.xform, is_active=True ).first() - create_new_entity(instance, registration_form) - - -@receiver(post_save, sender=Entity, dispatch_uid="update_entity_json") -def update_entity_json(sender, instance=Entity | None, created=False, **kwargs): - """Update and Entity json on creation""" - if created and instance: - json = instance.json - json["_id"] = instance.pk - # Queryset.update ensures the model's save is not called and - # the pre_save and post_save signals aren't sent - Entity.objects.filter(pk=instance.pk).update(json=json) + mutation_success_checks = ["1", "true"] + entity_uuid = entity_node.getAttribute("id") + exists = False + + if entity_uuid is not None: + exists = Entity.objects.filter(uuid=entity_uuid).exists() + + if exists and entity_node.getAttribute("update") in mutation_success_checks: + # Update Entity + update_entity_from_instance(entity_uuid, instance, registration_form) + + elif ( + not exists + and entity_node.getAttribute("create") in mutation_success_checks + ): + # Create Entity + create_entity_from_instance(instance, registration_form) + + +@receiver(post_save, sender=Entity, dispatch_uid="update_entity_dataset") +def update_entity_dataset(sender, instance, created=False, **kwargs): + """Update EntityList when Entity is created or updated""" + if not instance: + return + + entity_list = instance.entity_list + + if created: + entity_list.num_entities = F("num_entities") + 1 + + entity_list.last_entity_update_time = instance.date_modified + entity_list.save() + + +@receiver(post_save, sender=EntityList, dispatch_uid="set_entity_list_perms") +def set_entity_list_perms(sender, instance, created=False, **kwargs): + """Set project permissions to EntityList""" + if created: + transaction.on_commit(lambda: set_entity_list_perms_async.delay(instance.pk)) diff --git a/onadata/apps/logger/tasks.py b/onadata/apps/logger/tasks.py new file mode 100644 index 0000000000..cc09f523e1 --- /dev/null +++ 
b/onadata/apps/logger/tasks.py @@ -0,0 +1,27 @@ +import logging + +from django.db import DatabaseError + +from onadata.apps.logger.models import EntityList +from onadata.celeryapp import app +from onadata.libs.utils.project_utils import set_project_perms_to_object + + +logger = logging.getLogger(__name__) + + +@app.task(retry_backoff=3, autoretry_for=(DatabaseError, ConnectionError)) +def set_entity_list_perms_async(entity_list_id): + """Set permissions for EntityList asynchronously + + Args: + pk (int): Primary key for EntityList + """ + try: + entity_list = EntityList.objects.get(pk=entity_list_id) + + except EntityList.DoesNotExist as err: + logger.exception(err) + return + + set_project_perms_to_object(entity_list, entity_list.project) diff --git a/onadata/apps/logger/tests/models/test_entity.py b/onadata/apps/logger/tests/models/test_entity.py index 791805c7cb..f42073e0e1 100644 --- a/onadata/apps/logger/tests/models/test_entity.py +++ b/onadata/apps/logger/tests/models/test_entity.py @@ -2,10 +2,19 @@ import pytz from datetime import datetime +from unittest.mock import patch -from onadata.apps.logger.models import Entity -from onadata.apps.logger.models.instance import Instance +from django.utils import timezone + +from onadata.apps.logger.models import ( + Entity, + EntityHistory, + EntityList, + Instance, + SurveyType, +) from onadata.apps.main.tests.test_base import TestBase +from onadata.libs.utils.user_auth import get_user_default_project class EntityTestCase(TestBase): @@ -13,29 +22,91 @@ class EntityTestCase(TestBase): def setUp(self): super().setUp() - # Mute signal that creates Entity when Instance is saved - self._mute_post_save_signals([(Instance, "create_entity")]) self.mocked_now = datetime(2023, 11, 8, 13, 17, 0, tzinfo=pytz.utc) - self.xform = self._publish_registration_form(self.user) + self.project = get_user_default_project(self.user) + self.entity_list = EntityList.objects.create(name="trees", project=self.project) - def 
test_creation(self): + @patch("django.utils.timezone.now") + def test_creation(self, mock_now): """We can create an Entity""" - reg_form = self.xform.registration_forms.first() + mock_now.return_value = self.mocked_now entity_json = { - "formhub/uuid": "d156a2dce4c34751af57f21ef5c4e6cc", "geometry": "-1.286905 36.772845 0 0", - "species": "purpleheart", "circumference_cm": 300, - "meta/instanceID": "uuid:9d3f042e-cfec-4d2a-8b5b-212e3b04802b", - "meta/instanceName": "300cm purpleheart", - "meta/entity/label": "300cm purpleheart", - "_xform_id_string": "trees_registration", - "_version": "2022110901", + "label": "300cm purpleheart", } - xml = ( + uuid = "dbee4c32-a922-451c-9df7-42f40bf78f48" + entity = Entity.objects.create( + entity_list=self.entity_list, + json=entity_json, + uuid=uuid, + ) + self.assertEqual(entity.entity_list, self.entity_list) + self.assertEqual(entity.json, entity_json) + self.assertEqual(entity.uuid, uuid) + self.assertEqual(f"{entity}", f"{entity.pk}|{self.entity_list}") + self.assertEqual(entity.date_created, self.mocked_now) + + def test_optional_fields(self): + """Defaults for optional fields are correct""" + entity = Entity.objects.create(entity_list=self.entity_list) + self.assertIsNone(entity.deleted_at) + self.assertIsNone(entity.deleted_by) + self.assertEqual(entity.json, {}) + self.assertEqual(entity.uuid, "") + + @patch("django.utils.timezone.now") + def test_soft_delete(self, mock_now): + """Soft delete works""" + mock_now.return_value = self.mocked_now + entity = Entity.objects.create(entity_list=self.entity_list) + self.entity_list.refresh_from_db() + + self.assertEqual(self.entity_list.num_entities, 1) + self.assertIsNone(entity.deleted_at) + self.assertIsNone(entity.deleted_by) + + entity.soft_delete(self.user) + self.entity_list.refresh_from_db() + entity.refresh_from_db() + + self.assertEqual(self.entity_list.num_entities, 0) + self.assertEqual(self.entity_list.last_entity_update_time, self.mocked_now) + 
self.assertEqual(entity.deleted_at, self.mocked_now) + self.assertEqual(entity.deleted_at, self.mocked_now) + + # Soft deleted item cannot be soft deleted again + deleted_at = timezone.now() + entity2 = Entity.objects.create( + entity_list=self.entity_list, deleted_at=deleted_at + ) + entity2.soft_delete(self.user) + entity2.refresh_from_db() + # deleted_at should not remain unchanged + self.assertEqual(entity2.deleted_at, deleted_at) + + # deleted_by is optional + entity3 = Entity.objects.create(entity_list=self.entity_list) + entity3.soft_delete() + entity2.refresh_from_db() + + self.assertEqual(entity3.deleted_at, self.mocked_now) + self.assertIsNone(entity3.deleted_by) + + +class EntityHistoryTestCase(TestBase): + """Tests for model EntityHistory""" + + def setUp(self): + super().setUp() + self.mocked_now = datetime(2023, 11, 8, 13, 17, 0, tzinfo=pytz.utc) + self.xform = self._publish_registration_form(self.user) + self.entity_list = EntityList.objects.first() + self.entity = Entity.objects.create(entity_list=self.entity_list) + self.xml = ( '' '' + '"http://openrosa.org/xforms" id="trees_registration" version="2022110901">' "d156a2dce4c34751af57f21ef5c4e6cc" "-1.286905 36.772845 0 0" "purpleheart" @@ -50,35 +121,46 @@ def test_creation(self): "" "" ) - instance = Instance.objects.create( - xml=xml, - user=self.user, - xform=self.xform, - version=self.xform.version, - ) - instance.json = instance.get_full_dict() - instance.save() - instance.refresh_from_db() - entity = Entity.objects.create( - registration_form=reg_form, - json={**entity_json}, - version=self.xform.version, - xml=xml, + + @patch("django.utils.timezone.now") + def test_creation(self, mock_now): + """We can create an EntityHistory""" + mock_now.return_value = self.mocked_now + registration_form = self.xform.registration_forms.first() + entity_json = { + "species": "purpleheart", + "geometry": "-1.286905 36.772845 0 0", + "circumference_cm": 300, + "label": "300cm purpleheart", + } + survey_type = 
SurveyType.objects.create(slug="slug-foo") + instance = Instance(xform=self.xform, xml=self.xml, survey_type=survey_type) + # We use bulk_create to avoid calling create_entity signal + Instance.objects.bulk_create([instance]) + instance = Instance.objects.first() + history = EntityHistory.objects.create( + entity=self.entity, + registration_form=registration_form, instance=instance, + xml=self.xml, + json=entity_json, + form_version=self.xform.version, + created_by=self.user, ) - self.assertEqual(entity.registration_form, reg_form) - self.assertEqual(entity.json, {**entity_json, "_id": entity.pk}) - self.assertEqual(entity.version, self.xform.version) - self.assertEqual(entity.xml, xml) - self.assertEqual(entity.instance, instance) - self.assertEqual(entity.uuid, "dbee4c32-a922-451c-9df7-42f40bf78f48") - self.assertEqual(f"{entity}", f"{entity.pk}|{reg_form}") + self.assertEqual(history.entity, self.entity) + self.assertEqual(history.registration_form, registration_form) + self.assertEqual(history.instance, instance) + self.assertEqual(history.xml, self.xml) + self.assertEqual(history.form_version, self.xform.version) + self.assertEqual(history.created_by, self.user) + self.assertEqual(history.date_created, self.mocked_now) def test_optional_fields(self): - """Defaults for optional fields are correct""" - reg_form = self.xform.registration_forms.first() - entity = Entity.objects.create(registration_form=reg_form) - self.assertIsNone(entity.version) - self.assertEqual(entity.json, {"_id": entity.pk}) - self.assertIsNone(entity.instance) - self.assertEqual(entity.xml, "") + """Default for optional fields are correct""" + history = EntityHistory.objects.create(entity=self.entity) + self.assertEqual(history.entity, self.entity) + self.assertIsNone(history.registration_form) + self.assertIsNone(history.instance) + self.assertIsNone(history.xml) + self.assertIsNone(history.form_version) + self.assertIsNone(history.created_by) diff --git 
a/onadata/apps/logger/tests/models/test_entity_list.py b/onadata/apps/logger/tests/models/test_entity_list.py index 35e6c4d21a..ec7c3d9d7a 100644 --- a/onadata/apps/logger/tests/models/test_entity_list.py +++ b/onadata/apps/logger/tests/models/test_entity_list.py @@ -40,6 +40,17 @@ def test_creation(self, mock_now): self.assertEqual(entity_list.date_modified, self.mocked_now) self.assertEqual(entity_list.num_entities, 2) self.assertEqual(entity_list.last_entity_update_time, self.mocked_now) + self.assertTrue( + self.user.has_perms( + [ + "add_entitylist", + "view_entitylist", + "change_entitylist", + "delete_entitylist", + ], + entity_list, + ) + ) def test_name_project_unique_together(self): """No duplicate name and project allowed""" @@ -109,3 +120,9 @@ def test_defaults(self): entity_list = EntityList.objects.create(name="trees", project=self.project) self.assertEqual(entity_list.num_entities, 0) self.assertIsNone(entity_list.last_entity_update_time) + + @patch("onadata.apps.logger.signals.set_entity_list_perms_async.delay") + def test_permissions_applied_async(self, mock_set_perms): + """Permissions are applied asynchronously""" + entity_list = EntityList.objects.create(name="trees", project=self.project) + mock_set_perms.assert_called_once_with(entity_list.pk) diff --git a/onadata/apps/logger/tests/models/test_instance.py b/onadata/apps/logger/tests/models/test_instance.py index 5c2d9db30f..9d4e953fe7 100644 --- a/onadata/apps/logger/tests/models/test_instance.py +++ b/onadata/apps/logger/tests/models/test_instance.py @@ -12,7 +12,14 @@ from django_digest.test import DigestAuth -from onadata.apps.logger.models import Instance, SubmissionReview, XForm +from onadata.apps.logger.models import ( + Entity, + EntityList, + Instance, + RegistrationForm, + SubmissionReview, + XForm, +) from onadata.apps.logger.models.instance import ( get_id_string_from_xml_str, numeric_checker, @@ -27,6 +34,7 @@ SubmissionReviewSerializer, ) from onadata.libs.utils.common_tags import 
MONGO_STRFTIME, SUBMITTED_BY +from onadata.libs.utils.user_auth import get_user_default_project class TestInstance(TestBase): @@ -415,3 +423,585 @@ def test_light_tasks_synchronous(self, mock_json_async): "transport/available_transportation_types_to_referral_facility": "none", }, ) + + def test_create_entity(self): + """An Entity is created from a submission""" + self.project = get_user_default_project(self.user) + xform = self._publish_registration_form(self.user) + xml = ( + '' + '' + "d156a2dce4c34751af57f21ef5c4e6cc" + "-1.286905 36.772845 0 0" + "purpleheart" + "300" + "" + "" + "uuid:9d3f042e-cfec-4d2a-8b5b-212e3b04802b" + "300cm purpleheart" + '' + "" + "" + "" + "" + ) + instance = Instance.objects.create(xml=xml, user=self.user, xform=xform) + + self.assertEqual(Entity.objects.count(), 1) + + entity = Entity.objects.first() + entity_list = EntityList.objects.get(name="trees") + + self.assertEqual(entity.entity_list, entity_list) + + expected_json = { + "species": "purpleheart", + "geometry": "-1.286905 36.772845 0 0", + "circumference_cm": 300, + "label": "300cm purpleheart", + } + + self.assertDictEqual(entity.json, expected_json) + self.assertEqual(entity.uuid, "dbee4c32-a922-451c-9df7-42f40bf78f48") + self.assertEqual(entity.history.count(), 1) + + entity_history = entity.history.first() + registration_form = RegistrationForm.objects.get(xform=xform) + + self.assertEqual(entity_history.registration_form, registration_form) + self.assertEqual(entity_history.instance, instance) + self.assertEqual(entity_history.xml, instance.xml) + self.assertDictEqual(entity_history.json, expected_json) + self.assertEqual(entity_history.form_version, xform.version) + self.assertEqual(entity_history.created_by, instance.user) + + def test_create_entity_false(self): + """An Entity is not created if create_if evaluates to false""" + project = get_user_default_project(self.user) + md = """ + | survey | + | | type | name | label | save_to | + | | geopoint | location | Tree 
location | geometry | + | | select_one species | species | Tree species | species | + | | integer | circumference | Tree circumference in cm | circumference_cm | + | | text | intake_notes | Intake notes | | + | choices | | | | | + | | list_name | name | label | | + | | species | wallaba | Wallaba | | + | | species | mora | Mora | | + | | species | purpleheart | Purpleheart | | + | | species | greenheart | Greenheart | | + | settings | | | | | + | | form_title | form_id | version | instance_name | + | | Trees registration | trees_registration | 2022110901 | concat(${circumference}, "cm ", ${species})| + | entities | | | | | + | | list_name | label | create_if | | + | | trees | concat(${circumference}, "cm ", ${species})| false() | |""" + self._publish_markdown( + md, + self.user, + project, + id_string="trees_registration", + title="Trees registration", + ) + xform = XForm.objects.all().order_by("-pk").first() + xml = ( + '' + '' + "d156a2dce4c34751af57f21ef5c4e6cc" + "-1.286905 36.772845 0 0" + "purpleheart" + "300" + "" + "" + "uuid:9d3f042e-cfec-4d2a-8b5b-212e3b04802b" + "300cm purpleheart" + '' + "" + "" + "" + "" + ) + Instance.objects.create(xml=xml, user=self.user, xform=xform) + + self.assertEqual(Entity.objects.count(), 0) + + def test_create_entity_true(self): + """An Entity is created if create_if evaluates to true""" + project = get_user_default_project(self.user) + md = """ + | survey | + | | type | name | label | save_to | + | | geopoint | location | Tree location | geometry | + | | select_one species | species | Tree species | species | + | | integer | circumference | Tree circumference in cm | circumference_cm | + | | text | intake_notes | Intake notes | | + | choices | | | | | + | | list_name | name | label | | + | | species | wallaba | Wallaba | | + | | species | mora | Mora | | + | | species | purpleheart | Purpleheart | | + | | species | greenheart | Greenheart | | + | settings | | | | | + | | form_title | form_id | version | instance_name | + | 
| Trees registration | trees_registration | 2022110901 | concat(${circumference}, "cm ", ${species})| + | entities | | | | | + | | list_name | label | create_if | | + | | trees | concat(${circumference}, "cm ", ${species})| true() | |""" + self._publish_markdown( + md, + self.user, + project, + id_string="trees_registration", + title="Trees registration", + ) + xform = XForm.objects.all().order_by("-pk").first() + xml = ( + '' + '' + "d156a2dce4c34751af57f21ef5c4e6cc" + "-1.286905 36.772845 0 0" + "purpleheart" + "300" + "" + "" + "uuid:9d3f042e-cfec-4d2a-8b5b-212e3b04802b" + "300cm purpleheart" + '' + "" + "" + "" + "" + ) + Instance.objects.create(xml=xml, user=self.user, xform=xform) + + self.assertEqual(Entity.objects.count(), 1) + + def test_registration_form_inactive(self): + """When the RegistrationForm is inactive, Entity should not be created""" + xform = self._publish_registration_form(self.user) + registration_form = xform.registration_forms.first() + # Deactivate registration form + registration_form.is_active = False + registration_form.save() + xml = ( + '' + '' + "d156a2dce4c34751af57f21ef5c4e6cc" + "-1.286905 36.772845 0 0" + "purpleheart" + "300" + "" + "" + "uuid:9d3f042e-cfec-4d2a-8b5b-212e3b04802b" + "300cm purpleheart" + '' + "" + "" + "" + "" + ) + Instance.objects.create(xml=xml, user=self.user, xform=xform) + + self.assertEqual(Entity.objects.count(), 0) + + def _simulate_existing_entity(self): + if not hasattr(self, "project"): + self.project = get_user_default_project(self.user) + + self.entity_list, _ = EntityList.objects.get_or_create( + name="trees", project=self.project + ) + self.entity = Entity.objects.create( + entity_list=self.entity_list, + json={ + "species": "purpleheart", + "geometry": "-1.286905 36.772845 0 0", + "circumference_cm": 300, + "label": "300cm purpleheart", + }, + uuid="dbee4c32-a922-451c-9df7-42f40bf78f48", + ) + + def test_update_entity(self): + """An Entity is updated from a submission""" + 
self._simulate_existing_entity() + xform = self._publish_entity_update_form(self.user) + xml = ( + '' + '' + "a9caf13e366b44a68f173bbb6746e3d4" + "dbee4c32-a922-451c-9df7-42f40bf78f48" + "30" + "2024-05-28" + "" + "uuid:45d27780-48fd-4035-8655-9332649385bd" + "30cm dbee4c32-a922-451c-9df7-42f40bf78f48" + '' + "" + "" + ) + instance = Instance.objects.create(xml=xml, user=self.user, xform=xform) + # Update XForm is a RegistrationForm + self.assertEqual(RegistrationForm.objects.filter(xform=xform).count(), 1) + # No new Entity created + self.assertEqual(Entity.objects.count(), 1) + + entity = Entity.objects.first() + expected_json = { + "species": "purpleheart", + "geometry": "-1.286905 36.772845 0 0", + "latest_visit": "2024-05-28", + "circumference_cm": 30, + "label": "300cm purpleheart", + } + + self.assertDictEqual(entity.json, expected_json) + + entity_history = entity.history.first() + registration_form = RegistrationForm.objects.get(xform=xform) + + self.assertEqual(entity_history.registration_form, registration_form) + self.assertEqual(entity_history.instance, instance) + self.assertEqual(entity_history.xml, xml) + self.assertDictEqual(entity_history.json, expected_json) + self.assertEqual(entity_history.form_version, xform.version) + self.assertEqual(entity_history.created_by, instance.user) + # New property is part of EntityList properties + self.assertTrue("latest_visit" in entity.entity_list.properties) + + def test_update_entity_label(self): + """An Entity label is updated from a submission""" + # Simulate existing Entity + self._simulate_existing_entity() + # Update Entity via submission + md = """ + | survey | + | | type | name | label | save_to | + | | select_one_from_file trees.csv | tree | Select the tree | | + | | integer | circumference | Tree circumference in cm | circumference_cm | + | | date | today | Today's date | latest_visit | + | settings| | | | | + | | form_title | form _id | version | instance_name | + | | Trees update | trees_update | 
2024050801 | concat(${circumference}, "cm ", ${tree})| + | entities| list_name | entity_id | label | | + | | trees | ${tree} | concat(${circumference}, "cm updated")| | | + """ + self._publish_markdown( + md, + self.user, + self.project, + id_string="trees_update", + title="Trees update", + ) + updating_xform = XForm.objects.all().order_by("-pk").first() + xml = ( + '' + '' + "a9caf13e366b44a68f173bbb6746e3d4" + "dbee4c32-a922-451c-9df7-42f40bf78f48" + "30" + "2024-05-28" + "" + "uuid:45d27780-48fd-4035-8655-9332649385bd" + "30cm dbee4c32-a922-451c-9df7-42f40bf78f48" + '' + "" + "" + "" + "" + ) + Instance.objects.create(xml=xml, user=self.user, xform=updating_xform) + + self.entity.refresh_from_db() + + self.assertEqual( + self.entity.json, + { + "species": "purpleheart", + "geometry": "-1.286905 36.772845 0 0", + "latest_visit": "2024-05-28", + "circumference_cm": 30, + "label": "30cm updated", + }, + ) + + def test_update_entity_false(self): + """Entity not updated if update_if evaluates to false""" + # Simulate existing Entity + self._simulate_existing_entity() + # If expression evaluates to false, Entity should not be updated + md = """ + | survey | + | | type | name | label | save_to | + | | select_one_from_file trees.csv | tree | Select the tree | | + | | integer | circumference | Tree circumference in cm | circumference_cm | + | | date | today | Today's date | latest_visit | + | settings| | | | | + | | form_title | form _id | version | instance_name | + | | Trees update | trees_update | 2024050801 | concat(${circumference}, "cm ", ${tree})| + | entities| list_name | entity_id | update_if | | + | | trees | ${tree} | false() | | + """ + self._publish_markdown( + md, + self.user, + self.project, + id_string="trees_update", + title="Trees update", + ) + updating_xform = XForm.objects.all().order_by("-pk").first() + xml = ( + '' + '' + "a9caf13e366b44a68f173bbb6746e3d4" + "dbee4c32-a922-451c-9df7-42f40bf78f48" + "30" + "2024-05-28" + "" + 
"uuid:45d27780-48fd-4035-8655-9332649385bd" + "30cm dbee4c32-a922-451c-9df7-42f40bf78f48" + '' + "" + "" + ) + Instance.objects.create(xml=xml, user=self.user, xform=updating_xform) + expected_json = self.entity.json + self.entity.refresh_from_db() + + self.assertEqual(self.entity.json, expected_json) + + def test_update_entity_true(self): + """Entity updated if update_if evaluates to true""" + self._simulate_existing_entity() + md = """ + | survey | + | | type | name | label | save_to | + | | select_one_from_file trees.csv | tree | Select the tree | | + | | integer | circumference | Tree circumference in cm | circumference_cm | + | | date | today | Today's date | latest_visit | + | settings| | | | | + | | form_title | form _id | version | instance_name | + | | Trees update | trees_update | 2024050801 | concat(${circumference}, "cm ", ${tree})| + | entities| list_name | entity_id | update_if | | + | | trees | ${tree} | true() | | + """ + self._publish_markdown( + md, + self.user, + self.project, + id_string="trees_update", + title="Trees update", + ) + updating_xform = XForm.objects.all().order_by("-pk").first() + xml = ( + '' + '' + "a9caf13e366b44a68f173bbb6746e3d4" + "dbee4c32-a922-451c-9df7-42f40bf78f48" + "30" + "2024-05-28" + "" + "uuid:45d27780-48fd-4035-8655-9332649385bd" + "30cm dbee4c32-a922-451c-9df7-42f40bf78f48" + '' + "" + "" + ) + Instance.objects.create(xml=xml, user=self.user, xform=updating_xform) + expected_json = { + "species": "purpleheart", + "geometry": "-1.286905 36.772845 0 0", + "latest_visit": "2024-05-28", + "circumference_cm": 30, + "label": "300cm purpleheart", + } + self.entity.refresh_from_db() + + self.assertDictEqual(self.entity.json, expected_json) + + def test_entity_create_update_true(self): + """Both create_if and update_if evaluate to true""" + self.project = get_user_default_project(self.user) + md = """ + | survey | + | | type | name | label | save_to | | + | | select_one_from_file trees.csv | tree | Select the tree | | | + 
| | integer | circumference | Tree circumference in cm | circumference_cm | | + | | date | today | Today's date | latest_visit | | + | settings| | | | | | + | | form_title | form _id | version | instance_name | | + | | Trees update | trees_update | 2024050801 | concat(${circumference}, "cm ", ${tree})| | + | entities| list_name | entity_id | update_if | create_if | label | + | | trees | ${tree} | true() | true() | concat(${circumference}, "cm ", ${tree})| + """ + self._publish_markdown( + md, + self.user, + self.project, + id_string="trees_update", + title="Trees update", + ) + xform = XForm.objects.all().order_by("-pk").first() + xml = ( + '' + '' + "a9caf13e366b44a68f173bbb6746e3d4" + "dbee4c32-a922-451c-9df7-42f40bf78f48" + "30" + "2024-05-28" + "" + "uuid:45d27780-48fd-4035-8655-9332649385bd" + "30cm dbee4c32-a922-451c-9df7-42f40bf78f48" + '' + "" + "" + "" + "" + ) + + # If Entity, does not exist, we create one + Instance.objects.create(xml=xml, user=self.user, xform=xform) + + self.assertEqual(Entity.objects.count(), 1) + + entity = Entity.objects.first() + expected_json = { + "latest_visit": "2024-05-28", + "circumference_cm": 30, + "label": "30cm dbee4c32-a922-451c-9df7-42f40bf78f48", + } + self.assertDictEqual(entity.json, expected_json) + + # If Entity exists, we update + Instance.objects.all().delete() + Entity.objects.all().delete() + # Simulate existsing Entity + self._simulate_existing_entity() + Instance.objects.create(xml=xml, user=self.user, xform=xform) + expected_json = { + "species": "purpleheart", + "geometry": "-1.286905 36.772845 0 0", + "latest_visit": "2024-05-28", + "circumference_cm": 30, + "label": "30cm dbee4c32-a922-451c-9df7-42f40bf78f48", + } + self.entity.refresh_from_db() + # No new Entity should be created + self.assertEqual(Entity.objects.count(), 1) + self.assertDictEqual(self.entity.json, expected_json) + + def test_update_entity_via_instance_update(self): + """Entity is updated if Instance from updating form is updated""" + 
self._simulate_existing_entity() + xform = self._publish_entity_update_form(self.user) + # Update Entity via Instance creation + xml = ( + '' + '' + "a9caf13e366b44a68f173bbb6746e3d4" + "dbee4c32-a922-451c-9df7-42f40bf78f48" + "30" + "2024-05-28" + "" + "uuid:45d27780-48fd-4035-8655-9332649385bd" + "30cm dbee4c32-a922-451c-9df7-42f40bf78f48" + '' + "" + "" + ) + instance = Instance.objects.create(xml=xml, user=self.user, xform=xform) + entity = Entity.objects.first() + expected_json = { + "species": "purpleheart", + "geometry": "-1.286905 36.772845 0 0", + "latest_visit": "2024-05-28", + "circumference_cm": 30, + "label": "300cm purpleheart", + } + + self.assertDictEqual(entity.json, expected_json) + # Update Entity via Instance update + instance = Instance.objects.get( + pk=instance.pk + ) # Get anew from DB to update Instance._parser + xml = ( + '' + '' + "a9caf13e366b44a68f173bbb6746e3d4" + "dbee4c32-a922-451c-9df7-42f40bf78f48" + "32" # Update to 32 + "2024-06-19" + "" + "uuid:fa6bcdce-e344-4dbd-9227-0f1cbdddb09c" + "32cm dbee4c32-a922-451c-9df7-42f40bf78f48" + '' + "" + "" + "uuid:45d27780-48fd-4035-8655-9332649385bd" + "" + "" + ) + instance.xml = xml + instance.uuid = "fa6bcdce-e344-4dbd-9227-0f1cbdddb09c" + instance.save() + entity.refresh_from_db() + expected_json = { + "species": "purpleheart", + "geometry": "-1.286905 36.772845 0 0", + "latest_visit": "2024-06-19", + "circumference_cm": 32, + "label": "32cm purpleheart", + } + self.assertDictEqual(entity.json, expected_json) + + def test_create_entity_exists(self): + """Attempting to create an Entity that already exists fails""" + self._simulate_existing_entity() + xform = self._publish_registration_form(self.user) + # Attempt to create an Entity whose uuid exists, with different data + xml = ( + '' + '' + "d156a2dce4c34751af57f21ef5c4e6cc" + "-1.286905 36.772845 0 0" + "wallaba" + "54" + "" + "" + "uuid:9d3f042e-cfec-4d2a-8b5b-212e3b04802b" + "54cm wallaba" + '' + "" + "" + "" + "" + ) + 
Instance.objects.create(xml=xml, user=self.user, xform=xform) + # No new Entity should be created + self.assertEqual(Entity.objects.count(), 1) + # Existing Entity unchanged + self.entity.refresh_from_db() + self.assertEqual( + self.entity.json, + { + "species": "purpleheart", + "geometry": "-1.286905 36.772845 0 0", + "circumference_cm": 300, + "label": "300cm purpleheart", + }, + ) diff --git a/onadata/apps/logger/tests/models/test_registration_form.py b/onadata/apps/logger/tests/models/test_registration_form.py index 402f9c0afe..a22dd3ceaa 100644 --- a/onadata/apps/logger/tests/models/test_registration_form.py +++ b/onadata/apps/logger/tests/models/test_registration_form.py @@ -9,7 +9,6 @@ from onadata.apps.main.tests.test_base import TestBase from onadata.apps.logger.models import RegistrationForm, EntityList, XFormVersion -from onadata.apps.viewer.models import DataDictionary class RegistrationFormTestCase(TestBase): @@ -19,41 +18,35 @@ def setUp(self): super().setUp() self.mocked_now = datetime(2023, 11, 8, 13, 17, 0, tzinfo=pytz.utc) + self.xform = self._publish_registration_form(self.user) + self.entity_list = EntityList.objects.first() + # Delete RegistrationForm created when form is published + RegistrationForm.objects.all().delete() @patch("django.utils.timezone.now") def test_creation(self, mock_now): """We can create a RegistrationForm""" mock_now.return_value = self.mocked_now - self._mute_post_save_signals( - [(DataDictionary, "create_registration_form_datadictionary")] - ) - self.xform = self._publish_registration_form(self.user) - entity_list = EntityList.objects.create(name="trees", project=self.project) reg_form = RegistrationForm.objects.create( - entity_list=entity_list, + entity_list=self.entity_list, xform=self.xform, is_active=True, ) self.assertEqual(RegistrationForm.objects.count(), 1) self.assertEqual(f"{reg_form}", f"{reg_form.xform}|trees") self.assertEqual(reg_form.xform, self.xform) - self.assertEqual(reg_form.entity_list, 
entity_list) + self.assertEqual(reg_form.entity_list, self.entity_list) self.assertEqual(reg_form.date_created, self.mocked_now) self.assertEqual(reg_form.date_modified, self.mocked_now) self.assertTrue(reg_form.is_active) # Related names are correct - self.assertEqual(entity_list.registration_forms.count(), 1) + self.assertEqual(self.entity_list.registration_forms.count(), 1) self.assertEqual(self.xform.registration_forms.count(), 1) def test_get_save_to(self): """Method `get_save_to` works correctly""" - self._mute_post_save_signals( - [(DataDictionary, "create_registration_form_datadictionary")] - ) - self.xform = self._publish_registration_form(self.user) - entity_list = EntityList.objects.create(name="trees", project=self.project) form = RegistrationForm.objects.create( - entity_list=entity_list, + entity_list=self.entity_list, xform=self.xform, ) self.assertEqual( @@ -149,31 +142,21 @@ def test_get_save_to(self): def test_entity_list_xform_unique(self): """No duplicates allowed for existing entity_list and xform""" - self._mute_post_save_signals( - [(DataDictionary, "create_registration_form_datadictionary")] - ) - self.xform = self._publish_registration_form(self.user) - entity_list = EntityList.objects.create(name="trees", project=self.project) RegistrationForm.objects.create( - entity_list=entity_list, + entity_list=self.entity_list, xform=self.xform, ) with self.assertRaises(IntegrityError): RegistrationForm.objects.create( - entity_list=entity_list, + entity_list=self.entity_list, xform=self.xform, ) def test_optional_fields(self): """Defaults for optional fields correct""" - self._mute_post_save_signals( - [(DataDictionary, "create_registration_form_datadictionary")] - ) - self.xform = self._publish_registration_form(self.user) - entity_list = EntityList.objects.create(name="trees", project=self.project) reg_form = RegistrationForm.objects.create( - entity_list=entity_list, + entity_list=self.entity_list, xform=self.xform, ) 
self.assertTrue(reg_form.is_active) diff --git a/onadata/apps/logger/tests/test_publish_xls.py b/onadata/apps/logger/tests/test_publish_xls.py index 808ba5c659..20fdf2d8e4 100644 --- a/onadata/apps/logger/tests/test_publish_xls.py +++ b/onadata/apps/logger/tests/test_publish_xls.py @@ -78,80 +78,21 @@ def test_xform_hash(self): self.assertFalse(self.xform.hash == "" or self.xform.hash is None) self.assertFalse(self.xform.hash == xform_old_hash) - def test_xform_with_entities(self): - md=""" - | survey | | | | - | | type | name | label | - | | text | a | A | - | entities | | | | - | | dataset | label | | - | | trees | a | | - """ - self._create_user_and_login() - self.xform = self._publish_markdown(md, self.user) - # assert has entities namespace - self.assertIn( - 'xmlns:entities="http://www.opendatakit.org/xforms/entities"', - self.xform.xml - ) - # assert has entities version - self.assertIn( - 'entities:entities-version="2022.1.0"', - self.xform.xml - ) - - def test_xform_with_entities_save_to(self): - md=""" - | survey | | | | | - | | type | name | label | save_to | - | | text | a | A | foo | - | entities | | | | | - | | dataset | label | | | - | | trees | a | | | - """ - self._create_user_and_login() - self.xform = self._publish_markdown(md, self.user) - # assert has save_to column in xml - self.assertIn( - 'entities:saveto="foo"', - self.xform.xml - ) - - def test_xform_create_if_in_entities(self): - md=""" - | survey | | | | - | | type | name | label | - | | text | a | A | - | entities | | | | - | | dataset | create_if | label | - | | trees | string-length(a) > 3 | a | - """ - self._create_user_and_login() - self.xform = self._publish_markdown(md, self.user) - # assert has create_if entity expression - self.assertIn( - 'calculate="string-length(a) > 3"', - self.xform.xml - ) - self.assertIn( - '', - self.xform.xml - ) - def test_xform_big_image_invalid_if_no_image(self): - md=""" + md = """ | survey | | | | | | type | name | media::big-image | | | text | c | 
m.png | """ self._create_user_and_login() - msg = ("To use big-image, you must also specify" - " an image for the survey element") + msg = ( + "To use big-image, you must also specify" " an image for the survey element" + ) with self.assertRaisesMessage(PyXFormError, msg): self.xform = self._publish_markdown(md, self.user) def test_single_entity_allowed_per_form(self): - md=""" + md = """ | survey | | | | | | type | name | label | | | text | a | A | @@ -161,9 +102,11 @@ def test_single_entity_allowed_per_form(self): | | shovels | | | """ self._create_user_and_login() - msg = ("Currently, you can only declare a single entity per form." - " Please make sure your entities sheet only declares" - " one entity.") + msg = ( + "Currently, you can only declare a single entity per form." + " Please make sure your entities sheet only declares" + " one entity." + ) with self.assertRaisesMessage(PyXFormError, msg): self.xform = self._publish_markdown(md, self.user) diff --git a/onadata/apps/logger/tests/test_tasks.py b/onadata/apps/logger/tests/test_tasks.py new file mode 100644 index 0000000000..ed75102c0e --- /dev/null +++ b/onadata/apps/logger/tests/test_tasks.py @@ -0,0 +1,63 @@ +"""Tests for module onadata.apps.logger.tasks""" + +import sys + +from unittest.mock import patch + +from celery.exceptions import Retry + +from django.db import DatabaseError + +from onadata.apps.logger.models import EntityList +from onadata.apps.logger.tasks import set_entity_list_perms_async +from onadata.apps.main.tests.test_base import TestBase +from onadata.libs.utils.user_auth import get_user_default_project + + +@patch("onadata.apps.logger.tasks.set_project_perms_to_object") +class SetEntityListPermsAsyncTestCase(TestBase): + """Tests for set_entity_list_perms_async""" + + def setUp(self): + super().setUp() + + self.project = get_user_default_project(self.user) + self.entity_list = EntityList.objects.create(name="trees", project=self.project) + + def test_set_perms(self, mock_set_perms): + 
"""Permissions are applied""" + set_entity_list_perms_async.delay(self.entity_list.pk) + mock_set_perms.assert_called_once_with(self.entity_list, self.project) + + @patch("onadata.apps.logger.tasks.set_entity_list_perms_async.retry") + def test_retry_connection_error(self, mock_retry, mock_set_perms): + """ConnectionError exception is retried""" + mock_retry.side_effect = Retry + mock_set_perms.side_effect = ConnectionError + + set_entity_list_perms_async.delay(self.entity_list.pk) + + self.assertTrue(mock_retry.called) + + _, kwargs = mock_retry.call_args_list[0] + self.assertTrue(isinstance(kwargs["exc"], ConnectionError)) + + @patch("onadata.apps.logger.tasks.set_entity_list_perms_async.retry") + def test_retry_database_error(self, mock_retry, mock_set_perms): + """DatabaseError exception is retried""" + mock_retry.side_effect = Retry + mock_set_perms.side_effect = DatabaseError + + set_entity_list_perms_async.delay(self.entity_list.pk) + + self.assertTrue(mock_retry.called) + + _, kwargs = mock_retry.call_args_list[0] + self.assertTrue(isinstance(kwargs["exc"], DatabaseError)) + + @patch("onadata.apps.logger.tasks.logger.exception") + def test_invalid_pk(self, mock_logger, mock_set_perms): + """Invalid EntityList primary key is handled""" + set_entity_list_perms_async.delay(sys.maxsize) + mock_set_perms.assert_not_called() + mock_logger.assert_called_once() diff --git a/onadata/apps/main/tests/fixtures/entities/instances/trees_registration.xml b/onadata/apps/main/tests/fixtures/entities/instances/trees_registration.xml deleted file mode 100644 index c684aa01de..0000000000 --- a/onadata/apps/main/tests/fixtures/entities/instances/trees_registration.xml +++ /dev/null @@ -1,28 +0,0 @@ - - - \n - - \n - d156a2dce4c34751af57f21ef5c4e6cc - \n - - \n - -1.286905 36.772845 0 0 - \n - purpleheart - \n - 300 - \n - - \n - - \n - uuid:9d3f042e-cfec-4d2a-8b5b-212e3b04802b - \n - 300cm purpleheart - - - - - \n - \ No newline at end of file diff --git 
a/onadata/apps/main/tests/fixtures/entities/instances/trees_registration_2.xml b/onadata/apps/main/tests/fixtures/entities/instances/trees_registration_2.xml deleted file mode 100644 index a606bcc6d3..0000000000 --- a/onadata/apps/main/tests/fixtures/entities/instances/trees_registration_2.xml +++ /dev/null @@ -1,17 +0,0 @@ - - - - d156a2dce4c34751af57f21ef5c4e6cc - - -1.305796 36.791849 0 0 - wallaba - 100 - Looks malnourished - - uuid:648e4106-2224-4bd7-8bf9-859102fc6fae - 100cm wallaba - - - - - \ No newline at end of file diff --git a/onadata/apps/main/tests/fixtures/transportation/headers.json b/onadata/apps/main/tests/fixtures/transportation/headers.json index e5e65e8498..dd909450fd 100644 --- a/onadata/apps/main/tests/fixtures/transportation/headers.json +++ b/onadata/apps/main/tests/fixtures/transportation/headers.json @@ -19,6 +19,7 @@ "transport/loop_over_transport_types_frequency/lorry/frequency_to_referral_facility", "transport/loop_over_transport_types_frequency/motorbike/frequency_to_referral_facility", "transport/loop_over_transport_types_frequency/taxi/frequency_to_referral_facility", + "transport/loop_over_transport_types_frequency/other/frequency_to_referral_facility", "image1", "meta/instanceID", "_id", diff --git a/onadata/apps/main/tests/fixtures/transportation/headers_csv.json b/onadata/apps/main/tests/fixtures/transportation/headers_csv.json index 3113b3245d..650bb42616 100644 --- a/onadata/apps/main/tests/fixtures/transportation/headers_csv.json +++ b/onadata/apps/main/tests/fixtures/transportation/headers_csv.json @@ -18,6 +18,7 @@ "transport/loop_over_transport_types_frequency/keke_pepe/frequency_to_referral_facility", "transport/loop_over_transport_types_frequency/lorry/frequency_to_referral_facility", "transport/loop_over_transport_types_frequency/motorbike/frequency_to_referral_facility", + "transport/loop_over_transport_types_frequency/other/frequency_to_referral_facility", 
"transport/loop_over_transport_types_frequency/taxi/frequency_to_referral_facility", "image1", "meta/instanceID", diff --git a/onadata/apps/main/tests/fixtures/transportation/transportation.xml b/onadata/apps/main/tests/fixtures/transportation/transportation.xml index a7558dc5e6..49ddf5347c 100644 --- a/onadata/apps/main/tests/fixtures/transportation/transportation.xml +++ b/onadata/apps/main/tests/fixtures/transportation/transportation.xml @@ -1,5 +1,5 @@ - + transportation_2011_07_25 @@ -39,6 +39,9 @@ + + + @@ -47,19 +50,84 @@ + + + + ambulance + + + + bicycle + + + + boat_canoe + + + + bus + + + + donkey_mule_cart + + + + keke_pepe + + + + lorry + + + + motorbike + + + + taxi + + + + other + + + + + + + + daily + + + + weekly + + + + other + + + + dont_know + + + + - - - - - - - - - - + + + + + + + + + + + - + @@ -68,46 +136,10 @@ @@ -117,198 +149,100 @@ - - - daily - - - - weekly - - - - other - - - - dont_know - + + + - - - daily - - - - weekly - - - - other - - - - dont_know - + + + - - - daily - - - - weekly - - - - other - - - - dont_know - + + + - - - daily - - - - weekly - - - - other - - - - dont_know - + + + - - - daily - - - - weekly - - - - other - - - - dont_know - + + + - - - daily - - - - weekly - - - - other - - - - dont_know - + + + - - - daily - - - - weekly - - - - other - - - - dont_know - + + + - - - daily - - - - weekly - - - - other - - - - dont_know - + + + - - - daily - - - - weekly - - - - other - - - - dont_know - + + + + + + + + + + + + diff --git a/onadata/apps/main/tests/fixtures/transportation/transportation_export.xlsx b/onadata/apps/main/tests/fixtures/transportation/transportation_export.xlsx index e14cffd6cfb23109c8ebcc88a11fcf52c853e6be..558ec4e223350fbc32a5e7003bf283a8fbe21ea4 100644 GIT binary patch literal 6957 zcmai31yq#nwx+vF8itVWl#uQjx>I6gkRD1>O1ir{q&uXM22r|Y2qh#X1f?$g|5;a$ z_niOkS!=#;&AfZR-?w`|ueu5%5)m948X8=LRgFH}Z-jY&Zw3Y1xo~kld{-s(s`hbX zg&cWDuwDp!l0?fdX$LB{1NfqnT7_)|8|+6BI9ux$EWASM2nN~P3OFdyP;4ng 
zj<(rvk0X8t2cg-&IPhB@{~)!H7AnjgPUP$UM+`(B$VNMGhQ-yT8Wckr3u@!qx|V<0_5XcV7kPo=`LQ= zchf^{5iFt>Pf|(VOiI^uGr(x&zJ{rcbVShym-afRLe-_|!30=OD2B5An|vdLN-AFF zM_=X>Lp>A8Il%oBGot)-5p=#R&}po%DNM~S4u`Kt&qOR3CciIw9>iSG^#vkPQb)&# z!jQwj0r(WJOA)sKYSo!ia{CNs4SGi%SPKr}q^s*#r-OSG6X`%VZ%k%zMAr~~g*>~%@&!#U_b{xCs z$ze3AHRm@{BNCLj<|VE+$btFmAKa=GTWayAByW|hgSiVQk2bvPMXx9spmfW)fwhxx zf|rv8SBGmWo}jhB}tQ!ft$oD6zH+*YkwZUMqBR8)uz8rKHL|+mNH2 zw$IG{sJ3a=7+L)_Eyq&@Mi<`D5pw-p<*L@^0!qRu$TFO6xupfYa!0yx$SmoGfAG)n zi|{X&_r6(<^iTeY{kMO*xO&=wT^_viz`)3HiJR0HR#SUjE0m$qSB$mOrV8{4HSgwi`>9e&YmOSeDc4}}48Qpyx-I%dI7SI&TSdN?DkTE#Y4B<|Cj3@@ z7+Nmw`nc7~SE)Sy4g7+@O@OgE@<)Xu6y+M`E>-Ti9&RUr;P5#cn6q};vyho^MIIsg zx;QMG>0}p$VuC_NFs(cl%uN(WCb)0XpKBq_23fjp%?oWAjJ7a6KMdJJ!qab4!6#!e z;~pZ=*b&QEsb*61y|_7gJwYSZ4W^KTIePZ?WB$Cgx#O?Rr;5t<9rf57VRHc*MS+Yo z_>q-z>$)^W^EyS&EK|0KaH3J_)+h?SCQ#%N-Ea@_1e}Dd<*=4-Uo+@hMv>;TQJlFb zV~W8YQVWO?X)DrhHvlMOKV84%v`wQedmq(oXXHlWE}8Bfv|-oCzCX)mOee@Ehg0?q zBdHzSoniKaC9^Vk3R?g3UVIRdO*A7-CJysbqRxvg5h2^C;*ryw;T)C5lWTY6xLn|; z^tgC~>A7U_R-Ayaytiv%t3@)T^yS>_GPwgZT{tl)L_D!gj-4J2iW@eqzQn@S!7 zn=xZS;14`4uWXJuiH-5u_7GPxM=priQJS03v?)J_)})k;vYuspJmhVU+?rO7CZ>rx z?>3f%Vn>4Xruz1tH#If(ga|%^bb_}`_Kp9kh13`|YDTcZuqhW3IoG*Iems~b<5n?A z6gdA-klxB!x9<|vSC3~$6wumaPR`+19W+_!Ph-39-E0CZ`59Y2$k3A z(Y-@@bR-Q^KyK|FdhgQQSADDhap`pzIq>r>bA zxDS{HChAJj9o3Sociy4?(`b2m-VXD7!$I+@pw(*YgJ|o6!G=!pJ;9sn;0{m{kjcMtJ4uE#lS`PAsG{ zdp4$6yF$1gx!hFZO zc&1$iVQ>ns{t7m8wdfLFL^wD_%6|$r(qF~E1p)@Ux^Vq<;dzL=PD3M?OyT>uGwVH` zSj=8T*xFFB^+sK;+ceIw%i;&E644I<;WYqEAio=B1UvRJ)#m?T1Yl*r~#*mMo&5Y2L13Vs|Ctu*@nt{V6B-R|9_uk59Fb8sAl zUozqRj1jQQH2r={g*52BBnxmS8TlO#?HLg)(;xzX)~ZFB-V7EPCk+nIQMe2?BShpN zS*MJd@FDIC{w&2E1(*NDdW?H~6e1C%H-O3gvPgzwuPjM;w5D=Xz?j99@&nK4L?EdP z(WBy#!u_=DD(At|c_I?TYu{wQq5v(C)^=`;5yBdcBM;AGmr9OzVUj)(2qIjVc;Glw zm0s~IDQNP^-e-brEg8?neUI^XZ(-Qd%79FF>^?cQ3yR$9vLRZ^Zvw%w24D2YQg)ny zn8tFnY@5!TNTIz-X_7A(U+R!@O>E-Hg(ji5HI_ee02Ej9uO0g-sfYz&*5fGke8Q4< zQhla=E@~7X_BdU9z=PQk6>SZnlJ1>y-Yy>J0dC_)P7e1|dUAqd_5nP34K*5mSNjjO 
zTIAikUzhl#zuDo?jcWGQvu>NUwnM-^IsQzI8;wMI20|-`F-$7Ez)ndp`Q6+>TUqJAXgyO?lG8MNXc*6;696)t|u3OT4U zC~$$Ik!?0Ymh*gXfZGL)MEiwVTZETz^by-%wC(x*o{G4z`F5+Yj9;3`J0ML(fz0q- zyGGF>BHK>T4MU{h2*HLqd?*s$ZJ@hQU~(Fg_&4+d?QQc6P~oiAb%(uji{dV`rWaM8 zO}Fo;YW$Q|QPHVv>P}{@tPy3HzM3yAIRi=55nX2$bQ7ZU~U4Z!lRr1QL$gVYJlqeUEYn(a~p?zl>0 zF?GvF^E~d0Jk8~nc4Lr(0MkLtbnN&0K2` z;Y}P_oV_uR8F$&fSalQeGxCUPW+G1L^H|yObeqE49X5EHJA$sJlFj$RcgM)&G(bn6 z7KOK8)oT7(upWLeq^O?do;PR&ZefKN5| z%MP0m=xVP(55L{ByF1>qn{-2@+avII&WY_InAbnc<%!U~6IVenv99F!L}uNInZFNV zMD^6wKok{d7xfy8=xg^Mfp&FyC^A5-*|s;09=l9;hG)$uHe<7x4Wh#U>d+O9;Xpq| zq&;EFJ1h6&Z8H;>{E5~49X5cxKmpCnG1dHW_RO((<+^Cqy0>=UMQfYlZRt^yMF9HL z@%@t}t(RaZXC{;L=3D}$~sf17FzO)S8O)Q>QHo~&Cra*#nP4N7jxX!rl`bP zr6;8*pBKCVeZg-_F_g{nAi-Ct20k-i+c)-0eWaI3%=#uMI16wn(R$zMMlRCN?pL#vmK-e&6Y?u-@ ztdG2Fn!HOvz$e!BGKY08m6;wR?6Pq~xsw&?tqxLrj_wK-?^I)kgV9V_Z=~lR)f6XvJh=V9FEq1ks)8*>}Fu26};n-#XR)L3wX4D69(I)`|o z$PT%`4IV)gf2{53tQa~h)N*!4WF9D`2r0%-dv%ex=0%opzKX-qlUE;Yfl~+xvuIX% zY$*yL7(UsEs=dK9)BQ#@ChtPayV8UF}ogXKK5e9gFB5`8= zv`g{)D}VIYuzndVl=1?k*S!PoXq^E!hX?Cqy5j|_YY@iRS{j0i4#EZhM@n3V4X45e zoEGomL`{lDlvT^-N7{l8G`Pl2l%})b$BDJ7j1U_g*wouy-rdz&&ST@46?*4#4`ZW` zL#gBWCN*O%*4VgsQP1pud=dX5K}WSfKUx!%t10M#K5O=z`p65@5q-Hz-EaSz;q;^> zU9PHT^^*_6gJ#r@e|SyZzc=GQj-W*UYVjfFP_UJjE7Zop`mauZAQcpQKZk}K-4KU~ zVs2&XB#7HJX*!BGTJxs?oDl_^KyAA3L1KP(-P2*n9D^GIT^%BW?*E8tYxDKxx8m z!Byn?trJ%obFsEF@`=sM=Q!NbN9tYTrMEPyT>bi(g-cvndS7 zPtEJIN!F6*RoO89&hJTE7mL@RRrsGxHNcDILYj|646GR*N*Sr}2Zm@smo7frdh`$sNS`(g#_j{50so)U#QmReR*shH zP)8>hE=xx!_@VdqP0(-*Eg34ZszD^Tc~cq>h+w314J8&sI}7RC&q`*?CM9+-}g6l0PE`mTEFsQh-5+ zsZ~>)3+L3db8PRbiR7y#+=IS}kS0zB2j9*ri{~9wuJvSgr-oeE&jTC>=dNPao|P=y zDB9VzvgEN{tmopU`yoCUYqvnB+W{U9Zsi{{7vAs2GIw%%uvK;($no6+(0E0x)8C}T zVkE_ky~KYI+(;$TXq3#zX*HUfK-Al928S*Dk?E@Ge8F$`>oeE7P`u;@j*JZq;%BcD zM;(mL7CVzMwfp<4J}((SCh_SE>rj8n0bcCaSJG6@G{ovF9-n)_*8#o!q z`HZH;y_(FQj;`Dz&?QjKwR`F?Mv{ayQ#~8xp*}^3)K+;IQ92UKogW}c*ZVr*5MN=y z6Y~9;TiQTfWa0-kT}!*toU3X+?3u17`cA{eco!W{kj_$~C4{}!%jj>37k_TqJu-DK 
z-;e1mK`270_u7muBgkNyNwHE4~}DdySf#diK{$#T3^&}L&0?< zQ;I^x-9{cdA%BdoV~rwdnIAFvgpef>n)+F}gw>>jv)Z_;3d5FHAz%k zXk?rcTgrGt&5k}S!T&=KB|GVX1v?#C7pIam56EU_Y_2{;i&dOT3EUD&YuORH@O&WhWB`PbD7d++FNB; z_KM`|#5{f(MW7CJ$seWcWT&38<;0ggx1s}QI@fAYS0qg}>EUM- zL}AzoxRE(QSyOBPq6pS)N^BHTUEEUM+MHDvkX$yjto!0V4e&jDN&_#^vebIar!V}e z-{UY$6-0}$CwI5w5fSine+CaGg6dZ;X!NA!%i;;gxY{=j`3y6!pXd)p+7NBQwYoRZ z4Dy5T!y^#E{W_TV9gBZBnD}@4?U>?E#@|8G2dMrRG2L(V2XOx<^Y5U~1CIKOhVG%D zd*)xL>YuE?_p}eF&@bY@?{xpQ@BhJw{*>~2|M$@U{-QXXzoh(^Zun0*zn9L3Zt)kD z-z(+eMgHs^|CI21K7S~&zo>=aKNQ?Q*^z$BYY$2P7g^mu&;Qozf0ckgxqlDehursz rGKqiV{*?*;6!5!i{kwn@5;(a35cBFP$S4oHVBCNF?@L(fUpD+7^IP|l literal 6411 zcmZ`-1z1#Fw;md#V<@G&K|mU%yQE7=0f|9|mTu`5kq~JFks3i7>CU0+BL)x{QW`Jw zdG(k7Uf+4 z@c_BlgKe#?UBNv6p7^*u9UYXE2>$iCdGKF<#93Dd4S<9qX>QSIJgv7PO6pr)zIPzg z%tXDKfH#+-jH!+5Dh1SSuwsK`6jg9NHKcQHZdzkQ>aFE8YR5NIyKtaVvN3iSIR8fX zD&?gtSanopf_$qaXfz;1*p#|tvD^klnDfg_VnU!v8j(TN`P%hJHb-#F)?hFgWQ51>ji3($aGGx zb7ZXH&fHg{h6KDFcvh?GMI0GSgf8(3<~p8>A9ca`wPQcW+qy`3 z8Hz^dA-8F0K(0-0Vv<9ShO=LtqeqUW{KhaX&Y7wTWCf1gbLptnwcikE#8R9)3SMN#;(qq3gfYH;b`>&h8Qq(Ki#NTc zS<{$fP2R$%HYDWuwMvJ{02a+Cq-N{Tn`{0?`JdJ8Wc>Eqf)s!iQs_I@el5U7>^ z6vxBWPXuD~@*~{Kzmxxn@>A_uW{!w}X`RWVT z89P8GL>(OY5gRakrfCc*o|Pb&M;}R9lP{M#nh~KaC=_cNl*zZ-@J)@{6#AwC)eL@C zBxM}JJC{?4LOj(1NbO>IhW`-x6V(^UDE34Vlz9RZoTVv`&-2)LkyYyT0q_`_z1nlFxR4_zFr>-MTU~fzNk*)U_P%HO zI*loW@JovavY*%@jTkzOv%dw|1%v$5ossK)$(@*KllW=aXTB(`p7jv8fk*%I$AohJ z=`U`rL=7F_@k~7rNN_T&&!Tj!6vCOr&dR+Y|yCCvtE$Uv@XT4o>pQ3eP)6Syle)z^EH1Azz zIT_Yqlzz#f&2}HUzs24tYIdKnl&Ns0ItBL5Axs!|M8^MhT&Vl@A2Np#=l3q?*Sq%DiV*Tvo;(h|5iV{2T=ZAX3QHAj%BMz6FTb_Ax8u#IG zM+xpf*au~a26c!AD;P80FlG=d2^S_h4>B%isI=9^haAdS6_W#|K4Lf|@rdwVf*>TG zU7%ps+AdL~4kgj?GIV{GQCTwIE-aP=M~ zvwbdMV^7ht7am)7(NYtIkN1r1YTqEQYNaP?CU|#iEHbC@Wy|yNgPI^iZB1c8{R@r3 z7mePB)bn8h((}{y_as(pvQ{>(u;z=g6eaXI@Nqy zcu$JVZr-EyN-s^wfrD}Ro=V#rr8bd-5HLFyOqFP`jxhsUNjRHpKaHodtv3yhp^nPg zbr)3&6C*XaAR)!nD5YocYlbs5f{`u!MUo4w)#q=tWHe=7|GpRH?~^MkQfwCE>3Bl3^i|;d0oS4r4v3 
z+Xc%k$RZFwvMeX(Q0Z7cL_MD+0T0(0_JWdgE)aNnZ5HV2yTqfqgkDgENtzIlHN`{k zT9_3PpVD^--GvfQa&ixs5m+V+#U|Vw+Yb$M67ds~#@UH>`VMhSri@@&! z!#7d|FNl{!4p#bp(`1IoBHGT}?4LWXNN8EZipJXvImFJF;p zXA*!JfBweE|7-7k4xAb+ z2?ZQx#}keDBHwOubzRcsYh(bR1`hzh{of|LdO2ACZY#oo0Ky4@-)-H|HwDe`lK8^Q zKV5zjN{_2_vJ5G)2Rhg!$+eNlr?5?b5Ime`|V5Y_5y1 zoAyn|$z_&_%gsoXc5ZWGH@35X67|w4FqU5S`Ko)dD7^F7)%~O7^izfYmJSH>-sK}| z;%Q1F9gNDf?w$7}Mo|Zk=xOF`;m1c?lj&j%D4ooyKTa(CiJ;h4`ib*6F{kdMTRApG zF*~u-ZwM^(-LOn@^W%?NKTUNQ0Wj=DTe5`HpE(WG3-En;otNHbB&J-GoV=(mr`%W; zv+{aawqCmzkPK@%b^_}?w(Z{Lh=>*&C{?hF;YP9G1N8t)@vi!%IX)g@l*5kpfiqRpGsHhj}?eNA}W)Wzg!l%y z(V}s-=l5FvMRoU7A$^Gi3Lj1Acy+gna*N_D2S4;^YIh9RQML_#L3FQ(Hon$OT5Txe zqXlO-{|2|!$b%7z(n+e$k+^=m;KWZ=2k%>HISzdWmLko(H?&W1^UB+wclyh`La0VLxLd?HETn3U ziM|fywi3j0G#Au2EoHgMW5Pd`umrhS|B?b>L;&H4z;7u+6HM*fcuB(G7aYpFvp&^0 zxHV54JZoCbVq@k*kBZEhah0CUH$UyK)+TiK^2|P2W-NRIKYp+ncvvFDFRK#d!dnSX zHXIuM=AxxK+%Q0CSGP?iUcV58eaDCTWhdCq#+Eo}jr?UD5|W70Sb0|+cNd)*p3iDQ>K0Q$=)~d$OdbBEddstI1D$N*xhTRr zpf)y&o<@BJK^z$oomWv8+LzZyr-@lctSiC1M4E5cyuj69s)wNbcYV1bKe#<27fLR| z*N|(Xhn1vkTc;BB8nX5xeFaX9oXu14WKR{83($&_Tzd_v8f```ABwE-QEiVeuX263 z>mzAn*U;eh_6l%YX*?eeO`;nC&u_lOx0H4RyMi2lX^k)=fN(|Nx7PUy73yDk3Bpb! zL)YeORv0ynmDtm(`DM(^q7qe}7ejKV%8Cf!vBsG1Vn5AqHec)}aT_;pp@_DwCJTzh zrs6R0mhB`{xLoSMyYG}o$$b}|CS)#e$@yev5__@`m-u*cS4zl?OTDA%jj zlb)+TXzGa zuYD>EeEv({zK+3e$c<Lj z4EypssrMb3Hc=gjUjlTiqq}-zcwz**@V;A5XE&?duhOG!)%IBw^B+86m|nAVoJI+; zZRk65U>8Br_jH`2gFS=@frif3o+r9V&cPd-ZAf-4207ZFvB`3>7wKl4qu=vVrgXB6-WD{_E|j-EJ}QI7242fQ+_4~fXIHS` zjsX5A-3!Sqw@$moD*;N5t^9_xAJC=%5uLQdlc=h8Aj^`&hE6XZd4DmbORQAg)Mc?5iIR&nVD9hlD@nzF=wa)t7l=nQyCjs)j~sZq4W8bM&2o`a0~x8FUVS zv4>Kx`4b+P3wr%m$)YD4KrwdHsXRA>73I(Vf9hiG0RDGF=vNeQBkmwJgAiT_Af7X9+4TcGajAi%L+u;@x;*?+GBt`Lqt(%4VF8qSu! 
zCQfJbJ|C&0MZxqL-o{g0J$=Ew)K9ZmYcbq%srxjN$AZ4G33%ItYqM({-k%2)t~SRu zqWnQoLbu1E{3}8<>zf1!LHjul8Owm2T&9z6v;a;#WIxOlsWGK2ETJA zuu17D(UfKLkMyNop;`+#WK>L_P0bzWTFl5@T`)*HilCq_uPt9^r zqKnln#rQ#@`7q*Kzkkk14|gcCg`LV7tR{=x6N?fqKO{??=J>~!n`erR@Y8Ttmt-?F@>u9d93Zdks6k$S2@qU-Qbza zQx`tTWyc@nDGk6lyZ2ySKPp+V?e#tU3BBfcbQWRl{);U4`y$I z7W*c&f5!FiSVr7N0O5kbf4VAh2ocNSH?homGBN)MGY3+ys#u?6C;mur?%{J{g;!m@ zvq^oZDdh9!U5pR?uO0H|)ZIz4$t^TKCVp#(N6us(tA`}QyCx^9=AedNEm|)k+drOY zGZ2AkiE|oiFU? z)qtj`Blj_n?ke>o&l~XyCX*WP+WD>XVY7r3 zt$!(Rz+kDGrI@)H99|IaA;SEk$P{3p};P4NDa X+ has no data.")) @@ -285,7 +292,7 @@ def create_registration_form(sender, instance=None, created=False, **kwargs): if isinstance(instance_json, str): instance_json = json.loads(instance_json) - if not instance_json.get("entity_related"): + if not instance_json.get("entity_features"): return children = instance_json.get("children", []) @@ -315,7 +322,7 @@ def create_registration_form(sender, instance=None, created=False, **kwargs): ): form.is_active = False form.save() - else: + elif not registration_form_created and not registration_form.is_active: # If previously disabled, enable it registration_form.is_active = True registration_form.save() @@ -399,7 +406,7 @@ def disable_registration_form(sender, instance=None, created=False, **kwargs): if isinstance(instance_json, str): instance_json = json.loads(instance_json) - if not instance_json.get("entity_related"): + if not instance_json.get("entity_features"): # If form creates entities, disable the registration forms for registration_form in instance.registration_forms.filter(is_active=True): registration_form.is_active = False diff --git a/onadata/libs/filters.py b/onadata/libs/filters.py index 6eb34653c4..ecda489211 100644 --- a/onadata/libs/filters.py +++ b/onadata/libs/filters.py @@ -775,14 +775,23 @@ def filter_queryset(self, request, queryset, view): # pylint: disable=too-few-public-methods class EntityListProjectFilter(filters.BaseFilterBackend): - """EntityList `project` 
filter.""" + """Limits results to EntityLists under `project` query param""" # pylint: disable=unused-argument def filter_queryset(self, request, queryset, view): - """Filter by project id""" project_id = request.query_params.get("project") if project_id: return queryset.filter(project__pk=project_id) return queryset + + +class AnonUserEntityListFilter(ObjectPermissionsFilter): + """Limits results to public EntityLists for anonymous users""" + + def filter_queryset(self, request, queryset, view): + if request.user.is_anonymous: + return queryset.filter(project__shared=True) + + return super().filter_queryset(request, queryset, view) diff --git a/onadata/libs/models/share_project.py b/onadata/libs/models/share_project.py index 88490069b3..928eb7cfca 100644 --- a/onadata/libs/models/share_project.py +++ b/onadata/libs/models/share_project.py @@ -42,6 +42,13 @@ def remove_dataview_permissions(project, user, role): role._remove_obj_permissions(user, dataview.xform) +def remove_entity_list_permissions(project, user, role): + """Remove user permissions for all entitylists for the given project""" + for entity_list in project.entity_lists.all(): + # pylint: disable=protected-access + role._remove_obj_permissions(user, entity_list) + + class ShareProject: """Share project with a user.""" @@ -96,6 +103,10 @@ def save(self, **kwargs): if dataview.matches_parent: role.add(self.user, dataview.xform) + # Apply same role to EntityLists under project + for entity_list in self.project.entity_lists.all(): + role.add(self.user, entity_list) + # clear cache safe_delete(f"{PROJ_OWNER_CACHE}{self.project.pk}") safe_delete(f"{PROJ_PERM_CACHE}{self.project.pk}") @@ -109,5 +120,6 @@ def __remove_user(self): if role and self.user and self.project: remove_xform_permissions(self.project, self.user, role) remove_dataview_permissions(self.project, self.user, role) + remove_entity_list_permissions(self.project, self.user, role) # pylint: disable=protected-access 
role._remove_obj_permissions(self.user, self.project) diff --git a/onadata/libs/permissions.py b/onadata/libs/permissions.py index 00d174a2b7..6638208ecd 100644 --- a/onadata/libs/permissions.py +++ b/onadata/libs/permissions.py @@ -76,10 +76,17 @@ CAN_CHANGE_DATADICTIONARY = "change_datadictionary" CAN_DELETE_DATADICTIONARY = "delete_datadictionary" +# Entity permissions +CAN_ADD_ENTITYLIST = "add_entitylist" +CAN_VIEW_ENTITYLIST = "view_entitylist" +CAN_CHANGE_ENTITYLIST = "change_entitylist" +CAN_DELETE_ENTITYLIST = "delete_entitylist" + DataDictionary = apps.get_model("viewer", "DataDictionary") MergedXForm = apps.get_model("logger", "MergedXForm") OrganizationProfile = apps.get_model("api", "OrganizationProfile") UserProfile = apps.get_model("main", "UserProfile") +EntityList = apps.get_model("logger", "EntityList") class Role: @@ -340,6 +347,12 @@ class ManagerRole(Role): CAN_VIEW_XFORM_ALL, CAN_VIEW_XFORM_DATA, ], + EntityList: [ + CAN_ADD_ENTITYLIST, + CAN_VIEW_ENTITYLIST, + CAN_CHANGE_ENTITYLIST, + CAN_DELETE_ENTITYLIST, + ], } @@ -408,6 +421,12 @@ class OwnerRole(Role): CAN_MOVE_TO_FOLDER, CAN_TRANSFER_OWNERSHIP, ], + EntityList: [ + CAN_ADD_ENTITYLIST, + CAN_VIEW_ENTITYLIST, + CAN_CHANGE_ENTITYLIST, + CAN_DELETE_ENTITYLIST, + ], } diff --git a/onadata/libs/serializers/entity_serializer.py b/onadata/libs/serializers/entity_serializer.py index ee1d37729e..32963b55ff 100644 --- a/onadata/libs/serializers/entity_serializer.py +++ b/onadata/libs/serializers/entity_serializer.py @@ -1,4 +1,7 @@ +from django.utils.translation import gettext as _ + from rest_framework import serializers +from rest_framework.reverse import reverse from onadata.apps.logger.models import ( Entity, @@ -121,9 +124,88 @@ class Meta: class EntitySerializer(serializers.ModelSerializer): """Serializer for Entity""" + label = serializers.CharField(write_only=True, required=False) + data = serializers.JSONField(write_only=True, required=False) + + def validate_data(self, value): + if 
value: + for key in value.keys(): + if key not in self.context["entity_list"].properties: + raise serializers.ValidationError( + _(f"Invalid dataset property {key}") + ) + + return value + + def update(self, instance, validated_data): + data = validated_data.pop("data", {}) + label = validated_data.pop("label", None) + + if label: + instance.json["label"] = label + + if data: + updated_data = {**instance.json, **data} + + for key, value in data.items(): + if not value: + # Unset property + del updated_data[key] + + instance.json = updated_data + + instance.save() + instance.history.create( + json=instance.json, created_by=self.context["request"].user + ) + + return instance + def to_representation(self, instance): - return instance.json + data = super().to_representation(instance) + instance_json = data.pop("json") + + return {**data, "data": instance_json} + + class Meta: + model = Entity + fields = ( + "id", + "uuid", + "date_created", + "date_modified", + "json", + "label", + "data", + ) + + +class EntityArraySerializer(EntitySerializer): + """Serializer for a list of Entities""" + + url = serializers.SerializerMethodField() + + def get_url(self, obj): + entity_list = self.context["entity_list"] + request = self.context["request"] + response_format = self.context.get("format") + kwargs = {"pk": entity_list.pk, "entity_pk": obj.pk} + + return reverse( + "entity_list-entities", + kwargs=kwargs, + request=request, + format=response_format, + ) class Meta: model = Entity - fields = ("json",) + fields = ( + "url", + "id", + "uuid", + "date_created", + "json", + "label", + "data", + ) diff --git a/onadata/libs/serializers/xform_serializer.py b/onadata/libs/serializers/xform_serializer.py index 082c5d7d6b..11958def5b 100644 --- a/onadata/libs/serializers/xform_serializer.py +++ b/onadata/libs/serializers/xform_serializer.py @@ -718,8 +718,10 @@ def get_hash(self, obj): entity_list = EntityList.objects.filter(pk=pk).first() if entity_list.last_entity_update_time is not 
None: + update_time_str = entity_list.last_entity_update_time.isoformat() + num_entities = str(entity_list.num_entities) hsh = self._generate_hash( - entity_list.last_entity_update_time.isoformat().encode("utf-8") + f"{update_time_str}-{num_entities}".encode("utf-8") ) else: data_view = ( diff --git a/onadata/libs/test_utils/pyxform_test_case.py b/onadata/libs/test_utils/pyxform_test_case.py index c6d440cd79..9aa34cdf81 100644 --- a/onadata/libs/test_utils/pyxform_test_case.py +++ b/onadata/libs/test_utils/pyxform_test_case.py @@ -14,8 +14,8 @@ from lxml import etree from pyxform.builder import create_survey_element_from_dict +from pyxform.constants import NSMAP from pyxform.errors import PyXFormError -from pyxform.utils import NSMAP from pyxform.validators.odk_validate import ODKValidateError, check_xform from pyxform.xls2json import workbook_to_json diff --git a/onadata/libs/tests/models/test_share_project.py b/onadata/libs/tests/models/test_share_project.py index 5a29e06839..700e6fa4c0 100644 --- a/onadata/libs/tests/models/test_share_project.py +++ b/onadata/libs/tests/models/test_share_project.py @@ -3,7 +3,7 @@ from unittest.mock import patch, call from onadata.apps.logger.models.data_view import DataView -from onadata.apps.logger.models.project import Project +from onadata.apps.logger.models.entity_list import EntityList from onadata.apps.logger.models.xform import XForm from onadata.apps.main.tests.test_base import TestBase from onadata.libs.models.share_project import ShareProject @@ -20,22 +20,6 @@ def setUp(self): super().setUp() self._publish_transportation_form() - md_xform = """ - | survey | - | | type | name | label | - | | text | name | Name | - | | integer | age | Age | - | | select one fruits | fruit | Fruit | - | | | | | - | choices | list name | name | label | - | | fruits | 1 | Mango | - | | fruits | 2 | Orange | - | | fruits | 3 | Apple | - """ - project = Project.objects.create( - name="Demo", organization=self.user, created_by=self.user - ) 
- self._publish_markdown(md_xform, self.user, project) self.dataview_form = XForm.objects.all().order_by("-pk")[0] DataView.objects.create( name="Demo", @@ -45,6 +29,7 @@ def setUp(self): columns=[], ) self.merged_xf = self._create_merged_dataset() + self.entity_list = EntityList.objects.create(name="trees", project=self.project) self.alice = self._create_user("alice", "Yuao8(-)") @patch("onadata.libs.models.share_project.safe_delete") @@ -61,6 +46,7 @@ def test_share(self, mock_safe_delete, mock_propagate): self.assertTrue(ManagerRole.user_has_role(self.alice, self.dataview_form)) self.assertTrue(ManagerRole.user_has_role(self.alice, self.merged_xf)) self.assertTrue(ManagerRole.user_has_role(self.alice, self.merged_xf.xform_ptr)) + self.assertTrue(ManagerRole.user_has_role(self.alice, self.entity_list)) mock_propagate.assert_called_once_with(args=[self.project.pk]) # Cache is invalidated mock_safe_delete.assert_has_calls( @@ -82,12 +68,14 @@ def test_remove(self, mock_safe_delete, mock_propagate): ManagerRole.add(self.alice, self.dataview_form) ManagerRole.add(self.alice, self.merged_xf) ManagerRole.add(self.alice, self.merged_xf.xform_ptr) + ManagerRole.add(self.alice, self.entity_list) # Confirm project shared self.assertTrue(ManagerRole.user_has_role(self.alice, self.project)) self.assertTrue(ManagerRole.user_has_role(self.alice, self.xform)) self.assertTrue(ManagerRole.user_has_role(self.alice, self.dataview_form)) self.assertTrue(ManagerRole.user_has_role(self.alice, self.merged_xf)) self.assertTrue(ManagerRole.user_has_role(self.alice, self.merged_xf.xform_ptr)) + self.assertTrue(ManagerRole.user_has_role(self.alice, self.entity_list)) # Remove user instance = ShareProject(self.project, self.alice, "manager", True) instance.save() @@ -98,6 +86,7 @@ def test_remove(self, mock_safe_delete, mock_propagate): self.assertFalse( ManagerRole.user_has_role(self.alice, self.merged_xf.xform_ptr) ) + self.assertFalse(ManagerRole.user_has_role(self.alice, 
self.entity_list)) mock_propagate.assert_called_once_with(args=[self.project.pk]) # Cache is invalidated mock_safe_delete.assert_has_calls( diff --git a/onadata/libs/tests/serializers/test_merged_xform_serializer.py b/onadata/libs/tests/serializers/test_merged_xform_serializer.py index 176bcd960e..429957b0b0 100644 --- a/onadata/libs/tests/serializers/test_merged_xform_serializer.py +++ b/onadata/libs/tests/serializers/test_merged_xform_serializer.py @@ -5,10 +5,11 @@ import copy from rest_framework import serializers -from onadata.apps.api.tests.viewsets.test_abstract_viewset import \ - TestAbstractViewSet +from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet from onadata.libs.serializers.merged_xform_serializer import ( - MergedXFormSerializer, get_merged_xform_survey) + MergedXFormSerializer, + get_merged_xform_survey, +) from onadata.libs.utils.user_auth import get_user_default_project MD = """ @@ -125,75 +126,67 @@ def test_create_merged_xform(self): self.assertFalse(serializer.is_valid(raise_exception=False)) # project is required - self.assertEqual(serializer.errors['project'], - [u'This field is required.']) + self.assertEqual(serializer.errors["project"], ["This field is required."]) # name is required - self.assertEqual(serializer.errors['name'], - [u'This field is required.']) + self.assertEqual(serializer.errors["name"], ["This field is required."]) # At least 2 *different* xforms # 0 xforms - self.assertEqual(serializer.errors['xforms'], - [u'This field is required.']) + self.assertEqual(serializer.errors["xforms"], ["This field is required."]) self.project = get_user_default_project(self.user) - xform1 = self._publish_markdown(MD, self.user, id_string='a') + xform1 = self._publish_markdown(MD, self.user, id_string="a") data = { - 'xforms': [], - 'name': - 'Merged Dataset', - 'project': - "http://testserver.com/api/v1/projects/%s" % self.project.pk, + "xforms": [], + "name": "Merged Dataset", + "project": 
"http://testserver.com/api/v1/projects/%s" % self.project.pk, } serializer = MergedXFormSerializer(data=data) self.assertFalse(serializer.is_valid(raise_exception=False)) - self.assertNotIn('name', serializer.errors) - self.assertNotIn('project', serializer.errors) - self.assertEqual(serializer.errors['xforms'], - [u'This list may not be empty.']) + self.assertNotIn("name", serializer.errors) + self.assertNotIn("project", serializer.errors) + self.assertEqual(serializer.errors["xforms"], ["This list may not be empty."]) # 1 xform - data['xforms'] = ["http://testserver.com/api/v1/forms/%s" % xform1.pk] + data["xforms"] = ["http://testserver.com/api/v1/forms/%s" % xform1.pk] serializer = MergedXFormSerializer(data=data) self.assertFalse(serializer.is_valid(raise_exception=False)) self.assertIn( - u'This field should have at least two unique xforms.', - serializer.errors['xforms'] + "This field should have at least two unique xforms.", + serializer.errors["xforms"], ) # same xform twice - data['xforms'] = [ + data["xforms"] = [ + "http://testserver.com/api/v1/forms/%s" % xform1.pk, "http://testserver.com/api/v1/forms/%s" % xform1.pk, - "http://testserver.com/api/v1/forms/%s" % xform1.pk ] serializer = MergedXFormSerializer(data=data) self.assertFalse(serializer.is_valid(raise_exception=False)) self.assertIn( - u'This field should have unique xforms', - serializer.errors['xforms'] + "This field should have unique xforms", serializer.errors["xforms"] ) # xform with no matching fields - xform3 = self._publish_markdown(A_MD, self.user, id_string='c') - data['xforms'] = [ + xform3 = self._publish_markdown(A_MD, self.user, id_string="c") + data["xforms"] = [ "http://testserver.com/api/v1/forms/%s" % xform1.pk, - "http://testserver.com/api/v1/forms/%s" % xform3.pk + "http://testserver.com/api/v1/forms/%s" % xform3.pk, ] serializer = MergedXFormSerializer(data=data) self.assertFalse(serializer.is_valid(raise_exception=False)) - self.assertEqual(serializer.errors['xforms'], - 
[u'No matching fields in xforms.']) + self.assertEqual(serializer.errors["xforms"], ["No matching fields in xforms."]) # two different xforms - xform2 = self._publish_markdown(MD, self.user, id_string='b') - data['xforms'] = [ + xform2 = self._publish_markdown(MD, self.user, id_string="b") + data["xforms"] = [ "http://testserver.com/api/v1/forms/%s" % xform1.pk, - "http://testserver.com/api/v1/forms/%s" % xform2.pk + "http://testserver.com/api/v1/forms/%s" % xform2.pk, ] serializer = MergedXFormSerializer(data=data) self.assertTrue(serializer.is_valid(raise_exception=False)) - self.assertNotIn('xforms', serializer.errors) + self.assertNotIn("xforms", serializer.errors) def test_get_merged_xform_survey(self): """ @@ -203,47 +196,48 @@ def test_get_merged_xform_survey(self): get_merged_xform_survey([]) self.project = get_user_default_project(self.user) - xform1 = self._publish_markdown(A_MD, self.user, id_string='a') - xform2 = self._publish_markdown(B_MD, self.user, id_string='b') - xform3 = self._publish_markdown(MD, self.user, id_string='c') + xform1 = self._publish_markdown(A_MD, self.user, id_string="a") + xform2 = self._publish_markdown(B_MD, self.user, id_string="b") + xform3 = self._publish_markdown(MD, self.user, id_string="c") expected = { - u'name': u'data', - u'title': u'pyxform_autotesttitle', - u'sms_keyword': u'a', - u'default_language': u'default', - u'id_string': u'a', - u'type': u'survey', - u'children': [{ - u'name': u'name', - u'label': u'Name', - u'type': u'text' - }, { - u'children': [{ - u'name': u'male', - u'label': u'Male' - }, { - u'name': u'female', - u'label': u'Female' - }], - u'name': u'gender', - u'label': u'Sex', - u'list_name': u'gender', - u'type': u'select one' - }, { - u'control': { - u'bodyless': True + "name": "data", + "title": "pyxform_autotesttitle", + "sms_keyword": "a", + "default_language": "default", + "id_string": "a", + "type": "survey", + "choices": { + "gender": [ + {"name": "female", "label": "Female"}, + {"name": 
"male", "label": "Male"}, + ] + }, + "children": [ + {"name": "name", "label": "Name", "type": "text"}, + { + "name": "gender", + "label": "Sex", + "type": "select one", + "itemset": "gender", + "list_name": "gender", + "children": [ + {"name": "male", "label": "Male"}, + {"name": "female", "label": "Female"}, + ], + }, + { + "name": "meta", + "type": "group", + "control": {"bodyless": True}, + "children": [ + { + "name": "instanceID", + "type": "calculate", + "bind": {"readonly": "true()", "jr:preload": "uid"}, + } + ], }, - u'children': [{ - u'name': u'instanceID', - u'bind': { - u'readonly': u'true()', - u'jr:preload': u"uid" - }, - u'type': u'calculate' - }], - u'name': u'meta', - u'type': u'group' - }] + ], } # yapf: disable with self.assertRaises(serializers.ValidationError): @@ -254,9 +248,10 @@ def test_get_merged_xform_survey(self): # this field seems to change 50% of the time expected2 = copy.deepcopy(expected) - expected2['children'][1]['children'] = \ - [{'name': 'female', 'label': 'Female'}, - {'name': 'male', 'label': 'Male'}] + expected2["children"][1]["children"] = [ + {"name": "female", "label": "Female"}, + {"name": "male", "label": "Male"}, + ] self.assertTrue(survey_dict == expected or survey_dict == expected2) # no matching fields @@ -268,76 +263,81 @@ def test_group_merged_xform_survey(self): Test get_merged_xform_survey() with groups in xforms. 
""" self.project = get_user_default_project(self.user) - xform1 = self._publish_markdown(GROUP_A_MD, self.user, id_string='a') - xform2 = self._publish_markdown(GROUP_B_MD, self.user, id_string='b') + xform1 = self._publish_markdown(GROUP_A_MD, self.user, id_string="a") + xform2 = self._publish_markdown(GROUP_B_MD, self.user, id_string="b") survey = get_merged_xform_survey([xform1, xform2]) expected = { - u'default_language': u'default', - u'id_string': u'a', - u'children': [{ - u'name': u'name', - u'label': u'Name', - u'type': u'text' - }, { - u'children': [{ - u'children': [{ - u'name': u'female', - u'label': u'Female' - }, { - u'name': u'male', - u'label': u'Male' - }], - u'name': u'gender', - u'label': u'Sex', - u'list_name': u'gender', - u'type': u'select one' - }], - u'name': u'info', - u'label': u'Info', - u'type': u'group' - }, { - u'children': [{ - u'children': [{ - u'children': [{ - u'name': u'female', - u'label': u'Female' - }, { - u'name': u'male', - u'label': u'Male' - }], - u'name': u'gender', - u'label': u'Sex', - u'list_name': u'gender', - u'type': u'select one' - }], - u'name': u'person', - u'label': u'Person', - u'type': u'group' - }], - u'name': u'other', - u'label': u'Other', - u'type': u'group', - }, { - u'control': { - u'bodyless': True + "name": "data", + "type": "survey", + "title": "pyxform_autotesttitle", + "id_string": "a", + "sms_keyword": "a", + "default_language": "default", + "choices": { + "gender": [ + {"name": "female", "label": "Female"}, + {"name": "male", "label": "Male"}, + ] + }, + "children": [ + {"name": "name", "label": "Name", "type": "text"}, + { + "name": "info", + "label": "Info", + "type": "group", + "children": [ + { + "name": "gender", + "label": "Sex", + "type": "select one", + "itemset": "gender", + "list_name": "gender", + "children": [ + {"name": "female", "label": "Female"}, + {"name": "male", "label": "Male"}, + ], + } + ], }, - u'children': [{ - u'name': u'instanceID', - u'bind': { - u'readonly': u'true()', - 
u'jr:preload': u"uid" - }, - u'type': u'calculate' - }], - u'name': u'meta', - u'type': u'group' - }], - u'type': u'survey', - u'name': u'data', - u'sms_keyword': u'a', - u'title': u'pyxform_autotesttitle' + { + "name": "other", + "label": "Other", + "type": "group", + "children": [ + { + "name": "person", + "label": "Person", + "type": "group", + "children": [ + { + "name": "gender", + "label": "Sex", + "type": "select one", + "itemset": "gender", + "list_name": "gender", + "children": [ + {"name": "female", "label": "Female"}, + {"name": "male", "label": "Male"}, + ], + } + ], + } + ], + }, + { + "name": "meta", + "type": "group", + "control": {"bodyless": True}, + "children": [ + { + "name": "instanceID", + "type": "calculate", + "bind": {"readonly": "true()", "jr:preload": "uid"}, + } + ], + }, + ], } # yapf: disable - self.assertEqual(survey.to_json_dict(), expected) def test_repeat_merged_xform_survey(self): @@ -345,78 +345,85 @@ def test_repeat_merged_xform_survey(self): Test get_merged_xform_survey() with repeats in xforms. 
""" self.project = get_user_default_project(self.user) - xform1 = self._publish_markdown(GROUP_A_MD.replace('group', 'repeat'), - self.user, id_string='a') - xform2 = self._publish_markdown(GROUP_B_MD.replace('group', 'repeat'), - self.user, id_string='b') + xform1 = self._publish_markdown( + GROUP_A_MD.replace("group", "repeat"), self.user, id_string="a" + ) + xform2 = self._publish_markdown( + GROUP_B_MD.replace("group", "repeat"), self.user, id_string="b" + ) survey = get_merged_xform_survey([xform1, xform2]) expected = { - u'default_language': u'default', - u'id_string': u'a', - u'children': [{ - u'name': u'name', - u'label': u'Name', - u'type': u'text' - }, { - u'children': [{ - u'children': [{ - u'name': u'female', - u'label': u'Female' - }, { - u'name': u'male', - u'label': u'Male' - }], - u'name': u'gender', - u'label': u'Sex', - u'list_name': u'gender', - u'type': u'select one' - }], - u'name': u'info', - u'label': u'Info', - u'type': u'repeat' - }, { - u'children': [{ - u'children': [{ - u'children': [{ - u'name': u'female', - u'label': u'Female' - }, { - u'name': u'male', - u'label': u'Male' - }], - u'name': u'gender', - u'label': u'Sex', - u'list_name': u'gender', - u'type': u'select one' - }], - u'name': u'person', - u'label': u'Person', - u'type': u'repeat' - }], - u'name': u'other', - u'label': u'Other', - u'type': u'repeat', - }, { - u'control': { - u'bodyless': True + "name": "data", + "type": "survey", + "title": "pyxform_autotesttitle", + "id_string": "a", + "sms_keyword": "a", + "default_language": "default", + "choices": { + "gender": [ + {"name": "female", "label": "Female"}, + {"name": "male", "label": "Male"}, + ] + }, + "children": [ + {"name": "name", "label": "Name", "type": "text"}, + { + "name": "info", + "label": "Info", + "type": "repeat", + "children": [ + { + "name": "gender", + "label": "Sex", + "type": "select one", + "itemset": "gender", + "list_name": "gender", + "children": [ + {"name": "female", "label": "Female"}, + {"name": 
"male", "label": "Male"}, + ], + } + ], + }, + { + "name": "other", + "label": "Other", + "type": "repeat", + "children": [ + { + "name": "person", + "label": "Person", + "type": "repeat", + "children": [ + { + "name": "gender", + "label": "Sex", + "type": "select one", + "itemset": "gender", + "list_name": "gender", + "children": [ + {"name": "female", "label": "Female"}, + {"name": "male", "label": "Male"}, + ], + } + ], + } + ], }, - u'children': [{ - u'name': u'instanceID', - u'bind': { - u'readonly': u'true()', - u'jr:preload': u'uid' - }, - u'type': u'calculate' - }], - u'name': u'meta', - u'type': u'group' - }], - u'type': u'survey', - u'name': u'data', - u'sms_keyword': u'a', - u'title': u'pyxform_autotesttitle' + { + "name": "meta", + "type": "group", + "control": {"bodyless": True}, + "children": [ + { + "name": "instanceID", + "type": "calculate", + "bind": {"readonly": "true()", "jr:preload": "uid"}, + } + ], + }, + ], } # yapf: disable - self.assertEqual(survey.to_json_dict(), expected) def test_matching_fields_by_type(self): @@ -425,36 +432,39 @@ def test_matching_fields_by_type(self): matches. 
""" self.project = get_user_default_project(self.user) - xform1 = self._publish_markdown(GROUP_A_MD.replace('group', 'repeat'), - self.user, id_string='a') - xform2 = self._publish_markdown(GROUP_B_MD, self.user, id_string='b') + xform1 = self._publish_markdown( + GROUP_A_MD.replace("group", "repeat"), self.user, id_string="a" + ) + xform2 = self._publish_markdown(GROUP_B_MD, self.user, id_string="b") survey = get_merged_xform_survey([xform1, xform2]) expected = { - u'default_language': u'default', - u'id_string': u'a', - u'children': [{ - u'name': u'name', - u'label': u'Name', - u'type': u'text' - }, { - u'control': { - u'bodyless': True + "default_language": "default", + "id_string": "a", + "choices": { + "gender": [ + {"label": "Female", "name": "female"}, + {"label": "Male", "name": "male"}, + ] + }, + "children": [ + {"name": "name", "label": "Name", "type": "text"}, + { + "control": {"bodyless": True}, + "children": [ + { + "name": "instanceID", + "bind": {"readonly": "true()", "jr:preload": "uid"}, + "type": "calculate", + } + ], + "name": "meta", + "type": "group", }, - u'children': [{ - u'name': u'instanceID', - u'bind': { - u'readonly': u'true()', - u'jr:preload': u'uid' - }, - u'type': u'calculate' - }], - u'name': u'meta', - u'type': u'group' - }], - u'type': u'survey', - u'name': u'data', - u'sms_keyword': u'a', - u'title': u'pyxform_autotesttitle' + ], + "type": "survey", + "name": "data", + "sms_keyword": "a", + "title": "pyxform_autotesttitle", } # yapf: disable self.assertEqual(survey.to_json_dict(), expected) @@ -464,13 +474,13 @@ def test_merged_dataset_dict_contains_no_bind_attributes(self): Test get_merged_xform_survey(): should not contain bind elements. 
""" self.project = get_user_default_project(self.user) - xform1 = self._publish_markdown(GROUP_A_MD, self.user, id_string='a') - xform2 = self._publish_markdown(GROUP_B_MD, self.user, id_string='b') - xform3 = self._publish_markdown(GROUP_C_MD, self.user, id_string='c') + xform1 = self._publish_markdown(GROUP_A_MD, self.user, id_string="a") + xform2 = self._publish_markdown(GROUP_B_MD, self.user, id_string="b") + xform3 = self._publish_markdown(GROUP_C_MD, self.user, id_string="c") survey = get_merged_xform_survey([xform1, xform2, xform3]) result = survey.to_json_dict() - count = len([child for child in result["children"] if 'bind' in child]) + count = len([child for child in result["children"] if "bind" in child]) # check that no elements within the newly created # merged_dataset_dict contains bind attributes diff --git a/onadata/libs/tests/serializers/test_xform_serializer.py b/onadata/libs/tests/serializers/test_xform_serializer.py index f8dc3713f7..e45675b8e7 100644 --- a/onadata/libs/tests/serializers/test_xform_serializer.py +++ b/onadata/libs/tests/serializers/test_xform_serializer.py @@ -2,13 +2,13 @@ """ Test onadata.libs.serializers.xform_serializer """ -import os - from unittest.mock import MagicMock +from django.db.models import F from django.test import TestCase +from django.utils import timezone -from onadata.apps.logger.models import XForm, Entity +from onadata.apps.logger.models import EntityList from onadata.apps.main.tests.test_base import TestBase from onadata.libs.serializers.xform_serializer import XFormManifestSerializer @@ -67,39 +67,42 @@ def test_get_hash(self): obj.data_file = "data file" self.assertEqual(serializer.get_hash(obj), obj.file_hash) - def test_entity_dataset_hash(self): - """Hash for entity dataset changes after new Entity created""" + def test_entity_list_last_update_time_hash(self): + """Hash changes when EntityList last_entity_update_time changes""" serializer = XFormManifestSerializer() self._create_user_and_login() # 
Publish registration form self._publish_registration_form(self.user) # Publish follow up form - self._publish_follow_up_form(self.user) - follow_up_xform = XForm.objects.order_by("pk").reverse()[0] - entity_list = self.project.entity_lists.first() - # Make submission to create new Entity - submission_path = os.path.join( - self.this_directory, - "fixtures", - "entities", - "instances", - "trees_registration.xml", - ) - self._make_submission(submission_path) + follow_up_xform = self._publish_follow_up_form(self.user) + entity_list = EntityList.objects.get(name="trees") metadata = follow_up_xform.metadata_set.get( data_type="media", data_value=f"entity_list {entity_list.pk} {entity_list.name}", ) old_hash = serializer.get_hash(metadata) - # Make another submission - submission_path = os.path.join( - self.this_directory, - "fixtures", - "entities", - "instances", - "trees_registration_2.xml", + entity_list.last_entity_update_time = timezone.now() + entity_list.save() + new_hash = serializer.get_hash(metadata) + self.assertNotEqual(old_hash, new_hash) + + def test_entity_list_num_entities_hash(self): + """Hash changes when EntityList num_entities changes""" + serializer = XFormManifestSerializer() + self._create_user_and_login() + # Publish registration form + self._publish_registration_form(self.user) + # Publish follow up form + follow_up_xform = self._publish_follow_up_form(self.user) + entity_list = EntityList.objects.get(name="trees") + entity_list.last_entity_update_time = timezone.now() + entity_list.save() + metadata = follow_up_xform.metadata_set.get( + data_type="media", + data_value=f"entity_list {entity_list.pk} {entity_list.name}", ) - self._make_submission(submission_path) + old_hash = serializer.get_hash(metadata) + entity_list.num_entities = F("num_entities") + 1 + entity_list.save() new_hash = serializer.get_hash(metadata) - self.assertEqual(Entity.objects.count(), 2) self.assertNotEqual(old_hash, new_hash) diff --git 
a/onadata/libs/tests/utils/test_csv_builder.py b/onadata/libs/tests/utils/test_csv_builder.py index 5e810c4bdf..1b5b68c127 100644 --- a/onadata/libs/tests/utils/test_csv_builder.py +++ b/onadata/libs/tests/utils/test_csv_builder.py @@ -1670,7 +1670,6 @@ def test_multiple_repeats_column_order(self): "food/Salad", "food/Sandwich", "no_food", - "food_repeat_count", "food_repeat[1]/food_group/Apple", "food_repeat[1]/food_group/Orange", "food_repeat[1]/food_group/Banana", @@ -1690,7 +1689,6 @@ def test_multiple_repeats_column_order(self): "food_repeat[2]/food_group/Salad", "food_repeat[2]/food_group/Sandwich", "no_food_2", - "food_repeat_2_count", "food_repeat_2[1]/food_group_2/Apple", "food_repeat_2[1]/food_group_2/Orange", "food_repeat_2[1]/food_group_2/Banana", diff --git a/onadata/libs/tests/utils/test_export_tools.py b/onadata/libs/tests/utils/test_export_tools.py index cebdf15a00..a2cf21fe59 100644 --- a/onadata/libs/tests/utils/test_export_tools.py +++ b/onadata/libs/tests/utils/test_export_tools.py @@ -24,7 +24,7 @@ from onadata.apps.api import tests as api_tests from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet from onadata.apps.api.viewsets.data_viewset import DataViewSet -from onadata.apps.logger.models import Attachment, Instance, XForm, EntityList +from onadata.apps.logger.models import Attachment, Instance, XForm, Entity, EntityList from onadata.apps.viewer.models.export import Export, GenericExport from onadata.apps.viewer.models.parsed_instance import query_fields_data from onadata.libs.serializers.merged_xform_serializer import MergedXFormSerializer @@ -1011,16 +1011,17 @@ def test_generate_export_entity_list(self): """Generate export for EntityList dataset works""" # Publish registration form and create "trees" Entitylist dataset self._publish_registration_form(self.user) - # Make submission to trees_registration form - submission_path = os.path.join( - self.main_directory, - "fixtures", - "entities", - "instances", 
- "trees_registration.xml", - ) - self._make_submission(submission_path) entity_list = EntityList.objects.get(name="trees") + Entity.objects.create( + entity_list=entity_list, + json={ + "species": "purpleheart", + "geometry": "-1.286905 36.772845 0 0", + "circumference_cm": 300, + "label": "300cm purpleheart", + }, + uuid="dbee4c32-a922-451c-9df7-42f40bf78f48", + ) export = generate_entity_list_export(entity_list) self.assertIsNotNone(export) self.assertTrue(export.is_successful) diff --git a/onadata/libs/tests/utils/test_logger_tools.py b/onadata/libs/tests/utils/test_logger_tools.py index 192c963385..e133dd1986 100644 --- a/onadata/libs/tests/utils/test_logger_tools.py +++ b/onadata/libs/tests/utils/test_logger_tools.py @@ -14,13 +14,13 @@ from defusedxml.ElementTree import ParseError from onadata.apps.logger.import_tools import django_file -from onadata.apps.logger.models import Instance, Entity, RegistrationForm +from onadata.apps.logger.models import Instance, Entity, RegistrationForm, SurveyType from onadata.apps.logger.xform_instance_parser import AttachmentNameError from onadata.apps.main.tests.test_base import TestBase from onadata.libs.test_utils.pyxform_test_case import PyxformTestCase from onadata.libs.utils.common_tags import MEDIA_ALL_RECEIVED, MEDIA_COUNT, TOTAL_MEDIA from onadata.libs.utils.logger_tools import ( - create_entity, + create_entity_from_instance, create_instance, generate_content_disposition_header, get_first_record, @@ -656,8 +656,6 @@ class CreateEntityTestCase(TestBase): def setUp(self): super().setUp() - # Mute signal that creates Entity when Instance is saved - self._mute_post_save_signals([(Instance, "create_entity")]) self.xform = self._publish_registration_form(self.user) self.xml = ( '' @@ -677,36 +675,49 @@ def setUp(self): "" "" ) - self.instance = Instance.objects.create( - xml=self.xml, - user=self.user, + survey_type = SurveyType.objects.create(slug="slug-foo") + instance = Instance( xform=self.xform, + xml=self.xml, 
version=self.xform.version, + survey_type=survey_type, ) + # We use bulk_create to avoid calling create_entity signal + Instance.objects.bulk_create([instance]) + self.instance = Instance.objects.first() self.registration_form = RegistrationForm.objects.first() def test_entity_created(self): """Entity is created successfully""" - create_entity(self.instance, self.registration_form) + create_entity_from_instance(self.instance, self.registration_form) + + self.assertEqual(Entity.objects.count(), 1) + entity = Entity.objects.first() - self.assertEqual(entity.registration_form, self.registration_form) - self.assertEqual(entity.instance, self.instance) - self.assertEqual(entity.xml, self.xml) + entity_list = self.registration_form.entity_list + entity_list.refresh_from_db() + + self.assertEqual(entity.entity_list, entity_list) + expected_json = { - "formhub/uuid": "d156a2dce4c34751af57f21ef5c4e6cc", "geometry": "-1.286905 36.772845 0 0", "species": "purpleheart", "circumference_cm": 300, - "meta/instanceID": "uuid:9d3f042e-cfec-4d2a-8b5b-212e3b04802b", - "meta/instanceName": "300cm purpleheart", - "meta/entity/label": "300cm purpleheart", - "_xform_id_string": "trees_registration", - "_version": "2022110901", - "_id": entity.pk, + "label": "300cm purpleheart", } + self.assertCountEqual(entity.json, expected_json) self.assertEqual(entity.uuid, "dbee4c32-a922-451c-9df7-42f40bf78f48") - entity_list = self.registration_form.entity_list - entity_list.refresh_from_db() + self.assertEqual(entity_list.num_entities, 1) self.assertEqual(entity_list.last_entity_update_time, entity.date_modified) + self.assertEqual(entity.history.count(), 1) + + entity_history = entity.history.first() + + self.assertEqual(entity_history.registration_form, self.registration_form) + self.assertEqual(entity_history.instance, self.instance) + self.assertEqual(entity_history.xml, self.instance.xml) + self.assertEqual(entity_history.json, expected_json) + self.assertEqual(entity_history.form_version, 
self.xform.version) + self.assertEqual(entity_history.created_by, self.instance.user) diff --git a/onadata/libs/tests/utils/test_project_utils.py b/onadata/libs/tests/utils/test_project_utils.py index 133698367b..d5cea1186c 100644 --- a/onadata/libs/tests/utils/test_project_utils.py +++ b/onadata/libs/tests/utils/test_project_utils.py @@ -7,15 +7,18 @@ from django.test.utils import override_settings from kombu.exceptions import OperationalError from requests import Response +from guardian.shortcuts import get_perms +from onadata.apps.api.models import Team from onadata.apps.logger.models import Project from onadata.apps.main.tests.test_base import TestBase -from onadata.libs.permissions import DataEntryRole +from onadata.libs.permissions import DataEntryRole, ManagerRole, OwnerRole from onadata.libs.utils.project_utils import ( assign_change_asset_permission, retrieve_asset_permissions, set_project_perms_to_xform, set_project_perms_to_xform_async, + set_project_perms_to_object, ) @@ -156,3 +159,46 @@ def test_rabbitmq_connection_error(self, mock_set_perms_async, mock_set_perms): self._publish_transportation_form() self.assertFalse(mock_set_perms_async.called) self.assertTrue(mock_set_perms.called) + + +class SetProjectPermsToObjectTestCase(TestBase): + """Tests for set_project_perms_to_object""" + + def setUp(self): + super().setUp() + self._publish_transportation_form() + self.alice = self._create_user(username="alice", password="abc123!!") + ManagerRole.add(self.alice, self.project) + + def test_set_perms(self): + """Project permissions are applied to object""" + set_project_perms_to_object(self.xform, self.project) + + self.assertTrue(OwnerRole.user_has_role(self.user, self.xform)) + self.assertTrue(ManagerRole.user_has_role(self.alice, self.xform)) + + def test_owners_team_permissions(self): + """Object permissions for owners group are set""" + team = Team.objects.create( + name=f"{self.user.username}#Owners", organization=self.user + ) + 
self.assertEqual(get_perms(team, self.xform), []) + + set_project_perms_to_object(self.xform, self.project) + perms = get_perms(team, self.xform) + + self.assertTrue("add_xform" in perms) + self.assertTrue("view_xform" in perms) + self.assertTrue("change_xform" in perms) + self.assertTrue("delete_xform" in perms) + + def test_xform_creator(self): + """XForm creator is made owner""" + self.xform.created_by = self.alice + self.xform.save() + + self.assertFalse(OwnerRole.user_has_role(self.alice, self.xform)) + + set_project_perms_to_object(self.xform, self.project) + + self.assertTrue(OwnerRole.user_has_role(self.alice, self.xform)) diff --git a/onadata/libs/utils/export_tools.py b/onadata/libs/utils/export_tools.py index 3899003f28..5755b2d356 100644 --- a/onadata/libs/utils/export_tools.py +++ b/onadata/libs/utils/export_tools.py @@ -134,13 +134,13 @@ def get_entity_list_dataset(entity_list: EntityList) -> Iterator[dict]: An iterator of dicts which represent the json data for Entities belonging to the dataset """ - entities = Entity.objects.filter(registration_form__entity_list=entity_list) + entities = Entity.objects.filter(entity_list=entity_list) dataset_properties = entity_list.properties for entity in queryset_iterator(entities): data = { "name": entity.uuid, - "label": entity.json.get("meta/entity/label", ""), + "label": entity.json.get("label", ""), } for prop in dataset_properties: data[prop] = entity.json.get(prop, "") diff --git a/onadata/libs/utils/logger_tools.py b/onadata/libs/utils/logger_tools.py index e62add6179..3787a0313b 100644 --- a/onadata/libs/utils/logger_tools.py +++ b/onadata/libs/utils/logger_tools.py @@ -4,6 +4,7 @@ logger_tools - Logger app utility functions. 
""" import json +import logging import os import re import sys @@ -26,7 +27,7 @@ ) from django.core.files.storage import get_storage_class from django.db import DataError, IntegrityError, transaction -from django.db.models import Q, F +from django.db.models import Q from django.http import ( HttpResponse, HttpResponseNotFound, @@ -75,6 +76,7 @@ get_deprecated_uuid_from_xml, get_submission_date_from_xml, get_uuid_from_xml, + get_entity_uuid_from_xml, ) from onadata.apps.messaging.constants import ( SUBMISSION_CREATED, @@ -107,6 +109,8 @@ r"\s*\s*([^<]+)\s*\s*", re.DOTALL ) +logger = logging.getLogger(__name__) + # pylint: disable=invalid-name User = get_user_model() @@ -972,22 +976,22 @@ def publish_xform(self): return publish_xml_form(self.xml_file, self.user, self.project) -def create_entity(instance: Instance, registration_form: RegistrationForm) -> Entity: - """Create an Entity +def get_entity_json_from_instance( + instance: Instance, registration_form: RegistrationForm +) -> dict: + """Parses Instance json and returns Entity json Args: - instance (Instance): Submission from which the Entity is created from - registration_form (RegistrationForm): RegistrationForm creating the - Entity + instance (Instance): Submission to create Entity Returns: - Entity: A newly created Entity + dict: Entity properties """ instance_json: dict[str, Any] = instance.get_dict() # Getting a mapping of save_to field to the field name mapped_properties = registration_form.get_save_to(instance.version) # Field names with an alias defined - target_fields = list(mapped_properties.values()) + property_fields = list(mapped_properties.values()) def convert_to_alias(field_name: str) -> str: """Convert field name to it's alias""" @@ -996,7 +1000,7 @@ def convert_to_alias(field_name: str) -> str: parts = field_name.split("/") # Replace field parts with alias for part in parts: - if part in target_fields: + if part in property_fields: for alias, field in mapped_properties.items(): if field == 
part: alias_field_name = alias_field_name.replace(field, alias) @@ -1016,23 +1020,86 @@ def parse_instance_json(data: dict[str, Any]) -> None: parse_instance_json(child_data) else: - if field_name in target_fields: + if field_name in property_fields: alias_field_name = convert_to_alias(field_name) if alias_field_name != field_name: data[alias_field_name] = data[field_name] del data[field_name] + elif field_name == "meta/entity/label": + data["label"] = data["meta/entity/label"] + del data["meta/entity/label"] + + else: + del data[field_name] + parse_instance_json(instance_json) + + return instance_json + + +def create_entity_from_instance( + instance: Instance, registration_form: RegistrationForm +) -> Entity: + """Create an Entity + + Args: + instance (Instance): Submission from which the Entity is created from + registration_form (RegistrationForm): RegistrationForm creating the + Entity + + Returns: + Entity: A newly created Entity + """ + entity_json = get_entity_json_from_instance(instance, registration_form) + entity_list = registration_form.entity_list entity = Entity.objects.create( + entity_list=entity_list, + json=entity_json, + uuid=get_entity_uuid_from_xml(instance.xml), + ) + entity.history.create( registration_form=registration_form, xml=instance.xml, - json=instance_json, instance=instance, + form_version=instance.version, + json=entity_json, + created_by=instance.user, + ) + + return entity + + +def update_entity_from_instance( + uuid: str, instance: Instance, registration_form: RegistrationForm +) -> Entity | None: + """Updates Entity + + Args: + uuid (str): uuid of the Entity to be updated + instance (Instance): Submission that updates an Entity + + Returns: + Entity | None: updated Entity if uuid valid, else None + """ + try: + entity = Entity.objects.get(uuid=uuid) + + except Entity.DoesNotExist as err: + logger.exception(err) + return None + + patch_data = get_entity_json_from_instance(instance, registration_form) + entity.json = 
{**entity.json, **patch_data} + entity.save() + entity.history.create( + registration_form=registration_form, + xml=instance.xml, + instance=instance, + form_version=instance.version, + json=entity.json, + created_by=instance.user, ) - entity_list = registration_form.entity_list - entity_list.last_entity_update_time = entity.date_modified - entity_list.num_entities = F("num_entities") + 1 - entity_list.save() return entity diff --git a/onadata/libs/utils/project_utils.py b/onadata/libs/utils/project_utils.py index a0cf5693c1..53e9d78eb8 100644 --- a/onadata/libs/utils/project_utils.py +++ b/onadata/libs/utils/project_utils.py @@ -4,7 +4,7 @@ """ import re import sys -from typing import Dict, List, Optional +from typing import Any, Dict, List, Optional from urllib.parse import urljoin from django.conf import settings @@ -87,24 +87,7 @@ def set_project_perms_to_xform(xform, project): if role and (user not in (xform.user, project.user, project.created_by)): role.remove_obj_permissions(user, xform) - owners = project.organization.team_set.filter( - name=f"{project.organization.username}#{OWNER_TEAM_NAME}", - organization=project.organization, - ) - - if owners: - OwnerRole.add(owners[0], xform) - - for perm in get_object_users_with_permissions(project, with_group_users=True): - user = perm["user"] - role_name = perm["role"] - role = ROLES.get(role_name) - - if user == xform.created_by: - OwnerRole.add(user, xform) - else: - if role: - role.add(user, xform) + set_project_perms_to_object(xform, project) # pylint: disable=invalid-name @@ -327,3 +310,30 @@ def propagate_project_permissions( new_users, session, ) + + +def set_project_perms_to_object(obj: Any, project: Project) -> None: + """Apply project permissions to an object + + Args: + obj: Object to set permissions for + project: Project under which the object belongs to + """ + owners = project.organization.team_set.filter( + name=f"{project.organization.username}#{OWNER_TEAM_NAME}", + 
organization=project.organization, + ) + + if owners: + OwnerRole.add(owners[0], obj) + + for perm in get_object_users_with_permissions(project, with_group_users=True): + user = perm["user"] + role_name = perm["role"] + role = ROLES.get(role_name) + + if isinstance(obj, XForm) and user == obj.created_by: + OwnerRole.add(user, obj) + + elif role: + role.add(user, obj) diff --git a/onadata/libs/utils/user_auth.py b/onadata/libs/utils/user_auth.py index ec1f91ab34..d219028030 100644 --- a/onadata/libs/utils/user_auth.py +++ b/onadata/libs/utils/user_auth.py @@ -17,6 +17,7 @@ from onadata.apps.api.models.team import Team from onadata.apps.api.models.temp_token import TempToken +from onadata.apps.logger.models.entity_list import EntityList from onadata.apps.logger.models.note import Note from onadata.apps.logger.models.project import Project from onadata.apps.logger.models.xform import XForm @@ -212,9 +213,9 @@ def add_cors_headers(response): """Add CORS headers to the HttpResponse ``response`` instance.""" response["Access-Control-Allow-Origin"] = "*" response["Access-Control-Allow-Methods"] = "GET" - response[ - "Access-Control-Allow-Headers" - ] = "Accept, Origin, X-Requested-With, Authorization" + response["Access-Control-Allow-Headers"] = ( + "Accept, Origin, X-Requested-With, Authorization" + ) response["Content-Type"] = "application/json" return response @@ -222,7 +223,16 @@ def add_cors_headers(response): def set_api_permissions_for_user(user): """Sets the permissions to allow a ``user`` to access the APU.""" - models = [UserProfile, XForm, MergedXForm, Project, Team, OrganizationProfile, Note] + models = [ + UserProfile, + XForm, + MergedXForm, + Project, + Team, + OrganizationProfile, + Note, + EntityList, + ] for model in models: for perm in get_perms_for_model(model): assign_perm(f"{perm.content_type.app_label}.{perm.codename}", user) diff --git a/requirements/base.pip b/requirements/base.pip index bfc9741ddf..669f6c64e1 100644 --- a/requirements/base.pip 
+++ b/requirements/base.pip @@ -235,7 +235,7 @@ oauthlib==3.2.2 # requests-oauthlib ona-oidc @ git+https://github.com/onaio/ona-oidc.git@pytz-deprecated # via -r requirements/base.in -openpyxl==3.0.9 +openpyxl==3.1.2 # via # dataflows-tabulator # onadata @@ -288,7 +288,7 @@ pytz==2024.1 # via # django-query-builder # fleming -pyxform==1.12.2 +pyxform==2.0.3 # via # onadata # pyfloip diff --git a/requirements/dev.pip b/requirements/dev.pip index e944503f2f..69d5643e7a 100644 --- a/requirements/dev.pip +++ b/requirements/dev.pip @@ -301,7 +301,7 @@ oauthlib==3.2.2 # requests-oauthlib ona-oidc @ git+https://github.com/onaio/ona-oidc.git@pytz-deprecated # via -r requirements/base.in -openpyxl==3.0.9 +openpyxl==3.1.2 # via # dataflows-tabulator # onadata @@ -413,7 +413,7 @@ pytz==2024.1 # via # django-query-builder # fleming -pyxform==1.12.2 +pyxform==2.0.3 # via # onadata # pyfloip diff --git a/setup.cfg b/setup.cfg index 72cd5cf995..dfec5bba2b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -68,7 +68,7 @@ install_requires = dict2xml lxml>=4.9.1 #pyxform - pyxform==1.12.2 + pyxform==2.0.3 #memcached support pylibmc python-memcached From 8f728de8df3145bdd45c2cf8f7be8dc03cd40fa1 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Wed, 12 Jun 2024 15:45:49 +0300 Subject: [PATCH 226/270] Separate ecr image build to use arm64 runner for arm64 builds --- .../ecr-image-build-w-arm-runner.yml | 205 ++++++++++++++++++ 1 file changed, 205 insertions(+) create mode 100644 .github/workflows/ecr-image-build-w-arm-runner.yml diff --git a/.github/workflows/ecr-image-build-w-arm-runner.yml b/.github/workflows/ecr-image-build-w-arm-runner.yml new file mode 100644 index 0000000000..60a6101210 --- /dev/null +++ b/.github/workflows/ecr-image-build-w-arm-runner.yml @@ -0,0 +1,205 @@ +--- +name: AWS ECR Build Image with ARM Runner + +on: + release: + types: + - "released" + push: + - "main" + - "*-rc" + tags: + - "v*" + +jobs: + build: + strategy: + fail-fast: false + matrix: + platforms: + - 
[linux/amd64, ubuntu-latest] + - [linux/arm64, ubuntu-arm64-runner] + runs-on: ${{ matrix.platforms[1] }} + steps: + - name: Prepare + run: | + platform=${{ matrix.platforms[0] }} + echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV + + - name: Checkout + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: eu-central-1 + + - name: Login to Amazon ECR + id: login-ecr + uses: aws-actions/amazon-ecr-login@v2 + + - name: Docker meta + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ steps.login-ecr.outputs.registry }}/onaio/onadata + tags: | + type=ref,event=branch + type=ref,event=tag + + - name: Setup SSH Agent and add Github to known hosts + env: + SSH_AUTH_SOCK: /tmp/ssh-agent.sock + run: | + ssh-agent -a $SSH_AUTH_SOCK >> /dev/null + ssh-add - <<< "${{ secrets.SSH_PRIVATE_KEY }}" + mkdir -p ~/.ssh + ssh-keyscan github.com > ~/.ssh/known_hosts + + - name: Get the version + id: get-version + if: github.event_name != 'push' + run: echo "version=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV + + - name: Get the branch name + id: get-branch-name + if: github.event_name == 'push' || github.event_name == 'workflow_dispatch' + run: echo "version=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV + + - name: (Ubuntu) Build and push + id: docker-build-ubuntu + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/onadata-uwsgi/Dockerfile.ubuntu + platforms: ${{ matrix.platforms[0] }} + cache-from: type=registry,ref=${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ env.version || github.ref_name }} + cache-to: type=inline + ssh: | + default=/tmp/ssh-agent.sock + build-args: | + optional_packages=PyYAML django-redis ${{ secrets.ECR_OPTIONAL_PACKAGES }} + push: true + labels: ${{ steps.meta.outputs.labels }} + provenance: false + outputs: type=image,name=${{ steps.login-ecr.outputs.registry }}/onaio/onadata,push-by-digest=true,name-canonical=true,push=true + - + name: Export digest + run: | + mkdir -p /tmp/digests + digest="${{ steps.docker-build-ubuntu.outputs.digest }}" + touch "/tmp/digests/${digest#sha256:}" + - + name: Upload digest + uses: actions/upload-artifact@v4 + with: + name: digests-${{ env.PLATFORM_PAIR }} + path: /tmp/digests/* + if-no-files-found: error + retention-days: 1 + + merge: + runs-on: ubuntu-latest + needs: + - build + steps: + - name: Download digests + uses: actions/download-artifact@v4 + with: + path: /tmp/digests + pattern: digests-* + merge-multiple: true + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: eu-central-1 + + - name: Login to Amazon ECR + id: login-ecr + uses: aws-actions/amazon-ecr-login@v2 + + - name: Docker meta + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ steps.login-ecr.outputs.registry }}/onaio/onadata + tags: | + type=ref,event=branch + type=ref,event=tag + + - name: Create manifest list and push + working-directory: /tmp/digests + run: | + docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) 
| join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ + $(printf '${{ steps.login-ecr.outputs.registry }}/onaio/onadata@sha256:%s ' *) + + - name: Inspect image + run: | + docker buildx imagetools inspect ${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ steps.meta.outputs.version }} + + - name: Run Trivy vulnerability scanner + uses: aquasecurity/trivy-action@master + with: + image-ref: ${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ steps.meta.outputs.version }} + format: 'sarif' + output: 'trivy-results.sarif' + + - name: Upload Trivy scan result to Github security lab + uses: github/codeql-action/upload-sarif@v3 + with: + sarif_file: 'trivy-results.sarif' + continue-on-error: true + + - name: Run Trivy vulnerability scanner for Slack + uses: aquasecurity/trivy-action@master + with: + image-ref: ${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ steps.meta.outputs.version }} + format: json + output: 'trivy-results.json' + + - name: Create summary of trivy issues + run: | + summary=$(jq -r '.Results[] | select(.Vulnerabilities) | .Vulnerabilities | group_by(.Severity) | map({Severity: .[0].Severity, Count: length}) | .[] | [.Severity, .Count] | join(": ")' trivy-results.json | awk 'NR > 1 { printf(" | ") } {printf "%s",$0}') + if [ -z $summary ] + then + summary="0 Issues" + fi + echo "SUMMARY=$summary" >> $GITHUB_ENV + + - name: Send Slack Notification + uses: slackapi/slack-github-action@v1.26.0 + with: + payload: | + { + "text": "Trivy scan results for ${{ steps.meta.outputs.version }}", + "blocks": [ + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": "[Ona Data] Trivy scan results: ${{ env.SUMMARY }}" + } + }, + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": "View scan results: https://github.com/${{ github.repository }}/security/code-scanning?query=branch:${{ env.version || github.ref_name }}+is:open++" + } + } + ] + } + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} + SLACK_WEBHOOK_TYPE: 
INCOMING_WEBHOOK From 6fa567c5f17c67cf9b465e9cdf0fe4f3c44a1cf5 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 26 Jun 2024 09:10:48 +0300 Subject: [PATCH 227/270] Tag release v4.3.0 Signed-off-by: Kipchirchir Sigei --- CHANGES.rst | 15 +++++++++++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 17 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 18b7371cd1..9d0572b3e1 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,21 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v4.3.0(2024-06-26) +------------------ +- Separate ecr image build to use arm64 runner for arm64 builds + `PR #2620 ` + [@kelvin-muchiri] +- Add Entity updates + `PR #2592 ` + [@kelvin-muchiri] +- Update messaging endpoint docs + `PR #2616 ` + [@KipSigei] +- Fix ValueError for User instance primary key in password validation + `PR #2613 ` + [@KipSigei] + v4.2.2(2024-06-06) ------------------ - Add support for merged dataset geojson format on endpoint /api/v1/data/ diff --git a/onadata/__init__.py b/onadata/__init__.py index 1e81ec7589..51ab971548 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "4.2.2" +__version__ = "4.3.0" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index dfec5bba2b..e611b85adc 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 4.2.2 +version = 4.3.0 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From ad56227970dcce168b9cb86d999f2542a2837573 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Thu, 27 Jun 2024 13:11:36 +0300 Subject: [PATCH 228/270] fix 0 integer, decimal values parsed as string when saving json (#2621) --- onadata/apps/logger/models/instance.py | 8 +-- .../apps/logger/tests/models/test_instance.py | 67 
+++++++++++++++++++ 2 files changed, 69 insertions(+), 6 deletions(-) diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index e84aafe019..6543423a06 100644 --- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -421,17 +421,13 @@ def numeric_converter(self, json_dict, numeric_fields=None): numeric_fields = get_numeric_fields(self.xform) for key, value in json_dict.items(): if isinstance(value, str) and key in numeric_fields: - converted_value = numeric_checker(value) - if converted_value: - json_dict[key] = converted_value + json_dict[key] = numeric_checker(value) elif isinstance(value, dict): json_dict[key] = self.numeric_converter(value, numeric_fields) elif isinstance(value, list): for k, v in enumerate(value): if isinstance(v, str) and key in numeric_fields: - converted_value = numeric_checker(v) - if converted_value: - json_dict[key] = converted_value + json_dict[key] = numeric_checker(v) elif isinstance(v, dict): value[k] = self.numeric_converter(v, numeric_fields) return json_dict diff --git a/onadata/apps/logger/tests/models/test_instance.py b/onadata/apps/logger/tests/models/test_instance.py index 9d4e953fe7..dc160c322b 100644 --- a/onadata/apps/logger/tests/models/test_instance.py +++ b/onadata/apps/logger/tests/models/test_instance.py @@ -1005,3 +1005,70 @@ def test_create_entity_exists(self): "label": "300cm purpleheart", }, ) + + def test_parse_numbers(self): + """Integers and decimals are parsed correctly""" + md = """ + | survey | + | | type | name | label | + | | integer | num_integer | I am an integer| + | | decimal | num_decimal | I am a decimal | + """ + self._publish_markdown(md, self.user) + xform = XForm.objects.order_by("-pk").first() + xml = ( + '' + "" + "bd4278ad2fd8418fba5e6a822e2623e7" + "" + "4" + "5.5" + "" + "uuid:49d75027-405a-4e08-be71-db9a75c70fc2" + "" + "" + ) + instance = Instance.objects.create(xml=xml, xform=xform) + instance.refresh_from_db() + + 
self.assertEqual(instance.json["num_integer"], 4) + self.assertEqual(instance.json["num_decimal"], 5.5) + + # Test 0 + xml = ( + '' + "" + "bd4278ad2fd8418fba5e6a822e2623e7" + "" + "0" + "0.0" + "" + "uuid:59d75027-405a-4e08-be71-db9a75c70fc2" + "" + "" + ) + instance = Instance.objects.create(xml=xml, xform=xform) + instance.refresh_from_db() + self.assertEqual(instance.json["num_integer"], 0) + self.assertEqual(instance.json["num_decimal"], 0.0) + + # Test negatives + xml = ( + '' + "" + "bd4278ad2fd8418fba5e6a822e2623e7" + "" + "-1" + "-1.0" + "" + "uuid:69d75027-405a-4e08-be71-db9a75c70fc2" + "" + "" + ) + instance = Instance.objects.create(xml=xml, xform=xform) + instance.refresh_from_db() + self.assertEqual(instance.json["num_integer"], -1) + self.assertEqual(instance.json["num_decimal"], -1.0) From 2c2b5a03b5c4d357d82d6b8e7243899d35137fba Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Fri, 28 Jun 2024 12:16:39 +0300 Subject: [PATCH 229/270] bump version to v4.3.1 (#2626) --- CHANGES.rst | 7 +++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 9d0572b3e1..c0a2fcdc2c 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,13 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v4.3.1(2024-06-28) +------------------ +- Fix 0 integer, decimal values parsed as string when saving json + `PR #2621 ` + [@kelvin-muchiri] + + v4.3.0(2024-06-26) ------------------ - Separate ecr image build to use arm64 runner for arm64 builds diff --git a/onadata/__init__.py b/onadata/__init__.py index 51ab971548..c9932d2532 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "4.3.0" +__version__ = "4.3.1" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index e611b85adc..0ff9bd4420 100644 --- a/setup.cfg +++ b/setup.cfg @@ 
-1,6 +1,6 @@ [metadata] name = onadata -version = 4.3.0 +version = 4.3.1 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 2a472d495ba47ca297e27dc9b199d1a958965a5d Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Mon, 1 Jul 2024 10:50:22 +0300 Subject: [PATCH 230/270] Create and delete EntityList via API (#2625) * add support for deleting EntityList * rename migration * update docs * add API support for create EntityList * limit entitylist creation to users who have special perm * fix typo * squash migrations * fix lint warning line-too-ling * enhance create EntityList permissions * address lint warning raise-missing-from * add code comment * refactor code * update validation error message * uncomment tests * add test cases for validation error messages * refactor code * update method docstring --- docs/entities.md | 57 ++++ onadata/apps/api/permissions.py | 10 - .../viewsets/test_entity_list_viewset.py | 317 +++++++++++++++++- .../apps/api/viewsets/entity_list_viewset.py | 48 ++- .../0019_alter_project_options_and_more.py | 44 +++ onadata/apps/logger/models/entity_list.py | 17 + onadata/apps/logger/models/project.py | 1 + .../logger/tests/models/test_entity_list.py | 33 +- onadata/libs/permissions.py | 3 + onadata/libs/serializers/entity_serializer.py | 53 ++- 10 files changed, 558 insertions(+), 25 deletions(-) create mode 100644 onadata/apps/logger/migrations/0019_alter_project_options_and_more.py diff --git a/docs/entities.md b/docs/entities.md index 13edbcd1a7..3f0e99c37e 100644 --- a/docs/entities.md +++ b/docs/entities.md @@ -9,6 +9,46 @@ The following endpoints provides access to Entities related data: Where: - `entity_list_id` - An EntityList's unique identifier - `entity_id` - An Entity's unique identifier +## Create EntityList + +`POST /api/v2/entity-lists` + +This endpoint is used to create a single EntityList dataset within a project. 
Entities for the dataset can then be created from a form or via the API. + +EntityList name must not include `.` or start with `__`. + +EntityList name is unique per project. + +The EntityList by default has no properties. + +**Request** + +```sh +curl -X POST "https://api.ona.io/api/v2/entity-lists" \ + -H "Authorization: Token ACCESS_TOKEN" \ + -H "Content-Type: application/json" \ + -d '{ + "name": "trees", + "project": "2", + }' +``` + +**Response** + +Staus: `201 Created` + +Body: + +``` +{ + "id":1, + "name":"trees", + "project":2, + "date_created":"2024-06-27T07:35:53.451077Z", + "date_modified":"2024-06-27T07:35:53.451091Z" +} +``` + ## Get EntityLists `GET /api/v2/entity-lists` @@ -125,6 +165,23 @@ Body: } ``` +## Delete EntityList + +`DELETE api/v2/entity-lists/` + +This endpoint is used to delete a single EntityList dataset + +**Request** + +```sh +curl -X DELETE https://api.ona.io/api/v2/entity-lists/1 \ +-H "Authorization: Token ACCESS_TOKEN" +``` + +**Response** + +Status: `204 No Content` + ## Get Entities `GET api/v2/entity-lists//entities` diff --git a/onadata/apps/api/permissions.py b/onadata/apps/api/permissions.py index 195a98e5a4..07485f9903 100644 --- a/onadata/apps/api/permissions.py +++ b/onadata/apps/api/permissions.py @@ -558,13 +558,3 @@ def has_permission(self, request, view): return False return True - - -class EntityListPermission(DjangoObjectPermissionsAllowAnon): - """Permission for EntityList""" - - def has_permission(self, request, view): - if request.user.is_anonymous: - return True - - return super().has_permission(request, view) diff --git a/onadata/apps/api/tests/viewsets/test_entity_list_viewset.py b/onadata/apps/api/tests/viewsets/test_entity_list_viewset.py index 44820a2565..8c05092e28 100644 --- a/onadata/apps/api/tests/viewsets/test_entity_list_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_entity_list_viewset.py @@ -14,10 +14,209 @@ from onadata.apps.logger.models import Entity, EntityHistory, EntityList, Project 
from onadata.libs.models.share_project import ShareProject from onadata.libs.permissions import ROLES, OwnerRole +from onadata.libs.utils.user_auth import get_user_default_project + + +@override_settings(TIME_ZONE="UTC") +class CreateEntityListTestCase(TestAbstractViewSet): + """Tests for creating an EntityList""" + + def setUp(self): + super().setUp() + + self.view = EntityListViewSet.as_view({"post": "create"}) + self.project = get_user_default_project(self.user) + self.data = {"name": "trees", "project": self.project.pk} + + def test_create(self): + """EntityList is created""" + request = self.factory.post("/", data=self.data, **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 201) + entity_list = EntityList.objects.first() + self.assertEqual( + response.data, + { + "id": entity_list.pk, + "name": "trees", + "project": self.project.pk, + "date_created": entity_list.date_created.isoformat().replace( + "+00:00", "Z" + ), + "date_modified": entity_list.date_modified.isoformat().replace( + "+00:00", "Z" + ), + }, + ) + + def test_auth_required(self): + """Authentication is required""" + request = self.factory.post("/", data={}) + response = self.view(request) + self.assertEqual(response.status_code, 401) + num_datasets = EntityList.objects.count() + self.assertEqual(num_datasets, 0) + + def test_name_required(self): + """`name` field is required""" + post_data = {"project": self.project.pk} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 400) + self.assertEqual(str(response.data["name"][0]), "This field is required.") + num_datasets = EntityList.objects.count() + self.assertEqual(num_datasets, 0) + + def test_project_required(self): + """`project` field is required""" + post_data = {"name": "trees"} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 400) + 
self.assertEqual(str(response.data["project"][0]), "This field is required.") + num_datasets = EntityList.objects.count() + self.assertEqual(num_datasets, 0) + + def test_name_valid(self): + """`name` should be valid""" + # name should not start with __ + post_data = {"name": "__trees", "project": self.project.pk} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 400) + self.assertEqual( + str(response.data["name"][0]), "May not start with reserved prefix __." + ) + num_datasets = EntityList.objects.count() + self.assertEqual(num_datasets, 0) + + # name should not include periods(.) + # period start + post_data = {"name": ".trees", "project": self.project.pk} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 400) + self.assertEqual(str(response.data["name"][0]), "May not include periods.") + num_datasets = EntityList.objects.count() + self.assertEqual(num_datasets, 0) + # period middle + post_data = {"name": "tre.es", "project": self.project.pk} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 400) + self.assertEqual(str(response.data["name"][0]), "May not include periods.") + num_datasets = EntityList.objects.count() + self.assertEqual(num_datasets, 0) + # period end + post_data = {"name": "trees.", "project": self.project.pk} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 400) + self.assertEqual(str(response.data["name"][0]), "May not include periods.") + num_datasets = EntityList.objects.count() + self.assertEqual(num_datasets, 0) + + # name should not exceed 255 characters + post_data = {"name": "x" * 256, "project": self.project.pk} + request = self.factory.post("/", data=post_data, **self.extra) + response = 
self.view(request) + self.assertEqual(response.status_code, 400) + self.assertEqual( + str(response.data["name"][0]), + "Ensure this field has no more than 255 characters.", + ) + num_datasets = EntityList.objects.count() + self.assertEqual(num_datasets, 0) + + post_data = {"name": "x" * 255, "project": self.project.pk} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 201) + num_datasets = EntityList.objects.count() + self.assertEqual(num_datasets, 1) + + def test_project_valid(self): + """`project` should be a valid project""" + post_data = {"name": "trees", "project": sys.maxsize} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 400) + self.assertEqual( + str(response.data["project"][0]), + f'Invalid pk "{sys.maxsize}" - object does not exist.', + ) + num_datasets = EntityList.objects.count() + self.assertEqual(num_datasets, 0) + + def test_object_permissions(self): + """User must have object create level permissions""" + alice_data = { + "username": "alice", + "email": "aclie@example.com", + "password1": "password12345", + "password2": "password12345", + "first_name": "Alice", + "last_name": "Hughes", + } + alice_profile = self._create_user_profile(alice_data) + extra = {"HTTP_AUTHORIZATION": f"Token {alice_profile.user.auth_token}"} + + # Public project, project NOT shared with user + self.project.shared = True + self.project.save() + request = self.factory.post("/", data=self.data, **extra) + response = self.view(request) + self.assertEqual(response.status_code, 403) + + # Private project, project NOT shared with user + self.project.shared = False + self.project.save() + request = self.factory.post("/", data=self.data, **extra) + response = self.view(request) + self.assertEqual(response.status_code, 400) + self.assertEqual( + str(response.data["project"][0]), + f'Invalid pk 
"{self.project.pk}" - object does not exist.', + ) + + # Project shared with user + for role in ROLES: + EntityList.objects.all().delete() + ShareProject(self.project, "alice", role).save() + request = self.factory.post("/", data=self.data, **extra) + response = self.view(request) + + if role in ["owner", "manager"]: + self.assertEqual(response.status_code, 201) + + else: + self.assertEqual(response.status_code, 403) + + def test_name_unique(self): + """`name` should be unique per project""" + EntityList.objects.create(name="trees", project=self.project) + request = self.factory.post("/", data=self.data, **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 400) + self.assertEqual( + str(response.data["non_field_errors"][0]), + "The fields name, project must make a unique set.", + ) + project = Project.objects.create( + name="Other project", + created_by=self.user, + organization=self.user, + ) + post_data = {"name": "trees", "project": project.pk} + request = self.factory.post("/", data=post_data, **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 201) + num_datasets = EntityList.objects.count() + self.assertEqual(num_datasets, 2) class GetEntityListArrayTestCase(TestAbstractViewSet): - """Tests for GET all EntityLists""" + """Tests for getting an array of EntityList""" def setUp(self): super().setUp() @@ -167,6 +366,21 @@ def test_object_permissions(self): self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data), 0) + def test_soft_deleted_excluded(self): + """Soft deleted items are excluded""" + request = self.factory.get("/", **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 3) + + for entity_list in EntityList.objects.all(): + entity_list.soft_delete() + + request = self.factory.get("/", **self.extra) + response = self.view(request) + self.assertEqual(response.status_code, 200) + 
self.assertEqual(len(response.data), 0) + @override_settings(TIME_ZONE="UTC") class GetSingleEntityListTestCase(TestAbstractViewSet): @@ -283,6 +497,100 @@ def test_object_permissions(self): else: self.assertEqual(response.status_code, 404) + def test_soft_deleted(self): + """Soft deleted dataset cannot be retrieved""" + self.entity_list.soft_delete() + request = self.factory.get("/", **self.extra) + response = self.view(request, pk=self.entity_list.pk) + self.assertEqual(response.status_code, 404) + + +class DeleteEntityListTestCase(TestAbstractViewSet): + """Tests for deleting a single EntityList""" + + def setUp(self): + super().setUp() + + self.view = EntityListViewSet.as_view({"delete": "destroy"}) + self.project = get_user_default_project(self.user) + self.entity_list = EntityList.objects.create(name="trees", project=self.project) + OwnerRole.add(self.user, self.entity_list) + + @patch("django.utils.timezone.now") + def test_delete(self, mock_now): + """Delete EntityList works""" + mocked_date = datetime(2024, 6, 25, 11, 11, 0, tzinfo=timezone.utc) + mock_now.return_value = mocked_date + request = self.factory.delete("/", **self.extra) + response = self.view(request, pk=self.entity_list.pk) + self.assertEqual(response.status_code, 204) + self.entity_list.refresh_from_db() + self.assertEqual(self.entity_list.deleted_at, mocked_date) + self.assertEqual(self.entity_list.deleted_by, self.user) + self.assertEqual( + self.entity_list.name, f'trees{mocked_date.strftime("-deleted-at-%s")}' + ) + + def test_authentication_required(self): + """Anonymous user cannot delete EntityList""" + # Private EntityList + request = self.factory.delete("/") + response = self.view(request, pk=self.entity_list.pk) + self.assertEqual(response.status_code, 401) + # Public EntityList + self.project.shared = True + self.project.save() + request = self.factory.delete("/") + response = self.view(request, pk=self.entity_list.pk) + self.assertEqual(response.status_code, 401) + + def 
test_invalid_entity_list(self): + """Invalid EntityList is handled""" + request = self.factory.delete("/", **self.extra) + response = self.view(request, pk=sys.maxsize) + self.assertEqual(response.status_code, 404) + + def test_object_permissions(self): + """User must have delete level permission""" + alice_data = { + "username": "alice", + "email": "aclie@example.com", + "password1": "password12345", + "password2": "password12345", + "first_name": "Alice", + "last_name": "Hughes", + } + alice_profile = self._create_user_profile(alice_data) + extra = {"HTTP_AUTHORIZATION": f"Token {alice_profile.user.auth_token}"} + + def restore_dataset(): + self.entity_list.deleted_at = None + self.entity_list.deleted_by = None + self.entity_list.save() + + for role in ROLES: + restore_dataset() + ShareProject(self.project, "alice", role).save() + request = self.factory.delete("/", **extra) + response = self.view(request, pk=self.entity_list.pk) + + if role not in ["owner", "manager"]: + self.assertEqual(response.status_code, 404) + + else: + self.assertEqual(response.status_code, 204) + + def test_already_soft_deleted(self): + """Soft deleted EntityList cannot be deleted""" + deleted_at = timezone.now() + self.entity_list.deleted_at = deleted_at + self.entity_list.save() + request = self.factory.delete("/", **self.extra) + response = self.view(request, pk=self.entity_list.pk) + self.entity_list.refresh_from_db() + self.assertEqual(response.status_code, 404) + self.assertEqual(self.entity_list.deleted_at, deleted_at) + @override_settings(TIME_ZONE="UTC") class GetEntitiesTestCase(TestAbstractViewSet): @@ -645,11 +953,13 @@ def test_label_empty(self): request = self.factory.patch("/", data=data, format="json", **self.extra) response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) self.assertEqual(response.status_code, 400) + self.assertEqual(str(response.data["label"][0]), "This field may not be blank.") # Null data = {"label": None} request = 
self.factory.patch("/", data=data, format="json", **self.extra) response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) self.assertEqual(response.status_code, 400) + self.assertEqual(str(response.data["label"][0]), "This field may not be null.") def test_unset_property(self): """Unsetting a property value works""" @@ -682,13 +992,14 @@ def test_invalid_property(self): request = self.factory.patch("/", data=data, format="json", **self.extra) response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) self.assertEqual(response.status_code, 400) + self.assertEqual(str(response.data["data"][0]), "Invalid dataset property foo.") def test_anonymous_user(self): """Anonymous user cannot update Entity""" # Anonymous user cannot update private Entity request = self.factory.patch("/", data={}, format="json") response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) - self.assertEqual(response.status_code, 404) + self.assertEqual(response.status_code, 401) # Anonymous user cannot update public Entity self.project.shared = True self.project.save() @@ -790,7 +1101,7 @@ def test_anonymous_user(self): # Anonymous user cannot delete private Entity request = self.factory.delete("/") response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) - self.assertEqual(response.status_code, 404) + self.assertEqual(response.status_code, 401) # Anonymous user cannot delete public Entity self.project.shared = True self.project.save() diff --git a/onadata/apps/api/viewsets/entity_list_viewset.py b/onadata/apps/api/viewsets/entity_list_viewset.py index 7812f98e86..617046dcd9 100644 --- a/onadata/apps/api/viewsets/entity_list_viewset.py +++ b/onadata/apps/api/viewsets/entity_list_viewset.py @@ -3,20 +3,28 @@ from rest_framework import status from rest_framework.decorators import action from rest_framework.response import Response -from rest_framework.viewsets import ReadOnlyModelViewSet +from 
rest_framework.viewsets import GenericViewSet +from rest_framework.mixins import ( + CreateModelMixin, + RetrieveModelMixin, + DestroyModelMixin, + ListModelMixin, +) -from onadata.apps.api.permissions import EntityListPermission +from onadata.apps.api.permissions import DjangoObjectPermissionsAllowAnon from onadata.apps.api.tools import get_baseviewset_class from onadata.apps.logger.models import Entity, EntityList from onadata.libs.filters import AnonUserEntityListFilter, EntityListProjectFilter from onadata.libs.mixins.cache_control_mixin import CacheControlMixin from onadata.libs.mixins.etags_mixin import ETagsMixin from onadata.libs.pagination import StandardPageNumberPagination +from onadata.libs.permissions import CAN_ADD_PROJECT_ENTITYLIST from onadata.libs.serializers.entity_serializer import ( EntityArraySerializer, EntitySerializer, EntityListSerializer, + EntityListArraySerializer, EntityListDetailSerializer, ) @@ -30,10 +38,14 @@ class EntityListViewSet( CacheControlMixin, ETagsMixin, BaseViewset, - ReadOnlyModelViewSet, + GenericViewSet, + ListModelMixin, + CreateModelMixin, + RetrieveModelMixin, + DestroyModelMixin, ): queryset = ( - EntityList.objects.all() + EntityList.objects.filter(deleted_at__isnull=True) .order_by("pk") .prefetch_related( "registration_forms", @@ -41,12 +53,15 @@ class EntityListViewSet( ) ) serializer_class = EntityListSerializer - permission_classes = (EntityListPermission,) + permission_classes = (DjangoObjectPermissionsAllowAnon,) pagination_class = StandardPageNumberPagination filter_backends = (AnonUserEntityListFilter, EntityListProjectFilter) def get_serializer_class(self): - """Override get_serializer_class""" + """Override `get_serializer_class` method""" + if self.action == "list": + return EntityListArraySerializer + if self.action == "retrieve": return EntityListDetailSerializer @@ -59,7 +74,7 @@ def get_serializer_class(self): return super().get_serializer_class() def get_serializer_context(self): - """Override 
get_serializer_context""" + """Override `get_serializer_context` method""" context = super().get_serializer_context() if self.action == "entities": @@ -115,3 +130,22 @@ def entities(self, request, *args, **kwargs): serializer = self.get_serializer(entity_qs, many=True) return Response(serializer.data) + + def perform_destroy(self, instance): + """Override `perform_detroy` method""" + instance.soft_delete(self.request.user) + + def create(self, request, *args, **kwargs): + """Override `create` method""" + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + project = serializer.validated_data["project"] + + if not self.request.user.has_perm(CAN_ADD_PROJECT_ENTITYLIST, project): + return Response(status=status.HTTP_403_FORBIDDEN) + + self.perform_create(serializer) + headers = self.get_success_headers(serializer.data) + return Response( + serializer.data, status=status.HTTP_201_CREATED, headers=headers + ) diff --git a/onadata/apps/logger/migrations/0019_alter_project_options_and_more.py b/onadata/apps/logger/migrations/0019_alter_project_options_and_more.py new file mode 100644 index 0000000000..0c30bdbab8 --- /dev/null +++ b/onadata/apps/logger/migrations/0019_alter_project_options_and_more.py @@ -0,0 +1,44 @@ +# Generated by Django 4.2.11 on 2024-06-27 10:19 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ("logger", "0018_entityhistory_entitylistgroupobjectpermission_and_more"), + ] + + operations = [ + migrations.AlterModelOptions( + name="project", + options={ + "permissions": ( + ("add_project_xform", "Can add xform to project"), + ("report_project_xform", "Can make submissions to the project"), + ("transfer_project", "Can transfer project to different owner"), + ("can_export_project_data", "Can export data in project"), + 
("view_project_all", "Can view all associated data"), + ("view_project_data", "Can view submitted data"), + ("add_project_entitylist", "Can add entitylist to project"), + ) + }, + ), + migrations.AddField( + model_name="entitylist", + name="deleted_at", + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name="entitylist", + name="deleted_by", + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, + ), + ), + ] diff --git a/onadata/apps/logger/models/entity_list.py b/onadata/apps/logger/models/entity_list.py index d26daef08c..c0af121c50 100644 --- a/onadata/apps/logger/models/entity_list.py +++ b/onadata/apps/logger/models/entity_list.py @@ -2,15 +2,19 @@ EntityList model """ +from django.contrib.auth import get_user_model from django.contrib.contenttypes.fields import GenericRelation from django.db import models from django.utils.translation import gettext_lazy as _ +from django.utils import timezone from guardian.models import UserObjectPermissionBase, GroupObjectPermissionBase from onadata.apps.logger.models.project import Project from onadata.libs.models import BaseModel +User = get_user_model() + class EntityList(BaseModel): """The dataset where each entity will be save to @@ -30,6 +34,8 @@ class EntityList(BaseModel): num_entities = models.IntegerField(default=0) last_entity_update_time = models.DateTimeField(blank=True, null=True) exports = GenericRelation("viewer.GenericExport") + deleted_at = models.DateTimeField(null=True, blank=True) + deleted_by = models.ForeignKey(User, null=True, on_delete=models.SET_NULL) class Meta(BaseModel.Meta): app_label = "logger" @@ -61,6 +67,17 @@ def properties(self) -> list[str]: return list(dataset_properties) + def soft_delete(self, deleted_by=None): + """Soft delete EntityList""" + if self.deleted_at is None: + deletion_time = timezone.now() + deletion_suffix = deletion_time.strftime("-deleted-at-%s") + self.deleted_at = 
deletion_time + self.deleted_by = deleted_by + self.name += deletion_suffix + self.name = self.name[:255] # Only first 255 characters + self.save() + class EntityListUserObjectPermission(UserObjectPermissionBase): """Guardian model to create direct foreign keys.""" diff --git a/onadata/apps/logger/models/project.py b/onadata/apps/logger/models/project.py index 7ec858086b..7415751741 100644 --- a/onadata/apps/logger/models/project.py +++ b/onadata/apps/logger/models/project.py @@ -132,6 +132,7 @@ class Meta: ("can_export_project_data", "Can export data in project"), ("view_project_all", "Can view all associated data"), ("view_project_data", "Can view submitted data"), + ("add_project_entitylist", "Can add entitylist to project"), ) def __str__(self): diff --git a/onadata/apps/logger/tests/models/test_entity_list.py b/onadata/apps/logger/tests/models/test_entity_list.py index ec7c3d9d7a..70b6a32521 100644 --- a/onadata/apps/logger/tests/models/test_entity_list.py +++ b/onadata/apps/logger/tests/models/test_entity_list.py @@ -1,11 +1,11 @@ """Tests for module onadata.apps.logger.models.entity_list""" -import pytz import os from datetime import datetime from unittest.mock import patch from django.db.utils import IntegrityError, DataError +from django.utils import timezone from onadata.apps.main.tests.test_base import TestBase from onadata.apps.logger.models import EntityList, Project @@ -19,7 +19,7 @@ def setUp(self) -> None: super().setUp() self.project = get_user_default_project(self.user) - self.mocked_now = datetime(2023, 11, 8, 13, 17, 0, tzinfo=pytz.utc) + self.mocked_now = datetime(2023, 11, 8, 13, 17, 0, tzinfo=timezone.utc) self.fixture_dir = os.path.join(self.this_directory, "fixtures", "entities") @patch("django.utils.timezone.now") @@ -126,3 +126,32 @@ def test_permissions_applied_async(self, mock_set_perms): """Permissions are applied asynchronously""" entity_list = EntityList.objects.create(name="trees", project=self.project) 
mock_set_perms.assert_called_once_with(entity_list.pk) + + def test_soft_delete(self): + """EntityList is soft deleted""" + with patch("django.utils.timezone.now") as mock_now: + mock_now.return_value = self.mocked_now + entity_list = EntityList.objects.create(name="trees", project=self.project) + entity_list.soft_delete(self.user) + entity_list.refresh_from_db() + self.assertEqual(entity_list.deleted_at, self.mocked_now) + self.assertEqual(entity_list.deleted_by, self.user) + self.assertEqual( + entity_list.name, f'trees{self.mocked_now.strftime("-deleted-at-%s")}' + ) + + # Try soft deleting soft deleted dataset + entity_list.soft_delete(self.user) + entity_list.refresh_from_db() + self.assertEqual(entity_list.deleted_at, self.mocked_now) + # deleted_by is optional + entity_list = EntityList.objects.create(name="trees", project=self.project) + entity_list.soft_delete() + entity_list.refresh_from_db() + self.assertIsNone(entity_list.deleted_by) + # updated name is truncated if more than 255 characters + dataset_name = "x" * 255 + entity_list = EntityList.objects.create(name=dataset_name, project=self.project) + entity_list.soft_delete() + entity_list.refresh_from_db() + self.assertEqual(entity_list.name, dataset_name) diff --git a/onadata/libs/permissions.py b/onadata/libs/permissions.py index 6638208ecd..a27e365263 100644 --- a/onadata/libs/permissions.py +++ b/onadata/libs/permissions.py @@ -70,6 +70,7 @@ CAN_ADD_PROJECT_XFORM = "add_project_xform" CAN_ADD_SUBMISSIONS_PROJECT = "report_project_xform" CAN_EXPORT_PROJECT = "can_export_project_data" +CAN_ADD_PROJECT_ENTITYLIST = "add_project_entitylist" # Data dictionary permissions CAN_ADD_DATADICTIONARY = "add_datadictionary" @@ -330,6 +331,7 @@ class ManagerRole(Role): CAN_VIEW_PROJECT, CAN_VIEW_PROJECT_ALL, CAN_VIEW_PROJECT_DATA, + CAN_ADD_PROJECT_ENTITYLIST, ], UserProfile: [ CAN_ADD_PROJECT_TO_PROFILE, @@ -399,6 +401,7 @@ class OwnerRole(Role): CAN_VIEW_PROJECT, CAN_VIEW_PROJECT_ALL, CAN_VIEW_PROJECT_DATA, + 
CAN_ADD_PROJECT_ENTITYLIST, ], UserProfile: [ CAN_ADD_PROJECT_TO_PROFILE, diff --git a/onadata/libs/serializers/entity_serializer.py b/onadata/libs/serializers/entity_serializer.py index 32963b55ff..66583f5ae7 100644 --- a/onadata/libs/serializers/entity_serializer.py +++ b/onadata/libs/serializers/entity_serializer.py @@ -1,5 +1,7 @@ from django.utils.translation import gettext as _ +from pyxform.constants import ENTITIES_RESERVED_PREFIX + from rest_framework import serializers from rest_framework.reverse import reverse @@ -11,11 +13,56 @@ RegistrationForm, XForm, ) +from onadata.libs.permissions import CAN_VIEW_PROJECT -class EntityListSerializer(serializers.HyperlinkedModelSerializer): +class EntityListSerializer(serializers.ModelSerializer): """Default Serializer for EntityList""" + def validate_name(self, value: str) -> str: + """Validate `name` field + + Uses the same validation rules as PyXForm rules for dataset name + """ + if value.startswith(ENTITIES_RESERVED_PREFIX): + err_msg = f"May not start with reserved prefix {ENTITIES_RESERVED_PREFIX}." + raise serializers.ValidationError(_(err_msg)) + + if "." 
in value: + raise serializers.ValidationError("May not include periods.") + + return value + + def validate_project(self, value: Project) -> Project: + """Validate `project` field""" + user = self.context["request"].user + + if not value.shared and not user.has_perm(CAN_VIEW_PROJECT, value): + raise serializers.ValidationError( + f'Invalid pk "{value.pk}" - object does not exist.', + code="does_not_exist", + ) + + return value + + class Meta: + model = EntityList + fields = ( + "id", + "name", + "project", + "date_created", + "date_modified", + ) + read_only_fields = ( + "date_created", + "date_modified", + ) + + +class EntityListArraySerializer(serializers.HyperlinkedModelSerializer): + """Serializer for an array of EntityList""" + url = serializers.HyperlinkedIdentityField( view_name="entity_list-detail", lookup_field="pk" ) @@ -98,7 +145,7 @@ class Meta: ) -class EntityListDetailSerializer(EntityListSerializer): +class EntityListDetailSerializer(EntityListArraySerializer): """Serializer for EntityList detail""" registration_forms = RegistrationFormInlineSerializer(many=True, read_only=True) @@ -132,7 +179,7 @@ def validate_data(self, value): for key in value.keys(): if key not in self.context["entity_list"].properties: raise serializers.ValidationError( - _(f"Invalid dataset property {key}") + _(f"Invalid dataset property {key}.") ) return value From 270417b34728311228f319a3e40099d1f654c449 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Mon, 24 Jun 2024 19:57:39 +0300 Subject: [PATCH 231/270] Cleanup github action yaml file --- .../ecr-image-build-w-arm-runner.yml | 62 +++++++++++++------ docker/onadata-uwsgi/Dockerfile.ubuntu | 26 ++++---- 2 files changed, 57 insertions(+), 31 deletions(-) diff --git a/.github/workflows/ecr-image-build-w-arm-runner.yml b/.github/workflows/ecr-image-build-w-arm-runner.yml index 60a6101210..d0f4a268fa 100644 --- a/.github/workflows/ecr-image-build-w-arm-runner.yml +++ b/.github/workflows/ecr-image-build-w-arm-runner.yml @@ 
-1,11 +1,12 @@ --- name: AWS ECR Build Image with ARM Runner -on: +on: # yamllint disable-line rule:truthy release: - types: - - "released" + types: + - "released" push: + branches: - "main" - "*-rc" tags: @@ -68,9 +69,18 @@ jobs: - name: Get the branch name id: get-branch-name - if: github.event_name == 'push' || github.event_name == 'workflow_dispatch' + if: > + github.event_name == 'push' + || github.event_name == 'workflow_dispatch' run: echo "version=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV + - name: Get docker repository URL + id: get-repo-url + run: | + echo "docker_repo=${{ steps.login-ecr.outputs.registry }}\ + /onaio/onadata:${{ env.version || github.ref_name }}"\ + | sed 's/ //g' >> $GITHUB_ENV + - name: (Ubuntu) Build and push id: docker-build-ubuntu uses: docker/build-push-action@v5 @@ -78,24 +88,31 @@ jobs: context: . file: ./docker/onadata-uwsgi/Dockerfile.ubuntu platforms: ${{ matrix.platforms[0] }} - cache-from: type=registry,ref=${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ env.version || github.ref_name }} + cache-from: type=registry,ref=${{ env.docker_repo }} cache-to: type=inline ssh: | default=/tmp/ssh-agent.sock - build-args: | - optional_packages=PyYAML django-redis ${{ secrets.ECR_OPTIONAL_PACKAGES }} + build-args: > + optional_packages=PyYAML + django-redis + ${{ secrets.ECR_OPTIONAL_PACKAGES }} push: true labels: ${{ steps.meta.outputs.labels }} provenance: false - outputs: type=image,name=${{ steps.login-ecr.outputs.registry }}/onaio/onadata,push-by-digest=true,name-canonical=true,push=true - - - name: Export digest + outputs: > + type=image, + name=${{ steps.login-ecr.outputs.registry }}/onaio/onadata, + push-by-digest=true, + name-canonical=true, + push=true + + - name: Export digest run: | mkdir -p /tmp/digests digest="${{ steps.docker-build-ubuntu.outputs.digest }}" touch "/tmp/digests/${digest#sha256:}" - - - name: Upload digest + + - name: Upload digest uses: actions/upload-artifact@v4 with: name: digests-${{ 
env.PLATFORM_PAIR }} @@ -138,20 +155,27 @@ jobs: type=ref,event=branch type=ref,event=tag + - name: Get docker repository URL + id: get-repo-url + run: | + echo "docker_repo=${{ steps.login-ecr.outputs.registry }}\ + /onaio/onadata:${{ steps.meta.outputs.version }}"\ + | sed 's/ //g' >> $GITHUB_ENV + - name: Create manifest list and push working-directory: /tmp/digests run: | - docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ - $(printf '${{ steps.login-ecr.outputs.registry }}/onaio/onadata@sha256:%s ' *) + docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") $(printf '${{ steps.login-ecr.outputs.registry }}/onaio/onadata@sha256:%s ' *) - name: Inspect image run: | - docker buildx imagetools inspect ${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ steps.meta.outputs.version }} + docker buildx imagetools inspect ${{ env.docker_repo }} - name: Run Trivy vulnerability scanner uses: aquasecurity/trivy-action@master with: - image-ref: ${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ steps.meta.outputs.version }} + image-ref: | + ${{ env.docker_repo }} format: 'sarif' output: 'trivy-results.sarif' @@ -164,7 +188,8 @@ jobs: - name: Run Trivy vulnerability scanner for Slack uses: aquasecurity/trivy-action@master with: - image-ref: ${{ steps.login-ecr.outputs.registry }}/onaio/onadata:${{ steps.meta.outputs.version }} + image-ref: | + ${{ env.docker_repo }} format: json output: 'trivy-results.json' @@ -182,7 +207,8 @@ jobs: with: payload: | { - "text": "Trivy scan results for ${{ steps.meta.outputs.version }}", + "text": + "Trivy scan results for ${{ steps.meta.outputs.version }}", "blocks": [ { "type": "section", diff --git a/docker/onadata-uwsgi/Dockerfile.ubuntu b/docker/onadata-uwsgi/Dockerfile.ubuntu index c35d1dd2a7..18823135ca 100644 --- a/docker/onadata-uwsgi/Dockerfile.ubuntu +++ b/docker/onadata-uwsgi/Dockerfile.ubuntu 
@@ -1,13 +1,13 @@ -FROM onaio/python-deps:3.10.14 as base +FROM onaio/python-deps:3.10.14 AS base ARG optional_packages # Silence configuration prompts -ENV DEBIAN_FRONTEND noninteractive +ENV DEBIAN_FRONTEND=noninteractive -ENV PYTHONUNBUFFERED 1 +ENV PYTHONUNBUFFERED=1 -ENV DJANGO_SETTINGS_MODULE onadata.settings.docker +ENV DJANGO_SETTINGS_MODULE=onadata.settings.docker USER root @@ -35,10 +35,10 @@ RUN python -m pip install --no-cache-dir -U pip && \ python -m pip install --no-cache-dir -r requirements/azure.pip && \ python -m pip install --no-cache-dir pyyaml==6.0.1 uwsgitop==0.12 supervisor==4.2.5 -FROM base as docs +FROM base AS docs ENV PYENV_ROOT="$HOME/.pyenv" -ENV PATH $PYENV_ROOT/versions/3.10.14/bin:$PYENV_ROOT/shims:$PYENV_ROOT/bin:$PATH +ENV PATH=$PYENV_ROOT/versions/3.10.14/bin:$PYENV_ROOT/shims:$PYENV_ROOT/bin:$PATH COPY --from=base /home/appuser/.pyenv/ /home/appuser/.pyenv/ COPY --from=base /srv/onadata/ /srv/onadata/ @@ -53,9 +53,9 @@ RUN python -m pip install --no-cache-dir -r requirements/docs.pip && \ make -C docs html -FROM ubuntu:jammy-20240405 as runtime +FROM ubuntu:jammy-20240405 AS runtime -ENV DEBIAN_FRONTEND noninteractive +ENV DEBIAN_FRONTEND=noninteractive # Install prerequisite packages RUN apt-get update -q && \ @@ -63,8 +63,8 @@ RUN apt-get update -q && \ # # Generate and set en_US.UTF-8 locale RUN locale-gen en_US.UTF-8 -ENV LC_ALL en_US.UTF-8 -ENV LC_CTYPE en_US.UTF-8 +ENV LC_ALL=en_US.UTF-8 +ENV LC_CTYPE=en_US.UTF-8 RUN dpkg-reconfigure locales @@ -93,9 +93,9 @@ RUN chown -R appuser:appuser /srv/onadata /home/appuser/.pyenv USER appuser WORKDIR /srv/onadata -ENV HOME /home/appuser -ENV PYTHON_VERSION 3.10.14 +ENV HOME=/home/appuser +ENV PYTHON_VERSION=3.10.14 ENV PYENV_ROOT="$HOME/.pyenv" -ENV PATH $PYENV_ROOT/versions/3.10.14/bin:$PYENV_ROOT/shims:$PYENV_ROOT/bin:$PATH +ENV PATH=$PYENV_ROOT/versions/3.10.14/bin:$PYENV_ROOT/shims:$PYENV_ROOT/bin:$PATH CMD ["uwsgi", "--ini", "uwsgi.ini"] From 
e0f9e3271c1b2f44ee035b45c8c4bfed7236cc49 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 27 Jun 2024 10:03:01 +0300 Subject: [PATCH 232/270] Update base image Signed-off-by: Kipchirchir Sigei --- docker/onadata-uwsgi/Dockerfile.ubuntu | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/onadata-uwsgi/Dockerfile.ubuntu b/docker/onadata-uwsgi/Dockerfile.ubuntu index 18823135ca..50233f47a7 100644 --- a/docker/onadata-uwsgi/Dockerfile.ubuntu +++ b/docker/onadata-uwsgi/Dockerfile.ubuntu @@ -53,7 +53,7 @@ RUN python -m pip install --no-cache-dir -r requirements/docs.pip && \ make -C docs html -FROM ubuntu:jammy-20240405 AS runtime +FROM ubuntu:jammy-20240530 as runtime ENV DEBIAN_FRONTEND=noninteractive From e30bb0332ef77201ffcf474e5d922243c98394ae Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 27 Jun 2024 10:07:31 +0300 Subject: [PATCH 233/270] Update dependencies Signed-off-by: Kipchirchir Sigei --- requirements/azure.pip | 17 ++++---- requirements/base.pip | 60 +++++++++++++++------------- requirements/dev.pip | 91 +++++++++++++++++++++++------------------- requirements/docs.pip | 8 ++-- requirements/s3.pip | 12 +++--- requirements/ses.pip | 14 ++++--- 6 files changed, 109 insertions(+), 93 deletions(-) diff --git a/requirements/azure.pip b/requirements/azure.pip index 0b78e9199a..3129b2fe2b 100644 --- a/requirements/azure.pip +++ b/requirements/azure.pip @@ -6,23 +6,23 @@ # asgiref==3.8.1 # via django -azure-core==1.30.1 +azure-core==1.30.2 # via # azure-storage-blob # django-storages -azure-storage-blob==12.19.1 +azure-storage-blob==12.20.0 # via django-storages -certifi==2024.2.2 +certifi==2024.6.2 # via requests cffi==1.16.0 # via cryptography charset-normalizer==3.3.2 # via requests -cryptography==42.0.6 +cryptography==42.0.8 # via # -r requirements/azure.in # azure-storage-blob -django==4.2.11 +django==4.2.13 # via # -r requirements/azure.in # django-storages @@ -34,7 +34,7 @@ isodate==0.6.1 # via azure-storage-blob 
pycparser==2.22 # via cffi -requests==2.31.0 +requests==2.32.3 # via azure-core six==1.16.0 # via @@ -42,9 +42,10 @@ six==1.16.0 # isodate sqlparse==0.5.0 # via django -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via + # asgiref # azure-core # azure-storage-blob -urllib3==2.2.1 +urllib3==2.2.2 # via requests diff --git a/requirements/base.pip b/requirements/base.pip index 669f6c64e1..4f4ffd52d8 100644 --- a/requirements/base.pip +++ b/requirements/base.pip @@ -16,6 +16,8 @@ asgiref==3.8.1 # via # django # django-cors-headers +async-timeout==4.0.3 + # via redis attrs==23.2.0 # via # jsonlines @@ -25,9 +27,9 @@ backoff==1.10.0 # via analytics-python billiard==4.2.0 # via celery -boto3==1.34.98 +boto3==1.34.134 # via dataflows-tabulator -botocore==1.34.98 +botocore==1.34.134 # via # boto3 # s3transfer @@ -37,7 +39,7 @@ cachetools==5.3.3 # via google-auth celery==5.4.0 # via onadata -certifi==2024.2.2 +certifi==2024.6.2 # via # requests # sentry-sdk @@ -64,7 +66,7 @@ click-plugins==1.1.1 # via celery click-repl==0.3.0 # via celery -cryptography==42.0.6 +cryptography==42.0.8 # via # jwcrypto # onadata @@ -84,7 +86,7 @@ deprecated==1.2.14 # via onadata dict2xml==1.7.5 # via onadata -django==4.2.11 +django==4.2.13 # via # django-activity-stream # django-cors-headers @@ -105,11 +107,11 @@ django==4.2.11 # onadata django-activity-stream==2.0.0 # via onadata -django-cors-headers==4.3.1 +django-cors-headers==4.4.0 # via onadata django-csp==3.8 # via onadata -django-debug-toolbar==4.3.0 +django-debug-toolbar==4.4.2 # via onadata django-digest @ git+https://github.com/onaio/django-digest.git@6bf61ec08502fd3545d4f2c0838b6cb15e7ffa92 # via -r requirements/base.in @@ -123,7 +125,7 @@ django-multidb-router @ git+https://github.com/onaio/django-multidb-router.git@f # via -r requirements/base.in django-nose==1.4.7 # via onadata -django-oauth-toolkit==2.3.0 +django-oauth-toolkit==2.4.0 # via onadata django-ordered-model==3.7.4 # via onadata @@ -141,7 +143,7 @@ 
django-taggit==5.0.1 # via onadata django-templated-email==3.0.1 # via onadata -djangorestframework==3.15.1 +djangorestframework==3.15.2 # via # djangorestframework-csv # djangorestframework-gis @@ -155,7 +157,7 @@ djangorestframework-gis==1.0 # via onadata djangorestframework-guardian==0.3.0 # via onadata -djangorestframework-jsonapi==7.0.0 +djangorestframework-jsonapi==7.0.1 # via onadata djangorestframework-jsonp==1.0.2 # via onadata @@ -163,7 +165,7 @@ djangorestframework-xml==2.0.0 # via onadata dnspython==2.6.1 # via pymongo -dpath==2.1.6 +dpath==2.2.0 # via onadata elaphe3==0.2.0 # via onadata @@ -175,7 +177,7 @@ future==1.0.0 # via python-json2xlsclient geojson==3.1.0 # via onadata -google-auth==2.29.0 +google-auth==2.30.0 # via # google-auth-oauthlib # onadata @@ -189,7 +191,7 @@ httplib2==0.22.0 # via onadata idna==3.7 # via requests -ijson==3.2.3 +ijson==3.3.0 # via dataflows-tabulator inflection==0.5.1 # via djangorestframework-jsonapi @@ -201,9 +203,9 @@ jmespath==1.0.1 # botocore jsonlines==4.0.0 # via dataflows-tabulator -jsonpickle==3.0.4 +jsonpickle==3.2.2 # via onadata -jsonpointer==2.4 +jsonpointer==3.0.0 # via datapackage jsonschema==4.22.0 # via @@ -217,7 +219,7 @@ kombu==5.3.7 # via celery linear-tsv==1.1.0 # via dataflows-tabulator -lxml==5.2.1 +lxml==5.2.2 # via onadata markdown==3.6 # via onadata @@ -227,7 +229,7 @@ monotonic==1.6 # via analytics-python nose==1.3.7 # via django-nose -numpy==1.26.4 +numpy==2.0.0 # via onadata oauthlib==3.2.2 # via @@ -246,7 +248,7 @@ pillow==10.3.0 # via # elaphe3 # onadata -prompt-toolkit==3.0.43 +prompt-toolkit==3.0.47 # via click-repl psycopg2-binary==2.9.9 # via onadata @@ -266,7 +268,7 @@ pyjwt==2.8.0 # onadata pylibmc==1.6.3 # via onadata -pymongo==4.7.1 +pymongo==4.8.0 # via onadata pyparsing==3.1.2 # via httplib2 @@ -286,6 +288,7 @@ python-memcached==1.62 # via onadata pytz==2024.1 # via + # django-oauth-toolkit # django-query-builder # fleming pyxform==2.0.3 @@ -294,7 +297,7 @@ pyxform==2.0.3 # 
pyfloip recaptcha-client==1.0.6 # via onadata -redis==5.0.4 +redis==5.0.7 # via # django-redis # onadata @@ -302,7 +305,7 @@ referencing==0.35.1 # via # jsonschema # jsonschema-specifications -requests==2.31.0 +requests==2.32.3 # via # analytics-python # dataflows-tabulator @@ -317,13 +320,13 @@ requests-oauthlib==2.0.0 # via google-auth-oauthlib rfc3986==2.0.0 # via tableschema -rpds-py==0.18.0 +rpds-py==0.18.1 # via # jsonschema # referencing rsa==4.9 # via google-auth -s3transfer==0.10.1 +s3transfer==0.10.2 # via boto3 sentry-sdk==1.45.0 # via onadata @@ -339,7 +342,7 @@ six==1.16.0 # linear-tsv # python-dateutil # tableschema -sqlalchemy==2.0.30 +sqlalchemy==2.0.31 # via dataflows-tabulator sqlparse==0.5.0 # via @@ -347,13 +350,14 @@ sqlparse==0.5.0 # django-debug-toolbar tableschema==1.20.11 # via datapackage -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via + # asgiref # jwcrypto # sqlalchemy tzdata==2024.1 # via celery -ujson==5.9.0 +ujson==5.10.0 # via onadata unicodecsv==0.14.1 # via @@ -361,12 +365,12 @@ unicodecsv==0.14.1 # datapackage # onadata # tableschema -urllib3==2.2.1 +urllib3==2.2.2 # via # botocore # requests # sentry-sdk -uwsgi==2.0.25.1 +uwsgi==2.0.26 # via onadata vine==5.1.0 # via diff --git a/requirements/dev.pip b/requirements/dev.pip index 69d5643e7a..3818661b3f 100644 --- a/requirements/dev.pip +++ b/requirements/dev.pip @@ -24,6 +24,8 @@ astroid==2.15.8 # requirements-detector asttokens==2.4.1 # via stack-data +async-timeout==4.0.3 + # via redis attrs==23.2.0 # via # jsonlines @@ -33,9 +35,9 @@ backoff==1.10.0 # via analytics-python billiard==4.2.0 # via celery -boto3==1.34.98 +boto3==1.34.134 # via dataflows-tabulator -botocore==1.34.98 +botocore==1.34.134 # via # boto3 # s3transfer @@ -45,7 +47,7 @@ cachetools==5.3.3 # via google-auth celery==5.4.0 # via onadata -certifi==2024.2.2 +certifi==2024.6.2 # via # requests # sentry-sdk @@ -74,7 +76,7 @@ click-plugins==1.1.1 # via celery click-repl==0.3.0 # via celery 
-cryptography==42.0.6 +cryptography==42.0.8 # via # jwcrypto # onadata @@ -102,7 +104,7 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -django==4.2.11 +django==4.2.13 # via # django-activity-stream # django-cors-headers @@ -124,11 +126,11 @@ django==4.2.11 # onadata django-activity-stream==2.0.0 # via onadata -django-cors-headers==4.3.1 +django-cors-headers==4.4.0 # via onadata django-csp==3.8 # via onadata -django-debug-toolbar==4.3.0 +django-debug-toolbar==4.4.2 # via onadata django-digest @ git+https://github.com/onaio/django-digest.git@6bf61ec08502fd3545d4f2c0838b6cb15e7ffa92 # via -r requirements/base.in @@ -144,7 +146,7 @@ django-multidb-router @ git+https://github.com/onaio/django-multidb-router.git@f # via -r requirements/base.in django-nose==1.4.7 # via onadata -django-oauth-toolkit==2.3.0 +django-oauth-toolkit==2.4.0 # via onadata django-ordered-model==3.7.4 # via onadata @@ -162,7 +164,7 @@ django-taggit==5.0.1 # via onadata django-templated-email==3.0.1 # via onadata -djangorestframework==3.15.1 +djangorestframework==3.15.2 # via # djangorestframework-csv # djangorestframework-gis @@ -176,7 +178,7 @@ djangorestframework-gis==1.0 # via onadata djangorestframework-guardian==0.3.0 # via onadata -djangorestframework-jsonapi==7.0.0 +djangorestframework-jsonapi==7.0.1 # via onadata djangorestframework-jsonp==1.0.2 # via onadata @@ -186,15 +188,17 @@ dnspython==2.6.1 # via pymongo dodgy==0.2.1 # via prospector -dpath==2.1.6 +dpath==2.2.0 # via onadata elaphe3==0.2.0 # via onadata et-xmlfile==1.1.0 # via openpyxl +exceptiongroup==1.2.1 + # via ipython executing==2.0.1 # via stack-data -filelock==3.14.0 +filelock==3.15.4 # via virtualenv flake8==5.0.4 # via @@ -214,7 +218,7 @@ gitdb==4.0.11 # via gitpython gitpython==3.1.43 # via prospector -google-auth==2.29.0 +google-auth==2.30.0 # via # google-auth-oauthlib # onadata @@ -232,15 +236,15 @@ identify==2.5.36 # via pre-commit idna==3.7 # via requests -ijson==3.2.3 +ijson==3.3.0 # via 
dataflows-tabulator -importlib-metadata==7.1.0 +importlib-metadata==8.0.0 # via yapf inflection==0.5.1 # via djangorestframework-jsonapi ipdb==0.13.13 # via -r requirements/dev.in -ipython==8.24.0 +ipython==8.25.0 # via ipdb isodate==0.6.1 # via tableschema @@ -256,9 +260,9 @@ jmespath==1.0.1 # botocore jsonlines==4.0.0 # via dataflows-tabulator -jsonpickle==3.0.4 +jsonpickle==3.2.2 # via onadata -jsonpointer==2.4 +jsonpointer==3.0.0 # via datapackage jsonschema==4.22.0 # via @@ -274,7 +278,7 @@ lazy-object-proxy==1.10.0 # via astroid linear-tsv==1.1.0 # via dataflows-tabulator -lxml==5.2.1 +lxml==5.2.2 # via onadata markdown==3.6 # via onadata @@ -289,11 +293,11 @@ modilabs-python-utils==0.1.5 # via onadata monotonic==1.6 # via analytics-python -nodeenv==1.8.0 +nodeenv==1.9.1 # via pre-commit nose==1.3.7 # via django-nose -numpy==1.26.4 +numpy==2.0.0 # via onadata oauthlib==3.2.2 # via @@ -306,7 +310,7 @@ openpyxl==3.1.2 # dataflows-tabulator # onadata # pyxform -packaging==24.0 +packaging==24.1 # via # prospector # requirements-detector @@ -322,14 +326,14 @@ pillow==10.3.0 # via # elaphe3 # onadata -platformdirs==4.2.1 +platformdirs==4.2.2 # via # pylint # virtualenv # yapf -pre-commit==3.7.0 +pre-commit==3.7.1 # via -r requirements/dev.in -prompt-toolkit==3.0.43 +prompt-toolkit==3.0.47 # via # click-repl # ipython @@ -391,7 +395,7 @@ pylint-plugin-utils==0.7 # pylint-celery # pylint-django # pylint-flask -pymongo==4.7.1 +pymongo==4.8.0 # via onadata pyparsing==3.1.2 # via httplib2 @@ -411,6 +415,7 @@ python-memcached==1.62 # via onadata pytz==2024.1 # via + # django-oauth-toolkit # django-query-builder # fleming pyxform==2.0.3 @@ -423,7 +428,7 @@ pyyaml==6.0.1 # prospector recaptcha-client==1.0.6 # via onadata -redis==5.0.4 +redis==5.0.7 # via # django-redis # onadata @@ -431,7 +436,7 @@ referencing==0.35.1 # via # jsonschema # jsonschema-specifications -requests==2.31.0 +requests==2.32.3 # via # analytics-python # dataflows-tabulator @@ -452,13 +457,13 @@ 
requirements-detector==1.2.2 # via prospector rfc3986==2.0.0 # via tableschema -rpds-py==0.18.0 +rpds-py==0.18.1 # via # jsonschema # referencing rsa==4.9 # via google-auth -s3transfer==0.10.1 +s3transfer==0.10.2 # via boto3 semver==3.0.2 # via requirements-detector @@ -483,7 +488,7 @@ smmap==5.0.1 # via gitdb snowballstemmer==2.2.0 # via pydocstyle -sqlalchemy==2.0.30 +sqlalchemy==2.0.31 # via dataflows-tabulator sqlparse==0.5.0 # via @@ -500,21 +505,26 @@ toml==0.10.2 # prospector # requirements-detector tomli==2.0.1 - # via yapf -tomlkit==0.12.4 + # via + # ipdb + # pylint + # yapf +tomlkit==0.12.5 # via pylint traitlets==5.14.3 # via # ipython # matplotlib-inline -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via + # asgiref + # astroid # ipython # jwcrypto # sqlalchemy tzdata==2024.1 # via celery -ujson==5.9.0 +ujson==5.10.0 # via onadata unicodecsv==0.14.1 # via @@ -522,19 +532,19 @@ unicodecsv==0.14.1 # datapackage # onadata # tableschema -urllib3==2.2.1 +urllib3==2.2.2 # via # botocore # requests # sentry-sdk -uwsgi==2.0.25.1 +uwsgi==2.0.26 # via onadata vine==5.1.0 # via # amqp # celery # kombu -virtualenv==20.26.1 +virtualenv==20.26.3 # via pre-commit wcwidth==0.2.13 # via prompt-toolkit @@ -552,8 +562,5 @@ xmltodict==0.13.0 # via onadata yapf==0.40.2 # via -r requirements/dev.in -zipp==3.18.1 +zipp==3.19.2 # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git a/requirements/docs.pip b/requirements/docs.pip index 981a9f8d7c..6d4beda516 100644 --- a/requirements/docs.pip +++ b/requirements/docs.pip @@ -8,7 +8,7 @@ alabaster==0.7.16 # via sphinx babel==2.15.0 # via sphinx -certifi==2024.2.2 +certifi==2024.6.2 # via requests charset-normalizer==3.3.2 # via requests @@ -22,11 +22,11 @@ jinja2==3.1.4 # via sphinx markupsafe==2.1.5 # via jinja2 -packaging==24.0 +packaging==24.1 # via sphinx pygments==2.18.0 # via sphinx -requests==2.31.0 +requests==2.32.3 # via sphinx 
snowballstemmer==2.2.0 # via sphinx @@ -44,5 +44,5 @@ sphinxcontrib-qthelp==1.0.7 # via sphinx sphinxcontrib-serializinghtml==1.1.10 # via sphinx -urllib3==2.2.1 +urllib3==2.2.2 # via requests diff --git a/requirements/s3.pip b/requirements/s3.pip index 60197bf2f6..58b0e45d25 100644 --- a/requirements/s3.pip +++ b/requirements/s3.pip @@ -6,13 +6,13 @@ # asgiref==3.8.1 # via django -boto3==1.34.98 +boto3==1.34.134 # via -r requirements/s3.in -botocore==1.34.98 +botocore==1.34.134 # via # boto3 # s3transfer -django==4.2.11 +django==4.2.13 # via # -r requirements/s3.in # django-storages @@ -24,11 +24,13 @@ jmespath==1.0.1 # botocore python-dateutil==2.9.0.post0 # via botocore -s3transfer==0.10.1 +s3transfer==0.10.2 # via boto3 six==1.16.0 # via python-dateutil sqlparse==0.5.0 # via django -urllib3==2.2.1 +typing-extensions==4.12.2 + # via asgiref +urllib3==2.2.2 # via botocore diff --git a/requirements/ses.pip b/requirements/ses.pip index d3f3d8f86a..3e3736fb76 100644 --- a/requirements/ses.pip +++ b/requirements/ses.pip @@ -8,17 +8,17 @@ asgiref==3.8.1 # via django boto==2.49.0 # via -r requirements/ses.in -boto3==1.34.98 +boto3==1.34.134 # via django-ses -botocore==1.34.98 +botocore==1.34.134 # via # boto3 # s3transfer -django==4.2.11 +django==4.2.13 # via # -r requirements/ses.in # django-ses -django-ses==4.0.0 +django-ses==4.1.0 # via -r requirements/ses.in jmespath==1.0.1 # via @@ -28,11 +28,13 @@ python-dateutil==2.9.0.post0 # via botocore pytz==2024.1 # via django-ses -s3transfer==0.10.1 +s3transfer==0.10.2 # via boto3 six==1.16.0 # via python-dateutil sqlparse==0.5.0 # via django -urllib3==2.2.1 +typing-extensions==4.12.2 + # via asgiref +urllib3==2.2.2 # via botocore From 3ae0f6b370da47f51b6e62595e54322d512ea003 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Tue, 2 Jul 2024 10:52:46 +0300 Subject: [PATCH 234/270] Tag release v4.3.2 Signed-off-by: Kipchirchir Sigei --- CHANGES.rst | 10 +++++++++- onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files 
changed, 11 insertions(+), 3 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index c0a2fcdc2c..5e098561b4 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,13 +3,21 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v4.3.2(2024-07-02) +------------------ +- Security Updates + `PR #2624 ` + [@KipSigei] +- Cleanup github action yaml file + `PR #2627 ` + [@ukanga] + v4.3.1(2024-06-28) ------------------ - Fix 0 integer, decimal values parsed as string when saving json `PR #2621 ` [@kelvin-muchiri] - v4.3.0(2024-06-26) ------------------ - Separate ecr image build to use arm64 runner for arm64 builds diff --git a/onadata/__init__.py b/onadata/__init__.py index c9932d2532..8ba8c53cac 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "4.3.1" +__version__ = "4.3.2" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index 0ff9bd4420..a4fb38f836 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 4.3.1 +version = 4.3.2 description = Collect Analyze and Share Data long_description = file: README.rst long_description_content_type = text/x-rst From 9087da66c14b6c7b7d70322904387c6dcd08dd23 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Tue, 2 Jul 2024 17:24:23 +0300 Subject: [PATCH 235/270] Update flaky tests --- .../api/tests/viewsets/test_attachment_viewset.py | 10 ++++++++-- .../apps/api/tests/viewsets/test_xform_viewset.py | 12 +++--------- onadata/apps/logger/tests/test_briefcase_client.py | 2 +- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_attachment_viewset.py b/onadata/apps/api/tests/viewsets/test_attachment_viewset.py index 64d7a9562f..9194345ec9 100644 --- a/onadata/apps/api/tests/viewsets/test_attachment_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_attachment_viewset.py @@ -1,16 +1,21 @@ 
+# -*- coding: utf-8 -*- +""" +Test Attachment viewsets. +""" import os - from django.utils import timezone +from flaky import flaky + from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet from onadata.apps.api.viewsets.attachment_viewset import AttachmentViewSet from onadata.apps.logger.import_tools import django_file from onadata.apps.logger.models.attachment import Attachment from onadata.apps.logger.models.instance import get_attachment_url from onadata.apps.main.models.meta_data import MetaData -from onadata.libs.permissions import EditorRole from onadata.libs.models.share_xform import ShareXForm +from onadata.libs.permissions import EditorRole def attachment_url(attachment, suffix=None): @@ -28,6 +33,7 @@ def setUp(self): self._publish_xls_form_to_project() + @flaky(max_runs=3) def test_retrieve_view(self): self._submit_transport_instance_w_attachment() diff --git a/onadata/apps/api/tests/viewsets/test_xform_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_viewset.py index 2fcee5a6a2..33a1cd9dce 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_viewset.py @@ -16,7 +16,6 @@ from io import StringIO from unittest.mock import Mock, patch from xml.dom import Node -from defusedxml import minidom from django.conf import settings from django.contrib.contenttypes.models import ContentType @@ -30,6 +29,7 @@ from django.utils.timezone import utc import jwt +from defusedxml import minidom from django_digest.test import DigestAuth from flaky import flaky from httmock import HTTMock @@ -55,13 +55,7 @@ ) from onadata.apps.api.viewsets.project_viewset import ProjectViewSet from onadata.apps.api.viewsets.xform_viewset import XFormViewSet -from onadata.apps.logger.models import ( - Attachment, - Instance, - Project, - XForm, - EntityList, -) +from onadata.apps.logger.models import Attachment, EntityList, Instance, Project, XForm from onadata.apps.logger.models.xform_version 
import XFormVersion from onadata.apps.logger.views import delete_xform from onadata.apps.logger.xform_instance_parser import XLSFormError @@ -3567,7 +3561,7 @@ def test_failed_form_publishing_after_maximum_retries( self.assertEqual(response.status_code, 202) self.assertEqual(response.data, error_message) - @flaky(max_runs=8) + @flaky(max_runs=10) def test_survey_preview_endpoint(self): view = XFormViewSet.as_view({"post": "survey_preview", "get": "survey_preview"}) diff --git a/onadata/apps/logger/tests/test_briefcase_client.py b/onadata/apps/logger/tests/test_briefcase_client.py index 1e80ea8272..32394fcd64 100644 --- a/onadata/apps/logger/tests/test_briefcase_client.py +++ b/onadata/apps/logger/tests/test_briefcase_client.py @@ -169,7 +169,7 @@ def _download_submissions(self): mocker.head(requests_mock.ANY, content=submission_list) self.briefcase_client.download_instances(self.xform.id_string) - @flaky(max_runs=10) + @flaky(max_runs=11) def test_download_xform_xml(self): """ Download xform via briefcase api From fca9208c4592a68a516b816ab0c6e76124c95bf4 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Wed, 3 Jul 2024 16:13:17 +0300 Subject: [PATCH 236/270] Run api and logger tests separately on GitHub actions --- .github/workflows/ci.yml | 70 ++++++---------------------------------- 1 file changed, 10 insertions(+), 60 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 39b85c7495..34e2b149a6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -50,65 +50,15 @@ jobs: - name: Run Prospector run: prospector -X -s veryhigh onadata - unit-tests-1: - name: Django Unit Tests (Libraries, Main, RestServices, SMS Support, Viewer, Messaging) - runs-on: ubuntu-22.04 - needs: static-analysis - env: - DJANGO_SETTINGS_MODULE: onadata.settings.github_actions_test - services: - postgres: - image: postgis/postgis:13-3.0 - env: - POSTGRES_PASSWORD: onadata - POSTGRES_DB: onadata - POSTGRES_USER: onadata - ports: - - 5432:5432 - 
# Set health checks to wait until postgres has started - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - name: Setup Java - uses: actions/setup-java@v3 - with: - distribution: "adopt" - java-version: "8" - - - name: Setup python - uses: actions/setup-python@v4 - with: - python-version: "3.10" - architecture: "x64" - cache: "pip" - cache-dependency-path: | - requirements/base.pip - requirements/dev.pip - requirements/azure.pip - - - name: Update apt sources - run: sudo apt-get update - - - name: Install APT requirements - run: sudo apt-get install -y --no-install-recommends libjpeg-dev zlib1g-dev software-properties-common ghostscript libxslt1-dev binutils libproj-dev gdal-bin memcached libmemcached-dev libxml2-dev libxslt-dev - - - name: Install Pip requirements - run: | - pip install -U pip - pip install -r requirements/base.pip - pip install -r requirements/dev.pip - - - name: Run tests - run: | - python manage.py test onadata/libs onadata/apps/main onadata/apps/restservice onadata/apps/sms_support onadata/apps/viewer onadata/apps/messaging --noinput --timing --settings=onadata.settings.github_actions_test --verbosity=2 --parallel=4 - unit-tests-2: - name: Django Unit Tests (API, Logger) + unit-tests: + strategy: + fail-fast: false + matrix: + test_path: + - [" Django Unit Tests (Libraries, Main, RestServices, SMS Support, Viewer, Messaging)", "python manage.py test onadata/libs onadata/apps/main onadata/apps/restservice onadata/apps/sms_support onadata/apps/viewer onadata/apps/messaging --noinput --timing --settings=onadata.settings.github_actions_test --verbosity=2 --parallel=4"] + - ["Django Unit Tests API", "python manage.py test onadata/apps/api --noinput --timing --settings=onadata.settings.github_actions_test --verbosity=2 --parallel=4"] + - ["Django Unit Tests Logger", "python manage.py test onadata/apps/logger --noinput --timing 
--settings=onadata.settings.github_actions_test --verbosity=2 --parallel=4"] + name: "${{ matrix.test_path[0] }}" runs-on: ubuntu-22.04 needs: static-analysis env: @@ -164,7 +114,7 @@ jobs: - name: Run tests run: | - python manage.py test onadata/apps/api onadata/apps/logger --noinput --timing --settings=onadata.settings.github_actions_test --verbosity=2 --parallel=4 + ${{ matrix.test_path[1] }} security-check: name: Trivy Security Checks runs-on: ubuntu-22.04 From 8e2e6420d2a4b1663aa1a5106e9e31e16dad1201 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Wed, 3 Jul 2024 17:17:41 +0300 Subject: [PATCH 237/270] Node.js 16 actions are deprecated, use Node.js 20 --- .github/workflows/ci.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 34e2b149a6..6b3a3a43e5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -18,10 +18,10 @@ jobs: fail-fast: false steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.10" architecture: "x64" @@ -80,16 +80,16 @@ jobs: --health-retries 5 steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Java - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: distribution: "adopt" java-version: "8" - name: Setup python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.10" architecture: "x64" @@ -120,7 +120,7 @@ jobs: runs-on: ubuntu-22.04 steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Update apt sources run: sudo apt-get update @@ -131,7 +131,7 @@ jobs: run: echo "version=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV - name: Build Docker image - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v6 with: context: . 
file: ./docker/onadata-uwsgi/Dockerfile.ubuntu @@ -162,7 +162,7 @@ jobs: output: "trivy_results.sarif" - name: Upload vulnerability scan results - uses: github/codeql-action/upload-sarif@v2 + uses: github/codeql-action/upload-sarif@v3 if: github.event_name == 'push' || github.event_name == 'pull_request' with: sarif_file: "trivy_results.sarif" From f4e8adbe161bc0ac37f934c3b5b8155aff028419 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Wed, 3 Jul 2024 19:08:41 +0300 Subject: [PATCH 238/270] Set default value in the event the key is already cleared --- onadata/libs/authentication.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/onadata/libs/authentication.py b/onadata/libs/authentication.py index 57b1083a05..054709514d 100644 --- a/onadata/libs/authentication.py +++ b/onadata/libs/authentication.py @@ -308,7 +308,7 @@ def login_attempts(request): if attempts: cache.incr(attempts_key) - attempts = cache.get(attempts_key) + attempts = cache.get(attempts_key, 0) if attempts >= getattr(settings, "MAX_LOGIN_ATTEMPTS", 10): lockout_key = safe_key(f"{LOCKOUT_IP}{ip_address}-{username}") lockout = cache.get(lockout_key) From d856b7e9cfd72362b7fda73065b716b156782bec Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Thu, 4 Jul 2024 14:07:28 +0300 Subject: [PATCH 239/270] Tune migration performance (#2631) * tune migration performance * fix lint warning line too long --- ...ntitylistgroupobjectpermission_and_more.py | 99 +++++++++++++++++-- onadata/apps/logger/models/entity.py | 10 +- onadata/apps/logger/models/entity_list.py | 22 ++++- 3 files changed, 122 insertions(+), 9 deletions(-) diff --git a/onadata/apps/logger/migrations/0018_entityhistory_entitylistgroupobjectpermission_and_more.py b/onadata/apps/logger/migrations/0018_entityhistory_entitylistgroupobjectpermission_and_more.py index 484b073e67..00270bd804 100644 --- a/onadata/apps/logger/migrations/0018_entityhistory_entitylistgroupobjectpermission_and_more.py +++ 
b/onadata/apps/logger/migrations/0018_entityhistory_entitylistgroupobjectpermission_and_more.py @@ -21,6 +21,7 @@ def rename_entity_label_key(apps, schema_editor): class Migration(migrations.Migration): + atomic = False dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), @@ -126,42 +127,54 @@ class Migration(migrations.Migration): model_name="entitylistuserobjectpermission", name="content_object", field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to="logger.entitylist" + on_delete=django.db.models.deletion.CASCADE, + to="logger.entitylist", + db_index=False, ), ), migrations.AddField( model_name="entitylistuserobjectpermission", name="permission", field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to="auth.permission" + on_delete=django.db.models.deletion.CASCADE, + to="auth.permission", + db_index=False, ), ), migrations.AddField( model_name="entitylistuserobjectpermission", name="user", field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + db_index=False, ), ), migrations.AddField( model_name="entitylistgroupobjectpermission", name="content_object", field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to="logger.entitylist" + on_delete=django.db.models.deletion.CASCADE, + to="logger.entitylist", + db_index=False, ), ), migrations.AddField( model_name="entitylistgroupobjectpermission", name="group", field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to="auth.group" + on_delete=django.db.models.deletion.CASCADE, + to="auth.group", + db_index=False, ), ), migrations.AddField( model_name="entitylistgroupobjectpermission", name="permission", field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to="auth.permission" + on_delete=django.db.models.deletion.CASCADE, + to="auth.permission", + db_index=False, ), ), 
migrations.AddField( @@ -171,6 +184,7 @@ class Migration(migrations.Migration): null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, + db_index=False, ), ), migrations.AddField( @@ -180,6 +194,7 @@ class Migration(migrations.Migration): on_delete=django.db.models.deletion.CASCADE, related_name="history", to="logger.entity", + db_index=False, ), ), migrations.AddField( @@ -191,6 +206,7 @@ class Migration(migrations.Migration): on_delete=django.db.models.deletion.SET_NULL, related_name="entity_history", to="logger.instance", + db_index=False, ), ), migrations.AddField( @@ -202,6 +218,7 @@ class Migration(migrations.Migration): on_delete=django.db.models.deletion.CASCADE, related_name="entity_history", to="logger.registrationform", + db_index=False, ), ), migrations.AlterUniqueTogether( @@ -215,4 +232,74 @@ class Migration(migrations.Migration): migrations.RunPython( rename_entity_label_key, reverse_code=migrations.RunPython.noop ), + migrations.RunSQL( + sql=( + 'CREATE INDEX CONCURRENTLY "logger_entitylistuserobjec_content_object_id_5b69ec8c_idx" ' + 'ON "logger_entitylistuserobjectpermission" ("content_object_id");' + ), + reverse_sql='DROP INDEX CONCURRENTLY "logger_entitylistuserobjec_content_object_id_5b69ec8c_idx";', + ), + migrations.RunSQL( + sql=( + 'CREATE INDEX CONCURRENTLY "logger_entitylistuserobjectpermission_permission_id_690665e1_idx" ' + 'ON "logger_entitylistuserobjectpermission" ("permission_id");' + ), + reverse_sql='DROP INDEX CONCURRENTLY "logger_entitylistuserobjectpermission_permission_id_690665e1_idx";', + ), + migrations.RunSQL( + sql=( + 'CREATE INDEX CONCURRENTLY "logger_entitylistuserobjectpermission_user_id_d0c4b31a_idx" ' + 'ON "logger_entitylistuserobjectpermission" ("user_id");' + ), + reverse_sql='DROP INDEX CONCURRENTLY "logger_entitylistuserobjectpermission_user_id_d0c4b31a_idx";', + ), + migrations.RunSQL( + sql=( + 'CREATE INDEX CONCURRENTLY 
"logger_entitylistgroupobje_content_object_id_a7a535f3_idx" ' + 'ON "logger_entitylistgroupobjectpermission" ("content_object_id");' + ), + reverse_sql='DROP INDEX CONCURRENTLY "logger_entitylistgroupobje_content_object_id_a7a535f3_idx";', + ), + migrations.RunSQL( + sql=( + 'CREATE INDEX CONCURRENTLY "logger_entitylistgroupobjectpermission_group_id_c7f010ff_idx" ' + 'ON "logger_entitylistgroupobjectpermission" ("group_id");' + ), + reverse_sql='DROP INDEX CONCURRENTLY "logger_entitylistgroupobjectpermission_group_id_c7f010ff_idx";', + ), + migrations.RunSQL( + sql=( + 'CREATE INDEX CONCURRENTLY "logger_entitylistgroupobjectpermission_permission_id_7cfe1bbe_idx" ' + 'ON "logger_entitylistgroupobjectpermission" ("permission_id");' + ), + reverse_sql='DROP INDEX CONCURRENTLY "logger_entitylistgroupobjectpermission_permission_id_7cfe1bbe_idx";', + ), + migrations.RunSQL( + sql=( + 'CREATE INDEX CONCURRENTLY "logger_entityhistory_created_by_id_17e666ff" ' + 'ON "logger_entityhistory" ("created_by_id");' + ), + reverse_sql='DROP INDEX CONCURRENTLY "logger_entityhistory_created_by_id_17e666ff";', + ), + migrations.RunSQL( + sql=( + 'CREATE INDEX CONCURRENTLY "logger_entityhistory_entity_id_f1ca62b3" ' + 'ON "logger_entityhistory" ("entity_id");' + ), + reverse_sql='DROP INDEX CONCURRENTLY "logger_entityhistory_entity_id_f1ca62b3";', + ), + migrations.RunSQL( + sql=( + 'CREATE INDEX CONCURRENTLY "logger_entityhistory_instance_id_274e1bc1" ' + 'ON "logger_entityhistory" ("instance_id");' + ), + reverse_sql='DROP INDEX CONCURRENTLY "logger_entityhistory_instance_id_274e1bc1";', + ), + migrations.RunSQL( + sql=( + 'CREATE INDEX CONCURRENTLY "logger_entityhistory_registration_form_id_9f0aaece" ' + 'ON "logger_entityhistory" ("registration_form_id");' + ), + reverse_sql='DROP INDEX CONCURRENTLY "logger_entityhistory_registration_form_id_9f0aaece";', + ), ] diff --git a/onadata/apps/logger/models/entity.py b/onadata/apps/logger/models/entity.py index 300592ff96..3fb7e3ae2b 
100644 --- a/onadata/apps/logger/models/entity.py +++ b/onadata/apps/logger/models/entity.py @@ -55,10 +55,14 @@ class EntityHistory(BaseModel): class Meta(BaseModel.Meta): app_label = "logger" + # Set db_index=False so that we can create indexes manually concurrently in the + # migration (0018_entityhistory_entitylistgroupobjectpermission_and_more) for + # improved performance in huge databases entity = models.ForeignKey( Entity, related_name="history", on_delete=models.CASCADE, + db_index=False, ) registration_form = models.ForeignKey( RegistrationForm, @@ -66,6 +70,7 @@ class Meta(BaseModel.Meta): related_name="entity_history", null=True, blank=True, + db_index=False, ) instance = models.ForeignKey( Instance, @@ -73,8 +78,11 @@ class Meta(BaseModel.Meta): related_name="entity_history", null=True, blank=True, + db_index=False, ) xml = models.TextField(blank=True, null=True) json = models.JSONField(default=dict) form_version = models.CharField(max_length=255, null=True, blank=True) - created_by = models.ForeignKey(User, null=True, on_delete=models.SET_NULL) + created_by = models.ForeignKey( + User, null=True, on_delete=models.SET_NULL, db_index=False + ) diff --git a/onadata/apps/logger/models/entity_list.py b/onadata/apps/logger/models/entity_list.py index c0af121c50..1734d9b2bc 100644 --- a/onadata/apps/logger/models/entity_list.py +++ b/onadata/apps/logger/models/entity_list.py @@ -3,12 +3,14 @@ """ from django.contrib.auth import get_user_model +from django.contrib.auth.models import Group, Permission from django.contrib.contenttypes.fields import GenericRelation from django.db import models from django.utils.translation import gettext_lazy as _ from django.utils import timezone from guardian.models import UserObjectPermissionBase, GroupObjectPermissionBase +from guardian.compat import user_model_label from onadata.apps.logger.models.project import Project from onadata.libs.models import BaseModel @@ -82,11 +84,27 @@ def soft_delete(self, deleted_by=None): 
class EntityListUserObjectPermission(UserObjectPermissionBase): """Guardian model to create direct foreign keys.""" - content_object = models.ForeignKey(EntityList, on_delete=models.CASCADE) + content_object = models.ForeignKey( + EntityList, on_delete=models.CASCADE, db_index=False + ) + # Override fields' (db_index=False) so that we can create indexes manually + # concurrently in the migration + # (0018_entityhistory_entitylistgroupobjectpermission_and_more) for + # improved performance in huge databases + user = models.ForeignKey(user_model_label, on_delete=models.CASCADE, db_index=False) + permission = models.ForeignKey(Permission, on_delete=models.CASCADE, db_index=False) # pylint: disable=too-few-public-methods class EntityListGroupObjectPermission(GroupObjectPermissionBase): """Guardian model to create direct foreign keys.""" - content_object = models.ForeignKey(EntityList, on_delete=models.CASCADE) + content_object = models.ForeignKey( + EntityList, on_delete=models.CASCADE, db_index=False + ) + # Override fields' (db_index=False) so that we can create indexes manually + # concurrently in the migration + # (0018_entityhistory_entitylistgroupobjectpermission_and_more) for + # improved performance in huge databases + group = models.ForeignKey(Group, on_delete=models.CASCADE, db_index=False) + permission = models.ForeignKey(Permission, on_delete=models.CASCADE, db_index=False) From 4eab88f69ad23bb9e0dc4243cdc248a8dfe571f4 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Thu, 4 Jul 2024 15:33:04 +0300 Subject: [PATCH 240/270] make pending migrations (#2632) --- ...logger_inst_deleted_da31a3_idx_and_more.py | 62 +++++++++++++++++++ onadata/apps/logger/models/instance.py | 10 ++- 2 files changed, 69 insertions(+), 3 deletions(-) create mode 100644 onadata/apps/logger/migrations/0020_rename_logger_inst_deleted_at_da31a3_idx_logger_inst_deleted_da31a3_idx_and_more.py diff --git 
a/onadata/apps/logger/migrations/0020_rename_logger_inst_deleted_at_da31a3_idx_logger_inst_deleted_da31a3_idx_and_more.py b/onadata/apps/logger/migrations/0020_rename_logger_inst_deleted_at_da31a3_idx_logger_inst_deleted_da31a3_idx_and_more.py new file mode 100644 index 0000000000..dc126a410a --- /dev/null +++ b/onadata/apps/logger/migrations/0020_rename_logger_inst_deleted_at_da31a3_idx_logger_inst_deleted_da31a3_idx_and_more.py @@ -0,0 +1,62 @@ +# Generated by Django 4.2.13 on 2024-07-04 11:26 + +from django.db import migrations, models +import onadata.apps.logger.models.instance + + +class Migration(migrations.Migration): + + dependencies = [ + ("logger", "0019_alter_project_options_and_more"), + ] + + operations = [ + migrations.RenameIndex( + model_name="instance", + new_name="logger_inst_deleted_da31a3_idx", + old_name="logger_inst_deleted_at_da31a3_idx", + ), + migrations.RenameIndex( + model_name="instance", + new_name="logger_inst_xform_i_504638_idx", + old_name="logger_instance_id_xform_id_index", + ), + migrations.RenameIndex( + model_name="instancehistory", + new_name="logger_inst_checksu_05f7bf_idx", + old_name="logger_inst_hist_checksum_05f7bf_idx", + ), + migrations.RenameIndex( + model_name="instancehistory", + new_name="logger_inst_uuid_f5ae42_idx", + old_name="logger_inst_hist_uuid_f5ae42_idx", + ), + migrations.AlterField( + model_name="instance", + name="date_created", + field=models.DateTimeField( + blank=True, + default=onadata.apps.logger.models.instance.now, + editable=False, + ), + ), + migrations.AlterField( + model_name="instance", + name="date_modified", + field=models.DateTimeField( + blank=True, + default=onadata.apps.logger.models.instance.now, + editable=False, + ), + ), + migrations.AlterField( + model_name="instancehistory", + name="checksum", + field=models.CharField(blank=True, db_index=True, max_length=64, null=True), + ), + migrations.AlterField( + model_name="instancehistory", + name="uuid", + 
field=models.CharField(db_index=True, default="", max_length=249), + ), + ] diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index 6543423a06..611c50696a 100644 --- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -884,9 +884,6 @@ def permanently_delete_attachments(sender, instance=None, created=False, **kwarg class InstanceHistory(models.Model, InstanceBaseClass): """Stores deleted submission XML to maintain a history of edits.""" - class Meta: - app_label = "logger" - xform_instance = models.ForeignKey( Instance, related_name="submission_history", on_delete=models.CASCADE ) @@ -902,6 +899,13 @@ class Meta: geom = models.GeometryCollectionField(null=True) checksum = models.CharField(max_length=64, null=True, blank=True, db_index=True) + class Meta: + app_label = "logger" + indexes = [ + models.Index(fields=["checksum"]), + models.Index(fields=["uuid"]), + ] + @property def xform(self): """Returns the XForm object linked to this submission.""" From 365755e31e5531318c72bfe47593e31238f82224 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Thu, 20 Jun 2024 14:45:42 +0300 Subject: [PATCH 241/270] Update Spinx==7.3.7 --- docs/_themes/sphinx_rtd_theme/layout.html | 2 +- docs/conf.py | 62 +++++++++++------------ requirements/docs.in | 2 +- requirements/docs.pip | 6 +-- 4 files changed, 36 insertions(+), 36 deletions(-) diff --git a/docs/_themes/sphinx_rtd_theme/layout.html b/docs/_themes/sphinx_rtd_theme/layout.html index ef3775f63b..3b69b19f4d 100644 --- a/docs/_themes/sphinx_rtd_theme/layout.html +++ b/docs/_themes/sphinx_rtd_theme/layout.html @@ -34,7 +34,7 @@ {# RTD hosts this file, so just load on non RTD builds #} {% if not READTHEDOCS %} - + {% endif %} {% for cssfile in css_files %} diff --git a/docs/conf.py b/docs/conf.py index 1d9d1ab19f..e18223a4dc 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -20,10 +20,9 @@ # add these directories to sys.path here. 
If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, abspath(dirname(dirname(__file__)))) -sys.path.append(abspath('_themes')) +sys.path.append(abspath("_themes")) -os.environ.setdefault("DJANGO_SETTINGS_MODULE", - "onadata.settings.common") +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "onadata.settings.common") # -- General configuration ------------------------------------------------ @@ -34,36 +33,36 @@ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.intersphinx', - 'sphinx.ext.todo', - 'sphinx.ext.imgmath', + "sphinx.ext.autodoc", + "sphinx.ext.intersphinx", + "sphinx.ext.todo", + "sphinx.ext.imgmath", ] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'Ona API' -copyright = u'2018, Ona' +project = "Ona Data API" +copyright = "2024, Ona Systems, Inc." # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. -version = '0.1' +version = "0.1" # The full version, including alpha/beta/rc tags. -release = '1.0' +release = "1.0" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -77,7 +76,7 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. 
-exclude_patterns = ['_build'] +exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -95,7 +94,7 @@ # show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] @@ -108,8 +107,8 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme_path = ['_themes'] -html_theme = 'sphinx_rtd_theme' +html_theme_path = ["_themes"] +html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -138,7 +137,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ["_static"] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied @@ -187,7 +186,7 @@ # html_file_suffix = None # Output file base name for HTML help builder. -htmlhelp_basename = 'OnaAPIdoc' +htmlhelp_basename = "OnaAPIdoc" # -- Options for LaTeX output --------------------------------------------- @@ -196,7 +195,6 @@ # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. # 'preamble': '', } @@ -205,8 +203,7 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). 
latex_documents = [ - ('index', 'OnaAPI.tex', u'Ona API Documentation', u'Ona', - 'manual'), + ("index", "OnaAPI.tex", "Ona API Documentation", "Ona", "manual"), ] # The name of an image file (relative to this directory) to place at the top of @@ -234,10 +231,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [ - ('index', 'onaapi', u'Ona API Documentation', - [u'Ona'], 1) -] +man_pages = [("index", "onaapi", "Ona API Documentation", ["Ona"], 1)] # If true, show URL addresses after external links. # man_show_urls = False @@ -249,9 +243,15 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - ('index', 'OnaAPI', u'Ona API Documentation', - u'Ona', 'OnaAPI', 'Ona API Documentation.', - 'Miscellaneous'), + ( + "index", + "OnaAPI", + "Ona API Documentation", + "Ona", + "OnaAPI", + "Ona API Documentation.", + "Miscellaneous", + ), ] # Documents to append as an appendix to all manuals. @@ -268,4 +268,4 @@ # Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = {'http://docs.python.org/': None} +intersphinx_mapping = {"http://docs.python.org/": None} diff --git a/requirements/docs.in b/requirements/docs.in index d0d294c97e..6966869c70 100644 --- a/requirements/docs.in +++ b/requirements/docs.in @@ -1 +1 @@ -sphinx>=6.2,<7 +sphinx diff --git a/requirements/docs.pip b/requirements/docs.pip index 6d4beda516..0fe737c8dd 100644 --- a/requirements/docs.pip +++ b/requirements/docs.pip @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with Python 3.11 +# This file is autogenerated by pip-compile with Python 3.12 # by the following command: # # pip-compile --output-file=requirements/docs.pip --strip-extras requirements/docs.in @@ -12,7 +12,7 @@ certifi==2024.6.2 # via requests charset-normalizer==3.3.2 # via requests -docutils==0.19 +docutils==0.21.2 # via sphinx idna==3.7 # via requests @@ -30,7 +30,7 @@ requests==2.32.3 # via sphinx snowballstemmer==2.2.0 # via sphinx -sphinx==6.2.1 +sphinx==7.3.7 # via -r requirements/docs.in sphinxcontrib-applehelp==1.0.8 # via sphinx From 4256a9126412be92df038510021a4f60be671bbb Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Thu, 20 Jun 2024 16:24:44 +0300 Subject: [PATCH 242/270] Regenerate apidocs sphinx-apidoc -f -o . .. 
../onadata/settings ../setup.py ../manage.py --- docs/authentication.rst | 12 +- docs/conf.py | 10 +- docs/index.rst | 14 +- docs/modules.rst | 4 +- docs/onadata.apps.api.management.commands.rst | 155 ++-- docs/onadata.apps.api.management.rst | 9 +- docs/onadata.apps.api.migrations.rst | 65 +- docs/onadata.apps.api.models.rst | 41 +- docs/onadata.apps.api.rst | 59 +- ...ata.apps.api.tests.management.commands.rst | 45 ++ docs/onadata.apps.api.tests.management.rst | 9 +- docs/onadata.apps.api.tests.models.rst | 65 +- docs/onadata.apps.api.tests.permissions.rst | 21 +- docs/onadata.apps.api.tests.rst | 43 +- docs/onadata.apps.api.tests.views.rst | 25 +- docs/onadata.apps.api.tests.viewsets.rst | 303 +++++--- docs/onadata.apps.api.urls.rst | 29 + docs/onadata.apps.api.viewsets.rst | 283 ++++--- docs/onadata.apps.api.viewsets.v2.rst | 21 + ...nadata.apps.logger.management.commands.rst | 305 +++++--- ....apps.logger.management.commands.tests.rst | 21 + docs/onadata.apps.logger.management.rst | 9 +- docs/onadata.apps.logger.migrations.rst | 705 ++++++++++++++---- docs/onadata.apps.logger.models.rst | 147 +++- docs/onadata.apps.logger.models.tests.rst | 21 + docs/onadata.apps.logger.rst | 85 ++- docs/onadata.apps.logger.templatetags.rst | 21 + ....apps.logger.tests.management.commands.rst | 37 + docs/onadata.apps.logger.tests.management.rst | 18 + docs/onadata.apps.logger.tests.models.rst | 101 ++- docs/onadata.apps.logger.tests.rst | 167 +++-- .../onadata.apps.main.management.commands.rst | 101 +-- docs/onadata.apps.main.management.rst | 9 +- docs/onadata.apps.main.migrations.rst | 131 +++- docs/onadata.apps.main.models.rst | 51 +- docs/onadata.apps.main.rst | 84 +-- docs/onadata.apps.main.templatetags.rst | 17 +- docs/onadata.apps.main.tests.rst | 285 +++---- docs/onadata.apps.messaging.backends.rst | 29 + docs/onadata.apps.messaging.migrations.rst | 10 + docs/onadata.apps.messaging.rst | 127 ++++ docs/onadata.apps.messaging.tests.rst | 69 ++ 
...a.apps.restservice.management.commands.rst | 21 +- docs/onadata.apps.restservice.management.rst | 13 +- docs/onadata.apps.restservice.migrations.rst | 49 +- docs/onadata.apps.restservice.rst | 61 +- docs/onadata.apps.restservice.services.rst | 39 +- docs/onadata.apps.restservice.tests.rst | 20 +- ...nadata.apps.restservice.tests.viewsets.rst | 17 +- docs/onadata.apps.restservice.viewsets.rst | 17 +- docs/onadata.apps.rst | 20 +- docs/onadata.apps.sms_support.providers.rst | 51 +- docs/onadata.apps.sms_support.rst | 64 +- docs/onadata.apps.sms_support.tests.rst | 41 +- docs/onadata.apps.stats.rst | 54 -- ...nadata.apps.viewer.management.commands.rst | 49 +- docs/onadata.apps.viewer.management.rst | 9 +- docs/onadata.apps.viewer.migrations.rst | 115 ++- docs/onadata.apps.viewer.models.rst | 51 +- docs/onadata.apps.viewer.models.tests.rst | 21 + docs/onadata.apps.viewer.rst | 64 +- docs/onadata.apps.viewer.templatetags.rst | 17 +- docs/onadata.apps.viewer.tests.rst | 149 ++-- docs/onadata.libs.data.rst | 26 +- docs/onadata.libs.data.tests.rst | 30 - docs/onadata.libs.mixins.rst | 141 ++-- docs/onadata.libs.models.rst | 79 +- docs/onadata.libs.profiling.rst | 13 +- docs/onadata.libs.renderers.rst | 13 +- docs/onadata.libs.rst | 69 +- docs/onadata.libs.serializers.fields.rst | 115 +-- docs/onadata.libs.serializers.rst | 316 ++++---- docs/onadata.libs.test_utils.rst | 29 + docs/onadata.libs.tests.data.rst | 29 + docs/onadata.libs.tests.models.rst | 21 + docs/onadata.libs.tests.rst | 54 +- docs/onadata.libs.tests.serializers.rst | 129 +++- docs/onadata.libs.tests.utils.rst | 213 ++++-- docs/onadata.libs.utils.rst | 349 +++++---- docs/onadata.rst | 33 +- docs/onadata.settings.rst | 78 -- .../management/commands/remove_odk_prefix.py | 22 +- .../apps/main/tests/test_service_health.py | 9 +- 83 files changed, 4077 insertions(+), 2396 deletions(-) create mode 100644 docs/onadata.apps.api.tests.management.commands.rst create mode 100644 docs/onadata.apps.api.urls.rst create 
mode 100644 docs/onadata.apps.api.viewsets.v2.rst create mode 100644 docs/onadata.apps.logger.management.commands.tests.rst create mode 100644 docs/onadata.apps.logger.models.tests.rst create mode 100644 docs/onadata.apps.logger.templatetags.rst create mode 100644 docs/onadata.apps.logger.tests.management.commands.rst create mode 100644 docs/onadata.apps.logger.tests.management.rst create mode 100644 docs/onadata.apps.messaging.backends.rst create mode 100644 docs/onadata.apps.messaging.migrations.rst create mode 100644 docs/onadata.apps.messaging.rst create mode 100644 docs/onadata.apps.messaging.tests.rst delete mode 100644 docs/onadata.apps.stats.rst create mode 100644 docs/onadata.apps.viewer.models.tests.rst delete mode 100644 docs/onadata.libs.data.tests.rst create mode 100644 docs/onadata.libs.test_utils.rst create mode 100644 docs/onadata.libs.tests.data.rst create mode 100644 docs/onadata.libs.tests.models.rst delete mode 100644 docs/onadata.settings.rst diff --git a/docs/authentication.rst b/docs/authentication.rst index 7335501134..478ea14b1d 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -57,6 +57,7 @@ To activate authentication via temporary token you must add the TemporaryToken class to your local_settings.py file, for example: :: + REST_FRAMEWORK['DEFAULT_AUTHENTICATION_CLASSES'] = ( 'onadata.libs.authentication.DigestAuthentication', 'onadata.libs.authentication.TempTokenAuthentication', @@ -65,7 +66,7 @@ class to your local_settings.py file, for example: Using Oauth2 with the Ona API ----------------------------- -You can learn more about oauth2 `here`_. +You can learn more about oauth2 `here `_. 1. Register your client application with Ona - `register`_ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -78,7 +79,6 @@ You can learn more about oauth2 `here`_. Keep note of the ``client_id`` and the ``client_secret``, it is required when requesting for an ``access_token``. -.. 
_here: http://tools.ietf.org/html/rfc6749 .. _register: /o/applications/register/ 2. Authorize client application. @@ -191,7 +191,7 @@ Example using curl: Making CORS - Cross-Origin Resource Sharing - requests to the Ona API ---------------------------------------------------------------------- -To find out more about CORS, you can read about it `here `_. The following is a javascript code snippet on how to make a CORS request. +To find out more about CORS, you can read about it `here `__. The following is a javascript code snippet on how to make a CORS request. .. code-block:: javascript @@ -221,8 +221,8 @@ The following is a jquery code snippet on how to make a CORS request. OpenID Connect Authentication ------------------------------ - + .. toctree:: - :maxdepth: 2 + :maxdepth: 2 -open-id-connect \ No newline at end of file + open-id-connect diff --git a/docs/conf.py b/docs/conf.py index e18223a4dc..d6b704b5f1 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -16,6 +16,8 @@ import sys from os.path import abspath, dirname +import django + # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. @@ -76,7 +78,7 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = ["_build", "_themes"] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -137,7 +139,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
-html_static_path = ["_static"] +html_static_path = [] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied @@ -268,4 +270,6 @@ # Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = {"http://docs.python.org/": None} +intersphinx_mapping = {"python": ("http://docs.python.org/3", None)} + +django.setup() diff --git a/docs/index.rst b/docs/index.rst index 9379226b00..7e31919856 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -77,26 +77,26 @@ Flow Results Packages --------------------- .. toctree:: - :maxdepth: 2 + :maxdepth: 2 - flow-results + flow-results Onadata-Tableau Intergration ------------------------------ .. toctree:: - :maxdepth: 2 + :maxdepth: 2 - onadata-tableau + onadata-tableau Event tracking and messaging (Beta) ----------------------------------- .. toctree:: - :maxdepth: 2 + :maxdepth: 2 - messaging - messaging_stats + messaging + messaging_stats Ona Tagging API ~~~~~~~~~~~~~~~ diff --git a/docs/modules.rst b/docs/modules.rst index 7973ba574d..d1b9a75b83 100644 --- a/docs/modules.rst +++ b/docs/modules.rst @@ -1,5 +1,5 @@ -onadata -======= +onadata-api +=========== .. toctree:: :maxdepth: 4 diff --git a/docs/onadata.apps.api.management.commands.rst b/docs/onadata.apps.api.management.commands.rst index d9bef4eb48..91aef84d7c 100644 --- a/docs/onadata.apps.api.management.commands.rst +++ b/docs/onadata.apps.api.management.commands.rst @@ -4,99 +4,130 @@ onadata.apps.api.management.commands package Submodules ---------- -onadata.apps.api.management.commands.assign_team_member_permission module -------------------------------------------------------------------------- +onadata.apps.api.management.commands.apply\_can\_add\_project\_perms module +--------------------------------------------------------------------------- + +.. 
automodule:: onadata.apps.api.management.commands.apply_can_add_project_perms + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.management.commands.assign\_team\_member\_permission module +---------------------------------------------------------------------------- .. automodule:: onadata.apps.api.management.commands.assign_team_member_permission - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.management.commands.cleanup_permissions module ---------------------------------------------------------------- +onadata.apps.api.management.commands.cleanup\_permissions module +---------------------------------------------------------------- .. automodule:: onadata.apps.api.management.commands.cleanup_permissions - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.management.commands.create_default_project module ------------------------------------------------------------------- +onadata.apps.api.management.commands.create\_default\_project module +-------------------------------------------------------------------- .. automodule:: onadata.apps.api.management.commands.create_default_project - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.management.commands.create\_user\_profiles module +------------------------------------------------------------------ -onadata.apps.api.management.commands.fix_readonly_role_perms module -------------------------------------------------------------------- +.. automodule:: onadata.apps.api.management.commands.create_user_profiles + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.management.commands.delete\_users module +--------------------------------------------------------- + +.. 
automodule:: onadata.apps.api.management.commands.delete_users + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.management.commands.fix\_readonly\_role\_perms module +---------------------------------------------------------------------- .. automodule:: onadata.apps.api.management.commands.fix_readonly_role_perms - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.management.commands.migrate_group_permissions module ---------------------------------------------------------------------- +onadata.apps.api.management.commands.increase\_odk\_token\_lifetime module +-------------------------------------------------------------------------- -.. automodule:: onadata.apps.api.management.commands.migrate_group_permissions - :members: - :undoc-members: - :show-inheritance: +.. automodule:: onadata.apps.api.management.commands.increase_odk_token_lifetime + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.management.commands.migrate_permissions module ---------------------------------------------------------------- +onadata.apps.api.management.commands.migrate\_group\_permissions module +----------------------------------------------------------------------- -.. automodule:: onadata.apps.api.management.commands.migrate_permissions - :members: - :undoc-members: - :show-inheritance: +.. automodule:: onadata.apps.api.management.commands.migrate_group_permissions + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.management.commands.print_profiler_results module ------------------------------------------------------------------- +onadata.apps.api.management.commands.migrate\_permissions module +---------------------------------------------------------------- -.. automodule:: onadata.apps.api.management.commands.print_profiler_results - :members: - :undoc-members: - :show-inheritance: +.. 
automodule:: onadata.apps.api.management.commands.migrate_permissions + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.management.commands.reassign_permission module ---------------------------------------------------------------- +onadata.apps.api.management.commands.reassign\_permission module +---------------------------------------------------------------- .. automodule:: onadata.apps.api.management.commands.reassign_permission - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.management.commands.regenerate_auth_tokens module ------------------------------------------------------------------- +onadata.apps.api.management.commands.regenerate\_auth\_tokens module +-------------------------------------------------------------------- .. automodule:: onadata.apps.api.management.commands.regenerate_auth_tokens - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.management.commands.reset_rest_services module ---------------------------------------------------------------- +onadata.apps.api.management.commands.reset\_rest\_services module +----------------------------------------------------------------- .. automodule:: onadata.apps.api.management.commands.reset_rest_services - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.management.commands.set_api_permissions module ---------------------------------------------------------------- +onadata.apps.api.management.commands.retrieve\_org\_or\_project\_list module +---------------------------------------------------------------------------- -.. automodule:: onadata.apps.api.management.commands.set_api_permissions - :members: - :undoc-members: - :show-inheritance: +.. 
automodule:: onadata.apps.api.management.commands.retrieve_org_or_project_list + :members: + :undoc-members: + :show-inheritance: +onadata.apps.api.management.commands.set\_api\_permissions module +----------------------------------------------------------------- + +.. automodule:: onadata.apps.api.management.commands.set_api_permissions + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.api.management.commands - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.api.management.rst b/docs/onadata.apps.api.management.rst index f004f9756f..9749a95656 100644 --- a/docs/onadata.apps.api.management.rst +++ b/docs/onadata.apps.api.management.rst @@ -5,13 +5,14 @@ Subpackages ----------- .. toctree:: + :maxdepth: 4 - onadata.apps.api.management.commands + onadata.apps.api.management.commands Module contents --------------- .. automodule:: onadata.apps.api.management - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.api.migrations.rst b/docs/onadata.apps.api.migrations.rst index fb5f333c8a..8754f5b227 100644 --- a/docs/onadata.apps.api.migrations.rst +++ b/docs/onadata.apps.api.migrations.rst @@ -4,27 +4,66 @@ onadata.apps.api.migrations package Submodules ---------- -onadata.apps.api.migrations.0001_initial module ------------------------------------------------ +onadata.apps.api.migrations.0001\_initial module +------------------------------------------------ .. 
automodule:: onadata.apps.api.migrations.0001_initial - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.migrations.0002_auto_20151014_0909 module ----------------------------------------------------------- +onadata.apps.api.migrations.0002\_auto\_20151014\_0909 module +------------------------------------------------------------- .. automodule:: onadata.apps.api.migrations.0002_auto_20151014_0909 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.migrations.0003\_auto\_20180425\_0754 module +------------------------------------------------------------- + +.. automodule:: onadata.apps.api.migrations.0003_auto_20180425_0754 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.migrations.0004\_auto\_20190125\_0517 module +------------------------------------------------------------- + +.. automodule:: onadata.apps.api.migrations.0004_auto_20190125_0517 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.migrations.0005\_auto\_20191018\_0735 module +------------------------------------------------------------- + +.. automodule:: onadata.apps.api.migrations.0005_auto_20191018_0735 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.migrations.0006\_auto\_20191025\_0730 module +------------------------------------------------------------- + +.. automodule:: onadata.apps.api.migrations.0006_auto_20191025_0730 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.migrations.0007\_odktoken\_expires module +---------------------------------------------------------- +.. automodule:: onadata.apps.api.migrations.0007_odktoken_expires + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. 
automodule:: onadata.apps.api.migrations - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.api.models.rst b/docs/onadata.apps.api.models.rst index 9386e64fee..0de2f911af 100644 --- a/docs/onadata.apps.api.models.rst +++ b/docs/onadata.apps.api.models.rst @@ -4,35 +4,42 @@ onadata.apps.api.models package Submodules ---------- -onadata.apps.api.models.organization_profile module ---------------------------------------------------- +onadata.apps.api.models.odk\_token module +----------------------------------------- + +.. automodule:: onadata.apps.api.models.odk_token + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.models.organization\_profile module +---------------------------------------------------- .. automodule:: onadata.apps.api.models.organization_profile - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.apps.api.models.team module ----------------------------------- .. automodule:: onadata.apps.api.models.team - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.models.temp_token module ------------------------------------------ +onadata.apps.api.models.temp\_token module +------------------------------------------ .. automodule:: onadata.apps.api.models.temp_token - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.api.models - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.api.rst b/docs/onadata.apps.api.rst index a15e289d57..825e9ae102 100644 --- a/docs/onadata.apps.api.rst +++ b/docs/onadata.apps.api.rst @@ -5,12 +5,14 @@ Subpackages ----------- .. 
toctree:: + :maxdepth: 4 - onadata.apps.api.management - onadata.apps.api.migrations - onadata.apps.api.models - onadata.apps.api.tests - onadata.apps.api.viewsets + onadata.apps.api.management + onadata.apps.api.migrations + onadata.apps.api.models + onadata.apps.api.tests + onadata.apps.api.urls + onadata.apps.api.viewsets Submodules ---------- @@ -19,47 +21,46 @@ onadata.apps.api.admin module ----------------------------- .. automodule:: onadata.apps.api.admin - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.apps.api.permissions module ----------------------------------- .. automodule:: onadata.apps.api.permissions - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.storage module +------------------------------- + +.. automodule:: onadata.apps.api.storage + :members: + :undoc-members: + :show-inheritance: onadata.apps.api.tasks module ----------------------------- .. automodule:: onadata.apps.api.tasks - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.apps.api.tools module ----------------------------- .. automodule:: onadata.apps.api.tools - :members: - :undoc-members: - :show-inheritance: - -onadata.apps.api.urls module ----------------------------- - -.. automodule:: onadata.apps.api.urls - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. 
automodule:: onadata.apps.api - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.api.tests.management.commands.rst b/docs/onadata.apps.api.tests.management.commands.rst new file mode 100644 index 0000000000..b46e99a988 --- /dev/null +++ b/docs/onadata.apps.api.tests.management.commands.rst @@ -0,0 +1,45 @@ +onadata.apps.api.tests.management.commands package +================================================== + +Submodules +---------- + +onadata.apps.api.tests.management.commands.test\_create\_user\_profiles module +------------------------------------------------------------------------------ + +.. automodule:: onadata.apps.api.tests.management.commands.test_create_user_profiles + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.tests.management.commands.test\_delete\_users module +--------------------------------------------------------------------- + +.. automodule:: onadata.apps.api.tests.management.commands.test_delete_users + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.tests.management.commands.test\_increase\_odk\_token\_lifetime module +-------------------------------------------------------------------------------------- + +.. automodule:: onadata.apps.api.tests.management.commands.test_increase_odk_token_lifetime + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.tests.management.commands.test\_retrieve\_org\_or\_project\_list module +---------------------------------------------------------------------------------------- + +.. automodule:: onadata.apps.api.tests.management.commands.test_retrieve_org_or_project_list + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: onadata.apps.api.tests.management.commands + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.api.tests.management.rst b/docs/onadata.apps.api.tests.management.rst index 48bf694ff2..13eb58a9ab 100644 --- a/docs/onadata.apps.api.tests.management.rst +++ b/docs/onadata.apps.api.tests.management.rst @@ -5,13 +5,14 @@ Subpackages ----------- .. toctree:: + :maxdepth: 4 - onadata.apps.api.tests.management.commands + onadata.apps.api.tests.management.commands Module contents --------------- .. automodule:: onadata.apps.api.tests.management - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.api.tests.models.rst b/docs/onadata.apps.api.tests.models.rst index 54efc86bf2..50911973da 100644 --- a/docs/onadata.apps.api.tests.models.rst +++ b/docs/onadata.apps.api.tests.models.rst @@ -4,51 +4,58 @@ onadata.apps.api.tests.models package Submodules ---------- -onadata.apps.api.tests.models.test_abstract_models module ---------------------------------------------------------- +onadata.apps.api.tests.models.test\_abstract\_models module +----------------------------------------------------------- .. automodule:: onadata.apps.api.tests.models.test_abstract_models - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.models.test_organization_profile module --------------------------------------------------------------- +onadata.apps.api.tests.models.test\_odk\_token module +----------------------------------------------------- + +.. automodule:: onadata.apps.api.tests.models.test_odk_token + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.tests.models.test\_organization\_profile module +---------------------------------------------------------------- .. 
automodule:: onadata.apps.api.tests.models.test_organization_profile - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.models.test_project module -------------------------------------------------- +onadata.apps.api.tests.models.test\_project module +-------------------------------------------------- .. automodule:: onadata.apps.api.tests.models.test_project - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.models.test_team module ----------------------------------------------- +onadata.apps.api.tests.models.test\_team module +----------------------------------------------- .. automodule:: onadata.apps.api.tests.models.test_team - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.models.test_temp_token module ----------------------------------------------------- +onadata.apps.api.tests.models.test\_temp\_token module +------------------------------------------------------ .. automodule:: onadata.apps.api.tests.models.test_temp_token - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. 
automodule:: onadata.apps.api.tests.models - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.api.tests.permissions.rst b/docs/onadata.apps.api.tests.permissions.rst index 909ad3e23c..2fce4eeee4 100644 --- a/docs/onadata.apps.api.tests.permissions.rst +++ b/docs/onadata.apps.api.tests.permissions.rst @@ -1,22 +1,21 @@ -onadata\.apps\.api\.tests\.permissions package -============================================== +onadata.apps.api.tests.permissions package +========================================== Submodules ---------- -onadata\.apps\.api\.tests\.permissions\.test\_permissions module ----------------------------------------------------------------- +onadata.apps.api.tests.permissions.test\_permissions module +----------------------------------------------------------- .. automodule:: onadata.apps.api.tests.permissions.test_permissions - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.api.tests.permissions - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.api.tests.rst b/docs/onadata.apps.api.tests.rst index 9ca40dbb77..35a7e90503 100644 --- a/docs/onadata.apps.api.tests.rst +++ b/docs/onadata.apps.api.tests.rst @@ -5,16 +5,45 @@ Subpackages ----------- .. toctree:: + :maxdepth: 4 - onadata.apps.api.tests.management - onadata.apps.api.tests.models - onadata.apps.api.tests.views - onadata.apps.api.tests.viewsets + onadata.apps.api.tests.management + onadata.apps.api.tests.models + onadata.apps.api.tests.permissions + onadata.apps.api.tests.views + onadata.apps.api.tests.viewsets + +Submodules +---------- + +onadata.apps.api.tests.mocked\_data module +------------------------------------------ + +.. 
automodule:: onadata.apps.api.tests.mocked_data + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.tests.test\_tasks module +----------------------------------------- + +.. automodule:: onadata.apps.api.tests.test_tasks + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.tests.test\_tools module +----------------------------------------- + +.. automodule:: onadata.apps.api.tests.test_tools + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.api.tests - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.api.tests.views.rst b/docs/onadata.apps.api.tests.views.rst index 66e8bb30a3..7aa3225c26 100644 --- a/docs/onadata.apps.api.tests.views.rst +++ b/docs/onadata.apps.api.tests.views.rst @@ -4,19 +4,26 @@ onadata.apps.api.tests.views package Submodules ---------- -onadata.apps.api.tests.views.test_user_permissions module ---------------------------------------------------------- +onadata.apps.api.tests.views.test\_tasks module +----------------------------------------------- -.. automodule:: onadata.apps.api.tests.views.test_user_permissions - :members: - :undoc-members: - :show-inheritance: +.. automodule:: onadata.apps.api.tests.views.test_tasks + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.tests.views.test\_user\_permissions module +----------------------------------------------------------- +.. automodule:: onadata.apps.api.tests.views.test_user_permissions + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. 
automodule:: onadata.apps.api.tests.views - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.api.tests.viewsets.rst b/docs/onadata.apps.api.tests.viewsets.rst index be6874d20e..e3fb29eb57 100644 --- a/docs/onadata.apps.api.tests.viewsets.rst +++ b/docs/onadata.apps.api.tests.viewsets.rst @@ -4,203 +4,258 @@ onadata.apps.api.tests.viewsets package Submodules ---------- -onadata.apps.api.tests.viewsets.test_abstract_viewset module ------------------------------------------------------------- +onadata.apps.api.tests.viewsets.test\_abstract\_viewset module +-------------------------------------------------------------- .. automodule:: onadata.apps.api.tests.viewsets.test_abstract_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.viewsets.test_attachment_viewset module --------------------------------------------------------------- +onadata.apps.api.tests.viewsets.test\_attachment\_viewset module +---------------------------------------------------------------- .. automodule:: onadata.apps.api.tests.viewsets.test_attachment_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.viewsets.test_briefcase_viewset module -------------------------------------------------------------- +onadata.apps.api.tests.viewsets.test\_briefcase\_viewset module +--------------------------------------------------------------- .. automodule:: onadata.apps.api.tests.viewsets.test_briefcase_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.viewsets.test_charts_viewset module ----------------------------------------------------------- +onadata.apps.api.tests.viewsets.test\_charts\_viewset module +------------------------------------------------------------ .. 
automodule:: onadata.apps.api.tests.viewsets.test_charts_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.viewsets.test_connect_viewset module ------------------------------------------------------------ +onadata.apps.api.tests.viewsets.test\_connect\_viewset module +------------------------------------------------------------- .. automodule:: onadata.apps.api.tests.viewsets.test_connect_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.viewsets.test_data_viewset module --------------------------------------------------------- +onadata.apps.api.tests.viewsets.test\_data\_viewset module +---------------------------------------------------------- .. automodule:: onadata.apps.api.tests.viewsets.test_data_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.viewsets.test_dataview_viewset module ------------------------------------------------------------- +onadata.apps.api.tests.viewsets.test\_dataview\_viewset module +-------------------------------------------------------------- .. automodule:: onadata.apps.api.tests.viewsets.test_dataview_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.viewsets.test_export_viewset module ----------------------------------------------------------- +onadata.apps.api.tests.viewsets.test\_entity\_list\_viewset module +------------------------------------------------------------------ + +.. automodule:: onadata.apps.api.tests.viewsets.test_entity_list_viewset + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.tests.viewsets.test\_export\_viewset module +------------------------------------------------------------ .. 
automodule:: onadata.apps.api.tests.viewsets.test_export_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.viewsets.test_legacy_exports module ----------------------------------------------------------- +onadata.apps.api.tests.viewsets.test\_floip\_viewset module +----------------------------------------------------------- + +.. automodule:: onadata.apps.api.tests.viewsets.test_floip_viewset + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.tests.viewsets.test\_legacy\_exports module +------------------------------------------------------------ .. automodule:: onadata.apps.api.tests.viewsets.test_legacy_exports - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.viewsets.test_media_viewset module ---------------------------------------------------------- +onadata.apps.api.tests.viewsets.test\_media\_viewset module +----------------------------------------------------------- .. automodule:: onadata.apps.api.tests.viewsets.test_media_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.viewsets.test_metadata_viewset module ------------------------------------------------------------- +onadata.apps.api.tests.viewsets.test\_merged\_xform\_viewset module +------------------------------------------------------------------- + +.. automodule:: onadata.apps.api.tests.viewsets.test_merged_xform_viewset + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.tests.viewsets.test\_messaging\_stats\_viewset module +---------------------------------------------------------------------- + +.. 
automodule:: onadata.apps.api.tests.viewsets.test_messaging_stats_viewset + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.tests.viewsets.test\_metadata\_viewset module +-------------------------------------------------------------- .. automodule:: onadata.apps.api.tests.viewsets.test_metadata_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.viewsets.test_note_viewset module --------------------------------------------------------- +onadata.apps.api.tests.viewsets.test\_note\_viewset module +---------------------------------------------------------- .. automodule:: onadata.apps.api.tests.viewsets.test_note_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.viewsets.test_ona_api module ---------------------------------------------------- +onadata.apps.api.tests.viewsets.test\_ona\_api module +----------------------------------------------------- .. automodule:: onadata.apps.api.tests.viewsets.test_ona_api - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.viewsets.test_organization_profile_viewset module ------------------------------------------------------------------------- +onadata.apps.api.tests.viewsets.test\_open\_data\_viewset module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.api.tests.viewsets.test_open_data_viewset + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.tests.viewsets.test\_organization\_profile\_viewset module +--------------------------------------------------------------------------- .. 
automodule:: onadata.apps.api.tests.viewsets.test_organization_profile_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.viewsets.test_osm_viewset module -------------------------------------------------------- +onadata.apps.api.tests.viewsets.test\_osm\_viewset module +--------------------------------------------------------- .. automodule:: onadata.apps.api.tests.viewsets.test_osm_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.viewsets.test_project_viewset module ------------------------------------------------------------ +onadata.apps.api.tests.viewsets.test\_project\_viewset module +------------------------------------------------------------- .. automodule:: onadata.apps.api.tests.viewsets.test_project_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.viewsets.test_stats_viewset module ---------------------------------------------------------- +onadata.apps.api.tests.viewsets.test\_stats\_viewset module +----------------------------------------------------------- .. automodule:: onadata.apps.api.tests.viewsets.test_stats_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.tests.viewsets.test\_submission\_review\_viewset module +------------------------------------------------------------------------ -onadata.apps.api.tests.viewsets.test_team_viewset module --------------------------------------------------------- +.. automodule:: onadata.apps.api.tests.viewsets.test_submission_review_viewset + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.tests.viewsets.test\_tableau\_viewset module +------------------------------------------------------------- + +.. 
automodule:: onadata.apps.api.tests.viewsets.test_tableau_viewset + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.tests.viewsets.test\_team\_viewset module +---------------------------------------------------------- .. automodule:: onadata.apps.api.tests.viewsets.test_team_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.viewsets.test_user_profile_viewset module ----------------------------------------------------------------- +onadata.apps.api.tests.viewsets.test\_user\_profile\_viewset module +------------------------------------------------------------------- .. automodule:: onadata.apps.api.tests.viewsets.test_user_profile_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.viewsets.test_user_viewset module --------------------------------------------------------- +onadata.apps.api.tests.viewsets.test\_user\_viewset module +---------------------------------------------------------- .. automodule:: onadata.apps.api.tests.viewsets.test_user_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.viewsets.test_widget_viewset module ----------------------------------------------------------- +onadata.apps.api.tests.viewsets.test\_widget\_viewset module +------------------------------------------------------------ .. automodule:: onadata.apps.api.tests.viewsets.test_widget_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.viewsets.test_xform_list_viewset module --------------------------------------------------------------- +onadata.apps.api.tests.viewsets.test\_xform\_list\_viewset module +----------------------------------------------------------------- .. 
automodule:: onadata.apps.api.tests.viewsets.test_xform_list_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.viewsets.test_xform_submission_viewset module --------------------------------------------------------------------- +onadata.apps.api.tests.viewsets.test\_xform\_submission\_viewset module +----------------------------------------------------------------------- .. automodule:: onadata.apps.api.tests.viewsets.test_xform_submission_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.tests.viewsets.test_xform_viewset module ---------------------------------------------------------- +onadata.apps.api.tests.viewsets.test\_xform\_viewset module +----------------------------------------------------------- .. automodule:: onadata.apps.api.tests.viewsets.test_xform_viewset - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.api.tests.viewsets - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.api.urls.rst b/docs/onadata.apps.api.urls.rst new file mode 100644 index 0000000000..fa7d10520f --- /dev/null +++ b/docs/onadata.apps.api.urls.rst @@ -0,0 +1,29 @@ +onadata.apps.api.urls package +============================= + +Submodules +---------- + +onadata.apps.api.urls.v1\_urls module +------------------------------------- + +.. automodule:: onadata.apps.api.urls.v1_urls + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.urls.v2\_urls module +------------------------------------- + +.. automodule:: onadata.apps.api.urls.v2_urls + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: onadata.apps.api.urls + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.api.viewsets.rst b/docs/onadata.apps.api.viewsets.rst index 3c9c1f2aa4..8d95c56789 100644 --- a/docs/onadata.apps.api.viewsets.rst +++ b/docs/onadata.apps.api.viewsets.rst @@ -1,190 +1,245 @@ onadata.apps.api.viewsets package ================================= +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + onadata.apps.api.viewsets.v2 + Submodules ---------- -onadata.apps.api.viewsets.attachment_viewset module ---------------------------------------------------- +onadata.apps.api.viewsets.attachment\_viewset module +---------------------------------------------------- .. automodule:: onadata.apps.api.viewsets.attachment_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.viewsets.briefcase_viewset module --------------------------------------------------- +onadata.apps.api.viewsets.briefcase\_viewset module +--------------------------------------------------- .. automodule:: onadata.apps.api.viewsets.briefcase_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.viewsets.charts_viewset module ------------------------------------------------ +onadata.apps.api.viewsets.charts\_viewset module +------------------------------------------------ .. automodule:: onadata.apps.api.viewsets.charts_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.viewsets.connect_viewset module ------------------------------------------------- +onadata.apps.api.viewsets.connect\_viewset module +------------------------------------------------- .. 
automodule:: onadata.apps.api.viewsets.connect_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.viewsets.data_viewset module ---------------------------------------------- +onadata.apps.api.viewsets.data\_viewset module +---------------------------------------------- .. automodule:: onadata.apps.api.viewsets.data_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.viewsets.dataview_viewset module -------------------------------------------------- +onadata.apps.api.viewsets.dataview\_viewset module +-------------------------------------------------- .. automodule:: onadata.apps.api.viewsets.dataview_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.viewsets.export_viewset module ------------------------------------------------ +onadata.apps.api.viewsets.entity\_list\_viewset module +------------------------------------------------------ + +.. automodule:: onadata.apps.api.viewsets.entity_list_viewset + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.viewsets.export\_viewset module +------------------------------------------------ .. automodule:: onadata.apps.api.viewsets.export_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.viewsets.media_viewset module ----------------------------------------------- +onadata.apps.api.viewsets.floip\_viewset module +----------------------------------------------- + +.. automodule:: onadata.apps.api.viewsets.floip_viewset + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.viewsets.media\_viewset module +----------------------------------------------- .. 
automodule:: onadata.apps.api.viewsets.media_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.viewsets.metadata_viewset module -------------------------------------------------- +onadata.apps.api.viewsets.merged\_xform\_viewset module +------------------------------------------------------- + +.. automodule:: onadata.apps.api.viewsets.merged_xform_viewset + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.viewsets.messaging\_stats\_viewset module +---------------------------------------------------------- + +.. automodule:: onadata.apps.api.viewsets.messaging_stats_viewset + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.viewsets.metadata\_viewset module +-------------------------------------------------- .. automodule:: onadata.apps.api.viewsets.metadata_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.viewsets.note_viewset module ---------------------------------------------- +onadata.apps.api.viewsets.note\_viewset module +---------------------------------------------- .. automodule:: onadata.apps.api.viewsets.note_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.viewsets.organization_profile_viewset module -------------------------------------------------------------- +onadata.apps.api.viewsets.open\_data\_viewset module +---------------------------------------------------- + +.. automodule:: onadata.apps.api.viewsets.open_data_viewset + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.viewsets.organization\_profile\_viewset module +--------------------------------------------------------------- .. 
automodule:: onadata.apps.api.viewsets.organization_profile_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.viewsets.osm_viewset module --------------------------------------------- +onadata.apps.api.viewsets.osm\_viewset module +--------------------------------------------- .. automodule:: onadata.apps.api.viewsets.osm_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.viewsets.project_viewset module ------------------------------------------------- +onadata.apps.api.viewsets.project\_viewset module +------------------------------------------------- .. automodule:: onadata.apps.api.viewsets.project_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.viewsets.stats_viewset module ----------------------------------------------- +onadata.apps.api.viewsets.stats\_viewset module +----------------------------------------------- .. automodule:: onadata.apps.api.viewsets.stats_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.viewsets.submissionstats_viewset module --------------------------------------------------------- +onadata.apps.api.viewsets.submission\_review\_viewset module +------------------------------------------------------------ + +.. automodule:: onadata.apps.api.viewsets.submission_review_viewset + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.api.viewsets.submissionstats\_viewset module +--------------------------------------------------------- .. 
automodule:: onadata.apps.api.viewsets.submissionstats_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.viewsets.team_viewset module ---------------------------------------------- +onadata.apps.api.viewsets.team\_viewset module +---------------------------------------------- .. automodule:: onadata.apps.api.viewsets.team_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.viewsets.user_profile_viewset module ------------------------------------------------------ +onadata.apps.api.viewsets.user\_profile\_viewset module +------------------------------------------------------- .. automodule:: onadata.apps.api.viewsets.user_profile_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.viewsets.user_viewset module ---------------------------------------------- +onadata.apps.api.viewsets.user\_viewset module +---------------------------------------------- .. automodule:: onadata.apps.api.viewsets.user_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.viewsets.widget_viewset module ------------------------------------------------ +onadata.apps.api.viewsets.widget\_viewset module +------------------------------------------------ .. automodule:: onadata.apps.api.viewsets.widget_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.viewsets.xform_list_viewset module ---------------------------------------------------- +onadata.apps.api.viewsets.xform\_list\_viewset module +----------------------------------------------------- .. 
automodule:: onadata.apps.api.viewsets.xform_list_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.viewsets.xform_submission_viewset module ---------------------------------------------------------- +onadata.apps.api.viewsets.xform\_submission\_viewset module +----------------------------------------------------------- .. automodule:: onadata.apps.api.viewsets.xform_submission_viewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.api.viewsets.xform_viewset module ----------------------------------------------- +onadata.apps.api.viewsets.xform\_viewset module +----------------------------------------------- .. automodule:: onadata.apps.api.viewsets.xform_viewset - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.api.viewsets - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.api.viewsets.v2.rst b/docs/onadata.apps.api.viewsets.v2.rst new file mode 100644 index 0000000000..7a51648939 --- /dev/null +++ b/docs/onadata.apps.api.viewsets.v2.rst @@ -0,0 +1,21 @@ +onadata.apps.api.viewsets.v2 package +==================================== + +Submodules +---------- + +onadata.apps.api.viewsets.v2.tableau\_viewset module +---------------------------------------------------- + +.. automodule:: onadata.apps.api.viewsets.v2.tableau_viewset + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: onadata.apps.api.viewsets.v2 + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.logger.management.commands.rst b/docs/onadata.apps.logger.management.commands.rst index 28be343761..bf94a82210 100644 --- a/docs/onadata.apps.logger.management.commands.rst +++ b/docs/onadata.apps.logger.management.commands.rst @@ -1,198 +1,269 @@ onadata.apps.logger.management.commands package =============================================== +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + onadata.apps.logger.management.commands.tests + Submodules ---------- -onadata.apps.logger.management.commands.add_id module ------------------------------------------------------ +onadata.apps.logger.management.commands.add\_id module +------------------------------------------------------ .. automodule:: onadata.apps.logger.management.commands.add_id - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.management.commands.change_s3_media_permissions module --------------------------------------------------------------------------- +onadata.apps.logger.management.commands.change\_s3\_media\_permissions module +----------------------------------------------------------------------------- .. automodule:: onadata.apps.logger.management.commands.change_s3_media_permissions - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.management.commands.create_backup module ------------------------------------------------------------- +onadata.apps.logger.management.commands.create\_backup module +------------------------------------------------------------- .. 
automodule:: onadata.apps.logger.management.commands.create_backup - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.management.commands.create_image_thumbnails module ----------------------------------------------------------------------- +onadata.apps.logger.management.commands.create\_image\_thumbnails module +------------------------------------------------------------------------ .. automodule:: onadata.apps.logger.management.commands.create_image_thumbnails - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.management.commands.export_gps_points module ----------------------------------------------------------------- +onadata.apps.logger.management.commands.export\_gps\_points module +------------------------------------------------------------------ .. automodule:: onadata.apps.logger.management.commands.export_gps_points - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.management.commands.export_xforms_and_instances module --------------------------------------------------------------------------- +onadata.apps.logger.management.commands.export\_xforms\_and\_instances module +----------------------------------------------------------------------------- .. automodule:: onadata.apps.logger.management.commands.export_xforms_and_instances - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.management.commands.fix\_attachments\_counts module +----------------------------------------------------------------------- -onadata.apps.logger.management.commands.fix_duplicate_instances module ----------------------------------------------------------------------- +.. 
automodule:: onadata.apps.logger.management.commands.fix_attachments_counts + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.management.commands.fix\_duplicate\_instances module +------------------------------------------------------------------------ .. automodule:: onadata.apps.logger.management.commands.fix_duplicate_instances - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.management.commands.fix_submission_count module -------------------------------------------------------------------- +onadata.apps.logger.management.commands.fix\_submission\_count module +--------------------------------------------------------------------- .. automodule:: onadata.apps.logger.management.commands.fix_submission_count - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.management.commands.generate\_platform\_stats module +------------------------------------------------------------------------ + +.. automodule:: onadata.apps.logger.management.commands.generate_platform_stats + :members: + :undoc-members: + :show-inheritance: onadata.apps.logger.management.commands.import module ----------------------------------------------------- .. automodule:: onadata.apps.logger.management.commands.import - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.management.commands.import_briefcase module ---------------------------------------------------------------- +onadata.apps.logger.management.commands.import\_briefcase module +---------------------------------------------------------------- .. 
automodule:: onadata.apps.logger.management.commands.import_briefcase - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.management.commands.import_forms module ------------------------------------------------------------ +onadata.apps.logger.management.commands.import\_forms module +------------------------------------------------------------ .. automodule:: onadata.apps.logger.management.commands.import_forms - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.management.commands.import_instances module ---------------------------------------------------------------- +onadata.apps.logger.management.commands.import\_instances module +---------------------------------------------------------------- .. automodule:: onadata.apps.logger.management.commands.import_instances - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.management.commands.import_tools module ------------------------------------------------------------ +onadata.apps.logger.management.commands.import\_tools module +------------------------------------------------------------ .. automodule:: onadata.apps.logger.management.commands.import_tools - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.management.commands.move_media_to_s3 module ---------------------------------------------------------------- +onadata.apps.logger.management.commands.move\_media\_to\_s3 module +------------------------------------------------------------------ .. 
automodule:: onadata.apps.logger.management.commands.move_media_to_s3 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.management.commands.populate_osmdata_model module ---------------------------------------------------------------------- +onadata.apps.logger.management.commands.populate\_osmdata\_model module +----------------------------------------------------------------------- .. automodule:: onadata.apps.logger.management.commands.populate_osmdata_model - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.management.commands.publish_xls module ----------------------------------------------------------- +onadata.apps.logger.management.commands.publish\_xls module +----------------------------------------------------------- .. automodule:: onadata.apps.logger.management.commands.publish_xls - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.management.commands.pull_from_aggregate module ------------------------------------------------------------------- +onadata.apps.logger.management.commands.pull\_from\_aggregate module +-------------------------------------------------------------------- .. automodule:: onadata.apps.logger.management.commands.pull_from_aggregate - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.management.commands.restore_backup module -------------------------------------------------------------- +onadata.apps.logger.management.commands.reapplyperms module +----------------------------------------------------------- + +.. 
automodule:: onadata.apps.logger.management.commands.reapplyperms + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.management.commands.recover\_deleted\_attachments module +---------------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.management.commands.recover_deleted_attachments + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.management.commands.regenerate\_instance\_json module +------------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.management.commands.regenerate_instance_json + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.management.commands.remove\_columns\_from\_briefcase\_data module +------------------------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.management.commands.remove_columns_from_briefcase_data + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.management.commands.replace\_form\_id\_root\_node module +---------------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.management.commands.replace_form_id_root_node + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.management.commands.restore\_backup module +-------------------------------------------------------------- .. automodule:: onadata.apps.logger.management.commands.restore_backup - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.management.commands.set_xform_surveys_with_geopoints module -------------------------------------------------------------------------------- +onadata.apps.logger.management.commands.set\_xform\_surveys\_with\_geopoints module +----------------------------------------------------------------------------------- .. 
automodule:: onadata.apps.logger.management.commands.set_xform_surveys_with_geopoints - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.management.commands.set_xform_surveys_with_osm module -------------------------------------------------------------------------- +onadata.apps.logger.management.commands.set\_xform\_surveys\_with\_osm module +----------------------------------------------------------------------------- .. automodule:: onadata.apps.logger.management.commands.set_xform_surveys_with_osm - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.management.commands.sync_deleted_instances_fix module -------------------------------------------------------------------------- +onadata.apps.logger.management.commands.sync\_deleted\_instances\_fix module +---------------------------------------------------------------------------- .. automodule:: onadata.apps.logger.management.commands.sync_deleted_instances_fix - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.management.commands.update_moved_forms module ------------------------------------------------------------------ +onadata.apps.logger.management.commands.transferproject module +-------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.management.commands.transferproject + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.management.commands.update\_moved\_forms module +------------------------------------------------------------------- .. 
automodule:: onadata.apps.logger.management.commands.update_moved_forms - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.management.commands.update_xform_uuids module ------------------------------------------------------------------ +onadata.apps.logger.management.commands.update\_xform\_uuids module +------------------------------------------------------------------- .. automodule:: onadata.apps.logger.management.commands.update_xform_uuids - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.logger.management.commands - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.logger.management.commands.tests.rst b/docs/onadata.apps.logger.management.commands.tests.rst new file mode 100644 index 0000000000..47b8163842 --- /dev/null +++ b/docs/onadata.apps.logger.management.commands.tests.rst @@ -0,0 +1,21 @@ +onadata.apps.logger.management.commands.tests package +===================================================== + +Submodules +---------- + +onadata.apps.logger.management.commands.tests.test\_regenerate\_instance\_json module +------------------------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.management.commands.tests.test_regenerate_instance_json + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: onadata.apps.logger.management.commands.tests + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.logger.management.rst b/docs/onadata.apps.logger.management.rst index c6012659b3..f40536ab0c 100644 --- a/docs/onadata.apps.logger.management.rst +++ b/docs/onadata.apps.logger.management.rst @@ -5,13 +5,14 @@ Subpackages ----------- .. 
toctree:: + :maxdepth: 4 - onadata.apps.logger.management.commands + onadata.apps.logger.management.commands Module contents --------------- .. automodule:: onadata.apps.logger.management - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.logger.migrations.rst b/docs/onadata.apps.logger.migrations.rst index 7baabf90f0..d356fb1284 100644 --- a/docs/onadata.apps.logger.migrations.rst +++ b/docs/onadata.apps.logger.migrations.rst @@ -4,227 +4,658 @@ onadata.apps.logger.migrations package Submodules ---------- -onadata.apps.logger.migrations.0001_initial module --------------------------------------------------- +onadata.apps.logger.migrations.0001\_initial module +--------------------------------------------------- .. automodule:: onadata.apps.logger.migrations.0001_initial - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0002_auto_20150717_0048 module -------------------------------------------------------------- +onadata.apps.logger.migrations.0001\_pre\-django\-3\-upgrade module +------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0001_pre-django-3-upgrade + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0002\_auto\_20150717\_0048 module +---------------------------------------------------------------- .. automodule:: onadata.apps.logger.migrations.0002_auto_20150717_0048 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0002\_auto\_20220425\_0340 module +---------------------------------------------------------------- -onadata.apps.logger.migrations.0003_dataview_instances_with_geopoints module ----------------------------------------------------------------------------- +.. 
automodule:: onadata.apps.logger.migrations.0002_auto_20220425_0340 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0003\_alter\_instance\_media\_all\_received module +--------------------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0003_alter_instance_media_all_received + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0003\_dataview\_instances\_with\_geopoints module +-------------------------------------------------------------------------------- .. automodule:: onadata.apps.logger.migrations.0003_dataview_instances_with_geopoints - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0004_auto_20150910_0056 module -------------------------------------------------------------- +onadata.apps.logger.migrations.0004\_auto\_20150910\_0056 module +---------------------------------------------------------------- .. automodule:: onadata.apps.logger.migrations.0004_auto_20150910_0056 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0005_auto_20151015_0758 module -------------------------------------------------------------- +onadata.apps.logger.migrations.0004\_update\_instance\_geoms module +------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0004_update_instance_geoms + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0005\_auto\_20151015\_0758 module +---------------------------------------------------------------- .. 
automodule:: onadata.apps.logger.migrations.0005_auto_20151015_0758 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0006_auto_20151106_0130 module +onadata.apps.logger.migrations.0005\_projectinvitation module ------------------------------------------------------------- +.. automodule:: onadata.apps.logger.migrations.0005_projectinvitation + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0006\_auto\_20151106\_0130 module +---------------------------------------------------------------- + .. automodule:: onadata.apps.logger.migrations.0006_auto_20151106_0130 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0007_osmdata_field_name module -------------------------------------------------------------- +onadata.apps.logger.migrations.0006\_auto\_20230622\_0533 module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0006_auto_20230622_0533 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0007\_alter\_projectinvitation\_unique\_together module +-------------------------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0007_alter_projectinvitation_unique_together + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0007\_osmdata\_field\_name module +---------------------------------------------------------------- .. 
automodule:: onadata.apps.logger.migrations.0007_osmdata_field_name - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0008\_add\_date\_fields\_indexing module +----------------------------------------------------------------------- -onadata.apps.logger.migrations.0008_osmdata_osm_type module ------------------------------------------------------------ +.. automodule:: onadata.apps.logger.migrations.0008_add_date_fields_indexing + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0008\_osmdata\_osm\_type module +-------------------------------------------------------------- .. automodule:: onadata.apps.logger.migrations.0008_osmdata_osm_type - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0009_auto_20151111_0438 module -------------------------------------------------------------- +onadata.apps.logger.migrations.0009\_auto\_20151111\_0438 module +---------------------------------------------------------------- .. automodule:: onadata.apps.logger.migrations.0009_auto_20151111_0438 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0010_attachment_file_size module ---------------------------------------------------------------- +onadata.apps.logger.migrations.0009\_auto\_20230914\_0927 module +---------------------------------------------------------------- -.. automodule:: onadata.apps.logger.migrations.0010_attachment_file_size - :members: - :undoc-members: - :show-inheritance: +.. 
automodule:: onadata.apps.logger.migrations.0009_auto_20230914_0927 + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0011_dataview_matches_parent module +onadata.apps.logger.migrations.0010\_attachment\_file\_size module ------------------------------------------------------------------ +.. automodule:: onadata.apps.logger.migrations.0010_attachment_file_size + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0010\_auto\_20230921\_0346 module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0010_auto_20230921_0346 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0011\_add\_xform\_id\_instance\_id\_idx module +----------------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0011_add_xform_id_instance_id_idx + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0011\_dataview\_matches\_parent module +--------------------------------------------------------------------- + .. automodule:: onadata.apps.logger.migrations.0011_dataview_matches_parent - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0012_auto_20160114_0708 module -------------------------------------------------------------- +onadata.apps.logger.migrations.0012\_add\_instance\_history\_uuid\_and\_checksum\_idx module +-------------------------------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0012_add_instance_history_uuid_and_checksum_idx + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0012\_auto\_20160114\_0708 module +---------------------------------------------------------------- .. 
automodule:: onadata.apps.logger.migrations.0012_auto_20160114_0708 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0013_note_created_by module ----------------------------------------------------------- +onadata.apps.logger.migrations.0013\_add\_xform\_to\_logger\_attachment module +------------------------------------------------------------------------------ + +.. automodule:: onadata.apps.logger.migrations.0013_add_xform_to_logger_attachment + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0013\_note\_created\_by module +------------------------------------------------------------- .. automodule:: onadata.apps.logger.migrations.0013_note_created_by - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0014_note_instance_field module --------------------------------------------------------------- +onadata.apps.logger.migrations.0014\_note\_instance\_field module +----------------------------------------------------------------- .. automodule:: onadata.apps.logger.migrations.0014_note_instance_field - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0015_auto_20160222_0559 module -------------------------------------------------------------- +onadata.apps.logger.migrations.0014\_populate\_attachment\_xform module +----------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0014_populate_attachment_xform + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0015\_auto\_20160222\_0559 module +---------------------------------------------------------------- .. 
automodule:: onadata.apps.logger.migrations.0015_auto_20160222_0559 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0016_widget_aggregation module -------------------------------------------------------------- +onadata.apps.logger.migrations.0015\_entity\_entitylist\_followupform\_registrationform\_and\_more module +--------------------------------------------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0015_entity_entitylist_followupform_registrationform_and_more + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0016\_widget\_aggregation module +--------------------------------------------------------------- .. automodule:: onadata.apps.logger.migrations.0016_widget_aggregation - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0017_auto_20160224_0130 module -------------------------------------------------------------- +onadata.apps.logger.migrations.0017\_auto\_20160224\_0130 module +---------------------------------------------------------------- .. automodule:: onadata.apps.logger.migrations.0017_auto_20160224_0130 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0018_auto_20160301_0330 module -------------------------------------------------------------- +onadata.apps.logger.migrations.0018\_auto\_20160301\_0330 module +---------------------------------------------------------------- .. 
automodule:: onadata.apps.logger.migrations.0018_auto_20160301_0330 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0019_auto_20160307_0256 module -------------------------------------------------------------- +onadata.apps.logger.migrations.0019\_auto\_20160307\_0256 module +---------------------------------------------------------------- .. automodule:: onadata.apps.logger.migrations.0019_auto_20160307_0256 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0020_auto_20160408_0325 module -------------------------------------------------------------- +onadata.apps.logger.migrations.0020\_auto\_20160408\_0325 module +---------------------------------------------------------------- .. automodule:: onadata.apps.logger.migrations.0020_auto_20160408_0325 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0021_auto_20160408_0919 module -------------------------------------------------------------- +onadata.apps.logger.migrations.0021\_auto\_20160408\_0919 module +---------------------------------------------------------------- .. automodule:: onadata.apps.logger.migrations.0021_auto_20160408_0919 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0022_auto_20160418_0518 module -------------------------------------------------------------- +onadata.apps.logger.migrations.0022\_auto\_20160418\_0518 module +---------------------------------------------------------------- .. 
automodule:: onadata.apps.logger.migrations.0022_auto_20160418_0518 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0023_auto_20160419_0403 module -------------------------------------------------------------- +onadata.apps.logger.migrations.0023\_auto\_20160419\_0403 module +---------------------------------------------------------------- .. automodule:: onadata.apps.logger.migrations.0023_auto_20160419_0403 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0024_xform_has_hxl_support module ----------------------------------------------------------------- +onadata.apps.logger.migrations.0024\_xform\_has\_hxl\_support module +-------------------------------------------------------------------- .. automodule:: onadata.apps.logger.migrations.0024_xform_has_hxl_support - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0025_xform_last_updated_at module ----------------------------------------------------------------- +onadata.apps.logger.migrations.0025\_xform\_last\_updated\_at module +-------------------------------------------------------------------- .. automodule:: onadata.apps.logger.migrations.0025_xform_last_updated_at - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0026_auto_20160913_0239 module -------------------------------------------------------------- +onadata.apps.logger.migrations.0026\_auto\_20160913\_0239 module +---------------------------------------------------------------- .. 
automodule:: onadata.apps.logger.migrations.0026_auto_20160913_0239 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.migrations.0027_auto_20161201_0730 module -------------------------------------------------------------- +onadata.apps.logger.migrations.0027\_auto\_20161201\_0730 module +---------------------------------------------------------------- .. automodule:: onadata.apps.logger.migrations.0027_auto_20161201_0730 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0028\_auto\_20170217\_0502 module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0028_auto_20170217_0502 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0028\_auto\_20170221\_0838 module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0028_auto_20170221_0838 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0029\_auto\_20170221\_0908 module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0029_auto_20170221_0908 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0030\_auto\_20170227\_0137 module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0030_auto_20170227_0137 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0031\_merge module +------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0031_merge + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0032\_project\_deleted\_at module +---------------------------------------------------------------- + +.. 
automodule:: onadata.apps.logger.migrations.0032_project_deleted_at + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0033\_auto\_20170705\_0159 module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0033_auto_20170705_0159 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0034\_auto\_20170814\_0432 module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0034_auto_20170814_0432 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0034\_mergedxform module +------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0034_mergedxform + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0035\_auto\_20170712\_0529 module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0035_auto_20170712_0529 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0036\_xform\_is\_merged\_dataset module +---------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0036_xform_is_merged_dataset + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0037\_merge\_20170825\_0238 module +----------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0037_merge_20170825_0238 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0038\_auto\_20170828\_1718 module +---------------------------------------------------------------- + +.. 
automodule:: onadata.apps.logger.migrations.0038_auto_20170828_1718 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0039\_auto\_20170909\_2052 module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0039_auto_20170909_2052 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0040\_auto\_20170912\_1504 module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0040_auto_20170912_1504 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0041\_auto\_20170912\_1512 module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0041_auto_20170912_1512 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0042\_xform\_hash module +------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0042_xform_hash + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0043\_auto\_20171010\_0403 module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0043_auto_20171010_0403 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0044\_xform\_hash\_sql\_update module +-------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0044_xform_hash_sql_update + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0045\_attachment\_name module +------------------------------------------------------------ + +.. 
automodule:: onadata.apps.logger.migrations.0045_attachment_name + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0046\_auto\_20180314\_1618 module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0046_auto_20180314_1618 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0047\_dataview\_deleted\_at module +----------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0047_dataview_deleted_at + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0048\_dataview\_deleted\_by module +----------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0048_dataview_deleted_by + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0049\_xform\_deleted\_by module +-------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0049_xform_deleted_by + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0050\_project\_deleted\_by module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0050_project_deleted_by + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0051\_auto\_20180522\_1118 module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0051_auto_20180522_1118 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0052\_auto\_20180805\_2233 module +---------------------------------------------------------------- + +.. 
automodule:: onadata.apps.logger.migrations.0052_auto_20180805_2233 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0053\_submissionreview module +------------------------------------------------------------ + +.. automodule:: onadata.apps.logger.migrations.0053_submissionreview + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0054\_instance\_has\_a\_review module +-------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0054_instance_has_a_review + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0055\_auto\_20180904\_0713 module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0055_auto_20180904_0713 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0056\_auto\_20190125\_0517 module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0056_auto_20190125_0517 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0057\_xform\_public\_key module +-------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0057_xform_public_key + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0058\_auto\_20191211\_0900 module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0058_auto_20191211_0900 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0059\_attachment\_deleted\_by module +------------------------------------------------------------------- + +.. 
automodule:: onadata.apps.logger.migrations.0059_attachment_deleted_by + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0060\_auto\_20200305\_0357 module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0060_auto_20200305_0357 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0061\_auto\_20200713\_0814 module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0061_auto_20200713_0814 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0062\_auto\_20210202\_0248 module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0062_auto_20210202_0248 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0063\_xformversion module +-------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0063_xformversion + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.migrations.0064\_auto\_20210304\_0314 module +---------------------------------------------------------------- +.. automodule:: onadata.apps.logger.migrations.0064_auto_20210304_0314 + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.logger.migrations - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.logger.models.rst b/docs/onadata.apps.logger.models.rst index 5728e3a3ae..f84829981f 100644 --- a/docs/onadata.apps.logger.models.rst +++ b/docs/onadata.apps.logger.models.rst @@ -1,6 +1,14 @@ onadata.apps.logger.models package ================================== +Subpackages +----------- + +.. 
toctree:: + :maxdepth: 4 + + onadata.apps.logger.models.tests + Submodules ---------- @@ -8,79 +16,150 @@ onadata.apps.logger.models.attachment module -------------------------------------------- .. automodule:: onadata.apps.logger.models.attachment - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.models.data_view module -------------------------------------------- +onadata.apps.logger.models.data\_view module +-------------------------------------------- .. automodule:: onadata.apps.logger.models.data_view - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.models.entity module +---------------------------------------- + +.. automodule:: onadata.apps.logger.models.entity + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.models.entity\_list module +---------------------------------------------- + +.. automodule:: onadata.apps.logger.models.entity_list + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.models.follow\_up\_form module +-------------------------------------------------- + +.. automodule:: onadata.apps.logger.models.follow_up_form + :members: + :undoc-members: + :show-inheritance: onadata.apps.logger.models.instance module ------------------------------------------ .. automodule:: onadata.apps.logger.models.instance - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.models.merged\_xform module +----------------------------------------------- + +.. automodule:: onadata.apps.logger.models.merged_xform + :members: + :undoc-members: + :show-inheritance: onadata.apps.logger.models.note module -------------------------------------- .. 
automodule:: onadata.apps.logger.models.note - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.models.open\_data module +-------------------------------------------- + +.. automodule:: onadata.apps.logger.models.open_data + :members: + :undoc-members: + :show-inheritance: onadata.apps.logger.models.osmdata module ----------------------------------------- .. automodule:: onadata.apps.logger.models.osmdata - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.apps.logger.models.project module ----------------------------------------- .. automodule:: onadata.apps.logger.models.project - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.models.project\_invitation module +----------------------------------------------------- + +.. automodule:: onadata.apps.logger.models.project_invitation + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.models.survey_type module ---------------------------------------------- +onadata.apps.logger.models.registration\_form module +---------------------------------------------------- + +.. automodule:: onadata.apps.logger.models.registration_form + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.models.submission\_review module +---------------------------------------------------- + +.. automodule:: onadata.apps.logger.models.submission_review + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.models.survey\_type module +---------------------------------------------- .. automodule:: onadata.apps.logger.models.survey_type - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.apps.logger.models.widget module ---------------------------------------- .. 
automodule:: onadata.apps.logger.models.widget - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.apps.logger.models.xform module --------------------------------------- .. automodule:: onadata.apps.logger.models.xform - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.models.xform\_version module +------------------------------------------------ +.. automodule:: onadata.apps.logger.models.xform_version + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.logger.models - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.logger.models.tests.rst b/docs/onadata.apps.logger.models.tests.rst new file mode 100644 index 0000000000..641e0cfa75 --- /dev/null +++ b/docs/onadata.apps.logger.models.tests.rst @@ -0,0 +1,21 @@ +onadata.apps.logger.models.tests package +======================================== + +Submodules +---------- + +onadata.apps.logger.models.tests.test\_merged\_xform module +----------------------------------------------------------- + +.. automodule:: onadata.apps.logger.models.tests.test_merged_xform + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: onadata.apps.logger.models.tests + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.logger.rst b/docs/onadata.apps.logger.rst index 4b87fe7bab..48e53ffb71 100644 --- a/docs/onadata.apps.logger.rst +++ b/docs/onadata.apps.logger.rst @@ -5,11 +5,13 @@ Subpackages ----------- .. 
toctree:: + :maxdepth: 4 - onadata.apps.logger.management - onadata.apps.logger.migrations - onadata.apps.logger.models - onadata.apps.logger.tests + onadata.apps.logger.management + onadata.apps.logger.migrations + onadata.apps.logger.models + onadata.apps.logger.templatetags + onadata.apps.logger.tests Submodules ---------- @@ -18,63 +20,70 @@ onadata.apps.logger.admin module -------------------------------- .. automodule:: onadata.apps.logger.admin - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.apps module +------------------------------- + +.. automodule:: onadata.apps.logger.apps + :members: + :undoc-members: + :show-inheritance: onadata.apps.logger.factory module ---------------------------------- .. automodule:: onadata.apps.logger.factory - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.import_tools module ---------------------------------------- +onadata.apps.logger.import\_tools module +---------------------------------------- .. automodule:: onadata.apps.logger.import_tools - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.tasks module --------------------------------- +onadata.apps.logger.signals module +---------------------------------- -.. automodule:: onadata.apps.logger.tasks - :members: - :undoc-members: - :show-inheritance: +.. automodule:: onadata.apps.logger.signals + :members: + :undoc-members: + :show-inheritance: onadata.apps.logger.views module -------------------------------- .. automodule:: onadata.apps.logger.views - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.xform_fs module ------------------------------------ +onadata.apps.logger.xform\_fs module +------------------------------------ .. 
automodule:: onadata.apps.logger.xform_fs - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.xform_instance_parser module ------------------------------------------------- +onadata.apps.logger.xform\_instance\_parser module +-------------------------------------------------- .. automodule:: onadata.apps.logger.xform_instance_parser - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.logger - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.logger.templatetags.rst b/docs/onadata.apps.logger.templatetags.rst new file mode 100644 index 0000000000..30e994ace6 --- /dev/null +++ b/docs/onadata.apps.logger.templatetags.rst @@ -0,0 +1,21 @@ +onadata.apps.logger.templatetags package +======================================== + +Submodules +---------- + +onadata.apps.logger.templatetags.customize\_template\_by\_domain module +----------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.templatetags.customize_template_by_domain + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: onadata.apps.logger.templatetags + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.logger.tests.management.commands.rst b/docs/onadata.apps.logger.tests.management.commands.rst new file mode 100644 index 0000000000..fceb0340f6 --- /dev/null +++ b/docs/onadata.apps.logger.tests.management.commands.rst @@ -0,0 +1,37 @@ +onadata.apps.logger.tests.management.commands package +===================================================== + +Submodules +---------- + +onadata.apps.logger.tests.management.commands.test\_recover\_deleted\_attachments module +---------------------------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.tests.management.commands.test_recover_deleted_attachments + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.tests.management.commands.test\_remove\_columns\_from\_briefcase\_data module +------------------------------------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.tests.management.commands.test_remove_columns_from_briefcase_data + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.tests.management.commands.test\_replace\_form\_id\_root\_node module +---------------------------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.tests.management.commands.test_replace_form_id_root_node + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: onadata.apps.logger.tests.management.commands + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.logger.tests.management.rst b/docs/onadata.apps.logger.tests.management.rst new file mode 100644 index 0000000000..34093d2f5c --- /dev/null +++ b/docs/onadata.apps.logger.tests.management.rst @@ -0,0 +1,18 @@ +onadata.apps.logger.tests.management package +============================================ + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + onadata.apps.logger.tests.management.commands + +Module contents +--------------- + +.. automodule:: onadata.apps.logger.tests.management + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.logger.tests.models.rst b/docs/onadata.apps.logger.tests.models.rst index 003bb1b5cd..faa9cbf853 100644 --- a/docs/onadata.apps.logger.tests.models.rst +++ b/docs/onadata.apps.logger.tests.models.rst @@ -4,43 +4,98 @@ onadata.apps.logger.tests.models package Submodules ---------- -onadata.apps.logger.tests.models.test_attachment module -------------------------------------------------------- +onadata.apps.logger.tests.models.test\_attachment module +-------------------------------------------------------- .. automodule:: onadata.apps.logger.tests.models.test_attachment - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.tests.models.test_data_view module ------------------------------------------------------- +onadata.apps.logger.tests.models.test\_data\_view module +-------------------------------------------------------- .. automodule:: onadata.apps.logger.tests.models.test_data_view - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.tests.models.test\_entity module +---------------------------------------------------- + +.. 
automodule:: onadata.apps.logger.tests.models.test_entity + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.tests.models.test\_entity\_list module +---------------------------------------------------------- + +.. automodule:: onadata.apps.logger.tests.models.test_entity_list + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.tests.models.test_instance module ------------------------------------------------------ +onadata.apps.logger.tests.models.test\_follow\_up\_form module +-------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.tests.models.test_follow_up_form + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.tests.models.test\_instance module +------------------------------------------------------ .. automodule:: onadata.apps.logger.tests.models.test_instance - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.tests.models.test_xform module +onadata.apps.logger.tests.models.test\_note module -------------------------------------------------- -.. automodule:: onadata.apps.logger.tests.models.test_xform - :members: - :undoc-members: - :show-inheritance: +.. automodule:: onadata.apps.logger.tests.models.test_note + :members: + :undoc-members: + :show-inheritance: +onadata.apps.logger.tests.models.test\_project\_invitation module +----------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.tests.models.test_project_invitation + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.tests.models.test\_registration\_form module +---------------------------------------------------------------- + +.. 
automodule:: onadata.apps.logger.tests.models.test_registration_form + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.tests.models.test\_submission\_review module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.tests.models.test_submission_review + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.tests.models.test\_xform module +--------------------------------------------------- + +.. automodule:: onadata.apps.logger.tests.models.test_xform + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.logger.tests.models - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.logger.tests.rst b/docs/onadata.apps.logger.tests.rst index 760208dafc..cf1f18ccb8 100644 --- a/docs/onadata.apps.logger.tests.rst +++ b/docs/onadata.apps.logger.tests.rst @@ -5,129 +5,146 @@ Subpackages ----------- .. toctree:: + :maxdepth: 4 - onadata.apps.logger.tests.models + onadata.apps.logger.tests.management + onadata.apps.logger.tests.models Submodules ---------- -onadata.apps.logger.tests.test_backup_tools module --------------------------------------------------- +onadata.apps.logger.tests.test\_backup\_tools module +---------------------------------------------------- .. automodule:: onadata.apps.logger.tests.test_backup_tools - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.tests.test_briefcase_api module ---------------------------------------------------- +onadata.apps.logger.tests.test\_briefcase\_api module +----------------------------------------------------- .. 
automodule:: onadata.apps.logger.tests.test_briefcase_api - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.tests.test_briefcase_client module ------------------------------------------------------- +onadata.apps.logger.tests.test\_briefcase\_client module +-------------------------------------------------------- .. automodule:: onadata.apps.logger.tests.test_briefcase_client - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.tests.test_digest_authentication module ------------------------------------------------------------ +onadata.apps.logger.tests.test\_customize\_template\_by\_domain module +---------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.tests.test_customize_template_by_domain + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.tests.test\_digest\_authentication module +------------------------------------------------------------- .. automodule:: onadata.apps.logger.tests.test_digest_authentication - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.tests.test_encrypted_submissions module ------------------------------------------------------------ +onadata.apps.logger.tests.test\_encrypted\_submissions module +------------------------------------------------------------- .. automodule:: onadata.apps.logger.tests.test_encrypted_submissions - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.tests.test_form_list module ------------------------------------------------ +onadata.apps.logger.tests.test\_form\_list module +------------------------------------------------- .. 
automodule:: onadata.apps.logger.tests.test_form_list - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.tests.test_form_submission module ------------------------------------------------------ +onadata.apps.logger.tests.test\_form\_submission module +------------------------------------------------------- .. automodule:: onadata.apps.logger.tests.test_form_submission - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.tests.test_importing_database module --------------------------------------------------------- +onadata.apps.logger.tests.test\_importing\_database module +---------------------------------------------------------- .. automodule:: onadata.apps.logger.tests.test_importing_database - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.tests.test_instance_creation module -------------------------------------------------------- +onadata.apps.logger.tests.test\_instance\_creation module +--------------------------------------------------------- .. automodule:: onadata.apps.logger.tests.test_instance_creation - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.tests.test_parsing module ---------------------------------------------- +onadata.apps.logger.tests.test\_parsing module +---------------------------------------------- .. automodule:: onadata.apps.logger.tests.test_parsing - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.tests.test_publish_xls module -------------------------------------------------- +onadata.apps.logger.tests.test\_publish\_xls module +--------------------------------------------------- .. 
automodule:: onadata.apps.logger.tests.test_publish_xls - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.tests.test_simple_submission module -------------------------------------------------------- +onadata.apps.logger.tests.test\_simple\_submission module +--------------------------------------------------------- .. automodule:: onadata.apps.logger.tests.test_simple_submission - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.tests.test_update_xform_uuid module -------------------------------------------------------- +onadata.apps.logger.tests.test\_transfer\_project\_command module +----------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.tests.test_transfer_project_command + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.logger.tests.test\_update\_xform\_uuid module +---------------------------------------------------------- .. automodule:: onadata.apps.logger.tests.test_update_xform_uuid - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.logger.tests.test_webforms module ----------------------------------------------- +onadata.apps.logger.tests.test\_webforms module +----------------------------------------------- .. automodule:: onadata.apps.logger.tests.test_webforms - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. 
automodule:: onadata.apps.logger.tests - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.main.management.commands.rst b/docs/onadata.apps.main.management.commands.rst index 673b115440..a149721236 100644 --- a/docs/onadata.apps.main.management.commands.rst +++ b/docs/onadata.apps.main.management.commands.rst @@ -4,83 +4,90 @@ onadata.apps.main.management.commands package Submodules ---------- -onadata.apps.main.management.commands.create_enketo_express_urls module ------------------------------------------------------------------------ +onadata.apps.main.management.commands.create\_enketo\_express\_urls module +-------------------------------------------------------------------------- .. automodule:: onadata.apps.main.management.commands.create_enketo_express_urls - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.management.commands.create_metadata_for_kpi_deployed_forms module ------------------------------------------------------------------------------------ +onadata.apps.main.management.commands.create\_metadata\_for\_kpi\_deployed\_forms module +---------------------------------------------------------------------------------------- .. automodule:: onadata.apps.main.management.commands.create_metadata_for_kpi_deployed_forms - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.management.commands.export_user_emails module ---------------------------------------------------------------- +onadata.apps.main.management.commands.export\_user\_emails module +----------------------------------------------------------------- .. 
automodule:: onadata.apps.main.management.commands.export_user_emails - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.management.commands.get_accounts_with_duplicate_id_strings module ------------------------------------------------------------------------------------ +onadata.apps.main.management.commands.get\_accounts\_with\_duplicate\_id\_strings module +---------------------------------------------------------------------------------------- .. automodule:: onadata.apps.main.management.commands.get_accounts_with_duplicate_id_strings - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.apps.main.management.commands.mailer module --------------------------------------------------- .. automodule:: onadata.apps.main.management.commands.mailer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.management.commands.migrate_audit_log module --------------------------------------------------------------- +onadata.apps.main.management.commands.migrate\_audit\_log module +---------------------------------------------------------------- .. automodule:: onadata.apps.main.management.commands.migrate_audit_log - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.management.commands.remove_odk_prefix module --------------------------------------------------------------- +onadata.apps.main.management.commands.remove\_odk\_prefix module +---------------------------------------------------------------- .. 
automodule:: onadata.apps.main.management.commands.remove_odk_prefix - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.management.commands.set_media_file_hash module ----------------------------------------------------------------- +onadata.apps.main.management.commands.send\_email\_w\_attachment module +----------------------------------------------------------------------- + +.. automodule:: onadata.apps.main.management.commands.send_email_w_attachment + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.main.management.commands.set\_media\_file\_hash module +------------------------------------------------------------------- .. automodule:: onadata.apps.main.management.commands.set_media_file_hash - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.management.commands.update_enketo_urls module ---------------------------------------------------------------- +onadata.apps.main.management.commands.update\_enketo\_urls module +----------------------------------------------------------------- .. automodule:: onadata.apps.main.management.commands.update_enketo_urls - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.main.management.commands - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.main.management.rst b/docs/onadata.apps.main.management.rst index a0e9bd15b3..fae5d109ef 100644 --- a/docs/onadata.apps.main.management.rst +++ b/docs/onadata.apps.main.management.rst @@ -5,13 +5,14 @@ Subpackages ----------- .. toctree:: + :maxdepth: 4 - onadata.apps.main.management.commands + onadata.apps.main.management.commands Module contents --------------- .. 
automodule:: onadata.apps.main.management - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.main.migrations.rst b/docs/onadata.apps.main.migrations.rst index e85bfbfe53..f4e0762220 100644 --- a/docs/onadata.apps.main.migrations.rst +++ b/docs/onadata.apps.main.migrations.rst @@ -4,67 +4,122 @@ onadata.apps.main.migrations package Submodules ---------- -onadata.apps.main.migrations.0001_initial module ------------------------------------------------- +onadata.apps.main.migrations.0001\_initial module +------------------------------------------------- .. automodule:: onadata.apps.main.migrations.0001_initial - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.migrations.0002_auto_20150717_1149 module ------------------------------------------------------------ +onadata.apps.main.migrations.0002\_auto\_20150717\_1149 module +-------------------------------------------------------------- .. automodule:: onadata.apps.main.migrations.0002_auto_20150717_1149 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.migrations.0003_auto_20151015_0822 module ------------------------------------------------------------ +onadata.apps.main.migrations.0003\_auto\_20151015\_0822 module +-------------------------------------------------------------- .. automodule:: onadata.apps.main.migrations.0003_auto_20151015_0822 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.migrations.0004_auto_20160215_0345 module ------------------------------------------------------------ +onadata.apps.main.migrations.0004\_auto\_20160215\_0345 module +-------------------------------------------------------------- .. 
automodule:: onadata.apps.main.migrations.0004_auto_20160215_0345 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.migrations.0005_auto_20160404_0837 module ------------------------------------------------------------ +onadata.apps.main.migrations.0005\_auto\_20160404\_0837 module +-------------------------------------------------------------- .. automodule:: onadata.apps.main.migrations.0005_auto_20160404_0837 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.migrations.0006_auto_20160408_0325 module ------------------------------------------------------------ +onadata.apps.main.migrations.0006\_auto\_20160408\_0325 module +-------------------------------------------------------------- .. automodule:: onadata.apps.main.migrations.0006_auto_20160408_0325 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.migrations.0007_auto_20160418_0525 module ------------------------------------------------------------ +onadata.apps.main.migrations.0007\_auto\_20160418\_0525 module +-------------------------------------------------------------- .. automodule:: onadata.apps.main.migrations.0007_auto_20160418_0525 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: +onadata.apps.main.migrations.0008\_auto\_20180425\_0754 module +-------------------------------------------------------------- + +.. automodule:: onadata.apps.main.migrations.0008_auto_20180425_0754 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.main.migrations.0009\_auto\_20190125\_0517 module +-------------------------------------------------------------- + +.. 
automodule:: onadata.apps.main.migrations.0009_auto_20190125_0517 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.main.migrations.0010\_auto\_20220425\_0313 module +-------------------------------------------------------------- + +.. automodule:: onadata.apps.main.migrations.0010_auto_20220425_0313 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.main.migrations.0011\_auto\_20220510\_0907 module +-------------------------------------------------------------- + +.. automodule:: onadata.apps.main.migrations.0011_auto_20220510_0907 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.main.migrations.0012\_metadata\_extra\_data module +--------------------------------------------------------------- + +.. automodule:: onadata.apps.main.migrations.0012_metadata_extra_data + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.main.migrations.0013\_passwordhistory module +--------------------------------------------------------- + +.. automodule:: onadata.apps.main.migrations.0013_passwordhistory + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.main.migrations.0014\_update\_enketo\_old\_ids module +------------------------------------------------------------------ + +.. automodule:: onadata.apps.main.migrations.0014_update_enketo_old_ids + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.main.migrations - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.main.models.rst b/docs/onadata.apps.main.models.rst index 7ee810d495..a746b15e4b 100644 --- a/docs/onadata.apps.main.models.rst +++ b/docs/onadata.apps.main.models.rst @@ -8,39 +8,46 @@ onadata.apps.main.models.audit module ------------------------------------- .. 
automodule:: onadata.apps.main.models.audit - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.models.google_oath module -------------------------------------------- +onadata.apps.main.models.google\_oath module +-------------------------------------------- .. automodule:: onadata.apps.main.models.google_oath - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.models.meta_data module ------------------------------------------ +onadata.apps.main.models.meta\_data module +------------------------------------------ .. automodule:: onadata.apps.main.models.meta_data - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.models.user_profile module --------------------------------------------- +onadata.apps.main.models.password\_history module +------------------------------------------------- -.. automodule:: onadata.apps.main.models.user_profile - :members: - :undoc-members: - :show-inheritance: +.. automodule:: onadata.apps.main.models.password_history + :members: + :undoc-members: + :show-inheritance: +onadata.apps.main.models.user\_profile module +--------------------------------------------- + +.. automodule:: onadata.apps.main.models.user_profile + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.main.models - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.main.rst b/docs/onadata.apps.main.rst index 8d41cd9c5e..2c0eb1488b 100644 --- a/docs/onadata.apps.main.rst +++ b/docs/onadata.apps.main.rst @@ -5,12 +5,13 @@ Subpackages ----------- .. 
toctree:: + :maxdepth: 4 - onadata.apps.main.management - onadata.apps.main.migrations - onadata.apps.main.models - onadata.apps.main.templatetags - onadata.apps.main.tests + onadata.apps.main.management + onadata.apps.main.migrations + onadata.apps.main.models + onadata.apps.main.templatetags + onadata.apps.main.tests Submodules ---------- @@ -19,79 +20,78 @@ onadata.apps.main.backends module --------------------------------- .. automodule:: onadata.apps.main.backends - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.context_processors module -------------------------------------------- +onadata.apps.main.context\_processors module +-------------------------------------------- .. automodule:: onadata.apps.main.context_processors - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.apps.main.forms module ------------------------------ .. automodule:: onadata.apps.main.forms - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.registration_urls module ------------------------------------------- +onadata.apps.main.registration\_urls module +------------------------------------------- .. automodule:: onadata.apps.main.registration_urls - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.registration_views module -------------------------------------------- +onadata.apps.main.registration\_views module +-------------------------------------------- .. automodule:: onadata.apps.main.registration_views - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.apps.main.signals module -------------------------------- .. 
automodule:: onadata.apps.main.signals - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.apps.main.urls module ----------------------------- .. automodule:: onadata.apps.main.urls - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.apps.main.views module ------------------------------ .. automodule:: onadata.apps.main.views - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.apps.main.wsgi module ----------------------------- .. automodule:: onadata.apps.main.wsgi - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.main - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.main.templatetags.rst b/docs/onadata.apps.main.templatetags.rst index f5dcdf6e6c..a89435cb49 100644 --- a/docs/onadata.apps.main.templatetags.rst +++ b/docs/onadata.apps.main.templatetags.rst @@ -4,19 +4,18 @@ onadata.apps.main.templatetags package Submodules ---------- -onadata.apps.main.templatetags.lookup_filter module ---------------------------------------------------- +onadata.apps.main.templatetags.lookup\_filter module +---------------------------------------------------- .. automodule:: onadata.apps.main.templatetags.lookup_filter - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. 
automodule:: onadata.apps.main.templatetags - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.main.tests.rst b/docs/onadata.apps.main.tests.rst index 76c9e1b3a4..1467aa9135 100644 --- a/docs/onadata.apps.main.tests.rst +++ b/docs/onadata.apps.main.tests.rst @@ -4,219 +4,234 @@ onadata.apps.main.tests package Submodules ---------- -onadata.apps.main.tests.test_audit_log module ---------------------------------------------- +onadata.apps.main.tests.test\_audit\_log module +----------------------------------------------- .. automodule:: onadata.apps.main.tests.test_audit_log - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_base module ----------------------------------------- +onadata.apps.main.tests.test\_base module +----------------------------------------- .. automodule:: onadata.apps.main.tests.test_base - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_csv_export module ----------------------------------------------- +onadata.apps.main.tests.test\_csv\_export module +------------------------------------------------ .. automodule:: onadata.apps.main.tests.test_csv_export - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_custom_context_processors module -------------------------------------------------------------- +onadata.apps.main.tests.test\_custom\_context\_processors module +---------------------------------------------------------------- .. 
automodule:: onadata.apps.main.tests.test_custom_context_processors - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_form_api module --------------------------------------------- +onadata.apps.main.tests.test\_form\_api module +---------------------------------------------- .. automodule:: onadata.apps.main.tests.test_form_api - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_form_api_delete module ---------------------------------------------------- +onadata.apps.main.tests.test\_form\_api\_delete module +------------------------------------------------------ .. automodule:: onadata.apps.main.tests.test_form_api_delete - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_form_auth module ---------------------------------------------- +onadata.apps.main.tests.test\_form\_auth module +----------------------------------------------- .. automodule:: onadata.apps.main.tests.test_form_auth - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_form_edit module ---------------------------------------------- +onadata.apps.main.tests.test\_form\_edit module +----------------------------------------------- .. automodule:: onadata.apps.main.tests.test_form_edit - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_form_enter_data module ---------------------------------------------------- +onadata.apps.main.tests.test\_form\_enter\_data module +------------------------------------------------------ .. 
automodule:: onadata.apps.main.tests.test_form_enter_data - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_form_errors module ------------------------------------------------ +onadata.apps.main.tests.test\_form\_errors module +------------------------------------------------- .. automodule:: onadata.apps.main.tests.test_form_errors - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_form_exports module ------------------------------------------------- +onadata.apps.main.tests.test\_form\_exports module +-------------------------------------------------- .. automodule:: onadata.apps.main.tests.test_form_exports - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_form_gallery module ------------------------------------------------- +onadata.apps.main.tests.test\_form\_gallery module +-------------------------------------------------- .. automodule:: onadata.apps.main.tests.test_form_gallery - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_form_metadata module -------------------------------------------------- +onadata.apps.main.tests.test\_form\_metadata module +--------------------------------------------------- .. automodule:: onadata.apps.main.tests.test_form_metadata - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_form_permissions module ----------------------------------------------------- +onadata.apps.main.tests.test\_form\_permissions module +------------------------------------------------------ .. 
automodule:: onadata.apps.main.tests.test_form_permissions - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_form_public_api module ---------------------------------------------------- +onadata.apps.main.tests.test\_form\_public\_api module +------------------------------------------------------ .. automodule:: onadata.apps.main.tests.test_form_public_api - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_form_show module ---------------------------------------------- +onadata.apps.main.tests.test\_form\_show module +----------------------------------------------- .. automodule:: onadata.apps.main.tests.test_form_show - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_gps module ---------------------------------------- +onadata.apps.main.tests.test\_gps module +---------------------------------------- .. automodule:: onadata.apps.main.tests.test_gps - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_http_auth module ---------------------------------------------- +onadata.apps.main.tests.test\_http\_auth module +----------------------------------------------- .. automodule:: onadata.apps.main.tests.test_http_auth - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_metadata module --------------------------------------------- +onadata.apps.main.tests.test\_metadata module +--------------------------------------------- .. 
automodule:: onadata.apps.main.tests.test_metadata - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_past_bugs module ---------------------------------------------- +onadata.apps.main.tests.test\_password\_history module +------------------------------------------------------ + +.. automodule:: onadata.apps.main.tests.test_password_history + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.main.tests.test\_past\_bugs module +----------------------------------------------- .. automodule:: onadata.apps.main.tests.test_past_bugs - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_process module -------------------------------------------- +onadata.apps.main.tests.test\_process module +-------------------------------------------- .. automodule:: onadata.apps.main.tests.test_process - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_style module ------------------------------------------ +onadata.apps.main.tests.test\_service\_health module +---------------------------------------------------- + +.. automodule:: onadata.apps.main.tests.test_service_health + :members: + :undoc-members: + :show-inheritance: -.. automodule:: onadata.apps.main.tests.test_style - :members: - :undoc-members: - :show-inheritance: +onadata.apps.main.tests.test\_signals module +-------------------------------------------- + +.. automodule:: onadata.apps.main.tests.test_signals + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_user_id_string_unique_together module ------------------------------------------------------------------- +onadata.apps.main.tests.test\_user\_id\_string\_unique\_together module +----------------------------------------------------------------------- .. 
automodule:: onadata.apps.main.tests.test_user_id_string_unique_together - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_user_login module ----------------------------------------------- +onadata.apps.main.tests.test\_user\_login module +------------------------------------------------ .. automodule:: onadata.apps.main.tests.test_user_login - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_user_profile module ------------------------------------------------- +onadata.apps.main.tests.test\_user\_profile module +-------------------------------------------------- .. automodule:: onadata.apps.main.tests.test_user_profile - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.main.tests.test_user_settings module -------------------------------------------------- +onadata.apps.main.tests.test\_user\_settings module +--------------------------------------------------- .. automodule:: onadata.apps.main.tests.test_user_settings - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.main.tests - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.messaging.backends.rst b/docs/onadata.apps.messaging.backends.rst new file mode 100644 index 0000000000..96dfdcc7e0 --- /dev/null +++ b/docs/onadata.apps.messaging.backends.rst @@ -0,0 +1,29 @@ +onadata.apps.messaging.backends package +======================================= + +Submodules +---------- + +onadata.apps.messaging.backends.base module +------------------------------------------- + +.. 
automodule:: onadata.apps.messaging.backends.base + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.messaging.backends.mqtt module +------------------------------------------- + +.. automodule:: onadata.apps.messaging.backends.mqtt + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: onadata.apps.messaging.backends + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.messaging.migrations.rst b/docs/onadata.apps.messaging.migrations.rst new file mode 100644 index 0000000000..58a8d25cef --- /dev/null +++ b/docs/onadata.apps.messaging.migrations.rst @@ -0,0 +1,10 @@ +onadata.apps.messaging.migrations package +========================================= + +Module contents +--------------- + +.. automodule:: onadata.apps.messaging.migrations + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.messaging.rst b/docs/onadata.apps.messaging.rst new file mode 100644 index 0000000000..fb30acee6b --- /dev/null +++ b/docs/onadata.apps.messaging.rst @@ -0,0 +1,127 @@ +onadata.apps.messaging package +============================== + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + onadata.apps.messaging.backends + onadata.apps.messaging.migrations + onadata.apps.messaging.tests + +Submodules +---------- + +onadata.apps.messaging.admin module +----------------------------------- + +.. automodule:: onadata.apps.messaging.admin + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.messaging.apps module +---------------------------------- + +.. automodule:: onadata.apps.messaging.apps + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.messaging.constants module +--------------------------------------- + +.. automodule:: onadata.apps.messaging.constants + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.messaging.filters module +------------------------------------- + +.. 
automodule:: onadata.apps.messaging.filters + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.messaging.models module +------------------------------------ + +.. automodule:: onadata.apps.messaging.models + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.messaging.permissions module +----------------------------------------- + +.. automodule:: onadata.apps.messaging.permissions + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.messaging.serializers module +----------------------------------------- + +.. automodule:: onadata.apps.messaging.serializers + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.messaging.signals module +------------------------------------- + +.. automodule:: onadata.apps.messaging.signals + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.messaging.tasks module +----------------------------------- + +.. automodule:: onadata.apps.messaging.tasks + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.messaging.urls module +---------------------------------- + +.. automodule:: onadata.apps.messaging.urls + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.messaging.utils module +----------------------------------- + +.. automodule:: onadata.apps.messaging.utils + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.messaging.views module +----------------------------------- + +.. automodule:: onadata.apps.messaging.views + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.messaging.viewsets module +-------------------------------------- + +.. automodule:: onadata.apps.messaging.viewsets + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: onadata.apps.messaging + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.messaging.tests.rst b/docs/onadata.apps.messaging.tests.rst new file mode 100644 index 0000000000..aed9f653d6 --- /dev/null +++ b/docs/onadata.apps.messaging.tests.rst @@ -0,0 +1,69 @@ +onadata.apps.messaging.tests package +==================================== + +Submodules +---------- + +onadata.apps.messaging.tests.test\_backends\_base module +-------------------------------------------------------- + +.. automodule:: onadata.apps.messaging.tests.test_backends_base + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.messaging.tests.test\_backends\_mqtt module +-------------------------------------------------------- + +.. automodule:: onadata.apps.messaging.tests.test_backends_mqtt + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.messaging.tests.test\_base module +---------------------------------------------- + +.. automodule:: onadata.apps.messaging.tests.test_base + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.messaging.tests.test\_messaging\_viewset module +------------------------------------------------------------ + +.. automodule:: onadata.apps.messaging.tests.test_messaging_viewset + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.messaging.tests.test\_signals module +------------------------------------------------- + +.. automodule:: onadata.apps.messaging.tests.test_signals + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.messaging.tests.test\_tasks module +----------------------------------------------- + +.. automodule:: onadata.apps.messaging.tests.test_tasks + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.messaging.tests.test\_utils module +----------------------------------------------- + +.. 
automodule:: onadata.apps.messaging.tests.test_utils + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: onadata.apps.messaging.tests + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.restservice.management.commands.rst b/docs/onadata.apps.restservice.management.commands.rst index 3d0bdfe116..47317755e0 100644 --- a/docs/onadata.apps.restservice.management.commands.rst +++ b/docs/onadata.apps.restservice.management.commands.rst @@ -1,22 +1,21 @@ -onadata\.apps\.restservice\.management\.commands package -======================================================== +onadata.apps.restservice.management.commands package +==================================================== Submodules ---------- -onadata\.apps\.restservice\.management\.commands\.textit\_v1\_to\_v2 module ---------------------------------------------------------------------------- +onadata.apps.restservice.management.commands.textit\_v1\_to\_v2 module +---------------------------------------------------------------------- .. automodule:: onadata.apps.restservice.management.commands.textit_v1_to_v2 - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.restservice.management.commands - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.restservice.management.rst b/docs/onadata.apps.restservice.management.rst index 0cd1504b9c..4ce28a8895 100644 --- a/docs/onadata.apps.restservice.management.rst +++ b/docs/onadata.apps.restservice.management.rst @@ -1,17 +1,18 @@ -onadata\.apps\.restservice\.management package -============================================== +onadata.apps.restservice.management package +=========================================== Subpackages ----------- .. 
toctree:: + :maxdepth: 4 - onadata.apps.restservice.management.commands + onadata.apps.restservice.management.commands Module contents --------------- .. automodule:: onadata.apps.restservice.management - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.restservice.migrations.rst b/docs/onadata.apps.restservice.migrations.rst index 7e140d36de..c19be47a9b 100644 --- a/docs/onadata.apps.restservice.migrations.rst +++ b/docs/onadata.apps.restservice.migrations.rst @@ -4,27 +4,50 @@ onadata.apps.restservice.migrations package Submodules ---------- -onadata.apps.restservice.migrations.0001_initial module -------------------------------------------------------- +onadata.apps.restservice.migrations.0001\_initial module +-------------------------------------------------------- .. automodule:: onadata.apps.restservice.migrations.0001_initial - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.restservice.migrations.0002_auto_20160524_0458 module ------------------------------------------------------------------- +onadata.apps.restservice.migrations.0002\_auto\_20160524\_0458 module +--------------------------------------------------------------------- .. automodule:: onadata.apps.restservice.migrations.0002_auto_20160524_0458 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: +onadata.apps.restservice.migrations.0003\_restservice\_active module +-------------------------------------------------------------------- + +.. automodule:: onadata.apps.restservice.migrations.0003_restservice_active + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.restservice.migrations.0004\_restservice\_inactive\_reason module +------------------------------------------------------------------------------ + +.. 
automodule:: onadata.apps.restservice.migrations.0004_restservice_inactive_reason + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.restservice.migrations.0005\_auto\_20190125\_0517 module +--------------------------------------------------------------------- + +.. automodule:: onadata.apps.restservice.migrations.0005_auto_20190125_0517 + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.restservice.migrations - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.restservice.rst b/docs/onadata.apps.restservice.rst index 9d58c07e36..c035962ed6 100644 --- a/docs/onadata.apps.restservice.rst +++ b/docs/onadata.apps.restservice.rst @@ -5,11 +5,13 @@ Subpackages ----------- .. toctree:: + :maxdepth: 4 - onadata.apps.restservice.migrations - onadata.apps.restservice.services - onadata.apps.restservice.tests - onadata.apps.restservice.viewsets + onadata.apps.restservice.management + onadata.apps.restservice.migrations + onadata.apps.restservice.services + onadata.apps.restservice.tests + onadata.apps.restservice.viewsets Submodules ---------- @@ -18,55 +20,62 @@ onadata.apps.restservice.RestServiceInterface module ---------------------------------------------------- .. automodule:: onadata.apps.restservice.RestServiceInterface - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.apps.restservice.forms module ------------------------------------- .. automodule:: onadata.apps.restservice.forms - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.apps.restservice.models module -------------------------------------- .. 
automodule:: onadata.apps.restservice.models - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.restservice.signals module +--------------------------------------- + +.. automodule:: onadata.apps.restservice.signals + :members: + :undoc-members: + :show-inheritance: onadata.apps.restservice.tasks module ------------------------------------- .. automodule:: onadata.apps.restservice.tasks - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.apps.restservice.utils module ------------------------------------- .. automodule:: onadata.apps.restservice.utils - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.apps.restservice.views module ------------------------------------- .. automodule:: onadata.apps.restservice.views - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.restservice - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.restservice.services.rst b/docs/onadata.apps.restservice.services.rst index 1e09dff048..6e09ef04b6 100644 --- a/docs/onadata.apps.restservice.services.rst +++ b/docs/onadata.apps.restservice.services.rst @@ -8,39 +8,38 @@ onadata.apps.restservice.services.f2dhis2 module ------------------------------------------------ .. automodule:: onadata.apps.restservice.services.f2dhis2 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.restservice.services.generic_json module ------------------------------------------------------ +onadata.apps.restservice.services.generic\_json module +------------------------------------------------------ .. 
automodule:: onadata.apps.restservice.services.generic_json - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.restservice.services.generic_xml module ----------------------------------------------------- +onadata.apps.restservice.services.generic\_xml module +----------------------------------------------------- .. automodule:: onadata.apps.restservice.services.generic_xml - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.apps.restservice.services.textit module ----------------------------------------------- .. automodule:: onadata.apps.restservice.services.textit - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.restservice.services - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.restservice.tests.rst b/docs/onadata.apps.restservice.tests.rst index 0f1f57be06..ee239a0e76 100644 --- a/docs/onadata.apps.restservice.tests.rst +++ b/docs/onadata.apps.restservice.tests.rst @@ -5,25 +5,25 @@ Subpackages ----------- .. toctree:: + :maxdepth: 4 - onadata.apps.restservice.tests.viewsets + onadata.apps.restservice.tests.viewsets Submodules ---------- -onadata.apps.restservice.tests.test_restservice module ------------------------------------------------------- +onadata.apps.restservice.tests.test\_restservice module +------------------------------------------------------- .. automodule:: onadata.apps.restservice.tests.test_restservice - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. 
automodule:: onadata.apps.restservice.tests - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.restservice.tests.viewsets.rst b/docs/onadata.apps.restservice.tests.viewsets.rst index 560d0fc8fb..145523454c 100644 --- a/docs/onadata.apps.restservice.tests.viewsets.rst +++ b/docs/onadata.apps.restservice.tests.viewsets.rst @@ -4,19 +4,18 @@ onadata.apps.restservice.tests.viewsets package Submodules ---------- -onadata.apps.restservice.tests.viewsets.test_restservicesviewset module ------------------------------------------------------------------------ +onadata.apps.restservice.tests.viewsets.test\_restservicesviewset module +------------------------------------------------------------------------ .. automodule:: onadata.apps.restservice.tests.viewsets.test_restservicesviewset - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.restservice.tests.viewsets - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.restservice.viewsets.rst b/docs/onadata.apps.restservice.viewsets.rst index 6dd2ce3066..da9a05c0b4 100644 --- a/docs/onadata.apps.restservice.viewsets.rst +++ b/docs/onadata.apps.restservice.viewsets.rst @@ -4,19 +4,18 @@ onadata.apps.restservice.viewsets package Submodules ---------- -onadata.apps.restservice.viewsets.restservices_viewset module -------------------------------------------------------------- +onadata.apps.restservice.viewsets.restservices\_viewset module +-------------------------------------------------------------- .. automodule:: onadata.apps.restservice.viewsets.restservices_viewset - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. 
automodule:: onadata.apps.restservice.viewsets - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.rst b/docs/onadata.apps.rst index 2d08eee941..5aadf721da 100644 --- a/docs/onadata.apps.rst +++ b/docs/onadata.apps.rst @@ -5,18 +5,20 @@ Subpackages ----------- .. toctree:: + :maxdepth: 4 - onadata.apps.api - onadata.apps.logger - onadata.apps.main - onadata.apps.restservice - onadata.apps.sms_support - onadata.apps.viewer + onadata.apps.api + onadata.apps.logger + onadata.apps.main + onadata.apps.messaging + onadata.apps.restservice + onadata.apps.sms_support + onadata.apps.viewer Module contents --------------- .. automodule:: onadata.apps - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.sms_support.providers.rst b/docs/onadata.apps.sms_support.providers.rst index 5927b45d6b..c0f412d8f8 100644 --- a/docs/onadata.apps.sms_support.providers.rst +++ b/docs/onadata.apps.sms_support.providers.rst @@ -1,46 +1,45 @@ -onadata.apps.sms_support.providers package -========================================== +onadata.apps.sms\_support.providers package +=========================================== Submodules ---------- -onadata.apps.sms_support.providers.smssync module -------------------------------------------------- +onadata.apps.sms\_support.providers.smssync module +-------------------------------------------------- .. automodule:: onadata.apps.sms_support.providers.smssync - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.sms_support.providers.telerivet module ---------------------------------------------------- +onadata.apps.sms\_support.providers.telerivet module +---------------------------------------------------- .. 
automodule:: onadata.apps.sms_support.providers.telerivet - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.sms_support.providers.textit module ------------------------------------------------- +onadata.apps.sms\_support.providers.textit module +------------------------------------------------- .. automodule:: onadata.apps.sms_support.providers.textit - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.sms_support.providers.twilio module ------------------------------------------------- +onadata.apps.sms\_support.providers.twilio module +------------------------------------------------- .. automodule:: onadata.apps.sms_support.providers.twilio - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.sms_support.providers - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.sms_support.rst b/docs/onadata.apps.sms_support.rst index 91c80cb3bd..49f99200f0 100644 --- a/docs/onadata.apps.sms_support.rst +++ b/docs/onadata.apps.sms_support.rst @@ -1,54 +1,62 @@ -onadata.apps.sms_support package -================================ +onadata.apps.sms\_support package +================================= Subpackages ----------- .. toctree:: + :maxdepth: 4 - onadata.apps.sms_support.providers - onadata.apps.sms_support.tests + onadata.apps.sms_support.providers + onadata.apps.sms_support.tests Submodules ---------- -onadata.apps.sms_support.autodoc module ---------------------------------------- +onadata.apps.sms\_support.autodoc module +---------------------------------------- .. 
automodule:: onadata.apps.sms_support.autodoc - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.sms_support.parser module --------------------------------------- +onadata.apps.sms\_support.models module +--------------------------------------- + +.. automodule:: onadata.apps.sms_support.models + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.sms\_support.parser module +--------------------------------------- .. automodule:: onadata.apps.sms_support.parser - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.sms_support.tools module -------------------------------------- +onadata.apps.sms\_support.tools module +-------------------------------------- .. automodule:: onadata.apps.sms_support.tools - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.sms_support.views module -------------------------------------- +onadata.apps.sms\_support.views module +-------------------------------------- .. automodule:: onadata.apps.sms_support.views - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. 
automodule:: onadata.apps.sms_support - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.sms_support.tests.rst b/docs/onadata.apps.sms_support.tests.rst index a6ed90833f..dd46e68c45 100644 --- a/docs/onadata.apps.sms_support.tests.rst +++ b/docs/onadata.apps.sms_support.tests.rst @@ -1,38 +1,37 @@ -onadata.apps.sms_support.tests package -====================================== +onadata.apps.sms\_support.tests package +======================================= Submodules ---------- -onadata.apps.sms_support.tests.test_base_sms module ---------------------------------------------------- +onadata.apps.sms\_support.tests.test\_base\_sms module +------------------------------------------------------ .. automodule:: onadata.apps.sms_support.tests.test_base_sms - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.sms_support.tests.test_notallowed module ------------------------------------------------------ +onadata.apps.sms\_support.tests.test\_notallowed module +------------------------------------------------------- .. automodule:: onadata.apps.sms_support.tests.test_notallowed - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.sms_support.tests.test_parser module -------------------------------------------------- +onadata.apps.sms\_support.tests.test\_parser module +--------------------------------------------------- .. automodule:: onadata.apps.sms_support.tests.test_parser - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. 
automodule:: onadata.apps.sms_support.tests - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.stats.rst b/docs/onadata.apps.stats.rst deleted file mode 100644 index 5e10d75a74..0000000000 --- a/docs/onadata.apps.stats.rst +++ /dev/null @@ -1,54 +0,0 @@ -onadata.apps.stats package -========================== - -Submodules ----------- - -onadata.apps.stats.models module --------------------------------- - -.. automodule:: onadata.apps.stats.models - :members: - :undoc-members: - :show-inheritance: - -onadata.apps.stats.tasks module -------------------------------- - -.. automodule:: onadata.apps.stats.tasks - :members: - :undoc-members: - :show-inheritance: - -onadata.apps.stats.tests module -------------------------------- - -.. automodule:: onadata.apps.stats.tests - :members: - :undoc-members: - :show-inheritance: - -onadata.apps.stats.utils module -------------------------------- - -.. automodule:: onadata.apps.stats.utils - :members: - :undoc-members: - :show-inheritance: - -onadata.apps.stats.views module -------------------------------- - -.. automodule:: onadata.apps.stats.views - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. automodule:: onadata.apps.stats - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/onadata.apps.viewer.management.commands.rst b/docs/onadata.apps.viewer.management.commands.rst index 793ac2b540..fdb7b4373a 100644 --- a/docs/onadata.apps.viewer.management.commands.rst +++ b/docs/onadata.apps.viewer.management.commands.rst @@ -8,47 +8,30 @@ onadata.apps.viewer.management.commands.import module ----------------------------------------------------- .. 
automodule:: onadata.apps.viewer.management.commands.import - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.management.commands.import_forms module ------------------------------------------------------------ - -.. automodule:: onadata.apps.viewer.management.commands.import_forms - :members: - :undoc-members: - :show-inheritance: - -onadata.apps.viewer.management.commands.mark_start_times module ---------------------------------------------------------------- +onadata.apps.viewer.management.commands.mark\_start\_times module +----------------------------------------------------------------- .. automodule:: onadata.apps.viewer.management.commands.mark_start_times - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.management.commands.set_uuid_in_xml module --------------------------------------------------------------- +onadata.apps.viewer.management.commands.set\_uuid\_in\_xml module +----------------------------------------------------------------- .. automodule:: onadata.apps.viewer.management.commands.set_uuid_in_xml - :members: - :undoc-members: - :show-inheritance: - -onadata.apps.viewer.management.commands.update_delete_from_mongo module ------------------------------------------------------------------------ - -.. automodule:: onadata.apps.viewer.management.commands.update_delete_from_mongo - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. 
automodule:: onadata.apps.viewer.management.commands - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.viewer.management.rst b/docs/onadata.apps.viewer.management.rst index e96735856e..93006c52a3 100644 --- a/docs/onadata.apps.viewer.management.rst +++ b/docs/onadata.apps.viewer.management.rst @@ -5,13 +5,14 @@ Subpackages ----------- .. toctree:: + :maxdepth: 4 - onadata.apps.viewer.management.commands + onadata.apps.viewer.management.commands Module contents --------------- .. automodule:: onadata.apps.viewer.management - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.viewer.migrations.rst b/docs/onadata.apps.viewer.migrations.rst index ea7e2d1c8d..ccbdc9c741 100644 --- a/docs/onadata.apps.viewer.migrations.rst +++ b/docs/onadata.apps.viewer.migrations.rst @@ -4,67 +4,106 @@ onadata.apps.viewer.migrations package Submodules ---------- -onadata.apps.viewer.migrations.0001_initial module --------------------------------------------------- +onadata.apps.viewer.migrations.0001\_initial module +--------------------------------------------------- .. automodule:: onadata.apps.viewer.migrations.0001_initial - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.migrations.0002_export_options module ---------------------------------------------------------- +onadata.apps.viewer.migrations.0001\_pre\-django\-3\-upgrade module +------------------------------------------------------------------- + +.. automodule:: onadata.apps.viewer.migrations.0001_pre-django-3-upgrade + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.viewer.migrations.0002\_alter\_export\_export\_type module +----------------------------------------------------------------------- + +.. 
automodule:: onadata.apps.viewer.migrations.0002_alter_export_export_type + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.viewer.migrations.0002\_export\_options module +----------------------------------------------------------- .. automodule:: onadata.apps.viewer.migrations.0002_export_options - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.migrations.0003_auto_20151226_0100 module -------------------------------------------------------------- +onadata.apps.viewer.migrations.0003\_auto\_20151226\_0100 module +---------------------------------------------------------------- .. automodule:: onadata.apps.viewer.migrations.0003_auto_20151226_0100 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.migrations.0004_auto_20151226_0109 module -------------------------------------------------------------- +onadata.apps.viewer.migrations.0003\_genericexport module +--------------------------------------------------------- + +.. automodule:: onadata.apps.viewer.migrations.0003_genericexport + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.viewer.migrations.0004\_auto\_20151226\_0109 module +---------------------------------------------------------------- .. automodule:: onadata.apps.viewer.migrations.0004_auto_20151226_0109 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.migrations.0005_auto_20160408_0325 module -------------------------------------------------------------- +onadata.apps.viewer.migrations.0005\_auto\_20160408\_0325 module +---------------------------------------------------------------- .. 
automodule:: onadata.apps.viewer.migrations.0005_auto_20160408_0325 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.migrations.0006_auto_20160418_0525 module -------------------------------------------------------------- +onadata.apps.viewer.migrations.0006\_auto\_20160418\_0525 module +---------------------------------------------------------------- .. automodule:: onadata.apps.viewer.migrations.0006_auto_20160418_0525 - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.migrations.0007_export_error_message module ---------------------------------------------------------------- +onadata.apps.viewer.migrations.0007\_export\_error\_message module +------------------------------------------------------------------ .. automodule:: onadata.apps.viewer.migrations.0007_export_error_message - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.viewer.migrations.0008\_auto\_20190125\_0517 module +---------------------------------------------------------------- + +.. automodule:: onadata.apps.viewer.migrations.0008_auto_20190125_0517 + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.viewer.migrations.0009\_alter\_export\_options module +------------------------------------------------------------------ +.. automodule:: onadata.apps.viewer.migrations.0009_alter_export_options + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. 
automodule:: onadata.apps.viewer.migrations - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.viewer.models.rst b/docs/onadata.apps.viewer.models.rst index e0904b0b94..15c5a76bcb 100644 --- a/docs/onadata.apps.viewer.models.rst +++ b/docs/onadata.apps.viewer.models.rst @@ -1,46 +1,53 @@ onadata.apps.viewer.models package ================================== +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + onadata.apps.viewer.models.tests + Submodules ---------- -onadata.apps.viewer.models.column_rename module ------------------------------------------------ +onadata.apps.viewer.models.column\_rename module +------------------------------------------------ .. automodule:: onadata.apps.viewer.models.column_rename - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.models.data_dictionary module -------------------------------------------------- +onadata.apps.viewer.models.data\_dictionary module +-------------------------------------------------- .. automodule:: onadata.apps.viewer.models.data_dictionary - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.apps.viewer.models.export module ---------------------------------------- .. automodule:: onadata.apps.viewer.models.export - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.models.parsed_instance module -------------------------------------------------- +onadata.apps.viewer.models.parsed\_instance module +-------------------------------------------------- .. automodule:: onadata.apps.viewer.models.parsed_instance - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. 
automodule:: onadata.apps.viewer.models - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.viewer.models.tests.rst b/docs/onadata.apps.viewer.models.tests.rst new file mode 100644 index 0000000000..ad24890e22 --- /dev/null +++ b/docs/onadata.apps.viewer.models.tests.rst @@ -0,0 +1,21 @@ +onadata.apps.viewer.models.tests package +======================================== + +Submodules +---------- + +onadata.apps.viewer.models.tests.test\_data\_dictionary module +-------------------------------------------------------------- + +.. automodule:: onadata.apps.viewer.models.tests.test_data_dictionary + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: onadata.apps.viewer.models.tests + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.viewer.rst b/docs/onadata.apps.viewer.rst index 279daea758..d1c8082f41 100644 --- a/docs/onadata.apps.viewer.rst +++ b/docs/onadata.apps.viewer.rst @@ -5,12 +5,13 @@ Subpackages ----------- .. toctree:: + :maxdepth: 4 - onadata.apps.viewer.management - onadata.apps.viewer.migrations - onadata.apps.viewer.models - onadata.apps.viewer.templatetags - onadata.apps.viewer.tests + onadata.apps.viewer.management + onadata.apps.viewer.migrations + onadata.apps.viewer.models + onadata.apps.viewer.templatetags + onadata.apps.viewer.tests Submodules ---------- @@ -19,47 +20,54 @@ onadata.apps.viewer.admin module -------------------------------- .. automodule:: onadata.apps.viewer.admin - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.parsed_instance_tools module ------------------------------------------------- +onadata.apps.viewer.parsed\_instance\_tools module +-------------------------------------------------- .. 
automodule:: onadata.apps.viewer.parsed_instance_tools - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.apps.viewer.signals module +---------------------------------- + +.. automodule:: onadata.apps.viewer.signals + :members: + :undoc-members: + :show-inheritance: onadata.apps.viewer.tasks module -------------------------------- .. automodule:: onadata.apps.viewer.tasks - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.apps.viewer.views module -------------------------------- .. automodule:: onadata.apps.viewer.views - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.xls_writer module -------------------------------------- +onadata.apps.viewer.xls\_writer module +-------------------------------------- .. automodule:: onadata.apps.viewer.xls_writer - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.apps.viewer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.viewer.templatetags.rst b/docs/onadata.apps.viewer.templatetags.rst index 2f881f3248..80f811fb9b 100644 --- a/docs/onadata.apps.viewer.templatetags.rst +++ b/docs/onadata.apps.viewer.templatetags.rst @@ -4,19 +4,18 @@ onadata.apps.viewer.templatetags package Submodules ---------- -onadata.apps.viewer.templatetags.charts_snippet module ------------------------------------------------------- +onadata.apps.viewer.templatetags.charts\_snippet module +------------------------------------------------------- .. automodule:: onadata.apps.viewer.templatetags.charts_snippet - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. 
automodule:: onadata.apps.viewer.templatetags - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.apps.viewer.tests.rst b/docs/onadata.apps.viewer.tests.rst index aefbbfe53a..b442860abe 100644 --- a/docs/onadata.apps.viewer.tests.rst +++ b/docs/onadata.apps.viewer.tests.rst @@ -4,131 +4,106 @@ onadata.apps.viewer.tests package Submodules ---------- -onadata.apps.viewer.tests.export_helpers module ------------------------------------------------ +onadata.apps.viewer.tests.export\_helpers module +------------------------------------------------ .. automodule:: onadata.apps.viewer.tests.export_helpers - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.tests.surveyor_registration module +onadata.apps.viewer.tests.test\_attachment\_url module ------------------------------------------------------ -.. automodule:: onadata.apps.viewer.tests.surveyor_registration - :members: - :undoc-members: - :show-inheritance: - -onadata.apps.viewer.tests.test_attachment_url module ----------------------------------------------------- - .. automodule:: onadata.apps.viewer.tests.test_attachment_url - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.tests.test_charts_view module -------------------------------------------------- +onadata.apps.viewer.tests.test\_charts\_view module +--------------------------------------------------- .. automodule:: onadata.apps.viewer.tests.test_charts_view - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.tests.test_data_view module ------------------------------------------------ +onadata.apps.viewer.tests.test\_data\_view module +------------------------------------------------- .. 
automodule:: onadata.apps.viewer.tests.test_data_view - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.tests.test_export_builder module ----------------------------------------------------- - -.. automodule:: onadata.apps.viewer.tests.test_export_builder - :members: - :undoc-members: - :show-inheritance: - -onadata.apps.viewer.tests.test_export_list module -------------------------------------------------- +onadata.apps.viewer.tests.test\_export\_list module +--------------------------------------------------- .. automodule:: onadata.apps.viewer.tests.test_export_list - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.tests.test_exports module ---------------------------------------------- +onadata.apps.viewer.tests.test\_exports module +---------------------------------------------- .. automodule:: onadata.apps.viewer.tests.test_exports - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.tests.test_instance_view module ---------------------------------------------------- +onadata.apps.viewer.tests.test\_instance\_view module +----------------------------------------------------- .. automodule:: onadata.apps.viewer.tests.test_instance_view - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.tests.test_kml_export module ------------------------------------------------- +onadata.apps.viewer.tests.test\_kml\_export module +-------------------------------------------------- .. 
automodule:: onadata.apps.viewer.tests.test_kml_export - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.tests.test_map_view module ----------------------------------------------- +onadata.apps.viewer.tests.test\_map\_view module +------------------------------------------------ .. automodule:: onadata.apps.viewer.tests.test_map_view - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.tests.test_parsed_instance module ------------------------------------------------------ +onadata.apps.viewer.tests.test\_parsed\_instance module +------------------------------------------------------- .. automodule:: onadata.apps.viewer.tests.test_parsed_instance - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.tests.test_stats_table_view module ------------------------------------------------------- +onadata.apps.viewer.tests.test\_stats\_table\_view module +--------------------------------------------------------- .. automodule:: onadata.apps.viewer.tests.test_stats_table_view - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.apps.viewer.tests.test_tasks module -------------------------------------------- +onadata.apps.viewer.tests.test\_tasks module +-------------------------------------------- .. automodule:: onadata.apps.viewer.tests.test_tasks - :members: - :undoc-members: - :show-inheritance: - -onadata.apps.viewer.tests.test_viewer_tools module --------------------------------------------------- - -.. automodule:: onadata.apps.viewer.tests.test_viewer_tools - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. 
automodule:: onadata.apps.viewer.tests - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.libs.data.rst b/docs/onadata.libs.data.rst index 10dbad28b5..b4679befb6 100644 --- a/docs/onadata.libs.data.rst +++ b/docs/onadata.libs.data.rst @@ -1,13 +1,6 @@ onadata.libs.data package ========================= -Subpackages ------------ - -.. toctree:: - - onadata.libs.data.tests - Submodules ---------- @@ -15,23 +8,22 @@ onadata.libs.data.query module ------------------------------ .. automodule:: onadata.libs.data.query - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.libs.data.statistics module ----------------------------------- .. automodule:: onadata.libs.data.statistics - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.libs.data - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.libs.data.tests.rst b/docs/onadata.libs.data.tests.rst deleted file mode 100644 index 75c628e446..0000000000 --- a/docs/onadata.libs.data.tests.rst +++ /dev/null @@ -1,30 +0,0 @@ -onadata.libs.data.tests package -=============================== - -Submodules ----------- - -onadata.libs.data.tests.test_statistics module ----------------------------------------------- - -.. automodule:: onadata.libs.data.tests.test_statistics - :members: - :undoc-members: - :show-inheritance: - -onadata.libs.data.tests.test_tools module ------------------------------------------ - -.. automodule:: onadata.libs.data.tests.test_tools - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. 
automodule:: onadata.libs.data.tests - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/onadata.libs.mixins.rst b/docs/onadata.libs.mixins.rst index 77b34cdcea..aa4957aa98 100644 --- a/docs/onadata.libs.mixins.rst +++ b/docs/onadata.libs.mixins.rst @@ -4,115 +4,114 @@ onadata.libs.mixins package Submodules ---------- -onadata.libs.mixins.anonymous_user_mixin module ------------------------------------------------ +onadata.libs.mixins.anonymous\_user\_mixin module +------------------------------------------------- .. automodule:: onadata.libs.mixins.anonymous_user_mixin - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.mixins.anonymous_user_public_forms_mixin module ------------------------------------------------------------- +onadata.libs.mixins.anonymous\_user\_public\_forms\_mixin module +---------------------------------------------------------------- .. automodule:: onadata.libs.mixins.anonymous_user_public_forms_mixin - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.mixins.authenticate_header_mixin module ----------------------------------------------------- +onadata.libs.mixins.authenticate\_header\_mixin module +------------------------------------------------------ .. automodule:: onadata.libs.mixins.authenticate_header_mixin - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.mixins.cache_control_mixin module +onadata.libs.mixins.bulk\_create\_mixin module ---------------------------------------------- +.. automodule:: onadata.libs.mixins.bulk_create_mixin + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.mixins.cache\_control\_mixin module +------------------------------------------------ + .. 
automodule:: onadata.libs.mixins.cache_control_mixin - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.mixins.etags_mixin module --------------------------------------- +onadata.libs.mixins.etags\_mixin module +--------------------------------------- .. automodule:: onadata.libs.mixins.etags_mixin - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.mixins.labels_mixin module ---------------------------------------- +onadata.libs.mixins.labels\_mixin module +---------------------------------------- .. automodule:: onadata.libs.mixins.labels_mixin - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.mixins.last_modified_mixin module ----------------------------------------------- +onadata.libs.mixins.last\_modified\_mixin module +------------------------------------------------ .. automodule:: onadata.libs.mixins.last_modified_mixin - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.mixins.multi_lookup_mixin module ---------------------------------------------- +onadata.libs.mixins.multi\_lookup\_mixin module +----------------------------------------------- .. automodule:: onadata.libs.mixins.multi_lookup_mixin - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.mixins.object_lookup_mixin module ----------------------------------------------- +onadata.libs.mixins.object\_lookup\_mixin module +------------------------------------------------ .. 
automodule:: onadata.libs.mixins.object_lookup_mixin - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.mixins.openrosa_headers_mixin module -------------------------------------------------- +onadata.libs.mixins.openrosa\_headers\_mixin module +--------------------------------------------------- .. automodule:: onadata.libs.mixins.openrosa_headers_mixin - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.mixins.profiler_mixin module ------------------------------------------ +onadata.libs.mixins.profiler\_mixin module +------------------------------------------ .. automodule:: onadata.libs.mixins.profiler_mixin - :members: - :undoc-members: - :show-inheritance: - -onadata.libs.mixins.total_header_mixin module ---------------------------------------------- + :members: + :undoc-members: + :show-inheritance: -.. automodule:: onadata.libs.mixins.total_header_mixin - :members: - :undoc-members: - :show-inheritance: - -onadata.libs.mixins.xform_id_string_lookup module -------------------------------------------------- +onadata.libs.mixins.xform\_id\_string\_lookup module +---------------------------------------------------- .. automodule:: onadata.libs.mixins.xform_id_string_lookup - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. 
automodule:: onadata.libs.mixins - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.libs.models.rst b/docs/onadata.libs.models.rst index cda91271f1..2eed04c5d9 100644 --- a/docs/onadata.libs.models.rst +++ b/docs/onadata.libs.models.rst @@ -4,75 +4,74 @@ onadata.libs.models package Submodules ---------- -onadata.libs.models.base_model module -------------------------------------- +onadata.libs.models.base\_model module +-------------------------------------- .. automodule:: onadata.libs.models.base_model - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.models.clone_xform module --------------------------------------- +onadata.libs.models.clone\_xform module +--------------------------------------- .. automodule:: onadata.libs.models.clone_xform - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.models.share_project module ----------------------------------------- +onadata.libs.models.share\_project module +----------------------------------------- .. automodule:: onadata.libs.models.share_project - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.models.share_team_project module ---------------------------------------------- +onadata.libs.models.share\_team\_project module +----------------------------------------------- .. automodule:: onadata.libs.models.share_team_project - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.models.share_xform module --------------------------------------- +onadata.libs.models.share\_xform module +--------------------------------------- .. 
automodule:: onadata.libs.models.share_xform - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.libs.models.signals module ---------------------------------- .. automodule:: onadata.libs.models.signals - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.libs.models.sorting module ---------------------------------- .. automodule:: onadata.libs.models.sorting - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.models.textit_service module ------------------------------------------ +onadata.libs.models.textit\_service module +------------------------------------------ .. automodule:: onadata.libs.models.textit_service - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.libs.models - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.libs.profiling.rst b/docs/onadata.libs.profiling.rst index 6bf08ff27e..afee58e734 100644 --- a/docs/onadata.libs.profiling.rst +++ b/docs/onadata.libs.profiling.rst @@ -8,15 +8,14 @@ onadata.libs.profiling.sql module --------------------------------- .. automodule:: onadata.libs.profiling.sql - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.libs.profiling - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.libs.renderers.rst b/docs/onadata.libs.renderers.rst index 726331ebd8..c39693950a 100644 --- a/docs/onadata.libs.renderers.rst +++ b/docs/onadata.libs.renderers.rst @@ -8,15 +8,14 @@ onadata.libs.renderers.renderers module --------------------------------------- .. 
automodule:: onadata.libs.renderers.renderers - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.libs.renderers - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.libs.rst b/docs/onadata.libs.rst index 65b5fb48ad..a8312ebdb1 100644 --- a/docs/onadata.libs.rst +++ b/docs/onadata.libs.rst @@ -5,15 +5,17 @@ Subpackages ----------- .. toctree:: - - onadata.libs.data - onadata.libs.mixins - onadata.libs.models - onadata.libs.profiling - onadata.libs.renderers - onadata.libs.serializers - onadata.libs.tests - onadata.libs.utils + :maxdepth: 4 + + onadata.libs.data + onadata.libs.mixins + onadata.libs.models + onadata.libs.profiling + onadata.libs.renderers + onadata.libs.serializers + onadata.libs.test_utils + onadata.libs.tests + onadata.libs.utils Submodules ---------- @@ -22,55 +24,62 @@ onadata.libs.authentication module ---------------------------------- .. automodule:: onadata.libs.authentication - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.libs.baseviewset module ------------------------------- .. automodule:: onadata.libs.baseviewset - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.libs.exceptions module ------------------------------ .. automodule:: onadata.libs.exceptions - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.libs.filters module --------------------------- .. automodule:: onadata.libs.filters - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.libs.pagination module ------------------------------ .. 
automodule:: onadata.libs.pagination - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.libs.permissions module ------------------------------- .. automodule:: onadata.libs.permissions - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.throttle module +---------------------------- +.. automodule:: onadata.libs.throttle + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.libs - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.libs.serializers.fields.rst b/docs/onadata.libs.serializers.fields.rst index 2e2a4160e3..c2ec8765ce 100644 --- a/docs/onadata.libs.serializers.fields.rst +++ b/docs/onadata.libs.serializers.fields.rst @@ -4,91 +4,98 @@ onadata.libs.serializers.fields package Submodules ---------- -onadata.libs.serializers.fields.hyperlinked_multi_identity_field module ------------------------------------------------------------------------ +onadata.libs.serializers.fields.hyperlinked\_multi\_identity\_field module +-------------------------------------------------------------------------- .. automodule:: onadata.libs.serializers.fields.hyperlinked_multi_identity_field - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.fields.hyperlinked_multi_related_field module ----------------------------------------------------------------------- +onadata.libs.serializers.fields.hyperlinked\_multi\_related\_field module +------------------------------------------------------------------------- .. 
automodule:: onadata.libs.serializers.fields.hyperlinked_multi_related_field - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.fields.instance_related_field module -------------------------------------------------------------- +onadata.libs.serializers.fields.instance\_related\_field module +--------------------------------------------------------------- .. automodule:: onadata.libs.serializers.fields.instance_related_field - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.fields.json_field module -------------------------------------------------- +onadata.libs.serializers.fields.json\_field module +-------------------------------------------------- .. automodule:: onadata.libs.serializers.fields.json_field - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.fields.organization_field module ---------------------------------------------------------- +onadata.libs.serializers.fields.organization\_field module +---------------------------------------------------------- .. automodule:: onadata.libs.serializers.fields.organization_field - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.fields.project_field module ----------------------------------------------------- +onadata.libs.serializers.fields.project\_field module +----------------------------------------------------- .. 
automodule:: onadata.libs.serializers.fields.project_field - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.fields.project_related_field module ------------------------------------------------------------- +onadata.libs.serializers.fields.project\_related\_field module +-------------------------------------------------------------- .. automodule:: onadata.libs.serializers.fields.project_related_field - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.fields.team_field module -------------------------------------------------- +onadata.libs.serializers.fields.team\_field module +-------------------------------------------------- .. automodule:: onadata.libs.serializers.fields.team_field - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.fields.xform_field module --------------------------------------------------- +onadata.libs.serializers.fields.utils module +-------------------------------------------- + +.. automodule:: onadata.libs.serializers.fields.utils + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.serializers.fields.xform\_field module +--------------------------------------------------- .. automodule:: onadata.libs.serializers.fields.xform_field - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.fields.xform_related_field module ----------------------------------------------------------- +onadata.libs.serializers.fields.xform\_related\_field module +------------------------------------------------------------ .. automodule:: onadata.libs.serializers.fields.xform_related_field - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. 
automodule:: onadata.libs.serializers.fields - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.libs.serializers.rst b/docs/onadata.libs.serializers.rst index 540802507b..62f13f78af 100644 --- a/docs/onadata.libs.serializers.rst +++ b/docs/onadata.libs.serializers.rst @@ -5,217 +5,273 @@ Subpackages ----------- .. toctree:: + :maxdepth: 4 - onadata.libs.serializers.fields + onadata.libs.serializers.fields Submodules ---------- -onadata.libs.serializers.attachment_serializer module ------------------------------------------------------ +onadata.libs.serializers.attachment\_serializer module +------------------------------------------------------ .. automodule:: onadata.libs.serializers.attachment_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.chart_serializer module ------------------------------------------------- +onadata.libs.serializers.chart\_serializer module +------------------------------------------------- .. automodule:: onadata.libs.serializers.chart_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.clone_xform_serializer module ------------------------------------------------------- +onadata.libs.serializers.clone\_xform\_serializer module +-------------------------------------------------------- .. automodule:: onadata.libs.serializers.clone_xform_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.data_serializer module ------------------------------------------------ +onadata.libs.serializers.data\_serializer module +------------------------------------------------ .. 
automodule:: onadata.libs.serializers.data_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.dataview_serializer module ---------------------------------------------------- +onadata.libs.serializers.dataview\_serializer module +---------------------------------------------------- .. automodule:: onadata.libs.serializers.dataview_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.export_serializer module -------------------------------------------------- +onadata.libs.serializers.entity\_serializer module +-------------------------------------------------- -.. automodule:: onadata.libs.serializers.export_serializer - :members: - :undoc-members: - :show-inheritance: +.. automodule:: onadata.libs.serializers.entity_serializer + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.geojson_serializer module +onadata.libs.serializers.export\_serializer module -------------------------------------------------- -.. automodule:: onadata.libs.serializers.geojson_serializer - :members: - :undoc-members: - :show-inheritance: +.. automodule:: onadata.libs.serializers.export_serializer + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.serializers.floip\_serializer module +------------------------------------------------- + +.. automodule:: onadata.libs.serializers.floip_serializer + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.metadata_serializer module +onadata.libs.serializers.geojson\_serializer module --------------------------------------------------- +.. automodule:: onadata.libs.serializers.geojson_serializer + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.serializers.merged\_xform\_serializer module +--------------------------------------------------------- + +.. 
automodule:: onadata.libs.serializers.merged_xform_serializer + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.serializers.metadata\_serializer module +---------------------------------------------------- + .. automodule:: onadata.libs.serializers.metadata_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.serializers.monthly\_submissions\_serializer module +---------------------------------------------------------------- -onadata.libs.serializers.note_serializer module ------------------------------------------------ +.. automodule:: onadata.libs.serializers.monthly_submissions_serializer + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.serializers.note\_serializer module +------------------------------------------------ .. automodule:: onadata.libs.serializers.note_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.organization_member_serializer module --------------------------------------------------------------- +onadata.libs.serializers.open\_data\_serializer module +------------------------------------------------------ + +.. automodule:: onadata.libs.serializers.open_data_serializer + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.serializers.organization\_member\_serializer module +---------------------------------------------------------------- .. automodule:: onadata.libs.serializers.organization_member_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.organization_serializer module -------------------------------------------------------- +onadata.libs.serializers.organization\_serializer module +-------------------------------------------------------- .. 
automodule:: onadata.libs.serializers.organization_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.password_reset_serializer module ---------------------------------------------------------- +onadata.libs.serializers.password\_reset\_serializer module +----------------------------------------------------------- .. automodule:: onadata.libs.serializers.password_reset_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.project_serializer module --------------------------------------------------- +onadata.libs.serializers.project\_invitation\_serializer module +--------------------------------------------------------------- + +.. automodule:: onadata.libs.serializers.project_invitation_serializer + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.serializers.project\_serializer module +--------------------------------------------------- .. automodule:: onadata.libs.serializers.project_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.restservices_serializer module -------------------------------------------------------- +onadata.libs.serializers.restservices\_serializer module +-------------------------------------------------------- .. automodule:: onadata.libs.serializers.restservices_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.share_project_serializer module --------------------------------------------------------- +onadata.libs.serializers.share\_project\_serializer module +---------------------------------------------------------- .. 
automodule:: onadata.libs.serializers.share_project_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.share_team_project_serializer module -------------------------------------------------------------- +onadata.libs.serializers.share\_team\_project\_serializer module +---------------------------------------------------------------- .. automodule:: onadata.libs.serializers.share_team_project_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.share_xform_serializer module ------------------------------------------------------- +onadata.libs.serializers.share\_xform\_serializer module +-------------------------------------------------------- .. automodule:: onadata.libs.serializers.share_xform_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.stats_serializer module ------------------------------------------------- +onadata.libs.serializers.stats\_serializer module +------------------------------------------------- .. automodule:: onadata.libs.serializers.stats_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.tag_list_serializer module ---------------------------------------------------- +onadata.libs.serializers.submission\_review\_serializer module +-------------------------------------------------------------- + +.. automodule:: onadata.libs.serializers.submission_review_serializer + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.serializers.tag\_list\_serializer module +----------------------------------------------------- .. 
automodule:: onadata.libs.serializers.tag_list_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.team_serializer module ------------------------------------------------ +onadata.libs.serializers.team\_serializer module +------------------------------------------------ .. automodule:: onadata.libs.serializers.team_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.textit_serializer module -------------------------------------------------- +onadata.libs.serializers.textit\_serializer module +-------------------------------------------------- .. automodule:: onadata.libs.serializers.textit_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.user_profile_serializer module -------------------------------------------------------- +onadata.libs.serializers.user\_profile\_serializer module +--------------------------------------------------------- .. automodule:: onadata.libs.serializers.user_profile_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.user_serializer module ------------------------------------------------ +onadata.libs.serializers.user\_serializer module +------------------------------------------------ .. automodule:: onadata.libs.serializers.user_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.widget_serializer module -------------------------------------------------- +onadata.libs.serializers.widget\_serializer module +-------------------------------------------------- .. 
automodule:: onadata.libs.serializers.widget_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.serializers.xform_serializer module ------------------------------------------------- +onadata.libs.serializers.xform\_serializer module +------------------------------------------------- .. automodule:: onadata.libs.serializers.xform_serializer - :members: - :undoc-members: - :show-inheritance: - + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.libs.serializers - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.libs.test_utils.rst b/docs/onadata.libs.test_utils.rst new file mode 100644 index 0000000000..29759cc40d --- /dev/null +++ b/docs/onadata.libs.test_utils.rst @@ -0,0 +1,29 @@ +onadata.libs.test\_utils package +================================ + +Submodules +---------- + +onadata.libs.test\_utils.md\_table module +----------------------------------------- + +.. automodule:: onadata.libs.test_utils.md_table + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.test\_utils.pyxform\_test\_case module +--------------------------------------------------- + +.. automodule:: onadata.libs.test_utils.pyxform_test_case + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: onadata.libs.test_utils + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.libs.tests.data.rst b/docs/onadata.libs.tests.data.rst new file mode 100644 index 0000000000..75ce76ef94 --- /dev/null +++ b/docs/onadata.libs.tests.data.rst @@ -0,0 +1,29 @@ +onadata.libs.tests.data package +=============================== + +Submodules +---------- + +onadata.libs.tests.data.test\_statistics module +----------------------------------------------- + +.. 
automodule:: onadata.libs.tests.data.test_statistics + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.data.test\_tools module +------------------------------------------ + +.. automodule:: onadata.libs.tests.data.test_tools + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: onadata.libs.tests.data + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.libs.tests.models.rst b/docs/onadata.libs.tests.models.rst new file mode 100644 index 0000000000..e7124f68ae --- /dev/null +++ b/docs/onadata.libs.tests.models.rst @@ -0,0 +1,21 @@ +onadata.libs.tests.models package +================================= + +Submodules +---------- + +onadata.libs.tests.models.test\_share\_project module +----------------------------------------------------- + +.. automodule:: onadata.libs.tests.models.test_share_project + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: onadata.libs.tests.models + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.libs.tests.rst b/docs/onadata.libs.tests.rst index 81a271605b..ab4b7985ec 100644 --- a/docs/onadata.libs.tests.rst +++ b/docs/onadata.libs.tests.rst @@ -5,34 +5,60 @@ Subpackages ----------- .. toctree:: + :maxdepth: 4 - onadata.libs.tests.serializers - onadata.libs.tests.utils + onadata.libs.tests.data + onadata.libs.tests.models + onadata.libs.tests.serializers + onadata.libs.tests.utils Submodules ---------- -onadata.libs.tests.test_authentication module ---------------------------------------------- +onadata.libs.tests.test\_authentication module +---------------------------------------------- .. 
automodule:: onadata.libs.tests.test_authentication - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.tests.test_permissions module +onadata.libs.tests.test\_pagination module ------------------------------------------ +.. automodule:: onadata.libs.tests.test_pagination + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.test\_permissions module +------------------------------------------- + .. automodule:: onadata.libs.tests.test_permissions - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.test\_renderers module +----------------------------------------- + +.. automodule:: onadata.libs.tests.test_renderers + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.test\_throttle module +---------------------------------------- +.. automodule:: onadata.libs.tests.test_throttle + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.libs.tests - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.libs.tests.serializers.rst b/docs/onadata.libs.tests.serializers.rst index 4ab4f20ae7..0e2b6e1b47 100644 --- a/docs/onadata.libs.tests.serializers.rst +++ b/docs/onadata.libs.tests.serializers.rst @@ -4,59 +4,122 @@ onadata.libs.tests.serializers package Submodules ---------- -onadata.libs.tests.serializers.test_attachment_serializer module ----------------------------------------------------------------- +onadata.libs.tests.serializers.test\_attachment\_serializer module +------------------------------------------------------------------ .. 
automodule:: onadata.libs.tests.serializers.test_attachment_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.tests.serializers.test_dataview_serializer module --------------------------------------------------------------- +onadata.libs.tests.serializers.test\_data\_serializer module +------------------------------------------------------------ + +.. automodule:: onadata.libs.tests.serializers.test_data_serializer + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.serializers.test\_dataview\_serializer module +---------------------------------------------------------------- .. automodule:: onadata.libs.tests.serializers.test_dataview_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.tests.serializers.test_organization_serializer module ------------------------------------------------------------------- +onadata.libs.tests.serializers.test\_export\_serializer module +-------------------------------------------------------------- -.. automodule:: onadata.libs.tests.serializers.test_organization_serializer - :members: - :undoc-members: - :show-inheritance: +.. automodule:: onadata.libs.tests.serializers.test_export_serializer + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.serializers.test\_geojson\_serializer module +--------------------------------------------------------------- + +.. automodule:: onadata.libs.tests.serializers.test_geojson_serializer + :members: + :undoc-members: + :show-inheritance: -onadata.libs.tests.serializers.test_password_reset_serializer module +onadata.libs.tests.serializers.test\_merged\_xform\_serializer module +--------------------------------------------------------------------- + +.. 
automodule:: onadata.libs.tests.serializers.test_merged_xform_serializer + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.serializers.test\_metadata\_serializer module +---------------------------------------------------------------- + +.. automodule:: onadata.libs.tests.serializers.test_metadata_serializer + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.serializers.test\_organization\_serializer module -------------------------------------------------------------------- +.. automodule:: onadata.libs.tests.serializers.test_organization_serializer + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.serializers.test\_password\_reset\_serializer module +----------------------------------------------------------------------- + .. automodule:: onadata.libs.tests.serializers.test_password_reset_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.tests.serializers.test_project_serializer module -------------------------------------------------------------- +onadata.libs.tests.serializers.test\_project\_serializer module +--------------------------------------------------------------- .. automodule:: onadata.libs.tests.serializers.test_project_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.tests.serializers.test_user_profile_serializer module ------------------------------------------------------------------- +onadata.libs.tests.serializers.test\_share\_project\_serializer module +---------------------------------------------------------------------- + +.. automodule:: onadata.libs.tests.serializers.test_share_project_serializer + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.serializers.test\_submission\_review\_serializer module +-------------------------------------------------------------------------- + +.. 
automodule:: onadata.libs.tests.serializers.test_submission_review_serializer + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.serializers.test\_user\_profile\_serializer module +--------------------------------------------------------------------- .. automodule:: onadata.libs.tests.serializers.test_user_profile_serializer - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.serializers.test\_xform\_serializer module +------------------------------------------------------------- +.. automodule:: onadata.libs.tests.serializers.test_xform_serializer + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.libs.tests.serializers - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.libs.tests.utils.rst b/docs/onadata.libs.tests.utils.rst index 56d01b7df0..cf43ebf5c1 100644 --- a/docs/onadata.libs.tests.utils.rst +++ b/docs/onadata.libs.tests.utils.rst @@ -4,107 +4,194 @@ onadata.libs.tests.utils package Submodules ---------- -onadata.libs.tests.utils.test_api_export_tools module ------------------------------------------------------ +onadata.libs.tests.utils.test\_analytics module +----------------------------------------------- + +.. automodule:: onadata.libs.tests.utils.test_analytics + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.utils.test\_api\_export\_tools module +-------------------------------------------------------- .. automodule:: onadata.libs.tests.utils.test_api_export_tools - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.tests.utils.test_async_status module -------------------------------------------------- +onadata.libs.tests.utils.test\_async\_status module +--------------------------------------------------- .. 
automodule:: onadata.libs.tests.utils.test_async_status - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.tests.utils.test_backup_tools module -------------------------------------------------- +onadata.libs.tests.utils.test\_backup\_tools module +--------------------------------------------------- .. automodule:: onadata.libs.tests.utils.test_backup_tools - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.tests.utils.test_chart_tools module ------------------------------------------------- +onadata.libs.tests.utils.test\_cache\_tools module +-------------------------------------------------- + +.. automodule:: onadata.libs.tests.utils.test_cache_tools + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.utils.test\_chart\_tools module +-------------------------------------------------- .. automodule:: onadata.libs.tests.utils.test_chart_tools - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.tests.utils.test_csv_builder module ------------------------------------------------- +onadata.libs.tests.utils.test\_csv\_builder module +-------------------------------------------------- .. automodule:: onadata.libs.tests.utils.test_csv_builder - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.tests.utils.test_csv_import module ------------------------------------------------ +onadata.libs.tests.utils.test\_csv\_import module +------------------------------------------------- .. 
automodule:: onadata.libs.tests.utils.test_csv_import - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.tests.utils.test_dict_tools module ------------------------------------------------ +onadata.libs.tests.utils.test\_dict\_tools module +------------------------------------------------- .. automodule:: onadata.libs.tests.utils.test_dict_tools - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.tests.utils.test_export_tools module -------------------------------------------------- +onadata.libs.tests.utils.test\_email module +------------------------------------------- + +.. automodule:: onadata.libs.tests.utils.test_email + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.utils.test\_export\_builder module +----------------------------------------------------- + +.. automodule:: onadata.libs.tests.utils.test_export_builder + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.utils.test\_export\_tools module +--------------------------------------------------- .. automodule:: onadata.libs.tests.utils.test_export_tools - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.tests.utils.test_image_tools module ------------------------------------------------- +onadata.libs.tests.utils.test\_image\_tools module +-------------------------------------------------- .. automodule:: onadata.libs.tests.utils.test_image_tools - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.tests.utils.test_logger_tools module -------------------------------------------------- +onadata.libs.tests.utils.test\_logger\_tools module +--------------------------------------------------- .. 
automodule:: onadata.libs.tests.utils.test_logger_tools - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.utils.test\_middleware module +------------------------------------------------ + +.. automodule:: onadata.libs.tests.utils.test_middleware + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.utils.test\_model\_tools module +-------------------------------------------------- + +.. automodule:: onadata.libs.tests.utils.test_model_tools + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.utils.test\_numeric module +--------------------------------------------- -onadata.libs.tests.utils.test_osm module ----------------------------------------- +.. automodule:: onadata.libs.tests.utils.test_numeric + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.utils.test\_osm module +----------------------------------------- .. automodule:: onadata.libs.tests.utils.test_osm - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.tests.utils.test_qrcode module -------------------------------------------- +onadata.libs.tests.utils.test\_password\_validator module +--------------------------------------------------------- + +.. automodule:: onadata.libs.tests.utils.test_password_validator + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.utils.test\_project\_utils module +---------------------------------------------------- + +.. automodule:: onadata.libs.tests.utils.test_project_utils + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.utils.test\_qrcode module +-------------------------------------------- .. 
automodule:: onadata.libs.tests.utils.test_qrcode - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.utils.test\_sorting module +--------------------------------------------- + +.. automodule:: onadata.libs.tests.utils.test_sorting + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.tests.utils.test\_viewer\_tools module +--------------------------------------------------- +.. automodule:: onadata.libs.tests.utils.test_viewer_tools + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.libs.tests.utils - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.libs.utils.rst b/docs/onadata.libs.utils.rst index 4090cefee3..338dbd90dc 100644 --- a/docs/onadata.libs.utils.rst +++ b/docs/onadata.libs.utils.rst @@ -4,291 +4,314 @@ onadata.libs.utils package Submodules ---------- -onadata.libs.utils.api_export_tools module ------------------------------------------- +onadata.libs.utils.analytics module +----------------------------------- -.. automodule:: onadata.libs.utils.api_export_tools - :members: - :undoc-members: - :show-inheritance: +.. automodule:: onadata.libs.utils.analytics + :members: + :undoc-members: + :show-inheritance: -onadata.libs.utils.async_status module --------------------------------------- +onadata.libs.utils.api\_export\_tools module +-------------------------------------------- -.. automodule:: onadata.libs.utils.async_status - :members: - :undoc-members: - :show-inheritance: +.. automodule:: onadata.libs.utils.api_export_tools + :members: + :undoc-members: + :show-inheritance: -onadata.libs.utils.audit module -------------------------------- +onadata.libs.utils.async\_status module +--------------------------------------- -.. automodule:: onadata.libs.utils.audit - :members: - :undoc-members: - :show-inheritance: +.. 
automodule:: onadata.libs.utils.async_status + :members: + :undoc-members: + :show-inheritance: -onadata.libs.utils.backup_tools module --------------------------------------- +onadata.libs.utils.backup\_tools module +--------------------------------------- .. automodule:: onadata.libs.utils.backup_tools - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.utils.briefcase_client module ------------------------------------------- +onadata.libs.utils.briefcase\_client module +------------------------------------------- .. automodule:: onadata.libs.utils.briefcase_client - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.utils.cache_tools module -------------------------------------- +onadata.libs.utils.cache\_tools module +-------------------------------------- .. automodule:: onadata.libs.utils.cache_tools - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.utils.chart_tools module -------------------------------------- +onadata.libs.utils.chart\_tools module +-------------------------------------- .. automodule:: onadata.libs.utils.chart_tools - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.utils.common_tags module -------------------------------------- +onadata.libs.utils.common\_tags module +-------------------------------------- .. automodule:: onadata.libs.utils.common_tags - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.utils.common_tools module --------------------------------------- +onadata.libs.utils.common\_tools module +--------------------------------------- .. 
automodule:: onadata.libs.utils.common_tools - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.utils.country_field module ---------------------------------------- +onadata.libs.utils.country\_field module +---------------------------------------- .. automodule:: onadata.libs.utils.country_field - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.utils.csv_builder module -------------------------------------- +onadata.libs.utils.csv\_builder module +-------------------------------------- .. automodule:: onadata.libs.utils.csv_builder - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.utils.csv_import module ------------------------------------- +onadata.libs.utils.csv\_import module +------------------------------------- .. automodule:: onadata.libs.utils.csv_import - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.utils.csv_reader module ------------------------------------- +onadata.libs.utils.csv\_reader module +------------------------------------- .. automodule:: onadata.libs.utils.csv_reader - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.libs.utils.decorators module ------------------------------------ .. automodule:: onadata.libs.utils.decorators - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.utils.dict_tools module ------------------------------------- +onadata.libs.utils.dict\_tools module +------------------------------------- .. 
automodule:: onadata.libs.utils.dict_tools - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.utils.export_builder module ----------------------------------------- +onadata.libs.utils.email module +------------------------------- + +.. automodule:: onadata.libs.utils.email + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.utils.export\_builder module +----------------------------------------- .. automodule:: onadata.libs.utils.export_builder - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.utils.export_tools module --------------------------------------- +onadata.libs.utils.export\_tools module +--------------------------------------- .. automodule:: onadata.libs.utils.export_tools - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.libs.utils.google module -------------------------------- .. automodule:: onadata.libs.utils.google - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.libs.utils.gravatar module ---------------------------------- .. automodule:: onadata.libs.utils.gravatar - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.utils.image_tools module -------------------------------------- +onadata.libs.utils.image\_tools module +-------------------------------------- .. automodule:: onadata.libs.utils.image_tools - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.libs.utils.log module ----------------------------- .. 
automodule:: onadata.libs.utils.log - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.utils.logger_tools module --------------------------------------- +onadata.libs.utils.logger\_tools module +--------------------------------------- .. automodule:: onadata.libs.utils.logger_tools - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.libs.utils.middleware module ------------------------------------ .. automodule:: onadata.libs.utils.middleware - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.utils.model_tools module -------------------------------------- +onadata.libs.utils.model\_tools module +-------------------------------------- .. automodule:: onadata.libs.utils.model_tools - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.libs.utils.mongo module ------------------------------- .. automodule:: onadata.libs.utils.mongo - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.libs.utils.numeric module --------------------------------- .. automodule:: onadata.libs.utils.numeric - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.utils.openid\_connect\_tools module +------------------------------------------------ + +.. automodule:: onadata.libs.utils.openid_connect_tools + :members: + :undoc-members: + :show-inheritance: + +onadata.libs.utils.organization\_utils module +--------------------------------------------- + +.. automodule:: onadata.libs.utils.organization_utils + :members: + :undoc-members: + :show-inheritance: onadata.libs.utils.osm module ----------------------------- .. 
automodule:: onadata.libs.utils.osm - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.utils.profiler module ----------------------------------- - -.. automodule:: onadata.libs.utils.profiler - :members: - :undoc-members: - :show-inheritance: - -onadata.libs.utils.project_utils module ---------------------------------------- +onadata.libs.utils.project\_utils module +---------------------------------------- .. automodule:: onadata.libs.utils.project_utils - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.libs.utils.qrcode module -------------------------------- .. automodule:: onadata.libs.utils.qrcode - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.utils.quick_converter module ------------------------------------------ +onadata.libs.utils.quick\_converter module +------------------------------------------ .. automodule:: onadata.libs.utils.quick_converter - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.libs.utils.string module -------------------------------- .. automodule:: onadata.libs.utils.string - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: onadata.libs.utils.timing module -------------------------------- .. automodule:: onadata.libs.utils.timing - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.utils.user_auth module ------------------------------------ +onadata.libs.utils.user\_auth module +------------------------------------ .. 
automodule:: onadata.libs.utils.user_auth - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: -onadata.libs.utils.viewer_tools module --------------------------------------- +onadata.libs.utils.validators module +------------------------------------ -.. automodule:: onadata.libs.utils.viewer_tools - :members: - :undoc-members: - :show-inheritance: +.. automodule:: onadata.libs.utils.validators + :members: + :undoc-members: + :show-inheritance: +onadata.libs.utils.viewer\_tools module +--------------------------------------- + +.. automodule:: onadata.libs.utils.viewer_tools + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. automodule:: onadata.libs.utils - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.rst b/docs/onadata.rst index 5186d58103..6cd6c51088 100644 --- a/docs/onadata.rst +++ b/docs/onadata.rst @@ -5,35 +5,26 @@ Subpackages ----------- .. toctree:: + :maxdepth: 4 - onadata.apps - onadata.libs - onadata.settings + onadata.apps + onadata.libs Submodules ---------- -onadata.celery module ---------------------- - -.. automodule:: onadata.celery - :members: - :undoc-members: - :show-inheritance: - -onadata.devwsgi module ----------------------- - -.. automodule:: onadata.devwsgi - :members: - :undoc-members: - :show-inheritance: +onadata.celeryapp module +------------------------ +.. automodule:: onadata.celeryapp + :members: + :undoc-members: + :show-inheritance: Module contents --------------- .. 
automodule:: onadata - :members: - :undoc-members: - :show-inheritance: + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/onadata.settings.rst b/docs/onadata.settings.rst deleted file mode 100644 index 8e11380a74..0000000000 --- a/docs/onadata.settings.rst +++ /dev/null @@ -1,78 +0,0 @@ -onadata.settings package -======================== - -Submodules ----------- - -onadata.settings.common module ------------------------------- - -.. automodule:: onadata.settings.common - :members: - :undoc-members: - :show-inheritance: - -onadata.settings.debug_toolbar_settings module ----------------------------------------------- - -.. automodule:: onadata.settings.debug_toolbar_settings - :members: - :undoc-members: - :show-inheritance: - -onadata.settings.default_settings module ----------------------------------------- - -.. automodule:: onadata.settings.default_settings - :members: - :undoc-members: - :show-inheritance: - -onadata.settings.drone_test module ----------------------------------- - -.. automodule:: onadata.settings.drone_test - :members: - :undoc-members: - :show-inheritance: - -onadata.settings.local_settings module --------------------------------------- - -.. automodule:: onadata.settings.local_settings - :members: - :undoc-members: - :show-inheritance: - -onadata.settings.production_example module ------------------------------------------- - -.. automodule:: onadata.settings.production_example - :members: - :undoc-members: - :show-inheritance: - -onadata.settings.staging_example module ---------------------------------------- - -.. automodule:: onadata.settings.staging_example - :members: - :undoc-members: - :show-inheritance: - -onadata.settings.travis_test module ------------------------------------ - -.. automodule:: onadata.settings.travis_test - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. 
automodule:: onadata.settings - :members: - :undoc-members: - :show-inheritance: diff --git a/onadata/apps/main/management/commands/remove_odk_prefix.py b/onadata/apps/main/management/commands/remove_odk_prefix.py index 093b51bff4..49645c929b 100644 --- a/onadata/apps/main/management/commands/remove_odk_prefix.py +++ b/onadata/apps/main/management/commands/remove_odk_prefix.py @@ -1,16 +1,26 @@ +# -*- coding: utf-8 -*- +""" +remove_odk_prefix - removes the odk prefix from logger and viewer apps. +""" from django.core.management.base import BaseCommand from django.db import connection from django.utils.translation import gettext_lazy class Command(BaseCommand): - help = gettext_lazy("Remove from logger and viewer apps") + """ + remove_odk_prefix - removes the odk prefix from logger and viewer apps. + """ - option_list = BaseCommand.option_list + help = gettext_lazy("Remove from logger and viewer apps") def handle(self, *args, **kwargs): cursor = connection.cursor() - cursor.execute('UPDATE south_migrationhistory SET app_name=%s WHERE ' - 'app_name=%s', ['logger', 'odk_logger']) - cursor.execute('UPDATE south_migrationhistory SET app_name=%s WHERE ' - 'app_name=%s', ['viewer', 'odk_viewer']) + cursor.execute( + "UPDATE south_migrationhistory SET app_name=%s WHERE " "app_name=%s", + ["logger", "odk_logger"], + ) + cursor.execute( + "UPDATE south_migrationhistory SET app_name=%s WHERE " "app_name=%s", + ["viewer", "odk_viewer"], + ) diff --git a/onadata/apps/main/tests/test_service_health.py b/onadata/apps/main/tests/test_service_health.py index b97dc295f9..c6e1c4918a 100644 --- a/onadata/apps/main/tests/test_service_health.py +++ b/onadata/apps/main/tests/test_service_health.py @@ -1,10 +1,12 @@ +# -*- coding: utf-8 -*- import json + from django.http import HttpRequest from django.test import override_settings +import onadata from onadata.apps.main.tests.test_base import TestBase from onadata.apps.main.views import service_health -import onadata class 
TestServiceHealthView(TestBase): @@ -12,8 +14,9 @@ def test_service_health(self): """ Test that the `service_health` view function works as expected: - 1. Returns a 200 when secondary services are healthy - 2. Returns a 500 when a secondary service is not available + + 1. Returns a 200 when secondary services are healthy + 2. Returns a 500 when a secondary service is not available """ req = HttpRequest() resp = service_health(req) From e796d938342fb82c067f678598a63564d42f84fd Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Thu, 20 Jun 2024 18:05:30 +0300 Subject: [PATCH 243/270] Remove devwsgi.py - no longer relevant --- onadata/devwsgi.py | 27 --------------------------- 1 file changed, 27 deletions(-) delete mode 100644 onadata/devwsgi.py diff --git a/onadata/devwsgi.py b/onadata/devwsgi.py deleted file mode 100644 index d84bdb0830..0000000000 --- a/onadata/devwsgi.py +++ /dev/null @@ -1,27 +0,0 @@ -""" -WSGI config - -It exposes the WSGI callable as a module-level variable named ``application``. 
- -For more information on this file, see -https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/ -""" - -import os - -from django.core.wsgi import get_wsgi_application -from django.utils import autoreload - -import uwsgi # pylint: disable=import-error -from uwsgidecorators import timer - -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "onadata.settings.common") - -application = get_wsgi_application() - - -@timer(3) -def change_code_gracefull_reload(sig): # pylint: disable=unused-argument - """Reload uWSGI whenever the code changes""" - if autoreload.file_changed: - uwsgi.reload() From b2b78ca89351e18dab3afb922f3df51910047c39 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Thu, 20 Jun 2024 18:25:22 +0300 Subject: [PATCH 244/270] Remove unused test - surveyor_manager is no longer in use --- .../viewer/tests/surveyor_registration.py | 113 ------------------ 1 file changed, 113 deletions(-) delete mode 100644 onadata/apps/viewer/tests/surveyor_registration.py diff --git a/onadata/apps/viewer/tests/surveyor_registration.py b/onadata/apps/viewer/tests/surveyor_registration.py deleted file mode 100644 index 3afe5ae0cc..0000000000 --- a/onadata/apps/viewer/tests/surveyor_registration.py +++ /dev/null @@ -1,113 +0,0 @@ -from datetime import datetime - -from django.test import TestCase -from surveyor_manager.models import Surveyor - -from onadata.apps.logger.factory import XFormManagerFactory -from onadata.apps.logger.models import XForm - -xform_factory = XFormManagerFactory() - - -class TestSurveyorRegistration(TestCase): - def setUp(self): - [xf.delete() for xf in XForm.objects.all()] - self.xf = xform_factory.create_registration_xform() - - def tearDown(self): - self.xf.delete() - - def test_registration_form_loaded(self): - registration_forms = XForm.objects.filter(title=u"registration") - self.assertTrue(len(registration_forms) > 0) - - def test_registration_creates_surveyor(self): - xform_factory.create_registration_instance({ - u'start': datetime.now(), 
u'name': u'Steak Sauce', - u'device_id': u'12345'}) - - self.assertEqual(Surveyor.objects.count(), 1) - self.assertEqual(Surveyor.objects.all()[0].name, u"Steak Sauce") - - def test_multiple_registrations_on_the_same_phone(self): - """ - Two users registered to phone '12345'. - 1: Betty (hour 1) - 2: Alex (hour 2) - One submission: - 1. WaterSimple (hour 3) - - Submission should be attributed to "Alex Adams" - """ - xform_factory.create_simple_xform() - - now = datetime.now() - ordered_times = [datetime(now.year, now.month, now.day, 1), - datetime(now.year, now.month, now.day, 2), - datetime(now.year, now.month, now.day, 3)] - - xform_factory.create_registration_instance({ - u'start': ordered_times[0], u'name': u'Betty Bimbob', - u'sex': u'female', u'birth_date': u'1970-07-07', - u'device_id': u'12345'}) - - xform_factory.create_registration_instance({ - u'start': ordered_times[1], u'name': u'Alex Adams', - u'birth_date': u'1986-08-15', u'device_id': u'12345'}) - - self.assertTrue(Surveyor.objects.count(), 2) - - submission = xform_factory.create_simple_instance( - {u'start': ordered_times[2]}) - - self.assertTrue(submission.parsed_instance.surveyor is not None) - self.assertEqual(submission.parsed_instance.surveyor.name, - u'Alex Adams') - - def test_multiple_submissions_out_of_order(self): - """ - Two users registered to phone '12345'. - User Submission - -- -- - 1: user_one (named Betty, hour 1) - 2. submission_one # hour 2 - should be attributed to betty - 3: user_two (named Alex, hour 3) - 4. submission_two # hour 4 - should be attributed to alex - Registrations performed in order, - Submissions entered out of order. 
- """ - xform_factory.create_simple_xform() - - now = datetime.now() - ordered_times = [datetime(now.year, now.month, now.day, 1), - datetime(now.year, now.month, now.day, 2), - datetime(now.year, now.month, now.day, 3), - datetime(now.year, now.month, now.day, 4)] - - xform_factory.create_registration_instance({ - u'form_id': self.xf.id_string, u'start': ordered_times[0], - u'name': u'Betty Bimbob', u'sex': u'female', - u'birth_date': u'1970-07-07', u'device_id': u'12345'}) - - xform_factory.create_registration_instance({ - u'form_id': self.xf.id_string, u'start': ordered_times[2], - u'name': u'Alex Adams', u'birth_date': u'1986-08-15', - u'device_id': u'12345'}) - - self.assertTrue(Surveyor.objects.count(), 2) - - # submissions are sometimes parsed out of order, so we are saving the - # 2nd submission first - submission_two = xform_factory.create_simple_instance({ - u'start': ordered_times[3]}) - - submission_one = xform_factory.create_simple_instance({ - u'start': ordered_times[1]}) - - self.assertEqual(submission_one.parsed_instance.phone.imei, u"12345") - self.assertEqual(submission_one.parsed_instance.start_time, - ordered_times[1]) - self.assertEqual(submission_one.parsed_instance.surveyor.name, - u'Betty Bimbob') - self.assertEqual(submission_two.parsed_instance.surveyor.name, - u'Alex Adams') From 8be038462b87190518f8653fd6c72c29573bf903 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Thu, 20 Jun 2024 19:15:25 +0300 Subject: [PATCH 245/270] Remove skipped test --- .../apps/logger/tests/test_form_submission.py | 31 ------------------- 1 file changed, 31 deletions(-) diff --git a/onadata/apps/logger/tests/test_form_submission.py b/onadata/apps/logger/tests/test_form_submission.py index 16c2d6b7ad..7e7328c3ba 100644 --- a/onadata/apps/logger/tests/test_form_submission.py +++ b/onadata/apps/logger/tests/test_form_submission.py @@ -13,7 +13,6 @@ from django_digest.test import Client as DigestClient from django_digest.test import DigestAuth from 
guardian.shortcuts import assign_perm -from nose import SkipTest from onadata.apps.logger.models import Instance from onadata.apps.logger.models.instance import InstanceHistory @@ -163,36 +162,6 @@ def test_submission_to_require_auth_without_perm(self): self.assertEqual(self.response.status_code, 403) - def test_submission_to_require_auth_with_perm(self): - """ - Test submission to a private form by non-owner is forbidden. - - TODO send authentication challenge when xform.require_auth is set. - This is non-trivial because we do not know the xform until we have - parsed the XML. - """ - raise SkipTest - - self.xform.require_auth = True - self.xform.save() - self.xform.refresh_from_db() - self.assertTrue(self.xform.require_auth) - - # create a new user - username = "alice" - alice = self._create_user(username, username) - - # assign report perms to user - assign_perm("report_xform", alice, self.xform) - auth = DigestAuth(username, username) - - xml_submission_file_path = os.path.join( - os.path.dirname(os.path.abspath(__file__)), - "../fixtures/tutorial/instances/tutorial_2012-06-27_11-27-53.xml", - ) - self._make_submission(xml_submission_file_path, auth=auth) - self.assertEqual(self.response.status_code, 201) - def test_form_post_to_missing_form(self): xml_submission_file_path = os.path.join( os.path.dirname(os.path.abspath(__file__)), From 452f9727d55ef1118ad9d22ad9e52045de865a20 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Thu, 20 Jun 2024 19:19:41 +0300 Subject: [PATCH 246/270] Use unnittest.skipTest() instead of one from nose --- onadata/apps/main/tests/test_form_enter_data.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/onadata/apps/main/tests/test_form_enter_data.py b/onadata/apps/main/tests/test_form_enter_data.py index 0eb2b18186..a7963213a0 100644 --- a/onadata/apps/main/tests/test_form_enter_data.py +++ b/onadata/apps/main/tests/test_form_enter_data.py @@ -9,7 +9,6 @@ from django.urls import reverse from 
six.moves.urllib.parse import urlparse from httmock import HTTMock, urlmatch -from nose import SkipTest from onadata.apps.logger.views import enter_data from onadata.apps.main.models import MetaData @@ -72,7 +71,7 @@ def _running_enketo(self, check_url=False): def test_enketo_remote_server(self): if not self._running_enketo(): - raise SkipTest + self.skipTest("Requires Enketo server to be running.") with HTTMock(enketo_mock): server_url = "https://testserver.com/bob" form_id = "test_%s" % re.sub(re.compile("\."), "_", str(time())) # noqa @@ -82,7 +81,7 @@ def test_enketo_remote_server(self): def test_enketo_url_with_http_protocol_on_formlist(self): if not self._running_enketo(): - raise SkipTest + self.skipTest("Requires Enketo server to be running.") with HTTMock(enketo_mock_http): server_url = "http://testserver.com/bob" form_id = "test_%s" % re.sub(re.compile("\."), "_", str(time())) # noqa @@ -111,7 +110,7 @@ def test_qrcode_view_with_enketo_error(self): def test_enter_data_redir(self): if not self._running_enketo(): - raise SkipTest + self.skipTest("Requires Enketo server to be running.") with HTTMock(enketo_mock): factory = RequestFactory() request = factory.get("/") @@ -145,7 +144,7 @@ def test_public_with_link_to_share_toggle_on(self): response = self.anon.get(self.show_url) self.assertEqual(response.status_code, 302) if not self._running_enketo(): - raise SkipTest + self.skipTest("Requires Enketo server to be running.") with HTTMock(enketo_mock): factory = RequestFactory() request = factory.get("/") From 1a67ae515ecdd0fa5b464877dfe43c0d3dce3651 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Mon, 24 Jun 2024 16:04:48 +0300 Subject: [PATCH 247/270] docs cleanup --- docs/authentication.rst | 4 +- docs/data.rst | 63 +++++++++++----------- docs/messaging_stats.rst | 10 ++-- docs/metadata.rst | 2 +- docs/onadata.apps.logger.migrations.rst | 24 +++++++++ docs/onadata.apps.logger.rst | 8 +++ docs/onadata.apps.logger.tests.rst | 8 +++ docs/profiles.rst | 2 +- 
docs/user.rst | 6 +-- onadata/apps/api/tools.py | 3 +- onadata/apps/api/viewsets/data_viewset.py | 4 +- onadata/apps/api/viewsets/media_viewset.py | 50 ++++++++--------- 12 files changed, 108 insertions(+), 76 deletions(-) diff --git a/docs/authentication.rst b/docs/authentication.rst index 478ea14b1d..32f41fcfc7 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -116,7 +116,7 @@ What happens: account that provides access. 2. redirection to the client application occurs, the url is of the form: - REDIRECT\_URI/?state=abc&code=YYYYYYYYY + REDIRECT_URI/?state=abc&code=YYYYYYYYY example redirect uri @@ -128,7 +128,7 @@ example redirect uri - ``state`` - same state string used during authorization request Your client application should use the ``code`` to request for an -access\_token. +access_token. 3. Request for access token. ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/docs/data.rst b/docs/data.rst index 9dcedd8860..3a1525ceb6 100644 --- a/docs/data.rst +++ b/docs/data.rst @@ -11,9 +11,7 @@ This endpoint provides access to submitted data in JSON format. Where: GET JSON List of data end points -------------------------------- -Lists the data endpoints accessible to requesting user, for anonymous access - -a list of public data endpoints is returned. +Lists the data endpoints accessible to requesting user, for anonymous access a list of public data endpoints is returned. .. raw:: html @@ -68,7 +66,7 @@ of records and the limit parameter to limit the number of records returned. :: - curl -X GET 'https://api.ona.io/api/v1/data/2?limit=2' + curl -X GET 'https://api.ona.io/api/v1/data/2?limit=2' .. raw:: html @@ -78,7 +76,7 @@ of records and the limit parameter to limit the number of records returned. 
:: - curl -X GET 'https://api.ona.io/api/v1/data/2?start=3&limit=4' + curl -X GET 'https://api.ona.io/api/v1/data/2?start=3&limit=4' Fetch XForm ODK data for all forms per account in `csv` format -------------------------------------------------------------- @@ -133,7 +131,7 @@ Example curl -X GET https://api.ona.io/api/v1/data?owner=ona GET JSON list of submitted data for a specific form ------------------------------------------- +--------------------------------------------------- Provides a JSON list of submitted data for a specific form. Note: Responses are automatically paginated when requesting a list of data that surpasses 10,000 records. @@ -201,6 +199,7 @@ GET XML list of submitted data for a specific form Provides an XML list of submitted data for a specific form. .. raw:: html +
     GET /api/v1/data/{pk}.xml
     
@@ -259,7 +258,7 @@ Response Get FLOIP flow results for a specific form ------------------------------------------ -Provides a list of rows of submitted data for a specific form. Each row contains 6 values as specified |FLOIPSubmissionAPI|. The data is accessed from the data endpoint by specifiying the header ``Accept: "application/vnd.org.flowinterop.results+json"``. +Provides a list of rows of submitted data for a specific form. Each row contains 6 values as specified |FLOIPSubmissionAPI|. The data is accessed from the data endpoint by specifying the header ``Accept: "application/vnd.org.flowinterop.results+json"``. .. |FLOIPSubmissionAPI| raw:: html @@ -570,11 +569,11 @@ Response Query submitted data of a specific form ---------------------------------------- -Use the `query` or `data` parameter to pass in a JSON key/value query. +Use the `query` or `data` parameter to pass in a JSON key/value query. When quering a date time field whose value is in ISO format such as ``2020-12-18T09:36:19.767455+00:00``, it is important to ensure the ``+`` (plus) is encoded to ``%2b``. -``+`` without encoding is parsed as whitespace. So ``2020-12-18T09:36:19.767455+00:00`` should be converted to ``2020-12-18T09:36:19.767455%2b00:00``. +``+`` without encoding is parsed as whitespace. So ``2020-12-18T09:36:19.767455+00:00`` should be converted to ``2020-12-18T09:36:19.767455%2b00:00``. 
Example I @@ -667,7 +666,7 @@ Query submissions with pending submission review status or NULL Example XII ^^^^^^^^^^^ -Query submissions with `NULL` submission review status +Query submissions with `NULL` submission review status :: @@ -968,7 +967,7 @@ Response ^^^^^^^^^ :: - HTTP 200 OK + HTTP 200 OK Get list of public data endpoints ---------------------------------- @@ -1382,27 +1381,27 @@ Response ^^^^^^^^^ :: -{ - "type": "Feature", - "geometry": { - "type": "GeometryCollection", - "geometries": [ - { - "type": "Point", - "coordinates": [ - 36.744421, - -1.29943 - ] - } - ] - }, - "properties": { - "id": 60549136, - "xform": 513322, - "_id": 60549136, - "_last_edited": null + { + "type": "Feature", + "geometry": { + "type": "GeometryCollection", + "geometries": [ + { + "type": "Point", + "coordinates": [ + 36.744421, + -1.29943 + ] + } + ] + }, + "properties": { + "id": 60549136, + "xform": 513322, + "_id": 60549136, + "_last_edited": null + } } -} **List all the geojson values for a given form with simplestyle-spec enabled and title prop set** @@ -1534,7 +1533,7 @@ Example ^^^^^^^^^ :: - curl -X GET https://api.ona.io/api/v1/data/28058.osm + curl -X GET https://api.ona.io/api/v1/data/28058.osm OSM endpoint with all osm files for a specific submission concatenated. ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/docs/messaging_stats.rst b/docs/messaging_stats.rst index 8d84f6b0c1..2117bb7d57 100644 --- a/docs/messaging_stats.rst +++ b/docs/messaging_stats.rst @@ -5,15 +5,15 @@ Provides a count of each unique messaging event grouped by either day, month or The endpoint accepts the following *required* query parameters: -* *group_by* - field specifying whether to group events by day, month or year. +* *group_by* - field specifying whether to group events by `day`, `month` or `year`. 
-* *target_type* - field to be used to determine the target object type i.e xform +* *target_type* - field to be used to determine the target object type i.e `xform`. -* *target_id* - field used to identify the target object. +* *target_id* - field used to identify the target object - e.g. for `XForm` this is the `id` field. -* *verb*: field used to filter returned responses by a specific verb +* *verb*: field used to filter returned responses by a specific verb. -* *timestamp*: used to filter by actions that occurred in a specific timeframe. This query parameter support date time lookups i.e `timestamp__day`, `timestamp__year +* *timestamp*: used to filter by actions that occurred in a specific timeframe. This query parameter support date time lookups i.e `timestamp__day`, `timestamp__year`. Example ^^^^^^^^ diff --git a/docs/metadata.rst b/docs/metadata.rst index d9cb178294..5c4e2940ab 100644 --- a/docs/metadata.rst +++ b/docs/metadata.rst @@ -226,7 +226,7 @@ Link XForm or Dataview as a media example: Link XForm as a GeoJSON media attachment example: -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ :: diff --git a/docs/onadata.apps.logger.migrations.rst b/docs/onadata.apps.logger.migrations.rst index d356fb1284..6251298f82 100644 --- a/docs/onadata.apps.logger.migrations.rst +++ b/docs/onadata.apps.logger.migrations.rst @@ -244,6 +244,14 @@ onadata.apps.logger.migrations.0015\_entity\_entitylist\_followupform\_registrat :undoc-members: :show-inheritance: +onadata.apps.logger.migrations.0016\_add\_entity\_entity\_list module +--------------------------------------------------------------------- + +.. 
automodule:: onadata.apps.logger.migrations.0016_add_entity_entity_list + :members: + :undoc-members: + :show-inheritance: + onadata.apps.logger.migrations.0016\_widget\_aggregation module --------------------------------------------------------------- @@ -260,6 +268,14 @@ onadata.apps.logger.migrations.0017\_auto\_20160224\_0130 module :undoc-members: :show-inheritance: +onadata.apps.logger.migrations.0017\_populate\_entity\_entity\_list module +-------------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0017_populate_entity_entity_list + :members: + :undoc-members: + :show-inheritance: + onadata.apps.logger.migrations.0018\_auto\_20160301\_0330 module ---------------------------------------------------------------- @@ -268,6 +284,14 @@ onadata.apps.logger.migrations.0018\_auto\_20160301\_0330 module :undoc-members: :show-inheritance: +onadata.apps.logger.migrations.0018\_entityhistory\_entitylistgroupobjectpermission\_and\_more module +----------------------------------------------------------------------------------------------------- + +.. automodule:: onadata.apps.logger.migrations.0018_entityhistory_entitylistgroupobjectpermission_and_more + :members: + :undoc-members: + :show-inheritance: + onadata.apps.logger.migrations.0019\_auto\_20160307\_0256 module ---------------------------------------------------------------- diff --git a/docs/onadata.apps.logger.rst b/docs/onadata.apps.logger.rst index 48e53ffb71..6aa1091ca0 100644 --- a/docs/onadata.apps.logger.rst +++ b/docs/onadata.apps.logger.rst @@ -56,6 +56,14 @@ onadata.apps.logger.signals module :undoc-members: :show-inheritance: +onadata.apps.logger.tasks module +-------------------------------- + +.. 
automodule:: onadata.apps.logger.tasks + :members: + :undoc-members: + :show-inheritance: + onadata.apps.logger.views module -------------------------------- diff --git a/docs/onadata.apps.logger.tests.rst b/docs/onadata.apps.logger.tests.rst index cf1f18ccb8..9d38135f87 100644 --- a/docs/onadata.apps.logger.tests.rst +++ b/docs/onadata.apps.logger.tests.rst @@ -117,6 +117,14 @@ onadata.apps.logger.tests.test\_simple\_submission module :undoc-members: :show-inheritance: +onadata.apps.logger.tests.test\_tasks module +-------------------------------------------- + +.. automodule:: onadata.apps.logger.tests.test_tasks + :members: + :undoc-members: + :show-inheritance: + onadata.apps.logger.tests.test\_transfer\_project\_command module ----------------------------------------------------------------- diff --git a/docs/profiles.rst b/docs/profiles.rst index 86449d37b0..c85174313f 100644 --- a/docs/profiles.rst +++ b/docs/profiles.rst @@ -5,7 +5,7 @@ Register a new User ------------------- ``username, email, first_name`` Are required fields. \ ``username`` may -contain alphanumeric, \_, @, +, . and - characters +contain alphanumeric, _, @, +, . and - characters .. raw:: html diff --git a/docs/user.rst b/docs/user.rst index c94d0355fc..d99740f570 100644 --- a/docs/user.rst +++ b/docs/user.rst @@ -53,7 +53,7 @@ Request password reset - Sends an email to the user’s email with a url that redirects to a reset password form on the API consumer’s website. - ``email`` and ``reset_url`` are expected in the POST payload ``email_subject`` is optional. -- Expected reset\_url format is ``reset_url=https:/domain/path/to/reset/form``. +- Expected reset_url format is ``reset_url=https:/domain/path/to/reset/form``. - Example of reset url sent to user’s email is ``http://mydomain.com/reset_form?uid=Mg&token=2f3f334g3r3434&username=dXNlcg==``. 
- ``uid`` is the users ``unique key`` which is a base64 encoded integer value that can be used to access the users info at ``/api/v1/users/`` or ``/api/v1/profiles/``. You can retrieve the integer value in ``javascript`` using the ``window.atob();`` function. ``username`` is a base64 encoded value of the user’s username - ``token`` is a onetime use token that allows password reset @@ -63,7 +63,7 @@ Example :: - curl -X POST -d email=demouser@mail.com -d reset\_url=http://example-url.com/reset https://api.ona.io/api/v1/user/reset -d email_subject="Reset password requested" + curl -X POST -d email=demouser@mail.com -d reset_url=http://example-url.com/reset https://api.ona.io/api/v1/user/reset -d email_subject="Reset password requested" Response -------- @@ -90,7 +90,7 @@ Example :: - curl -X POST -d uid=Mg -d token=qndoi209jf02n4 -d new\_password=usernewpass https://api.ona.io/api/v1/user/reset + curl -X POST -d uid=Mg -d token=qndoi209jf02n4 -d new_password=usernewpass https://api.ona.io/api/v1/user/reset Response -------- diff --git a/onadata/apps/api/tools.py b/onadata/apps/api/tools.py index 599fd73e72..396ac234c3 100644 --- a/onadata/apps/api/tools.py +++ b/onadata/apps/api/tools.py @@ -285,8 +285,7 @@ def create_organization_project(organization, project_name, created_by): """Creates a project for a given organization :param organization: User organization :param project_name - :param created_by: User with permissions to create projects within the - organization + :param created_by: User with permissions to create projects within the organization :returns: a Project instance """ diff --git a/onadata/apps/api/viewsets/data_viewset.py b/onadata/apps/api/viewsets/data_viewset.py index 99617d5407..534ff61cd0 100644 --- a/onadata/apps/api/viewsets/data_viewset.py +++ b/onadata/apps/api/viewsets/data_viewset.py @@ -97,8 +97,8 @@ def get_data_and_form(kwargs): def delete_instance(instance, user): """ - Function that calls Instance.set_deleted and catches any exception 
that may - occur. + Function that calls Instance.set_deleted and catches any exception that may occur. + :param instance: :param user: :return: diff --git a/onadata/apps/api/viewsets/media_viewset.py b/onadata/apps/api/viewsets/media_viewset.py index 87f77b6050..741428c9ec 100644 --- a/onadata/apps/api/viewsets/media_viewset.py +++ b/onadata/apps/api/viewsets/media_viewset.py @@ -45,10 +45,8 @@ def retrieve(self, request, *args, **kwargs): Redirect to final attachment url param pk: the attachment id - query param filename: the filename of the associated attachment is - required and has to match - query param suffix: (optional) - specify small | medium | large to - return resized images. + query param filename: the filename of the associated attachment is required and has to match + query param suffix: (optional) - specify small | medium | large to return resized images. return HttpResponseRedirect: redirects to final image url """ @@ -57,39 +55,35 @@ def retrieve(self, request, *args, **kwargs): int(pk) except ValueError as exc: raise Http404() from exc - else: - filename = request.query_params.get("filename") - obj = self.get_object() + filename = request.query_params.get("filename") + obj = self.get_object() - if obj.media_file.name != filename: - raise Http404() + if obj.media_file.name != filename: + raise Http404() - url = None + url = None - if obj.mimetype.startswith("image"): - suffix = request.query_params.get("suffix") + if obj.mimetype.startswith("image"): + suffix = request.query_params.get("suffix") - if suffix: - if suffix in list(settings.THUMB_CONF): - try: - url = image_url(obj, suffix) - except Exception as e: - raise ParseError(e) from e - else: - raise Http404() + if suffix: + if suffix in list(settings.THUMB_CONF): + try: + url = image_url(obj, suffix) + except Exception as e: + raise ParseError(e) from e + else: + raise Http404() - if not url: - response = generate_media_download_url(obj) + if not url: + response = 
generate_media_download_url(obj) - return response + return response - return HttpResponseRedirect(url) - - raise Http404() + return HttpResponseRedirect(url) def list(self, request, *args, **kwargs): """ - Action NOT IMPLEMENTED, only needed because of the automatic url - routing in /api/v1/ + Action NOT IMPLEMENTED, only needed because of the automatic url routing in /api/v1/ """ return Response(data=[]) From e96480516d6e8c8d831e453e34df69ea15803105 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Mon, 24 Jun 2024 17:11:26 +0300 Subject: [PATCH 248/270] Update pylint configuration --- .pylintrc | 761 +++++++++++++++++++++++++++++++++++------------------- 1 file changed, 490 insertions(+), 271 deletions(-) diff --git a/.pylintrc b/.pylintrc index 76c33d3520..7a100fa0b3 100644 --- a/.pylintrc +++ b/.pylintrc @@ -1,108 +1,329 @@ -[MASTER] +[MAIN] + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Clear in-memory caches upon conclusion of linting. Useful if running pylint +# in a server-like mode. +clear-cache-post-run=no + +# Load and enable all available extensions. Use --list-extensions to see a list +# all available extensions. +#enable-all-extensions= + +# In error mode, messages with a category besides ERROR or FATAL are +# suppressed, and no reports are done by default. Error mode is compatible with +# disabling specific errors. +#errors-only= + +# Always return a 0 (non-error) status code, even if lint errors are found. +# This is primarily useful in continuous integration scripts. +#exit-zero= # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code +# run arbitrary code. 
extension-pkg-allow-list=ujson,lxml.etree -# Add files or directories to the blacklist. They should be base names, not -# paths. +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. (This is an alternative name to extension-pkg-allow-list +# for backward compatibility.) +extension-pkg-whitelist= + +# Return non-zero exit code if any of these messages/categories are detected, +# even if score is above --fail-under value. Syntax same as enable. Messages +# specified are enabled, while categories only check already-enabled messages. +fail-on= + +# Specify a score threshold under which the program will exit with error. +fail-under=10 + +# Interpret the stdin as a python script, whose filename needs to be passed as +# the module_or_package argument. +#from-stdin= + +# Files or directories to be skipped. They should be base names, not paths. ignore=CVS -# Add files or directories matching the regex patterns to the blacklist. The -# regex matches against base names, not paths. -ignore-patterns= +# Add files or directories matching the regular expressions patterns to the +# ignore-list. The regex matches against paths and can be in Posix or Windows +# format. Because '\\' represents the directory delimiter on Windows systems, +# it can't be used as an escape character. +ignore-paths= + +# Files or directories matching the regular expression patterns are skipped. +# The regex matches against base names, not paths. The default value ignores +# Emacs file locks +ignore-patterns=^\.# + +# List of module names for which member attributes should not be checked and +# will not be imported (useful for modules/projects where namespaces are +# manipulated during runtime and thus existing member attributes cannot be +# deduced by static analysis). It supports qualified module names, as well as +# Unix pattern matching. 
+ignored-modules= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). -init-hook="from pylint.config import find_pylintrc; import os, sys; sys.path.append(os.path.dirname(find_pylintrc()))" +#init-hook= -# Use multiple processes to speed up Pylint. +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use, and will cap the count on Windows to +# avoid hangs. jobs=1 -# List of plugins (as comma separated values of python modules names) to load, +# Control the amount of potential inferred values when inferring a single +# object. This can help the performance when dealing with large functions or +# complex, nested conditions. +limit-inference-results=100 + +# List of plugins (as comma separated values of python module names) to load, # usually to register additional checkers. load-plugins=pylint_django,pylint_celery # Pickle collected data for later comparisons. persistent=yes -# Specify a configuration file. -#rcfile= +# Resolve imports to .pyi stubs if available. May reduce no-member messages and +# increase not-an-iterable messages. +prefer-stubs=no + +# Minimum Python version to use for version dependent checks. Will default to +# the version used to run pylint. +py-version=3.12 + +# Discover python modules and packages in the file system subtree. +recursive=no + +# Add paths to the list of the source roots. Supports globbing patterns. The +# source root is an absolute path or a path relative to the current working +# directory used to determine a package namespace for modules located under the +# source root. +source-roots= + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. +suggestion-mode=yes # Allow loading of arbitrary C extensions. Extensions are imported into the # active Python interpreter and may run arbitrary code. 
unsafe-load-any-extension=no +# In verbose mode, extra non-checker-related info will be displayed. +#verbose= -[MESSAGES CONTROL] -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED -confidence= +[BASIC] -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once).You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use"--disable=all --enable=classes -# --disable=W" -disable=print-statement,parameter-unpacking,unpacking-in-except,old-raise-syntax,backtick,long-suffix,old-ne-operator,old-octal-literal,import-star-module-level,raw-checker-failed,bad-inline-option,locally-disabled,locally-enabled,file-ignored,suppressed-message,useless-suppression,deprecated-pragma,apply-builtin,basestring-builtin,buffer-builtin,cmp-builtin,coerce-builtin,execfile-builtin,file-builtin,long-builtin,raw_input-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,no-absolute-import,old-division,dict-iter-method,dict-view-method,next-method-called,metaclass-assignment,indexing-exception,raising-string,reload-builtin,oct-method,hex-method,nonzero-method,cmp-method,input-builtin,round-builtin,intern-builtin,unichr-builtin,map-builtin-not-iterating,zip-builtin-not-iterating,range-builtin-not-iterating,filter-builtin-not-iterating,using-cmp-argument,eq-without-hash,div-method,idiv-method,rdiv-method,exception-message-attribute,invalid-str-codec,sys-max-int,b
ad-python3-import,deprecated-string-function,deprecated-str-translate-call,too-few-public-methods,django-not-configured +# Naming style matching correct argument names. +argument-naming-style=snake_case -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable= +# Regular expression matching correct argument names. Overrides argument- +# naming-style. If left empty, argument names will be checked with the set +# naming style. +#argument-rgx= +# Naming style matching correct attribute names. +attr-naming-style=snake_case -[REPORTS] +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. If left empty, attribute names will be checked with the set naming +# style. +#attr-rgx= -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) +# Bad variable names which should always be refused, separated by a comma. +bad-names=foo, + bar, + baz, + toto, + tutu, + tata -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details -#msg-template= +# Bad variable names regexes, separated by a comma. If names match any regex, +# they will always be refused +bad-names-rgxs= -# Set the output format. 
Available formats are text, parseable, colorized, json -# and msvs (visual studio).You can also give a reporter class, eg -# mypackage.mymodule.MyReporterClass. -output-format=text +# Naming style matching correct class attribute names. +class-attribute-naming-style=any -# Tells whether to display a full report or only the messages -reports=no +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. If left empty, class attribute names will be checked +# with the set naming style. +#class-attribute-rgx= -# Activate the evaluation score. -score=yes +# Naming style matching correct class constant names. +class-const-naming-style=UPPER_CASE +# Regular expression matching correct class constant names. Overrides class- +# const-naming-style. If left empty, class constant names will be checked with +# the set naming style. +#class-const-rgx= -[REFACTORING] +# Naming style matching correct class names. +class-naming-style=PascalCase -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 +# Regular expression matching correct class names. Overrides class-naming- +# style. If left empty, class names will be checked with the set naming style. +#class-rgx= +# Naming style matching correct constant names. +const-naming-style=UPPER_CASE -[SIMILARITIES] +# Regular expression matching correct constant names. Overrides const-naming- +# style. If left empty, constant names will be checked with the set naming +# style. +#const-rgx= -# Ignore comments when computing similarities. -ignore-comments=yes +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 -# Ignore docstrings when computing similarities. -ignore-docstrings=yes +# Naming style matching correct function names. +function-naming-style=snake_case -# Ignore imports when computing similarities. -ignore-imports=no +# Regular expression matching correct function names. 
Overrides function- +# naming-style. If left empty, function names will be checked with the set +# naming style. +#function-rgx= -# Minimum lines number of a similarity. -min-similarity-lines=4 +# Good variable names which should always be accepted, separated by a comma. +good-names=i, + j, + k, + ex, + Run, + _ + +# Good variable names regexes, separated by a comma. If names match any regex, +# they will always be accepted +good-names-rgxs= + +# Include a hint for the correct naming format with invalid-name. +include-naming-hint=no + +# Naming style matching correct inline iteration names. +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. If left empty, inline iteration names will be checked +# with the set naming style. +#inlinevar-rgx= + +# Naming style matching correct method names. +method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style. If left empty, method names will be checked with the set naming style. +#method-rgx= + +# Naming style matching correct module names. +module-naming-style=snake_case + +# Regular expression matching correct module names. Overrides module-naming- +# style. If left empty, module names will be checked with the set naming style. +#module-rgx= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +# These decorators are taken in consideration only for invalid-name. +property-classes=abc.abstractproperty + +# Regular expression matching correct type alias names. 
If left empty, type +# alias names will be checked with the set naming style. +#typealias-rgx= + +# Regular expression matching correct type variable names. If left empty, type +# variable names will be checked with the set naming style. +#typevar-rgx= + +# Naming style matching correct variable names. +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style. If left empty, variable names will be checked with the set +# naming style. +#variable-rgx= + + +[CLASSES] + +# Warn about protected attribute access inside special methods +check-protected-access-in-special-methods=no + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp, + asyncSetUp, + __post_init__ + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + + +[DESIGN] + +# List of regular expressions of class ancestor names to ignore when counting +# public methods (see R0903) +exclude-too-few-public-methods= + +# List of qualified class names to ignore when counting class parents (see +# R0901) +ignored-parents= + +# Maximum number of arguments for function / method. +max-args=5 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Maximum number of boolean expressions in an if statement (see R0916). +max-bool-expr=5 + +# Maximum number of branch for function / method body. +max-branches=12 + +# Maximum number of locals for function / method body. +max-locals=15 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). 
+max-public-methods=20 + +# Maximum number of return / yield for function / method body. +max-returns=6 + +# Maximum number of statements in function / method body. +max-statements=50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when caught. +overgeneral-exceptions=builtins.BaseException,builtins.Exception [FORMAT] @@ -113,7 +334,7 @@ expected-line-ending-format= # Regexp for a line that is allowed to be longer than the limit. ignore-long-lines=^\s*(# )??$ -# Number of spaces of indent required inside a hanging or continued line. +# Number of spaces of indent required inside a hanging or continued line. indent-after-paren=4 # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 @@ -123,15 +344,9 @@ indent-string=' ' # Maximum number of characters on a single line. max-line-length=100 -# Maximum number of lines in a module +# Maximum number of lines in a module. max-module-lines=1000 -# List of optional constructs for which whitespace checking is disabled. `dict- -# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. -# `trailing-comma` allows a space between comma and closing bracket: (a, ). -# `empty-line` allows space-only lines. -no-space-check=trailing-comma,dict-separator - # Allow the body of a class to be on the same line as the declaration if body # contains single statement. single-line-class-stmt=no @@ -141,285 +356,289 @@ single-line-class-stmt=no single-line-if-stmt=no -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME,XXX,TODO - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid to define new builtins when possible. -additional-builtins= - -# Tells whether unused global variables should be treated as a violation. 
-allow-global-unused-variables=yes - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_,_cb - -# A regular expression matching the name of dummy variables (i.e. expectedly -# not used). -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore -ignored-argument-names=_.*|^ignored_|^unused_ - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. -redefining-builtins-modules=six.moves,future.builtins +[IMPORTS] +# List of modules that can be imported at any level, not just the top level +# one. +allow-any-import-level= -[SPELLING] +# Allow explicit reexports by alias from a package __init__. +allow-reexport-from-package=no -# Spelling dictionary name. Available dictionaries: none. To make it working -# install python-enchant package. -spelling-dict= +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no -# List of comma separated words that should not be checked. -spelling-ignore-words= +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules= -# A path to a file that contains private dictionary; one word per line. -spelling-private-dict-file= +# Output a graph (.gv or any supported image format) of external dependencies +# to the given file (report RP0402 must not be disabled). +ext-import-graph= -# Tells whether to store unknown words to indicated private dictionary in -# --spelling-private-dict-file option instead of raising a message. -spelling-store-unknown-words=no +# Output a graph (.gv or any supported image format) of all (i.e. internal and +# external) dependencies to the given file (report RP0402 must not be +# disabled). 
+import-graph= +# Output a graph (.gv or any supported image format) of internal dependencies +# to the given file (report RP0402 must not be disabled). +int-import-graph= -[TYPECHECK] +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members= +# Couples of modules and preferred modules, separated by a comma. +preferred-modules= -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes +[LOGGING] -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local +# The type of string formatting that logging methods do. `old` means using % +# formatting, `new` is for `{}` formatting. 
+logging-format-style=old -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis. It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules=logging -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 +[MESSAGES CONTROL] -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, +# UNDEFINED. +confidence=HIGH, + CONTROL_FLOW, + INFERENCE, + INFERENCE_FAILURE, + UNDEFINED +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). You can also use "--disable=all" to +# disable everything first and then re-enable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". 
+disable=raw-checker-failed, + bad-inline-option, + locally-disabled, + file-ignored, + suppressed-message, + useless-suppression, + deprecated-pragma, + use-symbolic-message-instead, + use-implicit-booleaness-not-comparison-to-string, + use-implicit-booleaness-not-comparison-to-zero -[BASIC] +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable= -# Naming hint for argument names -argument-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ -# Regular expression matching correct argument names -argument-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ +[METHOD_ARGS] -# Naming hint for attribute names -attr-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ +# List of qualified names (i.e., library.method) which require a timeout +# parameter e.g. 'requests.api.get,requests.api.post' +timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request -# Regular expression matching correct attribute names -attr-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ -# Bad variable names which should always be refused, separated by a comma -bad-names=foo,bar,baz,toto,tutu,tata +[MISCELLANEOUS] -# Naming hint for class attribute names -class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX, + TODO -# Regular expression matching correct class attribute names -class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ +# Regular expression of note tags to take in consideration. 
+notes-rgx= -# Naming hint for class names -class-name-hint=[A-Z_][a-zA-Z0-9]+$ -# Regular expression matching correct class names -class-rgx=[A-Z_][a-zA-Z0-9]+$ +[REFACTORING] -# Naming hint for constant names -const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 -# Regular expression matching correct constant names -const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=sys.exit,argparse.parse_error -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 +# Let 'consider-using-join' be raised when the separator to join on would be +# non-empty (resulting in expected fixes of the type: ``"- " + " - +# ".join(items)``) +suggest-join-with-non-empty-separator=yes -# Naming hint for function names -function-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ -# Regular expression matching correct function names -function-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ +[REPORTS] -# Good variable names which should always be accepted, separated by a comma -good-names=e,f,i,j,k,v,z,ex,Run,_ +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'fatal', 'error', 'warning', 'refactor', +# 'convention', and 'info' which contain the number of messages in each +# category, as well as 'statement' which is the total number of statements +# analyzed. This score is used by the global evaluation report (RP0004). 
+evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) -# Include a hint for the correct naming format with invalid-name -include-naming-hint=no +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +msg-template= -# Naming hint for inline iteration names -inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ +# Set the output format. Available formats are: text, parseable, colorized, +# json2 (improved json format), json (old json format) and msvs (visual +# studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +#output-format= -# Regular expression matching correct inline iteration names -inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ +# Tells whether to display a full report or only the messages. +reports=no -# Naming hint for method names -method-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ +# Activate the evaluation score. +score=yes -# Regular expression matching correct method names -method-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ -# Naming hint for module names -module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ +[SIMILARITIES] -# Regular expression matching correct module names -module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ +# Comments are removed from the similarity computation +ignore-comments=yes -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= +# Docstrings are removed from the similarity computation +ignore-docstrings=yes -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ +# Imports are removed from the similarity computation +ignore-imports=yes -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. 
-property-classes=abc.abstractproperty +# Signatures are removed from the similarity computation +ignore-signatures=yes -# Naming hint for variable names -variable-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ +# Minimum lines number of a similarity. +min-similarity-lines=4 -# Regular expression matching correct variable names -variable-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ +[SPELLING] -[LOGGING] +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 -# Logging modules to check that the string format arguments are in logging -# function parameter format -logging-modules=logging +# Spelling dictionary name. No available dictionaries : You need to install +# both the python package and the system dependency for enchant to work. +spelling-dict= +# List of comma separated words that should be considered directives if they +# appear at the beginning of a comment and should not be checked. +spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: -[IMPORTS] +# List of comma separated words that should not be checked. +spelling-ignore-words= -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no +# A path to a file that contains the private dictionary; one word per line. +spelling-private-dict-file= -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. 
+spelling-store-unknown-words=no -# Deprecated modules which should not be used, separated by a comma -deprecated-modules=regsub,TERMIOS,Bastion,rexec -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled) -ext-import-graph= +[STRING] -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled) -import-graph= +# This flag controls whether inconsistent-quotes generates a warning when the +# character used as a quote delimiter is used inconsistently within a module. +check-quote-consistency=no -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled) -int-import-graph= +# This flag controls whether the implicit-str-concat should generate a warning +# on implicit string concatenation in sequences defined over several lines. +check-str-concat-over-line-jumps=no -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant +[TYPECHECK] +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager -[CLASSES] +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__,__new__,setUp +# Tells whether to warn about missing members when the owner of the attribute +# is inferred to be None. +ignore-none=yes -# List of member names, which should be excluded from the protected access -# warning. 
-exclude-protected=_asdict,_fields,_replace,_source,_make +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls +# List of symbolic message names to ignore for Mixin members. +ignored-checks-for-mixins=no-member, + not-async-context-manager, + not-context-manager, + attribute-defined-outside-init -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=mcs +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes -[DESIGN] +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 -# Maximum number of arguments for function / method -max-args=5 +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 -# Maximum number of attributes for a class (see R0902). -max-attributes=7 +# Regex pattern to define which classes are considered mixins. 
+mixin-class-rgx=.*[Mm]ixin -# Maximum number of boolean expressions in a if statement -max-bool-expr=5 +# List of decorators that change the signature of a decorated function. +signature-mutators= -# Maximum number of branch for function / method body -max-branches=12 -# Maximum number of locals for function / method body -max-locals=15 +[VARIABLES] -# Maximum number of parents for a class (see R0901). -max-parents=7 +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid defining new builtins when possible. +additional-builtins= -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes -# Maximum number of return / yield for function / method body -max-returns=6 +# List of names allowed to shadow builtins +allowed-redefined-builtins= -# Maximum number of statements in function / method body -max-statements=50 +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 +# A regular expression matching the name of dummy variables (i.e. expected to +# not be used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ +# Argument names that match this expression will be ignored. +ignored-argument-names=_.*|^ignored_|^unused_ -[EXCEPTIONS] +# Tells whether we should check for unused import in __init__ files. +init-import=no -# Exceptions that will emit a warning when being caught. Defaults to -# "Exception" -overgeneral-exceptions=builtins.Exception +# List of qualified module names which can have objects that can redefine +# builtins. 
+redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io From 1b6c8c32e21aa189a5a932ce76c644d6b40808cb Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Mon, 1 Jul 2024 19:43:16 +0300 Subject: [PATCH 249/270] Add djangodocs.py for sphinx documentation --- docs/_ext/djangodocs.py | 412 ++++++++++++++++++ docs/conf.py | 4 +- docs/onadata.apps.logger.migrations.rst | 8 + .../api/viewsets/messaging_stats_viewset.py | 22 +- 4 files changed, 434 insertions(+), 12 deletions(-) create mode 100644 docs/_ext/djangodocs.py diff --git a/docs/_ext/djangodocs.py b/docs/_ext/djangodocs.py new file mode 100644 index 0000000000..5a59b01039 --- /dev/null +++ b/docs/_ext/djangodocs.py @@ -0,0 +1,412 @@ +""" +Sphinx plugins for Django documentation. +""" +import json +import os +import re + +from docutils import nodes +from docutils.parsers.rst import Directive +from docutils.statemachine import ViewList +from sphinx import addnodes +from sphinx import version_info as sphinx_version +from sphinx.builders.html import StandaloneHTMLBuilder +from sphinx.directives.code import CodeBlock +from sphinx.domains.std import Cmdoption +from sphinx.errors import ExtensionError +from sphinx.util import logging +from sphinx.util.console import bold +from sphinx.writers.html import HTMLTranslator + +logger = logging.getLogger(__name__) +# RE for option descriptions without a '--' prefix +simple_option_desc_re = re.compile(r"([-_a-zA-Z0-9]+)(\s*.*?)(?=,\s+(?:/|-|--)|$)") + + +def setup(app): + app.add_crossref_type( + directivename="setting", + rolename="setting", + indextemplate="pair: %s; setting", + ) + app.add_crossref_type( + directivename="templatetag", + rolename="ttag", + indextemplate="pair: %s; template tag", + ) + app.add_crossref_type( + directivename="templatefilter", + rolename="tfilter", + indextemplate="pair: %s; template filter", + ) + app.add_crossref_type( + directivename="fieldlookup", + rolename="lookup", + indextemplate="pair: %s; field lookup type", + 
) + app.add_object_type( + directivename="django-admin", + rolename="djadmin", + indextemplate="pair: %s; django-admin command", + parse_node=parse_django_admin_node, + ) + app.add_directive("django-admin-option", Cmdoption) + app.add_config_value("django_next_version", "0.0", True) + app.add_directive("versionadded", VersionDirective) + app.add_directive("versionchanged", VersionDirective) + app.add_builder(DjangoStandaloneHTMLBuilder) + app.set_translator("djangohtml", DjangoHTMLTranslator) + app.set_translator("json", DjangoHTMLTranslator) + app.add_node( + ConsoleNode, + html=(visit_console_html, None), + latex=(visit_console_dummy, depart_console_dummy), + man=(visit_console_dummy, depart_console_dummy), + text=(visit_console_dummy, depart_console_dummy), + texinfo=(visit_console_dummy, depart_console_dummy), + ) + app.add_directive("console", ConsoleDirective) + app.connect("html-page-context", html_page_context_hook) + app.add_role("default-role-error", default_role_error) + return {"parallel_read_safe": True} + + +class VersionDirective(Directive): + has_content = True + required_arguments = 1 + optional_arguments = 1 + final_argument_whitespace = True + option_spec = {} + + def run(self): + if len(self.arguments) > 1: + msg = """Only one argument accepted for directive '{directive_name}::'. 
+ Comments should be provided as content, + not as an extra argument.""".format( + directive_name=self.name + ) + raise self.error(msg) + + env = self.state.document.settings.env + ret = [] + node = addnodes.versionmodified() + ret.append(node) + + if self.arguments[0] == env.config.django_next_version: + node["version"] = "Development version" + else: + node["version"] = self.arguments[0] + + node["type"] = self.name + if self.content: + self.state.nested_parse(self.content, self.content_offset, node) + try: + env.get_domain("changeset").note_changeset(node) + except ExtensionError: + # Sphinx < 1.8: Domain 'changeset' is not registered + env.note_versionchange(node["type"], node["version"], node, self.lineno) + return ret + + +class DjangoHTMLTranslator(HTMLTranslator): + """ + Django-specific reST to HTML tweaks. + """ + + # Don't use border=1, which docutils does by default. + def visit_table(self, node): + self.context.append(self.compact_p) + self.compact_p = True + # Needed by Sphinx. + if sphinx_version >= (4, 3): + self._table_row_indices.append(0) + else: + self._table_row_index = 0 + self.body.append(self.starttag(node, "table", CLASS="docutils")) + + def depart_table(self, node): + self.compact_p = self.context.pop() + if sphinx_version >= (4, 3): + self._table_row_indices.pop() + self.body.append("\n") + + def visit_desc_parameterlist(self, node): + self.body.append("(") # by default sphinx puts around the "(" + self.optional_param_level = 0 + self.param_separator = node.child_text_separator + # Counts 'parameter groups' being either a required parameter, or a set + # of contiguous optional ones. + required_params = [ + isinstance(c, addnodes.desc_parameter) for c in node.children + ] + # How many required parameters are left. 
+ self.required_params_left = sum(required_params) + if sphinx_version < (7, 1): + self.first_param = 1 + else: + self.is_first_param = True + self.params_left_at_level = 0 + self.param_group_index = 0 + self.list_is_required_param = required_params + self.multi_line_parameter_list = False + + def depart_desc_parameterlist(self, node): + self.body.append(")") + + # + # Turn the "new in version" stuff (versionadded/versionchanged) into a + # better callout -- the Sphinx default is just a little span, + # which is a bit less obvious that I'd like. + # + # FIXME: these messages are all hardcoded in English. We need to change + # that to accommodate other language docs, but I can't work out how to make + # that work. + # + version_text = { + "versionchanged": "Changed in Django %s", + "versionadded": "New in Django %s", + } + + def visit_versionmodified(self, node): + self.body.append(self.starttag(node, "div", CLASS=node["type"])) + version_text = self.version_text.get(node["type"]) + if version_text: + title = "%s%s" % (version_text % node["version"], ":" if len(node) else ".") + self.body.append('%s ' % title) + + def depart_versionmodified(self, node): + self.body.append("\n") + + # Give each section a unique ID -- nice for custom CSS hooks + def visit_section(self, node): + old_ids = node.get("ids", []) + node["ids"] = ["s-" + i for i in old_ids] + node["ids"].extend(old_ids) + super().visit_section(node) + node["ids"] = old_ids + + +def parse_django_admin_node(env, sig, signode): + command = sig.split(" ")[0] + env.ref_context["std:program"] = command + title = "django-admin %s" % sig + signode += addnodes.desc_name(title, title) + return command + + +class DjangoStandaloneHTMLBuilder(StandaloneHTMLBuilder): + """ + Subclass to add some extra things we need. 
+ """ + + name = "djangohtml" + + def finish(self): + super().finish() + logger.info(bold("writing templatebuiltins.js...")) + xrefs = self.env.domaindata["std"]["objects"] + templatebuiltins = { + "ttags": [ + n + for ((t, n), (k, a)) in xrefs.items() + if t == "templatetag" and k == "ref/templates/builtins" + ], + "tfilters": [ + n + for ((t, n), (k, a)) in xrefs.items() + if t == "templatefilter" and k == "ref/templates/builtins" + ], + } + outfilename = os.path.join(self.outdir, "templatebuiltins.js") + with open(outfilename, "w") as fp: + fp.write("var django_template_builtins = ") + json.dump(templatebuiltins, fp) + fp.write(";\n") + + +class ConsoleNode(nodes.literal_block): + """ + Custom node to override the visit/depart event handlers at registration + time. Wrap a literal_block object and defer to it. + """ + + tagname = "ConsoleNode" + + def __init__(self, litblk_obj): + self.wrapped = litblk_obj + + def __getattr__(self, attr): + if attr == "wrapped": + return self.__dict__.wrapped + return getattr(self.wrapped, attr) + + +def visit_console_dummy(self, node): + """Defer to the corresponding parent's handler.""" + self.visit_literal_block(node) + + +def depart_console_dummy(self, node): + """Defer to the corresponding parent's handler.""" + self.depart_literal_block(node) + + +def visit_console_html(self, node): + """Generate HTML for the console directive.""" + if self.builder.name in ("djangohtml", "json") and node["win_console_text"]: + # Put a mark on the document object signaling the fact the directive + # has been used on it. + self.document._console_directive_used_flag = True + uid = node["uid"] + self.body.append( + """\ +
+ + + + +
\n""" + % {"id": uid} + ) + try: + self.visit_literal_block(node) + except nodes.SkipNode: + pass + self.body.append("
\n") + + self.body.append( + '
\n' % {"id": uid} + ) + win_text = node["win_console_text"] + highlight_args = {"force": True} + linenos = node.get("linenos", False) + + def warner(msg): + self.builder.warn(msg, (self.builder.current_docname, node.line)) + + highlighted = self.highlighter.highlight_block( + win_text, "doscon", warn=warner, linenos=linenos, **highlight_args + ) + self.body.append(highlighted) + self.body.append("
\n") + self.body.append("
\n") + raise nodes.SkipNode + else: + self.visit_literal_block(node) + + +class ConsoleDirective(CodeBlock): + """ + A reStructuredText directive which renders a two-tab code block in which + the second tab shows a Windows command line equivalent of the usual + Unix-oriented examples. + """ + + required_arguments = 0 + # The 'doscon' Pygments formatter needs a prompt like this. '>' alone + # won't do it because then it simply paints the whole command line as a + # gray comment with no highlighting at all. + WIN_PROMPT = r"...\> " + + def run(self): + def args_to_win(cmdline): + changed = False + out = [] + for token in cmdline.split(): + if token[:2] == "./": + token = token[2:] + changed = True + elif token[:2] == "~/": + token = "%HOMEPATH%\\" + token[2:] + changed = True + elif token == "make": + token = "make.bat" + changed = True + if "://" not in token and "git" not in cmdline: + out.append(token.replace("/", "\\")) + changed = True + else: + out.append(token) + if changed: + return " ".join(out) + return cmdline + + def cmdline_to_win(line): + if line.startswith("# "): + return "REM " + args_to_win(line[2:]) + if line.startswith("$ # "): + return "REM " + args_to_win(line[4:]) + if line.startswith("$ ./manage.py"): + return "manage.py " + args_to_win(line[13:]) + if line.startswith("$ manage.py"): + return "manage.py " + args_to_win(line[11:]) + if line.startswith("$ ./runtests.py"): + return "runtests.py " + args_to_win(line[15:]) + if line.startswith("$ ./"): + return args_to_win(line[4:]) + if line.startswith("$ python3"): + return "py " + args_to_win(line[9:]) + if line.startswith("$ python"): + return "py " + args_to_win(line[8:]) + if line.startswith("$ "): + return args_to_win(line[2:]) + return None + + def code_block_to_win(content): + bchanged = False + lines = [] + for line in content: + modline = cmdline_to_win(line) + if modline is None: + lines.append(line) + else: + lines.append(self.WIN_PROMPT + modline) + bchanged = True + if bchanged: + 
return ViewList(lines) + return None + + env = self.state.document.settings.env + self.arguments = ["console"] + lit_blk_obj = super().run()[0] + + # Only do work when the djangohtml HTML Sphinx builder is being used, + # invoke the default behavior for the rest. + if env.app.builder.name not in ("djangohtml", "json"): + return [lit_blk_obj] + + lit_blk_obj["uid"] = str(env.new_serialno("console")) + # Only add the tabbed UI if there is actually a Windows-specific + # version of the CLI example. + win_content = code_block_to_win(self.content) + if win_content is None: + lit_blk_obj["win_console_text"] = None + else: + self.content = win_content + lit_blk_obj["win_console_text"] = super().run()[0].rawsource + + # Replace the literal_node object returned by Sphinx's CodeBlock with + # the ConsoleNode wrapper. + return [ConsoleNode(lit_blk_obj)] + + +def html_page_context_hook(app, pagename, templatename, context, doctree): + # Put a bool on the context used to render the template. It's used to + # control inclusion of console-tabs.css and activation of the JavaScript. + # This way it's include only from HTML files rendered from reST files where + # the ConsoleDirective is used. + context["include_console_assets"] = getattr( + doctree, "_console_directive_used_flag", False + ) + + +def default_role_error( + name, rawtext, text, lineno, inliner, options=None, content=None +): + msg = ( + "Default role used (`single backticks`): %s. Did you mean to use two " + "backticks for ``code``, or miss an underscore for a `link`_ ?" 
% rawtext + ) + logger.warning(msg, location=(inliner.document.current_source, lineno)) + return [nodes.Text(text)], [] diff --git a/docs/conf.py b/docs/conf.py index d6b704b5f1..6e543fe64e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -14,7 +14,7 @@ import os import sys -from os.path import abspath, dirname +from os.path import abspath, dirname, join import django @@ -23,6 +23,7 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, abspath(dirname(dirname(__file__)))) sys.path.append(abspath("_themes")) +sys.path.append(abspath(join(dirname(__file__), "_ext"))) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "onadata.settings.common") @@ -35,6 +36,7 @@ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ + "djangodocs", "sphinx.ext.autodoc", "sphinx.ext.intersphinx", "sphinx.ext.todo", diff --git a/docs/onadata.apps.logger.migrations.rst b/docs/onadata.apps.logger.migrations.rst index 6251298f82..b28552f1f5 100644 --- a/docs/onadata.apps.logger.migrations.rst +++ b/docs/onadata.apps.logger.migrations.rst @@ -292,6 +292,14 @@ onadata.apps.logger.migrations.0018\_entityhistory\_entitylistgroupobjectpermiss :undoc-members: :show-inheritance: +onadata.apps.logger.migrations.0019\_alter\_project\_options\_and\_more module +------------------------------------------------------------------------------ + +.. 
automodule:: onadata.apps.logger.migrations.0019_alter_project_options_and_more + :members: + :undoc-members: + :show-inheritance: + onadata.apps.logger.migrations.0019\_auto\_20160307\_0256 module ---------------------------------------------------------------- diff --git a/onadata/apps/api/viewsets/messaging_stats_viewset.py b/onadata/apps/api/viewsets/messaging_stats_viewset.py index 429ec35c60..81077fa65e 100644 --- a/onadata/apps/api/viewsets/messaging_stats_viewset.py +++ b/onadata/apps/api/viewsets/messaging_stats_viewset.py @@ -1,6 +1,7 @@ """ API Endpoint implementation for Messaging statistics """ + import json from django.db.models import Count @@ -22,20 +23,19 @@ class MessagingStatsViewSet(mixins.ListModelMixin, viewsets.GenericViewSet): - """ - Provides a count of each unique messaging event grouped by either day, month - or year. + """Provides a count of each unique messaging event grouped by either day, month or + year. The endpoint accepts the following query parameters: - - `group_by`: field specifying whether to group events by `day`, `month` or `year` - - `target_type`: field to be used to determine the target - object type i.e xform, project - - `target_id`: field used to identify the target object - - `verb`: field used to filter returned responses by a specific verb - - `timestamp`: used to filter by actions that occurred in a specific timeframe. - This query parameter support date time lookups - i.e `timestamp__day`, `timestamp__year + - `group_by`: field specifying whether to group events by `day`, `month` or `year` + - `target_type`: field to be used to determine the target + object type i.e xform, project + - `target_id`: field used to identify the target object + - `verb`: field used to filter returned responses by a specific verb + - `timestamp`: used to filter by actions that occurred in a specific time. This + query parameter support date time lookups i.e `timestamp__day`, + `timestamp__year`. 
Example: From 330b2c65a92e80f61ec1ff93e1b6e0f023c5005a Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Tue, 2 Jul 2024 19:54:03 +0300 Subject: [PATCH 250/270] pylint cleanup --- .pylintrc | 13 +-- .../commands/assign_team_member_permission.py | 4 +- onadata/apps/api/tasks.py | 4 +- .../apps/api/viewsets/attachment_viewset.py | 15 ++-- onadata/apps/api/viewsets/media_viewset.py | 8 +- .../api/viewsets/messaging_stats_viewset.py | 2 +- .../apps/api/viewsets/user_profile_viewset.py | 4 +- .../apps/api/viewsets/v2/tableau_viewset.py | 8 +- onadata/apps/logger/admin.py | 2 +- onadata/apps/logger/factory.py | 8 +- onadata/apps/logger/import_tools.py | 10 +-- .../apps/logger/management/commands/add_id.py | 4 +- .../commands/change_s3_media_permissions.py | 4 +- .../management/commands/create_backup.py | 19 ++-- .../commands/create_image_thumbnails.py | 10 ++- .../management/commands/import_forms.py | 6 +- .../management/commands/import_tools.py | 4 +- .../management/commands/move_media_to_s3.py | 29 ++++--- .../set_xform_surveys_with_geopoints.py | 4 +- .../commands/set_xform_surveys_with_osm.py | 4 +- .../management/commands/update_xform_uuids.py | 10 ++- onadata/apps/logger/models/data_view.py | 9 +- onadata/apps/logger/models/osmdata.py | 4 +- onadata/apps/logger/xform_fs.py | 4 +- .../management/commands/remove_odk_prefix.py | 4 +- .../apps/main/tests/test_service_health.py | 3 + .../{RestServiceInterface.py => interface.py} | 2 +- onadata/apps/restservice/services/__init__.py | 6 +- onadata/apps/restservice/services/f2dhis2.py | 10 ++- .../apps/restservice/services/generic_json.py | 4 +- .../apps/restservice/services/generic_xml.py | 9 +- onadata/apps/restservice/services/textit.py | 17 +++- .../restservice/tests/test_restservice.py | 3 +- onadata/apps/restservice/utils.py | 6 +- onadata/apps/sms_support/parser.py | 30 ++++--- onadata/apps/viewer/models/data_dictionary.py | 22 ++--- onadata/apps/viewer/models/export.py | 38 ++++---- 
onadata/apps/viewer/parsed_instance_tools.py | 22 ++--- onadata/apps/viewer/tasks.py | 86 +++++++++---------- onadata/apps/viewer/xls_writer.py | 16 ++-- onadata/libs/baseviewset.py | 2 +- onadata/libs/data/query.py | 12 ++- onadata/libs/mixins/anonymous_user_mixin.py | 2 +- .../anonymous_user_public_forms_mixin.py | 2 +- .../libs/mixins/authenticate_header_mixin.py | 2 +- onadata/libs/mixins/bulk_create_mixin.py | 2 +- onadata/libs/mixins/cache_control_mixin.py | 3 +- onadata/libs/mixins/last_modified_mixin.py | 2 +- onadata/libs/mixins/multi_lookup_mixin.py | 2 +- onadata/libs/mixins/object_lookup_mixin.py | 2 +- onadata/libs/mixins/openrosa_headers_mixin.py | 2 +- onadata/libs/mixins/xform_id_string_lookup.py | 2 +- onadata/libs/models/__init__.py | 4 + onadata/libs/models/base_model.py | 8 ++ onadata/libs/models/sorting.py | 10 +-- onadata/libs/profiling/sql.py | 2 +- onadata/libs/renderers/renderers.py | 4 +- onadata/libs/serializers/data_serializer.py | 2 +- onadata/libs/serializers/entity_serializer.py | 6 ++ .../fields/instance_related_field.py | 4 +- onadata/libs/serializers/fields/json_field.py | 4 +- .../serializers/fields/organization_field.py | 8 +- .../libs/serializers/fields/project_field.py | 8 +- .../fields/project_related_field.py | 4 +- onadata/libs/serializers/floip_serializer.py | 6 +- .../libs/serializers/metadata_serializer.py | 19 ++-- .../organization_member_serializer.py | 23 +++-- .../project_invitation_serializer.py | 8 +- .../libs/serializers/project_serializer.py | 22 +++-- onadata/libs/serializers/stats_serializer.py | 21 +++-- onadata/libs/serializers/widget_serializer.py | 2 +- onadata/libs/test_utils/pyxform_test_case.py | 10 +-- onadata/libs/utils/api_export_tools.py | 24 +++--- onadata/libs/utils/async_status.py | 36 ++++++-- onadata/libs/utils/backup_tools.py | 14 +-- onadata/libs/utils/briefcase_client.py | 15 ++-- onadata/libs/utils/chart_tools.py | 28 +++--- onadata/libs/utils/common_tags.py | 4 + 
onadata/libs/utils/common_tools.py | 12 +-- onadata/libs/utils/decorators.py | 7 +- onadata/libs/utils/dict_tools.py | 79 ++++++++--------- onadata/libs/utils/email.py | 3 +- onadata/libs/utils/export_builder.py | 85 ++++++++++-------- onadata/libs/utils/export_tools.py | 28 +++--- onadata/libs/utils/gravatar.py | 2 +- onadata/libs/utils/image_tools.py | 10 +-- onadata/libs/utils/log.py | 16 ++-- onadata/libs/utils/logger_tools.py | 3 +- onadata/libs/utils/middleware.py | 13 +-- onadata/libs/utils/openid_connect_tools.py | 11 ++- onadata/libs/utils/organization_utils.py | 9 +- onadata/libs/utils/osm.py | 22 ++--- onadata/libs/utils/qrcode.py | 2 +- onadata/libs/utils/string.py | 6 +- onadata/libs/utils/validators.py | 11 ++- onadata/libs/utils/viewer_tools.py | 30 ++++--- 96 files changed, 623 insertions(+), 524 deletions(-) rename onadata/apps/restservice/{RestServiceInterface.py => interface.py} (76%) diff --git a/.pylintrc b/.pylintrc index 7a100fa0b3..77ccbbc6db 100644 --- a/.pylintrc +++ b/.pylintrc @@ -87,10 +87,6 @@ load-plugins=pylint_django,pylint_celery # Pickle collected data for later comparisons. persistent=yes -# Resolve imports to .pyi stubs if available. May reduce no-member messages and -# increase not-an-iterable messages. -prefer-stubs=no - # Minimum Python version to use for version dependent checks. Will default to # the version used to run pylint. py-version=3.12 @@ -433,9 +429,7 @@ disable=raw-checker-failed, suppressed-message, useless-suppression, deprecated-pragma, - use-symbolic-message-instead, - use-implicit-booleaness-not-comparison-to-string, - use-implicit-booleaness-not-comparison-to-zero + use-symbolic-message-instead # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option @@ -473,11 +467,6 @@ max-nested-blocks=5 # printed. 
never-returning-functions=sys.exit,argparse.parse_error -# Let 'consider-using-join' be raised when the separator to join on would be -# non-empty (resulting in expected fixes of the type: ``"- " + " - -# ".join(items)``) -suggest-join-with-non-empty-separator=yes - [REPORTS] diff --git a/onadata/apps/api/management/commands/assign_team_member_permission.py b/onadata/apps/api/management/commands/assign_team_member_permission.py index b2fe8d6a13..3972d2edd4 100644 --- a/onadata/apps/api/management/commands/assign_team_member_permission.py +++ b/onadata/apps/api/management/commands/assign_team_member_permission.py @@ -37,9 +37,9 @@ def handle(self, *args, **options): count += 1 total += 1 - except ObjectDoesNotExist as e: + except ObjectDoesNotExist as error: fail += 1 - self.stdout.write(str(e), ending="\n") + self.stdout.write(str(error), ending="\n") else: # Get all the teams for team in queryset_iterator( diff --git a/onadata/apps/api/tasks.py b/onadata/apps/api/tasks.py index ddbaca238a..f92fe65d67 100644 --- a/onadata/apps/api/tasks.py +++ b/onadata/apps/api/tasks.py @@ -190,7 +190,9 @@ def regenerate_form_instance_json(xform_id: int): safe_delete(cache_key) -class ShareProjectBaseTask(app.Task): +class ShareProjectBaseTask(app.Task): # pylint: disable=too-few-public-methods + """A Task base class for sharing a project.""" + autoretry_for = ( DatabaseError, ConnectionError, diff --git a/onadata/apps/api/viewsets/attachment_viewset.py b/onadata/apps/api/viewsets/attachment_viewset.py index 2a92b1ecd8..6552044712 100644 --- a/onadata/apps/api/viewsets/attachment_viewset.py +++ b/onadata/apps/api/viewsets/attachment_viewset.py @@ -34,8 +34,8 @@ def get_attachment_data(attachment, suffix): if suffix in list(settings.THUMB_CONF): image_url(attachment, suffix) suffix = settings.THUMB_CONF.get(suffix).get("suffix") - f = default_storage.open(get_path(attachment.media_file.name, suffix)) - return f.read() + media_file = 
default_storage.open(get_path(attachment.media_file.name, suffix)) + return media_file.read() return attachment.media_file.read() @@ -77,13 +77,12 @@ def retrieve(self, request, *args, **kwargs): suffix = request.query_params.get("suffix") try: data = get_attachment_data(self.object, suffix) - except IOError as e: - if str(e).startswith("File does not exist"): - raise Http404() from e + except IOError as error: + if str(error).startswith("File does not exist"): + raise Http404() from error - raise ParseError(e) from e - else: - return Response(data, content_type=self.object.mimetype) + raise ParseError(error) from error + return Response(data, content_type=self.object.mimetype) filename = request.query_params.get("filename") serializer = self.get_serializer(self.object) diff --git a/onadata/apps/api/viewsets/media_viewset.py b/onadata/apps/api/viewsets/media_viewset.py index 741428c9ec..d1057c865f 100644 --- a/onadata/apps/api/viewsets/media_viewset.py +++ b/onadata/apps/api/viewsets/media_viewset.py @@ -45,8 +45,9 @@ def retrieve(self, request, *args, **kwargs): Redirect to final attachment url param pk: the attachment id - query param filename: the filename of the associated attachment is required and has to match - query param suffix: (optional) - specify small | medium | large to return resized images. + query param filename: the filename of the attachment is required and must match + query param suffix: (optional) - specify small | medium | large to + return resized images. return HttpResponseRedirect: redirects to final image url """ @@ -84,6 +85,7 @@ def retrieve(self, request, *args, **kwargs): def list(self, request, *args, **kwargs): """ - Action NOT IMPLEMENTED, only needed because of the automatic url routing in /api/v1/ + Action NOT IMPLEMENTED. 
+ It is only needed because of the automatic URL routing in /api/v1/ """ return Response(data=[]) diff --git a/onadata/apps/api/viewsets/messaging_stats_viewset.py b/onadata/apps/api/viewsets/messaging_stats_viewset.py index 81077fa65e..1aeeb276af 100644 --- a/onadata/apps/api/viewsets/messaging_stats_viewset.py +++ b/onadata/apps/api/viewsets/messaging_stats_viewset.py @@ -28,7 +28,7 @@ class MessagingStatsViewSet(mixins.ListModelMixin, viewsets.GenericViewSet): The endpoint accepts the following query parameters: - - `group_by`: field specifying whether to group events by `day`, `month` or `year` + - `group_by`: field to group events by `day`, `month` or `year` - `target_type`: field to be used to determine the target object type i.e xform, project - `target_id`: field used to identify the target object diff --git a/onadata/apps/api/viewsets/user_profile_viewset.py b/onadata/apps/api/viewsets/user_profile_viewset.py index aa12ae157f..80aeae83b8 100644 --- a/onadata/apps/api/viewsets/user_profile_viewset.py +++ b/onadata/apps/api/viewsets/user_profile_viewset.py @@ -269,9 +269,9 @@ def change_password(self, request, *args, **kwargs): # noqa try: validate_password(new_password, user=user_profile.user) - except ValidationError as e: + except ValidationError as error: return Response( - data={"errors": e.messages}, status=status.HTTP_400_BAD_REQUEST + data={"errors": error.messages}, status=status.HTTP_400_BAD_REQUEST ) data = {"username": user_profile.user.username} diff --git a/onadata/apps/api/viewsets/v2/tableau_viewset.py b/onadata/apps/api/viewsets/v2/tableau_viewset.py index 81ebeace2d..025456136a 100644 --- a/onadata/apps/api/viewsets/v2/tableau_viewset.py +++ b/onadata/apps/api/viewsets/v2/tableau_viewset.py @@ -123,10 +123,10 @@ def unpack_repeat_data(repeat_data, flat_dict): cleaned_data = [] for data_dict in repeat_data: remove_keys = [] - for k, v in data_dict.items(): - if isinstance(v, list): - remove_keys.append(k) - flat_dict[k].extend(v) + for key, 
value in data_dict.items(): + if isinstance(value, list): + remove_keys.append(key) + flat_dict[key].extend(value) # pylint: disable=expression-not-assigned [data_dict.pop(k) for k in remove_keys] cleaned_data.append(data_dict) diff --git a/onadata/apps/logger/admin.py b/onadata/apps/logger/admin.py index d1a14f8cfd..76be89c99f 100644 --- a/onadata/apps/logger/admin.py +++ b/onadata/apps/logger/admin.py @@ -9,7 +9,7 @@ from onadata.apps.logger.models import Project, XForm -class FilterByUserMixin: +class FilterByUserMixin: # pylint: disable=too-few-public-methods """Filter queryset by ``request.user``.""" # A user should only see forms/projects that belong to him. diff --git a/onadata/apps/logger/factory.py b/onadata/apps/logger/factory.py index 9236a67ca8..c108aefa56 100644 --- a/onadata/apps/logger/factory.py +++ b/onadata/apps/logger/factory.py @@ -147,8 +147,8 @@ def get_registration_instance(self, custom_values=None): # pylint: disable=protected-access reg_instance._id = xform.id_string - for k, v in values.items(): - reg_instance.answer(name=k, value=v) + for key, value in values.items(): + reg_instance.answer(name=key, value=value) instance_xml = reg_instance.to_xml() @@ -198,8 +198,8 @@ def get_simple_instance(self, custom_values=None): water_simple_survey = _load_simple_survey_object() simple_survey = water_simple_survey.instantiate() - for k, v in values.items(): - simple_survey.answer(name=k, value=v) + for key, value in values.items(): + simple_survey.answer(name=key, value=value) # setting the id_string so that it doesn't end up # with the timestamp of the new survey object diff --git a/onadata/apps/logger/import_tools.py b/onadata/apps/logger/import_tools.py index 099ba76810..d7eddf8f93 100644 --- a/onadata/apps/logger/import_tools.py +++ b/onadata/apps/logger/import_tools.py @@ -39,11 +39,11 @@ def django_file(path, field_name, content_type): # http://groups.google.com/group/django-users/browse_thread/thread/ # 834f988876ff3c45/ # pylint: 
disable=consider-using-with - f = open(path, "rb") + a_file = open(path, "rb") return InMemoryUploadedFile( - file=f, + file=a_file, field_name=field_name, - name=f.name, + name=a_file.name, content_type=content_type, size=os.path.getsize(path), charset=None, @@ -142,8 +142,8 @@ def import_instances_from_zip(zipfile_path, user, status="zip"): try: with zipfile.ZipFile(zipfile_path) as zip_file: zip_file.extractall(temp_directory) - except zipfile.BadZipfile as e: - errors = [f"{e}"] + except zipfile.BadZipfile as error: + errors = [f"{error}"] return 0, 0, errors else: return import_instances_from_path(temp_directory, user, status) diff --git a/onadata/apps/logger/management/commands/add_id.py b/onadata/apps/logger/management/commands/add_id.py index f05434176f..cbf74cfb82 100644 --- a/onadata/apps/logger/management/commands/add_id.py +++ b/onadata/apps/logger/management/commands/add_id.py @@ -51,9 +51,9 @@ def add_id(self, user): instance.save() count += 1 # pylint: disable=broad-except - except Exception as e: + except Exception as error: failed += 1 - self.stdout.write(str(e), ending="\n") + self.stdout.write(str(error), ending="\n") self.stdout.write( f"Syncing for account {user.username}. Done. 
" diff --git a/onadata/apps/logger/management/commands/change_s3_media_permissions.py b/onadata/apps/logger/management/commands/change_s3_media_permissions.py index 1956f03447..a40a9ad4c3 100644 --- a/onadata/apps/logger/management/commands/change_s3_media_permissions.py +++ b/onadata/apps/logger/management/commands/change_s3_media_permissions.py @@ -32,8 +32,8 @@ def handle(self, *args, **kwargs): all_files = s3_storage.bucket.list() num = 0 - for i, f in enumerate(all_files): - f.set_acl(permission) + for i, a_file in enumerate(all_files): + a_file.set_acl(permission) if i % 1000 == 0: self.stdout.write(_(f"{i} file objects processed")) num = i diff --git a/onadata/apps/logger/management/commands/create_backup.py b/onadata/apps/logger/management/commands/create_backup.py index 9a05c5830f..99a047a4ca 100644 --- a/onadata/apps/logger/management/commands/create_backup.py +++ b/onadata/apps/logger/management/commands/create_backup.py @@ -24,24 +24,23 @@ def handle(self, *args, **options): # noqa C901 """Create a zip backup of a form and all its submissions.""" try: output_file = args[0] - except IndexError as e: + except IndexError as error: raise CommandError( _("Provide the path to the zip file to backup to") - ) from e - else: - output_file = os.path.realpath(output_file) + ) from error + output_file = os.path.realpath(output_file) try: username = args[1] - except IndexError as e: + except IndexError as error: raise CommandError( _("You must provide the username to publish the form to.") - ) from e + ) from error # make sure user exists try: user = get_user_model().objects.get(username=username) - except get_user_model().DoesNotExist as e: - raise CommandError(_(f"The user '{username}' does not exist.")) from e + except get_user_model().DoesNotExist as error: + raise CommandError(_(f"The user '{username}' does not exist.")) from error try: id_string = args[2] @@ -51,8 +50,8 @@ def handle(self, *args, **options): # noqa C901 # make sure xform exists try: xform = 
XForm.objects.get(user=user, id_string=id_string) - except XForm.DoesNotExist as e: + except XForm.DoesNotExist as error: raise CommandError( _(f"The id_string '{id_string}' does not exist.") - ) from e + ) from error create_zip_backup(output_file, user, xform) diff --git a/onadata/apps/logger/management/commands/create_image_thumbnails.py b/onadata/apps/logger/management/commands/create_image_thumbnails.py index eeb6b80575..10e1b508bc 100644 --- a/onadata/apps/logger/management/commands/create_image_thumbnails.py +++ b/onadata/apps/logger/management/commands/create_image_thumbnails.py @@ -48,17 +48,19 @@ def handle(self, *args, **options): username = options.get("username") try: user = User.objects.get(username=username) - except User.DoesNotExist as e: - raise CommandError(f"Error: username {username} does not exist") from e + except User.DoesNotExist as error: + raise CommandError( + f"Error: username {username} does not exist" + ) from error attachments_qs = attachments_qs.filter(instance__user=user) if options.get("id_string"): id_string = options.get("id_string") try: xform = XForm.objects.get(id_string=id_string) - except XForm.DoesNotExist as e: + except XForm.DoesNotExist as error: raise CommandError( f"Error: Form with id_string {id_string} does not exist" - ) from e + ) from error attachments_qs = attachments_qs.filter(instance__xform=xform) file_storage = get_storage_class( "django.core.files.storage.FileSystemStorage" diff --git a/onadata/apps/logger/management/commands/import_forms.py b/onadata/apps/logger/management/commands/import_forms.py index f33a592042..99bc18a357 100644 --- a/onadata/apps/logger/management/commands/import_forms.py +++ b/onadata/apps/logger/management/commands/import_forms.py @@ -25,5 +25,7 @@ def handle(self, *args, **kwargs): """Import a folder of XForms for ODK.""" path = args[0] for form in glob.glob(os.path.join(path, "*")): - with open(form, encoding="utf-8") as f: - XForm.objects.get_or_create(xml=f.read(), 
downloadable=False) + with open(form, encoding="utf-8") as xform_xml_file: + XForm.objects.get_or_create( + xml=xform_xml_file.read(), downloadable=False + ) diff --git a/onadata/apps/logger/management/commands/import_tools.py b/onadata/apps/logger/management/commands/import_tools.py index 49e46d37a6..f496b4d7d9 100644 --- a/onadata/apps/logger/management/commands/import_tools.py +++ b/onadata/apps/logger/management/commands/import_tools.py @@ -32,8 +32,8 @@ def handle(self, *args, **kwargs): username = args[1] try: user = get_user_model().objects.get(username=username) - except get_user_model().DoesNotExist as e: - raise CommandError(_(f"Invalid username {username}")) from e + except get_user_model().DoesNotExist as error: + raise CommandError(_(f"Invalid username {username}")) from error debug = False if debug: self.stdout.write(_(f"[Importing XForm Instances from {path}]\n")) diff --git a/onadata/apps/logger/management/commands/move_media_to_s3.py b/onadata/apps/logger/management/commands/move_media_to_s3.py index a123cc1e5d..e79d9cde74 100644 --- a/onadata/apps/logger/management/commands/move_media_to_s3.py +++ b/onadata/apps/logger/management/commands/move_media_to_s3.py @@ -46,22 +46,29 @@ def handle(self, *args, **kwargs): for cls, file_field, upload_to in classes_to_move: self.stdout.write(_("Moving %(class)ss to s3...") % {"class": cls.__name__}) for i in cls.objects.all(): - f = getattr(i, file_field) - old_filename = f.name + media_file = getattr(i, file_field) + old_filename = media_file.name if ( - f.name - and local_fs.exists(f.name) - and not s3_fs.exists(upload_to(i, f.name)) + old_filename + and local_fs.exists(old_filename) + and not s3_fs.exists(upload_to(i, old_filename)) ): - f.save(local_fs.path(f.name), local_fs.open(local_fs.path(f.name))) + media_file.name.save( + local_fs.path(old_filename), + local_fs.open(local_fs.path(old_filename)), + ) self.stdout.write( _("\t+ '%(fname)s'\n\t---> '%(url)s'") - % {"fname": local_fs.path(old_filename), 
"url": f.url} + % { + "fname": local_fs.path(old_filename), + "url": media_file.url, + } ) else: - exists_locally = local_fs.exists(f.name) - exists_s3 = not s3_fs.exists(upload_to(i, f.name)) + exists_locally = local_fs.exists(old_filename) + exists_s3 = not s3_fs.exists(upload_to(i, old_filename)) self.stderr.write( - f"\t- (f.name={f.name}, fs.exists(f.name)={exists_locally}," - f" not s3.exist s3upload_to(i, f.name))={exists_s3})" + f"\t- (old_filename={old_filename}, " + f"fs.exists(old_filename)={exists_locally}," + f" not s3.exist s3upload_to(i, old_filename))={exists_s3})" ) diff --git a/onadata/apps/logger/management/commands/set_xform_surveys_with_geopoints.py b/onadata/apps/logger/management/commands/set_xform_surveys_with_geopoints.py index daf2a9c749..cc38c74b35 100644 --- a/onadata/apps/logger/management/commands/set_xform_surveys_with_geopoints.py +++ b/onadata/apps/logger/management/commands/set_xform_surveys_with_geopoints.py @@ -25,8 +25,8 @@ def handle(self, *args, **kwargs): xform.instances_with_geopoints = has_geo xform.save() # pylint: disable=broad-except - except Exception as e: - self.stderr.write(e) + except Exception as error: + self.stderr.write(error) else: count += 1 self.stdout.write(f"{count} of {total} forms processed.") diff --git a/onadata/apps/logger/management/commands/set_xform_surveys_with_osm.py b/onadata/apps/logger/management/commands/set_xform_surveys_with_osm.py index d4236808b6..cfaf04ea03 100644 --- a/onadata/apps/logger/management/commands/set_xform_surveys_with_osm.py +++ b/onadata/apps/logger/management/commands/set_xform_surveys_with_osm.py @@ -33,8 +33,8 @@ def handle(self, *args, **kwargs): xform.instances_with_osm = True xform.save() # pylint: disable=broad-except - except Exception as e: - self.stderr.write(e) + except Exception as error: + self.stderr.write(error) else: count += 1 diff --git a/onadata/apps/logger/management/commands/update_xform_uuids.py 
b/onadata/apps/logger/management/commands/update_xform_uuids.py index 929f55f1c7..653e060eac 100644 --- a/onadata/apps/logger/management/commands/update_xform_uuids.py +++ b/onadata/apps/logger/management/commands/update_xform_uuids.py @@ -34,8 +34,8 @@ def handle(self, *args, **kwargs): raise CommandError("You must provide a path to the csv file") # try open the file try: - with open(kwargs.get("file"), "r", encoding="utf-8") as f: - lines = csv.reader(f) + with open(kwargs.get("file"), "r", encoding="utf-8") as csv_file: + lines = csv.reader(csv_file) i = 0 for line in lines: try: @@ -55,5 +55,7 @@ def handle(self, *args, **kwargs): else: i += 1 self.stdout.write(f"Updated {i} rows") - except IOError as e: - raise CommandError(f"file {kwargs.get('file')} could not be open") from e + except IOError as error: + raise CommandError( + f"file {kwargs.get('file')} could not be open" + ) from error diff --git a/onadata/apps/logger/models/data_view.py b/onadata/apps/logger/models/data_view.py index 21a8525c68..51278f4f25 100644 --- a/onadata/apps/logger/models/data_view.py +++ b/onadata/apps/logger/models/data_view.py @@ -56,10 +56,12 @@ def _json_sql_str(key, known_integers=None, known_dates=None, known_decimals=Non def get_name_from_survey_element(element): + """Returns the abbreviated xpath of a given ``SurveyElement``.""" return element.get_abbreviated_xpath() def append_where_list(comp, t_list, json_str): + """Concatenates an SQL query based on the ``comp`` comparison value.""" if comp in ["=", ">", "<", ">=", "<="]: t_list.append(f"{json_str} {comp}" + " %s") elif comp in ["<>", "!="]: @@ -266,6 +268,8 @@ def _get_where_clause( # pylint: disable=too-many-locals @classmethod def query_iterator(cls, sql, fields=None, params=None, count=False): + """A database query iterator.""" + def parse_json(data): try: return json.loads(data) @@ -313,6 +317,7 @@ def generate_query_string( sort, filter_query=None, ): + """Returns an SQL string based on the passed in parameters.""" 
additional_columns = [GEOLOCATION] if data_view.instances_with_geopoints else [] if has_attachments_fields(data_view): @@ -417,8 +422,8 @@ def query_data( # pylint: disable=too-many-arguments try: records = list(DataView.query_iterator(sql, columns, params, count)) - except DataError as e: - return {"error": _(str(e))} + except DataError as error: + return {"error": _(str(error))} return records diff --git a/onadata/apps/logger/models/osmdata.py b/onadata/apps/logger/models/osmdata.py index b269692bec..330094b666 100644 --- a/onadata/apps/logger/models/osmdata.py +++ b/onadata/apps/logger/models/osmdata.py @@ -50,8 +50,8 @@ def get_tags_with_prefix(self): Returns tags prefixed by the field_name. """ doc = {self.field_name + ":" + self.osm_type + ":id": self.osm_id} - for k, v in self.tags.items(): - doc[self.field_name + ":" + k] = v + for key, value in self.tags.items(): + doc[self.field_name + ":" + key] = value return doc diff --git a/onadata/apps/logger/xform_fs.py b/onadata/apps/logger/xform_fs.py index 0bc337afe4..1a2373fa0a 100644 --- a/onadata/apps/logger/xform_fs.py +++ b/onadata/apps/logger/xform_fs.py @@ -56,8 +56,8 @@ def metadata_directory(self): def xml(self): """Returns the submission XML""" if not getattr(self, "_xml"): - with open(self.path, "r", encoding="utf-8") as f: - self._xml = f.read() + with open(self.path, "r", encoding="utf-8") as xml_submission_file: + self._xml = xml_submission_file.read() return self._xml @classmethod diff --git a/onadata/apps/main/management/commands/remove_odk_prefix.py b/onadata/apps/main/management/commands/remove_odk_prefix.py index 49645c929b..66ac7db0ef 100644 --- a/onadata/apps/main/management/commands/remove_odk_prefix.py +++ b/onadata/apps/main/management/commands/remove_odk_prefix.py @@ -17,10 +17,10 @@ class Command(BaseCommand): def handle(self, *args, **kwargs): cursor = connection.cursor() cursor.execute( - "UPDATE south_migrationhistory SET app_name=%s WHERE " "app_name=%s", + "UPDATE 
south_migrationhistory SET app_name=%s WHERE app_name=%s", ["logger", "odk_logger"], ) cursor.execute( - "UPDATE south_migrationhistory SET app_name=%s WHERE " "app_name=%s", + "UPDATE south_migrationhistory SET app_name=%s WHERE app_name=%s", ["viewer", "odk_viewer"], ) diff --git a/onadata/apps/main/tests/test_service_health.py b/onadata/apps/main/tests/test_service_health.py index c6e1c4918a..ccc75301f8 100644 --- a/onadata/apps/main/tests/test_service_health.py +++ b/onadata/apps/main/tests/test_service_health.py @@ -1,4 +1,7 @@ # -*- coding: utf-8 -*- +""" +Test service health view. +""" import json from django.http import HttpRequest diff --git a/onadata/apps/restservice/RestServiceInterface.py b/onadata/apps/restservice/interface.py similarity index 76% rename from onadata/apps/restservice/RestServiceInterface.py rename to onadata/apps/restservice/interface.py index dca06ec26e..9a0db6e112 100644 --- a/onadata/apps/restservice/RestServiceInterface.py +++ b/onadata/apps/restservice/interface.py @@ -4,7 +4,7 @@ """ -class RestServiceInterface: +class RestServiceInterface: # pylint: disable=too-few-public-methods """RestServiceInterface base class.""" def send(self, url, data=None): diff --git a/onadata/apps/restservice/services/__init__.py b/onadata/apps/restservice/services/__init__.py index 01e1168dcc..f16e0b0d81 100644 --- a/onadata/apps/restservice/services/__init__.py +++ b/onadata/apps/restservice/services/__init__.py @@ -1 +1,5 @@ -__all__ = ('f2dhis2', 'generic_json', 'generic_xml', 'textit') +# -*- coding: utf-8 -*- +""" +REST services module. +""" +__all__ = ("f2dhis2", "generic_json", "generic_xml", "textit") diff --git a/onadata/apps/restservice/services/f2dhis2.py b/onadata/apps/restservice/services/f2dhis2.py index c80171c04a..0a7cd58d55 100644 --- a/onadata/apps/restservice/services/f2dhis2.py +++ b/onadata/apps/restservice/services/f2dhis2.py @@ -2,12 +2,16 @@ """ Formhub/Ona Data to DHIS2 service - push submissions to DHIS2 instance. 
""" +from django.conf import settings + import requests -from onadata.apps.restservice.RestServiceInterface import RestServiceInterface +from onadata.apps.restservice.interface import RestServiceInterface + +WEBHOOK_TIMEOUT = getattr(settings, "WEBHOOK_TIMEOUT", 30) -class ServiceDefinition(RestServiceInterface): +class ServiceDefinition(RestServiceInterface): # pylint: disable=too-few-public-methods """Post submission to DHIS2 instance.""" # pylint: disable=invalid-name @@ -22,4 +26,4 @@ def send(self, url, data=None): "uuid": data.uuid, } valid_url = url % info - requests.get(valid_url) + requests.get(valid_url, timeout=WEBHOOK_TIMEOUT) diff --git a/onadata/apps/restservice/services/generic_json.py b/onadata/apps/restservice/services/generic_json.py index b2acfdf423..7185a74836 100644 --- a/onadata/apps/restservice/services/generic_json.py +++ b/onadata/apps/restservice/services/generic_json.py @@ -9,12 +9,12 @@ import requests from requests.exceptions import ConnectionError as RequestsConnectionError -from onadata.apps.restservice.RestServiceInterface import RestServiceInterface +from onadata.apps.restservice.interface import RestServiceInterface WEBHOOK_TIMEOUT = getattr(settings, "WEBHOOK_TIMEOUT", 30) -class ServiceDefinition(RestServiceInterface): +class ServiceDefinition(RestServiceInterface): # pylint: disable=too-few-public-methods """Post submisison JSON data to an external service that accepts a JSON post.""" # pylint: disable=invalid-name diff --git a/onadata/apps/restservice/services/generic_xml.py b/onadata/apps/restservice/services/generic_xml.py index 2ac3104cd4..7283459d8c 100644 --- a/onadata/apps/restservice/services/generic_xml.py +++ b/onadata/apps/restservice/services/generic_xml.py @@ -2,12 +2,15 @@ """ Post submisison XML data to an external service that accepts an XML post. 
""" +from django.conf import settings import requests -from onadata.apps.restservice.RestServiceInterface import RestServiceInterface +from onadata.apps.restservice.interface import RestServiceInterface +WEBHOOK_TIMEOUT = getattr(settings, "WEBHOOK_TIMEOUT", 30) -class ServiceDefinition(RestServiceInterface): + +class ServiceDefinition(RestServiceInterface): # pylint: disable=too-few-public-methods """ Post submisison XML data to an external service that accepts an XML post. """ @@ -21,4 +24,4 @@ def send(self, url, data=None): Post submisison XML data to an external service that accepts an XML post. """ headers = {"Content-Type": "application/xml"} - requests.post(url, data=data.xml, headers=headers) + requests.post(url, data=data.xml, headers=headers, timeout=WEBHOOK_TIMEOUT) diff --git a/onadata/apps/restservice/services/textit.py b/onadata/apps/restservice/services/textit.py index e47c85c071..945e40d1af 100644 --- a/onadata/apps/restservice/services/textit.py +++ b/onadata/apps/restservice/services/textit.py @@ -3,15 +3,19 @@ Post submission data to a textit/rapidpro server. 
""" import json + +from django.conf import settings + import requests -from six import iteritems -from six import string_types +from six import iteritems, string_types from onadata.apps.main.models import MetaData -from onadata.apps.restservice.RestServiceInterface import RestServiceInterface +from onadata.apps.restservice.interface import RestServiceInterface from onadata.libs.utils.common_tags import TEXTIT from onadata.settings.common import METADATA_SEPARATOR +WEBHOOK_TIMEOUT = getattr(settings, "WEBHOOK_TIMEOUT", 30) + class ServiceDefinition(RestServiceInterface): """ @@ -46,7 +50,12 @@ def send(self, url, data=None): "Authorization": f"Token {token}", } - requests.post(url, headers=headers, data=json.dumps(post_data)) + requests.post( + url, + headers=headers, + data=json.dumps(post_data), + timeout=WEBHOOK_TIMEOUT, + ) def clean_keys_of_slashes(self, record): """ diff --git a/onadata/apps/restservice/tests/test_restservice.py b/onadata/apps/restservice/tests/test_restservice.py index d85f6d6657..d3290d2275 100644 --- a/onadata/apps/restservice/tests/test_restservice.py +++ b/onadata/apps/restservice/tests/test_restservice.py @@ -14,7 +14,7 @@ from onadata.apps.main.tests.test_base import TestBase from onadata.apps.main.views import show from onadata.apps.restservice.models import RestService -from onadata.apps.restservice.RestServiceInterface import RestServiceInterface +from onadata.apps.restservice.interface import RestServiceInterface from onadata.apps.restservice.services.textit import ServiceDefinition from onadata.apps.restservice.views import add_service, delete_service @@ -34,7 +34,6 @@ def setUp(self): self._publish_xls_file(path) self.xform = XForm.objects.all().reverse()[0] - # pylint: disable=no-self-use def wait(self, duration=1): """Sleep for 1 second or as defined by ``duration``.""" time.sleep(duration) diff --git a/onadata/apps/restservice/utils.py b/onadata/apps/restservice/utils.py index 795157968e..09575b14aa 100644 --- 
a/onadata/apps/restservice/utils.py +++ b/onadata/apps/restservice/utils.py @@ -22,6 +22,6 @@ def call_service(submission_instance): try: service = service_def.get_service_definition()() service.send(service_def.service_url, submission_instance) - except Exception as e: - report_exception(f"Service call failed: {e}", e, sys.exc_info()) - logging.exception("Service threw exception: %s", e) + except Exception as error: + report_exception(f"Service call failed: {error}", error, sys.exc_info()) + logging.exception("Service threw exception: %s", error) diff --git a/onadata/apps/sms_support/parser.py b/onadata/apps/sms_support/parser.py index 66677e98af..8a38a2712e 100644 --- a/onadata/apps/sms_support/parser.py +++ b/onadata/apps/sms_support/parser.py @@ -93,8 +93,10 @@ def cast_sms_value(value, question, medias=None): def safe_wrap(func): try: return func() - except Exception as e: - raise SMSCastingError(_("%(error)s") % {"error": e}, xlsf_name) from e + except Exception as error: + raise SMSCastingError( + _("%(error)s") % {"error": error}, xlsf_name + ) from error def media_value(value, medias): """handle media values @@ -105,11 +107,11 @@ def media_value(value, medias): filename, b64content = value.split(";", 1) medias.append((filename, base64.b64decode(b64content))) return filename - except (AttributeError, TypeError, binascii.Error) as e: + except (AttributeError, TypeError, binascii.Error) as error: raise SMSCastingError( - _("Media file format incorrect. %(except)r") % {"except": e}, + _("Media file format incorrect. 
%(except)r") % {"except": error}, xlsf_name, - ) from e + ) from error if xlsf_type == "text": return safe_wrap(lambda: str(value)) @@ -145,13 +147,13 @@ def media_value(value, medias): raise SMSCastingError(err_msg, xlsf_name) if len(geodata) == 4: # check that altitude and accuracy are integers - for v in geodata[2:4]: - int(v) + for geo_value in geodata[2:4]: + int(geo_value) elif len(geodata) == 3: # check that altitude is integer int(geodata[2]) - except ValueError as e: - raise SMSCastingError(e, xlsf_name) from e + except ValueError as error: + raise SMSCastingError(error, xlsf_name) from error return " ".join(geodata) if xlsf_type in MEDIA_TYPES: # media content (image, video, audio) must be formatted as: @@ -375,10 +377,10 @@ def process_incoming(incoming, id_string): for idx, note in enumerate(notes): try: notes[idx] = note.replace("${", "{").format(**data) - except AttributeError as e: - logging.exception("Updating note threw exception: %s", str(e)) + except AttributeError as error: + logging.exception("Updating note threw exception: %s", str(error)) - # process_incoming expectes submission to be a file-like object + # process_incoming expects submission to be a file-like object xforms.append(BytesIO(xml_submission.encode("utf-8"))) medias.append(medias_submission) json_submissions.append(json_submission) @@ -387,8 +389,8 @@ def process_incoming(incoming, id_string): for incoming in incomings: try: process_incoming(incoming, id_string) - except (SMSCastingError, SMSSyntaxError, ValueError) as e: - responses.append({"code": SMS_PARSING_ERROR, "text": str(e)}) + except (SMSCastingError, SMSSyntaxError, ValueError) as error: + responses.append({"code": SMS_PARSING_ERROR, "text": str(error)}) for idx, xform in enumerate(xforms): # generate_instance expects media as a request.FILES.values() list diff --git a/onadata/apps/viewer/models/data_dictionary.py b/onadata/apps/viewer/models/data_dictionary.py index 3ff689c574..1e6cb6894a 100644 --- 
a/onadata/apps/viewer/models/data_dictionary.py +++ b/onadata/apps/viewer/models/data_dictionary.py @@ -36,7 +36,7 @@ from onadata.libs.utils.model_tools import get_columns_with_hxl, set_uuid -def is_newline_error(e): +def is_newline_error(error): """ Return True is e is a new line error based on the error text. Otherwise return False. @@ -45,7 +45,7 @@ def is_newline_error(e): "new-line character seen in unquoted field - do you need" " to open the file in universal-newline mode?" ) - return newline_error == str(e) + return newline_error == str(error) def process_xlsform(xls, default_name): @@ -70,15 +70,15 @@ def process_xlsform(xls, default_name): try: return parse_file_to_json(xls.name, file_object=file_object) - except csv.Error as e: - if is_newline_error(e): + except csv.Error as error: + if is_newline_error(error): xls.seek(0) file_object = StringIO("\n".join(xls.read().splitlines())) return parse_file_to_json( xls.name, default_name=default_name, file_object=file_object ) - raise e + raise error # adopted from pyxform.utils.sheet_to_csv @@ -95,7 +95,7 @@ def sheet_to_csv(xls_content, sheet_name): sheet = workbook[sheet_name] if not sheet or sheet.max_column < 2: - raise Exception(_(f"Sheet <'{sheet_name}'> has no data.")) + raise ValueError(_(f"Sheet <'{sheet_name}'> has no data.")) csv_file = BytesIO() @@ -243,13 +243,13 @@ def set_object_permissions(sender, instance=None, created=False, **kwargs): if hasattr(instance, "has_external_choices") and instance.has_external_choices: instance.xls.seek(0) - f = sheet_to_csv(instance.xls, "external_choices") - f.seek(0, os.SEEK_END) - size = f.tell() - f.seek(0) + choices_file = sheet_to_csv(instance.xls, "external_choices") + choices_file.seek(0, os.SEEK_END) + size = choices_file.tell() + choices_file.seek(0) data_file = InMemoryUploadedFile( - file=f, + file=choices_file, field_name="data_file", name="itemsets.csv", content_type="text/csv", diff --git a/onadata/apps/viewer/models/export.py 
b/onadata/apps/viewer/models/export.py index ced32d9a81..fd2623e93e 100644 --- a/onadata/apps/viewer/models/export.py +++ b/onadata/apps/viewer/models/export.py @@ -65,6 +65,10 @@ def __str__(self): class ExportBaseModel(models.Model): + """ + Export base model class + """ + XLSX_EXPORT = "xlsx" CSV_EXPORT = "csv" KML_EXPORT = "kml" @@ -206,8 +210,8 @@ def full_filepath(self): _name, ext = os.path.splitext(self.filepath) # pylint: disable=consider-using-with tmp = NamedTemporaryFile(suffix=ext, delete=False) - f = default_storage.open(self.filepath) - tmp.write(f.read()) + export_file = default_storage.open(self.filepath) + tmp.write(export_file.read()) tmp.close() return tmp.name return None @@ -292,19 +296,18 @@ def exports_outdated(cls, xform, export_type, options=None): ).latest("created_on") except cls.DoesNotExist: return True - else: - if ( - latest_export.time_of_last_submission is not None - and xform.time_of_last_submission_update() is not None - ): - return ( - latest_export.time_of_last_submission - < xform.time_of_last_submission_update() - ) - - # return true if we can't determine the status, to force - # auto-generation - return True + if ( + latest_export.time_of_last_submission is not None + and xform.time_of_last_submission_update() is not None + ): + return ( + latest_export.time_of_last_submission + < xform.time_of_last_submission_update() + ) + + # return true if we can't determine the status, to force + # auto-generation + return True @classmethod def is_filename_unique(cls, xform, filename): @@ -318,6 +321,10 @@ def is_filename_unique(cls, xform, filename): class GenericExport(ExportBaseModel): + """ + Generic Export model. 
+ """ + content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) object_id = models.PositiveIntegerField() content_object = GenericForeignKey() @@ -328,6 +335,7 @@ class Meta(ExportBaseModel.Meta): @classmethod def get_object_content_type(cls, instance): + """Returns the content_type of the instance.""" return ContentType.objects.get_for_model(instance) @classmethod diff --git a/onadata/apps/viewer/parsed_instance_tools.py b/onadata/apps/viewer/parsed_instance_tools.py index 607dff8450..00135e7a8e 100644 --- a/onadata/apps/viewer/parsed_instance_tools.py +++ b/onadata/apps/viewer/parsed_instance_tools.py @@ -47,7 +47,7 @@ def _parse_where(query, known_integers, known_decimals, or_where, or_params): # other table columns where, where_params = [], [] # pylint: disable=too-many-nested-blocks - for (field_key, field_value) in six.iteritems(query): + for field_key, field_value in six.iteritems(query): if isinstance(field_value, dict): if field_key in NONE_JSON_FIELDS: json_str = NONE_JSON_FIELDS.get(field_key) @@ -55,7 +55,7 @@ def _parse_where(query, known_integers, known_decimals, or_where, or_params): json_str = _json_sql_str( field_key, known_integers, KNOWN_DATES, known_decimals ) - for (key, value) in six.iteritems(field_value): + for key, value in six.iteritems(field_value): _v = None if key in OPERANDS: where.append(" ".join([json_str, OPERANDS.get(key), "%s"])) @@ -130,16 +130,16 @@ def get_where_clause(query, form_integer_fields=None, form_decimal_fields=None): or_dict = query.pop("$or") for or_query in or_dict: - for k, v in or_query.items(): - if v is None: - or_where.extend([f"json->>'{k}' IS NULL"]) - elif isinstance(v, list): - for value in v: + for key, value in or_query.items(): + if value is None: + or_where.extend([f"json->>'{key}' IS NULL"]) + elif isinstance(value, list): + for item in value: or_where.extend(["json->>%s = %s"]) - or_params.extend([k, value]) + or_params.extend([key, item]) else: or_where.extend(["json->>%s = %s"]) - 
or_params.extend([k, v]) + or_params.extend([key, value]) or_where = ["".join(["(", " OR ".join(or_where), ")"])] @@ -147,9 +147,9 @@ def get_where_clause(query, form_integer_fields=None, form_decimal_fields=None): query, known_integers, known_decimals, or_where, or_params ) - except (ValueError, AttributeError) as e: + except (ValueError, AttributeError) as error: if query and isinstance(query, six.string_types) and query.startswith("{"): - raise e + raise error # cast query param to text where = ["json::text ~* cast(%s as text)"] where_params = [query] diff --git a/onadata/apps/viewer/tasks.py b/onadata/apps/viewer/tasks.py index 2e812ae6d5..afbdc322f3 100644 --- a/onadata/apps/viewer/tasks.py +++ b/onadata/apps/viewer/tasks.py @@ -102,12 +102,12 @@ def _create_export(xform, export_type, options): if export_type in export_types: try: result = export_types[export_type].apply_async((), kwargs=options) - except OperationalError as e: + except OperationalError as error: export.internal_status = Export.FAILED export.error_message = "Error connecting to broker." 
export.save() - report_exception(export.error_message, e, sys.exc_info()) - raise ExportConnectionError() from e + report_exception(export.error_message, error, sys.exc_info()) + raise ExportConnectionError() from error else: raise ExportTypeError @@ -144,7 +144,7 @@ def create_xlsx_export(username, id_string, export_id, **options): gen_export = generate_export( Export.XLSX_EXPORT, export.xform, export_id, options ) - except (Exception, NoRecordsFoundError) as e: + except (Exception, NoRecordsFoundError) as error: export.internal_status = Export.FAILED export.save() # mail admins @@ -153,14 +153,13 @@ def create_xlsx_export(username, id_string, export_id, **options): report_exception( "XLS Export Exception: Export ID - " "%(export_id)s, /%(username)s/%(id_string)s" % details, - e, + error, sys.exc_info(), ) # Raise for now to let celery know we failed # - doesnt seem to break celery` raise - else: - return gen_export.id + return gen_export.id @app.task(track_started=True) @@ -183,9 +182,9 @@ def create_csv_export(username, id_string, export_id, **options): # should not even be on this page if the survey has no records export.internal_status = Export.FAILED export.save() - except Exception as e: + except Exception as error: export.internal_status = Export.FAILED - export.error_message = str(e) + export.error_message = str(error) export.save() # mail admins details = _get_export_details(username, id_string, export_id) @@ -193,7 +192,7 @@ def create_csv_export(username, id_string, export_id, **options): report_exception( "CSV Export Exception: Export ID - " "%(export_id)s, /%(username)s/%(id_string)s" % details, - e, + error, sys.exc_info(), ) raise @@ -220,7 +219,7 @@ def create_kml_export(username, id_string, export_id, **options): options, xform=export.xform, ) - except (Exception, NoRecordsFoundError) as e: + except (Exception, NoRecordsFoundError) as error: export.internal_status = Export.FAILED export.save() # mail admins @@ -228,12 +227,11 @@ def 
create_kml_export(username, id_string, export_id, **options): report_exception( "KML Export Exception: Export ID - " "%(export_id)s, /%(username)s/%(id_string)s" % details, - e, + error, sys.exc_info(), ) raise - else: - return gen_export.id + return gen_export.id @app.task(track_started=True) @@ -256,21 +254,20 @@ def create_osm_export(username, id_string, export_id, **options): options, xform=export.xform, ) - except (Exception, NoRecordsFoundError) as e: + except (Exception, NoRecordsFoundError) as error: export.internal_status = Export.FAILED - export.error_message = str(e) + export.error_message = str(error) export.save() # mail admins details = _get_export_details(username, id_string, export_id) report_exception( "OSM Export Exception: Export ID - " "%(export_id)s, /%(username)s/%(id_string)s" % details, - e, + error, sys.exc_info(), ) raise - else: - return gen_export.id + return gen_export.id @app.task(track_started=True) @@ -288,26 +285,25 @@ def create_zip_export(username, id_string, export_id, **options): options, xform=export.xform, ) - except (Exception, NoRecordsFoundError) as e: + except (Exception, NoRecordsFoundError) as error: export.internal_status = Export.FAILED - export.error_message = str(e) + export.error_message = str(error) export.save() # mail admins details = _get_export_details(username, id_string, export_id) report_exception( "Zip Export Exception: Export ID - " "%(export_id)s, /%(username)s/%(id_string)s" % details, - e, + error, ) raise - else: - if not settings.TESTING_MODE: - delete_export.apply_async( - (), - {"export_id": gen_export.id}, - countdown=settings.ZIP_EXPORT_COUNTDOWN, - ) - return gen_export.id + if not settings.TESTING_MODE: + delete_export.apply_async( + (), + {"export_id": gen_export.id}, + countdown=settings.ZIP_EXPORT_COUNTDOWN, + ) + return gen_export.id @app.task(track_started=True) @@ -323,21 +319,20 @@ def create_csv_zip_export(username, id_string, export_id, **options): gen_export = generate_export( 
Export.CSV_ZIP_EXPORT, export.xform, export_id, options ) - except (Exception, NoRecordsFoundError) as e: + except (Exception, NoRecordsFoundError) as error: export.internal_status = Export.FAILED - export.error_message = str(e) + export.error_message = str(error) export.save() # mail admins details = _get_export_details(username, id_string, export_id) report_exception( "CSV ZIP Export Exception: Export ID - " "%(export_id)s, /%(username)s/%(id_string)s" % details, - e, + error, sys.exc_info(), ) raise - else: - return gen_export.id + return gen_export.id @app.task(track_started=True) @@ -353,7 +348,7 @@ def create_sav_zip_export(username, id_string, export_id, **options): gen_export = generate_export( Export.SAV_ZIP_EXPORT, export.xform, export_id, options ) - except (Exception, NoRecordsFoundError, TypeError) as e: + except (Exception, NoRecordsFoundError, TypeError) as error: export.internal_status = Export.FAILED export.save() # mail admins @@ -361,12 +356,11 @@ def create_sav_zip_export(username, id_string, export_id, **options): report_exception( "SAV ZIP Export Exception: Export ID - " "%(export_id)s, /%(username)s/%(id_string)s" % details, - e, + error, sys.exc_info(), ) raise - else: - return gen_export.id + return gen_export.id @app.task(track_started=True) @@ -386,7 +380,7 @@ def create_external_export(username, id_string, export_id, **options): options, xform=export.xform, ) - except (Exception, NoRecordsFoundError, ConnectionError) as e: + except (Exception, NoRecordsFoundError, ConnectionError) as error: export.internal_status = Export.FAILED export.save() # mail admins @@ -394,12 +388,11 @@ def create_external_export(username, id_string, export_id, **options): report_exception( "External Export Exception: Export ID - " "%(export_id)s, /%(username)s/%(id_string)s" % details, - e, + error, sys.exc_info(), ) raise - else: - return gen_export.id + return gen_export.id @app.task(track_started=True) @@ -416,7 +409,7 @@ def 
create_google_sheet_export(username, id_string, export_id, **options): gen_export = generate_export( Export.GOOGLE_SHEETS_EXPORT, export.xform, export_id, options ) - except (Exception, NoRecordsFoundError, ConnectionError) as e: + except (Exception, NoRecordsFoundError, ConnectionError) as error: export.internal_status = Export.FAILED export.save() # mail admins @@ -424,12 +417,11 @@ def create_google_sheet_export(username, id_string, export_id, **options): report_exception( "Google Export Exception: Export ID - " "%(export_id)s, /%(username)s/%(id_string)s" % details, - e, + error, sys.exc_info(), ) raise - else: - return gen_export.id + return gen_export.id @app.task(track_started=True) diff --git a/onadata/apps/viewer/xls_writer.py b/onadata/apps/viewer/xls_writer.py index f7e1079bc3..5c301288e9 100644 --- a/onadata/apps/viewer/xls_writer.py +++ b/onadata/apps/viewer/xls_writer.py @@ -116,15 +116,15 @@ def set_data_dictionary(self, data_dictionary): def _add_sheets(self): if self._data_dictionary: - for e in self._data_dictionary.get_survey_elements(): - if isinstance(e, Section): - sheet_name = e.name + for survey_element in self._data_dictionary.get_survey_elements(): + if isinstance(survey_element, Section): + sheet_name = survey_element.name self.add_sheet(sheet_name) - for f in e.children: - if isinstance(f, Question) and not question_types_to_exclude( - f.type - ): - self.add_column(sheet_name, f.name) + for field in survey_element.children: + if isinstance( + field, Question + ) and not question_types_to_exclude(field.type): + self.add_column(sheet_name, field.name) def _unique_name_for_xls(self, sheet_name): # excel worksheet name limit seems to be 31 characters (30 to be safe) diff --git a/onadata/libs/baseviewset.py b/onadata/libs/baseviewset.py index 9329b85651..47257ae5cd 100644 --- a/onadata/libs/baseviewset.py +++ b/onadata/libs/baseviewset.py @@ -4,7 +4,7 @@ """ -class DefaultBaseViewset: +class DefaultBaseViewset: # pylint: 
disable=too-few-public-methods """ The DefaultBaseViewset class """ diff --git a/onadata/libs/data/query.py b/onadata/libs/data/query.py index 96e350f965..833165a488 100644 --- a/onadata/libs/data/query.py +++ b/onadata/libs/data/query.py @@ -210,9 +210,9 @@ def _query_args(field, name, xform, group_by=None): qargs["restrict_value"] = xforms if isinstance(group_by, list): - for i, v in enumerate(group_by): - qargs[f"group_name{i}"] = v - qargs[f"group_by{i}"] = _json_query(v) + for index, value in enumerate(group_by): + qargs[f"group_name{index}"] = value + qargs[f"group_by{index}"] = _json_query(value) else: qargs["group_name"] = group_by qargs["group_by"] = _json_query(group_by) @@ -222,11 +222,9 @@ def _query_args(field, name, xform, group_by=None): def _select_key(field, name, xform): if using_postgres(): - result = _postgres_select_key(field, name, xform) - else: - raise Exception("Unsupported Database") + return _postgres_select_key(field, name, xform) - return result + raise ValueError("Unsupported Database") def flatten(lst): diff --git a/onadata/libs/mixins/anonymous_user_mixin.py b/onadata/libs/mixins/anonymous_user_mixin.py index a920c00086..f8422f4525 100644 --- a/onadata/libs/mixins/anonymous_user_mixin.py +++ b/onadata/libs/mixins/anonymous_user_mixin.py @@ -12,7 +12,7 @@ User = get_user_model() -class AnonymousUserMixin: +class AnonymousUserMixin: # pylint: disable=too-few-public-methods """ Implements AnonymousUserMixin class diff --git a/onadata/libs/mixins/anonymous_user_public_forms_mixin.py b/onadata/libs/mixins/anonymous_user_public_forms_mixin.py index 6735ebdbcd..49e1539e58 100644 --- a/onadata/libs/mixins/anonymous_user_public_forms_mixin.py +++ b/onadata/libs/mixins/anonymous_user_public_forms_mixin.py @@ -7,7 +7,7 @@ from onadata.apps.logger.models.xform import XForm -class AnonymousUserPublicFormsMixin: +class AnonymousUserPublicFormsMixin: # pylint: disable=too-few-public-methods """ Implements the AnonymousUserPublicFormsMixin class 
diff --git a/onadata/libs/mixins/authenticate_header_mixin.py b/onadata/libs/mixins/authenticate_header_mixin.py index f332ca40f0..e248bd4dc7 100644 --- a/onadata/libs/mixins/authenticate_header_mixin.py +++ b/onadata/libs/mixins/authenticate_header_mixin.py @@ -9,7 +9,7 @@ from onadata.libs.authentication import TempTokenAuthentication -class AuthenticateHeaderMixin: +class AuthenticateHeaderMixin: # pylint: disable=too-few-public-methods """ Implements the AuthenticateHeaderMixin class diff --git a/onadata/libs/mixins/bulk_create_mixin.py b/onadata/libs/mixins/bulk_create_mixin.py index ff5b524319..70ea73da30 100644 --- a/onadata/libs/mixins/bulk_create_mixin.py +++ b/onadata/libs/mixins/bulk_create_mixin.py @@ -5,7 +5,7 @@ from __future__ import unicode_literals -class BulkCreateMixin: +class BulkCreateMixin: # pylint: disable=too-few-public-methods """ Bulk Create Mixin Allows the bulk creation of resources diff --git a/onadata/libs/mixins/cache_control_mixin.py b/onadata/libs/mixins/cache_control_mixin.py index db975bbc1b..319cac8cee 100644 --- a/onadata/libs/mixins/cache_control_mixin.py +++ b/onadata/libs/mixins/cache_control_mixin.py @@ -1,13 +1,14 @@ """ Cache control mixin """ + from typing import Optional from django.conf import settings from django.utils.cache import patch_cache_control -class CacheControlBase: +class CacheControlBase: # pylint: disable=too-few-public-methods """ Base class for Cache Control header handlers """ diff --git a/onadata/libs/mixins/last_modified_mixin.py b/onadata/libs/mixins/last_modified_mixin.py index ab53d12191..3edff30049 100644 --- a/onadata/libs/mixins/last_modified_mixin.py +++ b/onadata/libs/mixins/last_modified_mixin.py @@ -9,7 +9,7 @@ from onadata.libs.utils.timing import get_date, last_modified_header -class LastModifiedMixin: +class LastModifiedMixin: # pylint: disable=too-few-public-methods """ Implements the LastModifiedMixin class diff --git a/onadata/libs/mixins/multi_lookup_mixin.py 
b/onadata/libs/mixins/multi_lookup_mixin.py index 2f623b20d6..e4ea672185 100644 --- a/onadata/libs/mixins/multi_lookup_mixin.py +++ b/onadata/libs/mixins/multi_lookup_mixin.py @@ -10,7 +10,7 @@ from rest_framework.exceptions import ParseError -class MultiLookupMixin: +class MultiLookupMixin: # pylint: disable=too-few-public-methods """ Implements MultiLookupMixin class diff --git a/onadata/libs/mixins/object_lookup_mixin.py b/onadata/libs/mixins/object_lookup_mixin.py index fac3584fac..6ab8e93b62 100644 --- a/onadata/libs/mixins/object_lookup_mixin.py +++ b/onadata/libs/mixins/object_lookup_mixin.py @@ -10,7 +10,7 @@ from rest_framework.generics import get_object_or_404 -class ObjectLookupMixin: +class ObjectLookupMixin: # pylint: disable=too-few-public-methods """ Implements ObjectLookupMixin class diff --git a/onadata/libs/mixins/openrosa_headers_mixin.py b/onadata/libs/mixins/openrosa_headers_mixin.py index 29ef886ca1..0f47e5588c 100644 --- a/onadata/libs/mixins/openrosa_headers_mixin.py +++ b/onadata/libs/mixins/openrosa_headers_mixin.py @@ -27,7 +27,7 @@ def get_openrosa_headers(request, location=True): return data -class OpenRosaHeadersMixin: +class OpenRosaHeadersMixin: # pylint: disable=too-few-public-methods """ OpenRosaHeadersMixin class - sets OpenRosa headers in a response for a View or Viewset. 
diff --git a/onadata/libs/mixins/xform_id_string_lookup.py b/onadata/libs/mixins/xform_id_string_lookup.py index a42689257a..885e07ea41 100644 --- a/onadata/libs/mixins/xform_id_string_lookup.py +++ b/onadata/libs/mixins/xform_id_string_lookup.py @@ -8,7 +8,7 @@ from django.shortcuts import get_object_or_404 -class XFormIdStringLookupMixin: +class XFormIdStringLookupMixin: # pylint: disable=too-few-public-methods """ XForm id_strng lookup mixin class diff --git a/onadata/libs/models/__init__.py b/onadata/libs/models/__init__.py index 69c059af27..1de471a73a 100644 --- a/onadata/libs/models/__init__.py +++ b/onadata/libs/models/__init__.py @@ -1 +1,5 @@ +# -*- coding: utf-8 -*- +""" +Model utility classes and functions. +""" from .base_model import BaseModel # noqa diff --git a/onadata/libs/models/base_model.py b/onadata/libs/models/base_model.py index 240fdd4dd2..074aa9fa8d 100644 --- a/onadata/libs/models/base_model.py +++ b/onadata/libs/models/base_model.py @@ -1,7 +1,15 @@ +# -*- coding: utf-8 -*- +""" +BaseModel abstract class - sets date_created/date_modified fields. +""" from django.db import models class BaseModel(models.Model): + """ + BaseModel abstract class - sets date_created/date_modified fields. 
+ """ + date_created = models.DateTimeField(auto_now_add=True) date_modified = models.DateTimeField(auto_now=True) diff --git a/onadata/libs/models/sorting.py b/onadata/libs/models/sorting.py index 708db7b209..01913b67f6 100644 --- a/onadata/libs/models/sorting.py +++ b/onadata/libs/models/sorting.py @@ -14,14 +14,14 @@ def sort_from_mongo_sort_str(sort_str): if isinstance(sort_str, six.string_types): if sort_str.startswith("{"): sort_dict = json.loads(sort_str) - for k, v in sort_dict.items(): + for key, value in sort_dict.items(): try: - v = int(v) + value = int(value) except ValueError: pass - if v < 0: - k = f"-{k}" - sort_values.append(k) + if value < 0: + key = f"-{key}" + sort_values.append(key) else: sort_values.append(sort_str) diff --git a/onadata/libs/profiling/sql.py b/onadata/libs/profiling/sql.py index c9b24fe5fc..20181fca57 100644 --- a/onadata/libs/profiling/sql.py +++ b/onadata/libs/profiling/sql.py @@ -13,7 +13,7 @@ # http://johnparsons.net/index.php/2013/08/15/easy-sql-query-counting-in-django -class SqlTimingMiddleware: +class SqlTimingMiddleware: # pylint: disable=too-few-public-methods """ Logs the time taken by each sql query over requests. 
Logs the total time taken to run sql queries and the number of sql queries diff --git a/onadata/libs/renderers/renderers.py b/onadata/libs/renderers/renderers.py index 08f003b980..b7cf15e04f 100644 --- a/onadata/libs/renderers/renderers.py +++ b/onadata/libs/renderers/renderers.py @@ -484,9 +484,9 @@ def _to_xml(self, xml, data): if not key: self._to_xml(xml, value) elif isinstance(value, (list, tuple)): - for v in value: + for item in value: xml.startElement(key, {}) - self._to_xml(xml, v) + self._to_xml(xml, item) xml.endElement(key) elif isinstance(value, dict): diff --git a/onadata/libs/serializers/data_serializer.py b/onadata/libs/serializers/data_serializer.py index 7b3b0ad8a7..3510111e9d 100644 --- a/onadata/libs/serializers/data_serializer.py +++ b/onadata/libs/serializers/data_serializer.py @@ -230,7 +230,7 @@ def to_representation(self, instance): return ret -class SubmissionSuccessMixin: +class SubmissionSuccessMixin: # pylint: disable=too-few-public-methods """ SubmissionSuccessMixin - prepares submission success data/message. """ diff --git a/onadata/libs/serializers/entity_serializer.py b/onadata/libs/serializers/entity_serializer.py index 66583f5ae7..fad26d3d53 100644 --- a/onadata/libs/serializers/entity_serializer.py +++ b/onadata/libs/serializers/entity_serializer.py @@ -1,3 +1,7 @@ +# -*- coding: utf-8 -*- +""" +Entities serializer module. 
+""" from django.utils.translation import gettext as _ from pyxform.constants import ENTITIES_RESERVED_PREFIX @@ -175,6 +179,7 @@ class EntitySerializer(serializers.ModelSerializer): data = serializers.JSONField(write_only=True, required=False) def validate_data(self, value): + """Validates Entity dataset properties""" if value: for key in value.keys(): if key not in self.context["entity_list"].properties: @@ -233,6 +238,7 @@ class EntityArraySerializer(EntitySerializer): url = serializers.SerializerMethodField() def get_url(self, obj): + """Returns the URL to an Entity list.""" entity_list = self.context["entity_list"] request = self.context["request"] response_format = self.context.get("format") diff --git a/onadata/libs/serializers/fields/instance_related_field.py b/onadata/libs/serializers/fields/instance_related_field.py index 76965f908f..593a8c23a6 100644 --- a/onadata/libs/serializers/fields/instance_related_field.py +++ b/onadata/libs/serializers/fields/instance_related_field.py @@ -22,8 +22,8 @@ def to_internal_value(self, data): """Validates if the instance exists.""" try: return Instance.objects.get(pk=data) - except ValueError as e: - raise Exception("instance id should be an integer") from e + except ValueError as error: + raise ValueError("instance id should be an integer") from error def to_representation(self, value): """Serialize instance object""" diff --git a/onadata/libs/serializers/fields/json_field.py b/onadata/libs/serializers/fields/json_field.py index 75ea8fa9e6..a1414413a1 100644 --- a/onadata/libs/serializers/fields/json_field.py +++ b/onadata/libs/serializers/fields/json_field.py @@ -29,9 +29,9 @@ def to_internal_value(self, data): if isinstance(data, str): try: return json.loads(data) - except ValueError as e: + except ValueError as error: # invalid JSON - raise serializers.ValidationError(str(e)) from e + raise serializers.ValidationError(str(error)) from error return data @classmethod diff --git 
a/onadata/libs/serializers/fields/organization_field.py b/onadata/libs/serializers/fields/organization_field.py index 37b751b06b..295d5c6779 100644 --- a/onadata/libs/serializers/fields/organization_field.py +++ b/onadata/libs/serializers/fields/organization_field.py @@ -21,12 +21,12 @@ def to_internal_value(self, data): if data is not None: try: organization = OrganizationProfile.objects.get(pk=data) - except OrganizationProfile.DoesNotExist as e: + except OrganizationProfile.DoesNotExist as error: raise serializers.ValidationError( _(f"Organization with id '{data}' does not exist.") - ) from e - except ValueError as e: - raise serializers.ValidationError(str(e)) from e + ) from error + except ValueError as error: + raise serializers.ValidationError(str(error)) from error return organization return data diff --git a/onadata/libs/serializers/fields/project_field.py b/onadata/libs/serializers/fields/project_field.py index 4b2f973032..d1118361e0 100644 --- a/onadata/libs/serializers/fields/project_field.py +++ b/onadata/libs/serializers/fields/project_field.py @@ -21,12 +21,12 @@ def to_internal_value(self, data): if data is not None: try: project = Project.objects.get(pk=data) - except Project.DoesNotExist as e: + except Project.DoesNotExist as error: raise serializers.ValidationError( _(f"Project with id '{data}' does not exist.") - ) from e - except ValueError as e: - raise serializers.ValidationError(str(e)) from e + ) from error + except ValueError as error: + raise serializers.ValidationError(str(error)) from error return project return data diff --git a/onadata/libs/serializers/fields/project_related_field.py b/onadata/libs/serializers/fields/project_related_field.py index 921a79d9d2..0d7d9c0533 100644 --- a/onadata/libs/serializers/fields/project_related_field.py +++ b/onadata/libs/serializers/fields/project_related_field.py @@ -21,8 +21,8 @@ def get_attribute(self, instance): def to_internal_value(self, data): try: return Project.objects.get(pk=data) - 
except ValueError as e: - raise Exception("project id should be an integer") from e + except ValueError as error: + raise ValueError("project id should be an integer") from error def to_representation(self, value): """Serialize project object""" diff --git a/onadata/libs/serializers/floip_serializer.py b/onadata/libs/serializers/floip_serializer.py index 37497a0431..213a03e676 100644 --- a/onadata/libs/serializers/floip_serializer.py +++ b/onadata/libs/serializers/floip_serializer.py @@ -111,7 +111,7 @@ class FloipListSerializer(serializers.HyperlinkedModelSerializer): created = serializers.ReadOnlyField(source="date_created") modified = serializers.ReadOnlyField(source="date_modified") - class JSONAPIMeta: + class JSONAPIMeta: # pylint: disable=too-few-public-methods """ JSON API metaclass. """ @@ -138,7 +138,7 @@ class FloipSerializer(serializers.HyperlinkedModelSerializer): flow_results_specification_version = serializers.SerializerMethodField() resources = serializers.SerializerMethodField() - class JSONAPIMeta: + class JSONAPIMeta: # pylint: disable=too-few-public-methods """ JSON API metaclass. """ @@ -264,7 +264,7 @@ class FlowResultsResponseSerializer(serializers.Serializer): responses = serializers.ListField() duplicates = serializers.IntegerField(read_only=True) - class JSONAPIMeta: + class JSONAPIMeta: # pylint: disable=too-few-public-methods """ JSON API metaclass. 
""" diff --git a/onadata/libs/serializers/metadata_serializer.py b/onadata/libs/serializers/metadata_serializer.py index 28b1ef13dd..8990865c5b 100644 --- a/onadata/libs/serializers/metadata_serializer.py +++ b/onadata/libs/serializers/metadata_serializer.py @@ -81,7 +81,7 @@ def get_linked_object(parts): obj_pk = parts[1] try: obj_pk = int(obj_pk) - except ValueError as e: + except ValueError as error: raise serializers.ValidationError( { "data_value": _( @@ -89,11 +89,10 @@ def get_linked_object(parts): % {"type": obj_type, "id": obj_pk} ) } - ) from e - else: - model = DataView if obj_type == DATAVIEW_TAG else XForm + ) from error + model = DataView if obj_type == DATAVIEW_TAG else XForm - return get_object_or_404(model, pk=obj_pk) + return get_object_or_404(model, pk=obj_pk) return None @@ -203,7 +202,7 @@ def validate(self, attrs): if data_type == "media" and data_file is None: try: URLValidator()(value) - except ValidationError as e: + except ValidationError as error: parts = value.split() if len(parts) < 3: raise serializers.ValidationError( @@ -226,11 +225,11 @@ def validate(self, attrs): if not has_perm: raise serializers.ValidationError( {"data_value": _("User has no permission to the dataview.")} - ) from e + ) from error else: raise serializers.ValidationError( {"data_value": _(f"Invalid url '{value}'.")} - ) from e + ) from error else: # check if we have a value for the filename. 
if not os.path.basename(urlparse(value).path): @@ -319,8 +318,8 @@ def create(self, validated_data): ) return metadata - except IntegrityError as e: - raise serializers.ValidationError(_(UNIQUE_TOGETHER_ERROR)) from e + except IntegrityError as error: + raise serializers.ValidationError(_(UNIQUE_TOGETHER_ERROR)) from error return None def update(self, instance, validated_data): diff --git a/onadata/libs/serializers/organization_member_serializer.py b/onadata/libs/serializers/organization_member_serializer.py index 22d58be9f2..1a73e0dfb4 100644 --- a/onadata/libs/serializers/organization_member_serializer.py +++ b/onadata/libs/serializers/organization_member_serializer.py @@ -53,20 +53,19 @@ def validate_username(self, value): raise serializers.ValidationError( _(f"User '{value}' does not exist.") ) from exc - else: - if not user.is_active: - raise serializers.ValidationError(_("User is not active")) + if not user.is_active: + raise serializers.ValidationError(_("User is not active")) - # create user profile if missing - try: - profile = user.profile - except UserProfile.DoesNotExist: - profile = UserProfile.objects.create(user=user) + # create user profile if missing + try: + profile = user.profile + except UserProfile.DoesNotExist: + profile = UserProfile.objects.create(user=user) - if is_organization(profile): - raise serializers.ValidationError( - _(f"Cannot add org account `{user.username}` as member.") - ) + if is_organization(profile): + raise serializers.ValidationError( + _(f"Cannot add org account `{user.username}` as member.") + ) return value diff --git a/onadata/libs/serializers/project_invitation_serializer.py b/onadata/libs/serializers/project_invitation_serializer.py index 31f0601ecd..ef58691da6 100644 --- a/onadata/libs/serializers/project_invitation_serializer.py +++ b/onadata/libs/serializers/project_invitation_serializer.py @@ -1,3 +1,7 @@ +# -*- coding: utf-8 -*- +""" +Project invitations serializer +""" import re from django.conf import 
settings from django.utils.translation import gettext as _ @@ -115,8 +119,8 @@ def validate_invitation_id(self, invitation_id): try: ProjectInvitation.objects.get(pk=invitation_id) - except ProjectInvitation.DoesNotExist as e: - raise serializers.ValidationError(_("Invalid invitation_id.")) from e + except ProjectInvitation.DoesNotExist as error: + raise serializers.ValidationError(_("Invalid invitation_id.")) from error return invitation_id diff --git a/onadata/libs/serializers/project_serializer.py b/onadata/libs/serializers/project_serializer.py index 833f5b43f8..3693047913 100644 --- a/onadata/libs/serializers/project_serializer.py +++ b/onadata/libs/serializers/project_serializer.py @@ -513,9 +513,8 @@ def validate_metadata(self, value): json_val = JsonField.to_json(value) except ValueError as e: raise serializers.ValidationError(msg) from e - else: - if json_val is None: - raise serializers.ValidationError(msg) + if json_val is None: + raise serializers.ValidationError(msg) return value def update(self, instance, validated_data): @@ -593,15 +592,14 @@ def create(self, validated_data): raise serializers.ValidationError( "The fields name, organization must make a unique set." 
) from e - else: - project.xform_set.exclude(shared=project.shared).update( - shared=project.shared, shared_data=project.shared - ) - request = self.context.get("request") - serializer = ProjectSerializer(project, context={"request": request}) - response = serializer.data - cache.set(f"{PROJ_OWNER_CACHE}{project.pk}", response) - return project + project.xform_set.exclude(shared=project.shared).update( + shared=project.shared, shared_data=project.shared + ) + request = self.context.get("request") + serializer = ProjectSerializer(project, context={"request": request}) + response = serializer.data + cache.set(f"{PROJ_OWNER_CACHE}{project.pk}", response) + return project def get_users(self, obj): """ diff --git a/onadata/libs/serializers/stats_serializer.py b/onadata/libs/serializers/stats_serializer.py index 36e2b394d8..4f3c14a97c 100644 --- a/onadata/libs/serializers/stats_serializer.py +++ b/onadata/libs/serializers/stats_serializer.py @@ -69,16 +69,15 @@ def to_representation(self, instance): try: data = get_form_submissions_grouped_by_field(instance, field, name) - except ValueError as e: - raise exceptions.ParseError(detail=e) - else: - if data: - element = instance.get_survey_element(field) + except ValueError as error: + raise exceptions.ParseError(detail=error) + if data: + element = instance.get_survey_element(field) - if element and element.type in SELECT_FIELDS: - for record in data: - label = instance.get_choice_label(element, record[name]) - record[name] = label + if element and element.type in SELECT_FIELDS: + for record in data: + label = instance.get_choice_label(element, record[name]) + record[name] = label cache.set(cache_key, data, settings.XFORM_SUBMISSION_STAT_CACHE_TIME) @@ -126,7 +125,7 @@ def to_representation(self, instance): try: data = stats_function(instance, field) - except ValueError as e: - raise exceptions.ParseError(detail=e) + except ValueError as error: + raise exceptions.ParseError(detail=error) return data diff --git 
a/onadata/libs/serializers/widget_serializer.py b/onadata/libs/serializers/widget_serializer.py index f531153dcb..f563fa431b 100644 --- a/onadata/libs/serializers/widget_serializer.py +++ b/onadata/libs/serializers/widget_serializer.py @@ -59,7 +59,7 @@ def to_representation(self, value): # pylint: disable=attribute-defined-outside-init self.view_name = "dataviews-detail" else: - raise Exception(_("Unknown type for content_object.")) + raise ValueError(_("Unknown type for content_object.")) self._setup_field(self.view_name) diff --git a/onadata/libs/test_utils/pyxform_test_case.py b/onadata/libs/test_utils/pyxform_test_case.py index 9aa34cdf81..0d396e3011 100644 --- a/onadata/libs/test_utils/pyxform_test_case.py +++ b/onadata/libs/test_utils/pyxform_test_case.py @@ -49,7 +49,7 @@ class MatcherContext: content_str: str -class PyxformMarkdown: +class PyxformMarkdown: # pylint: disable=too-few-public-methods """Transform markdown formatted XLSForm to a pyxform survey object""" def md_to_pyxform_survey(self, md_raw, kwargs=None, autoname=True, warnings=None): @@ -399,7 +399,7 @@ def check_content(content, expected): if "warnings_count" in kwargs: c = kwargs.get("warnings_count") if not isinstance(c, int): - PyxformTestError("warnings_count must be an integer.") + raise PyxformTestError("warnings_count must be an integer.") self.assertEqual(c, len(warnings)) @staticmethod @@ -586,9 +586,9 @@ def xpath_evaluate( """ try: results = content.xpath(xpath, namespaces=matcher_context.nsmap_xpath) - except etree.XPathEvalError as e: - msg = f"Error processing XPath: {xpath}\n" + "\n".join(e.args) - raise PyxformTestError(msg) from e + except etree.XPathEvalError as error: + msg = f"Error processing XPath: {xpath}\n" + "\n".join(error.args) + raise PyxformTestError(msg) from error if matcher_context.debug: if 0 == len(results): logger.debug("Results for XPath: %s\n(No matches)\n", xpath) diff --git a/onadata/libs/utils/api_export_tools.py b/onadata/libs/utils/api_export_tools.py 
index 144970e5be..13e544d507 100644 --- a/onadata/libs/utils/api_export_tools.py +++ b/onadata/libs/utils/api_export_tools.py @@ -329,13 +329,13 @@ def _generate_new_export( # noqa: C0901 audit, request, ) - except NoRecordsFoundError as e: - raise Http404(_("No records found to export")) from e - except J2XException as e: + except NoRecordsFoundError as error: + raise Http404(_("No records found to export")) from error + except J2XException as error: # j2x exception - return async_status(FAILED, str(e)) - except SPSSIOError as e: - raise exceptions.ParseError(str(e)) from e + return async_status(FAILED, str(error)) + except SPSSIOError as error: + raise exceptions.ParseError(str(error)) from error else: return export @@ -399,12 +399,11 @@ def _set_start_end_params(request, query): query[SUBMISSION_TIME]["$lte"] = _format_date_for_mongo( request.GET["end"] ) - except ValueError as e: + except ValueError as error: raise exceptions.ParseError( _("Dates must be in the format YY_MM_DD_hh_mm_ss") - ) from e - else: - query = json.dumps(query) + ) from error + query = json.dumps(query) return query @@ -463,9 +462,8 @@ def process_async_export(request, xform, export_type, options=None): status=status.HTTP_403_FORBIDDEN, content_type="application/json", ) - else: - if query: - options["query"] = query + if query: + options["query"] = query if ( export_type in EXTERNAL_EXPORT_TYPES diff --git a/onadata/libs/utils/async_status.py b/onadata/libs/utils/async_status.py index e494188fd9..0e36ebf144 100644 --- a/onadata/libs/utils/async_status.py +++ b/onadata/libs/utils/async_status.py @@ -1,3 +1,7 @@ +# -*- coding: utf-8 -*- +""" +async_status - helper functions to return the status string for celery processes. 
+""" from celery import states PENDING = 0 @@ -7,21 +11,35 @@ RETRY = 4 STARTED = 5 -status_msg = {PENDING: 'PENDING', SUCCESSFUL: 'SUCCESS', FAILED: 'FAILURE', - PROGRESS: 'PROGRESS', RETRY: 'RETRY', STARTED: 'STARTED'} +status_msg = { + PENDING: "PENDING", + SUCCESSFUL: "SUCCESS", + FAILED: "FAILURE", + PROGRESS: "PROGRESS", + RETRY: "RETRY", + STARTED: "STARTED", +} def celery_state_to_status(state): - status_map = {states.PENDING: PENDING, states.STARTED: STARTED, - states.RETRY: RETRY, states.SUCCESS: SUCCESSFUL, - states.FAILURE: FAILED, 'PROGRESS': PROGRESS} + """Takes a numeric celery task status and returns equivalent string + representation of the state.""" + status_map = { + states.PENDING: PENDING, + states.STARTED: STARTED, + states.RETRY: RETRY, + states.SUCCESS: SUCCESSFUL, + states.FAILURE: FAILED, + "PROGRESS": PROGRESS, + } return status_map[state] if state in status_map else FAILED def async_status(status, error=None): - status = { - 'job_status': status_msg[status] - } + """Takes a numeric celery task status and returns equivalent status + dictionary with the string representation of the state. 
If `error` + is passed in the error message is added to the status dictionary.""" + status = {"job_status": status_msg[status]} if error: - status['error'] = error + status["error"] = error return status diff --git a/onadata/libs/utils/backup_tools.py b/onadata/libs/utils/backup_tools.py index 6d62312c8f..cc4dab2bcf 100644 --- a/onadata/libs/utils/backup_tools.py +++ b/onadata/libs/utils/backup_tools.py @@ -66,8 +66,8 @@ def create_zip_backup(zip_output_file, user, xform=None): if not os.path.exists(full_path): try: os.makedirs(full_path) - except OSError as e: - if e.errno != errno.EEXIST: + except OSError as error: + if error.errno != errno.EEXIST: raise full_xml_path = os.path.join(full_path, date_time_str + ".xml") @@ -77,8 +77,8 @@ def create_zip_backup(zip_output_file, user, xform=None): full_xml_path = os.path.join(full_path, f"{date_time_str}-{file_index}.xml") file_index += 1 # create the instance xml - with codecs.open(full_xml_path, "wb", "utf-8") as f: - f.write(instance.xml) + with codecs.open(full_xml_path, "wb", "utf-8") as xml_file: + xml_file.write(instance.xml) done += 1 # pylint: disable=consider-using-f-string sys.stdout.write("\r%.2f %% done" % (float(done) / float(num_instances) * 100)) @@ -147,8 +147,10 @@ def restore_backup_from_xml_file(xml_instance_path, username): username, xml_file, media_files, date_created_override=date_created ) return 1 - except Exception as e: # pylint: disable=broad-except - sys.stderr.write(f"Could not restore {file_name}, create instance said: {e}\n") + except Exception as error: # pylint: disable=broad-except + sys.stderr.write( + f"Could not restore {file_name}, create instance said: {error}\n" + ) return 0 diff --git a/onadata/libs/utils/briefcase_client.py b/onadata/libs/utils/briefcase_client.py index 5a43c8ad59..bd92ad7aed 100644 --- a/onadata/libs/utils/briefcase_client.py +++ b/onadata/libs/utils/briefcase_client.py @@ -21,6 +21,7 @@ from onadata.libs.utils.logger_tools import PublishXForm, 
create_instance, publish_form NUM_RETRIES = 3 +DEFAULT_REQUEST_TIMEOUT = 45 def django_file(file_obj, field_name, content_type): @@ -151,7 +152,9 @@ def _get_response(self, url, params=None): Downloads the url and sets self._current_response with the contents. """ setattr(self, "_current_response", None) - response = requests.get(url, auth=self.auth, params=params) + response = requests.get( + url, auth=self.auth, params=params, timeout=DEFAULT_REQUEST_TIMEOUT + ) success = response.status_code == 200 setattr(self, "_current_response", response) @@ -163,13 +166,15 @@ def _get_media_response(self, url): Downloads the media file and sets self._current_response with the contents. """ setattr(self, "_current_response", None) - head_response = requests.head(url, auth=self.auth) + head_response = requests.head( + url, auth=self.auth, timeout=DEFAULT_REQUEST_TIMEOUT + ) # S3 redirects, avoid using formhub digest on S3 if head_response.status_code == 302: url = head_response.headers.get("location") - response = requests.get(url) + response = requests.get(url, timeout=DEFAULT_REQUEST_TIMEOUT) success = response.status_code == 200 setattr(self, "_current_response", response) @@ -322,14 +327,14 @@ def _upload_instances(self, path): except ExpatError: continue # pylint: disable=broad-except - except Exception as e: + except Exception as error: # keep going despite some errors. logging.exception( ( "Ignoring exception, processing XML submission " "raised exception: %s" ), - str(e), + str(error), ) else: instances_count += 1 diff --git a/onadata/libs/utils/chart_tools.py b/onadata/libs/utils/chart_tools.py index f5683e9a0f..c5d406c06b 100644 --- a/onadata/libs/utils/chart_tools.py +++ b/onadata/libs/utils/chart_tools.py @@ -233,9 +233,6 @@ def build_chart_data_for_field( # noqa C901 if isinstance(field, str): field_label, field_xpath, field_type = FIELD_DATA_MAP.get(field) else: - # TODO: merge choices with results and set 0's on any missing fields, - # i.e. 
they didn't have responses - field_label = get_field_label(field, language_index) field_xpath = field.get_abbreviated_xpath() field_type = field.type @@ -400,8 +397,8 @@ def build_chart_data_from_widget(widget, language_index=0): choices = choices.get(field_name) try: data = build_chart_data_for_field(xform, field, language_index, choices=choices) - except DataError as e: - raise ParseError(str(e)) from e + except DataError as error: + raise ParseError(str(error)) from error return data @@ -477,16 +474,15 @@ def get_chart_data_for_field( # noqa C901 data = build_chart_data_for_field( xform, field, choices=choices, group_by=group_by, data_view=data_view ) - except DataError as e: - raise ParseError(str(e)) from e - else: - if accepted_format == "json" or not accepted_format: - xform = xform.pk - elif accepted_format == "html" and "data" in data: - for item in data["data"]: - if isinstance(item[field_name], list): - item[field_name] = ", ".join(item[field_name]) - - data.update({"xform": xform}) + except DataError as error: + raise ParseError(str(error)) from error + if accepted_format == "json" or not accepted_format: + xform = xform.pk + elif accepted_format == "html" and "data" in data: + for item in data["data"]: + if isinstance(item[field_name], list): + item[field_name] = ", ".join(item[field_name]) + + data.update({"xform": xform}) return data diff --git a/onadata/libs/utils/common_tags.py b/onadata/libs/utils/common_tags.py index 8b59c84fad..20b091b6d9 100644 --- a/onadata/libs/utils/common_tags.py +++ b/onadata/libs/utils/common_tags.py @@ -1,3 +1,7 @@ +# -*- coding: utf-8 -*- +""" +Common tags. 
+""" from __future__ import unicode_literals from django.utils.translation import gettext_lazy as _ diff --git a/onadata/libs/utils/common_tools.py b/onadata/libs/utils/common_tools.py index 8eba194e95..cc574ea42b 100644 --- a/onadata/libs/utils/common_tools.py +++ b/onadata/libs/utils/common_tools.py @@ -4,7 +4,6 @@ """ from __future__ import unicode_literals -import logging import math import sys import time @@ -27,6 +26,10 @@ TRUE_VALUES = ["TRUE", "T", "1", 1] +class FilenameMissing(Exception): + """Custom Exception for a missing filename.""" + + def str_to_bool(str_var): """ Return boolean True or False if string s represents a boolean value @@ -71,10 +74,7 @@ def report_exception(subject, info, exc_info=None): message += "".join(traceback.format_exception(*exc_info)) # send to sentry - try: - sentry_sdk.capture_exception(exc_info) - except Exception: # pylint: disable=broad-except - logging.exception(_("Sending to Sentry failed.")) + sentry_sdk.capture_exception(exc_info) else: message = f"{info}" @@ -92,7 +92,7 @@ def filename_from_disposition(content_disposition): filename_pos = content_disposition.index("filename=") if filename_pos == -1: - raise Exception('"filename=" not found in content disposition file') + raise FilenameMissing('"filename=" not found in content disposition file') return content_disposition[filename_pos + len("filename=") :] diff --git a/onadata/libs/utils/decorators.py b/onadata/libs/utils/decorators.py index faa39d7814..97234d12a2 100644 --- a/onadata/libs/utils/decorators.py +++ b/onadata/libs/utils/decorators.py @@ -9,13 +9,14 @@ from django.http import HttpResponseRedirect -def check_obj(f): +def check_obj(func): """Checks if the first argument is truthy and then calls the underlying function.""" + # pylint: disable=inconsistent-return-statements - @wraps(f) + @wraps(func) def with_check_obj(*args, **kwargs): if args[0]: - return f(*args, **kwargs) + return func(*args, **kwargs) return with_check_obj diff --git 
a/onadata/libs/utils/dict_tools.py b/onadata/libs/utils/dict_tools.py index e5624337a2..5630383ea2 100644 --- a/onadata/libs/utils/dict_tools.py +++ b/onadata/libs/utils/dict_tools.py @@ -16,21 +16,21 @@ def _get_values(doc, key): if key in doc: yield doc[key] - for z in doc.items(): - v = z[1] - if isinstance(v, dict): - for item in _get_values(v, key): + for doc_item in doc.items(): + value = doc_item[1] + if isinstance(value, dict): + for item in _get_values(value, key): yield item - elif isinstance(v, list): - for i in v: - if isinstance(i, (dict, list)): + elif isinstance(value, list): + for item_i in value: + if isinstance(item_i, (dict, list)): try: - for j in _get_values(i, key): - yield j + for item_j in _get_values(item_i, key): + yield item_j except StopIteration: continue - elif i == key: - yield i + elif item_i == key: + yield item_i return _get_values(doc, key) @@ -62,20 +62,21 @@ def merge_list_of_dicts(list_of_dicts, override_keys: list = None): # pylint: disable=too-many-nested-blocks for row in list_of_dicts: - for k, v in row.items(): - if isinstance(v, list): - z = merge_list_of_dicts( - result[k] + v if k in result else v, override_keys=override_keys + for key, value in row.items(): + if isinstance(value, list): + item_z = merge_list_of_dicts( + result[key] + value if key in result else value, + override_keys=override_keys, ) - result[k] = z if isinstance(z, list) else [z] + result[key] = item_z if isinstance(item_z, list) else [item_z] else: - if k in result: - if isinstance(v, dict): + if key in result: + if isinstance(value, dict): try: - result[k] = merge_list_of_dicts( - [result[k], v], override_keys=override_keys + result[key] = merge_list_of_dicts( + [result[key], value], override_keys=override_keys ) - except AttributeError as e: + except AttributeError as error: # If the key is within the override_keys # (Is a select_multiple question) We make # the assumption that the dict values are @@ -84,19 +85,19 @@ def 
merge_list_of_dicts(list_of_dicts, override_keys: list = None): # separate columns for each choice if ( override_keys - and isinstance(result[k], str) - and k in override_keys + and isinstance(result[key], str) + and key in override_keys ): - result[k] = {} - result[k] = merge_list_of_dicts( - [result[k], v], override_keys=override_keys + result[key] = {} + result[key] = merge_list_of_dicts( + [result[key], value], override_keys=override_keys ) else: - raise e + raise error else: result = [result, row] else: - result[k] = v + result[key] = value return result @@ -157,11 +158,11 @@ def dict_lists2strings(adict): :param d: The dict to convert. :returns: The converted dict.""" - for k, v in adict.items(): - if isinstance(v, list) and all(isinstance(e, str) for e in v): - adict[k] = " ".join(v) - elif isinstance(v, dict): - adict[k] = dict_lists2strings(v) + for key, value in adict.items(): + if isinstance(value, list) and all(isinstance(item, str) for item in value): + adict[key] = " ".join(value) + elif isinstance(value, dict): + adict[key] = dict_lists2strings(value) return adict @@ -172,15 +173,15 @@ def dict_paths2dict(adict): """ result = {} - for k, v in adict.items(): - if k.find("/") > 0: - parts = k.split("/") + for key, value in adict.items(): + if key.find("/") > 0: + parts = key.split("/") if len(parts) > 1: - k = parts[0] + key = parts[0] for part in parts[1:]: - v = {part: v} + value = {part: value} - result[k] = v + result[key] = value return result diff --git a/onadata/libs/utils/email.py b/onadata/libs/utils/email.py index 7541875f3e..574cf377f1 100644 --- a/onadata/libs/utils/email.py +++ b/onadata/libs/utils/email.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- """ -email utility functions. +Email utility functions. 
""" from django.conf import settings from django.core.mail import EmailMultiAlternatives, send_mail @@ -85,6 +85,7 @@ def get_account_lockout_email_data(username, ip_address, end=False): def send_generic_email(email, message_txt, subject): + """Sends an email.""" if any(a in [None, ""] for a in [email, message_txt, subject]): raise ValueError("email, message_txt amd subject arguments are ALL required.") diff --git a/onadata/libs/utils/export_builder.py b/onadata/libs/utils/export_builder.py index dae69ab941..e660d6c961 100644 --- a/onadata/libs/utils/export_builder.py +++ b/onadata/libs/utils/export_builder.py @@ -18,7 +18,7 @@ from openpyxl.workbook import Workbook from pyxform.question import Question from pyxform.section import RepeatingSection, Section -from savReaderWriter import SavWriter +from savReaderWriter import SavWriter # pylint: disable=no-name-in-module from six import iteritems from onadata.apps.logger.models.osmdata import OsmData @@ -221,8 +221,7 @@ def string_to_date_with_xls_validation(date_str): to_excel(date_obj) except ValueError: return date_str - else: - return date_obj + return date_obj # pylint: disable=invalid-name @@ -732,42 +731,40 @@ def split_select_multiples( if show_choice_labels: row.update( { - choice["label"]: choice["_label"] - if selections and choice["xpath"] in selections - else None + choice["label"]: ( + choice["_label"] + if selections and choice["xpath"] in selections + else None + ) for choice in choices } ) else: row.update( { - choice["xpath"]: data.split()[ - selections.index(choice["xpath"]) - ] - if selections and choice["xpath"] in selections - else None + choice["xpath"]: ( + data.split()[selections.index(choice["xpath"])] + if selections and choice["xpath"] in selections + else None + ) for choice in choices } ) elif binary_select_multiples: row.update( { - choice["label"] - if show_choice_labels - else choice["xpath"]: YES - if choice["xpath"] in selections - else NO + choice["label"] if show_choice_labels else 
choice["xpath"]: ( + YES if choice["xpath"] in selections else NO + ) for choice in choices } ) else: row.update( { - choice["label"] - if show_choice_labels - else choice["xpath"]: choice["xpath"] in selections - if selections - else None + choice["label"] if show_choice_labels else choice["xpath"]: ( + choice["xpath"] in selections if selections else None + ) for choice in choices } ) @@ -1335,29 +1332,37 @@ def _get_element_type(element_xpath): [ ( _var_types[element["xpath"]], - SAV_NUMERIC_TYPE - if _is_numeric( - element["xpath"], element["type"], self.data_dicionary - ) - else SAV_255_BYTES_TYPE, + ( + SAV_NUMERIC_TYPE + if _is_numeric( + element["xpath"], element["type"], self.data_dicionary + ) + else SAV_255_BYTES_TYPE + ), ) for element in elements ] + [ # noqa W503 ( _var_types[item], - SAV_NUMERIC_TYPE - if item in ["_id", "_index", "_parent_index", SUBMISSION_TIME] - else SAV_255_BYTES_TYPE, + ( + SAV_NUMERIC_TYPE + if item in ["_id", "_index", "_parent_index", SUBMISSION_TIME] + else SAV_255_BYTES_TYPE + ), ) for item in self.extra_columns ] + [ # noqa W503 ( x[1], - SAV_NUMERIC_TYPE - if _is_numeric(x[0], _get_element_type(x[0]), self.data_dicionary) - else SAV_255_BYTES_TYPE, + ( + SAV_NUMERIC_TYPE + if _is_numeric( + x[0], _get_element_type(x[0]), self.data_dicionary + ) + else SAV_255_BYTES_TYPE + ), ) for x in duplicate_names ] @@ -1476,16 +1481,20 @@ def get_fields(self, dataview, section, key): """ if dataview: return [ - element.get("_label_xpath") or element[key] - if self.SHOW_CHOICE_LABELS - else element[key] + ( + element.get("_label_xpath") or element[key] + if self.SHOW_CHOICE_LABELS + else element[key] + ) for element in section["elements"] if element["title"] in dataview.columns ] + self.extra_columns return [ - element.get("_label_xpath") or element[key] - if self.SHOW_CHOICE_LABELS - else element[key] + ( + element.get("_label_xpath") or element[key] + if self.SHOW_CHOICE_LABELS + else element[key] + ) for element in 
section["elements"] ] + self.extra_columns diff --git a/onadata/libs/utils/export_tools.py b/onadata/libs/utils/export_tools.py index 5755b2d356..8cf16e5356 100644 --- a/onadata/libs/utils/export_tools.py +++ b/onadata/libs/utils/export_tools.py @@ -79,7 +79,6 @@ EXPORT_QUERY_KEY = "query" MAX_RETRIES = 3 -# pylint: disable=invalid-name User = get_user_model() @@ -275,7 +274,7 @@ def generate_export(export_type, xform, export_id=None, options=None): # noqa C export_builder.INCLUDE_REVIEWS = include_reviews # noqa export_builder.set_survey(xform.survey, xform, include_reviews=include_reviews) - temp_file = NamedTemporaryFile(suffix=("." + extension)) + temp_file = NamedTemporaryFile(suffix="." + extension) columns_with_hxl = export_builder.INCLUDE_HXL and get_columns_with_hxl( xform.survey_elements @@ -301,12 +300,12 @@ def generate_export(export_type, xform, export_id=None, options=None): # noqa C ) except NoRecordsFoundError: pass - except SPSSIOError as e: + except SPSSIOError as error: export = get_or_create_export(export_id, xform, export_type, options) - export.error_message = str(e) + export.error_message = str(error) export.internal_status = Export.FAILED export.save() - report_exception("SAV Export Failure", e, sys.exc_info()) + report_exception("SAV Export Failure", error, sys.exc_info()) return export # generate filename @@ -839,8 +838,8 @@ def clean_keys_of_slashes(record): # Check if the value is a list containing nested dict and apply same if value: if isinstance(value, list) and isinstance(value[0], dict): - for v in value: - clean_keys_of_slashes(v) + for item in value: + clean_keys_of_slashes(item) return record @@ -856,8 +855,8 @@ def _get_server_from_metadata(xform, meta, token): if meta: try: int(meta) - except ValueError as e: - raise Exception(f"Invalid metadata pk {meta}") from e + except ValueError as error: + raise ValueError(f"Invalid metadata pk {meta}") from error # Get the external server from the metadata result = 
report_templates.get(pk=meta) @@ -869,7 +868,7 @@ def _get_server_from_metadata(xform, meta, token): else: # Take the latest value in the metadata if not report_templates: - raise Exception( + raise ValueError( "Could not find the template token: Please upload template." ) @@ -941,11 +940,11 @@ def generate_external_export( # noqa C901 if hasattr(client.xls.conn, "last_response"): status_code = client.xls.conn.last_response.status_code - except Exception as e: + except Exception as error: raise J2XException( f"J2X client could not generate report. Server -> {server}," - f" Error-> {e}" - ) from e + f" Error-> {error}" + ) from error else: if not server: raise J2XException("External server not set") @@ -978,7 +977,6 @@ def generate_external_export( # noqa C901 return export -# pylint: disable=invalid-name def upload_template_for_external_export(server, file_obj): """ Uploads an Excel template to the XLSReport server. @@ -1108,7 +1106,7 @@ def generate_entity_list_export(entity_list: EntityList) -> GenericExport: records = get_entity_list_dataset(entity_list) export_builder = ExportBuilder() extension = Export.CSV_EXPORT - temp_file = NamedTemporaryFile(suffix=("." + extension)) + temp_file = NamedTemporaryFile(suffix="." + extension) export_builder.to_flat_csv_export( temp_file.name, records, username, None, None, entity_list=entity_list ) diff --git a/onadata/libs/utils/gravatar.py b/onadata/libs/utils/gravatar.py index f508e32ed0..7cd7d6c47c 100644 --- a/onadata/libs/utils/gravatar.py +++ b/onadata/libs/utils/gravatar.py @@ -32,4 +32,4 @@ def get_gravatar_img_link(user): def gravatar_exists(user): """Checks if the Gravatar URL exists""" url = GRAVATAR_ENDPOINT + email_md5(user) + "?" 
+ "d=404" - return requests.get(url).status_code != 404 + return requests.get(url, timeout=20).status_code != 404 diff --git a/onadata/libs/utils/image_tools.py b/onadata/libs/utils/image_tools.py index b0d404f748..6074d4b002 100644 --- a/onadata/libs/utils/image_tools.py +++ b/onadata/libs/utils/image_tools.py @@ -55,11 +55,10 @@ def generate_media_download_url(obj, expiration: int = 3600): if isinstance(default_storage, type(s3_class)): try: url = generate_aws_media_url(file_path, content_disposition, expiration) - except ClientError as e: - logging.error(e) + except ClientError as error: + logging.error(error) return None - else: - return HttpResponseRedirect(url) + return HttpResponseRedirect(url) if isinstance(default_storage, type(azure)): media_url = generate_media_url_with_sas(file_path, expiration) @@ -151,6 +150,7 @@ def _save_thumbnails(image, path, size, suffix, extension): try: # Ensure conversion to float in operations + # pylint: disable=no-member image.thumbnail(get_dimensions(image.size, float(size)), Image.LANCZOS) except ZeroDivisionError: pass @@ -178,7 +178,7 @@ def resize(filename, extension): settings.DEFAULT_IMG_FILE_TYPE if extension == "non" else extension, ) except IOError as exc: - raise Exception("The image file couldn't be identified") from exc + raise ValueError("The image file couldn't be identified") from exc def resize_local_env(filename, extension): diff --git a/onadata/libs/utils/log.py b/onadata/libs/utils/log.py index 0e35001209..0e29d5874b 100644 --- a/onadata/libs/utils/log.py +++ b/onadata/libs/utils/log.py @@ -110,8 +110,8 @@ def emit(self, record): # save to mongodb audit_log try: model = self.get_model(self.model_name) - except Exception as e: # pylint: disable=broad-except - logging.exception("Get model threw exception: %s", str(e)) + except Exception as error: # pylint: disable=broad-except + logging.exception("Get model threw exception: %s", str(error)) else: log_entry = model(data) log_entry.save() @@ -144,12 +144,12 
@@ def audit_log( logger = logging.getLogger("audit_logger") extra = { "formhub_action": action, - "request_username": request_user.username - if request_user.username - else str(request_user), - "account_username": account_user.username - if account_user.username - else str(account_user), + "request_username": ( + request_user.username if request_user.username else str(request_user) + ), + "account_username": ( + account_user.username if account_user.username else str(account_user) + ), "client_ip": get_client_ip(request), "audit": audit, } diff --git a/onadata/libs/utils/logger_tools.py b/onadata/libs/utils/logger_tools.py index 3787a0313b..afbb501736 100644 --- a/onadata/libs/utils/logger_tools.py +++ b/onadata/libs/utils/logger_tools.py @@ -926,6 +926,7 @@ def __init__(self, *args, **kwargs): def inject_instanceid(xml_str, uuid): + """Adds the `uuid` as the to an XML string `xml_str`.""" if get_uuid_from_xml(xml_str) is None: xml = clean_and_parse_xml(xml_str) children = xml.childNodes @@ -963,7 +964,7 @@ def inject_instanceid(xml_str, uuid): return xml_str -class PublishXForm: +class PublishXForm: # pylint: disable=too-few-public-methods "A class to publish an XML XForm file." 
def __init__(self, xml_file, user): diff --git a/onadata/libs/utils/middleware.py b/onadata/libs/utils/middleware.py index 5eb11cd789..7e967d756f 100644 --- a/onadata/libs/utils/middleware.py +++ b/onadata/libs/utils/middleware.py @@ -17,7 +17,7 @@ from multidb.pinning import use_master -class BaseMiddleware: +class BaseMiddleware: # pylint: disable=too-few-public-methods """BaseMiddleware - The base middleware class.""" def __init__(self, get_response): @@ -27,7 +27,7 @@ def __call__(self, request): return self.get_response(request) -class ExceptionLoggingMiddleware: +class ExceptionLoggingMiddleware: # pylint: disable=too-few-public-methods """The exception logging middleware class - prints the exception traceback.""" def __init__(self, get_response): @@ -39,7 +39,7 @@ def process_exception(self, request, exception): print(traceback.format_exc()) -class HTTPResponseNotAllowedMiddleware: +class HTTPResponseNotAllowedMiddleware: # pylint: disable=too-few-public-methods """The HTTP Not Allowed middleware class - renders the 405.html template.""" def __init__(self, get_response): @@ -65,19 +65,19 @@ def process_request(self, request): codes = [code for code, r in parse_accept_lang_header(accept)] if "km" in codes and "km-kh" not in codes: request.META["HTTP_ACCEPT_LANGUAGE"] = accept.replace("km", "km-kh") - except Exception as e: # pylint: disable=broad-except + except Exception as error: # pylint: disable=broad-except # this might fail if i18n is disabled. logging.exception( _( "Settings request META HTTP accept language " - f"threw exceptions: {str(e)}" + f"threw exceptions: {str(error)}" ) ) super().process_request(request) -class SqlLogging: +class SqlLogging: # pylint: disable=too-few-public-methods """ SQL logging middleware. """ @@ -97,6 +97,7 @@ def __call__(self, request): return response +# pylint: disable=too-few-public-methods class OperationalErrorMiddleware(BaseMiddleware): """ Captures requests returning 500 status code. 
diff --git a/onadata/libs/utils/openid_connect_tools.py b/onadata/libs/utils/openid_connect_tools.py index 1ecdb6e729..7551bec2d8 100644 --- a/onadata/libs/utils/openid_connect_tools.py +++ b/onadata/libs/utils/openid_connect_tools.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- """ - OpenID Connect Tools +OpenID Connect Tools """ import json @@ -17,6 +17,7 @@ FIRST_NAME = "given_name" LAST_NAME = "family_name" NONCE = "nonce" +DEFAULT_REQUEST_TIMEOUT = 30 class OpenIDHandler: @@ -110,7 +111,10 @@ def _retrieve_jwk_related_to_kid(self, kid): if "jwks_endpoint" not in self.provider_configuration: raise ValueError("jwks_endpoint not found in provider configuration") - response = requests.get(self.provider_configuration["jwks_endpoint"]) + response = requests.get( + self.provider_configuration["jwks_endpoint"], + timeout=DEFAULT_REQUEST_TIMEOUT, + ) if response.status_code == 200: jwks = response.json() @@ -140,6 +144,7 @@ def obtain_id_token_from_code(self, code: str, openid_provider: str = ""): self.provider_configuration["token_endpoint"], params=payload, headers=headers, + timeout=DEFAULT_REQUEST_TIMEOUT, ) if response.status_code == 200: @@ -182,7 +187,7 @@ def verify_and_decode_id_token( provider_initiated_for = cache.get(decoded_token.get(NONCE)) if provider_initiated_for != openid_provider: - raise Exception("Incorrect nonce value returned") + raise ValueError("Incorrect nonce value returned") return decoded_token return None diff --git a/onadata/libs/utils/organization_utils.py b/onadata/libs/utils/organization_utils.py index 3910135dbf..e75a2edc5d 100644 --- a/onadata/libs/utils/organization_utils.py +++ b/onadata/libs/utils/organization_utils.py @@ -1,17 +1,18 @@ +# -*- coding: utf-8 -*- """ OrganizationProfile utility functions """ -from onadata.libs.serializers.organization_serializer import \ - OrganizationSerializer +from onadata.libs.serializers.organization_serializer import OrganizationSerializer def get_organization_members(organization): + """Returns a 
dictionary of organizations members""" ret = {} data = OrganizationSerializer().get_users(organization) for user_data in data: - username = user_data.pop('user') - user_data.pop('gravatar') + username = user_data.pop("user") + user_data.pop("gravatar") ret[username] = user_data return ret diff --git a/onadata/libs/utils/osm.py b/onadata/libs/utils/osm.py index fc721fb9b5..f22e94a1de 100644 --- a/onadata/libs/utils/osm.py +++ b/onadata/libs/utils/osm.py @@ -25,8 +25,10 @@ def _get_xml_obj(xml): xml = xml.strip().encode() try: return fromstring(xml) - except _etree.XMLSyntaxError as e: # pylint: disable=c-extension-no-member - if "Attribute action redefined" in str(e): + except ( + _etree.XMLSyntaxError + ) as _etree_error: # pylint: disable=c-extension-no-member + if "Attribute action redefined" in str(_etree_error): xml = xml.replace(b'action="modify" ', b"") return _get_xml_obj(xml) @@ -167,16 +169,16 @@ def save_osm_data(instance_id): osm_xml = osm.media_file.read() if isinstance(osm_xml, bytes): osm_xml = osm_xml.decode("utf-8") - except IOError as e: - logging.exception("IOError saving osm data: %s", str(e)) + except IOError as io_error: + logging.exception("IOError saving osm data: %s", str(io_error)) continue else: filename = None field_name = None - for k, v in osm_filenames.items(): - if osm.filename.startswith(v.replace(".osm", "")): - filename = v - field_name = k + for key, value in osm_filenames.items(): + if osm.filename.startswith(value.replace(".osm", "")): + filename = value + field_name = key break if field_name is None: @@ -227,7 +229,7 @@ def osm_flat_dict(instance_id): for osm in osm_data: for tag in osm.tags: - for (k, v) in iteritems(tag): - tags.update({f"osm_{k}": v}) + for key, value in iteritems(tag): + tags.update({f"osm_{key}": value}) return tags diff --git a/onadata/libs/utils/qrcode.py b/onadata/libs/utils/qrcode.py index 0f13a4c238..48a2ab5b6a 100644 --- a/onadata/libs/utils/qrcode.py +++ b/onadata/libs/utils/qrcode.py @@ -26,7 +26,7 
@@ def generate_qrcode(message): img = barcode( "qrcode", message, - options=dict(version=9, eclevel=eclevel), + options={"version": 9, "eclevel": eclevel}, margin=margin, data_mode=data_mode, scale=scale, diff --git a/onadata/libs/utils/string.py b/onadata/libs/utils/string.py index 1ece191bc0..82853a5491 100644 --- a/onadata/libs/utils/string.py +++ b/onadata/libs/utils/string.py @@ -5,9 +5,11 @@ """ -def str2bool(v): +def str2bool(value): """ String utility function str2bool - converts "yes", "true", "t", "1" to True else returns the argument value v. """ - return v.lower() in ("yes", "true", "t", "1") if isinstance(v, str) else v + return ( + value.lower() in ("yes", "true", "t", "1") if isinstance(value, str) else value + ) diff --git a/onadata/libs/utils/validators.py b/onadata/libs/utils/validators.py index 89d00a238b..ee3e05d6f4 100644 --- a/onadata/libs/utils/validators.py +++ b/onadata/libs/utils/validators.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """ Module containing custom validator classes for the User Model """ @@ -7,21 +8,23 @@ class PreviousPasswordValidator: + """Class validates password was not previously recorded.""" + def __init__(self, history_limt=5): self.message = _("You cannot use a previously used password.") self.history_limit = history_limt def validate(self, password, user=None): + """Checks password was not used previously.""" if user and user.pk and user.is_active: if user.check_password(password): raise ValidationError(self.message) - pw_history = user.password_history.all()[:self.history_limit] + pw_history = user.password_history.all()[: self.history_limit] for pw_hist in pw_history: if check_password(password, pw_hist.hashed_password): raise ValidationError(self.message) def get_help_text(self): - return _( - "Your password cannot be the same as your previous password." 
- ) + """Returns the help text.""" + return _("Your password cannot be the same as your previous password.") diff --git a/onadata/libs/utils/viewer_tools.py b/onadata/libs/utils/viewer_tools.py index 1249ee8bab..18c9fc3039 100644 --- a/onadata/libs/utils/viewer_tools.py +++ b/onadata/libs/utils/viewer_tools.py @@ -178,6 +178,7 @@ def get_enketo_urls( data=values, auth=(settings.ENKETO_API_TOKEN, ""), verify=getattr(settings, "VERIFY_SSL", True), + timeout=20, ) resp_content = response.content resp_content = ( @@ -203,19 +204,18 @@ def handle_enketo_error(response): """Handle enketo error response.""" try: data = json.loads(response.content) - except (ValueError, JSONDecodeError) as e: + except (ValueError, JSONDecodeError) as enketo_error: report_exception( f"HTTP Error {response.status_code}", response.text, sys.exc_info() ) if response.status_code == 502: raise EnketoError( "Sorry, we cannot load your form right now. Please try again later." - ) from e - raise EnketoError() from e - else: - if "message" in data: - raise EnketoError(data["message"]) - raise EnketoError(response.text) + ) from enketo_error + raise EnketoError() from enketo_error + if "message" in data: + raise EnketoError(data["message"]) + raise EnketoError(response.text) def generate_enketo_form_defaults(xform, **kwargs): @@ -223,7 +223,7 @@ def generate_enketo_form_defaults(xform, **kwargs): defaults = {} if kwargs: - for (name, value) in kwargs.items(): + for name, value in kwargs.items(): field = xform.get_survey_element(name) if field: defaults[f"defaults[{field.get_xpath()}]"] = value @@ -237,15 +237,17 @@ def create_attachments_zipfile(attachments, zip_file): :param attachments: an Attachments queryset. :param zip_file: a file object, more likely a NamedTemporaryFile() object. 
""" - with zipfile.ZipFile(zip_file, "w", zipfile.ZIP_DEFLATED, allowZip64=True) as z: + with zipfile.ZipFile( + zip_file, "w", zipfile.ZIP_DEFLATED, allowZip64=True + ) as z_file: for attachment in attachments: default_storage = get_storage_class()() filename = attachment.media_file.name if default_storage.exists(filename): try: - with default_storage.open(filename) as f: - if f.size > settings.ZIP_REPORT_ATTACHMENT_LIMIT: + with default_storage.open(filename) as a_file: + if a_file.size > settings.ZIP_REPORT_ATTACHMENT_LIMIT: report_exception( "Create attachment zip exception", ( @@ -254,9 +256,9 @@ def create_attachments_zipfile(attachments, zip_file): ), ) break - z.writestr(attachment.media_file.name, f.read()) - except IOError as e: - report_exception("Create attachment zip exception", e) + z_file.writestr(attachment.media_file.name, a_file.read()) + except IOError as io_error: + report_exception("Create attachment zip exception", io_error) break From 08c5f70a224ad3538381724b4bed1b5c4324fe3a Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Thu, 4 Jul 2024 15:19:49 +0300 Subject: [PATCH 251/270] updated docs --- docs/onadata.apps.restservice.rst | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/onadata.apps.restservice.rst b/docs/onadata.apps.restservice.rst index c035962ed6..2bef52de1e 100644 --- a/docs/onadata.apps.restservice.rst +++ b/docs/onadata.apps.restservice.rst @@ -16,18 +16,18 @@ Subpackages Submodules ---------- -onadata.apps.restservice.RestServiceInterface module ----------------------------------------------------- +onadata.apps.restservice.forms module +------------------------------------- -.. automodule:: onadata.apps.restservice.RestServiceInterface +.. 
automodule:: onadata.apps.restservice.forms :members: :undoc-members: :show-inheritance: -onadata.apps.restservice.forms module -------------------------------------- +onadata.apps.restservice.interface module +----------------------------------------- -.. automodule:: onadata.apps.restservice.forms +.. automodule:: onadata.apps.restservice.interface :members: :undoc-members: :show-inheritance: From afbca36e60923497ef08aa9344e6e522a472f95d Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Fri, 5 Jul 2024 07:29:18 +0300 Subject: [PATCH 252/270] Add migration to `add_project_entitylist` permission to existing owners and managers (#2633) * update migration to add_project_entitylist perm to existing users * use iterator on queryset --- .../0019_alter_project_options_and_more.py | 26 +++++++++++++++++++ onadata/libs/utils/project_utils.py | 4 ++- 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/onadata/apps/logger/migrations/0019_alter_project_options_and_more.py b/onadata/apps/logger/migrations/0019_alter_project_options_and_more.py index 0c30bdbab8..468b2c83d8 100644 --- a/onadata/apps/logger/migrations/0019_alter_project_options_and_more.py +++ b/onadata/apps/logger/migrations/0019_alter_project_options_and_more.py @@ -4,6 +4,29 @@ from django.db import migrations, models import django.db.models.deletion +from guardian.shortcuts import assign_perm + + +def add_project_entitylist_perm(apps, schema_editor): + """Assign `add_project_entitylist` permission to existing Owners, Managers""" + Project = apps.get_model("logger", "Project") + project_qs = Project.objects.filter(deleted_at__isnull=True) + eta = project_qs.count() + + for project in project_qs.iterator(chunk_size=200): + processed_users = set() + project_user_obj_perm_qs = project.projectuserobjectpermission_set.all() + + for perm in project_user_obj_perm_qs.iterator(chunk_size=100): + user = perm.user + + if user.pk not in processed_users and user.has_perm("add_project", project): + _ = 
assign_perm("add_project_entitylist", user, project) + processed_users.add(user.pk) + + eta -= 1 + print("eta", eta) + class Migration(migrations.Migration): @@ -41,4 +64,7 @@ class Migration(migrations.Migration): to=settings.AUTH_USER_MODEL, ), ), + migrations.RunPython( + add_project_entitylist_perm, reverse_code=migrations.RunPython.noop + ), ] diff --git a/onadata/libs/utils/project_utils.py b/onadata/libs/utils/project_utils.py index 53e9d78eb8..5b08fd8620 100644 --- a/onadata/libs/utils/project_utils.py +++ b/onadata/libs/utils/project_utils.py @@ -34,6 +34,7 @@ OWNER_TEAM_NAME, ) from onadata.libs.utils.common_tools import report_exception +from onadata.libs.utils.model_tools import queryset_iterator class ExternalServiceRequestError(Exception): @@ -45,8 +46,9 @@ class ExternalServiceRequestError(Exception): def get_project_users(project): """Return project users with the role assigned to them.""" ret = {} + project_user_obj_perm_qs = project.projectuserobjectpermission_set.all() - for perm in project.projectuserobjectpermission_set.all(): + for perm in queryset_iterator(project_user_obj_perm_qs): if perm.user.username not in ret: user = perm.user From 6fea5bbe81dffba59e59ef2e33e933ea759c1903 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Fri, 5 Jul 2024 12:16:57 +0300 Subject: [PATCH 253/270] Ignore EntityList model permissions on API endpoints (#2635) * ignore EntityList model permissions on API endpoints ignore permissions on API endpoints to avoid manually setting model permissions when a user is created. 
Also avoids having a migration to add the EntityList permissions to existing users * fix lint warning unused argument --- onadata/apps/api/permissions.py | 15 +++++++++++++++ onadata/apps/api/viewsets/entity_list_viewset.py | 4 ++-- onadata/libs/utils/user_auth.py | 12 +----------- 3 files changed, 18 insertions(+), 13 deletions(-) diff --git a/onadata/apps/api/permissions.py b/onadata/apps/api/permissions.py index 07485f9903..261abfd058 100644 --- a/onadata/apps/api/permissions.py +++ b/onadata/apps/api/permissions.py @@ -558,3 +558,18 @@ def has_permission(self, request, view): return False return True + + +class DjangoObjectPermissionsIgnoreModelPerm(DjangoObjectPermissions): + """ + Similar to DjangoModelPermissions, except that model permissions + are ignored. + """ + + # pylint: disable=unused-argument + def has_permission(self, request, view): + """Override `has_permission` method""" + if request.user.is_anonymous and request.method not in SAFE_METHODS: + return False + + return True diff --git a/onadata/apps/api/viewsets/entity_list_viewset.py b/onadata/apps/api/viewsets/entity_list_viewset.py index 617046dcd9..c1b2993d1b 100644 --- a/onadata/apps/api/viewsets/entity_list_viewset.py +++ b/onadata/apps/api/viewsets/entity_list_viewset.py @@ -12,7 +12,7 @@ ) -from onadata.apps.api.permissions import DjangoObjectPermissionsAllowAnon +from onadata.apps.api.permissions import DjangoObjectPermissionsIgnoreModelPerm from onadata.apps.api.tools import get_baseviewset_class from onadata.apps.logger.models import Entity, EntityList from onadata.libs.filters import AnonUserEntityListFilter, EntityListProjectFilter @@ -53,7 +53,7 @@ class EntityListViewSet( ) ) serializer_class = EntityListSerializer - permission_classes = (DjangoObjectPermissionsAllowAnon,) + permission_classes = (DjangoObjectPermissionsIgnoreModelPerm,) pagination_class = StandardPageNumberPagination filter_backends = (AnonUserEntityListFilter, EntityListProjectFilter) diff --git 
a/onadata/libs/utils/user_auth.py b/onadata/libs/utils/user_auth.py index d219028030..5569c1de77 100644 --- a/onadata/libs/utils/user_auth.py +++ b/onadata/libs/utils/user_auth.py @@ -17,7 +17,6 @@ from onadata.apps.api.models.team import Team from onadata.apps.api.models.temp_token import TempToken -from onadata.apps.logger.models.entity_list import EntityList from onadata.apps.logger.models.note import Note from onadata.apps.logger.models.project import Project from onadata.apps.logger.models.xform import XForm @@ -223,16 +222,7 @@ def add_cors_headers(response): def set_api_permissions_for_user(user): """Sets the permissions to allow a ``user`` to access the APU.""" - models = [ - UserProfile, - XForm, - MergedXForm, - Project, - Team, - OrganizationProfile, - Note, - EntityList, - ] + models = [UserProfile, XForm, MergedXForm, Project, Team, OrganizationProfile, Note] for model in models: for perm in get_perms_for_model(model): assign_perm(f"{perm.content_type.app_label}.{perm.codename}", user) From cb2c06e90a2b925df7e551f0c174b4e721eeafc4 Mon Sep 17 00:00:00 2001 From: Ukang'a Dickson Date: Wed, 3 Jul 2024 12:44:39 +0300 Subject: [PATCH 254/270] Switch to debian to ensure GLIBC versions are identical with python-deps --- docker/onadata-uwsgi/Dockerfile.ubuntu | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/docker/onadata-uwsgi/Dockerfile.ubuntu b/docker/onadata-uwsgi/Dockerfile.ubuntu index 50233f47a7..dfe54f5ca6 100644 --- a/docker/onadata-uwsgi/Dockerfile.ubuntu +++ b/docker/onadata-uwsgi/Dockerfile.ubuntu @@ -1,4 +1,4 @@ -FROM onaio/python-deps:3.10.14 AS base +FROM onaio/python-deps:3.10.14-20240703 AS base ARG optional_packages @@ -53,19 +53,21 @@ RUN python -m pip install --no-cache-dir -r requirements/docs.pip && \ make -C docs html -FROM ubuntu:jammy-20240530 as runtime +FROM debian:bookworm-20240701 as runtime ENV DEBIAN_FRONTEND=noninteractive # Install prerequisite packages -RUN apt-get update -q && \ - 
apt-get install -y --no-install-recommends locales netcat +RUN echo "deb http://deb.debian.org/debian unstable main non-free contrib" >> /etc/apt/sources.list \ + && apt-get update -q \ + && apt-get install -y --no-install-recommends locales netcat-traditional \ + && sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen \ + && dpkg-reconfigure locales -# # Generate and set en_US.UTF-8 locale -RUN locale-gen en_US.UTF-8 ENV LC_ALL=en_US.UTF-8 ENV LC_CTYPE=en_US.UTF-8 -RUN dpkg-reconfigure locales +ENV LANG=en_US.UTF-8 +ENV LANGUAGE=en_US.UTF-8 # Install OnaData runtime dependencies @@ -77,12 +79,10 @@ RUN apt-get install -y --no-install-recommends \ libxslt1-dev \ && apt-get autoremove -y \ && apt-get clean -y \ - && rm -rf /var/lib/apt/lists/* - -# Create OnaData user and add to tty group -RUN useradd -G tty -m appuser && \ - mkdir -p /srv/onadata && \ - chown -R appuser:appuser /srv/onadata + && rm -rf /var/lib/apt/lists/* \ + && useradd -G tty -m appuser \ + && mkdir -p /srv/onadata \ + && chown -R appuser:appuser /srv/onadata COPY --from=base /home/appuser/.pyenv/ /home/appuser/.pyenv/ COPY --from=docs /srv/onadata/ /srv/onadata/ From 13904bcf4ac1bc792754b41c4d2e7876dc9ec0ad Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Mon, 8 Jul 2024 15:01:52 +0300 Subject: [PATCH 255/270] tune migration performance (#2637) --- .../0019_alter_project_options_and_more.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/onadata/apps/logger/migrations/0019_alter_project_options_and_more.py b/onadata/apps/logger/migrations/0019_alter_project_options_and_more.py index 468b2c83d8..8023f6f894 100644 --- a/onadata/apps/logger/migrations/0019_alter_project_options_and_more.py +++ b/onadata/apps/logger/migrations/0019_alter_project_options_and_more.py @@ -14,15 +14,19 @@ def add_project_entitylist_perm(apps, schema_editor): eta = project_qs.count() for project in project_qs.iterator(chunk_size=200): - processed_users = set() - project_user_obj_perm_qs = 
project.projectuserobjectpermission_set.all() + # Owners and Managers have the `add_project` permission + project_user_obj_perm_qs = project.projectuserobjectpermission_set.filter( + permission__codename="add_project" + ) + project_group_obj_perm_qs = project.projectgroupobjectpermission_set.filter( + permission__codename="add_project" + ) for perm in project_user_obj_perm_qs.iterator(chunk_size=100): - user = perm.user + _ = assign_perm("add_project_entitylist", perm.user, project) - if user.pk not in processed_users and user.has_perm("add_project", project): - _ = assign_perm("add_project_entitylist", user, project) - processed_users.add(user.pk) + for perm in project_group_obj_perm_qs.iterator(chunk_size=100): + _ = assign_perm("add_project_entitylist", perm.group, project) eta -= 1 print("eta", eta) From 02d442e0fcf20b142c2b85079a62dcfc420e6196 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Tue, 9 Jul 2024 18:16:07 +0300 Subject: [PATCH 256/270] Add Entity related data to forms on project retrieve endpoint (#2639) * add entites data to project retrieve endpoint * update docs * update docs --- docs/entities.md | 81 +++++++++++++++++- docs/projects.rst | 84 +++++++++++++++++-- .../tests/viewsets/test_project_viewset.py | 48 ++++++++++- .../libs/serializers/project_serializer.py | 4 +- 4 files changed, 205 insertions(+), 12 deletions(-) diff --git a/docs/entities.md b/docs/entities.md index 3f0e99c37e..de2da0fce7 100644 --- a/docs/entities.md +++ b/docs/entities.md @@ -49,7 +49,7 @@ Body: } ``` -## Get EntityLists +## Get a list of EntityLists `GET /api/v2/entity-lists` @@ -84,7 +84,7 @@ Body: ``` -To get EntityLists for a specific project +### Get a list of Entities for a specific project `GET /api/v2/entity-lists?project=` @@ -116,6 +116,44 @@ Body: ``` +### Get a paginated list of EntityLists + +`GET /api/v2/entity-lists?page=&page_size=` + +Returns a list of projects using page number and the number of items per page. 
Use the `page` parameter to specify page number and `page_size` parameter is used to set the custom page size. + +- `page` - Integer representing the page. +- `page_size` - Integer representing the number of records that should be returned in a single page. The maximum number of items that can be requested in a page via the `page_size` query param is `10,000` + +**Request** + +`curl -X GET https://api.ona.io/api/v2/entity-lists?page=1&page_size=100` + +**Response** + +Status: `200 OK` + +Body: + +``` +[ + { + "url":"http://testserver/api/v2/entity-lists/9", + "id":9, + "name":"trees", + "project":"http://testserver/api/v1/projects/9", + "public":false, + "date_created":"2024-04-17T11:26:24.630117Z", + "date_modified":"2024-04-17T11:26:25.050823Z", + "num_registration_forms":1, + "num_follow_up_forms":1, + "num_entities":1 + } + + ... +] +``` + ## Get a single EntityList `GET /api/v2/entity-lists/` @@ -182,7 +220,7 @@ curl -X DELETE https://api.ona.io/api/v2/entity-lists/1 \ Status: `204 No Content` -## Get Entities +## Get a list of Entities `GET api/v2/entity-lists//entities` @@ -228,6 +266,43 @@ Body: ] ``` +### Get a paginated list of Entities + +`GET /api/v2/entity-lists/1/entities?page=&page_size=` + +Returns a list of projects using page number and the number of items per page. Use the `page` parameter to specify page number and `page_size` parameter is used to set the custom page size. + +- `page` - Integer representing the page. +- `page_size` - Integer representing the number of records that should be returned in a single page. 
The maximum number of items that can be requested in a page via the `page_size` query param is `10,000` + +**Request** + +`curl -X GET https://api.ona.io/api/v2/entity-lists/1/entities?page=1&page_size=100` + +**Response** + +Status: `200 OK` + +Body: + +``` +[ + { + "url":"http://testserver/api/v2/entity-lists/1/entities/3", + "id":3, + "uuid": "dbee4c32-a922-451c-9df7-42f40bf78f48", + "date_created": "2024-06-20T07:37:20.416054Z", + "data": { + "species":"purpleheart", + "geometry":"-1.286905 36.772845 0 0", + "circumference_cm":300, + "label":"300cm purpleheart", + } + }, + ... +] +``` + ## Get a single Entity `GET api/v2/entity-lists//entities/` diff --git a/docs/projects.rst b/docs/projects.rst index 21cb868636..ba62d694e2 100644 --- a/docs/projects.rst +++ b/docs/projects.rst @@ -136,13 +136,83 @@ Response ^^^^^^^^ :: - { - "url": "https://api.ona.io/api/v1/projects/1", - "owner": "https://api.ona.io/api/v1/users/ona", - "name": "project 1", - "date_created": "2013-07-24T13:37:39Z", - "date_modified": "2013-07-24T13:37:39Z" - } + { + "url":"https://api.ona.io/api/v1/projects/1", + "projectid":1, + "owner":"https://api.ona.io/api/v1/users/ona", + "created_by":"https://api.ona.io/api/v1/users/ona", + "metadata":{ + "name":"Entities", + "category":"agriculture" + }, + "starred":false, + "users":[ + { + "is_org":false, + "metadata":{ + "is_email_verified":false + }, + "first_name":"Ona", + "last_name":"", + "user":"ona", + "role":"owner" + } + ], + "forms":[ + { + "name":"Trees registration", + "formid":1, + "id_string":"trees_registration", + "num_of_submissions":7, + "downloadable":true, + "encrypted":false, + "published_by_formbuilder":null, + "last_submission_time":"2024-06-18T14:34:57.987361Z", + "date_created":"2024-05-28T12:08:07.993820Z", + "url":"https://api.ona.io/api/v1/forms/1", + "last_updated_at":"2024-06-21T08:13:06.436449Z", + "is_merged_dataset":false, + "contributes_entities_to":{ + "id":100, + "name":"trees", + "is_active":true + }, + 
"consumes_entities_from":[] + }, + { + "name":"Trees follow-up", + "formid":18421, + "id_string":"trees_follow_up", + "num_of_submissions":0, + "downloadable":true, + "encrypted":false, + "published_by_formbuilder":null, + "last_submission_time":null, + "date_created":"2024-05-28T12:08:39.909235Z", + "url":"https://api.ona.io/api/v1/forms/2", + "last_updated_at":"2024-06-21T08:13:58.963836Z", + "is_merged_dataset":false, + "contributes_entities_to":null, + "consumes_entities_from":[ + { + "id":100, + "name":"trees", + "is_active":true + } + ] + } + ], + "public":false, + "tags":[], + "num_datasets":2, + "last_submission_date":"2024-06-18T14:50:32.755792Z", + "teams":[], + "data_views":[], + "name":"Entities", + "date_created":"2023-11-07T07:02:09.655836Z", + "date_modified":"2024-06-21T08:15:12.634454Z", + "deleted_at":null + } Update Project Information ------------------------------ diff --git a/onadata/apps/api/tests/viewsets/test_project_viewset.py b/onadata/apps/api/tests/viewsets/test_project_viewset.py index cce5afb707..719ce522fa 100644 --- a/onadata/apps/api/tests/viewsets/test_project_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_project_viewset.py @@ -34,7 +34,13 @@ from onadata.apps.api.viewsets.project_viewset import ProjectViewSet from onadata.apps.api.viewsets.team_viewset import TeamViewSet from onadata.apps.api.viewsets.xform_viewset import XFormViewSet -from onadata.apps.logger.models import Project, ProjectInvitation, XForm, XFormVersion +from onadata.apps.logger.models import ( + EntityList, + Project, + ProjectInvitation, + XForm, + XFormVersion, +) from onadata.apps.main.models import MetaData from onadata.libs import permissions as role from onadata.libs.models.share_project import ShareProject @@ -54,6 +60,7 @@ BaseProjectSerializer, ProjectSerializer, ) +from onadata.libs.utils.user_auth import get_user_default_project from onadata.libs.utils.cache_tools import PROJ_OWNER_CACHE, safe_key User = get_user_model() @@ -406,6 +413,8 @@ 
def test_none_empty_forms_and_dataview_properties_in_returned_json(self): data_view_obj_keys = list(response.data.get("data_views")[0]) self.assertEqual( [ + "consumes_entities_from", + "contributes_entities_to", "date_created", "downloadable", "encrypted", @@ -2785,6 +2794,43 @@ def test_project_caching(self): ) self.assertEqual(response.data["num_datasets"], 1) + def test_get_project_w_registration_form(self): + """Retrieve project with Entity registtraton form""" + self._publish_registration_form(self.user) + view = ProjectViewSet.as_view({"get": "retrieve"}) + request = self.factory.get("/", **self.extra) + response = view(request, pk=self.project.pk) + entity_list = EntityList.objects.first() + self.assertEqual(response.status_code, 200) + self.assertEqual( + response.data["forms"][0]["contributes_entities_to"], + { + "id": entity_list.pk, + "name": "trees", + "is_active": True, + }, + ) + + def test_get_project_w_follow_up_form(self): + """Retrieve project with Entity follow up form""" + self.project = get_user_default_project(self.user) + entity_list = EntityList.objects.create(name="trees", project=self.project) + self._publish_follow_up_form(self.user) + view = ProjectViewSet.as_view({"get": "retrieve"}) + request = self.factory.get("/", **self.extra) + response = view(request, pk=self.project.pk) + self.assertEqual(response.status_code, 200) + self.assertEqual( + response.data["forms"][0]["consumes_entities_from"], + [ + { + "id": entity_list.pk, + "name": "trees", + "is_active": True, + } + ], + ) + class GetProjectInvitationListTestCase(TestAbstractViewSet): """Tests for get project invitation list""" diff --git a/onadata/libs/serializers/project_serializer.py b/onadata/libs/serializers/project_serializer.py index 3693047913..c5ff793333 100644 --- a/onadata/libs/serializers/project_serializer.py +++ b/onadata/libs/serializers/project_serializer.py @@ -266,7 +266,7 @@ class Meta: # pylint: disable=too-few-public-methods -class 
ProjectXFormSerializer(serializers.HyperlinkedModelSerializer): +class ProjectXFormSerializer(BaseProjectXFormSerializer): """ ProjectXFormSerializer class - to return project xform info. """ @@ -294,6 +294,8 @@ class Meta: "url", "last_updated_at", "is_merged_dataset", + "contributes_entities_to", + "consumes_entities_from", ) def get_published_by_formbuilder(self, obj): From c7af028e06d777daa830652fe72472ac6a5ff686 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 10 Jul 2024 08:32:30 +0300 Subject: [PATCH 257/270] Tag release v4.3.3 Signed-off-by: Kipchirchir Sigei --- CHANGES.rst | 21 +++++++++++++++++++++ onadata/__init__.py | 2 +- setup.cfg | 2 +- 3 files changed, 23 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 5e098561b4..270a86eb78 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,27 @@ Changelog for Onadata ``* represents releases that introduce new migrations`` +v4.3.3(2024-07-10) +------------------ +- Add Entity related data to forms on project retrieve endpoint + `PR #2639 ` + [@kelvin-muchiri] +- Ignore EntityList model permissions on API endpoints + `PR #2635 ` + [@kelvin-muchiri] +- Docs update + `PR #2629 ` + [@ukanga] +- Make pending migrations + `PR #2632 ` + [@kelvin-muchiri] +- Check attempts is truthy before int comparison + `PR #2618 ` + [@ukanga] +- Tune migration performance + `PR #2631 ` + [@kelvin-muchiri] + v4.3.2(2024-07-02) ------------------ - Security Updates diff --git a/onadata/__init__.py b/onadata/__init__.py index 8ba8c53cac..aa4b50f9f6 100644 --- a/onadata/__init__.py +++ b/onadata/__init__.py @@ -6,7 +6,7 @@ """ from __future__ import absolute_import, unicode_literals -__version__ = "4.3.2" +__version__ = "4.3.3" # This will make sure the app is always imported when diff --git a/setup.cfg b/setup.cfg index a4fb38f836..166ddcb2cf 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = onadata -version = 4.3.2 +version = 4.3.3 description = Collect Analyze and Share 
Data long_description = file: README.rst long_description_content_type = text/x-rst From 5ac0861bb2a1400825ec5bf70f6a3e7ce550cf3b Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Thu, 11 Jul 2024 14:59:40 +0300 Subject: [PATCH 258/270] Entities search (#2643) * add search support for Entities * refactor code * fix incorrect type hint * add test case * add tests * update docs * update docs * update docs * fix lint warning unused-import * update docs --- docs/entities.md | 44 +++++++++++++++- .../viewsets/test_entity_list_viewset.py | 52 ++++++++++++++++++- .../apps/api/viewsets/entity_list_viewset.py | 34 ++++++++---- 3 files changed, 117 insertions(+), 13 deletions(-) diff --git a/docs/entities.md b/docs/entities.md index de2da0fce7..7a69ed02f1 100644 --- a/docs/entities.md +++ b/docs/entities.md @@ -53,7 +53,11 @@ Body: `GET /api/v2/entity-lists` -This endpoint is used to get all EntityList datasets the user permission to view. +This endpoint is used to get all EntityList datasets. + +The user must have view permission for each dataset. + +The maximum number of items returned is `1000`. To get more results than this, pagination is required. Refer to getting [paginated results section](#get-a-paginated-list-of-entitylists). **Request** @@ -226,6 +230,8 @@ Status: `204 No Content` This endpoint is used to get Entities belonging to a single EntityList dataset. +The maximum number of items returned is `1000`. To get more results than this, pagination is required. Refer to getting [paginated results section](#get-a-paginated-list-of-entities). + **Request** `curl -X GET https://api.ona.io/api/v2/entity-lists/1/entities` @@ -303,6 +309,42 @@ Body: ] ``` +### Search a list of Entities + +`GET /api/v2/entity-lists/1/entities?search=` + +Limit list of Entities to those whose data partially matches the provided search term. + +Matches are case insensitive. 
+ +**Request** + +`curl -X GET https://api.ona.io/api/v2/entity-lists/1/entities?search=wallaba` + +**Response** + +Status: `200 OK` + +Body: + +``` +[ + { + "url":"http://testserver/api/v2/entity-lists/1/entities/4", + "id":4, + "uuid": "517185b4-bc06-450c-a6ce-44605dec5480", + "date_created": "2024-06-20T07:38:20.416054Z", + "data": { + "species":"wallaba", + "geometry":"-1.305796 36.791849 0 0", + "intake_notes":"Looks malnourished", + "circumference_cm":100, + "label":"100cm wallaba", + } + } +] +``` + ## Get a single Entity `GET api/v2/entity-lists//entities/` diff --git a/onadata/apps/api/tests/viewsets/test_entity_list_viewset.py b/onadata/apps/api/tests/viewsets/test_entity_list_viewset.py index 8c05092e28..fb56b4703b 100644 --- a/onadata/apps/api/tests/viewsets/test_entity_list_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_entity_list_viewset.py @@ -11,6 +11,7 @@ from onadata.apps.api.viewsets.entity_list_viewset import EntityListViewSet from onadata.apps.api.tests.viewsets.test_abstract_viewset import TestAbstractViewSet +from onadata.libs.pagination import StandardPageNumberPagination from onadata.apps.logger.models import Entity, EntityHistory, EntityList, Project from onadata.libs.models.share_project import ShareProject from onadata.libs.permissions import ROLES, OwnerRole @@ -593,7 +594,7 @@ def test_already_soft_deleted(self): @override_settings(TIME_ZONE="UTC") -class GetEntitiesTestCase(TestAbstractViewSet): +class GetEntitiesListTestCase(TestAbstractViewSet): """Tests for GET Entities""" def setUp(self): @@ -718,6 +719,21 @@ def test_pagination(self): self.assertEqual( response.data[0]["uuid"], "dbee4c32-a922-451c-9df7-42f40bf78f48" ) + # Unpaginated results do not exceed default page_size + with patch.object(StandardPageNumberPagination, "page_size", 1): + request = self.factory.get("/", **self.extra) + response = self.view(request, pk=self.entity_list.pk) + self.assertEqual(response.status_code, 200) + 
self.assertEqual(len(response.data), 1) + + # Paginated page_size should not exceed max_page_size + with patch.object(StandardPageNumberPagination, "max_page_size", 1): + request = self.factory.get( + "/", data={"page": 1, "page_size": 2}, **self.extra + ) + response = self.view(request, pk=self.entity_list.pk) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 1) def test_deleted_ignored(self): """Deleted Entities are ignored""" @@ -737,6 +753,40 @@ def test_invalid_entity_list(self): response = self.view(request, pk=sys.maxsize) self.assertEqual(response.status_code, 404) + def test_search(self): + """Search works""" + # Search data json value + request = self.factory.get("/", data={"search": "wallaba"}, **self.extra) + response = self.view(request, pk=self.entity_list.pk) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 1) + # Search data json key + request = self.factory.get("/", data={"search": "intake_notes"}, **self.extra) + response = self.view(request, pk=self.entity_list.pk) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 1) + # Search by uuid + request = self.factory.get( + "/", data={"search": "dbee4c32-a922-451c-9df7-42f40bf78f48"}, **self.extra + ) + response = self.view(request, pk=self.entity_list.pk) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 1) + # Search not found + request = self.factory.get("/", data={"search": "alkalalalkalal"}, **self.extra) + response = self.view(request, pk=self.entity_list.pk) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 0) + # Search with pagination + request = self.factory.get( + "/", + data={"search": "circumference_cm", "page": 1, "page_size": 1}, + **self.extra, + ) + response = self.view(request, pk=self.entity_list.pk) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 1) + 
@override_settings(TIME_ZONE="UTC") class GetSingleEntityTestCase(TestAbstractViewSet): diff --git a/onadata/apps/api/viewsets/entity_list_viewset.py b/onadata/apps/api/viewsets/entity_list_viewset.py index c1b2993d1b..6a4a71acd6 100644 --- a/onadata/apps/api/viewsets/entity_list_viewset.py +++ b/onadata/apps/api/viewsets/entity_list_viewset.py @@ -1,8 +1,11 @@ +from django.db.models import Q from django.shortcuts import get_object_or_404 + from rest_framework import status from rest_framework.decorators import action from rest_framework.response import Response +from rest_framework.settings import api_settings from rest_framework.viewsets import GenericViewSet from rest_framework.mixins import ( CreateModelMixin, @@ -11,14 +14,15 @@ ListModelMixin, ) - from onadata.apps.api.permissions import DjangoObjectPermissionsIgnoreModelPerm from onadata.apps.api.tools import get_baseviewset_class from onadata.apps.logger.models import Entity, EntityList from onadata.libs.filters import AnonUserEntityListFilter, EntityListProjectFilter from onadata.libs.mixins.cache_control_mixin import CacheControlMixin from onadata.libs.mixins.etags_mixin import ETagsMixin -from onadata.libs.pagination import StandardPageNumberPagination +from onadata.libs.pagination import ( + StandardPageNumberPagination, +) from onadata.libs.permissions import CAN_ADD_PROJECT_ENTITYLIST from onadata.libs.serializers.entity_serializer import ( EntityArraySerializer, @@ -56,6 +60,7 @@ class EntityListViewSet( permission_classes = (DjangoObjectPermissionsIgnoreModelPerm,) pagination_class = StandardPageNumberPagination filter_backends = (AnonUserEntityListFilter, EntityListProjectFilter) + entities_search_fields = ["uuid", "json"] def get_serializer_class(self): """Override `get_serializer_class` method""" @@ -112,15 +117,7 @@ def entities(self, request, *args, **kwargs): return Response(serializer.data) - entity_qs = ( - Entity.objects.filter( - entity_list=entity_list, - deleted_at__isnull=True, - ) - # 
To improve performance, we specify only the column(s) - # we are interested in using .only - .only("json").order_by("pk") - ) + entity_qs = self.get_queryset_entities(request, entity_list) page = self.paginate_queryset(entity_qs) if page is not None: @@ -149,3 +146,18 @@ def create(self, request, *args, **kwargs): return Response( serializer.data, status=status.HTTP_201_CREATED, headers=headers ) + + def get_queryset_entities(self, request, entity_list): + """Returns queryset for Entities""" + search_param = api_settings.SEARCH_PARAM + search = request.query_params.get(search_param, "") + queryset = Entity.objects.filter( + entity_list_id=entity_list.pk, deleted_at__isnull=True + ) + + if search: + queryset = queryset.filter(Q(json__iregex=search) | Q(uuid=search)) + + queryset = queryset.order_by("id") + + return queryset From 57e1e6c151a0e34726a8d68a1318f901275e4de3 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 10 Jul 2024 13:40:27 +0300 Subject: [PATCH 259/270] Use raw SQL to update project date_modidied field Signed-off-by: Kipchirchir Sigei --- onadata/apps/logger/models/instance.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index 611c50696a..c5d8cf4198 100644 --- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -391,7 +391,15 @@ def update_project_date_modified(instance_id, _): if current_task.request.id: raise e else: - instance.xform.project.save(update_fields=["date_modified"]) + # update project date_modified using raw SQL + cursor = connection.cursor() + sql = ( + "UPDATE logger_project SET " + "date_modified = %s " + "WHERE id = %s" + ) + params = [timezone.now(), instance.xform.project.pk] + cursor.execute(sql, params) def convert_to_serializable_date(date): From 87442af3b26fa1df00a0fa243ff833b9a2139a19 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 10 Jul 2024 13:53:08 +0300 
Subject: [PATCH 260/270] use cursor as context manager Signed-off-by: Kipchirchir Sigei --- onadata/apps/logger/models/instance.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index c5d8cf4198..4955eb9af3 100644 --- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -392,14 +392,14 @@ def update_project_date_modified(instance_id, _): raise e else: # update project date_modified using raw SQL - cursor = connection.cursor() - sql = ( - "UPDATE logger_project SET " - "date_modified = %s " - "WHERE id = %s" - ) - params = [timezone.now(), instance.xform.project.pk] - cursor.execute(sql, params) + with connection.cursor() as c: + sql = ( + "UPDATE logger_project SET " + "date_modified = %s " + "WHERE id = %s" + ) + params = [timezone.now(), instance.xform.project.pk] + c.execute(sql, params) def convert_to_serializable_date(date): From 9c417e3f1a4150deada0988bc7655a1f89f2aa72 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 10 Jul 2024 16:29:59 +0300 Subject: [PATCH 261/270] Add periodic task to batch update project date_modified Signed-off-by: Kipchirchir Sigei --- onadata/apps/logger/models/instance.py | 17 ++++++++--------- onadata/apps/logger/tasks.py | 18 +++++++++++++++++- 2 files changed, 25 insertions(+), 10 deletions(-) diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index 4955eb9af3..153889de49 100644 --- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -34,6 +34,7 @@ from onadata.celeryapp import app from onadata.libs.data.query import get_numeric_fields from onadata.libs.utils.cache_tools import ( + BATCH_PROJECT_IDS_CACHE, DATAVIEW_COUNT, IS_ORG, PROJ_NUM_DATASET_CACHE, @@ -391,15 +392,13 @@ def update_project_date_modified(instance_id, _): if current_task.request.id: raise e else: - # update project 
date_modified using raw SQL - with connection.cursor() as c: - sql = ( - "UPDATE logger_project SET " - "date_modified = %s " - "WHERE id = %s" - ) - params = [timezone.now(), instance.xform.project.pk] - c.execute(sql, params) + timeout = getattr(settings, "PROJECT_IDS_CACHE_TIMEOUT", 3600) + project_id = instance.xform.project_id + + # Log project id in cache with timeout + project_ids = cache.get(BATCH_PROJECT_IDS_CACHE, set()) + project_ids.add(project_id) + cache.set(BATCH_PROJECT_IDS_CACHE, project_ids, timeout=timeout) def convert_to_serializable_date(date): diff --git a/onadata/apps/logger/tasks.py b/onadata/apps/logger/tasks.py index cc09f523e1..fb76773793 100644 --- a/onadata/apps/logger/tasks.py +++ b/onadata/apps/logger/tasks.py @@ -1,9 +1,12 @@ import logging +from django.core.cache import cache from django.db import DatabaseError +from django.utils import timezone -from onadata.apps.logger.models import EntityList +from onadata.apps.logger.models import EntityList, Project from onadata.celeryapp import app +from onadata.libs.utils.cache_tools import BATCH_PROJECT_IDS_CACHE, safe_delete from onadata.libs.utils.project_utils import set_project_perms_to_object @@ -25,3 +28,16 @@ def set_entity_list_perms_async(entity_list_id): return set_project_perms_to_object(entity_list, entity_list.project) + + +@app.task(retry_backoff=3, autoretry_for=(DatabaseError, ConnectionError)) +def apply_project_date_modified_async(): + project_ids = cache.get(BATCH_PROJECT_IDS_CACHE, set()) + if not project_ids: + return + + # Update project date_modified field in batches + Project.objects.filter(pk__in=project_ids).update(date_modified=timezone.now()) + + # Clear cache after updating + safe_delete(BATCH_PROJECT_IDS_CACHE) From 4200a977ab5a912383aa640ff65492546ec5096d Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 10 Jul 2024 16:30:23 +0300 Subject: [PATCH 262/270] Add tests Signed-off-by: Kipchirchir Sigei --- onadata/apps/logger/tests/test_tasks.py | 32 
++++++++++++++++++++++++- onadata/libs/utils/cache_tools.py | 3 +++ 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/onadata/apps/logger/tests/test_tasks.py b/onadata/apps/logger/tests/test_tasks.py index ed75102c0e..6c6666f26b 100644 --- a/onadata/apps/logger/tests/test_tasks.py +++ b/onadata/apps/logger/tests/test_tasks.py @@ -6,11 +6,16 @@ from celery.exceptions import Retry +from django.core.cache import cache from django.db import DatabaseError from onadata.apps.logger.models import EntityList -from onadata.apps.logger.tasks import set_entity_list_perms_async +from onadata.apps.logger.tasks import ( + set_entity_list_perms_async, + apply_project_date_modified_async, +) from onadata.apps.main.tests.test_base import TestBase +from onadata.libs.utils.cache_tools import BATCH_PROJECT_IDS_CACHE from onadata.libs.utils.user_auth import get_user_default_project @@ -61,3 +66,28 @@ def test_invalid_pk(self, mock_logger, mock_set_perms): set_entity_list_perms_async.delay(sys.maxsize) mock_set_perms.assert_not_called() mock_logger.assert_called_once() + + +class UpdateProjectDateModified(TestBase): + """Tests for apply_project_date_modified_async""" + + def setUp(self): + super().setUp() + self.project = get_user_default_project(self.user) + + def test_update_project_date_modified(self): + project_ids = cache.get(BATCH_PROJECT_IDS_CACHE, set()) + project_ids.add(self.project.pk) + initial_date_modified = self.project.date_modified + cache.set(BATCH_PROJECT_IDS_CACHE, project_ids, timeout=300) + + apply_project_date_modified_async.delay() + self.project.refresh_from_db() + current_date_modified = self.project.date_modified + + # check that date_modified has changed + self.assertNotEqual(initial_date_modified, current_date_modified) + + # check if current date modified is greater than initial + self.assertGreater(current_date_modified, initial_date_modified) + cache.delete(BATCH_PROJECT_IDS_CACHE) diff --git a/onadata/libs/utils/cache_tools.py 
b/onadata/libs/utils/cache_tools.py index 1b0ec079b8..3f874df7f7 100644 --- a/onadata/libs/utils/cache_tools.py +++ b/onadata/libs/utils/cache_tools.py @@ -60,6 +60,9 @@ # Cache timeouts used in XForm model XFORM_REGENERATE_INSTANCE_JSON_TASK_TTL = 24 * 60 * 60 # 24 hrs converted to seconds +# Batch project IDs +BATCH_PROJECT_IDS_CACHE = "batch_project_ids" + def safe_delete(key): """Safely deletes a given key from the cache.""" From 58eba739dabced7fd19e7e015423b5eb41eef857 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Wed, 10 Jul 2024 16:52:16 +0300 Subject: [PATCH 263/270] Add missing docstrings Signed-off-by: Kipchirchir Sigei --- onadata/apps/logger/tasks.py | 3 +++ onadata/apps/logger/tests/test_tasks.py | 1 + 2 files changed, 4 insertions(+) diff --git a/onadata/apps/logger/tasks.py b/onadata/apps/logger/tasks.py index fb76773793..73e665751e 100644 --- a/onadata/apps/logger/tasks.py +++ b/onadata/apps/logger/tasks.py @@ -32,6 +32,9 @@ def set_entity_list_perms_async(entity_list_id): @app.task(retry_backoff=3, autoretry_for=(DatabaseError, ConnectionError)) def apply_project_date_modified_async(): + """ + Batch update projects date_modified field periodically + """ project_ids = cache.get(BATCH_PROJECT_IDS_CACHE, set()) if not project_ids: return diff --git a/onadata/apps/logger/tests/test_tasks.py b/onadata/apps/logger/tests/test_tasks.py index 6c6666f26b..031c5dfa5d 100644 --- a/onadata/apps/logger/tests/test_tasks.py +++ b/onadata/apps/logger/tests/test_tasks.py @@ -76,6 +76,7 @@ def setUp(self): self.project = get_user_default_project(self.user) def test_update_project_date_modified(self): + """Test project date_modified field is updated""" project_ids = cache.get(BATCH_PROJECT_IDS_CACHE, set()) project_ids.add(self.project.pk) initial_date_modified = self.project.date_modified From 7df8210897fa98772ab4385fe2334a4bbce2faa2 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 11 Jul 2024 10:11:28 +0300 Subject: [PATCH 264/270] Refator to set 
date_modified for each project to cache Signed-off-by: Kipchirchir Sigei --- onadata/apps/api/tests/viewsets/test_project_viewset.py | 4 ++++ onadata/apps/logger/models/instance.py | 6 +++--- onadata/apps/logger/tasks.py | 6 +++--- onadata/apps/logger/tests/test_tasks.py | 5 +++-- 4 files changed, 13 insertions(+), 8 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_project_viewset.py b/onadata/apps/api/tests/viewsets/test_project_viewset.py index 719ce522fa..5a14ef5597 100644 --- a/onadata/apps/api/tests/viewsets/test_project_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_project_viewset.py @@ -42,6 +42,7 @@ XFormVersion, ) from onadata.apps.main.models import MetaData +from onadata.apps.logger.tasks import apply_project_date_modified_async from onadata.libs import permissions as role from onadata.libs.models.share_project import ShareProject from onadata.libs.permissions import ( @@ -1946,6 +1947,9 @@ def test_last_date_modified_changes_when_adding_new_form(self): self._make_submissions() + # run cronjob to update date_modified field + apply_project_date_modified_async.delay() + self.project.refresh_from_db() self.assertNotEqual(current_last_date, self.project.date_modified) diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index 153889de49..59c6c7df41 100644 --- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -395,9 +395,9 @@ def update_project_date_modified(instance_id, _): timeout = getattr(settings, "PROJECT_IDS_CACHE_TIMEOUT", 3600) project_id = instance.xform.project_id - # Log project id in cache with timeout - project_ids = cache.get(BATCH_PROJECT_IDS_CACHE, set()) - project_ids.add(project_id) + # Log project id and date motified in cache with timeout + project_ids = cache.get(BATCH_PROJECT_IDS_CACHE, {}) + project_ids[project_id] = timezone.now() cache.set(BATCH_PROJECT_IDS_CACHE, project_ids, timeout=timeout) diff --git a/onadata/apps/logger/tasks.py 
b/onadata/apps/logger/tasks.py index 73e665751e..55b5d2b8e1 100644 --- a/onadata/apps/logger/tasks.py +++ b/onadata/apps/logger/tasks.py @@ -2,7 +2,6 @@ from django.core.cache import cache from django.db import DatabaseError -from django.utils import timezone from onadata.apps.logger.models import EntityList, Project from onadata.celeryapp import app @@ -35,12 +34,13 @@ def apply_project_date_modified_async(): """ Batch update projects date_modified field periodically """ - project_ids = cache.get(BATCH_PROJECT_IDS_CACHE, set()) + project_ids = cache.get(BATCH_PROJECT_IDS_CACHE, {}) if not project_ids: return # Update project date_modified field in batches - Project.objects.filter(pk__in=project_ids).update(date_modified=timezone.now()) + for project_id, timestamp in project_ids.items(): + Project.objects.filter(pk=project_id).update(date_modified=timestamp) # Clear cache after updating safe_delete(BATCH_PROJECT_IDS_CACHE) diff --git a/onadata/apps/logger/tests/test_tasks.py b/onadata/apps/logger/tests/test_tasks.py index 031c5dfa5d..2c258da48e 100644 --- a/onadata/apps/logger/tests/test_tasks.py +++ b/onadata/apps/logger/tests/test_tasks.py @@ -8,6 +8,7 @@ from django.core.cache import cache from django.db import DatabaseError +from django.utils import timezone from onadata.apps.logger.models import EntityList from onadata.apps.logger.tasks import ( @@ -77,8 +78,8 @@ def setUp(self): def test_update_project_date_modified(self): """Test project date_modified field is updated""" - project_ids = cache.get(BATCH_PROJECT_IDS_CACHE, set()) - project_ids.add(self.project.pk) + project_ids = cache.get(BATCH_PROJECT_IDS_CACHE, {}) + project_ids[self.project.pk] = timezone.now() initial_date_modified = self.project.date_modified cache.set(BATCH_PROJECT_IDS_CACHE, project_ids, timeout=300) From 0c6a7a3e321786763225cfd29d8479c22fece48a Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 11 Jul 2024 12:15:42 +0300 Subject: [PATCH 265/270] use instance.date_modified 
instead of timezone.now() Signed-off-by: Kipchirchir Sigei --- onadata/apps/logger/models/instance.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index 59c6c7df41..cce4df2895 100644 --- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -397,7 +397,7 @@ def update_project_date_modified(instance_id, _): # Log project id and date motified in cache with timeout project_ids = cache.get(BATCH_PROJECT_IDS_CACHE, {}) - project_ids[project_id] = timezone.now() + project_ids[project_id] = instance.date_modified cache.set(BATCH_PROJECT_IDS_CACHE, project_ids, timeout=timeout) From 25e84083417b80b73837724980731e6559bbf7a9 Mon Sep 17 00:00:00 2001 From: Kipchirchir Sigei Date: Thu, 11 Jul 2024 14:39:24 +0300 Subject: [PATCH 266/270] Cleanup Signed-off-by: Kipchirchir Sigei --- .../tests/viewsets/test_project_viewset.py | 9 -------- onadata/apps/logger/models/instance.py | 6 +++--- onadata/apps/logger/tasks.py | 8 ++++--- onadata/apps/logger/tests/test_tasks.py | 21 +++++++++++++++---- onadata/libs/utils/cache_tools.py | 4 ++-- 5 files changed, 27 insertions(+), 21 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_project_viewset.py b/onadata/apps/api/tests/viewsets/test_project_viewset.py index 5a14ef5597..1257815314 100644 --- a/onadata/apps/api/tests/viewsets/test_project_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_project_viewset.py @@ -42,7 +42,6 @@ XFormVersion, ) from onadata.apps.main.models import MetaData -from onadata.apps.logger.tasks import apply_project_date_modified_async from onadata.libs import permissions as role from onadata.libs.models.share_project import ShareProject from onadata.libs.permissions import ( @@ -1945,14 +1944,6 @@ def test_last_date_modified_changes_when_adding_new_form(self): self.assertNotEqual(last_date, current_last_date) - self._make_submissions() - - # run cronjob to update 
date_modified field - apply_project_date_modified_async.delay() - - self.project.refresh_from_db() - self.assertNotEqual(current_last_date, self.project.date_modified) - def test_anon_project_form_endpoint(self): self._project_create() self._publish_xls_form_to_project() diff --git a/onadata/apps/logger/models/instance.py b/onadata/apps/logger/models/instance.py index cce4df2895..312317e783 100644 --- a/onadata/apps/logger/models/instance.py +++ b/onadata/apps/logger/models/instance.py @@ -34,7 +34,7 @@ from onadata.celeryapp import app from onadata.libs.data.query import get_numeric_fields from onadata.libs.utils.cache_tools import ( - BATCH_PROJECT_IDS_CACHE, + PROJECT_DATE_MODIFIED_CACHE, DATAVIEW_COUNT, IS_ORG, PROJ_NUM_DATASET_CACHE, @@ -396,9 +396,9 @@ def update_project_date_modified(instance_id, _): project_id = instance.xform.project_id # Log project id and date motified in cache with timeout - project_ids = cache.get(BATCH_PROJECT_IDS_CACHE, {}) + project_ids = cache.get(PROJECT_DATE_MODIFIED_CACHE, {}) project_ids[project_id] = instance.date_modified - cache.set(BATCH_PROJECT_IDS_CACHE, project_ids, timeout=timeout) + cache.set(PROJECT_DATE_MODIFIED_CACHE, project_ids, timeout=timeout) def convert_to_serializable_date(date): diff --git a/onadata/apps/logger/tasks.py b/onadata/apps/logger/tasks.py index 55b5d2b8e1..0dacdd5d96 100644 --- a/onadata/apps/logger/tasks.py +++ b/onadata/apps/logger/tasks.py @@ -1,3 +1,5 @@ +# pylint: disable=import-error,ungrouped-imports +"""Module for logger tasks""" import logging from django.core.cache import cache @@ -5,7 +7,7 @@ from onadata.apps.logger.models import EntityList, Project from onadata.celeryapp import app -from onadata.libs.utils.cache_tools import BATCH_PROJECT_IDS_CACHE, safe_delete +from onadata.libs.utils.cache_tools import PROJECT_DATE_MODIFIED_CACHE, safe_delete from onadata.libs.utils.project_utils import set_project_perms_to_object @@ -34,7 +36,7 @@ def apply_project_date_modified_async(): """ Batch 
update projects date_modified field periodically """ - project_ids = cache.get(BATCH_PROJECT_IDS_CACHE, {}) + project_ids = cache.get(PROJECT_DATE_MODIFIED_CACHE, {}) if not project_ids: return @@ -43,4 +45,4 @@ def apply_project_date_modified_async(): Project.objects.filter(pk=project_id).update(date_modified=timestamp) # Clear cache after updating - safe_delete(BATCH_PROJECT_IDS_CACHE) + safe_delete(PROJECT_DATE_MODIFIED_CACHE) diff --git a/onadata/apps/logger/tests/test_tasks.py b/onadata/apps/logger/tests/test_tasks.py index 2c258da48e..51c1fa0d3f 100644 --- a/onadata/apps/logger/tests/test_tasks.py +++ b/onadata/apps/logger/tests/test_tasks.py @@ -16,7 +16,7 @@ apply_project_date_modified_async, ) from onadata.apps.main.tests.test_base import TestBase -from onadata.libs.utils.cache_tools import BATCH_PROJECT_IDS_CACHE +from onadata.libs.utils.cache_tools import PROJECT_DATE_MODIFIED_CACHE from onadata.libs.utils.user_auth import get_user_default_project @@ -78,10 +78,10 @@ def setUp(self): def test_update_project_date_modified(self): """Test project date_modified field is updated""" - project_ids = cache.get(BATCH_PROJECT_IDS_CACHE, {}) + project_ids = cache.get(PROJECT_DATE_MODIFIED_CACHE, {}) project_ids[self.project.pk] = timezone.now() initial_date_modified = self.project.date_modified - cache.set(BATCH_PROJECT_IDS_CACHE, project_ids, timeout=300) + cache.set(PROJECT_DATE_MODIFIED_CACHE, project_ids, timeout=300) apply_project_date_modified_async.delay() self.project.refresh_from_db() @@ -92,4 +92,17 @@ def test_update_project_date_modified(self): # check if current date modified is greater than initial self.assertGreater(current_date_modified, initial_date_modified) - cache.delete(BATCH_PROJECT_IDS_CACHE) + + # assert that cache is cleared once task completes + self.assertIsNone(cache.get(PROJECT_DATE_MODIFIED_CACHE)) + + def test_update_project_date_modified_empty_cache(self): + """Test project date modified empty cache""" + # Ensure the cache is empty, 
meaning no projects exist + cache.delete(PROJECT_DATE_MODIFIED_CACHE) + + # Run cronjob + apply_project_date_modified_async.delay() + + # Verify that no projects were updated + self.assertIsNone(cache.get(PROJECT_DATE_MODIFIED_CACHE)) # Cache should remain empty diff --git a/onadata/libs/utils/cache_tools.py b/onadata/libs/utils/cache_tools.py index 3f874df7f7..b2661536be 100644 --- a/onadata/libs/utils/cache_tools.py +++ b/onadata/libs/utils/cache_tools.py @@ -60,8 +60,8 @@ # Cache timeouts used in XForm model XFORM_REGENERATE_INSTANCE_JSON_TASK_TTL = 24 * 60 * 60 # 24 hrs converted to seconds -# Batch project IDs -BATCH_PROJECT_IDS_CACHE = "batch_project_ids" +# Project date modified cache +PROJECT_DATE_MODIFIED_CACHE = "project_date_modified" def safe_delete(key): From 1d3c637003217af8f26136c80803bddc051aea4e Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri <kelvin.muchiri@ona.io> Date: Tue, 16 Jul 2024 09:44:26 +0300 Subject: [PATCH 267/270] Add support for defining Entity properties within grouped sections (#2644) * add support for Entity properties within grouped sections * refactor test * update comment * address failing tests * address failing test * refactor code * update docstring * update max_runs flaky test * rename variable --- .../tests/viewsets/test_attachment_viewset.py | 2 +- .../apps/logger/models/registration_form.py | 18 +++-- .../tests/models/test_registration_form.py | 32 +++++++- onadata/libs/tests/utils/test_logger_tools.py | 76 +++++++++++++++++-- onadata/libs/utils/logger_tools.py | 50 +++++------- 5 files changed, 134 insertions(+), 44 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_attachment_viewset.py b/onadata/apps/api/tests/viewsets/test_attachment_viewset.py index 9194345ec9..95b0948d1e 100644 --- a/onadata/apps/api/tests/viewsets/test_attachment_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_attachment_viewset.py @@ -33,7 +33,7 @@ def setUp(self): self._publish_xls_form_to_project() - @flaky(max_runs=3) + @flaky(max_runs=10) def 
test_retrieve_view(self): self._submit_transport_instance_w_attachment() diff --git a/onadata/apps/logger/models/registration_form.py b/onadata/apps/logger/models/registration_form.py index 2d38a183b6..314b891c00 100644 --- a/onadata/apps/logger/models/registration_form.py +++ b/onadata/apps/logger/models/registration_form.py @@ -57,12 +57,20 @@ def get_save_to(self, version: str | None = None) -> dict[str, str]: xform_json = self.xform.json result = {} - fields = xform_json.get("children", []) - entity_properties = filter( - lambda field: "bind" in field and "entities:saveto" in field["bind"], fields - ) + children = xform_json.get("children", []) + + def get_entity_property_fields(form_fields): + property_fields = [] + + for field in form_fields: + if "bind" in field and "entities:saveto" in field["bind"]: + property_fields.append(field) + elif field.get("children", []): + property_fields += get_entity_property_fields(field["children"]) + + return property_fields - for field in entity_properties: + for field in get_entity_property_fields(children): alias = field["bind"]["entities:saveto"] result[alias] = field["name"] diff --git a/onadata/apps/logger/tests/models/test_registration_form.py b/onadata/apps/logger/tests/models/test_registration_form.py index a22dd3ceaa..b5ef697928 100644 --- a/onadata/apps/logger/tests/models/test_registration_form.py +++ b/onadata/apps/logger/tests/models/test_registration_form.py @@ -8,7 +8,7 @@ from django.db.utils import IntegrityError from onadata.apps.main.tests.test_base import TestBase -from onadata.apps.logger.models import RegistrationForm, EntityList, XFormVersion +from onadata.apps.logger.models import RegistrationForm, EntityList, XForm, XFormVersion class RegistrationFormTestCase(TestBase): @@ -45,12 +45,12 @@ def test_creation(self, mock_now): def test_get_save_to(self): """Method `get_save_to` works correctly""" - form = RegistrationForm.objects.create( + registration_form = RegistrationForm.objects.create( 
entity_list=self.entity_list, xform=self.xform, ) self.assertEqual( - form.get_save_to(), + registration_form.get_save_to(), { "geometry": "location", "species": "species", @@ -132,13 +132,37 @@ def test_get_save_to(self): json=json.dumps(x_version_json), ) self.assertEqual( - form.get_save_to("x"), + registration_form.get_save_to("x"), { "location": "location", "species": "species", "circumference": "circumference", }, ) + # Properties within grouped sections + group_md = """ + | survey | + | | type | name | label | save_to | + | | begin group | tree | Tree | | + | | geopoint | location | Location | geometry| + | | text | species | Species | species | + | | end group | | | | + | settings| | | | | + | | form_title | form_id | instance_name| version | + | | Group | group | ${species} | 2022110901| + | entities| list_name | label | | | + | | trees | ${species}| | | + """ + self._publish_markdown(group_md, self.user, self.project, id_string="group") + xform = XForm.objects.get(id_string="group") + registration_form = RegistrationForm.objects.get( + xform=xform, entity_list=self.entity_list + ) + + self.assertEqual( + registration_form.get_save_to(), + {"geometry": "location", "species": "species"}, + ) def test_entity_list_xform_unique(self): """No duplicates allowed for existing entity_list and xform""" diff --git a/onadata/libs/tests/utils/test_logger_tools.py b/onadata/libs/tests/utils/test_logger_tools.py index e133dd1986..052f1a39a9 100644 --- a/onadata/libs/tests/utils/test_logger_tools.py +++ b/onadata/libs/tests/utils/test_logger_tools.py @@ -14,7 +14,14 @@ from defusedxml.ElementTree import ParseError from onadata.apps.logger.import_tools import django_file -from onadata.apps.logger.models import Instance, Entity, RegistrationForm, SurveyType +from onadata.apps.logger.models import ( + Instance, + Entity, + EntityList, + RegistrationForm, + SurveyType, + XForm, +) from onadata.apps.logger.xform_instance_parser import AttachmentNameError from 
onadata.apps.main.tests.test_base import TestBase from onadata.libs.test_utils.pyxform_test_case import PyxformTestCase @@ -651,8 +658,8 @@ def test_handle_parse_error(self): self.assertContains(ret[0].content.decode(), "Improperly formatted XML.") -class CreateEntityTestCase(TestBase): - """Tests for method `create_entity`""" +class CreateEntityFromInstanceTestCase(TestBase): + """Tests for method `create_entity_from_instance`""" def setUp(self): super().setUp() @@ -675,17 +682,18 @@ def setUp(self): "" "" ) - survey_type = SurveyType.objects.create(slug="slug-foo") + self.survey_type = SurveyType.objects.create(slug="slug-foo") instance = Instance( xform=self.xform, xml=self.xml, version=self.xform.version, - survey_type=survey_type, + survey_type=self.survey_type, ) # We use bulk_create to avoid calling create_entity signal Instance.objects.bulk_create([instance]) self.instance = Instance.objects.first() self.registration_form = RegistrationForm.objects.first() + self.entity_list = EntityList.objects.get(name="trees") def test_entity_created(self): """Entity is created successfully""" @@ -721,3 +729,61 @@ def test_entity_created(self): self.assertEqual(entity_history.json, expected_json) self.assertEqual(entity_history.form_version, self.xform.version) self.assertEqual(entity_history.created_by, self.instance.user) + + def test_grouped_section(self): + """Entity properties within grouped section""" + group_md = """ + | survey | + | | type | name | label | save_to | + | | begin group | tree | Tree | | + | | geopoint | location | Location | geometry| + | | text | species | Species | species | + | | end group | | | | + | settings| | | | | + | | form_title | form_id | instance_name| version | + | | Group | group | ${species} | 2022110901| + | entities| list_name | label | | | + | | trees | ${species}| | | + """ + self._publish_markdown(group_md, self.user, self.project, id_string="group") + xform = XForm.objects.get(id_string="group") + xml = ( + '' + '' + 
"9833e23e6c6147298e0ae2d691dc1e6f" + "" + "-1.286905 36.772845 0 0" + "purpleheart" + "" + "" + "uuid:b817c598-a215-4fa9-ba78-a7c738bd1f91" + "purpleheart" + '' + "" + "" + "" + "" + ) + instance = Instance( + xform=xform, + xml=xml, + version=xform.version, + survey_type=self.survey_type, + ) + # We use bulk_create to avoid calling create_entity signal + Instance.objects.bulk_create([instance]) + instance = Instance.objects.order_by("pk").last() + registration_form = RegistrationForm.objects.get( + xform=xform, entity_list=self.entity_list + ) + create_entity_from_instance(instance, registration_form) + entity = Entity.objects.first() + expected_json = { + "geometry": "-1.286905 36.772845 0 0", + "species": "purpleheart", + "label": "purpleheart", + } + + self.assertEqual(Entity.objects.count(), 1) + self.assertCountEqual(entity.json, expected_json) diff --git a/onadata/libs/utils/logger_tools.py b/onadata/libs/utils/logger_tools.py index afbb501736..f18e476e1f 100644 --- a/onadata/libs/utils/logger_tools.py +++ b/onadata/libs/utils/logger_tools.py @@ -994,20 +994,13 @@ def get_entity_json_from_instance( # Field names with an alias defined property_fields = list(mapped_properties.values()) - def convert_to_alias(field_name: str) -> str: - """Convert field name to it's alias""" - alias_field_name = field_name - # We split along / to take care of group questions - parts = field_name.split("/") - # Replace field parts with alias - for part in parts: - if part in property_fields: - for alias, field in mapped_properties.items(): - if field == part: - alias_field_name = alias_field_name.replace(field, alias) - break - - return alias_field_name + def get_field_alias(field_name: str) -> str: + """Get the alias (save_to value) of a form field""" + for alias, field in mapped_properties.items(): + if field == field_name: + return alias + + return field_name def parse_instance_json(data: dict[str, Any]) -> None: """Parse the original json, replacing field names with their 
alias @@ -1015,25 +1008,24 @@ def parse_instance_json(data: dict[str, Any]) -> None: The data keys are modified in place """ for field_name in list(data): - if isinstance(data[field_name], list): - # Handle repeat question - for child_data in data[field_name]: - parse_instance_json(child_data) + field_data = data[field_name] + del data[field_name] - else: - if field_name in property_fields: - alias_field_name = convert_to_alias(field_name) + if field_name.startswith("formhub"): + continue + + if field_name.startswith("meta"): + if field_name == "meta/entity/label": + data["label"] = field_data - if alias_field_name != field_name: - data[alias_field_name] = data[field_name] - del data[field_name] + continue - elif field_name == "meta/entity/label": - data["label"] = data["meta/entity/label"] - del data["meta/entity/label"] + # We extract field names within grouped sections + ungrouped_field_name = field_name.split("/")[-1] - else: - del data[field_name] + if ungrouped_field_name in property_fields: + field_alias = get_field_alias(ungrouped_field_name) + data[field_alias] = field_data parse_instance_json(instance_json) From b130a3db09ab36b7e51e3436633558c33a822ca5 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Tue, 16 Jul 2024 14:14:23 +0300 Subject: [PATCH 268/270] Remove default ordering on XForm, Attachment model (#2645) * remove default ordering on XForm ordering creates a performance bottleneck and should be avoided if unnecessary * order queryset in XFormViewSet * restore original file * refactor code * update max_runs flaky test * update max_runs flaky test * remove default ordering on Attachment model --- .../api/tests/viewsets/test_data_viewset.py | 2 +- .../apps/api/viewsets/attachment_viewset.py | 2 +- onadata/apps/api/viewsets/xform_viewset.py | 18 +++++++++------- ..._attachment_options_alter_xform_options.py | 21 +++++++++++++++++++ onadata/apps/logger/models/attachment.py | 1 - onadata/apps/logger/models/xform.py | 7 +++---- 
.../logger/tests/test_briefcase_client.py | 2 +- 7 files changed, 38 insertions(+), 15 deletions(-) create mode 100644 onadata/apps/logger/migrations/0021_alter_attachment_options_alter_xform_options.py diff --git a/onadata/apps/api/tests/viewsets/test_data_viewset.py b/onadata/apps/api/tests/viewsets/test_data_viewset.py index 1897434b1c..41f45695d0 100644 --- a/onadata/apps/api/tests/viewsets/test_data_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_data_viewset.py @@ -3766,7 +3766,7 @@ def setUp(self): self.logger = logging.getLogger("console_logger") # pylint: disable=invalid-name,too-many-locals - @flaky(max_runs=10) + @flaky(max_runs=15) def test_data_retrieve_instance_osm_format(self): """Test /data endpoint OSM format.""" filenames = [ diff --git a/onadata/apps/api/viewsets/attachment_viewset.py b/onadata/apps/api/viewsets/attachment_viewset.py index 6552044712..40ca21949e 100644 --- a/onadata/apps/api/viewsets/attachment_viewset.py +++ b/onadata/apps/api/viewsets/attachment_viewset.py @@ -112,7 +112,7 @@ def list(self, request, *args, **kwargs): # pylint: disable=attribute-defined-outside-init self.object_list = self.filter_queryset(self.get_queryset()) - page = self.paginate_queryset(self.object_list) + page = self.paginate_queryset(self.object_list.order_by("pk")) if page is not None: serializer = self.get_serializer(page, many=True) diff --git a/onadata/apps/api/viewsets/xform_viewset.py b/onadata/apps/api/viewsets/xform_viewset.py index a4b5d5f3e2..acd7608798 100644 --- a/onadata/apps/api/viewsets/xform_viewset.py +++ b/onadata/apps/api/viewsets/xform_viewset.py @@ -555,7 +555,7 @@ def retrieve(self, request, *args, **kwargs): # pylint: disable=attribute-defined-outside-init self.object_list = self._get_public_forms_queryset() - page = self.paginate_queryset(self.object_list) + page = self.paginate_queryset(self.object_list.order_by("pk")) if page is not None: serializer = self.get_serializer(page, many=True) else: @@ -725,9 +725,11 @@ def 
data_import(self, request, *args, **kwargs): return Response( data=resp, - status=status.HTTP_200_OK - if resp.get("error") is None - else status.HTTP_400_BAD_REQUEST, + status=( + status.HTTP_200_OK + if resp.get("error") is None + else status.HTTP_400_BAD_REQUEST + ), ) @action(methods=["POST", "GET"], detail=True) @@ -795,9 +797,11 @@ def csv_import(self, request, *args, **kwargs): return Response( data=resp, - status=status.HTTP_200_OK - if resp.get("error") is None - else status.HTTP_400_BAD_REQUEST, + status=( + status.HTTP_200_OK + if resp.get("error") is None + else status.HTTP_400_BAD_REQUEST + ), ) def partial_update(self, request, *args, **kwargs): diff --git a/onadata/apps/logger/migrations/0021_alter_attachment_options_alter_xform_options.py b/onadata/apps/logger/migrations/0021_alter_attachment_options_alter_xform_options.py new file mode 100644 index 0000000000..f0cf5b3e47 --- /dev/null +++ b/onadata/apps/logger/migrations/0021_alter_attachment_options_alter_xform_options.py @@ -0,0 +1,21 @@ +# Generated by Django 4.2.13 on 2024-07-16 10:46 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('logger', '0020_rename_logger_inst_deleted_at_da31a3_idx_logger_inst_deleted_da31a3_idx_and_more'), + ] + + operations = [ + migrations.AlterModelOptions( + name='attachment', + options={}, + ), + migrations.AlterModelOptions( + name='xform', + options={'permissions': (('view_xform_all', 'Can view all associated data'), ('view_xform_data', 'Can view submitted data'), ('report_xform', 'Can make submissions to the form'), ('move_xform', 'Can move form between projects'), ('transfer_xform', 'Can transfer form ownership.'), ('can_export_xform_data', 'Can export form data'), ('delete_submission', 'Can delete submissions from form')), 'verbose_name': 'XForm', 'verbose_name_plural': 'XForms'}, + ), + ] diff --git a/onadata/apps/logger/models/attachment.py b/onadata/apps/logger/models/attachment.py index 
e44f7d18a7..96f55b6061 100644 --- a/onadata/apps/logger/models/attachment.py +++ b/onadata/apps/logger/models/attachment.py @@ -85,7 +85,6 @@ class Attachment(models.Model): class Meta: app_label = "logger" - ordering = ("pk",) def save(self, *args, **kwargs): if self.media_file and self.mimetype == "": diff --git a/onadata/apps/logger/models/xform.py b/onadata/apps/logger/models/xform.py index 546d19c5dd..2c4dbe8a8e 100644 --- a/onadata/apps/logger/models/xform.py +++ b/onadata/apps/logger/models/xform.py @@ -456,9 +456,9 @@ def get_mongo_field_names_dict(self): """ names = {} for elem in self.get_survey_elements(): - names[ - _encode_for_mongo(str(elem.get_abbreviated_xpath())) - ] = elem.get_abbreviated_xpath() + names[_encode_for_mongo(str(elem.get_abbreviated_xpath()))] = ( + elem.get_abbreviated_xpath() + ) return names survey_elements = property(get_survey_elements) @@ -903,7 +903,6 @@ class Meta: ) verbose_name = gettext_lazy("XForm") verbose_name_plural = gettext_lazy("XForms") - ordering = ("pk",) permissions = ( ("view_xform_all", _("Can view all associated data")), ("view_xform_data", _("Can view submitted data")), diff --git a/onadata/apps/logger/tests/test_briefcase_client.py b/onadata/apps/logger/tests/test_briefcase_client.py index 32394fcd64..90121f7834 100644 --- a/onadata/apps/logger/tests/test_briefcase_client.py +++ b/onadata/apps/logger/tests/test_briefcase_client.py @@ -169,7 +169,7 @@ def _download_submissions(self): mocker.head(requests_mock.ANY, content=submission_list) self.briefcase_client.download_instances(self.xform.id_string) - @flaky(max_runs=11) + @flaky(max_runs=15) def test_download_xform_xml(self): """ Download xform via briefcase api From d7e32543b79f4520c6c9a291cd5fcb50f4ace060 Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Fri, 19 Jul 2024 16:51:57 +0300 Subject: [PATCH 269/270] Cache XForm manifest data (#2646) * cache response on xform manifest endpoint cache response on endpoint //xformsManifest/ * invalidate manifest 
cache when metadata saved * update comment * refactor code * update manifest cache TTL * update comment * update manifest cache TTL * fix incorrect cached manifest content_type * refactor code * refactor code * refactor code * refactor code * refactor code * release lock in XFormManifestRenderer * refactor code --- .../tests/viewsets/test_xform_list_viewset.py | 42 +++++++++++++++--- .../apps/api/viewsets/xform_list_viewset.py | 26 ++++++++--- onadata/apps/main/models/meta_data.py | 12 +++++- onadata/apps/main/tests/test_metadata.py | 34 +++++++++++++++ onadata/libs/renderers/renderers.py | 43 +++++++++++++++++++ onadata/libs/utils/cache_tools.py | 3 ++ 6 files changed, 145 insertions(+), 15 deletions(-) diff --git a/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py b/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py index 6ad86fed90..b1731389d9 100644 --- a/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_xform_list_viewset.py @@ -8,6 +8,7 @@ from unittest.mock import patch from django.conf import settings +from django.core.cache import cache from django.test import TransactionTestCase from django.urls import reverse @@ -789,16 +790,43 @@ def test_retrieve_xform_manifest(self): response = self.view(request, pk=self.xform.pk) self.assertEqual(response.status_code, 200) - manifest_xml = """screenshot.png%(hash)shttp://testserver/bob/xformsMedia/%(xform)s/%(pk)s.png""" # noqa - data = { - "hash": self.metadata.hash, - "pk": self.metadata.pk, - "xform": self.xform.pk, - } + hash = self.metadata.hash + xform_pk = self.xform.pk + metadata_pk = self.metadata.pk + manifest_xml = ( + '' + f"screenshot.png{hash}" + f"http://testserver/bob/xformsMedia/{xform_pk}/{metadata_pk}.png" + "" + ) content = "".join( [i.decode("utf-8").strip() for i in response.streaming_content] ) - self.assertEqual(content, manifest_xml % data) + self.assertEqual(content, manifest_xml) + 
self.assertTrue(response.has_header("X-OpenRosa-Version")) + self.assertTrue(response.has_header("X-OpenRosa-Accept-Content-Length")) + self.assertTrue(response.has_header("Date")) + self.assertEqual(response["Content-Type"], "text/xml; charset=utf-8") + # Cache is set + self.assertEqual(cache.get(f"xfm-manifest-{self.xform.pk}"), manifest_xml) + + def test_retrieve_xform_manifest_cache(self): + """Manifest cache is used if not empty""" + self._load_metadata(self.xform) + self.view = XFormListViewSet.as_view({"get": "manifest", "head": "manifest"}) + cache.set(f"xfm-manifest-{self.xform.pk}", "Test") + request = self.factory.head("/") + response = self.view(request, pk=self.xform.pk) + auth = DigestAuth("bob", "bobbob") + request = self.factory.get("/") + request.META.update(auth(request.META, response)) + response = self.view(request, pk=self.xform.pk) + + self.assertEqual(response.status_code, 200) + self.assertEqual( + response.render().content.decode("utf-8"), "Test" + ) self.assertTrue(response.has_header("X-OpenRosa-Version")) self.assertTrue(response.has_header("X-OpenRosa-Accept-Content-Length")) self.assertTrue(response.has_header("Date")) diff --git a/onadata/apps/api/viewsets/xform_list_viewset.py b/onadata/apps/api/viewsets/xform_list_viewset.py index e802cd9897..a602cfb69f 100644 --- a/onadata/apps/api/viewsets/xform_list_viewset.py +++ b/onadata/apps/api/viewsets/xform_list_viewset.py @@ -3,6 +3,7 @@ OpenRosa Form List API - https://docs.getodk.org/openrosa-form-list/ """ from django.conf import settings +from django.core.cache import cache from django.http import Http404, StreamingHttpResponse from django.shortcuts import get_object_or_404 @@ -29,6 +30,7 @@ XFormListSerializer, XFormManifestSerializer, ) +from onadata.libs.utils.cache_tools import XFORM_MANIFEST_CACHE from onadata.libs.utils.common_tags import GROUP_DELIMETER_TAG, REPEAT_INDEX_TAGS from onadata.libs.utils.export_builder import ExportBuilder @@ -169,17 +171,29 @@ def retrieve(self, 
request, *args, **kwargs): self.object.xml, headers=get_openrosa_headers(request, location=False) ) - @action(methods=["GET", "HEAD"], detail=True) + @action( + methods=["GET", "HEAD"], detail=True, renderer_classes=[XFormManifestRenderer] + ) def manifest(self, request, *args, **kwargs): """A manifest defining additional supporting objects.""" # pylint: disable=attribute-defined-outside-init - self.object = self.get_object() - object_list = MetaData.objects.filter( - data_type="media", object_id=self.object.pk - ) + xform = self.get_object() + cache_key = f"{XFORM_MANIFEST_CACHE}{xform.pk}" + cached_manifest: str | None = cache.get(cache_key) + # Ensure a previous stream has completed updating the cache by + # confirm the last tag exists + if cached_manifest is not None and cached_manifest.endswith(""): + return Response( + cached_manifest, + content_type="text/xml; charset=utf-8", + headers=get_openrosa_headers(request, location=False), + ) + + metadata_qs = MetaData.objects.filter(data_type="media", object_id=xform.pk) + renderer = XFormManifestRenderer(cache_key) return StreamingHttpResponse( - XFormManifestRenderer().stream_data(object_list, self.get_serializer), + renderer.stream_data(metadata_qs, self.get_serializer), content_type="text/xml; charset=utf-8", headers=get_openrosa_headers(request, location=False), ) diff --git a/onadata/apps/main/models/meta_data.py b/onadata/apps/main/models/meta_data.py index 0ac4e936d0..430c19cd0b 100644 --- a/onadata/apps/main/models/meta_data.py +++ b/onadata/apps/main/models/meta_data.py @@ -23,7 +23,11 @@ import requests -from onadata.libs.utils.cache_tools import XFORM_METADATA_CACHE, safe_delete +from onadata.libs.utils.cache_tools import ( + XFORM_METADATA_CACHE, + XFORM_MANIFEST_CACHE, + safe_delete, +) from onadata.libs.utils.common_tags import ( GOOGLE_SHEET_DATA_TYPE, TEXTIT, @@ -563,7 +567,11 @@ def clear_cached_metadata_instance_object( """ Clear the cache for the metadata object. 
""" - safe_delete(f"{XFORM_METADATA_CACHE}{instance.object_id}") + xform_id = instance.object_id + safe_delete(f"{XFORM_METADATA_CACHE}{xform_id}") + + if instance.data_type == "media": + safe_delete(f"{XFORM_MANIFEST_CACHE}{xform_id}") # pylint: disable=unused-argument diff --git a/onadata/apps/main/tests/test_metadata.py b/onadata/apps/main/tests/test_metadata.py index e038c3db88..5f3b80c1b7 100644 --- a/onadata/apps/main/tests/test_metadata.py +++ b/onadata/apps/main/tests/test_metadata.py @@ -2,6 +2,8 @@ """ Test MetaData model. """ +from django.core.cache import cache + from onadata.apps.logger.models import Instance, Project, XForm from onadata.apps.main.models.meta_data import MetaData, unique_type_for_form, upload_to from onadata.apps.main.tests.test_base import TestBase @@ -146,3 +148,35 @@ def test_upload_to_with_project_and_xform_instance(self): upload_to(metadata, filename), "{}/{}/{}".format(self.user.username, "formid-media", filename), ) + + def test_caches_cleared(self): + """Related caches are cleared on creating or updating""" + key_1 = f"xfs-get_xform_metadata{self.xform.pk}" + key_2 = f"xfm-manifest-{self.xform.pk}" + cache.set(key_1, "foo") + cache.set(key_2, "bar") + enketo_url = "https://dmfrm.enketo.org/webform" + # Metadata cache is cleared if any MetaData is created + MetaData.enketo_url(self.xform, enketo_url) + + self.assertIsNone(cache.get(key_1)) + self.assertIsNotNone(cache.get(key_2)) + + # Metadata cache is cleared if any MetaData is updated + metadata = MetaData.objects.first() + cache.set(key_1, "foo") + metadata.save() + + self.assertIsNone(cache.get(key_1)) + self.assertIsNotNone(cache.get(key_2)) + + # Manifest cache is cleared if `media` MetaData is created + metadata = MetaData.objects.create(data_type="media", object_id=self.xform.id) + + self.assertIsNone(cache.get(key_2)) + + # Manifest cache is cleared if `media` MetaData is updated + cache.set(key_2, "bar") + metadata.save() + + self.assertIsNone(cache.get(key_2)) diff 
--git a/onadata/libs/renderers/renderers.py b/onadata/libs/renderers/renderers.py index b7cf15e04f..f371783dd7 100644 --- a/onadata/libs/renderers/renderers.py +++ b/onadata/libs/renderers/renderers.py @@ -8,11 +8,13 @@ from io import BytesIO, StringIO from typing import Tuple +from django.core.cache import cache from django.utils import timezone from django.utils.dateparse import parse_datetime from django.utils.encoding import force_str, smart_str from django.utils.xmlutils import SimplerXMLGenerator + import six from rest_framework import negotiation from rest_framework.renderers import ( @@ -25,8 +27,13 @@ from rest_framework_xml.renderers import XMLRenderer from six import iteritems +from onadata.libs.utils.cache_tools import ( + XFORM_MANIFEST_CACHE_TTL, + XFORM_MANIFEST_CACHE_LOCK_TTL, +) from onadata.libs.utils.osm import get_combined_osm + IGNORE_FIELDS = [ "formhub/uuid", "meta/contactID", @@ -378,6 +385,42 @@ class XFormManifestRenderer(XFormListRenderer, StreamRendererMixin): element_node = "mediaFile" xmlns = "http://openrosa.org/xforms/xformsManifest" + def __init__(self, cache_key=None) -> None: + self.cache_key = cache_key + self.can_update_cache = False + self.cache_lock_key = None + + def _get_current_buffer_data(self): + data = super()._get_current_buffer_data() + + if data and self.can_update_cache: + data = data.strip() + cached_manifest: str | None = cache.get(self.cache_key) + + if cached_manifest is not None: + cached_manifest += data + cache.set(self.cache_key, cached_manifest, XFORM_MANIFEST_CACHE_TTL) + + if data.endswith(""): + # We are done, release the lock + cache.delete(self.cache_lock_key) + + else: + cache.set(self.cache_key, data, XFORM_MANIFEST_CACHE_TTL) + + return data + + def stream_data(self, data, serializer): + if self.cache_key: + # In the case of concurrent requests, we ensure only the first + # request is updating the cache + self.cache_lock_key = f"{self.cache_key}_lock" + self.can_update_cache = cache.add( + 
self.cache_lock_key, "true", XFORM_MANIFEST_CACHE_LOCK_TTL + ) + + return super().stream_data(data, serializer) + # pylint: disable=too-few-public-methods class TemplateXMLRenderer(TemplateHTMLRenderer): diff --git a/onadata/libs/utils/cache_tools.py b/onadata/libs/utils/cache_tools.py index b2661536be..5491bf4f81 100644 --- a/onadata/libs/utils/cache_tools.py +++ b/onadata/libs/utils/cache_tools.py @@ -56,9 +56,12 @@ XFORM_SUBMISSION_STAT = "xfm-get_form_submissions_grouped_by_field-" XFORM_CHARTS = "xfm-get_form_charts-" XFORM_REGENERATE_INSTANCE_JSON_TASK = "xfm-regenerate_instance_json_task-" +XFORM_MANIFEST_CACHE = "xfm-manifest-" # Cache timeouts used in XForm model XFORM_REGENERATE_INSTANCE_JSON_TASK_TTL = 24 * 60 * 60 # 24 hrs converted to seconds +XFORM_MANIFEST_CACHE_TTL = 10 * 60 # 10 minutes converted to seconds +XFORM_MANIFEST_CACHE_LOCK_TTL = 300 # 5 minutes converted to seconds # Project date modified cache PROJECT_DATE_MODIFIED_CACHE = "project_date_modified" From 98064d140ee01a41fcafc860938945c205c92f7f Mon Sep 17 00:00:00 2001 From: Kelvin Muchiri Date: Thu, 25 Jul 2024 14:31:23 +0300 Subject: [PATCH 270/270] Exclude deleted Entities from form's manifest data (#2648) * exclude deleted EntityList in endpoint /api/v1/projects * delete form metadata on deleting EntityList * ignore deleted Entity for EntityList dataset export * index deleted_at field for model EntityList, Entity * update dataset info when hard deleting Entity * delete form metadata when EntityList is hard deleted * add test * resolve cyclic dep * resolve cyclic dep * delete EntityList in an atomic transaction * fix failing test * remove unnecessary .all() --- .../viewsets/test_entity_list_viewset.py | 7 +- .../tests/viewsets/test_project_viewset.py | 15 +++++ ...logger_enti_deleted_66eee5_idx_and_more.py | 45 +++++++++++++ onadata/apps/logger/models/entity.py | 2 +- onadata/apps/logger/models/entity_list.py | 32 ++++++--- onadata/apps/logger/signals.py | 67 ++++++++++++++++--- 
.../apps/logger/tests/models/test_entity.py | 15 +++++ .../logger/tests/models/test_entity_list.py | 36 ++++++++++ .../libs/serializers/project_serializer.py | 6 +- onadata/libs/tests/utils/test_export_tools.py | 37 ++++++++++ onadata/libs/utils/export_tools.py | 2 +- 11 files changed, 240 insertions(+), 24 deletions(-) create mode 100644 onadata/apps/logger/migrations/0022_entity_logger_enti_deleted_66eee5_idx_and_more.py diff --git a/onadata/apps/api/tests/viewsets/test_entity_list_viewset.py b/onadata/apps/api/tests/viewsets/test_entity_list_viewset.py index fb56b4703b..03de565832 100644 --- a/onadata/apps/api/tests/viewsets/test_entity_list_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_entity_list_viewset.py @@ -1119,12 +1119,15 @@ def test_delete(self, mock_now): request = self.factory.delete("/", **self.extra) response = self.view(request, pk=self.entity_list.pk, entity_pk=self.entity.pk) self.entity.refresh_from_db() + self.entity_list.refresh_from_db() + self.assertEqual(response.status_code, 204) self.assertEqual(self.entity.deleted_at, date) self.assertEqual(self.entity.deleted_by, self.user) - self.entity_list.refresh_from_db() self.assertEqual(self.entity_list.num_entities, 0) - self.assertEqual(self.entity_list.last_entity_update_time, date) + self.assertEqual( + self.entity_list.last_entity_update_time, self.entity.date_modified + ) def test_invalid_entity(self): """Invalid Entity is handled""" diff --git a/onadata/apps/api/tests/viewsets/test_project_viewset.py b/onadata/apps/api/tests/viewsets/test_project_viewset.py index 1257815314..166dd1bfae 100644 --- a/onadata/apps/api/tests/viewsets/test_project_viewset.py +++ b/onadata/apps/api/tests/viewsets/test_project_viewset.py @@ -2796,6 +2796,7 @@ def test_get_project_w_registration_form(self): request = self.factory.get("/", **self.extra) response = view(request, pk=self.project.pk) entity_list = EntityList.objects.first() + self.assertEqual(response.status_code, 200) self.assertEqual( 
response.data["forms"][0]["contributes_entities_to"], @@ -2805,6 +2806,13 @@ def test_get_project_w_registration_form(self): "is_active": True, }, ) + # Soft delete dataset + entity_list.soft_delete() + request = self.factory.get("/", **self.extra) + response = view(request, pk=self.project.pk) + + self.assertEqual(response.status_code, 200) + self.assertIsNone(response.data["forms"][0]["contributes_entities_to"]) def test_get_project_w_follow_up_form(self): """Retrieve project with Entity follow up form""" @@ -2825,6 +2833,13 @@ def test_get_project_w_follow_up_form(self): } ], ) + # Soft delete dataset + entity_list.soft_delete() + request = self.factory.get("/", **self.extra) + response = view(request, pk=self.project.pk) + + self.assertEqual(response.status_code, 200) + self.assertEqual(response.data["forms"][0]["consumes_entities_from"], []) class GetProjectInvitationListTestCase(TestAbstractViewSet): diff --git a/onadata/apps/logger/migrations/0022_entity_logger_enti_deleted_66eee5_idx_and_more.py b/onadata/apps/logger/migrations/0022_entity_logger_enti_deleted_66eee5_idx_and_more.py new file mode 100644 index 0000000000..2f04e399f1 --- /dev/null +++ b/onadata/apps/logger/migrations/0022_entity_logger_enti_deleted_66eee5_idx_and_more.py @@ -0,0 +1,45 @@ +# Generated by Django 4.2.13 on 2024-07-23 07:38 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("logger", "0021_alter_attachment_options_alter_xform_options"), + ] + + operations = [ + migrations.SeparateDatabaseAndState( + database_operations=[ + migrations.RunSQL( + sql=( + 'CREATE INDEX CONCURRENTLY "logger_enti_deleted_66eee5_idx" ' + 'ON "logger_entity" ("deleted_at");' + ), + reverse_sql='DROP INDEX CONCURRENTLY "logger_enti_deleted_66eee5_idx";', + ), + migrations.RunSQL( + sql=( + 'CREATE INDEX CONCURRENTLY "logger_enti_deleted_476ed6_idx" ' + 'ON "logger_entitylist" ("deleted_at");' + ), + reverse_sql='DROP INDEX 
CONCURRENTLY "logger_enti_deleted_476ed6_idx";', + ), + ], + state_operations=[ + migrations.AddIndex( + model_name="entity", + index=models.Index( + fields=["deleted_at"], name="logger_enti_deleted_66eee5_idx" + ), + ), + migrations.AddIndex( + model_name="entitylist", + index=models.Index( + fields=["deleted_at"], name="logger_enti_deleted_476ed6_idx" + ), + ), + ], + ) + ] diff --git a/onadata/apps/logger/models/entity.py b/onadata/apps/logger/models/entity.py index 3fb7e3ae2b..596d7a0e32 100644 --- a/onadata/apps/logger/models/entity.py +++ b/onadata/apps/logger/models/entity.py @@ -39,11 +39,11 @@ def soft_delete(self, deleted_by=None): self.deleted_by = deleted_by self.save(update_fields=["deleted_at", "deleted_by"]) self.entity_list.num_entities = models.F("num_entities") - 1 - self.entity_list.last_entity_update_time = deletion_time self.entity_list.save() class Meta(BaseModel.Meta): app_label = "logger" + indexes = [models.Index(fields=["deleted_at"])] class EntityHistory(BaseModel): diff --git a/onadata/apps/logger/models/entity_list.py b/onadata/apps/logger/models/entity_list.py index 1734d9b2bc..669614a4d9 100644 --- a/onadata/apps/logger/models/entity_list.py +++ b/onadata/apps/logger/models/entity_list.py @@ -5,15 +5,19 @@ from django.contrib.auth import get_user_model from django.contrib.auth.models import Group, Permission from django.contrib.contenttypes.fields import GenericRelation -from django.db import models +from django.db import models, transaction from django.utils.translation import gettext_lazy as _ from django.utils import timezone + from guardian.models import UserObjectPermissionBase, GroupObjectPermissionBase from guardian.compat import user_model_label from onadata.apps.logger.models.project import Project +from onadata.apps.logger.models.xform import clear_project_cache +from onadata.apps.main.models.meta_data import MetaData from onadata.libs.models import BaseModel +from onadata.libs.utils.model_tools import queryset_iterator User 
= get_user_model() @@ -39,13 +43,6 @@ class EntityList(BaseModel): deleted_at = models.DateTimeField(null=True, blank=True) deleted_by = models.ForeignKey(User, null=True, on_delete=models.SET_NULL) - class Meta(BaseModel.Meta): - app_label = "logger" - unique_together = ( - "name", - "project", - ) - def __str__(self): return f"{self.name}|{self.project}" @@ -69,6 +66,7 @@ def properties(self) -> list[str]: return list(dataset_properties) + @transaction.atomic() def soft_delete(self, deleted_by=None): """Soft delete EntityList""" if self.deleted_at is None: @@ -76,9 +74,27 @@ def soft_delete(self, deleted_by=None): deletion_suffix = deletion_time.strftime("-deleted-at-%s") self.deleted_at = deletion_time self.deleted_by = deleted_by + original_name = self.name self.name += deletion_suffix self.name = self.name[:255] # Only first 255 characters self.save() + clear_project_cache(self.project.pk) + # Soft deleted follow up forms MetaData + metadata_qs = MetaData.objects.filter( + data_type="media", + data_value=f"entity_list {self.pk} {original_name}", + ) + + for datum in queryset_iterator(metadata_qs): + datum.soft_delete() + + class Meta(BaseModel.Meta): + app_label = "logger" + unique_together = ( + "name", + "project", + ) + indexes = [models.Index(fields=["deleted_at"])] class EntityListUserObjectPermission(UserObjectPermissionBase): diff --git a/onadata/apps/logger/signals.py b/onadata/apps/logger/signals.py index 7848ac60b3..c09f7cf375 100644 --- a/onadata/apps/logger/signals.py +++ b/onadata/apps/logger/signals.py @@ -4,12 +4,15 @@ """ from django.db import transaction from django.db.models import F -from django.db.models.signals import post_save +from django.db.models.signals import post_save, post_delete from django.dispatch import receiver +from django.utils import timezone from onadata.apps.logger.models import Entity, EntityList, Instance, RegistrationForm +from onadata.apps.logger.models.xform import clear_project_cache from 
onadata.apps.logger.xform_instance_parser import get_meta_from_xml from onadata.apps.logger.tasks import set_entity_list_perms_async +from onadata.apps.main.models.meta_data import MetaData from onadata.libs.utils.logger_tools import ( create_entity_from_instance, update_entity_from_instance, @@ -47,19 +50,50 @@ def create_or_update_entity(sender, instance, created=False, **kwargs): create_entity_from_instance(instance, registration_form) -@receiver(post_save, sender=Entity, dispatch_uid="update_entity_dataset") -def update_entity_dataset(sender, instance, created=False, **kwargs): - """Update EntityList when Entity is created or updated""" - if not instance: - return - +@receiver(post_save, sender=Entity, dispatch_uid="update_enti_el_inc_num_entities") +def increment_entity_list_num_entities(sender, instance, created=False, **kwargs): + """Increment EntityList `num_entities`""" entity_list = instance.entity_list if created: - entity_list.num_entities = F("num_entities") + 1 + # Using Queryset.update ensures we do not call the model's save method and + # signals + EntityList.objects.filter(pk=entity_list.pk).update( + num_entities=F("num_entities") + 1 + ) + + +@receiver(post_delete, sender=Entity, dispatch_uid="update_enti_el_dec_num_entities") +def decrement_entity_list_num_entities(sender, instance, **kwargs): + """Decrement EntityList `num_entities`""" + entity_list = instance.entity_list + # Using Queryset.update ensures we do not call the model's save method and + # signals + EntityList.objects.filter(pk=entity_list.pk).update( + num_entities=F("num_entities") - 1 + ) + - entity_list.last_entity_update_time = instance.date_modified - entity_list.save() +@receiver(post_delete, sender=Entity, dispatch_uid="delete_enti_el_last_update_time") +def update_last_entity_update_time_now(sender, instance, **kwargs): + """Update EntityList `last_entity_update_time`""" + entity_list = instance.entity_list + # Using Queryset.update ensures we do not call the model's save 
method and + # signals + EntityList.objects.filter(pk=entity_list.pk).update( + last_entity_update_time=timezone.now() + ) + + +@receiver(post_save, sender=Entity, dispatch_uid="update_enti_el_last_update_time") +def update_last_entity_update_time(sender, instance, **kwargs): + """Update EntityList `last_entity_update_time`""" + entity_list = instance.entity_list + # Using Queryset.update ensures we do not call the model's save method and + # signals + EntityList.objects.filter(pk=entity_list.pk).update( + last_entity_update_time=instance.date_modified + ) @receiver(post_save, sender=EntityList, dispatch_uid="set_entity_list_perms") @@ -67,3 +101,16 @@ def set_entity_list_perms(sender, instance, created=False, **kwargs): """Set project permissions to EntityList""" if created: transaction.on_commit(lambda: set_entity_list_perms_async.delay(instance.pk)) + + +@receiver(post_delete, sender=EntityList, dispatch_uid="delete_entity_list_metadata") +def delete_entity_list_metadata(sender, instance, **kwargs): + """Delete EntityList related data on delete""" + clear_project_cache(instance.project.pk) + # We get original name incase name has been modified in the case where + # EntityList was first soft deleted + entity_list_name = instance.name.split("-")[0] + MetaData.objects.filter( + data_type="media", + data_value=f"entity_list {instance.pk} {entity_list_name}", + ).delete() diff --git a/onadata/apps/logger/tests/models/test_entity.py b/onadata/apps/logger/tests/models/test_entity.py index f42073e0e1..2f66751e36 100644 --- a/onadata/apps/logger/tests/models/test_entity.py +++ b/onadata/apps/logger/tests/models/test_entity.py @@ -93,6 +93,21 @@ def test_soft_delete(self, mock_now): self.assertEqual(entity3.deleted_at, self.mocked_now) self.assertIsNone(entity3.deleted_by) + def test_hard_delete(self): + """Hard deleting updates dataset info""" + entity = Entity.objects.create(entity_list=self.entity_list) + self.entity_list.refresh_from_db() + old_last_entity_update_time 
= self.entity_list.last_entity_update_time + + self.assertEqual(self.entity_list.num_entities, 1) + + entity.delete() + self.entity_list.refresh_from_db() + new_last_entity_update_time = self.entity_list.last_entity_update_time + + self.assertEqual(self.entity_list.num_entities, 0) + self.assertTrue(old_last_entity_update_time < new_last_entity_update_time) + class EntityHistoryTestCase(TestBase): """Tests for model EntityHistory""" diff --git a/onadata/apps/logger/tests/models/test_entity_list.py b/onadata/apps/logger/tests/models/test_entity_list.py index 70b6a32521..3c2a9e6d5f 100644 --- a/onadata/apps/logger/tests/models/test_entity_list.py +++ b/onadata/apps/logger/tests/models/test_entity_list.py @@ -132,13 +132,19 @@ def test_soft_delete(self): with patch("django.utils.timezone.now") as mock_now: mock_now.return_value = self.mocked_now entity_list = EntityList.objects.create(name="trees", project=self.project) + follow_up_form = self._publish_follow_up_form(self.user) entity_list.soft_delete(self.user) entity_list.refresh_from_db() + follow_up_form_meta_datum = follow_up_form.metadata_set.get( + data_value=f"entity_list {entity_list.pk} trees" + ) + self.assertEqual(entity_list.deleted_at, self.mocked_now) self.assertEqual(entity_list.deleted_by, self.user) self.assertEqual( entity_list.name, f'trees{self.mocked_now.strftime("-deleted-at-%s")}' ) + self.assertIsNotNone(follow_up_form_meta_datum.deleted_at) # Try soft deleting soft deleted dataset entity_list.soft_delete(self.user) @@ -155,3 +161,33 @@ def test_soft_delete(self): entity_list.soft_delete() entity_list.refresh_from_db() self.assertEqual(entity_list.name, dataset_name) + + def test_hard_delete(self): + """Hard delete removes consumers' metadata""" + entity_list = EntityList.objects.create(name="trees", project=self.project) + follow_up_form = self._publish_follow_up_form(self.user) + data_value = f"entity_list {entity_list.pk} trees" + self.assertTrue( + 
follow_up_form.metadata_set.filter(data_value=data_value).exists() + ) + + entity_list.delete() + + self.assertFalse( + follow_up_form.metadata_set.filter(data_value=data_value).exists() + ) + # Hard deleted previously soft deleted dataset works + follow_up_form.delete() + entity_list = EntityList.objects.create(name="trees", project=self.project) + follow_up_form = self._publish_follow_up_form(self.user) + data_value = f"entity_list {entity_list.pk} trees" + + self.assertTrue( + follow_up_form.metadata_set.filter(data_value=data_value).exists() + ) + entity_list.soft_delete() + entity_list.delete() + + self.assertFalse( + follow_up_form.metadata_set.filter(data_value=data_value).exists() + ) diff --git a/onadata/libs/serializers/project_serializer.py b/onadata/libs/serializers/project_serializer.py index c5ff793333..b0f688002d 100644 --- a/onadata/libs/serializers/project_serializer.py +++ b/onadata/libs/serializers/project_serializer.py @@ -223,7 +223,9 @@ class BaseProjectXFormSerializer(serializers.HyperlinkedModelSerializer): def get_contributes_entities_to(self, obj: XForm): """Return the EntityList that the form contributes Entities to""" - registration_form = obj.registration_forms.first() + registration_form = obj.registration_forms.filter( + entity_list__deleted_at__isnull=True + ).first() if registration_form is None: return None @@ -236,7 +238,7 @@ def get_contributes_entities_to(self, obj: XForm): def get_consumes_entities_from(self, obj: XForm): """Return the EntityLIst that the form consumes Entities""" - queryset = obj.follow_up_forms.all() + queryset = obj.follow_up_forms.filter(entity_list__deleted_at__isnull=True) if not queryset: return [] diff --git a/onadata/libs/tests/utils/test_export_tools.py b/onadata/libs/tests/utils/test_export_tools.py index a2cf21fe59..33106bd034 100644 --- a/onadata/libs/tests/utils/test_export_tools.py +++ b/onadata/libs/tests/utils/test_export_tools.py @@ -2,6 +2,7 @@ """ Test export_tools module """ +import csv 
import json import os import shutil @@ -1022,7 +1023,43 @@ def test_generate_export_entity_list(self): }, uuid="dbee4c32-a922-451c-9df7-42f40bf78f48", ) + Entity.objects.create( + entity_list=entity_list, + json={ + "species": "purpleheart", + "geometry": "-1.286905 36.772845 0 0", + "circumference_cm": 300, + "label": "300cm purpleheart", + }, + uuid="614bda97-0a46-4d31-9661-736287edf7da", + deleted_at=timezone.now(), # deleted Entity should be ignored + ) + export = generate_entity_list_export(entity_list) self.assertIsNotNone(export) self.assertTrue(export.is_successful) self.assertEqual(GenericExport.objects.count(), 1) + export = GenericExport.objects.first() + + with open(export.full_filepath, "r") as csv_file: + csv_reader = csv.reader(csv_file) + header = next(csv_reader) + expected_header = [ + "name", + "label", + "geometry", + "species", + "circumference_cm", + ] + self.assertCountEqual(header, expected_header) + # Read all rows into a list + rows = list(csv_reader) + self.assertEqual(len(rows), 1) + expected_row = [ + "dbee4c32-a922-451c-9df7-42f40bf78f48", + "300cm purpleheart", + "-1.286905 36.772845 0 0", + "purpleheart", + "300", + ] + self.assertCountEqual(rows[0], expected_row) diff --git a/onadata/libs/utils/export_tools.py b/onadata/libs/utils/export_tools.py index 8cf16e5356..bded181e99 100644 --- a/onadata/libs/utils/export_tools.py +++ b/onadata/libs/utils/export_tools.py @@ -133,7 +133,7 @@ def get_entity_list_dataset(entity_list: EntityList) -> Iterator[dict]: An iterator of dicts which represent the json data for Entities belonging to the dataset """ - entities = Entity.objects.filter(entity_list=entity_list) + entities = Entity.objects.filter(entity_list=entity_list, deleted_at__isnull=True) dataset_properties = entity_list.properties for entity in queryset_iterator(entities):