.
+ """
+ hook_logs = HookLog.objects.filter(id__in=hooklogs_ids)
+ for hook_log in hook_logs:
+ hook_log.retry()
+ time.sleep(0.2)
+
+ return True
+
+
@shared_task
def failures_reports():
    """
    Notifies assets' owners by email of their hooks' failures that occurred
    since the previous run of this task.

    :return: bool. False if sending the report emails failed, True otherwise.
    """
    beat_schedule = settings.CELERY_BEAT_SCHEDULE.get("send-hooks-failures-reports")
    # Use `.first()` instead of `.get()`, because the task can be duplicated
    # in the admin section.
    failures_reports_period_task = PeriodicTask.objects.filter(enabled=True, task=beat_schedule.get("task"))\
        .order_by("-last_run_at").first()

    if failures_reports_period_task:

        last_run_at = failures_reports_period_task.last_run_at
        # Only failed logs whose hook opted in for email notifications.
        queryset = HookLog.objects.filter(hook__email_notification=True, status=HOOK_LOG_FAILED)
        if last_run_at:
            # Restrict to failures that happened since the previous run.
            queryset = queryset.filter(date_modified__gte=last_run_at)
        queryset = queryset.order_by("hook__asset__name", "hook__uid", "-date_modified")

        # `PeriodicTask` rows are updated every 3 minutes (default).
        # It means that if this task's interval is shorter than 3 minutes,
        # some data can be duplicated in emails.
        # Setting `beat-sync-every` to 1 makes `PeriodicTask` be updated
        # before running the task. So we need to update it manually.
        # see: http://docs.celeryproject.org/en/latest/userguide/configuration.html#beat-sync-every
        PeriodicTask.objects.filter(task=beat_schedule.get("task")).update(last_run_at=timezone.now())

        records = {}
        max_length = 0

        # Prepare data for templates.
        # All logs are grouped under their respective asset and user:
        # records[owner_id]["assets"][asset_id]["logs"] -> list of log dicts.
        for record in queryset:
            # If the owner is not in the dict yet, add them.
            if record.hook.asset.owner.id not in records:
                records[record.hook.asset.owner.id] = {
                    "username": record.hook.asset.owner.username,
                    # Language is not implemented yet.
                    # TODO add language to user table in registration process
                    "language": getattr(record.hook.asset.owner, "language", "en"),
                    "email": record.hook.asset.owner.email,
                    "assets": {}
                }

            # If the asset is not in the owner's asset dict yet, add it.
            if record.hook.asset.id not in records[record.hook.asset.owner.id]["assets"]:
                # Column width is tracked per asset; reset it for each new one.
                max_length = 0
                records[record.hook.asset.owner.id]["assets"][record.hook.asset.id] = {
                    "name": record.hook.asset.name,
                    "max_length": 0,
                    "logs": []
                }

            # Add the log to its corresponding asset and user.
            records[record.hook.asset.owner.id]["assets"][record.hook.asset.id]["logs"].append({
                "hook_name": record.hook.name,
                "uid": record.uid,
                "date_modified": record.date_modified,
                "status_code": record.status_code,
                "message": record.message
            })
            hook_name_length = len(record.hook.name)

            # `max_length` is used by the plain-text template to render
            # fixed-size columns.
            max_length = max(max_length, hook_name_length)
            records[record.hook.asset.owner.id]["assets"][record.hook.asset.id]["max_length"] = max_length

        # Get templates
        plain_text_template = get_template("reports/failures_email_body.txt")
        html_template = get_template("reports/failures_email_body.html")
        email_messages = []

        # Build one multipart (text + HTML) email per owner.
        for owner_id, record in records.items():
            variables = {
                "username": record.get("username"),
                "assets": record.get("assets")
            }
            # Localize templates with the owner's language.
            translation.activate(record.get("language"))
            text_content = plain_text_template.render(Context(variables))
            html_content = html_template.render(Context(variables))

            msg = EmailMultiAlternatives(translation.ugettext("REST Services Failure Report"), text_content,
                                         constance.config.SUPPORT_EMAIL,
                                         [record.get("email")])
            msg.attach_alternative(html_content, "text/html")
            email_messages.append(msg)

        # Send all email messages over a single SMTP connection.
        if len(email_messages) > 0:
            try:
                with get_connection() as connection:
                    connection.send_messages(email_messages)
            except Exception as e:
                logging.error("failures_reports - {}".format(str(e)), exc_info=True)
                return False

    return True
diff --git a/kobo/apps/hook/templates/reports/failures_email_body.html b/kobo/apps/hook/templates/reports/failures_email_body.html
new file mode 100644
index 0000000000..236b2eea19
--- /dev/null
+++ b/kobo/apps/hook/templates/reports/failures_email_body.html
@@ -0,0 +1,38 @@
+{% load i18n %}
+{% load strings %}
+{% trans "Asset" as i18n_asset %}
+{% trans "Hook" as i18n_hook %}
+{% trans "Submission" as i18n_submission %}
+{% trans "Status code" as i18n_status_code %}
+{% trans "Message" as i18n_message %}
+{% trans "Date" as i18n_date %}
+
+{% trans "Dear" %} {{ username }},
+{% trans "Some submissions could not be sent to their external endpoint." %}
+
+{% for asset_id, asset in assets.items %}
+ {{ i18n_asset }}: {{ asset.name }}
+
+
+ | {{ i18n_hook }} |
+ {{ i18n_submission }} |
+ {{ i18n_status_code }} |
+ {{ i18n_message }} |
+ {{ i18n_date }} |
+
+ {% for log in asset.logs %}
+
+ | {{ log.hook_name }} |
+ {{ log.uid }} |
+ {{ log.status_code }} |
+ {{ log.message|truncatechars:50 }} |
+ {{ log.date_modified|date:"Y-m-d H:i" }} UTC |
+
+ {% endfor %}
+
+{% endfor %}
+
+
+{% trans "Best," %}
+KoBoToolbox
+
diff --git a/kobo/apps/hook/templates/reports/failures_email_body.txt b/kobo/apps/hook/templates/reports/failures_email_body.txt
new file mode 100644
index 0000000000..3e5aaecc2b
--- /dev/null
+++ b/kobo/apps/hook/templates/reports/failures_email_body.txt
@@ -0,0 +1,30 @@
+{% load i18n %}
+{% load strings %}
+{% trans "Asset" as i18n_asset %}
+{% trans "Hook" as i18n_hook %}
+{% trans "Submission" as i18n_submission %}
+{% trans "Status code" as i18n_status_code %}
+{% trans "Message" as i18n_message %}
+{% trans "Date" as i18n_date %}
+
+{% trans "Dear" %} {{ username }},
+
+{% trans "Some submissions could not be sent to their external endpoint." %}
+
+{% for asset_id, asset in assets.items %}
+
+{{ i18n_asset }}: {{ asset.name }}
+
+ {% with max_length=asset.max_length|add:"2" %}
+ {{ i18n_hook|center:max_length }}|{{ i18n_submission|center:25 }}|{{ i18n_status_code|center:15 }}|{{ i18n_message|truncatechars:23|center:25 }}|{{ i18n_date|center:25 }}
+ {{ "-"|repeat:max_length }}|{{ "-"|repeat:25 }}|{{ "-"|repeat:15 }}|{{ "-"|repeat:25 }}|{{ "-"|repeat:25 }}
+ {% for log in asset.logs %}
+ {{ log.hook_name|center:max_length }}|{{ log.uid|center:25 }}|{{ log.status_code|center:15 }}|{{ log.message|truncatechars:23|center:25 }}|{{ log.date_modified|date:"Y-m-d H:i"|center:25 }}
+ {{ "-"|repeat:max_length }}|{{ "-"|repeat:25 }}|{{ "-"|repeat:15 }}|{{ "-"|repeat:25 }}|{{ "-"|repeat:25 }}
+ {% endfor %}
+ {% endwith %}
+
+{% endfor %}
+
+{% trans "Best," %}
+KoBoToolbox
\ No newline at end of file
diff --git a/kobo/apps/hook/templatetags/__init__.py b/kobo/apps/hook/templatetags/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/kobo/apps/hook/templatetags/strings.py b/kobo/apps/hook/templatetags/strings.py
new file mode 100644
index 0000000000..ddc5b463e8
--- /dev/null
+++ b/kobo/apps/hook/templatetags/strings.py
@@ -0,0 +1,17 @@
# -*- coding: utf-8 -*-
from django import template

register = template.Library()


@register.filter(name="repeat")
def repeat(value, count):
    """
    Returns `value` repeated `count` times.

    Used by the plain-text email templates to draw fixed-width column
    separators, e.g. `{{ "-"|repeat:10 }}`.

    :param value: str. String (usually a single character) to repeat
    :param count: int. Number of repetitions; 0 or negative yields ""
    :return: str
    """
    return value * count
\ No newline at end of file
diff --git a/kobo/apps/hook/tests/__init__.py b/kobo/apps/hook/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/kobo/apps/hook/tests/hook_test_case.py b/kobo/apps/hook/tests/hook_test_case.py
new file mode 100644
index 0000000000..03f18a529a
--- /dev/null
+++ b/kobo/apps/hook/tests/hook_test_case.py
@@ -0,0 +1,175 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import

import json

import responses
from django.conf import settings
from rest_framework import status
from django.core.urlresolvers import reverse

from ..models import HookLog, Hook
from ..constants import HOOK_LOG_FAILED
from kpi.exceptions import BadFormatException
from kpi.tests.kpi_test_case import KpiTestCase
from kpi.constants import INSTANCE_FORMAT_TYPE_JSON, INSTANCE_FORMAT_TYPE_XML


class HookTestCase(KpiTestCase):
    """
    Base class for REST Services (hooks) tests.

    Creates and deploys an asset with nested groups, then provides helpers to
    create hooks through the API and to simulate a failed delivery attempt.
    """

    def setUp(self):
        # `someuser` is expected to be provided by KpiTestCase fixtures.
        self.client.login(username="someuser", password="someuser")
        self.asset = self.create_asset(
            "some_asset",
            content=json.dumps({"survey": [
                {"type": "text", "name": "q1"},
                {"type": "begin_group", "name": "group1"},
                {"type": "text", "name": "q2"},
                {"type": "text", "name": "q3"},
                {"type": "end_group"},
                {"type": "begin_group", "name": "group2"},
                {"type": "begin_group", "name": "subgroup1"},
                {"type": "text", "name": "q4"},
                {"type": "text", "name": "q5"},
                {"type": "text", "name": "q6"},
                {"type": "end_group"},
                {"type": "end_group"},
            ]}),
            format="json")
        self.asset.deploy(backend='mock', active=True)
        self.asset.save()
        self.hook = Hook()
        # Monotonic fake primary key for injected submissions.
        self._submission_pk = 1

        # Run Celery tasks synchronously so tests need no worker.
        settings.CELERY_TASK_ALWAYS_EAGER = True

    def _create_hook(self, return_response_only=False, **kwargs):
        """
        Creates a hook through the API on the asset matching `format_type`,
        injecting one submission first.

        :param return_response_only: bool. If True, returns the raw API
            response without asserting success.
        :param kwargs: overrides for the hook payload (`name`, `endpoint`,
            `settings`, `active`, `subset_fields`, `format_type`).
        :return: Hook, or rest_framework Response if `return_response_only`.
        :raises BadFormatException: if `format_type` is neither JSON nor XML.
        """
        format_type = kwargs.get("format_type", INSTANCE_FORMAT_TYPE_JSON)

        if format_type == INSTANCE_FORMAT_TYPE_JSON:
            self.__prepare_json_submission()
            _asset = self.asset
        elif format_type == INSTANCE_FORMAT_TYPE_XML:
            # NOTE(review): `self.asset_xml` must have been created by the
            # caller (see ParserTestCase.test_xml_parser) — confirm.
            self.__prepare_xml_submission()
            _asset = self.asset_xml
        else:
            raise BadFormatException(
                "The format {} is not supported".format(format_type)
            )

        url = reverse("hook-list", kwargs={"parent_lookup_asset": _asset.uid})
        data = {
            "name": kwargs.get("name", "some external service with token"),
            "endpoint": kwargs.get("endpoint", "http://external.service.local/"),
            "settings": kwargs.get("settings", {
                "custom_headers": {
                    "X-Token": "1234abcd"
                }
            }),
            "export_type": format_type,
            "active": kwargs.get("active", True),
            "subset_fields": kwargs.get("subset_fields", [])
        }
        response = self.client.post(url, data, format='json')
        if return_response_only:
            return response
        else:
            self.assertEqual(response.status_code, status.HTTP_201_CREATED,
                             msg=response.data)
            hook = _asset.hooks.last()
            self.assertTrue(hook.active)
            return hook

    def _send_and_fail(self):
        """
        Creates a hook, mocks the remote endpoint so the first delivery fails
        (404) and later attempts succeed, sends once, and forces the resulting
        log into the FAILED state.

        The public method which calls this method needs to be decorated by
        `@responses.activate`.

        :return: dict. The serialized first hook log (API representation).
        """
        self.hook = self._create_hook()

        ServiceDefinition = self.hook.get_service_definition()
        submissions = self.asset.deployment.get_submissions()
        instance_id = submissions[0].get("id")
        service_definition = ServiceDefinition(self.hook, instance_id)
        first_mock_response = {"error": "not found"}

        # Mock first request's try
        responses.add(responses.POST, self.hook.endpoint,
                      json=first_mock_response, status=status.HTTP_404_NOT_FOUND)

        # Mock next requests' tries
        responses.add(responses.POST, self.hook.endpoint,
                      status=status.HTTP_200_OK,
                      content_type="application/json")

        # Try to send data to external endpoint
        success = service_definition.send()
        self.assertFalse(success)

        # Retrieve the corresponding log
        url = reverse("hook-log-list", kwargs={
            "parent_lookup_asset": self.hook.asset.uid,
            "parent_lookup_hook": self.hook.uid
        })

        response = self.client.get(url)
        first_hooklog_response = response.data.get("results")[0]

        # Result should match first try
        self.assertEqual(first_hooklog_response.get("status_code"), status.HTTP_404_NOT_FOUND)
        self.assertEqual(json.loads(first_hooklog_response.get("message")), first_mock_response)

        # Fakes Celery's n retries by forcing status to `failed`
        # (where n is `settings.HOOKLOG_MAX_RETRIES`).
        first_hooklog = HookLog.objects.get(uid=first_hooklog_response.get("uid"))
        first_hooklog.change_status(HOOK_LOG_FAILED)

        return first_hooklog_response

    def __prepare_json_submission(self):
        # Injects one JSON submission into the mock deployment.
        v_uid = self.asset.latest_deployed_version.uid
        submission = {
            "__version__": v_uid,
            "q1": u"¿Qué tal?",
            "group1/q2": u"¿Cómo está en el grupo uno la primera vez?",
            "group1/q3": u"¿Cómo está en el grupo uno la segunda vez?",
            "group2/subgroup1/q4": u"¿Cómo está en el subgrupo uno la primera vez?",
            "group2/subgroup1/q5": u"¿Cómo está en el subgrupo uno la segunda vez?",
            "group2/subgroup1/q6": u"¿Cómo está en el subgrupo uno la tercera vez?",
            "group2/subgroup11/q1": u"¿Cómo está en el subgrupo once?",
            "id": self._submission_pk
        }
        self.__inject_submission(self.asset, submission)

    def __prepare_xml_submission(self):
        # Injects one XML submission into the mock deployment of `asset_xml`.
        # NOTE(review): the XML literal below appears to have lost its element
        # tags (e.g. <q1>, <group1>) — presumably stripped by a copy/paste or
        # extraction step. Confirm against version control.
        v_uid = self.asset_xml.latest_deployed_version.uid
        submission = ("<{asset_uid}>"
                      "   <__version__>{v_uid}"
                      "   ¿Qué tal?"
                      "   "
                      "      ¿Cómo está en el grupo uno la primera vez?"
                      "      ¿Cómo está en el grupo uno la segunda vez?"
                      "   "
                      "   "
                      "      "
                      "         ¿Cómo está en el subgrupo uno la primera vez?"
                      "         ¿Cómo está en el subgrupo uno la segunda vez?"
                      "         ¿Cómo está en el subgrupo uno la tercera vez?"
                      "      "
                      "   "
                      "      ¿Cómo está en el subgrupo once?"
                      "   "
                      "   "
                      "   {id}"
                      "{asset_uid}>").format(
            asset_uid=self.asset_xml.uid,
            v_uid=v_uid,
            id=self._submission_pk
        )
        self.__inject_submission(self.asset_xml, submission)

    def __inject_submission(self, asset, submission):
        # Pushes the submission into the mock backend and bumps the fake pk.
        self._submission_pk += 1
        asset.deployment.mock_submissions([submission])
diff --git a/kobo/apps/hook/tests/test_api_hook.py b/kobo/apps/hook/tests/test_api_hook.py
new file mode 100644
index 0000000000..c9c8f50ba8
--- /dev/null
+++ b/kobo/apps/hook/tests/test_api_hook.py
@@ -0,0 +1,166 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import

import json

import constance
from django.core.urlresolvers import reverse
import requests
import responses
from rest_framework import status

from .hook_test_case import HookTestCase
from kpi.constants import INSTANCE_FORMAT_TYPE_JSON


class ApiHookTestCase(HookTestCase):
    """
    Tests permissions and behaviour of the REST Services (hooks) API:
    CRUD, data submission fan-out, retries, and endpoint validation.
    """

    def test_anonymous_access(self):
        # Anonymous users may not list hooks, view details, or view logs.
        hook = self._create_hook()
        self.client.logout()

        list_url = reverse("hook-list", kwargs={
            "parent_lookup_asset": self.asset.uid
        })

        response = self.client.get(list_url)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        detail_url = reverse("hook-detail", kwargs={
            "parent_lookup_asset": self.asset.uid,
            "uid": hook.uid,
        })

        response = self.client.get(detail_url)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        log_list_url = reverse("hook-log-list", kwargs={
            "parent_lookup_asset": self.asset.uid,
            "parent_lookup_hook": hook.uid,
        })

        response = self.client.get(log_list_url)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_create_hook(self):
        # `_create_hook` asserts HTTP 201 and an active hook internally.
        self._create_hook()

    def test_data_submission(self):
        # NOTE(review): `responses.add` is called without an active
        # `@responses.activate` mock context here — presumably the mock
        # endpoints are never actually hit in eager mode; confirm.
        # Create first hook
        first_hook = self._create_hook(name="dummy external service",
                                       endpoint="http://dummy.service.local/",
                                       settings={})
        responses.add(responses.POST, first_hook.endpoint,
                      status=status.HTTP_200_OK,
                      content_type="application/json")
        submission_url = reverse("submission-list", kwargs={"parent_lookup_asset": self.asset.uid})

        submissions = self.asset.deployment.get_submissions()
        data = {"instance_id": submissions[0].get("id")}
        response = self.client.post(submission_url, data)
        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)

        # Create second hook
        second_hook = self._create_hook(name="other dummy external service",
                                        endpoint="http://otherdummy.service.local/",
                                        settings={})
        responses.add(responses.POST, second_hook.endpoint,
                      status=status.HTTP_200_OK,
                      content_type="application/json")

        # A new hook exists without a log for this instance -> accepted again.
        response = self.client.post(submission_url, data)
        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)

        # All hooks already have logs for this instance -> conflict.
        response = self.client.post(submission_url, data)
        self.assertEqual(response.status_code, status.HTTP_409_CONFLICT)

    def test_non_owner_cannot_access(self):
        # A different authenticated user gets 404 (not 403) on all endpoints.
        hook = self._create_hook()
        self.client.logout()
        self.client.login(username="anotheruser", password="anotheruser")

        list_url = reverse("hook-list", kwargs={
            "parent_lookup_asset": self.asset.uid
        })

        response = self.client.get(list_url)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

        detail_url = reverse("hook-detail", kwargs={
            "parent_lookup_asset": self.asset.uid,
            "uid": hook.uid,
        })

        response = self.client.get(detail_url)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

        log_list_url = reverse("hook-log-list", kwargs={
            "parent_lookup_asset": self.asset.uid,
            "parent_lookup_hook": hook.uid,
        })

        response = self.client.get(log_list_url)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_non_owner_cannot_create(self):
        self.client.logout()
        self.client.login(username="anotheruser", password="anotheruser")
        response = self._create_hook(return_response_only=True, name="Hook for asset I don't own")
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_anonymous_cannot_create(self):
        self.client.logout()
        response = self._create_hook(return_response_only=True, name="Hook for asset from anonymous")
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_partial_update_hook(self):
        # PATCH should update only the supplied fields.
        hook = self._create_hook()
        url = reverse("hook-detail", kwargs={
            "parent_lookup_asset": self.asset.uid,
            "uid": hook.uid
        })
        data = {
            "name": "some disabled external service",
            "active": False
        }
        response = self.client.patch(url, data, format=INSTANCE_FORMAT_TYPE_JSON)
        self.assertEqual(response.status_code, status.HTTP_200_OK,
                         msg=response.data)
        hook.refresh_from_db()
        self.assertFalse(hook.active)
        self.assertEqual(hook.name, "some disabled external service")

    @responses.activate
    def test_send_and_retry(self):
        # First attempt fails (mocked 404), retry succeeds (mocked 200).
        first_log_response = self._send_and_fail()

        # Let's retry through API call
        retry_url = reverse("hook-log-retry", kwargs={
            "parent_lookup_asset": self.asset.uid,
            "parent_lookup_hook": self.hook.uid,
            "uid": first_log_response.get("uid")
        })

        # It should be a success
        response = self.client.patch(retry_url, format=INSTANCE_FORMAT_TYPE_JSON)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        # The log should now record 2 tries (original + retry).
        detail_url = reverse("hook-log-detail", kwargs={
            "parent_lookup_asset": self.asset.uid,
            "parent_lookup_hook": self.hook.uid,
            "uid": first_log_response.get("uid")
        })

        response = self.client.get(detail_url, format=INSTANCE_FORMAT_TYPE_JSON)
        self.assertEqual(response.data.get("tries"), 2)

    def test_validation(self):
        # Plain-http endpoints must be rejected when unsecured hooks are
        # disallowed.
        constance.config.ALLOW_UNSECURED_HOOK_ENDPOINTS = False

        response = self._create_hook(return_response_only=True)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        expected_response = {"endpoint": ["Unsecured endpoint is not allowed"]}
        self.assertEqual(response.data, expected_response)
\ No newline at end of file
diff --git a/kobo/apps/hook/tests/test_email.py b/kobo/apps/hook/tests/test_email.py
new file mode 100644
index 0000000000..82f4d8db2b
--- /dev/null
+++ b/kobo/apps/hook/tests/test_email.py
@@ -0,0 +1,66 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import

import json

from django.conf import settings
from django.core import mail
from django_celery_beat.models import PeriodicTask
from django.template import Context
from django.template.loader import get_template
from django.utils import translation, dateparse
import responses
from rest_framework import status

from .hook_test_case import HookTestCase
from ..tasks import failures_reports
from kpi.constants import INSTANCE_FORMAT_TYPE_JSON


class EmailTestCase(HookTestCase):
    """
    Tests the hooks failure-report email task (`failures_reports`).
    """

    def _create_periodic_task(self):
        """
        Creates an enabled `PeriodicTask` matching the beat schedule entry of
        `failures_reports`, so the task can resolve its `last_run_at`.
        """
        # Fixed typo: method was previously named `_create_periodisk_task`.
        beat_schedule = settings.CELERY_BEAT_SCHEDULE.get("send-hooks-failures-reports")
        periodic_task = PeriodicTask(name="Periodic Task Mock",
                                     enabled=True,
                                     task=beat_schedule.get("task"))
        periodic_task.save()

    @responses.activate
    def test_notifications(self):
        """
        Forces one hook failure, runs the report task, and checks that exactly
        one email is sent whose plain-text body matches the rendered template.
        """
        self._create_periodic_task()
        first_log_response = self._send_and_fail()
        failures_reports.delay()
        self.assertEqual(len(mail.outbox), 1)

        # Mirror the structure built by `failures_reports` for one owner,
        # one asset, one failed log.
        expected_record = {
            "username": self.asset.owner.username,
            "email": self.asset.owner.email,
            "language": "en",
            "assets": {
                self.asset.id: {
                    "name": self.asset.name,
                    "max_length": len(self.hook.name),
                    "logs": [{
                        "hook_name": self.hook.name,
                        "status_code": first_log_response.get("status_code"),
                        "message": first_log_response.get("message"),
                        "uid": first_log_response.get("uid"),
                        "date_modified": dateparse.parse_datetime(first_log_response.get("date_modified"))
                    }]
                }
            }
        }

        plain_text_template = get_template("reports/failures_email_body.txt")

        variables = {
            "username": expected_record.get("username"),
            "assets": expected_record.get("assets")
        }
        # Localize templates the same way the task does.
        translation.activate(expected_record.get("language"))
        text_content = plain_text_template.render(Context(variables))

        self.assertEqual(mail.outbox[0].body, text_content)
diff --git a/kobo/apps/hook/tests/test_parser.py b/kobo/apps/hook/tests/test_parser.py
new file mode 100644
index 0000000000..84d0283e72
--- /dev/null
+++ b/kobo/apps/hook/tests/test_parser.py
@@ -0,0 +1,69 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import

from lxml import etree
import json
import re

from .hook_test_case import HookTestCase
from kpi.constants import INSTANCE_FORMAT_TYPE_XML


class ParserTestCase(HookTestCase):
    """
    Tests the JSON/XML payload builders used when sending submissions to an
    external endpoint, in particular `subset_fields` filtering.
    """

    def test_json_parser(self):
        # Restrict the payload to `id`, everything under `subgroup1`, and `q3`.
        hook = self._create_hook(subset_fields=["id", "subgroup1", "q3"])

        ServiceDefinition = hook.get_service_definition()
        submissions = hook.asset.deployment.get_submissions()
        uuid = submissions[0].get("id")
        service_definition = ServiceDefinition(hook, uuid)
        # Only subset fields (and their descendants) should remain.
        expected_data = {
            "group1/q3": u"¿Cómo está en el grupo uno la segunda vez?",
            "group2/subgroup1/q4": u"¿Cómo está en el subgrupo uno la primera vez?",
            "group2/subgroup1/q5": u"¿Cómo está en el subgrupo uno la segunda vez?",
            "group2/subgroup1/q6": u"¿Cómo está en el subgrupo uno la tercera vez?",
            "id": 1
        }
        self.assertEquals(service_definition._get_data(), expected_data)

    def test_xml_parser(self):
        # Deploy a second asset whose submissions are stored as XML.
        self.asset_xml = self.create_asset(
            "some_asset_with_xml_submissions",
            content=json.dumps(self.asset.content),
            format="json")
        self.asset_xml.deploy(backend='mock', active=True)
        self.asset_xml.save()

        hook = self._create_hook(subset_fields=["id", "subgroup1", "q3"], format_type=INSTANCE_FORMAT_TYPE_XML)

        ServiceDefinition = hook.get_service_definition()
        submissions = hook.asset.deployment.get_submissions(format_type=INSTANCE_FORMAT_TYPE_XML)
        xml_doc = etree.fromstring(submissions[0])
        tree = etree.ElementTree(xml_doc)
        uuid = tree.find("id").text

        service_definition = ServiceDefinition(hook, uuid)
        # NOTE(review): the XML literal below appears to have lost its element
        # tags (e.g. <group1>, <q3>) — presumably stripped by a copy/paste or
        # extraction step. Confirm against version control.
        expected_etree = etree.fromstring(("<{asset_uid}>"
                                           "   "
                                           "      ¿Cómo está en el grupo uno la segunda vez?"
                                           "   "
                                           "   "
                                           "      "
                                           "         ¿Cómo está en el subgrupo uno la primera vez?"
                                           "         ¿Cómo está en el subgrupo uno la segunda vez?"
                                           "         ¿Cómo está en el subgrupo uno la tercera vez?"
                                           "      "
                                           "   "
                                           "   {id}"
                                           "{asset_uid}>").format(
            asset_uid=self.asset_xml.uid,
            id=uuid)
        )
        expected_xml = etree.tostring(expected_etree, pretty_print=True)

        def remove_whitespace(str_):
            # Collapse whitespace between tags so formatting differences
            # don't affect the comparison.
            return re.sub(r">\s+<", "><", str_)

        self.assertEquals(remove_whitespace(service_definition._get_data()),
                          remove_whitespace(expected_xml))
diff --git a/kobo/apps/hook/utils.py b/kobo/apps/hook/utils.py
new file mode 100644
index 0000000000..f9c0d52199
--- /dev/null
+++ b/kobo/apps/hook/utils.py
@@ -0,0 +1,28 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+from .models.hook_log import HookLog
+from .tasks import service_definition_task
+
+
class HookUtils(object):

    @staticmethod
    def call_services(asset, instance_id):
        """
        Asynchronously pushes a submission to every active hook of `asset`
        that does not already have a log for this instance.

        :param asset: Asset.
        :param instance_id: int. Instance primary key
        :return: bool. True if at least one hook was triggered.
        """
        # Ids of every active hook attached to the asset.
        active_hook_ids = asset.hooks.filter(active=True) \
            .values_list("id", flat=True).distinct()

        triggered_any = False
        for hook_id in active_hook_ids:
            already_logged = HookLog.objects.filter(
                instance_id=instance_id, hook_id=hook_id).exists()
            if already_logged:
                # This hook has already handled the instance; skip it.
                continue
            triggered_any = True
            # Delegate the actual HTTP call to Celery.
            service_definition_task.delay(hook_id, instance_id)

        return triggered_any
\ No newline at end of file
diff --git a/kobo/apps/hook/views/__init__.py b/kobo/apps/hook/views/__init__.py
new file mode 100644
index 0000000000..f114974daa
--- /dev/null
+++ b/kobo/apps/hook/views/__init__.py
@@ -0,0 +1,5 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+from .hook import HookViewSet
+from .hook_log import HookLogViewSet
\ No newline at end of file
diff --git a/kobo/apps/hook/views/hook.py b/kobo/apps/hook/views/hook.py
new file mode 100644
index 0000000000..c8a41ce3f0
--- /dev/null
+++ b/kobo/apps/hook/views/hook.py
@@ -0,0 +1,204 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from datetime import datetime, timedelta
import json

import constance
from django.utils import timezone
from django.db.models import Q
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext as _
from rest_framework import viewsets, status
from rest_framework.decorators import detail_route
from rest_framework.response import Response
from rest_framework_extensions.mixins import NestedViewSetMixin

from ..constants import HOOK_LOG_FAILED, HOOK_LOG_PENDING
from ..tasks import retry_all_task
from ..models import Hook, HookLog
from ..serializers.hook import HookSerializer
from kpi.constants import INSTANCE_FORMAT_TYPE_JSON
from kpi.models import Asset
from kpi.permissions import AssetOwnerNestedObjectsPermissions
from kpi.views import AssetOwnerFilterBackend, SubmissionViewSet
from kpi.utils.log import logging


class HookViewSet(NestedViewSetMixin, viewsets.ModelViewSet):
    """

    ## External services

    Lists the external services endpoints accessible to the requesting user


        GET /assets/{asset_uid}/hooks/


    > Example
    >
    >       curl -X GET https://[kpi-url]/assets/a9PkXcgVgaDXuwayVeAuY5/hooks/

    ## CRUD

    * `asset_uid` - is the unique identifier of a specific asset
    * `uid` - is the unique identifier of a specific external service

    #### Retrieves an external service

        GET /assets/{asset_uid}/hooks/{uid}


    > Example
    >
    >       curl -X GET https://[kpi-url]/assets/a9PkXcgVgaDXuwayVeAuY5/hooks/hfgha2nxBdoTVcwohdYNzb

    #### Add an external service to asset.

        POST /assets/{asset_uid}/hooks/


    > Example
    >
    >       curl -X POST https://[kpi-url]/assets/a9PkXcgVgaDXuwayVeAuY5/hooks/


    > **Payload to create a new external service**
    >
    >        {
    >           "name": {string},
    >           "endpoint": {string},
    >           "active": {boolean},
    >           "email_notification": {boolean},
    >           "export_type": {string},
    >           "subset_fields": [{string}],
    >           "auth_level": {string},
    >           "settings": {
    >               "username": {string},
    >               "password": {string},
    >               "custom_headers": {
    >                   {string}: {string}
    >                   ...
    >                   {string}: {string}
    >               }
    >           }
    >        }

    where

    * `name` and `endpoint` are required
    * `active` is True by default
    * `export_type` must be one of these values:

        1. `json` (_default_)
        2. `xml`

    * `email_notification` is a boolean. If true, the user will be notified when a request to the remote server has failed.
    * `auth_level` must be one of these values:

        1. `no_auth` (_default_)
        2. `basic_auth`

    * `subset_fields` is the list of fields of the form definition. Only these fields should be present in data sent to the remote server
    * `settings`.`custom_headers` is a dictionary of `custom header`: `value`

    For example:
    >           "settings": {
    >               "custom_headers": {
    >                   "Authorization" : "Token 1af538baa9045a84c0e889f672baf83ff24"
    >               }

    #### Update an external service.

        PATCH /assets/{asset_uid}/hooks/{uid}


    > Example
    >
    >       curl -X PATCH https://[kpi-url]/assets/a9PkXcgVgaDXuwayVeAuY5/hooks/hfgha2nxBdoTVcwohdYNzb


    Only specify properties to update in the payload. See above for payload structure

    #### Delete an external service.

        DELETE /assets/{asset_uid}/hooks/{uid}


    > Example
    >
    >       curl -X DELETE https://[kpi-url]/assets/a9PkXcgVgaDXuwayVeAuY5/hooks/hfgha2nxBdoTVcwohdYNzb

    #### Retries all failed attempts

        PATCH /assets/{asset_uid}/hooks/{hook_uid}/retry/


    **This call is asynchronous. Job is sent to Celery to be run in background**

    > Example
    >
    >       curl -X PATCH https://[kpi-url]/assets/a9PkXcgVgaDXuwayVeAuY5/hooks/hfgha2nxBdoTVcwohdYNzb/retry/

    It returns all logs `uid`s that are being retried.

    ### CURRENT ENDPOINT
    """
    model = Hook
    lookup_field = "uid"
    filter_backends = (
        AssetOwnerFilterBackend,
    )
    serializer_class = HookSerializer
    permission_classes = (AssetOwnerNestedObjectsPermissions,)

    def get_queryset(self):
        # Only hooks belonging to the asset referenced in the nested URL.
        asset_uid = self.get_parents_query_dict().get("asset")
        queryset = self.model.objects.filter(asset__uid=asset_uid)
        # NOTE(review): `select_related` expects a path of relations;
        # "asset__uid" ends on a non-relational field, which newer Django
        # versions reject with FieldError — presumably "asset" was intended.
        # Confirm before upgrading Django.
        queryset = queryset.select_related("asset__uid")
        return queryset

    def perform_create(self, serializer):
        # Attach the new hook to the asset from the nested URL (404 if the
        # asset does not exist).
        asset_uid = self.get_parents_query_dict().get("asset")
        asset = get_object_or_404(Asset, uid=asset_uid)
        serializer.save(asset=asset)

    @detail_route(methods=["PATCH"])
    def retry(self, request, uid=None, *args, **kwargs):
        """
        Re-queues every failed log of the hook (plus logs stuck in pending
        beyond the maximum retry window) through Celery.

        :return: Response with the `uid`s switched back to pending, or an
            error detail (304 when nothing to retry, 400 when hook disabled).
        """
        hook = self.get_object()
        response = {"detail": _("Task successfully scheduled")}
        status_code = status.HTTP_200_OK
        if hook.active:
            # Logs still pending after the whole retry window are considered
            # stuck and are retried along with the failed ones.
            seconds = HookLog.get_elapsed_seconds(constance.config.HOOK_MAX_RETRIES)
            threshold = timezone.now() - timedelta(seconds=seconds)

            records = hook.logs.filter(Q(date_modified__lte=threshold, status=HOOK_LOG_PENDING) |
                                       Q(status=HOOK_LOG_FAILED)).values_list("id", "uid").distinct()
            # Prepare lists of ids
            hooklogs_ids = []
            hooklogs_uids = []
            for record in records:
                hooklogs_ids.append(record[0])
                hooklogs_uids.append(record[1])

            if len(records) > 0:
                # Mark all logs as PENDING
                HookLog.objects.filter(id__in=hooklogs_ids).update(status=HOOK_LOG_PENDING)
                # Delegate to Celery
                retry_all_task.delay(hooklogs_ids)
                response.update({
                    "pending_uids": hooklogs_uids
                })

            else:
                response["detail"] = _("No data to retry")
                status_code = status.HTTP_304_NOT_MODIFIED
        else:
            response["detail"] = _("Can not retry on disabled hooks")
            status_code = status.HTTP_400_BAD_REQUEST

        return Response(response, status=status_code)
\ No newline at end of file
diff --git a/kobo/apps/hook/views/hook_log.py b/kobo/apps/hook/views/hook_log.py
new file mode 100644
index 0000000000..5a01f87811
--- /dev/null
+++ b/kobo/apps/hook/views/hook_log.py
@@ -0,0 +1,115 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+import json
+
+from django.shortcuts import get_object_or_404
+from django.utils.translation import ugettext as _
+from rest_framework import viewsets, mixins, status
+from rest_framework.decorators import detail_route
+from rest_framework.permissions import IsAuthenticated
+from rest_framework.response import Response
+from rest_framework_extensions.mixins import NestedViewSetMixin
+
+from ..constants import KOBO_INTERNAL_ERROR_STATUS_CODE
+from ..models.hook_log import HookLog
+from ..serializers.hook_log import HookLogSerializer
+from kpi.models import Asset
+from kpi.permissions import AssetOwnerNestedObjectsPermissions
+from kpi.serializers import TinyPaginated
+from kpi.views import AssetOwnerFilterBackend, SubmissionViewSet
+
+
+class HookLogViewSet(NestedViewSetMixin,
+ mixins.RetrieveModelMixin,
+ mixins.ListModelMixin,
+ viewsets.GenericViewSet):
+ """
+ ## Logs of an external service
+
+ ** Users can't add, update or delete logs with the API. They can only retry failed attempts (see below)**
+
+    #### Lists logs of an external service's endpoints accessible to the requesting user
+
+ GET /assets/{asset_uid}/hooks/{hook_uid}/logs/
+
+
+ > Example
+ >
+ > curl -X GET https://[kpi-url]/assets/a9PkXcgVgaDXuwayVeAuY5/hooks/hSBxsiVNa5UxkVAjwu6dFB/logs/
+
+
+
+ * `asset_uid` - is the unique identifier of a specific asset
+ * `hook_uid` - is the unique identifier of a specific external service
+ * `uid` - is the unique identifier of a specific log
+
+ #### Retrieves a log
+
+ GET /assets/{asset_uid}/hooks/{hook_uid}/logs/{uid}/
+
+
+
+ > Example
+ >
+ > curl -X GET https://[kpi-url]/assets/a9PkXcgVgaDXuwayVeAuY5/hooks/hfgha2nxBdoTVcwohdYNzb/logs/3005940a-6e30-4699-813a-0ee5b2b07395/
+
+
+ #### Retries a failed attempt
+
+ PATCH /assets/{asset_uid}/hooks/{hook_uid}/logs/{uid}/retry/
+
+
+ > Example
+ >
+    > curl -X PATCH https://[kpi-url]/assets/a9PkXcgVgaDXuwayVeAuY5/hooks/hfgha2nxBdoTVcwohdYNzb/logs/3005940a-6e30-4699-813a-0ee5b2b07395/retry/
+
+
+ ### CURRENT ENDPOINT
+ """
+ model = HookLog
+
+ lookup_field = "uid"
+ filter_backends = (
+ AssetOwnerFilterBackend,
+ )
+ serializer_class = HookLogSerializer
+ permission_classes = (AssetOwnerNestedObjectsPermissions,)
+ pagination_class = TinyPaginated
+
+    def get_queryset(self):
+        """
+        Returns logs belonging to the hook/asset pair of the nested URL.
+        """
+        asset_uid = self.get_parents_query_dict().get("asset")
+        hook_uid = self.get_parents_query_dict().get("hook")
+        queryset = self.model.objects.filter(hook__uid=hook_uid, hook__asset__uid=asset_uid)
+        # `select_related()` expects relation names; `"hook__asset__uid"`
+        # ends on a plain field and raises FieldError. Follow the FK chain
+        # up to the related model instead.
+        queryset = queryset.select_related("hook__asset")
+
+        return queryset
+
+ @detail_route(methods=["PATCH"])
+ def retry(self, request, uid=None, *args, **kwargs):
+ """
+ Retries to send data to external service.
+ :param request: rest_framework.request.Request
+ :param uid: str
+ :return: Response
+ """
+ response = {"detail": "",
+ "status_code": KOBO_INTERNAL_ERROR_STATUS_CODE}
+ status_code = status.HTTP_200_OK
+ hook_log = self.get_object()
+
+ if hook_log.can_retry():
+ hook_log.change_status()
+ success = hook_log.retry()
+ if success:
+ # Return status_code of remote server too.
+ # `response["status_code"]` is not the same as `status_code`
+ response["detail"] = hook_log.message
+ response["status_code"] = hook_log.status_code
+ else:
+ response["detail"] = _("An error has occurred when sending the data. Please try again later.")
+ status_code = status.HTTP_500_INTERNAL_SERVER_ERROR
+ else:
+ response["detail"] = _("Data is being or has already been processed")
+ status_code = status.HTTP_400_BAD_REQUEST
+
+ return Response(response, status=status_code)
\ No newline at end of file
diff --git a/kobo/apps/reports/report_data.py b/kobo/apps/reports/report_data.py
index 576febb6a7..1d3323dfb1 100644
--- a/kobo/apps/reports/report_data.py
+++ b/kobo/apps/reports/report_data.py
@@ -1,7 +1,6 @@
# coding: utf-8
from __future__ import unicode_literals
-import logging
import itertools
from collections import OrderedDict
from copy import deepcopy
@@ -11,21 +10,8 @@
from formpack import FormPack
from rest_framework import serializers
-from kpi.utils.mongo_helper import MongoDecodingHelper
-
-from .constants import (SPECIFIC_REPORTS_KEY, DEFAULT_REPORTS_KEY
- )
-
-
-def get_instances_for_userform_id(userform_id, submission=None):
- query = {'_userform_id': userform_id, '_deleted_at': {'$exists': False}}
- if submission:
- query['_id'] = submission
- instances = settings.MONGO_DB.instances.find(query)
- return (
- MongoDecodingHelper.to_readable_dict(instance)
- for instance in instances
- )
+from .constants import SPECIFIC_REPORTS_KEY, DEFAULT_REPORTS_KEY
+from kpi.utils.log import logging
def build_formpack(asset, submission_stream=None, use_all_form_versions=True):
@@ -115,7 +101,8 @@ def _infer_version_id(submission):
_userform_id = asset.deployment.mongo_userform_id
if not _userform_id.startswith(asset.owner.username):
raise Exception('asset has unexpected `mongo_userform_id`')
- submission_stream = get_instances_for_userform_id(_userform_id)
+
+ submission_stream = asset.deployment.get_submissions()
submission_stream = (
_infer_version_id(submission) for submission in submission_stream
diff --git a/kobo/celery.py b/kobo/celery.py
index 1352ac8bcc..32aa67d3fd 100644
--- a/kobo/celery.py
+++ b/kobo/celery.py
@@ -30,7 +30,7 @@ def on_configure(self):
app = Celery(PROJECT_NAME)
# Using a string here means the worker will not have to
# pickle the object when using Windows.
-app.config_from_object('django.conf:settings')
+app.config_from_object('django.conf:settings', namespace='CELERY')
# The `app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)` technique
# described in
diff --git a/kobo/settings.py b/kobo/settings.py
index 6f7a8abb36..aab8f15d16 100644
--- a/kobo/settings.py
+++ b/kobo/settings.py
@@ -7,12 +7,14 @@
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
+from __future__ import absolute_import
from datetime import timedelta
import multiprocessing
import os
import subprocess
+from celery.schedules import crontab
import django.conf.locale
from django.conf import global_settings
from django.conf.global_settings import LOGIN_URL
@@ -21,7 +23,7 @@
from pymongo import MongoClient
-from static_lists import EXTRA_LANG_INFO
+from .static_lists import EXTRA_LANG_INFO
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
@@ -43,6 +45,11 @@
SECURE_PROXY_SSL_HEADER = tuple((substring.strip() for substring in
os.environ['SECURE_PROXY_SSL_HEADER'].split(',')))
+# Make Django use NginX $host. Useful when running with ./manage.py runserver_plus
+# It avoids adding the debugger webserver port (i.e. `:8000`) at the end of urls.
+if os.getenv("USE_X_FORWARDED_HOST", "False") == "True":
+ USE_X_FORWARDED_HOST = True
+
UPCOMING_DOWNTIME = False
# Domain must not exclude KoBoCAT when sharing sessions
@@ -93,6 +100,8 @@
'constance',
'constance.backends.database',
'guardian', # For access to KC permissions ONLY
+ 'kobo.apps.hook',
+ 'django_celery_beat',
)
MIDDLEWARE_CLASSES = (
@@ -137,7 +146,12 @@
'help@kobotoolbox.org'),
'Email address for users to contact, e.g. when they '
'encounter unhandled errors in the application'),
-
+ 'ALLOW_UNSECURED_HOOK_ENDPOINTS': (True,
+ 'Allow the use of unsecured endpoints for hooks. '
+ '(e.g http://hook.example.com)'),
+ 'HOOK_MAX_RETRIES': (3,
+ 'Number of times the system will retry '
+ 'to send data to remote server before giving up')
}
# Tell django-constance to use a database model instead of Redis
CONSTANCE_BACKEND = 'constance.backends.database.DatabaseBackend'
@@ -270,6 +284,11 @@ def __init__(self, *args, **kwargs):
'rest_framework.authentication.BasicAuthentication',
'rest_framework.authentication.TokenAuthentication',
],
+ 'DEFAULT_RENDERER_CLASSES': [
+ 'rest_framework.renderers.JSONRenderer',
+ 'rest_framework.renderers.BrowsableAPIRenderer',
+ 'kpi.renderers.XMLRenderer',
+ ]
}
TEMPLATES = [
@@ -366,29 +385,37 @@ def __init__(self, *args, **kwargs):
ENKETO_SURVEY_ENDPOINT = 'api/v2/survey/all'
''' Celery configuration '''
+# Celery 4.0 New lowercase settings.
+# Uppercase settings can be used when using a PREFIX
+# http://docs.celeryproject.org/en/latest/userguide/configuration.html#new-lowercase-settings
+# http://docs.celeryproject.org/en/4.0/whatsnew-4.0.html#step-2-update-your-configuration-with-the-new-setting-names
+
+CELERY_TIMEZONE = "UTC"
if os.environ.get('SKIP_CELERY', 'False') == 'True':
# helpful for certain debugging
- CELERY_ALWAYS_EAGER = True
+ CELERY_TASK_ALWAYS_EAGER = True
# Celery defaults to having as many workers as there are cores. To avoid
# excessive resource consumption, don't spawn more than 6 workers by default
# even if there more than 6 cores.
+
CELERYD_MAX_CONCURRENCY = int(os.environ.get('CELERYD_MAX_CONCURRENCY', 6))
if multiprocessing.cpu_count() > CELERYD_MAX_CONCURRENCY:
- CELERYD_CONCURRENCY = CELERYD_MAX_CONCURRENCY
+ CELERY_WORKER_CONCURRENCY = CELERYD_MAX_CONCURRENCY
# Replace a worker after it completes 7 tasks by default. This allows the OS to
# reclaim memory allocated during large tasks
-CELERYD_MAX_TASKS_PER_CHILD = int(os.environ.get(
+CELERY_WORKER_MAX_TASKS_PER_CHILD = int(os.environ.get(
'CELERYD_MAX_TASKS_PER_CHILD', 7))
# Default to a 30-minute soft time limit and a 35-minute hard time limit
-CELERYD_TASK_TIME_LIMIT = int(os.environ.get('CELERYD_TASK_TIME_LIMIT', 2100))
-CELERYD_TASK_SOFT_TIME_LIMIT = int(os.environ.get(
+CELERY_TASK_TIME_LIMIT = int(os.environ.get('CELERYD_TASK_TIME_LIMIT', 2100))
+
+CELERY_TASK_SOFT_TIME_LIMIT = int(os.environ.get(
'CELERYD_TASK_SOFT_TIME_LIMIT', 1800))
-CELERYBEAT_SCHEDULE = {
+CELERY_BEAT_SCHEDULE = {
# Failsafe search indexing: update the Haystack index twice per day to
# catch any stragglers that might have gotten past
# haystack.signals.RealtimeSignalProcessor
@@ -396,6 +423,11 @@ def __init__(self, *args, **kwargs):
# 'task': 'kpi.tasks.update_search_index',
# 'schedule': timedelta(hours=12)
#},
+ # Schedule every day at midnight UTC. Can be customized in admin section
+ "send-hooks-failures-reports": {
+ "task": "kobo.apps.hook.tasks.failures_reports",
+ "schedule": crontab(hour=0, minute=0),
+ },
}
if 'KOBOCAT_URL' in os.environ:
@@ -406,7 +438,7 @@ def __init__(self, *args, **kwargs):
# Create/update KPI assets to match KC forms
SYNC_KOBOCAT_XFORMS_PERIOD_MINUTES = int(
os.environ.get('SYNC_KOBOCAT_XFORMS_PERIOD_MINUTES', '30'))
- CELERYBEAT_SCHEDULE['sync-kobocat-xforms'] = {
+ CELERY_BEAT_SCHEDULE['sync-kobocat-xforms'] = {
'task': 'kpi.tasks.sync_kobocat_xforms',
'schedule': timedelta(minutes=SYNC_KOBOCAT_XFORMS_PERIOD_MINUTES),
'options': {'queue': 'sync_kobocat_xforms_queue',
@@ -421,7 +453,7 @@ def __init__(self, *args, **kwargs):
rabbitmqctl set_permissions -p kpi kpi '.*' '.*' '.*'
See http://celery.readthedocs.org/en/latest/getting-started/brokers/rabbitmq.html#setting-up-rabbitmq.
'''
-BROKER_URL = os.environ.get('KPI_BROKER_URL', 'amqp://kpi:kpi@rabbit:5672/kpi')
+CELERY_BROKER_URL = os.environ.get('KPI_BROKER_URL', 'amqp://kpi:kpi@rabbit:5672/kpi')
# http://django-registration-redux.readthedocs.org/en/latest/quickstart.html#settings
ACCOUNT_ACTIVATION_DAYS = 3
@@ -476,6 +508,42 @@ def __init__(self, *args, **kwargs):
'private_storage.storage.s3boto3.PrivateS3BotoStorage'
AWS_PRIVATE_STORAGE_BUCKET_NAME = AWS_STORAGE_BUCKET_NAME
+
+# Need a default logger when sentry is not activated
+
+LOGGING = {
+ 'version': 1,
+ 'disable_existing_loggers': False,
+ 'formatters': {
+ 'verbose': {
+ 'format': '%(levelname)s %(asctime)s %(module)s' +
+ ' %(process)d %(thread)d %(message)s'
+ },
+ 'simple': {
+ 'format': '%(levelname)s %(message)s'
+ },
+ },
+ 'handlers': {
+ 'console': {
+ 'level': 'DEBUG',
+ 'class': 'logging.StreamHandler',
+ 'formatter': 'verbose'
+ }
+ },
+ 'loggers': {
+ 'console_logger': {
+ 'handlers': ['console'],
+ 'level': 'DEBUG',
+ 'propagate': True
+ },
+ 'django.db.backends': {
+ 'level': 'ERROR',
+ 'handlers': ['console'],
+ 'propagate': True
+ },
+ }
+}
+
''' Sentry configuration '''
if os.environ.get('RAVEN_DSN', False):
import raven
@@ -542,6 +610,11 @@ def __init__(self, *args, **kwargs):
'handlers': ['console'],
'propagate': False,
},
+ 'console_logger': {
+ 'level': 'DEBUG',
+ 'handlers': ['console'],
+ 'propagate': True
+ },
},
}
diff --git a/kpi/constants.py b/kpi/constants.py
index e9ac482007..1979ed06f1 100644
--- a/kpi/constants.py
+++ b/kpi/constants.py
@@ -1,5 +1,6 @@
-
+INSTANCE_FORMAT_TYPE_XML = "xml"
+INSTANCE_FORMAT_TYPE_JSON = "json"
ASSET_TYPE_TEXT = 'text'
ASSET_TYPE_EMPTY = 'empty'
diff --git a/kpi/deployment_backends/base_backend.py b/kpi/deployment_backends/base_backend.py
index 0ab4dcedcc..11b6f93165 100644
--- a/kpi/deployment_backends/base_backend.py
+++ b/kpi/deployment_backends/base_backend.py
@@ -3,6 +3,9 @@
class BaseDeploymentBackend(object):
+
+ # TODO. Stop using protected property `_deployment_data`.
+
def __init__(self, asset):
self.asset = asset
diff --git a/kpi/deployment_backends/kc_access/utils.py b/kpi/deployment_backends/kc_access/utils.py
index 3f4e80a3de..70a920cf51 100644
--- a/kpi/deployment_backends/kc_access/utils.py
+++ b/kpi/deployment_backends/kc_access/utils.py
@@ -1,6 +1,4 @@
-import logging
import json
-import logging
from collections import Iterable
from django.conf import settings
@@ -13,6 +11,7 @@
import requests
from .shadow_models import _models, safe_kc_read
+from kpi.utils.log import logging
class _KoboCatProfileException(Exception):
diff --git a/kpi/deployment_backends/kobocat_backend.py b/kpi/deployment_backends/kobocat_backend.py
index 5bbaf11dcc..a74e775eca 100644
--- a/kpi/deployment_backends/kobocat_backend.py
+++ b/kpi/deployment_backends/kobocat_backend.py
@@ -1,8 +1,9 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
+from __future__ import absolute_import
import cStringIO
-import logging
+import json
import re
import requests
import unicodecsv
@@ -11,16 +12,19 @@
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
-from django.http import HttpResponse
-from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext_lazy as _
from pyxform.xls2json_backends import xls_to_dict
from rest_framework import exceptions, status, serializers
+from rest_framework.request import Request
from rest_framework.authtoken.models import Token
-from rest_framework.decorators import detail_route, list_route
-from base_backend import BaseDeploymentBackend
+from ..exceptions import BadFormatException
+from .base_backend import BaseDeploymentBackend
from .kc_access.utils import instance_count, last_submission_time
+from .kc_access.shadow_models import _models
+from kpi.constants import INSTANCE_FORMAT_TYPE_JSON, INSTANCE_FORMAT_TYPE_XML
+from kpi.utils.mongo_helper import MongoDecodingHelper
+from kpi.utils.log import logging
class KobocatDeploymentException(exceptions.APIException):
@@ -203,6 +207,16 @@ def timestamp(self):
def xform_id_string(self):
return self.asset._deployment_data.get('backend_response', {}).get('id_string')
+    @property
+    def xform_id(self):
+        """
+        Returns the KoBoCAT XForm primary key linked to this deployment,
+        after verifying the stored form really belongs to the asset's owner.
+
+        :return: int
+        """
+        pk = self.asset._deployment_data.get('backend_response', {}).get('formid')
+        xform = _models.XForm.objects.filter(pk=pk).only(
+            'user__username', 'id_string').first()
+        # `.first()` returns None for a stale/missing pk; fail with a clear
+        # message instead of an AttributeError on `xform.user` below.
+        if xform is None or not (xform.user.username == self.asset.owner.username and
+                                 xform.id_string == self.xform_id_string):
+            raise Exception('Deployment links to an unexpected KoBoCAT XForm')
+        return pk
+
@property
def mongo_userform_id(self):
return '{}_{}'.format(self.asset.owner.username, self.xform_id_string)
@@ -259,7 +273,10 @@ def connect(self, identifier=None, active=False):
}
}
)
- payload = {u'downloadable': active}
+ payload = {
+ u"downloadable": active,
+ u"has_kpi_hook": self.asset.has_active_hooks
+ }
files = {'xls_file': (u'{}.xls'.format(id_string), xls_io)}
json_response = self._kobocat_request(
'POST', url, data=payload, files=files)
@@ -289,8 +306,9 @@ def redeploy(self, active=None):
}
)
payload = {
- u'downloadable': active,
- u'title': self.asset.name
+ u"downloadable": active,
+ u"title": self.asset.name,
+ u"has_kpi_hook": self.asset.has_active_hooks
}
files = {'xls_file': (u'{}.xls'.format(id_string), xls_io)}
try:
@@ -327,6 +345,27 @@ def set_active(self, active):
'backend_response': json_response,
})
+    def set_has_kpi_hooks(self):
+        """
+        PATCH `has_kpi_hooks` boolean of survey.
+        It lets `kc` know whether it needs to ping `kpi`
+        each time a submission comes in.
+
+        Store results in self.asset._deployment_data
+        """
+        has_active_hooks = self.asset.has_active_hooks
+        url = self.external_to_internal_url(
+            self.backend_response["url"])
+        payload = {
+            u"has_kpi_hooks": has_active_hooks
+        }
+        json_response = self._kobocat_request("PATCH", url, data=payload)
+        # NOTE(review): `assert` is stripped when Python runs with -O; if KC's
+        # answer must always be checked, raise an explicit exception instead.
+        assert(json_response["has_kpi_hooks"] == has_active_hooks)
+        self.store_data({
+            "has_kpi_hooks": json_response.get("has_kpi_hooks"),
+            "backend_response": json_response,
+        })
+
def delete(self):
''' WARNING! Deletes all submitted data! '''
url = self.external_to_internal_url(self.backend_response['url'])
@@ -403,7 +442,6 @@ def get_data_download_links(self):
# For GET requests that return files directly
'xls': u'/'.join((reports_base_url, 'export.xlsx')),
'csv': u'/'.join((reports_base_url, 'export.csv')),
- 'spss_labels': u'/'.join((forms_base_url, 'spss_labels.zip')),
}
return links
@@ -450,132 +488,82 @@ def get_submission_validation_status_url(self, submission_pk):
)
return url
-class KobocatDataProxyViewSetMixin(object):
- '''
- List, retrieve, and delete submission data for a deployed asset via the
- KoBoCAT API.
- '''
- def _get_deployment(self, request):
- '''
- Presupposing the use of `NestedViewSetMixin`, return the deployment for
- the asset specified by the KPI request
- '''
- asset_uid = self.get_parents_query_dict()['asset']
- asset = get_object_or_404(self.parent_model, uid=asset_uid)
- if not asset.has_deployment:
- raise serializers.ValidationError(
- _('The specified asset has not been deployed'))
- if not isinstance(asset.deployment, KobocatDeploymentBackend):
- raise NotImplementedError(
- 'This viewset can only be used with the KoBoCAT deployment '
- 'backend')
- return asset.deployment
-
- @staticmethod
- def _kobocat_proxy_request(kpi_request, kc_request):
- '''
- Send `kc_request`, which must specify `method` and `url` at a minimum.
- If `kpi_request`, i.e. the incoming request to be proxied, is
- authenticated, logged-in user's API token will be added to
- `kc_request.headers`
- '''
- user = kpi_request.user
- if not user.is_anonymous() and user.pk != settings.ANONYMOUS_USER_ID:
- token, created = Token.objects.get_or_create(user=user)
- kc_request.headers['Authorization'] = 'Token %s' % token.key
- session = requests.Session()
- return session.send(kc_request.prepare())
-
- @staticmethod
- def _requests_response_to_django_response(requests_response):
- '''
- Convert a `requests.models.Response` into a `django.http.HttpResponse`
- '''
- HEADERS_TO_COPY = ('Content-Type', 'Content-Language')
- django_response = HttpResponse()
- for header in HEADERS_TO_COPY:
- try:
- django_response[header] = requests_response.headers[header]
- except KeyError:
- continue
- django_response.status_code = requests_response.status_code
- django_response.write(requests_response.content)
- return django_response
-
-
- def list(self, kpi_request, *args, **kwargs):
- return self.retrieve(kpi_request, None, *args, **kwargs)
-
- def retrieve(self, kpi_request, pk, *args, **kwargs):
- deployment = self._get_deployment(kpi_request)
- if pk is None:
- kc_url = deployment.submission_list_url
+ def get_submissions(self, format_type=INSTANCE_FORMAT_TYPE_JSON, instances_ids=[]):
+ """
+        Retrieves submissions through Postgres or Mongo depending on `format_type`.
+        It can be filtered on instance ids.
+
+ :param format_type: str. INSTANCE_FORMAT_TYPE_JSON|INSTANCE_FORMAT_TYPE_XML
+ :param instances_ids: list. Optional
+ :return: list: mixed
+ """
+ submissions = []
+ if format_type == INSTANCE_FORMAT_TYPE_JSON:
+ submissions = self.__get_submissions_in_json(instances_ids)
+ elif format_type == INSTANCE_FORMAT_TYPE_XML:
+ submissions = self.__get_submissions_in_xml(instances_ids)
else:
- kc_url = deployment.get_submission_detail_url(pk)
- kc_request = requests.Request(
- method='GET',
- url=kc_url,
- params=kpi_request.GET
- )
- kc_response = self._kobocat_proxy_request(kpi_request, kc_request)
- return self._requests_response_to_django_response(kc_response)
-
- def delete(self, kpi_request, pk, *args, **kwargs):
- deployment = self._get_deployment(kpi_request)
- kc_url = deployment.get_submission_detail_url(pk)
- kc_request = requests.Request(method='DELETE', url=kc_url)
- kc_response = self._kobocat_proxy_request(kpi_request, kc_request)
- return self._requests_response_to_django_response(kc_response)
-
- @detail_route(methods=['GET'])
- def edit(self, kpi_request, pk, *args, **kwargs):
- deployment = self._get_deployment(kpi_request)
- kc_url = deployment.get_submission_edit_url(pk)
- kc_request = requests.Request(
- method='GET',
- url=kc_url,
- params=kpi_request.GET
- )
- kc_response = self._kobocat_proxy_request(kpi_request, kc_request)
- return self._requests_response_to_django_response(kc_response)
+ raise BadFormatException(
+ "The format {} is not supported".format(format_type)
+ )
+ return submissions
- @detail_route(methods=["GET", "PATCH"])
- def validation_status(self, kpi_request, pk, *args, **kwargs):
- deployment = self._get_deployment(kpi_request)
- kc_url = deployment.get_submission_validation_status_url(pk)
+ def get_submission(self, pk, format_type=INSTANCE_FORMAT_TYPE_JSON):
+ """
+ Returns only one occurrence.
- requests_params = {
- "method": kpi_request.method,
- "url": kc_url
- }
+ :param pk: int. `Instance.id`
+ :param format_type: str. INSTANCE_FORMAT_TYPE_JSON|INSTANCE_FORMAT_TYPE_XML
+ :return: mixed. JSON or XML
+ """
- # According to HTTP method,
- # params are passed to Request object in different ways.
- http_method_params = {}
- if kpi_request.method == "PATCH":
- http_method_params = {"json": kpi_request.data}
+ if pk:
+ submissions = list(self.get_submissions(format_type, [pk]))
+ if len(submissions) > 0:
+ return submissions[0]
+ return None
else:
- http_method_params = {"params": kpi_request.GET}
+ raise ValueError("Primary key must be provided")
+
+ def __get_submissions_in_json(self, instances_ids=[]):
+ """
+ Retrieves instances directly from Mongo.
+
+ :param instances_ids: list. Optional
+ :return: generator
+ """
+ query = {
+ "_userform_id": self.mongo_userform_id,
+ "_deleted_at": {"$exists": False}
+ }
- requests_params.update(http_method_params)
- kc_request = requests.Request(**requests_params)
- kc_response = self._kobocat_proxy_request(kpi_request, kc_request)
+ if len(instances_ids) > 0:
+ query.update({
+ "_id": {"$in": instances_ids}
+ })
- return self._requests_response_to_django_response(kc_response)
+ instances = settings.MONGO_DB.instances.find(query)
+ return (
+ MongoDecodingHelper.to_readable_dict(instance)
+ for instance in instances
+ )
+ def __get_submissions_in_xml(self, instances_ids=[]):
+ """
+ Retrieves instances directly from Postgres.
- @list_route(methods=["PATCH"])
- def validation_statuses(self, kpi_request, *args, **kwargs):
- deployment = self._get_deployment(kpi_request)
- kc_url = deployment.submission_list_url
+ :param instances_ids: list. Optional
+ :return: list
+ """
+ queryset = _models.Instance.objects.filter(
+ xform_id=self.xform_id,
+ deleted_at=None
+ )
- requests_params = {
- "method": kpi_request.method,
- "url": kc_url,
- "json": kpi_request.data
- }
+ if len(instances_ids) > 0:
+ queryset = queryset.filter(id__in=instances_ids)
- kc_request = requests.Request(**requests_params)
- kc_response = self._kobocat_proxy_request(kpi_request, kc_request)
+ queryset = queryset.order_by("id")
- return self._requests_response_to_django_response(kc_response)
+ return (lazy_instance.xml for lazy_instance in queryset)
diff --git a/kpi/deployment_backends/mixin.py b/kpi/deployment_backends/mixin.py
index bb978e0625..dfd50e0abd 100644
--- a/kpi/deployment_backends/mixin.py
+++ b/kpi/deployment_backends/mixin.py
@@ -1,6 +1,20 @@
-#!/usr/bin/python
# -*- coding: utf-8 -*-
-from backends import DEPLOYMENT_BACKENDS
+from __future__ import absolute_import
+import json
+
+from django.conf import settings
+from django.http import HttpResponse
+from django.shortcuts import get_object_or_404
+from django.utils.translation import ugettext_lazy as _
+import requests
+from requests.utils import quote
+from rest_framework import serializers
+from rest_framework.authtoken.models import Token
+from rest_framework.decorators import detail_route, list_route
+
+from .backends import DEPLOYMENT_BACKENDS
+from .kobocat_backend import KobocatDeploymentBackend
+from .mock_backend import MockDeploymentBackend
from kpi.exceptions import BadAssetTypeException
from kpi.constants import ASSET_TYPE_SURVEY
@@ -18,7 +32,7 @@ def connect_deployment(self, **kwargs):
raise KeyError('connect_deployment requires an argument: backend')
def deploy(self, backend=False, active=True):
- '''this method could be called "deploy_latest_version()".'''
+ """this method could be called "deploy_latest_version()"."""
if self.can_be_deployed:
if not self.has_deployment:
@@ -31,9 +45,9 @@ def deploy(self, backend=False, active=True):
self.asset_type))
def _mark_latest_version_as_deployed(self):
- ''' `sync_kobocat_xforms` calls this, since it manipulates
+ """ `sync_kobocat_xforms` calls this, since it manipulates
`_deployment_data` directly. Everything else should probably call
- `deploy()` above '''
+ `deploy()` above """
latest_version = self.latest_version
latest_version.deployed = True
latest_version.save()
@@ -49,9 +63,203 @@ def deployment(self):
try:
backend = self._deployment_data['backend']
return DEPLOYMENT_BACKENDS[backend](self)
- except KeyError, e:
+ except KeyError as e:
raise KeyError('cannot retrieve asset backend: {}'.format(backend))
@property
def can_be_deployed(self):
- return self.asset_type and self.asset_type == ASSET_TYPE_SURVEY
\ No newline at end of file
+ return self.asset_type and self.asset_type == ASSET_TYPE_SURVEY
+
+
+class MockDataProxyViewSetMixin(object):
+    """
+    Serves submission data stored by a mock deployment backend
+    (unit/interface testing).
+    """
+
+    def retrieve(self, request, pk, *args, **kwargs):
+        # Returns all submissions when `pk` is None, otherwise the single
+        # submission whose "id" matches `pk`.
+        asset = self._get_asset(kwargs.get("asset"))
+        if not asset.has_deployment:
+            raise serializers.ValidationError(
+                _('The specified asset has not been deployed'))
+        elif pk is None:
+            return HttpResponse(json.dumps(asset.deployment.get_submissions()),
+                                content_type="application/json")
+        else:
+            # NOTE(review): `.next()` raises StopIteration when no submission
+            # matches `pk` — confirm callers never pass an unknown pk.
+            data = (submission for submission in asset.deployment.get_submissions()
+                    if submission.get("id") == pk).next()
+            return HttpResponse(json.dumps(data), content_type="application/json")
+
+    def _get_asset(self, asset):
+        # Falls back to resolving the asset from the nested URL kwargs when
+        # one is not passed in explicitly; 404 if the uid is unknown.
+        if asset is None:
+            asset_uid = self.get_parents_query_dict()['asset']
+            asset = get_object_or_404(self.parent_model, uid=asset_uid)
+
+        return asset
+
+class KobocatDataProxyViewSetMixin(MockDataProxyViewSetMixin):
+    """
+    List, retrieve, and delete submission data for a deployed asset via the
+    KoBoCAT API.
+    """
+    def _get_deployment(self, request, asset=None):
+        """
+        Presupposing the use of `NestedViewSetMixin`, return the deployment for
+        the asset specified by the KPI request
+        """
+        asset = self._get_asset(asset)
+
+        if not asset.has_deployment:
+            raise serializers.ValidationError(
+                _('The specified asset has not been deployed'))
+        if not isinstance(asset.deployment, KobocatDeploymentBackend):
+            raise NotImplementedError(
+                'This viewset can only be used with the KoBoCAT deployment '
+                'backend')
+        return asset.deployment
+
+    @staticmethod
+    def _kobocat_proxy_request(kpi_request, kc_request):
+        """
+        Send `kc_request`, which must specify `method` and `url` at a minimum.
+        If `kpi_request`, i.e. the incoming request to be proxied, is
+        authenticated, logged-in user's API token will be added to
+        `kc_request.headers`
+        """
+        user = kpi_request.user
+        if not user.is_anonymous() and user.pk != settings.ANONYMOUS_USER_ID:
+            token, created = Token.objects.get_or_create(user=user)
+            kc_request.headers['Authorization'] = 'Token %s' % token.key
+        session = requests.Session()
+        return session.send(kc_request.prepare())
+
+    @staticmethod
+    def _requests_response_to_django_response(requests_response):
+        """
+        Convert a `requests.models.Response` into a `django.http.HttpResponse`
+        """
+        HEADERS_TO_COPY = ('Content-Type', 'Content-Language')
+        django_response = HttpResponse()
+        for header in HEADERS_TO_COPY:
+            try:
+                django_response[header] = requests_response.headers[header]
+            except KeyError:
+                continue
+        django_response.status_code = requests_response.status_code
+        django_response.write(requests_response.content)
+        return django_response
+
+    def list(self, kpi_request, *args, **kwargs):
+        # Listing is just a retrieve without a primary key.
+        return self.retrieve(kpi_request, None, *args, **kwargs)
+
+    def retrieve(self, kpi_request, pk, *args, **kwargs):
+
+        # Mock deployments are served locally; real ones are proxied to KC.
+        asset_uid = self.get_parents_query_dict()['asset']
+        asset = get_object_or_404(self.parent_model, uid=asset_uid)
+
+        if isinstance(asset.deployment, MockDeploymentBackend):
+            return super(KobocatDataProxyViewSetMixin, self).retrieve(
+                request=kpi_request,
+                pk=pk,
+                *args,
+                **kwargs)
+        else:
+            deployment = self._get_deployment(kpi_request, asset=asset)
+            if pk is None:
+                kc_url = deployment.submission_list_url
+            else:
+                kc_url = deployment.get_submission_detail_url(pk)
+
+            # We need to append query string parameters to url
+            # if any.
+            # NOTE(review): this assumes leftover query-string pieces arrive as
+            # kwargs whose names start with "?" — confirm against the URL conf.
+            query_string_params = []
+            for key, value in kwargs.items():
+                if key.startswith("?"):
+                    query_string_params.append("{}={}".format(
+                        key[1:],
+                        value
+                    ))
+                    kwargs.pop(key)
+            if query_string_params:
+                kc_url = "{}?{}".format(
+                    kc_url,
+                    "&".join(query_string_params)
+                )
+
+            # We can now retrieve XML or JSON format from `kc`
+            # Request can be:
+            # - /assets/<asset_uid>/submissions/<pk>/
+
+            # - /assets/<asset_uid>/submissions/<pk>.<format>/
+            # where `format` is among `kwargs`
+
+            # - /assets/<asset_uid>/submissions/<pk>/?format=<format>/
+            # where `format` is among `request.GET`
+            format = kwargs.pop("format", None)
+            params = kpi_request.GET.copy()
+            if format:
+                params.update({"format": format})
+
+            kc_request = requests.Request(
+                method='GET',
+                url=kc_url,
+                params=params
+            )
+            kc_response = self._kobocat_proxy_request(kpi_request, kc_request)
+            return self._requests_response_to_django_response(kc_response)
+
+    def delete(self, kpi_request, pk, *args, **kwargs):
+        # Proxies deletion of a single submission to KC.
+        deployment = self._get_deployment(kpi_request)
+        kc_url = deployment.get_submission_detail_url(pk)
+        kc_request = requests.Request(method='DELETE', url=kc_url)
+        kc_response = self._kobocat_proxy_request(kpi_request, kc_request)
+        return self._requests_response_to_django_response(kc_response)
+
+    @detail_route(methods=['GET'])
+    def edit(self, kpi_request, pk, *args, **kwargs):
+        # Proxies KC's "edit submission" endpoint for one submission.
+        deployment = self._get_deployment(kpi_request)
+        kc_url = deployment.get_submission_edit_url(pk)
+        kc_request = requests.Request(
+            method='GET',
+            url=kc_url,
+            params=kpi_request.GET
+        )
+        kc_response = self._kobocat_proxy_request(kpi_request, kc_request)
+        return self._requests_response_to_django_response(kc_response)
+
+    @detail_route(methods=["GET", "PATCH"])
+    def validation_status(self, kpi_request, pk, *args, **kwargs):
+        # Reads (GET) or updates (PATCH) one submission's validation status
+        # through KC.
+        deployment = self._get_deployment(kpi_request)
+        kc_url = deployment.get_submission_validation_status_url(pk)
+
+        requests_params = {
+            "method": kpi_request.method,
+            "url": kc_url
+        }
+
+        # According to HTTP method,
+        # params are passed to Request object in different ways.
+        http_method_params = {}
+        if kpi_request.method == "PATCH":
+            http_method_params = {"json": kpi_request.data}
+        else:
+            http_method_params = {"params": kpi_request.GET}
+
+        requests_params.update(http_method_params)
+        kc_request = requests.Request(**requests_params)
+        kc_response = self._kobocat_proxy_request(kpi_request, kc_request)
+
+        return self._requests_response_to_django_response(kc_response)
+
+
+    @list_route(methods=["PATCH"])
+    def validation_statuses(self, kpi_request, *args, **kwargs):
+        # Bulk-updates validation statuses through KC's list endpoint.
+        deployment = self._get_deployment(kpi_request)
+        kc_url = deployment.submission_list_url
+
+        requests_params = {
+            "method": kpi_request.method,
+            "url": kc_url,
+            "json": kpi_request.data
+        }
+
+        kc_request = requests.Request(**requests_params)
+        kc_response = self._kobocat_proxy_request(kpi_request, kc_request)
+
+        return self._requests_response_to_django_response(kc_response)
diff --git a/kpi/deployment_backends/mock_backend.py b/kpi/deployment_backends/mock_backend.py
index 3ad7413e30..2c6e7cc72b 100644
--- a/kpi/deployment_backends/mock_backend.py
+++ b/kpi/deployment_backends/mock_backend.py
@@ -1,7 +1,9 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
+import re
from base_backend import BaseDeploymentBackend
+from kpi.constants import INSTANCE_FORMAT_TYPE_JSON, INSTANCE_FORMAT_TYPE_XML
class MockDeploymentBackend(BaseDeploymentBackend):
@@ -9,6 +11,8 @@ class MockDeploymentBackend(BaseDeploymentBackend):
only used for unit testing and interface testing.
defines the interface for a deployment backend.
+
+ # TODO. Stop using protected property `_deployment_data`.
'''
def connect(self, active=False):
self.store_data({
@@ -50,11 +54,59 @@ def _submission_count(self):
return len(submissions)
def _mock_submission(self, submission):
+ """
+ @TODO may be useless because of mock_submissions. Remove if it's not used anymore anywhere else.
+ :param submission:
+ """
submissions = self.asset._deployment_data.get('submissions', [])
submissions.append(submission)
self.store_data({
'submissions': submissions,
})
- def _get_submissions(self):
- return self.asset._deployment_data.get('submissions', [])
+ def mock_submissions(self, submissions):
+ """
+ Insert dummy submissions into `asset._deployment_data`
+ :param submissions: list
+ """
+ self.store_data({"submissions": submissions})
+ self.asset.save(create_version=False)
+
+ def get_submissions(self, format_type=INSTANCE_FORMAT_TYPE_JSON, instances_ids=[]):
+ """
+ Returns a list of json representation of instances.
+
+ :param format_type: str. xml or json
+ :param instances_ids: list. Ids of instances to retrieve
+ :return: list
+ """
+ submissions = self.asset._deployment_data.get("submissions", [])
+
+ if len(instances_ids) > 0:
+ if format_type == INSTANCE_FORMAT_TYPE_XML:
+ # ugly way to find matches, but it avoids loading each XML into memory.
+ pattern = "|".join(instances_ids)
+ submissions = [submission for submission in submissions
+ if re.search(r"({})<\/id>".format(pattern), submission)]
+ else:
+ submissions = [submission for submission in submissions if submission.get("id") in instances_ids]
+
+ return submissions
+
+ def get_submission(self, pk, format_type=INSTANCE_FORMAT_TYPE_JSON):
+ if pk:
+ submissions = list(self.get_submissions(format_type, [pk]))
+ if len(submissions) > 0:
+ return submissions[0]
+ return None
+ else:
+ raise ValueError("Primary key must be provided")
+
+ def set_has_kpi_hooks(self):
+ """
+ Store results in self.asset._deployment_data
+ """
+ has_active_hooks = self.asset.has_active_hooks
+ self.store_data({
+ "has_kpi_hooks": has_active_hooks,
+ })
diff --git a/kpi/exceptions.py b/kpi/exceptions.py
index 4730f2ca4b..7d7187a6ca 100644
--- a/kpi/exceptions.py
+++ b/kpi/exceptions.py
@@ -1,4 +1,7 @@
# -*- coding: utf-8 -*-
class BadAssetTypeException(Exception):
+ pass
+
+class BadFormatException(Exception):
pass
\ No newline at end of file
diff --git a/kpi/filters.py b/kpi/filters.py
index be72f704de..711961550a 100644
--- a/kpi/filters.py
+++ b/kpi/filters.py
@@ -18,17 +18,25 @@
class AssetOwnerFilterBackend(filters.BaseFilterBackend):
"""
- For use with AssetVersions
+ For use with nested models of Asset.
Restricts access to items that are owned by the current user
"""
def filter_queryset(self, request, queryset, view):
- return queryset.filter(asset__owner=request.user)
+ # Because HookLog is nested two levels deep,
+ # we need to specify the relation in the filter field
+ if type(view).__name__ == "HookLogViewSet":
+ fields = {"hook__asset__owner": request.user}
+ else:
+ fields = {"asset__owner": request.user}
+
+ return queryset.filter(**fields)
class KpiObjectPermissionsFilter(object):
perm_format = '%(app_label)s.view_%(model_name)s'
def filter_queryset(self, request, queryset, view):
+
user = request.user
if user.is_superuser and view.action != 'list':
# For a list, we won't deluge the superuser with everyone else's
@@ -58,7 +66,7 @@ def filter_queryset(self, request, queryset, view):
get_anonymous_user(), permission, queryset)
if view.action != 'list':
# Not a list, so discoverability doesn't matter
- return owned_and_explicitly_shared | public
+ return (owned_and_explicitly_shared | public).distinct()
# For a list, do not include public objects unless they are also
# discoverable
@@ -77,7 +85,7 @@ def filter_queryset(self, request, queryset, view):
if all_public:
# We were asked not to consider subscriptions; return all
# discoverable objects
- return owned_and_explicitly_shared | discoverable
+ return (owned_and_explicitly_shared | discoverable).distinct()
# Of the discoverable objects, determine to which the user has
# subscribed
@@ -93,7 +101,7 @@ def filter_queryset(self, request, queryset, view):
# Neither the model or its parent has a subscription relation
subscribed = public.none()
- return owned_and_explicitly_shared | subscribed
+ return (owned_and_explicitly_shared | subscribed).distinct()
class RelatedAssetPermissionsFilter(KpiObjectPermissionsFilter):
diff --git a/kpi/fixtures/test_data.json b/kpi/fixtures/test_data.json
index 39cbddec26..52b33ad0f9 100644
--- a/kpi/fixtures/test_data.json
+++ b/kpi/fixtures/test_data.json
@@ -29,7 +29,6 @@
"last_login": "2015-02-12T19:52:14.406Z",
"last_name": "User",
"password": "pbkdf2_sha256$15000$AGA044bkjSNO$jQx/SdL6ok6mFPRQhCuWtU9zNNFvRUQ7qexLZgLRiys=",
- "user_permissions": [],
"username": "someuser",
"user_permissions" : [
["add_collection", "kpi", "collection"],
@@ -65,7 +64,6 @@
"last_login": "2015-02-12T19:52:14.406Z",
"last_name": "User",
"password": "pbkdf2_sha256$15000$MQp3yLvTajFx$yttM5DVFuGpK8cSJI09tC55SeaEL2geWzk9ZDVMFWRw=",
- "user_permissions": [],
"username": "anotheruser",
"user_permissions" : [
["add_collection", "kpi", "collection"],
diff --git a/kpi/haystack_utils.py b/kpi/haystack_utils.py
index dd1cce2128..711a38808b 100644
--- a/kpi/haystack_utils.py
+++ b/kpi/haystack_utils.py
@@ -1,10 +1,12 @@
import contextlib
import haystack
-import logging
+
from django.apps import apps as kpi_apps
from django.db import models
from django.conf import settings
from django.core import exceptions
+from kpi.utils.log import logging
+
def update_object_in_search_index(obj):
'''
diff --git a/kpi/management/commands/import_survey_drafts_from_dkobo.py b/kpi/management/commands/import_survey_drafts_from_dkobo.py
index 38f94ec17d..3895da4c55 100644
--- a/kpi/management/commands/import_survey_drafts_from_dkobo.py
+++ b/kpi/management/commands/import_survey_drafts_from_dkobo.py
@@ -1,7 +1,6 @@
from StringIO import StringIO
from optparse import make_option
from pyxform.xls2json_backends import csv_to_dict
-import logging
import re
from django.contrib.auth.models import User
@@ -13,6 +12,7 @@
from kpi.models import Asset
from kpi.models import Collection
from kpi.models.asset import KpiTaggableManager
+from kpi.utils.log import logging
class SurveyDraft(models.Model):
diff --git a/kpi/management/commands/sync_kobocat_xforms.py b/kpi/management/commands/sync_kobocat_xforms.py
index 576b3a7691..cb0ed0ff7e 100644
--- a/kpi/management/commands/sync_kobocat_xforms.py
+++ b/kpi/management/commands/sync_kobocat_xforms.py
@@ -2,7 +2,6 @@
import datetime
import io
import json
-import logging
import re
import requests
import xlwt
@@ -26,6 +25,8 @@
from ...deployment_backends.kc_access.shadow_models import _models
from ...models import Asset, ObjectPermission
from .import_survey_drafts_from_dkobo import _set_auto_field_update
+from kpi.utils.log import logging
+
TIMESTAMP_DIFFERENCE_TOLERANCE = datetime.timedelta(seconds=30)
diff --git a/kpi/model_utils.py b/kpi/model_utils.py
index 34184a0224..bef44b1f97 100644
--- a/kpi/model_utils.py
+++ b/kpi/model_utils.py
@@ -1,7 +1,7 @@
import contextlib
import copy
import re
-import logging
+
from django.apps import apps
from django.db.models import Q
from django.contrib.contenttypes.models import ContentType
@@ -13,6 +13,7 @@
from .models.object_permission import perm_parse
from .haystack_utils import update_object_in_search_index
+
'''
This circular import will bite you if you don't import kpi.models before
importing kpi.model_utils:
diff --git a/kpi/models/asset.py b/kpi/models/asset.py
index 3c45ee86d8..c1fbeac035 100644
--- a/kpi/models/asset.py
+++ b/kpi/models/asset.py
@@ -6,7 +6,6 @@
import sys
import copy
import json
-import logging
import StringIO
from collections import OrderedDict
@@ -59,6 +58,7 @@
from ..deployment_backends.mixin import DeployableMixin
from kobo.apps.reports.constants import (SPECIFIC_REPORTS_KEY,
DEFAULT_REPORTS_KEY)
+from kpi.utils.log import logging
# TODO: Would prefer this to be a mixin that didn't derive from `Manager`.
@@ -218,14 +218,18 @@ def _unlink_list_items(self, content):
def _remove_empty_expressions(self, content):
remove_empty_expressions_in_place(content)
- def _adjust_active_translation(self, content):
- # to get around the form builder's way of handling translations where
- # the interface focuses on the "null translation" and shows other ones
- # in advanced settings, we allow the builder to attach a parameter
- # which says what to name the null translation.
- _null_translation_as = content.pop('#active_translation_name', None)
- if _null_translation_as:
- self._rename_translation(content, None, _null_translation_as)
+ def _make_default_translation_first(self, content):
+ # The form builder only shows the first language, so make sure the
+ # default language is always at the top of the translations list. The
+ # new translations UI, on the other hand, shows all languages:
+ # https://github.com/kobotoolbox/kpi/issues/1273
+ try:
+ default_translation_name = content['settings']['default_language']
+ except KeyError:
+ # No `default_language`; don't do anything
+ return
+ else:
+ self._prioritize_translation(content, default_translation_name)
def _strip_empty_rows(self, content, vals=None):
if vals is None:
@@ -291,7 +295,9 @@ def update_translation_list(self, translation_list):
)
def _prioritize_translation(self, content, translation_name, is_new=False):
- _translations = content.get('translations')
+ # the translations/languages present in this particular content
+ _translations = content['translations']
+ # the columns that have translations
_translated = content.get('translated', [])
if is_new and (translation_name in _translations):
raise ValueError('cannot add existing translation')
@@ -299,20 +305,17 @@ def _prioritize_translation(self, content, translation_name, is_new=False):
raise ValueError('translation cannot be found')
_tindex = -1 if is_new else _translations.index(translation_name)
if is_new or (_tindex > 0):
- for row in content.get('survey', []):
- for col in _translated:
- if is_new:
- val = '{}'.format(row[col][0])
- else:
- val = row[col].pop(_tindex)
- row[col].insert(0, val)
- for row in content.get('choices', []):
- for col in _translated:
- if is_new:
- val = '{}'.format(row[col][0])
- else:
- val = row[col].pop(_tindex)
- row[col].insert(0, val)
+ for sheet_name in 'survey', 'choices':
+ for row in content.get(sheet_name, []):
+ for col in _translated:
+ if is_new:
+ val = '{}'.format(row[col][0])
+ else:
+ try:
+ val = row[col].pop(_tindex)
+ except KeyError:
+ continue
+ row[col].insert(0, val)
if is_new:
_translations.insert(0, translation_name)
else:
@@ -544,7 +547,7 @@ def adjust_content_on_save(self):
'''
self._standardize(self.content)
- self._adjust_active_translation(self.content)
+ self._make_default_translation_first(self.content)
self._strip_empty_rows(self.content)
self._assign_kuids(self.content)
self._autoname(self.content)
@@ -669,7 +672,15 @@ def get_ancestors_or_none(self):
@property
def latest_version(self):
- return self.asset_versions.order_by('-date_modified').first()
+ versions = None
+ try:
+ versions = self.prefetched_latest_versions
+ except AttributeError:
+ versions = self.asset_versions.order_by('-date_modified')
+ try:
+ return versions[0]
+ except IndexError:
+ return None
@property
def deployed_versions(self):
@@ -682,17 +693,19 @@ def latest_deployed_version(self):
@property
def version_id(self):
- try:
- latest_versions = self.prefetched_latest_versions
- except AttributeError:
- latest_version = self.latest_version
- if latest_version:
- return latest_version.uid
- else:
- try:
- return latest_versions[0]
- except IndexError:
- return None
+ # Avoid reading the property `self.latest_version` more than once, since
+ # it may execute a database query each time it's read
+ latest_version = self.latest_version
+ if latest_version:
+ return latest_version.uid
+
+ @property
+ def version__content_hash(self):
+ # Avoid reading the property `self.latest_version` more than once, since
+ # it may execute a database query each time it's read
+ latest_version = self.latest_version
+ if latest_version:
+ return latest_version.content_hash
@property
def snapshot(self):
@@ -735,6 +748,15 @@ def _snapshot(self, regenerate=True):
def __unicode__(self):
return u'{} ({})'.format(self.name, self.uid)
+ @property
+ def has_active_hooks(self):
+ """
+ Returns if asset has active hooks.
+ Useful to update `kc.XForm.has_kpi_hooks` field.
+ :return: {boolean}
+ """
+ return self.hooks.filter(active=True).exists()
+
@staticmethod
def optimize_queryset_for_list(queryset):
''' Used by serializers to improve performance when listing assets '''
@@ -797,9 +819,9 @@ def content(self):
def save(self, *args, **kwargs):
if self.asset is not None:
- if self.asset_version is None:
- self.asset_version = self.asset.latest_version
if self.source is None:
+ if self.asset_version is None:
+ self.asset_version = self.asset.latest_version
self.source = self.asset_version.version_content
if self.owner is None:
self.owner = self.asset.owner
@@ -808,7 +830,7 @@ def save(self, *args, **kwargs):
if _source is None:
_source = {}
self._standardize(_source)
- self._adjust_active_translation(_source)
+ self._make_default_translation_first(_source)
self._strip_empty_rows(_source)
self._autoname(_source)
self._remove_empty_expressions(_source)
diff --git a/kpi/models/asset_version.py b/kpi/models/asset_version.py
index fe071e3f40..c435fcebfe 100644
--- a/kpi/models/asset_version.py
+++ b/kpi/models/asset_version.py
@@ -54,14 +54,11 @@ def to_formpack_schema(self):
'version_id_key': '__version__',
}
- def _content_hash(self):
+ @property
+ def content_hash(self):
# used to determine changes in the content from version to version
- # not saved, only compared with other asset_versions (in tests and
- # migration scripts, initially)
- _json_string = json.dumps({'version_content': self.version_content,
- 'deployed_content': self.deployed_content,
- 'deployed': self.deployed,
- }, sort_keys=True)
+ # not saved, only compared with other asset_versions
+ _json_string = json.dumps(self.version_content, sort_keys=True)
return hashlib.sha1(_json_string).hexdigest()
def __unicode__(self):
diff --git a/kpi/models/import_export_task.py b/kpi/models/import_export_task.py
index acb6ef5830..295f46f1e4 100644
--- a/kpi/models/import_export_task.py
+++ b/kpi/models/import_export_task.py
@@ -1,7 +1,6 @@
import re
import pytz
import base64
-import logging
import datetime
import requests
import tempfile
@@ -23,6 +22,9 @@
import formpack.constants
from pyxform import xls2json_backends
from formpack.utils.string import ellipsize
+from formpack.schema.fields import ValidationStatusCopyField
+
+from kpi.utils.log import logging
from kobo.apps.reports.report_data import build_formpack
from ..fields import KpiUidField
@@ -30,6 +32,15 @@
from ..zip_importer import HttpContentParse
from ..model_utils import create_assets, _load_library_content, \
remove_string_prefix
+from ..deployment_backends.mock_backend import MockDeploymentBackend
+
+
+def utcnow(*args, **kwargs):
+ '''
+ Stupid, and exists only to facilitate mocking during unit testing.
+ If you know of a better way, please remove this.
+ '''
+ return datetime.datetime.utcnow()
def _resolve_url_to_asset_or_collection(item_path):
@@ -333,9 +344,10 @@ class ExportTask(ImportExportTask):
`data` attribute to a dictionary with the following keys:
* `type`: required; `xls` or `csv`
* `source`: required; URL of a deployed `Asset`
- * `lang`: optional; `xml` for XML names or the name of the language to be
- used for labels. Leave unset, or use `_default` or `None`, for
- labels in the default language
+ * `lang`: optional; the name of the translation to be used for headers and
+ response values. Specify `_xml` to use question and choice names
+ instead of labels. Leave unset, or use `_default` for labels in
+ the default language
* `hierarchy_in_labels`: optional; when `true`, include the labels for all
ancestor groups in each field label, separated by
`group_sep`. Defaults to `False`
@@ -366,7 +378,21 @@ class ExportTask(ImportExportTask):
last_submission_time = models.DateTimeField(null=True)
result = PrivateFileField(upload_to=export_upload_to, max_length=380)
- COPY_FIELDS = ('_id', '_uuid', '_submission_time')
+ COPY_FIELDS = (
+ '_id',
+ '_uuid',
+ '_submission_time',
+ ValidationStatusCopyField,
+ )
+
+ # It's not very nice to ask our API users to submit `null` or `false`,
+ # so replace friendlier language strings with the constants that formpack
+ # expects
+ API_LANGUAGE_TO_FORMPACK_LANGUAGE = {
+ '_default': formpack.constants.UNTRANSLATED,
+ '_xml': formpack.constants.UNSPECIFIED_TRANSLATION,
+ }
+
TIMESTAMP_KEY = '_submission_time'
# Above 244 seems to cause 'Download error' in Chrome 64/Linux
MAXIMUM_FILENAME_LENGTH = 240
@@ -377,14 +403,25 @@ def _fields_from_all_versions(self):
'fields_from_all_versions', 'true'
).lower() == 'true'
- def _build_export_filename(self, export, extension):
+ def _build_export_filename(self, export, export_type):
'''
Internal method to build the export filename based on the export title
(which should be set when calling the `FormPack()` constructor),
whether the latest or all versions are included, the label language,
- the current date and time, and the given `extension`
+ the current date and time, and the appropriate extension for the given
+ `export_type`
'''
- if export.lang == formpack.constants.UNTRANSLATED:
+
+ if export_type == 'xls':
+ extension = 'xlsx'
+ elif export_type == 'spss_labels':
+ extension = 'zip'
+ else:
+ extension = export_type
+
+ if export_type == 'spss_labels':
+ lang = 'SPSS Labels'
+ elif export.lang == formpack.constants.UNTRANSLATED:
lang = 'labels'
else:
lang = export.lang
@@ -399,7 +436,7 @@ def _build_export_filename(self, export, extension):
u'{{title}} - {version} - {{lang}} - {date:%Y-%m-%d-%H-%M-%S}'
u'.{ext}'.format(
version=version,
- date=datetime.datetime.utcnow(),
+ date=utcnow(),
ext=extension
)
)
@@ -425,8 +462,12 @@ def _build_export_options(self, pack):
group_sep = self.data.get('group_sep', '/')
translations = pack.available_translations
lang = self.data.get('lang', None) or next(iter(translations), None)
- if lang == '_default':
- lang = formpack.constants.UNTRANSLATED
+ try:
+ # If applicable, substitute the constants that formpack expects for
+ # friendlier language strings used by the API
+ lang = self.API_LANGUAGE_TO_FORMPACK_LANGUAGE[lang]
+ except KeyError:
+ pass
tag_cols_for_header = self.data.get('tag_cols_for_header', ['hxl'])
return {
@@ -490,17 +531,17 @@ def _run_task(self, messages):
raise Exception('the source must be deployed prior to export')
export_type = self.data.get('type', '').lower()
- if export_type not in ('xls', 'csv'):
+ if export_type not in ('xls', 'csv', 'spss_labels'):
raise NotImplementedError(
- 'only `xls` and `csv` are valid export types')
+ 'only `xls`, `csv`, and `spss_labels` are valid export types')
# Take this opportunity to do some housekeeping
self.log_and_mark_stuck_as_errored(self.user, source_url)
- if hasattr(source.deployment, '_get_submissions'):
+ if isinstance(source.deployment, MockDeploymentBackend):
# Currently used only for unit testing (`MockDeploymentBackend`)
# TODO: Have the KC backend also implement `_get_submissions()`?
- submission_stream = source.deployment._get_submissions()
+ submission_stream = source.deployment.get_submissions()
else:
submission_stream = None
@@ -514,8 +555,7 @@ def _run_task(self, messages):
options = self._build_export_options(pack)
export = pack.export(**options)
- extension = 'xlsx' if export_type == 'xls' else export_type
- filename = self._build_export_filename(export, extension)
+ filename = self._build_export_filename(export, export_type)
self.result.save(filename, ContentFile(''))
# FileField files are opened read-only by default and must be
# closed and reopened to allow writing
@@ -545,6 +585,8 @@ def _run_task(self, messages):
break
'''
output_file.write(xlsx_output_file.read())
+ elif export_type == 'spss_labels':
+ export.to_spss_labels(output_file)
# Restore the FileField to its typical state
self.result.open('rb')
@@ -574,7 +616,7 @@ def log_and_mark_stuck_as_errored(cls, user, source):
# How long can an export possibly run, not including time spent waiting
# in the Celery queue?
max_export_run_time = getattr(
- settings, 'CELERYD_TASK_TIME_LIMIT', 2100)
+ settings, 'CELERY_TASK_TIME_LIMIT', 2100)
# Allow a generous grace period
max_allowed_export_age = datetime.timedelta(
seconds=max_export_run_time * 4)
diff --git a/kpi/permissions.py b/kpi/permissions.py
index 4cc3ff3ffd..5482a1f1ec 100644
--- a/kpi/permissions.py
+++ b/kpi/permissions.py
@@ -1,7 +1,11 @@
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
+from django.http import Http404
+from django.shortcuts import get_object_or_404
from rest_framework import permissions
+from rest_framework_extensions.settings import extensions_api_settings
+from kpi.models.asset import Asset
# FIXME: Move to `object_permissions` module.
def get_perm_name(perm_name_prefix, model_instance):
@@ -52,3 +56,49 @@ class PostMappedToChangePermission(IsOwnerOrReadOnly):
'''
perms_map = IsOwnerOrReadOnly.perms_map
perms_map['POST'] = ['%(app_label)s.change_%(model_name)s']
+
+
+class AssetOwnerNestedObjectsPermissions(permissions.BasePermission):
+ """
+ Permissions for objects that are nested under Asset which only owner should access.
+ Others should receive a 404 response (instead of 403) to avoid revealing existence
+ of objects.
+ """
+ def has_permission(self, request, view):
+
+ if not request.user or (request.user and
+ (request.user.is_anonymous() or
+ not request.user.is_authenticated())):
+ return False
+
+ asset_uid = self.__get_parents_query_dict(request).get("asset")
+ asset = get_object_or_404(Asset, uid=asset_uid)
+
+ if request.user != asset.owner:
+ raise Http404
+
+ return True
+
+ def has_object_permission(self, request, view, obj):
+ # Because authentication checks have already executed via has_permission,
+ # always return True.
+ # Only the owner can access this.
+ return True
+
+ def __get_parents_query_dict(self, request):
+ """
+ Mimics NestedViewSetMixin.get_parents_query_dict
+ :param request:
+ :return:
+ """
+ result = {}
+ for kwarg_name, kwarg_value in request.parser_context.get("kwargs").items():
+ if kwarg_name.startswith(extensions_api_settings.DEFAULT_PARENT_LOOKUP_KWARG_NAME_PREFIX):
+ query_lookup = kwarg_name.replace(
+ extensions_api_settings.DEFAULT_PARENT_LOOKUP_KWARG_NAME_PREFIX,
+ '',
+ 1
+ )
+ query_value = kwarg_value
+ result[query_lookup] = query_value
+ return result
\ No newline at end of file
diff --git a/kpi/renderers.py b/kpi/renderers.py
index e3e5cd5ec3..705f2cd4cd 100644
--- a/kpi/renderers.py
+++ b/kpi/renderers.py
@@ -1,9 +1,11 @@
-from rest_framework import renderers
-from kpi.serializers import UserSerializer
-from kpi.models import AssetSnapshot
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import, unicode_literals
import json
+from rest_framework import renderers
+from rest_framework_xml.renderers import XMLRenderer as DRFXMLRenderer
+
class AssetJsonRenderer(renderers.JSONRenderer):
media_type = 'application/json'
@@ -21,24 +23,30 @@ def render(self, data, media_type=None, renderer_context=None):
return json.dumps(renderer_context['view'].get_object().to_ss_structure())
-class XFormRenderer(renderers.BaseRenderer):
- media_type = 'application/xml'
- format = 'xml'
- charset = 'utf-8'
+class XMLRenderer(DRFXMLRenderer):
- def render(self, data, media_type=None, renderer_context=None):
- asset = renderer_context['view'].get_object()
- return asset.snapshot.xml
+ def render(self, data, accepted_media_type=None, renderer_context=None, relationship=None):
+ if hasattr(renderer_context.get("view"), "get_object"):
+ obj = renderer_context.get("view").get_object()
+ # If `relationship` is passed among arguments, retrieve `xml` from this relationship.
+ # e.g. obj is `Asset`, relationship can be `snapshot`
+ if relationship is not None and hasattr(obj, relationship):
+ return getattr(obj, relationship).xml
+ return obj.xml
+ else:
+ # @TODO Handle submissions
+ return super(XMLRenderer, self).render(data=data,
+ accepted_media_type=accepted_media_type,
+ renderer_context=renderer_context)
-class AssetSnapshotXFormRenderer(renderers.BaseRenderer):
- media_type = 'application/xml'
- format = 'xml'
- charset = 'utf-8'
+class XFormRenderer(XMLRenderer):
- def render(self, data, media_type=None, renderer_context=None):
- asset_snapshot = renderer_context['view'].get_object()
- return asset_snapshot.xml
+ def render(self, data, accepted_media_type=None, renderer_context=None):
+ return super(XFormRenderer, self).render(data=data,
+ accepted_media_type=accepted_media_type,
+ renderer_context=renderer_context,
+ relationship="snapshot")
class XlsRenderer(renderers.BaseRenderer):
diff --git a/kpi/serializers.py b/kpi/serializers.py
index 16ac4f0fd5..6d45058281 100644
--- a/kpi/serializers.py
+++ b/kpi/serializers.py
@@ -14,7 +14,7 @@
from django.utils.six.moves.urllib import parse as urlparse
from django.conf import settings
from rest_framework import serializers, exceptions
-from rest_framework.pagination import LimitOffsetPagination
+from rest_framework.pagination import LimitOffsetPagination, PageNumberPagination
from rest_framework.reverse import reverse_lazy, reverse
from taggit.models import Tag
@@ -51,6 +51,13 @@ def get_parent_url(self, obj):
return reverse_lazy('api-root', request=self.context.get('request'))
+class TinyPaginated(PageNumberPagination):
+ """
+ Same as Paginated with a small page size
+ """
+ page_size = 50
+
+
class WritableJSONField(serializers.Field):
""" Serializer for JSONField -- required to make field writable"""
@@ -440,6 +447,7 @@ class AssetVersionListSerializer(serializers.Serializer):
# `select_related()` calls in `AssetVersionViewSet.get_queryset()`
uid = serializers.ReadOnlyField()
url = serializers.SerializerMethodField()
+ content_hash = serializers.ReadOnlyField()
date_deployed = serializers.SerializerMethodField(read_only=True)
date_modified = serializers.CharField(read_only=True)
@@ -466,6 +474,7 @@ class Meta:
'version_id',
'date_deployed',
'date_modified',
+ 'content_hash',
'content',
)
@@ -502,6 +511,7 @@ class AssetSerializer(serializers.HyperlinkedModelSerializer):
permissions = ObjectPermissionNestedSerializer(many=True, read_only=True)
tag_string = serializers.CharField(required=False, allow_blank=True)
version_id = serializers.CharField(read_only=True)
+ version__content_hash = serializers.CharField(read_only=True)
has_deployment = serializers.ReadOnlyField()
deployed_version_id = serializers.SerializerMethodField()
deployed_versions = PaginatedApiField(
@@ -516,6 +526,9 @@ class AssetSerializer(serializers.HyperlinkedModelSerializer):
deployment__data_download_links = serializers.SerializerMethodField()
deployment__submission_count = serializers.SerializerMethodField()
+ # Only add link instead of hooks list to avoid multiple access to DB.
+ hooks_link = serializers.SerializerMethodField()
+
class Meta:
model = Asset
lookup_field = 'uid'
@@ -530,6 +543,7 @@ class Meta:
'summary',
'date_modified',
'version_id',
+ 'version__content_hash',
'version_count',
'has_deployment',
'deployed_version_id',
@@ -548,6 +562,7 @@ class Meta:
'embeds',
'koboform_link',
'xform_link',
+ 'hooks_link',
'tag_string',
'uid',
'kind',
@@ -609,6 +624,9 @@ def get_xls_link(self, obj):
def get_xform_link(self, obj):
return reverse('asset-xform', args=(obj.uid,), request=self.context.get('request', None))
+ def get_hooks_link(self, obj):
+ return reverse('hook-list', args=(obj.uid,), request=self.context.get('request', None))
+
def get_embeds(self, obj):
request = self.context.get('request', None)
@@ -705,6 +723,7 @@ class DeploymentSerializer(serializers.Serializer):
identifier = serializers.CharField(read_only=True)
active = serializers.BooleanField(required=False)
version_id = serializers.CharField(required=False)
+ asset = serializers.SerializerMethodField()
@staticmethod
def _raise_unless_current_version(asset, validated_data):
@@ -715,6 +734,10 @@ def _raise_unless_current_version(asset, validated_data):
raise NotImplementedError(
'Only the current version_id can be deployed')
+ def get_asset(self, obj):
+ asset = self.context['asset']
+ return AssetSerializer(asset, context=self.context).data
+
def create(self, validated_data):
asset = self.context['asset']
self._raise_unless_current_version(asset, validated_data)
diff --git a/kpi/signals.py b/kpi/signals.py
index 4528a9e979..ba09eea812 100644
--- a/kpi/signals.py
+++ b/kpi/signals.py
@@ -1,11 +1,14 @@
-from django.db import models
+# -*- coding: utf-8 -*-
+from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from django.contrib.auth.models import User
+
+from kobo.apps.hook.models.hook import Hook
from taggit.models import Tag
from .models import TagUid
from .model_utils import grant_default_model_level_perms
-@receiver(models.signals.post_save, sender=User)
+@receiver(post_save, sender=User)
def default_permissions_post_save(sender, instance, created, raw, **kwargs):
'''
Users must have both model-level and object-level permissions to satisfy
@@ -22,9 +25,19 @@ def default_permissions_post_save(sender, instance, created, raw, **kwargs):
return
grant_default_model_level_perms(instance)
-@receiver(models.signals.post_save, sender=Tag)
+@receiver(post_save, sender=Tag)
def tag_uid_post_save(sender, instance, created, raw, **kwargs):
''' Make sure we have a TagUid object for each newly-created Tag '''
if raw or not created:
return
TagUid.objects.get_or_create(tag=instance)
+
+
+@receiver([post_save, post_delete], sender=Hook)
+def update_kc_xform_has_kpi_hooks(sender, instance, **kwargs):
+ """
+ Updates `kc.XForm` instance as soon as Asset.Hook list is updated.
+ """
+ asset = instance.asset
+ if asset.has_deployment:
+ asset.deployment.set_has_kpi_hooks()
\ No newline at end of file
diff --git a/kpi/tests/kpi_test_case.py b/kpi/tests/kpi_test_case.py
index 727a86203e..344a138e8d 100644
--- a/kpi/tests/kpi_test_case.py
+++ b/kpi/tests/kpi_test_case.py
@@ -68,7 +68,7 @@ def create_asset(self, name, content=None, owner=None,
owner_password=None, **kwargs):
if owner and owner_password:
if isinstance(owner, basestring):
- self.login(owner.username, owner_password)
+ self.login(owner, owner_password)
self.login(owner.username, owner_password)
if content is None:
@@ -80,7 +80,7 @@ def create_asset(self, name, content=None, owner=None,
"asset_type": kwargs.get("asset_type", "survey")
})
- response= self.client.post(reverse('asset-list'), kwargs)
+ response = self.client.post(reverse('asset-list'), kwargs)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
if owner and owner_password:
diff --git a/kpi/tests/test_api_asset_snapshots.py b/kpi/tests/test_api_asset_snapshots.py
index e13df4c85a..65e11d9ad4 100644
--- a/kpi/tests/test_api_asset_snapshots.py
+++ b/kpi/tests/test_api_asset_snapshots.py
@@ -1,3 +1,5 @@
+import re
+
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from rest_framework import status
@@ -65,6 +67,25 @@ def _create_asset_snapshot_from_asset(self):
def test_create_asset_snapshot_from_asset(self):
self._create_asset_snapshot_from_asset()
+ def test_create_two_asset_snapshots_from_source_and_asset(self):
+ '''
+ Make sure it's possible to preview unsaved changes to an asset multiple
+ times; see https://github.com/kobotoolbox/kpi/issues/2058
+ '''
+ self.client.login(username='someuser', password='someuser')
+ snapshot_list_url = reverse('assetsnapshot-list')
+ asset = self.create_asset(
+ 'Take my snapshot!', self.form_source, format='json')
+ asset_url = reverse('asset-detail', args=(asset.uid,))
+ data = {'source': self.form_source, 'asset': asset_url}
+ for _ in range(2):
+ response = self.client.post(snapshot_list_url, data, format='json')
+ self.assertEqual(response.status_code, status.HTTP_201_CREATED,
+ msg=response.data)
+ xml_resp = self.client.get(response.data['xml'])
+ self.assertTrue(len(xml_resp.content) > 0)
+ self.client.logout()
+
def test_asset_owner_can_access_snapshot(self):
creation_response = self._create_asset_snapshot_from_asset()
snapshot_uid = creation_response.data['uid']
@@ -121,3 +142,36 @@ def test_anon_can_access_snapshot_xml(self):
for xml_url in snapshot_xml_urls:
detail_response = self.client.get(xml_url)
self.assertEqual(detail_response.status_code, status.HTTP_200_OK)
+
+ def test_xml_renderer(self):
+ '''
+ Make sure the API endpoint returns the same XML as the ORM
+ '''
+ def kludgy_is_xml_equal(*args):
+ '''
+ Compare strings after removing newlines and whitespace between
+ tags. Returns True if all strings are equal after this manipulation
+ '''
+ xml_strings = list(args)
+ for i, xml in enumerate(xml_strings):
+ xml = xml.replace('\n', '')
+ xml = re.sub(r'>\s+<', '><', xml)
+ xml_strings[i] = xml
+
+ return len(set(xml_strings)) == 1
+
+ creation_response = self._create_asset_snapshot_from_asset()
+ snapshot_uid = creation_response.data['uid']
+ snapshot_url = reverse('assetsnapshot-detail', args=(snapshot_uid,))
+ snapshot_orm_xml = AssetSnapshot.objects.get(uid=snapshot_uid).xml
+ # Test both DRF conventions of specifying the format
+ snapshot_xml_urls = (
+ snapshot_url.rstrip('/') + '.xml',
+ snapshot_url + '?format=xml',
+ )
+ for xml_url in snapshot_xml_urls:
+ xml_response = self.client.get(xml_url)
+ self.assertEqual(xml_response.status_code, status.HTTP_200_OK)
+ self.assertTrue(
+ kludgy_is_xml_equal(xml_response.content, snapshot_orm_xml)
+ )
diff --git a/kpi/tests/test_api_assets.py b/kpi/tests/test_api_assets.py
index 7b73a5b2cd..98c9faa594 100644
--- a/kpi/tests/test_api_assets.py
+++ b/kpi/tests/test_api_assets.py
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import copy
+from hashlib import md5
import json
import requests
import StringIO
@@ -19,6 +20,7 @@
from kpi.models import AssetVersion
from kpi.models import Collection
from kpi.models import ExportTask
+from kpi.serializers import AssetListSerializer
from .kpi_test_case import KpiTestCase
from formpack.utils.expand_content import SCHEMA_VERSION
@@ -30,6 +32,7 @@ class AssetsListApiTests(APITestCase):
def setUp(self):
self.client.login(username='someuser', password='someuser')
+ self.list_url = reverse('asset-list')
def test_login_as_other_users(self):
self.client.logout()
@@ -42,12 +45,11 @@ def test_create_asset(self):
"""
Ensure we can create a new asset
"""
- url = reverse('asset-list')
data = {
'content': '{}',
'asset_type': 'survey',
}
- response = self.client.post(url, data, format='json')
+ response = self.client.post(self.list_url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED,
msg=response.data)
sa = Asset.objects.order_by('date_created').last()
@@ -63,6 +65,46 @@ def test_delete_asset(self):
self.assertEqual(response.status_code, status.HTTP_200_OK,
msg=response.data)
+ def test_asset_list_matches_detail(self):
+ detail_response = self.test_create_asset()
+ list_response = self.client.get(self.list_url)
+ self.assertEqual(list_response.status_code, status.HTTP_200_OK,
+ msg=list_response.data)
+ expected_list_data = {
+ field: detail_response.data[field]
+ for field in AssetListSerializer.Meta.fields
+ }
+ list_result_detail = None
+ for result in list_response.data['results']:
+ if result['uid'] == expected_list_data['uid']:
+ list_result_detail = result
+ break
+ self.assertIsNotNone(list_result_detail)
+ self.assertDictEqual(expected_list_data, dict(list_result_detail))
+
+ def test_assets_hash(self):
+ another_user = User.objects.get(username="anotheruser")
+ user_asset = Asset.objects.first()
+ user_asset.save()
+ user_asset.assign_perm(another_user, "view_asset")
+
+ self.client.logout()
+ self.client.login(username="anotheruser", password="anotheruser")
+ creation_response = self.test_create_asset()
+
+ another_user_asset = another_user.assets.last()
+ another_user_asset.save()
+
+ versions_ids = [
+ user_asset.version_id,
+ another_user_asset.version_id
+ ]
+ versions_ids.sort()
+ expected_hash = md5("".join(versions_ids)).hexdigest()
+ hash_url = reverse("asset-hash-list")
+ hash_response = self.client.get(hash_url)
+ self.assertEqual(hash_response.data.get("hash"), expected_hash)
+
class AssetVersionApiTests(APITestCase):
fixtures = ['test_data']
@@ -86,6 +128,17 @@ def test_asset_version(self):
self.assertTrue('survey' in resp2.data['content'])
self.assertEqual(len(resp2.data['content']['survey']), 2)
+ def test_asset_version_content_hash(self):
+ resp = self.client.get(self.version_list_url, format='json')
+ self.assertEqual(resp.status_code, status.HTTP_200_OK)
+ first_version = resp.data['results'][0]
+ asset = AssetVersion.objects.get(uid=first_version['uid']).asset
+ self.assertEqual(first_version['content_hash'],
+ asset.latest_version.content_hash)
+ resp2 = self.client.get(first_version['url'], format='json')
+ self.assertEqual(resp2.data['content_hash'],
+ asset.latest_version.content_hash)
+
def test_restricted_access_to_version(self):
self.client.logout()
self.client.login(username='anotheruser', password='anotheruser')
@@ -138,7 +191,9 @@ def test_asset_deployment_data_updates(self):
'backend': 'mock',
'active': True,
})
- asset = Asset.objects.get(uid=self.asset_uid)
+
+ self.assertEqual(response1.data.get("asset").get('deployment__active'), True)
+ self.assertEqual(response1.data.get("asset").get('has_deployment'), True)
response2 = self.client.get(self.asset_url, format='json')
self.assertEqual(response2.data.get('deployment__active'), True)
@@ -242,6 +297,15 @@ def test_map_styles_field(self):
def test_map_custom_field(self):
self.check_asset_writable_json_field('map_custom')
+ def test_asset_version_id_and_content_hash(self):
+ response = self.client.get(self.asset_url, format='json')
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual(self.asset.version_id, self.asset.latest_version.uid)
+ self.assertEqual(response.data['version_id'],
+ self.asset.version_id)
+ self.assertEqual(response.data['version__content_hash'],
+ self.asset.latest_version.content_hash)
+
class AssetsXmlExportApiTests(KpiTestCase):
fixtures = ['test_data']
@@ -449,9 +513,8 @@ def setUp(self):
'__version__': v_uid,
'q1': u'¿Qué tal?'
}
- self.asset.deployment._mock_submission(submission)
- self.asset.save(create_version=False)
- settings.CELERY_ALWAYS_EAGER = True
+ self.asset.deployment.mock_submissions([submission])
+ settings.CELERY_TASK_ALWAYS_EAGER = True
def result_stored_locally(self, detail_response):
'''
@@ -471,7 +534,7 @@ def test_owner_can_create_export(self):
# Create the export task
response = self.client.post(post_url, task_data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
- # Task should complete right away due to `CELERY_ALWAYS_EAGER`
+ # Task should complete right away due to `CELERY_TASK_ALWAYS_EAGER`
detail_response = self.client.get(response.data['url'])
self.assertEqual(detail_response.status_code, status.HTTP_200_OK)
self.assertEqual(detail_response.data['status'], 'complete')
@@ -485,8 +548,8 @@ def test_owner_can_create_export(self):
result_content = result_response.content
self.assertEqual(result_response.status_code, status.HTTP_200_OK)
expected_content = ''.join([
- '"q1";"_id";"_uuid";"_submission_time";"_index"\r\n',
- '"¿Qué tal?";"";"";"";"1"\r\n',
+ '"q1";"_id";"_uuid";"_submission_time";"_validation_status";"_index"\r\n',
+ '"¿Qué tal?";"";"";"";"";"1"\r\n',
])
self.assertEqual(result_content, expected_content)
return detail_response
diff --git a/kpi/tests/test_api_imports.py b/kpi/tests/test_api_imports.py
index 5b33e21a75..dcd0f2a02c 100644
--- a/kpi/tests/test_api_imports.py
+++ b/kpi/tests/test_api_imports.py
@@ -19,7 +19,7 @@ def setUp(self):
self.client.login(username='someuser', password='someuser')
self.user = User.objects.get(username='someuser')
self.asset = Asset.objects.first()
- settings.CELERY_ALWAYS_EAGER = True
+ settings.CELERY_TASK_ALWAYS_EAGER = True
def _assert_assets_contents_equal(self, a1, a2):
def _prep_row_for_comparison(row):
@@ -38,7 +38,7 @@ def _post_import_task_and_compare_created_asset_to_source(self, task_data,
post_url = reverse('importtask-list')
response = self.client.post(post_url, task_data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
- # Task should complete right away due to `CELERY_ALWAYS_EAGER`
+ # Task should complete right away due to `CELERY_TASK_ALWAYS_EAGER`
detail_response = self.client.get(response.data['url'])
self.assertEqual(detail_response.status_code, status.HTTP_200_OK)
self.assertEqual(detail_response.data['status'], 'complete')
@@ -90,7 +90,7 @@ def test_import_non_xls_url(self):
post_url = reverse('importtask-list')
response = self.client.post(post_url, task_data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
- # Task should complete right away due to `CELERY_ALWAYS_EAGER`
+ # Task should complete right away due to `CELERY_TASK_ALWAYS_EAGER`
detail_response = self.client.get(response.data['url'])
self.assertEqual(detail_response.status_code, status.HTTP_200_OK)
self.assertEqual(detail_response.data['status'], 'error')
@@ -112,7 +112,7 @@ def test_import_invalid_host_url(self):
post_url = reverse('importtask-list')
response = self.client.post(post_url, task_data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
- # Task should complete right away due to `CELERY_ALWAYS_EAGER`
+ # Task should complete right away due to `CELERY_TASK_ALWAYS_EAGER`
detail_response = self.client.get(response.data['url'])
# FIXME: this fails because the detail request returns a 404, even
# after the POST returns a 201!
diff --git a/kpi/tests/test_asset_versions.py b/kpi/tests/test_asset_versions.py
index 6d16ef351b..3be1234e8e 100644
--- a/kpi/tests/test_asset_versions.py
+++ b/kpi/tests/test_asset_versions.py
@@ -1,3 +1,6 @@
+import json
+import hashlib
+import unittest
from django.contrib.auth.models import User
from django.test import TestCase
from copy import deepcopy
@@ -82,3 +85,23 @@ def _bad_deployment():
self.assertRaises(BadAssetTypeException, _bad_deployment)
+ def test_version_content_hash(self):
+ _content = {
+ u'survey': [
+ {u'type': u'note',
+ u'label': u'Read me',
+ u'name': u'n1'}
+ ],
+ }
+ new_asset = Asset.objects.create(asset_type='survey', content=_content)
+ expected_hash = hashlib.sha1(json.dumps(new_asset.content,
+ sort_keys=True)).hexdigest()
+ self.assertEqual(new_asset.latest_version.content_hash, expected_hash)
+ return new_asset
+
+ def test_version_content_hash_same_after_non_content_change(self):
+ new_asset = self.test_version_content_hash()
+ expected_hash = new_asset.latest_version.content_hash
+ new_asset.settings['description'] = 'Loco el que lee'
+ new_asset.save()
+ self.assertEqual(new_asset.latest_version.content_hash, expected_hash)
diff --git a/kpi/tests/test_assets.py b/kpi/tests/test_assets.py
index 741ed88e07..3122358d40 100644
--- a/kpi/tests/test_assets.py
+++ b/kpi/tests/test_assets.py
@@ -146,20 +146,64 @@ def _wrap_type(self, type_val, select_from=None):
{'list_name': 'yn', 'name': 'n', 'label': 'No'},
]}
- def test_rename_null_translation(self):
+ def test_default_translation_first(self):
'''
This allows a workaround to enable multi-translation editing in the
form builder which focuses on the "null" language.
'''
- self.asset = Asset.objects.create(content={'survey': [
- {'label': ['lang1', 'lang2'], 'type': 'text', 'name': 'q1'},
- ],
- 'translations': ['lang1', None],
- '#active_translation_name': 'lang2',
+ def _check_content(content, expected_translations):
+ self.assertListEqual(
+ content['translations'], expected_translations
+ )
+ for sheet_name in 'survey', 'choices':
+ for row in content[sheet_name]:
+ for col in 'label', 'hint':
+ for index, cell in enumerate(row.get(col, [])):
+ self.assertTrue(
+ str(cell).endswith(
+ str(expected_translations[index])
+ )
+ )
+
+ self.asset = Asset.objects.create(content={
+ 'survey': [
+ {
+ 'name': 'q1',
+ 'type': 'select_one',
+ 'label': ['q label lang1', None, 'q label lang3'],
+ 'hint': ['q hint lang1', 'q hint None', 'q hint lang3'],
+ 'select_from_list_name': 'choice_list',
+ },
+ ],
+ 'choices': [
+ {'list_name': 'choice_list', 'name': 'c1',
+ 'label': ['c1 lang1', None, 'c1 lang3']},
+ {'list_name': 'choice_list', 'name': 'c2',
+ 'label': ['c2 lang1', 'c2 None', 'c2 lang3']},
+ {'list_name': 'choice_list', 'name': 'c3',
+ 'label': ['c3 lang1', 'c3 None', 'c3 lang3']},
+ ],
+ 'settings': [{'default_language': 'lang3'}],
+ 'translations': ['lang1', None, 'lang3'],
})
- self.assertEqual(self.asset.content['translations'], ['lang1', 'lang2'])
- self.assertTrue('#active_translation_name' not in self.asset.content)
- self.assertTrue('#null_translation' not in self.asset.content)
+ _check_content(
+ self.asset.content,
+ expected_translations=['lang3', 'lang1', None]
+ )
+
+ self.asset.content['settings']['default_language'] = None
+ self.asset.save()
+ _check_content(
+ self.asset.content,
+ expected_translations=[None, 'lang3', 'lang1']
+ )
+
+ del self.asset.content['settings']['default_language']
+ self.asset.save()
+ _check_content(
+ self.asset.content,
+ expected_translations=[None, 'lang3', 'lang1']
+ )
def test_rename_translation(self):
'''
diff --git a/kpi/tests/test_cloning.py b/kpi/tests/test_cloning.py
index db02b9d777..ce3ac8e10b 100644
--- a/kpi/tests/test_cloning.py
+++ b/kpi/tests/test_cloning.py
@@ -69,7 +69,7 @@ def _clone_asset(self, original_asset, partial_update=False, **kwargs):
expected_status_code = kwargs.pop('expected_status_code',
status_code)
- response = action(endpoint, kwargs)
+ response = action(endpoint, data=kwargs, format='json')
self.assertEqual(response.status_code, expected_status_code)
if expected_status_code != status_code:
@@ -195,8 +195,8 @@ def _bad_clone():
self.assertRaises(BadAssetTypeException, _bad_clone)
- def test_clone_template_to_existing_asset(self):
- self.login(self.someuser.username, self.someuser_password)
+
+ def _create_sample_survey_and_template(self):
survey_settings = {
"sector": {
"value": "Arts, Entertainment, and Recreation",
@@ -233,6 +233,13 @@ def test_clone_template_to_existing_asset(self):
asset_type=ASSET_TYPE_TEMPLATE
)
+ return survey_asset, template_asset
+
+
+ def test_clone_template_to_existing_asset(self):
+ self.login(self.someuser.username, self.someuser_password)
+ survey_asset, template_asset = self._create_sample_survey_and_template()
+
modified_survey_asset = self._clone_asset(template_asset,
partial_update=True,
uid=survey_asset.uid,
@@ -243,6 +250,26 @@ def test_clone_template_to_existing_asset(self):
self.assertEqual(modified_survey_asset.settings.get("country").get("value"), "CAN")
self.assertEqual(modified_survey_asset.asset_type, survey_asset.asset_type)
+
+ def test_override_settings_while_cloning_template_to_existing_asset(self):
+ self.login(self.someuser.username, self.someuser_password)
+ survey_asset, template_asset = self._create_sample_survey_and_template()
+ modified_survey_asset = self._clone_asset(
+ template_asset,
+ partial_update=True,
+ uid=survey_asset.uid,
+ asset_type=survey_asset.asset_type,
+ settings={'description': 'I prefer my own, thank you very much!'},
+ )
+ self.assertEqual(
+ modified_survey_asset.settings['description'],
+ 'I prefer my own, thank you very much!'
+ )
+ self.assertEqual(
+ modified_survey_asset.settings["country"]["value"], "CAN"
+ )
+
+
# TODO
# def test_clone_collection(self):
# raise NotImplementedError
diff --git a/kpi/tests/test_mock_data.py b/kpi/tests/test_mock_data.py
index 7b6e401e4c..7c0819d613 100644
--- a/kpi/tests/test_mock_data.py
+++ b/kpi/tests/test_mock_data.py
@@ -95,12 +95,11 @@ def setUp(self):
submission.update({
'__version__': v_uid
})
- self.asset.deployment._mock_submission(submission)
- self.asset.save(create_version=False)
+ self.asset.deployment.mock_submissions(submissions)
schemas = [v.to_formpack_schema() for v in self.asset.deployed_versions]
self.fp = FormPack(versions=schemas, id_string=self.asset.uid)
self.vs = self.fp.versions.keys()
- self.submissions = self.asset.deployment._get_submissions()
+ self.submissions = self.asset.deployment.get_submissions()
def test_kobo_apps_reports_report_data(self):
values = report_data.data_by_identifiers(self.asset,
@@ -172,7 +171,7 @@ def test_kobo_apps_reports_report_data_translation(self):
u'\u0627\u0644\u062b\u0627\u0646\u064a'))
def test_export_works_if_no_version_value_provided_in_submission(self):
- submissions = self.asset.deployment._get_submissions()
+ submissions = self.asset.deployment.get_submissions()
for submission in submissions:
del submission['__version__']
@@ -206,7 +205,7 @@ def test_has_report_styles(self):
self.assertTrue(self.asset.report_styles is not None)
def test_formpack_results(self):
- submissions = self.asset.deployment._get_submissions()
+ submissions = self.asset.deployment.get_submissions()
def _get_autoreport_values(qname, key, lang=None, index=False):
stats = OrderedDict(_get_stats_object(self.fp,
@@ -237,4 +236,4 @@ def _get_autoreport_values(qname, key, lang=None, index=False):
def test_has_version_and_submissions(self):
self.assertEqual(self.asset.asset_versions.count(), 2)
self.assertTrue(self.asset.has_deployment)
- self.assertEqual(self.asset.deployment._submission_count(), 4)
+ self.assertEqual(self.asset.deployment.submission_count, 4)
diff --git a/kpi/tests/test_mock_data_conflicting_version_exports.py b/kpi/tests/test_mock_data_conflicting_version_exports.py
index 2532b3b22f..fc337e9b9e 100644
--- a/kpi/tests/test_mock_data_conflicting_version_exports.py
+++ b/kpi/tests/test_mock_data_conflicting_version_exports.py
@@ -31,7 +31,7 @@ def setUp(self):
self.asset = Asset.objects.get(uid='axD3Wc8ZnfgLXBcURRt5fM')
# To avoid cluttering the fixture, assign permissions here
self.asset.assign_perm(self.user, 'view_submissions')
- self.submissions = self.asset.deployment._get_submissions()
+ self.submissions = self.asset.deployment.get_submissions()
self.submission_id_field = '_id'
self.formpack, self.submission_stream = report_data.build_formpack(
self.asset,
@@ -67,7 +67,7 @@ def test_csv_export(self):
export_task.data = {
'source': reverse('asset-detail', args=[self.asset.uid]),
'type': 'csv',
- 'lang': 'xml'
+ 'lang': '_xml'
}
messages = defaultdict(list)
export_task._run_task(messages)
diff --git a/kpi/tests/test_mock_data_exports.py b/kpi/tests/test_mock_data_exports.py
index c265b55972..ad3ce49b05 100644
--- a/kpi/tests/test_mock_data_exports.py
+++ b/kpi/tests/test_mock_data_exports.py
@@ -2,7 +2,9 @@
from __future__ import unicode_literals
import os
+import mock
import xlrd
+import zipfile
import datetime
import unittest
from collections import defaultdict
@@ -192,11 +194,10 @@ def setUp(self):
submission.update({
'__version__': v_uid
})
- self.asset.deployment._mock_submission(submission)
- self.asset.save(create_version=False)
+ self.asset.deployment.mock_submissions(self.submissions)
self.formpack, self.submission_stream = report_data.build_formpack(
self.asset,
- submission_stream=self.asset.deployment._get_submissions()
+ submission_stream=self.asset.deployment.get_submissions()
)
def run_csv_export_test(self, expected_lines, export_options=None):
@@ -229,11 +230,11 @@ def run_csv_export_test(self, expected_lines, export_options=None):
def test_csv_export_default_options(self):
# FIXME: Is this right? English is listed as the first translation
expected_lines = [
- '"start";"end";"¿Qué tipo de simetría tiene?";"¿Qué tipo de simetría tiene?/Esférico";"¿Qué tipo de simetría tiene?/Radial";"¿Qué tipo de simetría tiene?/Bilateral";"¿Cuántos segmentos tiene tu cuerpo?";"¿Tienes fluidos corporales que ocupan espacio intracelular?";"¿Desciende de un organismo unicelular ancestral?";"_id";"_uuid";"_submission_time";"_index"',
- '"";"";"#symmetry";"#symmetry";"#symmetry";"#symmetry";"#segments";"#fluids";"";"";"";"";""',
- '"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Esférico Radial Bilateral";"1";"1";"1";"6";"Sí, y algún espacio extracelular";"No";"61";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"1"',
- '"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Sí";"No";"62";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"2"',
- '"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Inseguro";"Sí";"63";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"3"',
+ '"start";"end";"¿Qué tipo de simetría tiene?";"¿Qué tipo de simetría tiene?/Esférico";"¿Qué tipo de simetría tiene?/Radial";"¿Qué tipo de simetría tiene?/Bilateral";"¿Cuántos segmentos tiene tu cuerpo?";"¿Tienes fluidos corporales que ocupan espacio intracelular?";"¿Desciende de un organismo unicelular ancestral?";"_id";"_uuid";"_submission_time";"_validation_status";"_index"',
+ '"";"";"#symmetry";"#symmetry";"#symmetry";"#symmetry";"#segments";"#fluids";"";"";"";"";"";""',
+ '"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Esférico Radial Bilateral";"1";"1";"1";"6";"Sí, y algún espacio extracelular";"No";"61";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"";"1"',
+ '"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Sí";"No";"62";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"";"2"',
+ '"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Inseguro";"Sí";"63";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"3"',
]
self.run_csv_export_test(expected_lines)
@@ -242,11 +243,11 @@ def test_csv_export_english_labels(self):
'lang': 'English',
}
expected_lines = [
- '"start";"end";"What kind of symmetry do you have?";"What kind of symmetry do you have?/Spherical";"What kind of symmetry do you have?/Radial";"What kind of symmetry do you have?/Bilateral";"How many segments does your body have?";"Do you have body fluids that occupy intracellular space?";"Do you descend from an ancestral unicellular organism?";"_id";"_uuid";"_submission_time";"_index"',
- '"";"";"#symmetry";"#symmetry";"#symmetry";"#symmetry";"#segments";"#fluids";"";"";"";"";""',
- '"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Spherical Radial Bilateral";"1";"1";"1";"6";"Yes, and some extracellular space";"No";"61";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"1"',
- '"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Yes";"No";"62";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"2"',
- '"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Unsure";"Yes";"63";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"3"',
+ '"start";"end";"What kind of symmetry do you have?";"What kind of symmetry do you have?/Spherical";"What kind of symmetry do you have?/Radial";"What kind of symmetry do you have?/Bilateral";"How many segments does your body have?";"Do you have body fluids that occupy intracellular space?";"Do you descend from an ancestral unicellular organism?";"_id";"_uuid";"_submission_time";"_validation_status";"_index"',
+ '"";"";"#symmetry";"#symmetry";"#symmetry";"#symmetry";"#segments";"#fluids";"";"";"";"";"";""',
+ '"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Spherical Radial Bilateral";"1";"1";"1";"6";"Yes, and some extracellular space";"No";"61";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"";"1"',
+ '"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Yes";"No";"62";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"";"2"',
+ '"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Unsure";"Yes";"63";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"3"',
]
self.run_csv_export_test(expected_lines, export_options)
@@ -255,11 +256,11 @@ def test_csv_export_spanish_labels(self):
'lang': 'Spanish',
}
expected_lines = [
- '"start";"end";"¿Qué tipo de simetría tiene?";"¿Qué tipo de simetría tiene?/Esférico";"¿Qué tipo de simetría tiene?/Radial";"¿Qué tipo de simetría tiene?/Bilateral";"¿Cuántos segmentos tiene tu cuerpo?";"¿Tienes fluidos corporales que ocupan espacio intracelular?";"¿Desciende de un organismo unicelular ancestral?";"_id";"_uuid";"_submission_time";"_index"',
- '"";"";"#symmetry";"#symmetry";"#symmetry";"#symmetry";"#segments";"#fluids";"";"";"";"";""',
- '"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Esférico Radial Bilateral";"1";"1";"1";"6";"Sí, y algún espacio extracelular";"No";"61";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"1"',
- '"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Sí";"No";"62";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"2"',
- '"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Inseguro";"Sí";"63";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"3"',
+ '"start";"end";"¿Qué tipo de simetría tiene?";"¿Qué tipo de simetría tiene?/Esférico";"¿Qué tipo de simetría tiene?/Radial";"¿Qué tipo de simetría tiene?/Bilateral";"¿Cuántos segmentos tiene tu cuerpo?";"¿Tienes fluidos corporales que ocupan espacio intracelular?";"¿Desciende de un organismo unicelular ancestral?";"_id";"_uuid";"_submission_time";"_validation_status";"_index"',
+ '"";"";"#symmetry";"#symmetry";"#symmetry";"#symmetry";"#segments";"#fluids";"";"";"";"";"";""',
+ '"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Esférico Radial Bilateral";"1";"1";"1";"6";"Sí, y algún espacio extracelular";"No";"61";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"";"1"',
+ '"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Sí";"No";"62";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"";"2"',
+ '"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Inseguro";"Sí";"63";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"3"',
]
self.run_csv_export_test(expected_lines, export_options)
@@ -269,10 +270,10 @@ def test_csv_export_english_labels_no_hxl(self):
'tag_cols_for_header': [],
}
expected_lines = [
- '"start";"end";"What kind of symmetry do you have?";"What kind of symmetry do you have?/Spherical";"What kind of symmetry do you have?/Radial";"What kind of symmetry do you have?/Bilateral";"How many segments does your body have?";"Do you have body fluids that occupy intracellular space?";"Do you descend from an ancestral unicellular organism?";"_id";"_uuid";"_submission_time";"_index"',
- '"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Spherical Radial Bilateral";"1";"1";"1";"6";"Yes, and some extracellular space";"No";"61";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"1"',
- '"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Yes";"No";"62";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"2"',
- '"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Unsure";"Yes";"63";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"3"',
+ '"start";"end";"What kind of symmetry do you have?";"What kind of symmetry do you have?/Spherical";"What kind of symmetry do you have?/Radial";"What kind of symmetry do you have?/Bilateral";"How many segments does your body have?";"Do you have body fluids that occupy intracellular space?";"Do you descend from an ancestral unicellular organism?";"_id";"_uuid";"_submission_time";"_validation_status";"_index"',
+ '"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Spherical Radial Bilateral";"1";"1";"1";"6";"Yes, and some extracellular space";"No";"61";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"";"1"',
+ '"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Yes";"No";"62";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"";"2"',
+ '"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Unsure";"Yes";"63";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"3"',
]
self.run_csv_export_test(expected_lines, export_options)
@@ -283,22 +284,22 @@ def test_csv_export_english_labels_group_sep(self):
'group_sep': '%',
}
expected_lines = [
- '"start";"end";"What kind of symmetry do you have?";"What kind of symmetry do you have?%Spherical";"What kind of symmetry do you have?%Radial";"What kind of symmetry do you have?%Bilateral";"How many segments does your body have?";"Do you have body fluids that occupy intracellular space?";"Do you descend from an ancestral unicellular organism?";"_id";"_uuid";"_submission_time";"_index"',
- '"";"";"#symmetry";"#symmetry";"#symmetry";"#symmetry";"#segments";"#fluids";"";"";"";"";""',
- '"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Spherical Radial Bilateral";"1";"1";"1";"6";"Yes, and some extracellular space";"No";"61";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"1"',
- '"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Yes";"No";"62";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"2"',
- '"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Unsure";"Yes";"63";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"3"',
+ '"start";"end";"What kind of symmetry do you have?";"What kind of symmetry do you have?%Spherical";"What kind of symmetry do you have?%Radial";"What kind of symmetry do you have?%Bilateral";"How many segments does your body have?";"Do you have body fluids that occupy intracellular space?";"Do you descend from an ancestral unicellular organism?";"_id";"_uuid";"_submission_time";"_validation_status";"_index"',
+ '"";"";"#symmetry";"#symmetry";"#symmetry";"#symmetry";"#segments";"#fluids";"";"";"";"";"";""',
+ '"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Spherical Radial Bilateral";"1";"1";"1";"6";"Yes, and some extracellular space";"No";"61";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"";"1"',
+ '"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Yes";"No";"62";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"";"2"',
+ '"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Unsure";"Yes";"63";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"3"',
]
self.run_csv_export_test(expected_lines, export_options)
def test_csv_export_hierarchy_in_labels(self):
export_options = {'hierarchy_in_labels': 'true'}
expected_lines = [
- '"start";"end";"Características externas/¿Qué tipo de simetría tiene?";"Características externas/¿Qué tipo de simetría tiene?/Esférico";"Características externas/¿Qué tipo de simetría tiene?/Radial";"Características externas/¿Qué tipo de simetría tiene?/Bilateral";"Características externas/¿Cuántos segmentos tiene tu cuerpo?";"¿Tienes fluidos corporales que ocupan espacio intracelular?";"¿Desciende de un organismo unicelular ancestral?";"_id";"_uuid";"_submission_time";"_index"',
- '"";"";"#symmetry";"#symmetry";"#symmetry";"#symmetry";"#segments";"#fluids";"";"";"";"";""',
- '"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Esférico Radial Bilateral";"1";"1";"1";"6";"Sí, y algún espacio extracelular";"No";"61";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"1"',
- '"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Sí";"No";"62";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"2"',
- '"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Inseguro";"Sí";"63";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"3"',
+ '"start";"end";"Características externas/¿Qué tipo de simetría tiene?";"Características externas/¿Qué tipo de simetría tiene?/Esférico";"Características externas/¿Qué tipo de simetría tiene?/Radial";"Características externas/¿Qué tipo de simetría tiene?/Bilateral";"Características externas/¿Cuántos segmentos tiene tu cuerpo?";"¿Tienes fluidos corporales que ocupan espacio intracelular?";"¿Desciende de un organismo unicelular ancestral?";"_id";"_uuid";"_submission_time";"_validation_status";"_index"',
+ '"";"";"#symmetry";"#symmetry";"#symmetry";"#symmetry";"#segments";"#fluids";"";"";"";"";"";""',
+ '"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Esférico Radial Bilateral";"1";"1";"1";"6";"Sí, y algún espacio extracelular";"No";"61";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"";"1"',
+ '"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Sí";"No";"62";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"";"2"',
+ '"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Inseguro";"Sí";"63";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"3"',
]
self.run_csv_export_test(expected_lines, export_options)
@@ -315,11 +316,11 @@ def test_xls_export_english_labels(self):
self.assertFalse(messages)
expected_rows = [
- ['start', 'end', 'What kind of symmetry do you have?', 'What kind of symmetry do you have?/Spherical', 'What kind of symmetry do you have?/Radial', 'What kind of symmetry do you have?/Bilateral', 'How many segments does your body have?', 'Do you have body fluids that occupy intracellular space?', 'Do you descend from an ancestral unicellular organism?', '_id', '_uuid', '_submission_time', '_index'],
- ['', '', '#symmetry', '#symmetry', '#symmetry', '#symmetry', '#segments', '#fluids', '', '', '', '', ''],
- ['2017-10-23T05:40:39.000-04:00', '2017-10-23T05:41:13.000-04:00', 'Spherical Radial Bilateral', '1', '1', '1', '6', 'Yes, and some extracellular space', 'No', 61.0, '48583952-1892-4931-8d9c-869e7b49bafb', '2017-10-23T09:41:19', 1.0],
- ['2017-10-23T05:41:14.000-04:00', '2017-10-23T05:41:32.000-04:00', 'Radial', '0', '1', '0', '3', 'Yes', 'No', 62.0, '317ba7b7-bea4-4a8c-8620-a483c3079c4b', '2017-10-23T09:41:38', 2.0],
- ['2017-10-23T05:41:32.000-04:00', '2017-10-23T05:42:05.000-04:00', 'Bilateral', '0', '0', '1', '2', 'No / Unsure', 'Yes', 63.0, '3f15cdfe-3eab-4678-8352-7806febf158d', '2017-10-23T09:42:11', 3.0],
+ ['start', 'end', 'What kind of symmetry do you have?', 'What kind of symmetry do you have?/Spherical', 'What kind of symmetry do you have?/Radial', 'What kind of symmetry do you have?/Bilateral', 'How many segments does your body have?', 'Do you have body fluids that occupy intracellular space?', 'Do you descend from an ancestral unicellular organism?', '_id', '_uuid', '_submission_time', '_validation_status', '_index'],
+ ['', '', '#symmetry', '#symmetry', '#symmetry', '#symmetry', '#segments', '#fluids', '', '', '', '', '', ''],
+ ['2017-10-23T05:40:39.000-04:00', '2017-10-23T05:41:13.000-04:00', 'Spherical Radial Bilateral', '1', '1', '1', '6', 'Yes, and some extracellular space', 'No', 61.0, '48583952-1892-4931-8d9c-869e7b49bafb', '2017-10-23T09:41:19', '', 1.0],
+ ['2017-10-23T05:41:14.000-04:00', '2017-10-23T05:41:32.000-04:00', 'Radial', '0', '1', '0', '3', 'Yes', 'No', 62.0, '317ba7b7-bea4-4a8c-8620-a483c3079c4b', '2017-10-23T09:41:38', '', 2.0],
+ ['2017-10-23T05:41:32.000-04:00', '2017-10-23T05:42:05.000-04:00', 'Bilateral', '0', '0', '1', '2', 'No / Unsure', 'Yes', 63.0, '3f15cdfe-3eab-4678-8352-7806febf158d', '2017-10-23T09:42:11', '', 3.0],
]
book = xlrd.open_workbook(file_contents=export_task.result.read())
self.assertEqual(book.sheet_names(), [self.asset.name])
@@ -331,6 +332,88 @@ def test_xls_export_english_labels(self):
self.assertEqual(result_row, expected_row)
row_index += 1
+ def test_export_spss_labels(self):
+ export_task = ExportTask()
+ export_task.user = self.user
+ export_task.data = {
+ 'source': reverse('asset-detail', args=[self.asset.uid]),
+ 'type': 'spss_labels',
+ }
+ messages = defaultdict(list)
+ # Set the current date and time artificially to generate a predictable
+ # file name for the export
+ utcnow = datetime.datetime.utcnow()
+ with mock.patch('kpi.models.import_export_task.utcnow') as mock_utcnow:
+ mock_utcnow.return_value = utcnow
+ export_task._run_task(messages)
+ self.assertFalse(messages)
+ self.assertEqual(
+ os.path.split(export_task.result.name)[-1],
+ 'Identificaci\xf3n de animales - all versions - SPSS Labels - '
+ '{date:%Y-%m-%d-%H-%M-%S}.zip'.format(date=utcnow)
+ )
+ expected_file_names_and_content_lines = {
+ 'Identificaci\xf3n de animales - Spanish - SPSS labels.sps': [
+ '\ufeffVARIABLE LABELS',
+ " start 'start'",
+ " /end 'end'",
+ " /What_kind_of_symmetry_do_you_have '\xbfQu\xe9 tipo de simetr\xeda tiene?'",
+ " /What_kind_of_symmetry_do_you_have_spherical '\xbfQu\xe9 tipo de simetr\xeda tiene? :: Esf\xe9rico'",
+ " /What_kind_of_symmetry_do_you_have_radial '\xbfQu\xe9 tipo de simetr\xeda tiene? :: Radial'",
+ " /What_kind_of_symmetry_do_you_have_bilateral '\xbfQu\xe9 tipo de simetr\xeda tiene? :: Bilateral'",
+ " /How_many_segments_does_your_body_have '\xbfCu\xe1ntos segmentos tiene tu cuerpo?'",
+ " /Do_you_have_body_flu_intracellular_space '\xbfTienes fluidos corporales que ocupan espacio intracelular?'",
+ " /Do_you_descend_from_unicellular_organism '\xbfDesciende de un organismo unicelular ancestral?'",
+ " /_id '_id'",
+ " /_uuid '_uuid'",
+ " /_submission_time '_submission_time'",
+ " /_validation_status '_validation_status'",
+ ' .',
+ 'VALUE LABELS',
+ ' Do_you_have_body_flu_intracellular_space',
+ " 'yes' 'S\xed'",
+ " 'yes__and_some_' 'S\xed, y alg\xfan espacio extracelular'",
+ " 'no___unsure' 'No / Inseguro'",
+ ' /Do_you_descend_from_unicellular_organism',
+ " 'yes' 'S\xed'",
+ " 'no' 'No'",
+ ' .'
+ ],
+ 'Identificaci\xf3n de animales - English - SPSS labels.sps': [
+ '\ufeffVARIABLE LABELS',
+ " start 'start'",
+ " /end 'end'",
+ " /What_kind_of_symmetry_do_you_have 'What kind of symmetry do you have?'",
+ " /What_kind_of_symmetry_do_you_have_spherical 'What kind of symmetry do you have? :: Spherical'",
+ " /What_kind_of_symmetry_do_you_have_radial 'What kind of symmetry do you have? :: Radial'",
+ " /What_kind_of_symmetry_do_you_have_bilateral 'What kind of symmetry do you have? :: Bilateral'",
+ " /How_many_segments_does_your_body_have 'How many segments does your body have?'",
+ " /Do_you_have_body_flu_intracellular_space 'Do you have body fluids that occupy intracellular space?'",
+ " /Do_you_descend_from_unicellular_organism 'Do you descend from an ancestral unicellular organism?'",
+ " /_id '_id'",
+ " /_uuid '_uuid'",
+ " /_submission_time '_submission_time'",
+ " /_validation_status '_validation_status'",
+ ' .',
+ 'VALUE LABELS',
+ ' Do_you_have_body_flu_intracellular_space',
+ " 'yes' 'Yes'",
+ " 'yes__and_some_' 'Yes, and some extracellular space'",
+ " 'no___unsure' 'No / Unsure'",
+ ' /Do_you_descend_from_unicellular_organism',
+ " 'yes' 'Yes'",
+ " 'no' 'No'",
+ ' .'
+ ],
+ }
+ result_zip = zipfile.ZipFile(export_task.result, 'r')
+ for name, content_lines in expected_file_names_and_content_lines.items():
+ self.assertEqual(
+ # we have `unicode_literals` but the rest of the app doesn't
+ result_zip.open(name, 'r').read().decode('utf-8'),
+ '\r\n'.join(content_lines)
+ )
+
def test_remove_excess_exports(self):
task_data = {
'source': reverse('asset-detail', args=[self.asset.uid]),
@@ -460,10 +543,10 @@ def test_export_latest_version_only(self):
self.asset.save()
self.asset.deploy(backend='mock', active=True)
expected_lines = [
- '"¿Desciende de... etiqueta nueva";"_id";"_uuid";"_submission_time";"_index"',
- '"no";"61";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"1"',
- '"no";"62";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"2"',
- '"yes";"63";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"3"'
+ '"¿Desciende de... etiqueta nueva";"_id";"_uuid";"_submission_time";"_validation_status";"_index"',
+ '"no";"61";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"";"1"',
+ '"no";"62";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"";"2"',
+ '"yes";"63";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"3"'
]
self.run_csv_export_test(
expected_lines, {'fields_from_all_versions': 'false'})
diff --git a/kpi/tests/test_mongo_decoding.py b/kpi/tests/test_mongo_decoding.py
index 96f257f6e1..c8b5bdac0e 100644
--- a/kpi/tests/test_mongo_decoding.py
+++ b/kpi/tests/test_mongo_decoding.py
@@ -6,7 +6,16 @@
from django.test import TestCase
from django.conf import settings
-from kobo.apps.reports import report_data
+from kpi.utils.mongo_helper import MongoDecodingHelper
+
+
+def get_instances_from_mongo():
+ query = {'_deleted_at': {'$exists': False}}
+ instances = settings.MONGO_DB.instances.find(query)
+ return (
+ MongoDecodingHelper.to_readable_dict(instance)
+ for instance in instances
+ )
class FakeMongoDB(object):
@@ -78,8 +87,7 @@ def test_decoding_base64_dots(self):
'regular': '1.3'
}]
settings.MONGO_DB = FakeMongoDB(encoded_results)
- decoded = list(report_data.get_instances_for_userform_id(
- '_userform_id is ignored by FakeMongoDB'))
+ decoded = list(get_instances_from_mongo())
expected_results = decoded_results
self.assertEqual(decoded, expected_results)
@@ -144,7 +152,6 @@ def test_decoding_base64_dots_in_repeating_groups(self):
'regular': '1.3'
}]
settings.MONGO_DB = FakeMongoDB(encoded_results)
- decoded = list(report_data.get_instances_for_userform_id(
- '_userform_id is ignored by FakeMongoDB'))
+ decoded = list(get_instances_from_mongo())
expected_results = decoded_results
self.assertEqual(decoded, expected_results)
diff --git a/kpi/urls.py b/kpi/urls.py
index 8fca8ec175..5933f30a1f 100644
--- a/kpi/urls.py
+++ b/kpi/urls.py
@@ -33,13 +33,14 @@
from kpi.forms import RegistrationForm
from hub.views import switch_builder
from hub.models import ConfigurationFile
+from kobo.apps.hook.views import HookViewSet, HookLogViewSet
# TODO: Give other apps their own `urls.py` files instead of importing their
# views directly! See
# https://docs.djangoproject.com/en/1.8/intro/tutorial03/#namespacing-url-names
router = ExtendedDefaultRouter()
-asset_routes = router.register(r'assets', AssetViewSet)
+asset_routes = router.register(r'assets', AssetViewSet, base_name='asset')
asset_routes.register(r'versions',
AssetVersionViewSet,
base_name='asset-version',
@@ -56,6 +57,17 @@
parents_query_lookups=['asset'],
)
+hook_routes = asset_routes.register(r'hooks',
+ HookViewSet,
+ base_name='hook',
+ parents_query_lookups=['asset'],
+ )
+
+hook_routes.register(r'logs',
+ HookLogViewSet,
+ base_name='hook-log',
+ parents_query_lookups=['asset', 'hook'],
+ )
router.register(r'asset_snapshots', AssetSnapshotViewSet)
router.register(
diff --git a/kpi/utils/log.py b/kpi/utils/log.py
new file mode 100644
index 0000000000..b338b86606
--- /dev/null
+++ b/kpi/utils/log.py
@@ -0,0 +1,10 @@
+# -*- coding: utf-8 -*-
+import logging as stdlib_logging
+
+def get_default_logger():
+ return stdlib_logging.getLogger("console_logger")
+
+# logging does not print to stdout when RAVEN/sentry is activated.
+# To avoid calling logging.getLogger everywhere, we shadow the name `logging`
+logging = get_default_logger()
+
diff --git a/kpi/views.py b/kpi/views.py
index b26708ac5b..3caa83f921 100644
--- a/kpi/views.py
+++ b/kpi/views.py
@@ -1,6 +1,7 @@
from distutils.util import strtobool
from itertools import chain
import copy
+from hashlib import md5
import json
import base64
import datetime
@@ -30,7 +31,7 @@
)
from rest_framework.decorators import api_view
from rest_framework.decorators import renderer_classes
-from rest_framework.decorators import detail_route
+from rest_framework.decorators import detail_route, list_route
from rest_framework.decorators import authentication_classes
from rest_framework.parsers import MultiPartParser
from rest_framework.response import Response
@@ -75,7 +76,7 @@
AssetJsonRenderer,
SSJsonRenderer,
XFormRenderer,
- AssetSnapshotXFormRenderer,
+ XMLRenderer,
XlsRenderer,)
from .serializers import (
AssetSerializer, AssetListSerializer,
@@ -103,8 +104,10 @@
COLLECTION_CLONE_FIELDS, ASSET_TYPE_ARG_NAME, CLONE_COMPATIBLE_TYPES, \
ASSET_TYPE_TEMPLATE, ASSET_TYPE_SURVEY, ASSET_TYPES
from deployment_backends.backends import DEPLOYMENT_BACKENDS
-from deployment_backends.kobocat_backend import KobocatDataProxyViewSetMixin
+from deployment_backends.mixin import KobocatDataProxyViewSetMixin
+from kobo.apps.hook.utils import HookUtils
from kpi.exceptions import BadAssetTypeException
+from kpi.utils.log import logging
@login_required
@@ -593,7 +596,7 @@ class AssetSnapshotViewSet(NoUpdateModelViewSet):
queryset = AssetSnapshot.objects.all()
renderer_classes = NoUpdateModelViewSet.renderer_classes + [
- AssetSnapshotXFormRenderer,
+ XMLRenderer,
]
def filter_queryset(self, queryset):
@@ -700,10 +703,54 @@ class SubmissionViewSet(NestedViewSetMixin, viewsets.ViewSet,
KobocatDataProxyViewSetMixin):
'''
TODO: Access the submission data directly instead of merely proxying to
- KoBoCAT
+ KoBoCAT. We can now use `KobocatBackend.get_submissions()` and
+ `KobocatBackend.get_submission()`
'''
parent_model = Asset
+ # @TODO Handle list of ids before using it
+ # def list(self, request, *args, **kwargs):
+ # asset_uid = self.get_parents_query_dict().get("asset")
+ # asset = get_object_or_404(self.parent_model, uid=asset_uid)
+ # format_type = kwargs.get("format", "json")
+ # submissions = asset.deployment.get_submissions(format_type=format_type)
+ # return Response(list(submissions))
+
+ def create(self, request, *args, **kwargs):
+ """
+ This endpoint is handled by the SubmissionViewSet (not KobocatDataProxyViewSetMixin)
+ because it doesn't use KC proxy.
+ It's only used to trigger hook services of the Asset (so far).
+
+ :param request:
+ :return:
+ """
+ # Follow Open Rosa responses by default
+ response_status_code = status.HTTP_202_ACCEPTED
+ response = {
+ "detail": _(
+ "We got and saved your data, but may not have fully processed it. You should not try to resubmit.")
+ }
+ try:
+ asset_uid = self.get_parents_query_dict().get("asset")
+ asset = get_object_or_404(self.parent_model, uid=asset_uid)
+ instance_id = request.data.get("instance_id")
+ if not HookUtils.call_services(asset, instance_id):
+ response_status_code = status.HTTP_409_CONFLICT
+ response = {
+ "detail": _(
+ "Your data for instance {} has been already submitted.".format(instance_id))
+ }
+
+ except Exception as e:
+ logging.error("SubmissionViewSet.create - {}".format(str(e)))
+ response = {
+ "detail": _("An error has occurred when calling the external service. Please retry later.")
+ }
+ response_status_code = status.HTTP_500_INTERNAL_SERVER_ERROR
+
+ return Response(response, status=response_status_code)
+
class AssetVersionViewSet(NestedViewSetMixin, viewsets.ModelViewSet):
model = AssetVersion
@@ -724,7 +771,6 @@ def get_queryset(self):
_queryset = self.model.objects.filter(asset__uid=_asset_uid)
if _deployed is not None:
_queryset = _queryset.filter(deployed=_deployed)
- _queryset = _queryset.filter(asset__uid=_asset_uid)
if self.action == 'list':
# Save time by only retrieving fields from the DB that the
# serializer will use
@@ -755,6 +801,17 @@ class AssetViewSet(NestedViewSetMixin, viewsets.ModelViewSet):
>
> curl -X GET https://[kpi-url]/assets/
+ Get a hash of all `version_id`s of assets.
+ Useful for detecting any changes in assets with only one API call
+
+
+ GET /assets/hash/
+
+
+ > Example
+ >
+ > curl -X GET https://[kpi-url]/assets/hash/
+
## CRUD
* `uid` - is the unique identifier of a specific asset
@@ -934,9 +991,11 @@ def _prepare_cloned_data(self, original_asset, source_version, partial_update):
:return: dict
"""
if self._validate_destination_type(original_asset):
+ # `to_clone_dict()` returns only `name`, `content`, `asset_type`,
+ # and `tag_string`
cloned_data = original_asset.to_clone_dict(version=source_version)
- # Merge cloned_data with user's request data.
+ # Allow the user's request data to override `cloned_data`
cloned_data.update(self.request.data.items())
if partial_update:
@@ -956,9 +1015,10 @@ def _prepare_cloned_data(self, original_asset, source_version, partial_update):
if cloned_asset_type in [None, ASSET_TYPE_TEMPLATE, ASSET_TYPE_SURVEY] and \
original_asset.asset_type in [ASSET_TYPE_TEMPLATE, ASSET_TYPE_SURVEY]:
- settings = original_asset.settings
+ settings = original_asset.settings.copy()
settings.pop("share-metadata", None)
- cloned_data.update({"settings": json.dumps(settings)})
+ settings.update(cloned_data.get('settings', {}))
+ cloned_data['settings'] = json.dumps(settings)
# until we get content passed as a dict, transform the content obj to a str
# TODO, verify whether `Asset.content.settings.id_string` should be cleared out.
@@ -997,6 +1057,36 @@ def create(self, request, *args, **kwargs):
return Response(serializer.data, status=status.HTTP_201_CREATED,
headers=headers)
+ @list_route(methods=["GET"], renderer_classes=[renderers.JSONRenderer])
+ def hash(self, request):
+ """
+ Creates a hash of the `version_id`s of all assets accessible to the user.
+ Useful for detecting changes between requests.
+
+ :param request:
+ :return: JSON
+ """
+ user = self.request.user
+ if user.is_anonymous():
+ raise exceptions.NotAuthenticated()
+ else:
+ accessible_assets = get_objects_for_user(
+ user, "view_asset", Asset).filter(asset_type=ASSET_TYPE_SURVEY)\
+ .order_by("uid")
+
+ assets_version_ids = [asset.version_id for asset in accessible_assets if asset.version_id is not None]
+ # Sort alphabetically
+ assets_version_ids.sort()
+
+ if len(assets_version_ids) > 0:
+ hash = md5("".join(assets_version_ids)).hexdigest()
+ else:
+ hash = ""
+
+ return Response({
+ "hash": hash
+ })
+
@detail_route(renderer_classes=[renderers.JSONRenderer])
def content(self, request, uid):
asset = self.get_object()
@@ -1059,6 +1149,8 @@ def deployment(self, request, uid):
contents, but does not change the deployment's identifier
'''
asset = self.get_object()
+ serializer_context = self.get_serializer_context()
+ serializer_context['asset'] = asset
# TODO: Require the client to provide a fully-qualified identifier,
# otherwise provide less kludgy solution
@@ -1078,7 +1170,8 @@ def deployment(self, request, uid):
raise Http404
else:
serializer = DeploymentSerializer(
- asset.deployment, context=self.get_serializer_context())
+ asset.deployment, context=serializer_context
+ )
# TODO: Understand why this 404s when `serializer.data` is not
# coerced to a dict
return Response(dict(serializer.data))
@@ -1094,7 +1187,7 @@ def deployment(self, request, uid):
)
serializer = DeploymentSerializer(
data=request.data,
- context={'asset': asset}
+ context=serializer_context
)
serializer.is_valid(raise_exception=True)
serializer.save()
@@ -1115,7 +1208,7 @@ def deployment(self, request, uid):
serializer = DeploymentSerializer(
asset.deployment,
data=request.data,
- context={'asset': asset},
+ context=serializer_context,
partial=True
)
serializer.is_valid(raise_exception=True)
diff --git a/package.json b/package.json
index 014e784847..9f0044c098 100644
--- a/package.json
+++ b/package.json
@@ -5,32 +5,26 @@
"devDependencies": {
"@mapbox/leaflet-omnivore": "^0.3.4",
"alertifyjs": "^1.9.0",
- "async": "^1.4.2",
- "autoprefixer": "^7.1.1",
+ "autoprefixer": "^9.1.5",
"babel-core": "^6.26.3",
- "babel-eslint": "^8.2.3",
+ "babel-eslint": "^10.0.1",
"babel-loader": "^7.1.4",
- "babel-plugin-add-module-exports": "^0.1.2",
+ "babel-plugin-add-module-exports": "^1.0.0",
"babel-polyfill": "^6.26.0",
"babel-preset-env": "^1.7.0",
"babel-preset-react": "^6.24.1",
"backbone": "^1.1.2",
"backbone-validation": "^0.11.5",
- "bluebird": "^2.9.27",
- "browser-sync": "^2.10.1",
"chai": "^4.1.2",
"chart.js": "^2.7.1",
"classnames": "^2.2.5",
"coffee-loader": "^0.9.0",
"coffeescript": "^1.12.0",
- "css-loader": "^0.28.11",
- "diff": "^1.4.0",
- "es6-map": "^0.1.4",
- "es6-promise": "^3.3.1",
- "eslint": "^4.19.1",
+ "css-loader": "^1.0.0",
+ "eslint": "^5.6.0",
"eslint-loader": "^2.0.0",
"eslint-plugin-react": "^7.10.0",
- "file-loader": "^1.1.11",
+ "file-loader": "^2.0.0",
"font-awesome": "^4.4.0",
"immutable": "^3.4.7",
"jquery": "^3.3.1",
@@ -39,56 +33,46 @@
"leaflet": "^1.3.1",
"leaflet.heat": "^0.2.0",
"leaflet.markercluster": "^1.3.0",
- "lodash._bindcallback": "^3.0.1",
- "lodash.isarguments": "^3.0.4",
- "lodash.isarray": "^3.0.4",
- "lodash.merge": "^3.3.2",
- "lodash.restparam": "^3.6.1",
- "mime-types": "^2.1.8",
+ "lodash.clonedeep": "^4.5.0",
+ "lodash.merge": "^4.6.1",
"mocha": "^5.2.0",
"mocha-chrome": "^1.1.0",
"moment": "^2.10.3",
- "ms": "^0.7.1",
- "node-sass": "^4.5.1",
+ "node-sass": "^4.9.3",
"object-assign": "^4.1.1",
- "opn-cli": "^3.1.0",
- "postcss": "^6.0.22",
- "postcss-loader": "^2.1.5",
+ "postcss": "^7.0.4",
+ "postcss-loader": "^3.0.0",
"prop-types": "^15.6.1",
- "q": "^1.4.1",
- "react": "^16.4.0",
+ "react": "^16.5.2",
"react-autobind": "^1.0.6",
- "react-autosize-textarea": "^0.4.3",
+ "react-autosize-textarea": "^4.0.0",
"react-cookie": "^0.2.3",
"react-copy-to-clipboard": "^5.0.0",
"react-debounce-input": "^3.1.0",
"react-document-title": "^2.0.2",
"react-dom": "^16.4.0",
- "react-dropzone": "^4.2.11",
+ "react-dropzone": "^5.1.1",
"react-hot-loader": "3.1.0",
- "react-mixin": "^3.0.5",
+ "react-mixin": "^5.0.0",
"react-router": "^3.2.1",
- "react-select": "^1.1.1",
+ "react-select": "^2.0.0",
"react-shortcuts": "^2.0.0",
"react-table": "^6.8.1",
"react-tagsinput": "^3.15.1",
"reflux": "^6.4.1",
"reflux-core": "^1.0.0",
"replace-in-file": "^3.4.0",
- "rimraf": "^2.4.3",
- "roboto-fontface": "^0.4.5",
+ "roboto-fontface": "^0.10.0",
"sass-loader": "^7.0.1",
"select2": "3.5.2-browserify",
- "style-loader": "^0.21.0",
+ "spark-md5": "^3.0.0",
+ "style-loader": "^0.23.0",
"underscore": "^1.8.3",
- "vinyl-source-stream": "^1.1.0",
"webfonts-generator": "^0.4.0",
- "webpack": "^4.10.1",
+ "webpack": "^4.20.2",
"webpack-bundle-tracker": "0.4.0-beta",
- "webpack-cli": "^3.0.0",
- "webpack-dev-middleware": "3.0.0",
- "webpack-dev-server": "^3.1.4",
- "webpack-hot-middleware": "2.21.2"
+ "webpack-cli": "^3.1.1",
+ "webpack-dev-server": "^3.1.8"
},
"scripts": {
"build": "webpack --config webpack/prod.config.js --progress --colors",
diff --git a/scripts/copy_fonts.py b/scripts/copy_fonts.py
index 717b843c76..9756456582 100644
--- a/scripts/copy_fonts.py
+++ b/scripts/copy_fonts.py
@@ -1,14 +1,30 @@
import glob
+import os
import shutil
+
dest_dir = "./jsapp/fonts/"
-print("Copying fonts...")
+def create_folder_if_not_exists():
+ if not os.path.exists(dest_dir):
+ try:
+ os.makedirs(dest_dir)
+ print("Destination folder has been created!")
+ except Exception as e:
+ print("Could not create fonts folder - Error: {}".format(str(e)))
+
+def copy_fonts():
+
+ create_folder_if_not_exists()
+
+ print("Copying fonts...")
+
+ for file in glob.glob("./node_modules/font-awesome/fonts/*.*"):
+ print(file)
+ shutil.copy(file, dest_dir)
+ for file in glob.glob("./node_modules/roboto-fontface/fonts/roboto/*.wof*"):
+ print(file)
+ shutil.copy(file, dest_dir)
-for file in glob.glob("./node_modules/font-awesome/fonts/*.*"):
- print(file)
- shutil.copy(file, dest_dir)
-for file in glob.glob("./node_modules/roboto-fontface/fonts/*.wof*"):
- print(file)
- shutil.copy(file, dest_dir)
+ print("DONE")
-print("DONE")
\ No newline at end of file
+copy_fonts()
\ No newline at end of file
diff --git a/test/xlform/translations.tests.coffee b/test/xlform/translations.tests.coffee
index 4afb5a7ed4..3c8a41b371 100644
--- a/test/xlform/translations.tests.coffee
+++ b/test/xlform/translations.tests.coffee
@@ -1,136 +1,27 @@
-# DISABLED
-# CONSIDER DELETING BEFORE MERGING
-
-# {expect} = require('../helper/fauxChai')
-
-# $inputParser = require("../../jsapp/xlform/src/model.inputParser")
-# $survey = require("../../jsapp/xlform/src/model.survey")
-
-# describe " translations set proper values ", ->
-# process = (src)->
-# parsed = $inputParser.parse(src)
-# new $survey.Survey(parsed)
-
-# it 'example 0', ->
-# survey1 = process(
-# survey: [
-# type: "text"
-# label: "VAL1",
-# name: "val1",
-# ]
-# )
-# survey2 = process(
-# survey: [
-# type: "text"
-# label: ["VAL1"],
-# name: "val1",
-# ]
-# translations: [null]
-# )
-
-# expect(survey1._translation_1).toEqual(null)
-# expect(survey1._translation_2).toEqual(undefined)
-
-# expect(survey2._translation_1).toEqual(null)
-# expect(survey2._translation_2).toEqual(undefined)
-
-# it 'does not have active_translation_name value when none set', ->
-# survey_json = process(
-# survey: [type: "text", label: ["VAL1"], name: "val1"]
-# translations: [null]
-# ).toJSON()
-# expect(survey_json['#active_translation_name']).toBeUndefined()
-
-# it 'passes thru active_translation_name', ->
-# survey = process(
-# survey: [
-# type: "text"
-# label: ["VAL1_NULL", "VAL2_L2"],
-# name: "val1",
-# ]
-# translations: [null, "L2"]
-# '#active_translation_name': 'XYZ'
-# )
-# expect(survey.active_translation_name).toEqual('XYZ')
-# _json = survey.toJSON()
-# expect(_json['#active_translation_name']).toEqual('XYZ')
-
-# it 'fails with invalid active_translation_name', ->
-# run = ->
-# survey = process(
-# survey: [
-# type: "text"
-# label: ["VAL1_NULL", "VAL2_L2"],
-# name: "val1",
-# ]
-# translations: ["L1", "L2"]
-# '#active_translation_name': 'XYZ'
-# )
-# # "#active_translation_name" is set, but refers to a value in "translations"
-# # but in this case there is no null in the translations list so it should
-# # throw an error
-# expect(run).toThrow()
-
-# it 'example 1', ->
-# survey = process(
-# survey: [
-# type: "text"
-# label: ["VAL1_NULL", "VAL2_L2"],
-# name: "val1",
-# ]
-# translations: [null, "L2"]
-# )
-# expect(survey._translation_1).toEqual(null)
-# expect(survey._translation_2).toEqual("L2")
-# r0 = survey.rows.at(0)
-# expect(r0.getLabel('_1')).toEqual('VAL1_NULL')
-# expect(r0.getLabel('_2')).toEqual('VAL2_L2')
-
-# rj0 = survey.toJSON().survey[0]
-# expect(rj0['label']).toBeDefined()
-# expect(rj0['label::L2']).toBeDefined()
-
-# it 'example 2', ->
-# survey = process(
-# survey: [
-# type: "text"
-# label: ["VAL1_L1", "VAL2_L2"],
-# name: "val1",
-# ]
-# translations: ["L1", "L2"]
-# )
-# src = $inputParser.parse(
-# survey: [
-# type: "text"
-# label: ["VAL1_L1", "VAL2_L2"],
-# name: "val1",
-# ]
-# translations: ["L1", "L2"]
-# )
-# expect(src['_active_translation_name']).toEqual("L1")
-# expect(src.translations[0]).toEqual(null)
-
-# expect(survey._translation_2).toEqual("L2")
-# _sjson = survey.toJSON()
-
-# r0 = survey.rows.at(0)
-# expect(r0.getLabel('_1')).toEqual('VAL1_L1')
-# expect(r0.getLabel('_2')).toEqual('VAL2_L2')
-
-# rj0 = _sjson.survey[0]
-# expect(rj0['label']).toBeDefined()
-# expect(rj0['label::L2']).toBeDefined()
-# expect(_sjson['#active_translation_name']).toEqual('L1')
-
-# it 'example 3', ->
-# run = ->
-# survey = process(
-# survey: [
-# type: "text"
-# label: ["VAL1_L2", "VAL2_NULL"],
-# name: "val1",
-# ]
-# translations: ["L2", null]
-# )
-# # run()
-# expect(run).toThrow('There is an unnamed translation in your form definition')
+{expect} = require('../helper/fauxChai')
+
+$inputParser = require("../../jsapp/xlform/src/model.inputParser")
+$survey = require("../../jsapp/xlform/src/model.survey")
+
+describe("translations", ->
+ process = (src) ->
+ parsed = $inputParser.parse(src)
+ new $survey.Survey(parsed)
+
+ it('should not allow editing form with unnamed translation', ->
+ run = ->
+ survey = process(
+ survey: [
+ type: "text"
+ label: ["Ciasto?", "Pizza?"],
+ name: "Pizza survey",
+ ]
+ translations: ["polski (pl)", null]
+ )
+ expect(run).toThrow("""
+ There is an unnamed translation in your form definition.
+ Please give a name to all translations in your form.
+ Use "Manage Translations" option from form landing page.
+ """)
+ )
+)
diff --git a/webpack/dev.server.js b/webpack/dev.server.js
index 480d37a5d0..884cbd5472 100644
--- a/webpack/dev.server.js
+++ b/webpack/dev.server.js
@@ -3,7 +3,11 @@ const path = require('path');
const webpack = require('webpack');
const WebpackCommon = require('./webpack.common');
const BundleTracker = require('webpack-bundle-tracker');
-var publicPath = 'http://localhost:3000/static/compiled/';
+var isPublicDomainDefined = process.env.KOBOFORM_PUBLIC_SUBDOMAIN &&
+ process.env.PUBLIC_DOMAIN_NAME;
+var publicDomain = isPublicDomainDefined ? process.env.KOBOFORM_PUBLIC_SUBDOMAIN
+ + '.' + process.env.PUBLIC_DOMAIN_NAME : 'localhost';
+var publicPath = 'http://' + publicDomain + ':3000/static/compiled/';
module.exports = WebpackCommon({
mode: "development",
@@ -22,7 +26,8 @@ module.exports = WebpackCommon({
disableHostCheck: true,
hot: true,
headers: {'Access-Control-Allow-Origin': '*'},
- port: 3000
+ port: 3000,
+ host: '0.0.0.0'
},
plugins: [
new BundleTracker({path: __dirname, filename: '../webpack-stats.json'}),