Merge pull request #426 from uktrade/release/okukuseku
Release Okukuseku
reupen authored Aug 30, 2017
2 parents 5b8360b + 59e7b30 commit 91bf2fb
Showing 99 changed files with 3,642 additions and 714 deletions.
3 changes: 3 additions & 0 deletions README.md
@@ -29,6 +29,7 @@ Leeloo uses Docker compose to setup and run all the necessary components. The do
```shell
docker-compose run leeloo python manage.py migrate
docker-compose run leeloo python manage.py loadmetadata
docker-compose run leeloo python manage.py load_omis_metadata
docker-compose run leeloo python manage.py createinitialrevisions
```
4. Optionally, you can load some test data and update elasticsearch:
@@ -136,6 +137,7 @@ Dependencies:
./manage.py createsuperuser
./manage.py loadmetadata
./manage.py load_omis_metadata
./manage.py createinitialrevisions
```
@@ -225,6 +227,7 @@ Load metadata:
```shell
docker-compose run leeloo python manage.py loadmetadata
docker-compose run leeloo python manage.py load_omis_metadata
```
Update Elasticsearch:
11 changes: 11 additions & 0 deletions config/settings/common.py
@@ -58,7 +58,10 @@
'datahub.metadata',
'datahub.search.apps.SearchConfig',
'datahub.user',
'datahub.omis.core',
'datahub.omis.order',
'datahub.omis.market',
'datahub.omis.notification',
]

INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
@@ -175,3 +178,11 @@
BULK_CREATE_BATCH_SIZE = env.int('BULK_CREATE_BATCH_SIZE', default=5000)
DOCUMENTS_BUCKET = env('DOCUMENTS_BUCKET')
AV_SERVICE_URL = env('AV_SERVICE_URL', default=None)

# FRONTEND
DATAHUB_FRONTEND_BASE_URL = env('DATAHUB_FRONTEND_BASE_URL', default='http://localhost:3000')

# OMIS
OMIS_NOTIFICATION_ADMIN_EMAIL = env('OMIS_NOTIFICATION_ADMIN_EMAIL', default='')
OMIS_NOTIFICATION_API_KEY = env('OMIS_NOTIFICATION_API_KEY', default='')
OMIS_NOTIFICATION_TEST_API_KEY = env('OMIS_NOTIFICATION_TEST_API_KEY', default='')
2 changes: 1 addition & 1 deletion config/settings/local.py
@@ -28,7 +28,7 @@
# This gets normal Python logging working with Django
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'disable_existing_loggers': False,
'root': {
'level': 'INFO',
'handlers': ['console'],
3 changes: 3 additions & 0 deletions config/settings/test.py
@@ -20,3 +20,6 @@
}
DOCUMENT_BUCKET='test-bucket'
AV_SERVICE_URL='http://av-service/'

OMIS_NOTIFICATION_ADMIN_EMAIL = '[email protected]'
OMIS_NOTIFICATION_API_KEY = ''
1 change: 1 addition & 0 deletions conftest.py
@@ -8,6 +8,7 @@ def django_db_setup(django_db_setup, django_db_blocker):
"""Fixture for DB setup."""
with django_db_blocker.unblock():
call_command('loadmetadata')
call_command('load_omis_metadata')


@pytest.fixture
17 changes: 11 additions & 6 deletions datahub/company/models/company.py
@@ -130,12 +130,14 @@ class Meta: # noqa: D101
class MPTTMeta: # noqa: D101
order_insertion_by = ['name']

@cached_property
@property
def uk_based(self):
"""Whether a company is based in the UK or not."""
if not self.registered_address_country:
return None
return self.registered_address_country.name == constants.Country.united_kingdom.value.name

united_kingdom_id = uuid.UUID(constants.Country.united_kingdom.value.id)
return self.registered_address_country.id == united_kingdom_id

@cached_property
def companies_house_data(self):
@@ -148,6 +150,12 @@ def companies_house_data(self):
except CompaniesHouseCompany.DoesNotExist:
return None

def has_valid_trading_address(self):
"""Tells if Company has all required trading address fields defined."""
return all(
getattr(self, field) for field in self.REQUIRED_TRADING_ADDRESS_FIELDS
)

def _validate_trading_address(self):
"""Trading address fields are not mandatory in the model definition.
@@ -162,10 +170,7 @@ def _validate_trading_address(self):
self.trading_address_postcode,
self.trading_address_country
))
all_required_trading_address_fields = all(
getattr(self, field) for field in self.REQUIRED_TRADING_ADDRESS_FIELDS
)
if any_trading_address_fields and not all_required_trading_address_fields:
if any_trading_address_fields and not self.has_valid_trading_address():
return False
return True

4 changes: 4 additions & 0 deletions datahub/company/test/test_company_views_v3.py
@@ -5,6 +5,7 @@
from django.utils.timezone import now
from rest_framework import status
from rest_framework.reverse import reverse
from reversion.models import Version

from datahub.company.test.factories import (
CompaniesHouseCompanyFactory, CompanyFactory
@@ -499,6 +500,8 @@ def test_audit_log_view(self):
reversion.set_date_created(changed_datetime)
reversion.set_user(self.user)

versions = Version.objects.get_for_object(company)
version_id = versions[0].id
url = reverse('api-v3:company:audit-item', kwargs={'pk': company.pk})

response = self.api_client.get(url)
@@ -508,6 +511,7 @@
assert len(response_data) == 1
entry = response_data[0]

assert entry['id'] == version_id
assert entry['user']['name'] == self.user.name
assert entry['comment'] == 'Changed'
assert entry['timestamp'] == changed_datetime.isoformat()
4 changes: 4 additions & 0 deletions datahub/company/test/test_contact_views.py
@@ -4,6 +4,7 @@
from freezegun import freeze_time
from rest_framework import status
from rest_framework.reverse import reverse
from reversion.models import Version

from datahub.core import constants
from datahub.core.test_utils import APITestMixin
@@ -564,6 +565,8 @@ def test_audit_log_view(self):
reversion.set_date_created(changed_datetime)
reversion.set_user(self.user)

versions = Version.objects.get_for_object(contact)
version_id = versions[0].id
url = reverse('api-v3:contact:audit-item', kwargs={'pk': contact.pk})

response = self.api_client.get(url)
@@ -573,6 +576,7 @@
assert len(response_data) == 1
entry = response_data[0]

assert entry['id'] == version_id
assert entry['user']['name'] == self.user.name
assert entry['comment'] == 'Changed'
assert entry['timestamp'] == changed_datetime.isoformat()
4 changes: 2 additions & 2 deletions datahub/company/urls.py
@@ -28,7 +28,7 @@
})

contact_audit = ContactAuditViewSet.as_view({
'get': 'retrieve',
'get': 'list',
})

contact_urls = [
@@ -55,7 +55,7 @@
})

company_audit = CompanyAuditViewSet.as_view({
'get': 'retrieve',
'get': 'list',
})

company_archive = CompanyViewSetV3.as_view({
8 changes: 3 additions & 5 deletions datahub/company/views.py
@@ -5,8 +5,8 @@
from rest_framework import mixins, viewsets
from rest_framework.filters import OrderingFilter

from datahub.core.audit import AuditViewSet
from datahub.core.mixins import ArchivableViewSetMixin
from datahub.core.serializers import AuditSerializer
from datahub.core.viewsets import CoreViewSetV1, CoreViewSetV3
from datahub.investment.queryset import get_slim_investment_project_queryset
from .models import Advisor, CompaniesHouseCompany, Company, Contact
@@ -67,10 +67,9 @@ class CompanyViewSetV3(ArchivableViewSetMixin, CoreViewSetV3):
)


class CompanyAuditViewSet(CoreViewSetV3):
class CompanyAuditViewSet(AuditViewSet):
"""Company audit views."""

serializer_class = AuditSerializer
queryset = Company.objects.all()


@@ -101,10 +100,9 @@ def get_additional_data(self, create):
return data


class ContactAuditViewSet(CoreViewSetV3):
class ContactAuditViewSet(AuditViewSet):
"""Contact audit views."""

serializer_class = AuditSerializer
queryset = Contact.objects.all()


109 changes: 109 additions & 0 deletions datahub/core/audit.py
@@ -0,0 +1,109 @@
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.viewsets import GenericViewSet
from reversion.models import Version


class AuditViewSet(GenericViewSet):
"""Generic view set for audit logs.
Subclasses must set the queryset class attribute.
Only the LimitOffsetPagination paginator is supported, and so this is set explicitly.
"""

queryset = None
pagination_class = LimitOffsetPagination

def list(self, request, *args, **kwargs):
"""Lists audit log entries (paginated)."""
instance = self.get_object()
return self.create_response(instance)

def create_response(self, instance):
"""Creates an audit log response."""
versions = Version.objects.get_for_object(instance)
proxied_versions = _VersionQuerySetProxy(versions)
versions_subset = self.paginator.paginate_queryset(proxied_versions, self.request)

version_pairs = (
(versions_subset[n], versions_subset[n + 1]) for n in range(len(versions_subset) - 1)
)
results = self._construct_changelog(version_pairs)
return self.paginator.get_paginated_response(results)

@classmethod
def _construct_changelog(cls, version_pairs):
changelog = []
for v_new, v_old in version_pairs:
version_creator = v_new.revision.user
creator_repr = None
if version_creator:
creator_repr = {
'id': str(version_creator.pk),
'first_name': version_creator.first_name,
'last_name': version_creator.last_name,
'name': version_creator.name,
'email': version_creator.email,
}

changelog.append({
'id': v_new.id,
'user': creator_repr,
'timestamp': v_new.revision.date_created,
'comment': v_new.revision.comment or '',
'changes': cls._diff_versions(
v_old.field_dict, v_new.field_dict
),
})

return changelog

@staticmethod
def _diff_versions(old_version, new_version):
changes = {}

for field_name, new_value in new_version.items():
if field_name not in old_version:
changes[field_name] = [None, new_value]
else:
old_value = old_version[field_name]
if old_value != new_value:
changes[field_name] = [old_value, new_value]

return changes


class _VersionQuerySetProxy:
"""
Proxies a VersionQuerySet, modifying slicing behaviour to return an extra item.
This is to allow the AuditViewSet to use the LimitOffsetPagination class
as N+1 versions are required to produce N audit log entries.
"""

def __init__(self, queryset):
"""Initialises the instance, saving a reference to the underlying query set."""
self.queryset = queryset

def __getitem__(self, item):
"""Handles self[item], forwarding calls to underlying query set.
Where item is a slice, 1 is added to item.stop.
"""
if isinstance(item, slice):
if item.step is not None:
raise TypeError('Slicing with step not supported')

stop = item.stop + 1 if item.stop is not None else None
return self.queryset[item.start:stop]

return self.queryset[item]

def count(self):
"""
Gets the count of the query set, minus 1. This is due to N audit log entries
being generated from N+1 query set results.
The return value is always non-negative.
"""
return max(self.queryset.count() - 1, 0)
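
To make the N+1 arithmetic in `AuditViewSet` and `_VersionQuerySetProxy` concrete, here is a minimal, self-contained sketch (plain Python, no Django or django-reversion imports) of how consecutive version snapshots are paired and diffed into changelog entries. The snapshot dictionaries are invented example data, not real reversion output:

```python
# Standalone illustration of the pairing/diff logic in AuditViewSet above.
# The snapshots are made-up example data, newest first, as
# Version.objects.get_for_object() returns them.
versions = [
    {'name': 'Acme Ltd', 'archived': True},   # latest
    {'name': 'Acme Ltd', 'archived': False},
    {'name': 'Acme', 'archived': False},      # oldest
]

def diff_versions(old, new):
    """Same idea as AuditViewSet._diff_versions: changed fields map to [old, new]."""
    return {
        field: [old.get(field), value]
        for field, value in new.items()
        if old.get(field) != value
    }

# With a page limit of 2, the proxy slices 3 (= limit + 1) versions so that
# 2 changelog entries can be built from consecutive (new, old) pairs, and
# count() reports len(versions) - 1 = 2 available entries.
changelog = [diff_versions(old, new) for new, old in zip(versions, versions[1:])]
assert changelog == [{'archived': [False, True]}, {'name': ['Acme', 'Acme Ltd']}]
```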
2 changes: 1 addition & 1 deletion datahub/core/constants.py
@@ -120,7 +120,7 @@ class Country(Enum):
'French Southern Territories', 'ddf682ac-5d95-e211-a939-e4115bead28a'
)
gabon = Constant('Gabon', 'def682ac-5d95-e211-a939-e4115bead28a')
gambia, _the = Constant('Gambia, The', 'dff682ac-5d95-e211-a939-e4115bead28a')
gambia = Constant('Gambia, The', 'dff682ac-5d95-e211-a939-e4115bead28a')
georgia = Constant('Georgia', 'e0f682ac-5d95-e211-a939-e4115bead28a')
germany = Constant('Germany', '83756b9a-5d95-e211-a939-e4115bead28a')
ghana = Constant('Ghana', 'e1f682ac-5d95-e211-a939-e4115bead28a')
1 change: 0 additions & 1 deletion datahub/core/management/commands/loadmetadata.py
@@ -32,6 +32,5 @@ def handle(self, *args, **options):
METADATA_FIXTURE_DIR / 'teams.yaml',
METADATA_FIXTURE_DIR / 'titles.yaml',
METADATA_FIXTURE_DIR / 'uk_regions.yaml',
METADATA_FIXTURE_DIR / 'omis.yaml',
]
)
15 changes: 15 additions & 0 deletions datahub/core/models.py
@@ -56,6 +56,21 @@ def unarchive(self):
self.save()


class DisableableModel(models.Model):
"""Defines a disabled_on field and related logic."""

disabled_on = models.DateTimeField(blank=True, null=True)

def was_disabled_on(self, date_on):
"""Returns True if this object was disabled at time `date_on`, False otherwise."""
if not self.disabled_on:
return False
return self.disabled_on <= date_on

class Meta: # noqa: D101
abstract = True


class BaseConstantModel(models.Model):
"""Constant tables for FKs."""

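As a quick illustration of the `was_disabled_on` semantics added to `DisableableModel` above, here is a minimal sketch with made-up timestamps (plain Python, no model instance involved):

```python
from datetime import datetime, timezone

def was_disabled_on(disabled_on, date_on):
    """Mirrors DisableableModel.was_disabled_on: never disabled -> False,
    otherwise True iff the record was already disabled at `date_on`."""
    if not disabled_on:
        return False
    return disabled_on <= date_on

disabled = datetime(2017, 8, 1, tzinfo=timezone.utc)  # made-up example value
assert was_disabled_on(None, datetime(2017, 9, 1, tzinfo=timezone.utc)) is False
assert was_disabled_on(disabled, datetime(2017, 9, 1, tzinfo=timezone.utc)) is True   # already disabled by date_on
assert was_disabled_on(disabled, datetime(2017, 7, 1, tzinfo=timezone.utc)) is False  # not yet disabled at date_on
```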