diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..2d5c25d --- /dev/null +++ b/.dockerignore @@ -0,0 +1,5 @@ +doc +Dockerfile* +nginx.conf +uwsgi.ini +docker-compose.yml diff --git a/.gitignore b/.gitignore index d494434..01f9735 100644 --- a/.gitignore +++ b/.gitignore @@ -23,7 +23,7 @@ # Gradle: .idea/gradle.xml .idea/libraries - +.idea # Mongo Explorer plugin: .idea/mongoSettings.xml @@ -113,7 +113,8 @@ coverage.xml local_settings.py # Flask stuff: -instance/ +####instance/ + .webassets-cache # Scrapy stuff: diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..04816c7 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "Service_Components/signed_requests"] + path = Service_Components/signed_requests + url = https://github.com/Allu2/signed_requests diff --git a/Account/Dockerfile-account b/Account/Dockerfile-account new file mode 100644 index 0000000..f4963b8 --- /dev/null +++ b/Account/Dockerfile-account @@ -0,0 +1,98 @@ +FROM python:2.7 +MAINTAINER hjhsalo + +# NOTE: Baseimage python:2.7 already contains latest pip + +# TODO: Compile cryptography (and everything else pip related) elsewhere and +# get rid of "build-essential libssl-dev libffi-dev python-dev" +# Maybe according to these instructions: +# https://glyph.twistedmatrix.com/2015/03/docker-deploy-double-dutch.html + +# TODO: Double check and think about the order of commands. Should application +# specific stuff be moved to the end of the file? +# What are actually application specific? etc. + +# TODO: Have brainstorming session on how to properly setup EXPOSE ports, hosts, etc. +# Now it is difficult to come up with sensible defaults. +# Remember to check out what Docker Compose offers. + +# TODO: Make a new user and usergroup. +# Now everything including the ENTRYPOINT is being run as root which is bad +# practise and for example uWSGI complains about this. 
+ +### +# Install +# Specific structure where a single RUN is used to execute everything. +# Based on Docker Best practices -document. To force cache busting. +# https://docs.docker.com/engine/userguide/eng-image/dockerfile_best-practices/#/apt-get +# NOTE: python-mysql.connector is MyData Account specific dependency. +RUN apt-get update && apt-get install -y \ + build-essential \ + libffi-dev \ + libssl-dev \ + python-dev \ + python-mysql.connector \ + && rm -rf /var/lib/apt/lists/* + + +### +# Install application specific Python-dependencies. + +# NOTE: If you have multiple Dockerfile steps that use different files from +# your context, COPY them individually, rather than all at once. This will +# ensure that each step’s build cache is only invalidated (forcing the step +# to be re-run) if the specifically required files change. +# https://docs.docker.com/engine/userguide/eng-image/dockerfile_best-practices/#/add-or-copy +COPY requirements.txt /tmp/ +RUN pip install --requirement /tmp/requirements.txt + +# NOTE: As uwsgi is part the configuration in some sense, how should we make +# this optional or at least clear to the reader? +RUN pip install uwsgi + +# NOTE: j2cli is needed to preprocess config files based on values +# environment variables +# https://github.com/kolypto/j2cli +# https://tryolabs.com/blog/2015/03/26/configurable-docker-containers-for-multiple-environments/ +RUN pip install j2cli + +### +# Setup configuration using environment variables +ENV MYSQL_HOST ${MYSQL_HOST:-'mysql-db'} +ENV MYSQL_USER ${MYSQL_USER:-'mydataaccount'} +ENV MYSQL_PASSWORD ${MYSQL_PASSWORD:-'wr8gabrA'} +ENV MYSQL_DB ${MYSQL_DB:-'MyDataAccount'} +ENV MYSQL_PORT ${MYSQL_PORT:-3306} +ENV URL_PREFIX ${URL_PREFIX:-''} + +### +# Create a installation directory into the container and copy the application +# to that folder. +ARG APP_INSTALL_PATH=/mydata-sdk-account + +# TODO: This may not be needed. Test and refactor if necessary to keep it. 
+ENV APP_INSTALL_PATH ${APP_INSTALL_PATH:-/mydata-account} +RUN mkdir -p $APP_INSTALL_PATH + +# Change current directory inside the container / image to this path. +WORKDIR $APP_INSTALL_PATH + +# Copy everything (including previously copied filed and folders) from directory +# where Dockerfile is located to current WORKDIR inside container. +# Remember that must be inside the context of the build: +# http://serverfault.com/a/666154 +COPY . . + +### +# Configure and run the application using entrypoint.sh. +# NOTE: Content of CMD are the default parameters passed to entrypoint.sh. +# These can be overwritten on "docker run " +# https://docs.docker.com/engine/userguide/eng-image/dockerfile_best-practices/#/entrypoint +COPY ./docker-entrypoint-account.sh / + +ENTRYPOINT ["/docker-entrypoint-account.sh"] + +# NOTE: Maybe this should be replaced with something that doesn't run anything +# and the command below should go to compose.yml ?? +CMD ["uwsgi --socket 0.0.0.0:8080 --protocol=http -w wsgi --callable app --processes 2"] + diff --git a/Account/account_config_template.py.j2 b/Account/account_config_template.py.j2 index f8b6209..820e3a0 100644 --- a/Account/account_config_template.py.j2 +++ b/Account/account_config_template.py.j2 @@ -82,7 +82,7 @@ MYSQL_PORT = 3306 #MYSQL_READ_DEFAULT_FILE = '' # MySQL configuration file to read, see the MySQL documentation for mysql_options(). #MYSQL_USE_UNICODE = '' # If True, CHAR and VARCHAR and TEXT columns are returned as Unicode strings, using the configured character set. MYSQL_CHARSET = 'utf8' # If present, the connection character set will be changed to this character set, if they are not equal. Default: utf-8 -#MYSQL_SQL_MODE = '' # If present, the session SQL mode will be set to the given string. +MYSQL_SQL_MODE = 'TRADITIONAL' # If present, the session SQL mode will be set to the given string. #MYSQL_CURSORCLASS = '' # If present, the cursor class will be set to the given string. 
diff --git a/Account/app/__init__.py b/Account/app/__init__.py index d4dd9bc..679bc42 100644 --- a/Account/app/__init__.py +++ b/Account/app/__init__.py @@ -51,15 +51,18 @@ # ========================================= api = Api(app, prefix=app.config["URL_PREFIX"]) + @app.before_request def new_request(): print("New Request") print("############") + @app.errorhandler(404) def not_found(error): - # When file not found, respon with 403 - return make_response(('FORBIDDEN', 403)) + not_found_error = ApiError(code=404, title="Not Found", detail="Endpoint not found", status="NotFound") + error_dict = not_found_error.to_dict() + return make_json_response(errors=error_dict, status_code=str(error_dict['code'])) @app.errorhandler(ApiError) diff --git a/Account/app/mod_account/controllers.py b/Account/app/mod_account/controllers.py index ac03f5d..4da0c59 100644 --- a/Account/app/mod_account/controllers.py +++ b/Account/app/mod_account/controllers.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- # Import dependencies +import json import uuid import logging import bcrypt # https://github.com/pyca/bcrypt/, https://pypi.python.org/pypi/bcrypt/2.0.0 @@ -17,107 +18,2306 @@ from app import db, api, login_manager, app # Import services -from app.helpers import get_custom_logger -from app.mod_database.helpers import get_db_cursor -from app.mod_account.services import get_contacts_by_account, get_emails_by_account, get_telephones_by_account, \ - get_service_link_record_count_by_account, get_consent_record_count_by_account - +from app.helpers import get_custom_logger, ApiError +from app.mod_api_auth.controllers import get_account_id_by_api_key +from app.mod_database.helpers import get_db_cursor, get_primary_keys_by_account_id, get_slr_ids, \ + get_slsr_ids, get_cr_ids, get_csr_ids # create logger with 'spam_application' -logger = get_custom_logger('mod_account_controllers') +from app.mod_database.models import Particulars, Contacts, Email, Telephone, Settings, EventLog, ServiceLinkRecord, \ + 
ServiceLinkStatusRecord, ConsentRecord, ConsentStatusRecord + +logger = get_custom_logger(__name__) + +def verify_account_id_match(account_id=None, api_key=None, account_id_to_compare=None, endpoint=None): + """ + Verifies that provided account id matches with account id fetched with api key. -def check_account_id(account_id=None): - # TODO: check that session[account_id] and account_id from path are matching + :param account_id: + :param api_key: + :param endpoint: + :return: + """ if account_id is None: - logger.debug('Account ID must be provided as call parameter.') - raise AttributeError('Account ID must be provided as call parameter.') + raise AttributeError("Provide account_id as parameter") + if endpoint is None: + raise AttributeError("Provide endpoint as parameter") + + # Get Account ID by Api-Key or compare to provided + if api_key is not None: + try: + logger.info("Fetching Account ID by Api-Key") + account_id_by_api_key = get_account_id_by_api_key(api_key=api_key) + except Exception as exp: + error_title = "Account ID not found with provided ApiKey" + logger.error(error_title) + raise ApiError( + code=403, + title=error_title, + detail=repr(exp), + source=endpoint + ) + else: + logger.info("account_id_by_api_key: " + str(account_id_by_api_key)) + account_id_to_compare = account_id_by_api_key + error_title = "Authenticated Account ID not matching with Account ID that was provided with request" + elif account_id_to_compare is not None: + logger.info("account_id_to_compare provided as parameter") + error_title = "Account ID in payload not matching with Account ID that was provided with request" + + # Check if Account IDs are matching + logger.info("Check if Account IDs are matching") + logger.info("account_id: " + str(account_id)) + logger.info("account_id_to_compare: " + str(account_id_to_compare)) + if str(account_id) != str(account_id_to_compare): + logger.error(error_title) + raise ApiError( + code=403, + title=error_title, + source=endpoint + ) else: 
- return True + logger.info("Account IDs are matching") + logger.info("account_id: " + str(account_id)) + logger.info("account_id_to_compare: " + str(account_id_to_compare)) + + return True -def get_potential_services_count(cursor=None, account_id=None): - data = randint(10, 100) - return cursor, data +################################## +################################## +# Particulars +################################## +################################## +def get_particular(account_id=None, id=None, cursor=None): + """ + Get one particular entry from database by Account ID and Particulars ID + :param account_id: + :param id: + :return: Particular dict + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if id is None: + raise AttributeError("Provide id as parameter") + if cursor is None: + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + try: + logger.info("Creating Particulars object") + db_entry_object = Particulars(account_id=account_id, id=id) + except Exception as exp: + error_title = "Failed to create Particulars object" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.debug("Particulars object created: " + db_entry_object.log_entry) + + # Get particulars from DB + try: + cursor = db_entry_object.from_db(cursor=cursor) + except Exception as exp: + error_title = "Failed to fetch Particulars from DB" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.info("Particulars fetched") + logger.info("Particulars fetched from db: " + db_entry_object.log_entry) + return db_entry_object.to_api_dict -def get_potential_consents_count(cursor=None, account_id=None): - data = randint(10, 100) - return cursor, data +def get_particulars(account_id=None): + """ + Get all Particulars -entries related to account + :param account_id: + :return: List of Particular dicts + """ + if account_id is None: 
+ raise AttributeError("Provide account_id as parameter") -def get_passive_services_count(cursor=None, account_id=None): - data = randint(10, 100) - return cursor, data + # Get table name + logger.info("Create db_entry_object") + db_entry_object = Particulars() + logger.info(db_entry_object.log_entry) + logger.info("Get table name") + table_name = db_entry_object.table_name + logger.info("Got table name: " + str(table_name)) + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise -def get_passive_consents_count(cursor=None, account_id=None): - data = randint(10, 100) - return cursor, data + # Get primary keys for particulars + try: + cursor, id_list = get_primary_keys_by_account_id(cursor=cursor, account_id=account_id, table_name=table_name) + except Exception as exp: + logger.error('Could not get primary key list: ' + repr(exp)) + raise + # Get Particulars from database + logger.info("Get Particulars from database") + db_entry_list = [] + for id in id_list: + # TODO: try-except needed? 
+ logger.info("Getting particulars with particular_id: " + str(id)) + db_entry_dict = get_particular(account_id=account_id, id=id) + db_entry_list.append(db_entry_dict) + logger.info("Particulars object added to list: " + json.dumps(db_entry_dict)) -def get_service_link_record_count(cursor=None, account_id=None): + return db_entry_list - check_account_id(account_id=account_id) +def update_particular(account_id=None, id=None, attributes=None, cursor=None): + """ + Update one particular entry at database identified by Account ID and Particulars ID + :param account_id: + :param id: + :return: Particular dict + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if id is None: + raise AttributeError("Provide id as parameter") + if attributes is None: + raise AttributeError("Provide attributes as parameter") + if not isinstance(attributes, dict): + raise AttributeError("attributes must be a dict") if cursor is None: + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + try: + db_entry_object = Particulars(account_id=account_id, id=id) + except Exception as exp: + error_title = "Failed to create Particulars object" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.debug("Particulars object created: " + db_entry_object.log_entry) + + # Get particulars from DB + try: + cursor = db_entry_object.from_db(cursor=cursor) + except Exception as exp: + error_title = "Failed to fetch Particulars from DB" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.info("Particulars fetched") + logger.info("Particulars fetched from db: " + db_entry_object.log_entry) + + # Update Particulars object + if len(attributes) == 0: + logger.info("Empty attributes dict provided. 
Nothing to update.") + return db_entry_object.to_api_dict + else: + logger.info("Particulars object to update: " + db_entry_object.log_entry) + + # log provided attributes + for key, value in attributes.items(): + logger.debug("attributes[" + str(key) + "]: " + str(value)) + + # Update object attributes + if "lastname" in attributes: + logger.info("Updating lastname") + old_value = str(db_entry_object.lastname) + new_value = str(attributes.get("lastname", "None")) + logger.debug("Updating: " + old_value + " --> " + new_value) + db_entry_object.lastname = new_value + logger.info(db_entry_object.log_entry) + + if "firstname" in attributes: + logger.info("Updating firstname") + old_value = str(db_entry_object.firstname) + new_value = str(attributes.get("firstname", "None")) + logger.debug("Updating: " + old_value + " --> " + new_value) + db_entry_object.firstname = new_value + logger.info(db_entry_object.log_entry) + + if "img_url" in attributes: + logger.info("Updating img_url") + old_value = str(db_entry_object.img_url) + new_value = str(attributes.get("img_url", "None")) + logger.debug("Updating: " + old_value + " --> " + new_value) + db_entry_object.img_url = new_value + logger.info(db_entry_object.log_entry) + + if "date_of_birth" in attributes: + logger.info("Updating date_of_birth") + old_value = str(db_entry_object.date_of_birth) + new_value = str(attributes.get("date_of_birth", "None")) + logger.debug("Updating: " + old_value + " --> " + new_value) + db_entry_object.date_of_birth = new_value + logger.info(db_entry_object.log_entry) + + # Store updates + try: + cursor = db_entry_object.update_db(cursor=cursor) + ### + # Commit + db.connection.commit() + except Exception as exp: + error_title = "Failed to update Particulars to DB" + logger.error(error_title + ": " + repr(exp)) + logger.debug('commit failed: ' + repr(exp)) + logger.debug('--> rollback') + db.connection.rollback() + raise + else: + logger.debug("Committed") + logger.info("Particulars updated") + 
logger.info(db_entry_object.log_entry) + + return db_entry_object.to_api_dict + + +################################## +################################### +# Contacts +################################## +################################## +def get_contact(account_id=None, id=None, cursor=None): + """ + Get one contact entry from database by Account ID and contact ID + :param account_id: + :param id: + :return: dict + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if id is None: + raise AttributeError("Provide id as parameter") + if cursor is None: + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + try: + db_entry_object = Contacts(account_id=account_id, id=id) + except Exception as exp: + error_title = "Failed to create contact object" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.debug("contact object created: " + db_entry_object.log_entry) + + # Get contact from DB + try: + cursor = db_entry_object.from_db(cursor=cursor) + except Exception as exp: + error_title = "Failed to fetch contact from DB" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.info("contact fetched") + logger.info("contact fetched from db: " + db_entry_object.log_entry) + + return db_entry_object.to_api_dict + + +def get_contacts(account_id=None): + """ + Get all contact -entries related to account + :param account_id: + :return: List of dicts + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + + # Get table name + logger.info("Create contact") + db_entry_object = Contacts() + logger.info(db_entry_object.log_entry) + logger.info("Get table name") + table_name = db_entry_object.table_name + logger.info("Got table name: " + str(table_name)) + + # Get DB cursor + try: cursor = get_db_cursor() - logger.debug('No DB cursor provided as call parameter. 
Getting new one.') + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise - cursor, data = get_service_link_record_count_by_account(cursor=cursor, account_id=account_id) + # Get primary keys for contacts + try: + cursor, id_list = get_primary_keys_by_account_id(cursor=cursor, account_id=account_id, table_name=table_name) + except Exception as exp: + logger.error('Could not get primary key list: ' + repr(exp)) + raise - return cursor, data + # Get contacts from database + logger.info("Get contacts from database") + db_entry_list = [] + for id in id_list: + # TODO: try-except needed? + logger.info("Getting contacts with contacts_id: " + str(id)) + db_entry_dict = get_contact(account_id=account_id, id=id) + db_entry_list.append(db_entry_dict) + logger.info("contact object added to list: " + json.dumps(db_entry_dict)) + return db_entry_list -def get_consent_record_count(cursor=None, account_id=None): - check_account_id(account_id=account_id) +def add_contact(account_id=None, attributes=None, cursor=None): + """ + Add one contacts entry at database identified by Account ID and ID + :param account_id: + :param id: + :return: Particular dict + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if attributes is None: + raise AttributeError("Provide attributes as parameter") + if not isinstance(attributes, dict): + raise AttributeError("attributes must be a dict") + if cursor is None: + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + # Update contacts object + if len(attributes) == 0: + logger.info("Empty attributes dict provided. 
Nothing to add.") + raise StandardError("Not adding empty entry to database") + else: + # log provided attributes + for key, value in attributes.items(): + logger.debug("attributes[" + str(key) + "]: " + str(value)) + + # Create object + try: + db_entry_object = Contacts( + account_id=account_id, + address1=str(attributes.get("address1", "")), + address2=str(attributes.get("address2", "")), + postal_code=str(attributes.get("postalCode", "")), + city=str(attributes.get("city", "")), + state=str(attributes.get("state", "")), + country=str(attributes.get("country", "")), + type=str(attributes.get("type", "")), + prime=str(attributes.get("primary", "")) + ) + except Exception as exp: + error_title = "Failed to create contacts object" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.debug("contacts object created: " + db_entry_object.log_entry) + # Store updates + try: + cursor = db_entry_object.to_db(cursor=cursor) + ### + # Commit + db.connection.commit() + except Exception as exp: + error_title = "Failed to add contacts to DB" + logger.error(error_title + ": " + repr(exp)) + logger.debug('commit failed: ' + repr(exp)) + logger.debug('--> rollback') + db.connection.rollback() + raise + else: + logger.debug("Committed") + logger.info("contacts added") + logger.info(db_entry_object.log_entry) + + return db_entry_object.to_api_dict + + +def update_contact(account_id=None, id=None, attributes=None, cursor=None): + """ + Update one contacts entry at database identified by Account ID and ID + :param account_id: + :param id: + :return: Particular dict + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if id is None: + raise AttributeError("Provide id as parameter") + if attributes is None: + raise AttributeError("Provide attributes as parameter") + if not isinstance(attributes, dict): + raise AttributeError("attributes must be a dict") if cursor is None: + # Get DB cursor + try: + cursor = get_db_cursor() + 
except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + try: + db_entry_object = Contacts(account_id=account_id, id=id) + except Exception as exp: + error_title = "Failed to create contacts object" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.debug("contacts object created: " + db_entry_object.log_entry) + + # Get contacts from DB + try: + cursor = db_entry_object.from_db(cursor=cursor) + except Exception as exp: + error_title = "Failed to fetch contacts from DB" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.info("contacts fetched") + logger.info("contacts fetched from db: " + db_entry_object.log_entry) + + # Update contacts object + if len(attributes) == 0: + logger.info("Empty attributes dict provided. Nothing to update.") + return db_entry_object.to_api_dict + else: + logger.info("contacts object to update: " + db_entry_object.log_entry) + + # log provided attributes + for key, value in attributes.items(): + logger.debug("attributes[" + str(key) + "]: " + str(value)) + + # Update object attributes + if "address1" in attributes: + logger.info("Updating address1") + old_value = str(db_entry_object.address1) + new_value = str(attributes.get("address1", "None")) + logger.debug("Updating: " + old_value + " --> " + new_value) + db_entry_object.address1 = new_value + logger.info(db_entry_object.log_entry) + + if "address2" in attributes: + logger.info("Updating address2") + old_value = str(db_entry_object.address2) + new_value = str(attributes.get("address2", "None")) + logger.debug("Updating: " + old_value + " --> " + new_value) + db_entry_object.address2 = new_value + logger.info(db_entry_object.log_entry) + + if "postalCode" in attributes: + logger.info("Updating postalCode") + old_value = str(db_entry_object.postal_code) + new_value = str(attributes.get("postalCode", "None")) + logger.debug("Updating: " + old_value + " --> " + new_value) + db_entry_object.postal_code = 
new_value + logger.info(db_entry_object.log_entry) + + if "city" in attributes: + logger.info("Updating city") + old_value = str(db_entry_object.city) + new_value = str(attributes.get("city", "None")) + logger.debug("Updating: " + old_value + " --> " + new_value) + db_entry_object.city = new_value + logger.info(db_entry_object.log_entry) + + if "state" in attributes: + logger.info("Updating state") + old_value = str(db_entry_object.state) + new_value = str(attributes.get("state", "None")) + logger.debug("Updating: " + old_value + " --> " + new_value) + db_entry_object.state = new_value + logger.info(db_entry_object.log_entry) + + if "country" in attributes: + logger.info("Updating country") + old_value = str(db_entry_object.country) + new_value = str(attributes.get("country", "None")) + logger.debug("Updating: " + old_value + " --> " + new_value) + db_entry_object.country = new_value + logger.info(db_entry_object.log_entry) + + if "type" in attributes: + logger.info("Updating type") + old_value = str(db_entry_object.type) + new_value = str(attributes.get("type", "None")) + logger.debug("Updating: " + old_value + " --> " + new_value) + db_entry_object.type = new_value + logger.info(db_entry_object.log_entry) + + if "primary" in attributes: + logger.info("Updating primary") + old_value = str(db_entry_object.prime) + new_value = str(attributes.get("primary", "None")) + logger.debug("Updating: " + old_value + " --> " + new_value) + db_entry_object.prime = new_value + logger.info(db_entry_object.log_entry) + + # Store updates + try: + cursor = db_entry_object.update_db(cursor=cursor) + ### + # Commit + db.connection.commit() + except Exception as exp: + error_title = "Failed to update contacts to DB" + logger.error(error_title + ": " + repr(exp)) + logger.debug('commit failed: ' + repr(exp)) + logger.debug('--> rollback') + db.connection.rollback() + raise + else: + logger.debug("Committed") + logger.info("contacts updated") + logger.info(db_entry_object.log_entry) + + 
return db_entry_object.to_api_dict + + +################################## +################################### +# Emails +################################## +################################## +def get_email(account_id=None, id=None, cursor=None): + """ + Get one email entry from database by Account ID and email ID + :param account_id: + :param id: + :return: dict + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if id is None: + raise AttributeError("Provide id as parameter") + if cursor is None: + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + try: + db_entry_object = Email(account_id=account_id, id=id) + except Exception as exp: + error_title = "Failed to create email object" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.debug("email object created: " + db_entry_object.log_entry) + + # Get email from DB + try: + cursor = db_entry_object.from_db(cursor=cursor) + except Exception as exp: + error_title = "Failed to fetch email from DB" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.info("email fetched") + logger.info("email fetched from db: " + db_entry_object.log_entry) + + return db_entry_object.to_api_dict + + +def get_emails(account_id=None): + """ + Get all email -entries related to account + :param account_id: + :return: List of dicts + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + + # Get table name + logger.info("Create email") + db_entry_object = Email() + logger.info(db_entry_object.log_entry) + logger.info("Get table name") + table_name = db_entry_object.table_name + logger.info("Got table name: " + str(table_name)) + + # Get DB cursor + try: cursor = get_db_cursor() - logger.debug('No DB cursor provided as call parameter. 
Getting new one.') + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise - cursor, data = get_consent_record_count_by_account(cursor=cursor, account_id=account_id) + # Get primary keys for emails + try: + cursor, id_list = get_primary_keys_by_account_id(cursor=cursor, account_id=account_id, table_name=table_name) + except Exception as exp: + logger.error('Could not get primary key list: ' + repr(exp)) + raise - return cursor, data + # Get emails from database + logger.info("Get emails from database") + db_entry_list = [] + for id in id_list: + # TODO: try-except needed? + logger.info("Getting emails with emails_id: " + str(id)) + db_entry_dict = get_email(account_id=account_id, id=id) + db_entry_list.append(db_entry_dict) + logger.info("email object added to list: " + json.dumps(db_entry_dict)) + return db_entry_list -def get_contacts(cursor=None, account_id=None): - check_account_id(account_id=account_id) +def add_email(account_id=None, attributes=None, cursor=None): + """ + Add one email entry to database identified by Account ID and ID + :param account_id: + :param id: + :return: dict + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if attributes is None: + raise AttributeError("Provide attributes as parameter") + if not isinstance(attributes, dict): + raise AttributeError("attributes must be a dict") + if cursor is None: + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + # Update emails object + if len(attributes) == 0: + logger.info("Empty attributes dict provided. 
Nothing to add.") + raise StandardError("Not adding empty entry to database") + else: + # log provided attributes + for key, value in attributes.items(): + logger.debug("attributes[" + str(key) + "]: " + str(value)) + + # Create object + try: + db_entry_object = Email( + account_id=account_id, + email=str(attributes.get("email", "")), + type=str(attributes.get("type", "")), + prime=str(attributes.get("primary", "")) + ) + except Exception as exp: + error_title = "Failed to create emails object" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.debug("emails object created: " + db_entry_object.log_entry) + # Store updates + try: + cursor = db_entry_object.to_db(cursor=cursor) + ### + # Commit + db.connection.commit() + except Exception as exp: + error_title = "Failed to add emails to DB" + logger.error(error_title + ": " + repr(exp)) + logger.debug('commit failed: ' + repr(exp)) + logger.debug('--> rollback') + db.connection.rollback() + raise + else: + logger.debug("Committed") + logger.info("emails added") + logger.info(db_entry_object.log_entry) + + return db_entry_object.to_api_dict + + +def update_email(account_id=None, id=None, attributes=None, cursor=None): + """ + Update one email entry at database identified by Account ID and ID + :param account_id: + :param id: + :return: dict + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if id is None: + raise AttributeError("Provide id as parameter") + if attributes is None: + raise AttributeError("Provide attributes as parameter") + if not isinstance(attributes, dict): + raise AttributeError("attributes must be a dict") if cursor is None: + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + try: + db_entry_object = Email(account_id=account_id, id=id) + except Exception as exp: + error_title = "Failed to create email object" + logger.error(error_title + ": 
" + repr(exp)) + raise + else: + logger.debug("email object created: " + db_entry_object.log_entry) + + # Get email from DB + try: + cursor = db_entry_object.from_db(cursor=cursor) + except Exception as exp: + error_title = "Failed to fetch email from DB" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.info("email fetched") + logger.info("email fetched from db: " + db_entry_object.log_entry) + + # Update email object + if len(attributes) == 0: + logger.info("Empty attributes dict provided. Nothing to update.") + return db_entry_object.to_api_dict + else: + logger.info("email object to update: " + db_entry_object.log_entry) + + # log provided attributes + for key, value in attributes.items(): + logger.debug("attributes[" + str(key) + "]: " + str(value)) + + # Update object attributes + if "email" in attributes: + logger.info("Updating email") + old_value = str(db_entry_object.email) + new_value = str(attributes.get("email", "None")) + logger.debug("Updating: " + old_value + " --> " + new_value) + db_entry_object.email = new_value + logger.info(db_entry_object.log_entry) + + if "type" in attributes: + logger.info("Updating type") + old_value = str(db_entry_object.type) + new_value = str(attributes.get("type", "None")) + logger.debug("Updating: " + old_value + " --> " + new_value) + db_entry_object.type = new_value + logger.info(db_entry_object.log_entry) + + if "primary" in attributes: + logger.info("Updating primary") + old_value = str(db_entry_object.prime) + new_value = str(attributes.get("primary", "None")) + logger.debug("Updating: " + old_value + " --> " + new_value) + db_entry_object.prime = new_value + logger.info(db_entry_object.log_entry) + + # Store updates + try: + cursor = db_entry_object.update_db(cursor=cursor) + ### + # Commit + db.connection.commit() + except Exception as exp: + error_title = "Failed to update email to DB" + logger.error(error_title + ": " + repr(exp)) + logger.debug('commit failed: ' + repr(exp)) + 
logger.debug('--> rollback') + db.connection.rollback() + raise + else: + logger.debug("Committed") + logger.info("email updated") + logger.info(db_entry_object.log_entry) + + return db_entry_object.to_api_dict + + + + +################################## +################################### +# Telephones (numbers) +################################## +################################## +def get_telephone(account_id=None, id=None, cursor=None): + """ + Get one telephone entry from database by Account ID and telephone ID + :param account_id: + :param id: + :return: dict + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if id is None: + raise AttributeError("Provide id as parameter") + if cursor is None: + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + try: + db_entry_object = Telephone(account_id=account_id, id=id) + except Exception as exp: + error_title = "Failed to create telephone object" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.debug("telephone object created: " + db_entry_object.log_entry) + + # Get telephone from DB + try: + cursor = db_entry_object.from_db(cursor=cursor) + except Exception as exp: + error_title = "Failed to fetch telephone from DB" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.info("telephone fetched") + logger.info("telephone fetched from db: " + db_entry_object.log_entry) + + return db_entry_object.to_api_dict + + +def get_telephones(account_id=None): + """ + Get all telephone -entries related to account + :param account_id: + :return: List of dicts + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + + # Get table name + logger.info("Create telephone") + db_entry_object = Telephone() + logger.info(db_entry_object.log_entry) + logger.info("Get table name") + table_name = db_entry_object.table_name + 
logger.info("Got table name: " + str(table_name)) + + # Get DB cursor + try: cursor = get_db_cursor() - logger.debug('No DB cursor provided as call parameter. Getting new one.') + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + # Get primary keys for telephones + try: + cursor, id_list = get_primary_keys_by_account_id(cursor=cursor, account_id=account_id, table_name=table_name) + except Exception as exp: + logger.error('Could not get primary key list: ' + repr(exp)) + raise + + # Get telephones from database + logger.info("Get telephones from database") + db_entry_list = [] + for id in id_list: + # TODO: try-except needed? + logger.info("Getting telephones with telephones_id: " + str(id)) + db_entry_dict = get_telephone(account_id=account_id, id=id) + db_entry_list.append(db_entry_dict) + logger.info("telephone object added to list: " + json.dumps(db_entry_dict)) + + return db_entry_list - cursor, data = get_contacts_by_account(cursor=cursor, account_id=account_id) - return cursor, data +def add_telephone(account_id=None, attributes=None, cursor=None): + """ + Add one telephone entry to database identified by Account ID and ID + :param account_id: + :param id: + :return: dict + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if attributes is None: + raise AttributeError("Provide attributes as parameter") + if not isinstance(attributes, dict): + raise AttributeError("attributes must be a dict") + if cursor is None: + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + # Update telephone object + if len(attributes) == 0: + logger.info("Empty attributes dict provided. 
Nothing to add.") + raise StandardError("Not adding empty entry to database") + else: + # log provided attributes + for key, value in attributes.items(): + logger.debug("attributes[" + str(key) + "]: " + str(value)) + # Create object + try: + db_entry_object = Telephone( + account_id=account_id, + tel=str(attributes.get("tel", "")), + type=str(attributes.get("type", "")), + prime=str(attributes.get("primary", "")) + ) + except Exception as exp: + error_title = "Failed to create telephone object" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.debug("telephone object created: " + db_entry_object.log_entry) -def get_emails(cursor=None, account_id=None): + # Store updates + try: + cursor = db_entry_object.to_db(cursor=cursor) + ### + # Commit + db.connection.commit() + except Exception as exp: + error_title = "Failed to add telephone to DB" + logger.error(error_title + ": " + repr(exp)) + logger.debug('commit failed: ' + repr(exp)) + logger.debug('--> rollback') + db.connection.rollback() + raise + else: + logger.debug("Committed") + logger.info("telephone added") + logger.info(db_entry_object.log_entry) - check_account_id(account_id=account_id) + return db_entry_object.to_api_dict + +def update_telephone(account_id=None, id=None, attributes=None, cursor=None): + """ + Update one telephone entry at database identified by Account ID and ID + :param account_id: + :param id: + :return: dict + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if id is None: + raise AttributeError("Provide id as parameter") + if attributes is None: + raise AttributeError("Provide attributes as parameter") + if not isinstance(attributes, dict): + raise AttributeError("attributes must be a dict") if cursor is None: + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + try: + db_entry_object = Telephone(account_id=account_id, id=id) 
+ except Exception as exp: + error_title = "Failed to create telephone object" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.debug("telephone object created: " + db_entry_object.log_entry) + + # Get telephone from DB + try: + cursor = db_entry_object.from_db(cursor=cursor) + except Exception as exp: + error_title = "Failed to fetch telephone from DB" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.info("telephone fetched") + logger.info("telephone fetched from db: " + db_entry_object.log_entry) + + # Update telephone object + if len(attributes) == 0: + logger.info("Empty attributes dict provided. Nothing to update.") + return db_entry_object.to_api_dict + else: + logger.info("telephone object to update: " + db_entry_object.log_entry) + + # log provided attributes + for key, value in attributes.items(): + logger.debug("attributes[" + str(key) + "]: " + str(value)) + + # Update object attributes + if "tel" in attributes: + logger.info("Updating telephone") + old_value = str(db_entry_object.tel) + new_value = str(attributes.get("tel", "None")) + logger.debug("Updating: " + old_value + " --> " + new_value) + db_entry_object.tel = new_value + logger.info(db_entry_object.log_entry) + + if "type" in attributes: + logger.info("Updating type") + old_value = str(db_entry_object.type) + new_value = str(attributes.get("type", "None")) + logger.debug("Updating: " + old_value + " --> " + new_value) + db_entry_object.type = new_value + logger.info(db_entry_object.log_entry) + + if "primary" in attributes: + logger.info("Updating primary") + old_value = str(db_entry_object.prime) + new_value = str(attributes.get("primary", "None")) + logger.debug("Updating: " + old_value + " --> " + new_value) + db_entry_object.prime = new_value + logger.info(db_entry_object.log_entry) + + # Store updates + try: + cursor = db_entry_object.update_db(cursor=cursor) + ### + # Commit + db.connection.commit() + except Exception as exp: + error_title = 
"Failed to update telephone to DB" + logger.error(error_title + ": " + repr(exp)) + logger.debug('commit failed: ' + repr(exp)) + logger.debug('--> rollback') + db.connection.rollback() + raise + else: + logger.debug("Committed") + logger.info("telephone updated") + logger.info(db_entry_object.log_entry) + + return db_entry_object.to_api_dict + + + +################################## +################################### +# Settings +################################## +################################## +def get_setting(account_id=None, id=None, cursor=None): + """ + Get one setting entry from database by Account ID and ID + :param account_id: + :param id: + :return: dict + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if id is None: + raise AttributeError("Provide id as parameter") + if cursor is None: + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + try: + db_entry_object = Settings(account_id=account_id, id=id) + except Exception as exp: + error_title = "Failed to create setting object" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.debug("setting object created: " + db_entry_object.log_entry) + + # Get setting from DB + try: + cursor = db_entry_object.from_db(cursor=cursor) + except Exception as exp: + error_title = "Failed to fetch setting from DB" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.info("setting fetched") + logger.info("setting fetched from db: " + db_entry_object.log_entry) + + return db_entry_object.to_api_dict + + +def get_settings(account_id=None): + """ + Get all setting -entries related to account + :param account_id: + :return: List of dicts + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + + # Get table name + logger.info("Create setting") + db_entry_object = Settings() + 
logger.info(db_entry_object.log_entry) + logger.info("Get table name") + table_name = db_entry_object.table_name + logger.info("Got table name: " + str(table_name)) + + # Get DB cursor + try: cursor = get_db_cursor() - logger.debug('No DB cursor provided as call parameter. Getting new one.') + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + # Get primary keys for setting + try: + cursor, id_list = get_primary_keys_by_account_id(cursor=cursor, account_id=account_id, table_name=table_name) + except Exception as exp: + logger.error('Could not get primary key list: ' + repr(exp)) + raise + + # Get setting from database + logger.info("Get setting from database") + db_entry_list = [] + for id in id_list: + # TODO: try-except needed? + logger.info("Getting setting with setting_id: " + str(id)) + db_entry_dict = get_setting(account_id=account_id, id=id) + db_entry_list.append(db_entry_dict) + logger.info("setting object added to list: " + json.dumps(db_entry_dict)) - cursor, data = get_emails_by_account(cursor=cursor, account_id=account_id) + return db_entry_list - return cursor, data +def add_setting(account_id=None, attributes=None, cursor=None): + """ + Add one setting entry to database identified by Account ID and ID + :param account_id: + :param id: + :return: dict + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if attributes is None: + raise AttributeError("Provide attributes as parameter") + if not isinstance(attributes, dict): + raise AttributeError("attributes must be a dict") + if cursor is None: + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise -def get_telephones(cursor=None, account_id=None): + # Update setting object + if len(attributes) == 0: + logger.info("Empty attributes dict provided. 
Nothing to add.") + raise StandardError("Not adding empty entry to database") + else: + # log provided attributes + for key, value in attributes.items(): + logger.debug("attributes[" + str(key) + "]: " + str(value)) - check_account_id(account_id=account_id) + # Create object + try: + db_entry_object = Settings( + account_id=account_id, + key=str(attributes.get("key", "")), + value=str(attributes.get("value", "")) + ) + except Exception as exp: + error_title = "Failed to create setting object" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.debug("setting object created: " + db_entry_object.log_entry) + # Store updates + try: + cursor = db_entry_object.to_db(cursor=cursor) + ### + # Commit + db.connection.commit() + except Exception as exp: + error_title = "Failed to add setting to DB" + logger.error(error_title + ": " + repr(exp)) + logger.debug('commit failed: ' + repr(exp)) + logger.debug('--> rollback') + db.connection.rollback() + raise + else: + logger.debug("Committed") + logger.info("setting added") + logger.info(db_entry_object.log_entry) + + return db_entry_object.to_api_dict + + +def update_setting(account_id=None, id=None, attributes=None, cursor=None): + """ + Update one setting entry at database identified by Account ID and ID + :param account_id: + :param id: + :return: dict + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if id is None: + raise AttributeError("Provide id as parameter") + if attributes is None: + raise AttributeError("Provide attributes as parameter") + if not isinstance(attributes, dict): + raise AttributeError("attributes must be a dict") + if cursor is None: + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + try: + db_entry_object = Settings(account_id=account_id, id=id) + except Exception as exp: + error_title = "Failed to create setting object" + 
logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.debug("setting object created: " + db_entry_object.log_entry) + + # Get setting from DB + try: + cursor = db_entry_object.from_db(cursor=cursor) + except Exception as exp: + error_title = "Failed to fetch setting from DB" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.info("setting fetched") + logger.info("setting fetched from db: " + db_entry_object.log_entry) + + # Update setting object + if len(attributes) == 0: + logger.info("Empty attributes dict provided. Nothing to update.") + return db_entry_object.to_api_dict + else: + logger.info("setting object to update: " + db_entry_object.log_entry) + + # log provided attributes + for key, value in attributes.items(): + logger.debug("attributes[" + str(key) + "]: " + str(value)) + + # Update object attributes + if "key" in attributes: + logger.info("Updating key") + old_value = str(db_entry_object.key) + new_value = str(attributes.get("key", "None")) + logger.debug("Updating: " + old_value + " --> " + new_value) + db_entry_object.key = new_value + logger.info(db_entry_object.log_entry) + + if "value" in attributes: + logger.info("Updating value") + old_value = str(db_entry_object.value) + new_value = str(attributes.get("value", "None")) + logger.debug("Updating: " + old_value + " --> " + new_value) + db_entry_object.value = new_value + logger.info(db_entry_object.log_entry) + + # Store updates + try: + cursor = db_entry_object.update_db(cursor=cursor) + ### + # Commit + db.connection.commit() + except Exception as exp: + error_title = "Failed to update setting to DB" + logger.error(error_title + ": " + repr(exp)) + logger.debug('commit failed: ' + repr(exp)) + logger.debug('--> rollback') + db.connection.rollback() + raise + else: + logger.debug("Committed") + logger.info("setting updated") + logger.info(db_entry_object.log_entry) + + return db_entry_object.to_api_dict + + +################################## 
+################################### +# Event logs +################################## +################################## +def get_event_log(account_id=None, id=None, cursor=None): + """ + Get one event_log entry from database by Account ID and ID + :param account_id: + :param id: + :return: dict + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if id is None: + raise AttributeError("Provide id as parameter") + if cursor is None: + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + try: + db_entry_object = EventLog(account_id=account_id, id=id) + except Exception as exp: + error_title = "Failed to create event_log object" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.debug("event_log object created: " + db_entry_object.log_entry) + + # Get event_log from DB + try: + cursor = db_entry_object.from_db(cursor=cursor) + except Exception as exp: + error_title = "Failed to fetch event_log from DB" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.info("event_log fetched") + logger.info("event_log fetched from db: " + db_entry_object.log_entry) + + return db_entry_object.to_api_dict + + +def get_event_logs(account_id=None): + """ + Get all event_log -entries related to account + :param account_id: + :return: List of dicts + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + + # Get table name + logger.info("Create event_log") + db_entry_object = EventLog() + logger.info(db_entry_object.log_entry) + logger.info("Get table name") + table_name = db_entry_object.table_name + logger.info("Got table name: " + str(table_name)) + + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + # Get primary keys for event_log + try: + cursor, id_list = 
get_primary_keys_by_account_id(cursor=cursor, account_id=account_id, table_name=table_name) + except Exception as exp: + logger.error('Could not get primary key list: ' + repr(exp)) + raise + + # Get event_log from database + logger.info("Get event_log from database") + db_entry_list = [] + for id in id_list: + # TODO: try-except needed? + logger.info("Getting event_log with event_log_id: " + str(id)) + db_entry_dict = get_event_log(account_id=account_id, id=id) + db_entry_list.append(db_entry_dict) + logger.info("event_log object added to list: " + json.dumps(db_entry_dict)) + + return db_entry_list + + +################################## +################################### +# Service Link Records +################################## +################################## +def get_slr(account_id=None, slr_id=None, cursor=None): + """ + Get one slr entry from database by Account ID and ID + :param account_id: + :param slr_id: + :return: dict + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if slr_id is None: + raise AttributeError("Provide slr_id as parameter") + if cursor is None: + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + try: + db_entry_object = ServiceLinkRecord(account_id=account_id, service_link_record_id=slr_id) + except Exception as exp: + error_title = "Failed to create slr object" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.debug("slr object created: " + db_entry_object.log_entry) + + # Get slr from DB + try: + cursor = db_entry_object.from_db(cursor=cursor) + except Exception as exp: + error_title = "Failed to fetch slr from DB" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.info("slr fetched") + logger.info("slr fetched from db: " + db_entry_object.log_entry) + + return db_entry_object.to_record_dict + + +def get_slrs(account_id=None): + """ + Get all slr 
-entries related to account + :param account_id: + :return: List of dicts + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + + # Get table name + logger.info("Create slr") + db_entry_object = ServiceLinkRecord() + logger.info(db_entry_object.log_entry) + logger.info("Get table name") + table_name = db_entry_object.table_name + logger.info("Got table name: " + str(table_name)) + + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + # Get primary keys for slr + try: + cursor, id_list = get_slr_ids(cursor=cursor, account_id=account_id, table_name=table_name) + except Exception as exp: + logger.error('Could not get primary key list: ' + repr(exp)) + raise + + # Get slrs from database + logger.info("Get slrs from database") + db_entry_list = [] + for id in id_list: + # TODO: try-except needed? + logger.info("Getting slr with slr_id: " + str(id)) + db_entry_dict = get_slr(account_id=account_id, slr_id=id) + db_entry_list.append(db_entry_dict) + logger.info("slr object added to list: " + json.dumps(db_entry_dict)) + + return db_entry_list + + +def get_record_ids(cursor=None, account_id=None): + """ + Fetches IDs for all record structures + :param cursor: + :param account_id: + :return: + """ if cursor is None: + raise AttributeError("Provide cursor as parameter") + if account_id is None: + raise AttributeError("Provide account_id as parameter") + + # Containers + record_id_container = {} + table_names = { + "slr": "", + "slsr": "", + "cr": "", + "csr": "" + } + + # Get table names + try: + logger.info("Table names") + # SLR + db_entry_object = ServiceLinkRecord() + table_names["slr"] = db_entry_object.table_name + # SLSR + db_entry_object = ServiceLinkStatusRecord() + table_names["slsr"] = db_entry_object.table_name + # CR + db_entry_object = ConsentRecord() + table_names["cr"] = db_entry_object.table_name + # CSR + db_entry_object = 
ConsentStatusRecord() + table_names["csr"] = db_entry_object.table_name + # + logger.info("Table names: " + json.dumps(table_names)) + except Exception as exp: + logger.error('Could not get database table names: ' + repr(exp)) + raise + + # Get primary keys for Service Link Records + try: + logger.info("Getting SLR IDs") + cursor, slr_id_list = get_slr_ids(cursor=cursor, account_id=account_id, table_name=table_names["slr"]) + except Exception as exp: + logger.error('Could not get slr primary key list: ' + repr(exp)) + raise + else: + logger.debug("Got following SLR IDs: " + json.dumps(slr_id_list)) + + # Get primary keys for Service Link Status Records and Consent Records + for slr_id in slr_id_list: + logger.debug("Looping through slr_id_list: " + json.dumps(slr_id_list)) + # Add Service Link Record IDs to record_container + try: + logger.info("Adding SLR IDs") + record_id_container[slr_id] = {"serviceLinkStatusRecords": {}, "consentRecords": {}} + except Exception as exp: + logger.error('Could not add slr_id: ' + str(slr_id) + ' to record_id_container: ' + repr(exp)) + raise + else: + logger.debug("Added SLR ID: " + str(slr_id)) + + # Get Service Link Status Record IDs + try: + logger.info("Getting SLSR IDs") + cursor, slsr_id_list = get_slsr_ids(cursor=cursor, slr_id=slr_id, table_name=table_names["slsr"]) + except Exception as exp: + logger.error('Could not get slsr primary key list: ' + repr(exp)) + raise + else: + logger.debug("Got following SLSR IDs: " + json.dumps(slsr_id_list)) + + # Add Service Link Status Record IDs to record_container + for slsr_id in slsr_id_list: + logger.debug("Looping through slsr_id_list: " + json.dumps(slsr_id_list)) + try: + logger.info("Adding SLSR IDs") + record_id_container[slr_id]["serviceLinkStatusRecords"][slsr_id] = {} + except Exception as exp: + logger.error('Could not add slsr_id: ' + str(slsr_id) + ' to record_id_container: ' + repr(exp)) + raise + else: + logger.debug("Added SLSR ID: " + str(slsr_id)) + + # Get 
Consent Record IDs + try: + logger.info("Getting CR IDs") + cursor, cr_id_list = get_cr_ids(cursor=cursor, slr_id=slr_id, table_name=table_names["cr"]) + except Exception as exp: + logger.error('Could not get cr primary key list: ' + repr(exp)) + raise + else: + logger.debug("Got following CR IDs: " + json.dumps(cr_id_list)) + + # Add Consent Record IDs to record_container + for cr_id in cr_id_list: + logger.debug("Looping through cr_id_list: " + json.dumps(cr_id_list)) + try: + logger.info("Adding CR IDs") + record_id_container[slr_id]["consentRecords"][cr_id] = {"consentStatusRecords": {}} + except Exception as exp: + logger.error('Could not add cr_id: ' + str(cr_id) + ' to record_id_container: ' + repr(exp)) + raise + else: + logger.debug("Added CR ID: " + str(cr_id)) + + # Get Consent Status Record IDs + try: + logger.info("Getting CSR IDs") + cursor, csr_id_list = get_csr_ids(cursor=cursor, cr_id=cr_id, table_name=table_names["csr"]) + except Exception as exp: + logger.error('Could not get csr primary key list: ' + repr(exp)) + raise + else: + logger.debug("Got following CSR IDs: " + json.dumps(csr_id_list)) + + # Add Consent Status Record IDs to record_container + for csr_id in csr_id_list: + logger.debug("Looping through csr_id_list: " + json.dumps(csr_id_list)) + try: + logger.info("Adding CSR IDs") + record_id_container[slr_id]["consentRecords"][cr_id]["consentStatusRecords"][csr_id] = {} + except Exception as exp: + logger.error('Could not add csr_id: ' + str(csr_id) + ' to record_id_container: ' + repr(exp)) + raise + else: + logger.debug("Added CSR ID: " + str(csr_id)) + + return record_id_container + + +def get_records(cursor=None, record_ids=None): + if cursor is None: + raise AttributeError("Provide cursor as parameter") + if record_ids is None: + raise AttributeError("Provide record_ids as parameter") + if not isinstance(record_ids, dict): + raise AttributeError("record_ids MUST be dict") + + logger.debug("Type of record_ids: " + 
repr(type(record_ids))) + + record_container = {} + + logger.info("Getting Records") + logger.info("record_ids: " + repr(record_ids)) + record_ids = dict(record_ids) + + # logger.info("Get Service Link Records") + # for slr in record_ids.iteritems(): + # logger.debug("slr: " + repr(slr)) + # logger.info("Looping through Service Link Record with ID: " + json.dumps(slr)) + + return record_container + + +def get_slrs_and_subcomponents(account_id=None): + """ + Get all slr -entries with sub elements (slsr, cr, csr) related to account + :param account_id: + :return: List of dicts + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + + # Containers + return_container = {} + record_id_container = {} + record_container = {} + + # Get DB cursor + try: cursor = get_db_cursor() - logger.debug('No DB cursor provided as call parameter. Getting new one.') + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + try: + record_id_container = get_record_ids(cursor=cursor, account_id=account_id) + except Exception as exp: + logger.error('Could not get record id collection: ' + repr(exp)) + raise + + # TODO: Get Actual records from db + logger.info("################") + logger.info("################") + logger.info("################") + try: + record_container = get_records(cursor=cursor, record_ids=record_id_container) + except Exception as exp: + logger.error('Could not get record collection: ' + repr(exp)) + raise + + logger.info("################") + logger.info("################") + logger.info("################") + + return_container["record_id_container"] = record_id_container + return_container["record_container"] = record_container + + + # Get slrs from database + # logger.info("Get slrs from database") + # db_entry_list = [] + # for id in id_list: + # # TODO: try-except needed? 
+ # logger.info("Getting slr with slr_id: " + str(id)) + # db_entry_dict = get_slr(account_id=account_id, slr_id=id) + # db_entry_list.append(db_entry_dict) + # logger.info("slr object added to list: " + json.dumps(db_entry_dict)) + + return return_container + + +################################## +################################### +# Service Link Status Records +################################## +################################## +def get_slsr(account_id=None, slr_id=None, slsr_id=None, cursor=None): + """ + Get one slsr entry from database by Account ID and ID + :param slr_id: + :param slsr_id: + :return: dict + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if slr_id is None: + raise AttributeError("Provide slr_id as parameter") + if slsr_id is None: + raise AttributeError("Provide slsr_id as parameter") + if cursor is None: + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + # Check if slr can be found with account_id and slr_id + try: + slr = get_slr(account_id=account_id, slr_id=slr_id) + except StandardError as exp: + logger.error(repr(exp)) + raise + except Exception as exp: + func_data = {'account_id': account_id, 'slr_id': slr_id} + title = "No SLR with: " + json.dumps(func_data) + logger.error(title) + raise StandardError(title + ": " + repr(exp)) + else: + logger.info("Found SLR: " + repr(slr)) + + try: + db_entry_object = ServiceLinkStatusRecord(service_link_status_record_id=slsr_id, service_link_record_id=slr_id) + except Exception as exp: + error_title = "Failed to create slsr object" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.debug("slsr object created: " + db_entry_object.log_entry) + + # Get slsr from DB + try: + logger.info("Get slsr from DB") + cursor = db_entry_object.from_db(cursor=cursor) + except Exception as exp: + error_title = "Failed to fetch slsr from DB" + 
logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.info("slsr fetched") + logger.info("slsr fetched from db: " + db_entry_object.log_entry) + + return db_entry_object.to_record_dict + + +def get_slsrs(account_id=None, slr_id=None): + """ + Get all slsr -entries related to service link record + :param account_id: + :param slr_id: + :return: List of dicts + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if slr_id is None: + raise AttributeError("Provide slr_id as parameter") + + # Check if slr can be found with account_id and slr_id + try: + slr = get_slr(account_id=account_id, slr_id=slr_id) + except StandardError as exp: + logger.error(repr(exp)) + raise + except Exception as exp: + func_data = {'account_id': account_id, 'slr_id': slr_id} + title = "No SLR with: " + json.dumps(func_data) + logger.error(title) + raise StandardError(title + ": " + repr(exp)) + else: + logger.info("HEP") + logger.info("Found SLR: " + repr(slr)) + + # Get table name + logger.info("Create slsr") + db_entry_object = ServiceLinkStatusRecord() + logger.info(db_entry_object.log_entry) + logger.info("Get table name") + table_name = db_entry_object.table_name + logger.info("Got table name: " + str(table_name)) + + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + # Get primary keys for slsr + try: + cursor, id_list = get_slsr_ids(cursor=cursor, slr_id=slr_id, table_name=table_name) + except Exception as exp: + logger.error('Could not get primary key list: ' + repr(exp)) + raise + + # Get slsrs from database + logger.info("Get slsrs from database") + db_entry_list = [] + for id in id_list: + # TODO: try-except needed? 
+ logger.info("Getting slsr with account_id: " + str(account_id) + " slr_id: " + str(slr_id) + " slsr_id: " + str(id)) + db_entry_dict = get_slsr(account_id=account_id, slr_id=slr_id, slsr_id=id) + db_entry_list.append(db_entry_dict) + logger.info("slsr object added to list: " + json.dumps(db_entry_dict)) + + return db_entry_list + + +################################## +################################### +# Consent Records +################################## +################################## +def get_cr(account_id=None, slr_id=None, cr_id=None, cursor=None): + """ + Get one cr entry from database by Account ID and ID + :param slr_id: + :param cr_id: + :return: dict + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if slr_id is None: + raise AttributeError("Provide slr_id as parameter") + if cr_id is None: + raise AttributeError("Provide cr_id as parameter") + if cursor is None: + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + # Check if slr can be found with account_id and slr_id + try: + logger.info("Check if slr can be found with account_id and slr_id") + slr = get_slr(account_id=account_id, slr_id=slr_id) + except StandardError as exp: + logger.error(repr(exp)) + raise + except Exception as exp: + func_data = {'account_id': account_id, 'slr_id': slr_id} + title = "No SLR with: " + json.dumps(func_data) + logger.error(title) + raise StandardError(title + ": " + repr(exp)) + else: + logger.info("Found: " + repr(slr)) + + try: + db_entry_object = ConsentRecord(consent_id=cr_id, service_link_record_id=slr_id) + except Exception as exp: + error_title = "Failed to create cr object" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.debug("cr object created: " + db_entry_object.log_entry) + + # Get cr from DB + try: + cursor = db_entry_object.from_db(cursor=cursor) + except Exception as exp: + 
error_title = "Failed to fetch cr from DB" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.info("cr fetched") + logger.info("cr fetched from db: " + db_entry_object.log_entry) + + return db_entry_object.to_record_dict + + +def get_crs(account_id=None, slr_id=None): + """ + Get all cr -entries related to service link record + :param account_id: + :param slr_id: + :return: List of dicts + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if slr_id is None: + raise AttributeError("Provide slr_id as parameter") + + # Check if slr can be found with account_id and slr_id + try: + slr = get_slr(account_id=account_id, slr_id=slr_id) + except StandardError as exp: + logger.error(repr(exp)) + raise + except Exception as exp: + func_data = {'account_id': account_id, 'slr_id': slr_id} + title = "No SLR with: " + json.dumps(func_data) + logger.error(title) + raise StandardError(title + ": " + repr(exp)) + else: + logger.info("Found SLR: " + repr(slr)) + + # Get table name + logger.info("Create cr") + db_entry_object = ConsentRecord() + logger.info(db_entry_object.log_entry) + logger.info("Get table name") + table_name = db_entry_object.table_name + logger.info("Got table name: " + str(table_name)) + + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + # Get primary keys for crs + try: + logger.info("Get primary keys for crs") + cursor, id_list = get_cr_ids(cursor=cursor, slr_id=slr_id, table_name=table_name) + except Exception as exp: + logger.error('Could not get primary key list: ' + repr(exp)) + raise + else: + logger.info("primary keys for crs: " + repr(id_list)) + + # Get crs from database + logger.info("Get crs from database") + db_entry_list = [] + for id in id_list: + # TODO: try-except needed? 
+ logger.info("Getting cr with account_id: " + str(account_id) + " slr_id: " + str(slr_id) + " cr_id: " + str(id)) + db_entry_dict = get_cr(account_id=account_id, slr_id=slr_id, cr_id=id) + db_entry_list.append(db_entry_dict) + logger.info("cr object added to list: " + json.dumps(db_entry_dict)) + + return db_entry_list + + +################################## +################################### +# Consent Status Records +################################## +################################## +def get_csr(account_id=None, slr_id=None, cr_id=None, csr_id=None, cursor=None): + """ + Get one csr entry from database by Account ID and ID + :param slr_id: + :param cr_id: + :param csr_id: + :return: dict + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if slr_id is None: + raise AttributeError("Provide slr_id as parameter") + if cr_id is None: + raise AttributeError("Provide cr_id as parameter") + if csr_id is None: + raise AttributeError("Provide csr_id as parameter") + if cursor is None: + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + # Check if cr can be found with account_id, slr_id and cr_id + try: + cr = get_cr(account_id=account_id, slr_id=slr_id, cr_id=cr_id) + except StandardError as exp: + logger.error(repr(exp)) + raise + except Exception as exp: + func_data = {'account_id': account_id, 'slr_id': slr_id, 'cr_id': cr_id} + title = "No CR with: " + json.dumps(func_data) + logger.error(title) + raise StandardError(title + ": " + repr(exp)) + else: + logger.info("Found: " + repr(cr)) + + try: + db_entry_object = ConsentStatusRecord(consent_record_id=cr_id, consent_status_record_id=csr_id) + except Exception as exp: + error_title = "Failed to create csr object" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.debug("csr object created: " + db_entry_object.log_entry) + + # Get csr from DB + try: + 
cursor = db_entry_object.from_db(cursor=cursor) + except Exception as exp: + error_title = "Failed to fetch csr from DB" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.info("csr fetched") + logger.info("csr fetched from db: " + db_entry_object.log_entry) + + return db_entry_object.to_record_dict + + +def get_csrs(account_id=None, slr_id=None, cr_id=None): + """ + Get all csr -entries related to service link record + :param account_id: + :param slr_id: + :return: List of dicts + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if slr_id is None: + raise AttributeError("Provide slr_id as parameter") + if cr_id is None: + raise AttributeError("Provide cr_id as parameter") + + # Check if cr can be found with account_id, slr_id and cr_id + try: + cr = get_cr(account_id=account_id, slr_id=slr_id, cr_id=cr_id) + except StandardError as exp: + logger.error(repr(exp)) + raise + except Exception as exp: + func_data = {'account_id': account_id, 'slr_id': slr_id, 'cr_id': cr_id} + title = "No CR with: " + json.dumps(func_data) + logger.error(title) + raise StandardError(title + ": " + repr(exp)) + else: + logger.info("Found: " + repr(cr)) + + # Get table name + logger.info("Create csr") + db_entry_object = ConsentStatusRecord() + logger.info(db_entry_object.log_entry) + logger.info("Get table name") + table_name = db_entry_object.table_name + logger.info("Got table name: " + str(table_name)) + + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + # Get primary keys for csrs + try: + cursor, id_list = get_csr_ids(cursor=cursor, cr_id=cr_id, table_name=table_name) + except Exception as exp: + logger.error('Could not get primary key list: ' + repr(exp)) + raise + + # Get csrs from database + logger.info("Get csrs from database") + db_entry_list = [] + for id in id_list: + # TODO: try-except needed? 
+ logger.info("Getting csr with account_id: " + str(account_id) + " slr_id: " + str(slr_id) + " cr_id: " + str(cr_id) + " csr_id: " + str(id)) + db_entry_dict = get_csr(account_id=account_id, slr_id=slr_id, cr_id=cr_id, csr_id=id) + db_entry_list.append(db_entry_dict) + logger.info("csr object added to list: " + json.dumps(db_entry_dict)) + + return db_entry_list + + +################################## +################################## +# Account Export +################################## +################################## +def export_account(account_id=None): + """ + Export Account as JSON presentation + :param account_id: + :return: List of dicts + """ + if account_id is None: + raise AttributeError("Provide account_id as parameter") + + export = { + "type": "Account", + "id": account_id, + "attributes": {} + } + + export_attributes = {} + + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + ################################## + # Service Link Records + ################################## + title = "Service Link Records" + try: + logger.info(title) + entries = get_slrs_and_subcomponents(account_id=account_id) + export_attributes["serviceLinkRecords"] = entries + except IndexError as exp: + error_title = "Export of " + title + " failed. No entries in database." 
+ logger.error(error_title + ': ' + repr(exp)) + export_attributes["serviceLinkRecords"] = {} + except Exception as exp: + error_title = "Export of " + title + " failed" + logger.error(error_title + ': ' + repr(exp)) + raise StandardError(title + ": " + repr(exp)) + else: + logger.info(title + ": " + json.dumps(entries)) + + ################################## + # Particulars + ################################## + # title = "Particulars" + # try: + # logger.info(title) + # entries = get_particulars(account_id=account_id) + # export_attributes["particulars"] = entries + # except IndexError as exp: + # error_title = "Export of " + title + " failed. No entries in database." + # logger.error(error_title + ': ' + repr(exp)) + # export_attributes["particulars"] = {} + # except Exception as exp: + # error_title = "Export of " + title + " failed" + # logger.error(error_title + ': ' + repr(exp)) + # raise StandardError(title + ": " + repr(exp)) + # else: + # logger.info(title + ": " + json.dumps(entries)) + # + # ################################## + # # Contacts + # ################################## + # title = "Contacts" + # try: + # logger.info(title) + # entries = get_contacts(account_id=account_id) + # export_attributes["contacts"] = entries + # except IndexError as exp: + # error_title = "Export of " + title + " failed. No entries in database." 
+ # logger.error(error_title + ': ' + repr(exp)) + # export_attributes["contacts"] = {} + # except Exception as exp: + # error_title = "Export of " + title + " failed" + # logger.error(error_title + ': ' + repr(exp)) + # raise StandardError(title + ": " + repr(exp)) + # else: + # logger.info(title + ": " + json.dumps(entries)) + # + # ################################## + # # Emails + # ################################## + # title = "Emails" + # try: + # logger.info(title) + # entries = get_emails(account_id=account_id) + # export_attributes["emails"] = entries + # except IndexError as exp: + # error_title = "Export of " + title + " failed. No entries in database." + # logger.error(error_title + ': ' + repr(exp)) + # export_attributes["emails"] = {} + # except Exception as exp: + # error_title = "Export of " + title + " failed" + # logger.error(error_title + ': ' + repr(exp)) + # raise StandardError(title + ": " + repr(exp)) + # else: + # logger.info(title + ": " + json.dumps(entries)) + # + # ################################## + # # Telephones + # ################################## + # title = "Telephones" + # try: + # logger.info(title) + # entries = get_telephones(account_id=account_id) + # export_attributes["telephones"] = entries + # except IndexError as exp: + # error_title = "Export of " + title + " failed. No entries in database." 
+ # logger.error(error_title + ': ' + repr(exp)) + # export_attributes["telephones"] = {} + # except Exception as exp: + # error_title = "Export of " + title + " failed" + # logger.error(error_title + ': ' + repr(exp)) + # raise StandardError(title + ": " + repr(exp)) + # else: + # logger.info(title + ": " + json.dumps(entries)) + # + # ################################## + # # Settings + # ################################## + # title = "Settings" + # try: + # logger.info(title) + # entries = get_settings(account_id=account_id) + # export_attributes["settings"] = entries + # except IndexError as exp: + # error_title = "Export of " + title + " failed. No entries in database." + # logger.error(error_title + ': ' + repr(exp)) + # export_attributes["settings"] = {} + # except Exception as exp: + # error_title = "Export of " + title + " failed" + # logger.error(error_title + ': ' + repr(exp)) + # raise StandardError(title + ": " + repr(exp)) + # else: + # logger.info(title + ": " + json.dumps(entries)) + # + # ################################## + # # Event logs + # ################################## + # title = "Event logs" + # try: + # logger.info(title) + # entries = get_event_logs(account_id=account_id) + # export_attributes["logs"] = {} + # export_attributes["logs"]["events"] = entries + # except IndexError as exp: + # error_title = "Export of " + title + " failed. No entries in database." 
+ # logger.error(error_title + ': ' + repr(exp)) + # export_attributes["logs"] = {} + # export_attributes["logs"]["events"] = {} + # except Exception as exp: + # error_title = "Export of " + title + " failed" + # logger.error(error_title + ': ' + repr(exp)) + # raise StandardError(title + ": " + repr(exp)) + # else: + # logger.info(title + ": " + json.dumps(entries)) + + ################################## + ################################## + ################################## + # Preparing return content + ################################## + title = "export['attributes'] = export_attributes" + try: + logger.info(title) + export["attributes"] = export_attributes + except Exception as exp: + error_title = title + " failed" + logger.error(error_title + ': ' + repr(exp)) + raise StandardError(title + ": " + repr(exp)) + else: + logger.info("Content of export: " + json.dumps(export)) + + return export + + + - cursor, data = get_telephones_by_account(cursor=cursor, account_id=account_id) - return cursor, data diff --git a/Account/app/mod_account/models.py b/Account/app/mod_account/models.py index 1959f72..6d87c2e 100644 --- a/Account/app/mod_account/models.py +++ b/Account/app/mod_account/models.py @@ -13,7 +13,16 @@ # Import dependencies from marshmallow import Schema, fields, validates, ValidationError -from marshmallow.validate import Range, Regexp, ContainsOnly, Equal +from marshmallow.validate import Range, Regexp, ContainsOnly, Equal, OneOf, Length + +TYPE_LIST = ["Personal", "Work", "School", "Other"] # List that contains types entries +PRIMARY_LIST = ["True", "False"] # List that contains primary values + +STRING_MIN_LENGTH = 3 +STRING_MAX_LENGTH = 255 + +PWD_MIN_LENGTH = 4 +PWD_MAX_LENGTH = 20 GENERAL_STRING_MIN_LENGTH = 3 GENERAL_STRING_MAX_LENGTH = 100 @@ -36,7 +45,7 @@ LASTNAME_REGEX = GENERAL_REGEX class BaseSchema(Schema): - type = fields.Str() + type = fields.Str(validate=[Length(min=STRING_MIN_LENGTH, max=STRING_MAX_LENGTH), OneOf(TYPE_LIST)]) class 
AccountSchema(BaseSchema): @@ -81,11 +90,174 @@ class AccountSchema(BaseSchema): acceptTermsOfService = fields.Bool(required=True, validate=[ContainsOnly(choices='True')]) -class AccountSchema2(BaseSchema): - username = fields.Str(required=True) - password = fields.Str(required=True) - firstName = fields.Str(required=True) - lastName = fields.Str(required=True) - email = fields.Email(required=True) +## +## +# Account creation +class AccountAttributes(Schema): + username = fields.Str(validate=Length(min=STRING_MIN_LENGTH, max=STRING_MAX_LENGTH)) + password = fields.Str(validate=Length(min=PWD_MIN_LENGTH, max=PWD_MAX_LENGTH)) + firstName = fields.Str(validate=Length(min=STRING_MIN_LENGTH, max=STRING_MAX_LENGTH)) + lastName = fields.Str(validate=Length(min=STRING_MIN_LENGTH, max=STRING_MAX_LENGTH)) + email = fields.Email(required=True, validate=Length(min=STRING_MIN_LENGTH, max=STRING_MAX_LENGTH)) dateOfBirth = fields.Date(required=True, error='Not valid date. Provide ISO8601-formatted date string.') acceptTermsOfService = fields.Str(required=True, validate=Equal("True")) + + +class AccountData(Schema): + type = fields.Str(required=True, validate=Equal("Account")) + id = fields.Str(validate=Length(max=STRING_MAX_LENGTH)) + attributes = fields.Nested(nested=AccountAttributes, required=True) + + +class AccountSchema2(Schema): + data = fields.Nested(nested=AccountData, required=True) + + +## +## +# particulars +class ParticularsAttributes(Schema): + firstName = fields.Str(validate=Length(min=STRING_MIN_LENGTH, max=STRING_MAX_LENGTH)) + lastName = fields.Str(validate=Length(min=STRING_MIN_LENGTH, max=STRING_MAX_LENGTH)) + img = fields.Str(validate=Length(min=STRING_MIN_LENGTH, max=STRING_MAX_LENGTH)) + dateOfBirth = fields.Date(error='Not valid date. 
Provide ISO8601-formatted date string.') + + +class ParticularsData(Schema): + type = fields.Str(required=True, validate=Equal("Particular")) + id = fields.Str(validate=Length(max=STRING_MAX_LENGTH)) + attributes = fields.Nested(nested=ParticularsAttributes, required=True) + + +class ParticularsSchema(Schema): + data = fields.Nested(nested=ParticularsData, required=True) + + +class ParticularsDataForUpdate(Schema): + type = fields.Str(required=True, validate=Equal("Particular")) + id = fields.Str(required=True, validate=Length(max=STRING_MAX_LENGTH)) + attributes = fields.Nested(nested=ParticularsAttributes, required=True) + + +class ParticularsSchemaForUpdate(Schema): + data = fields.Nested(nested=ParticularsDataForUpdate, required=True) + + +## +## +# Contacts +class ContactsAttributes(Schema): + address1 = fields.Str(validate=Length(min=STRING_MIN_LENGTH, max=STRING_MAX_LENGTH)) + address2 = fields.Str(validate=Length(min=STRING_MIN_LENGTH, max=STRING_MAX_LENGTH)) + postalCode = fields.Str(validate=Length(min=STRING_MIN_LENGTH, max=STRING_MAX_LENGTH)) + city = fields.Str(validate=Length(min=STRING_MIN_LENGTH, max=STRING_MAX_LENGTH)) + state = fields.Str(validate=Length(min=STRING_MIN_LENGTH, max=STRING_MAX_LENGTH)) + country = fields.Str(validate=Length(min=STRING_MIN_LENGTH, max=STRING_MAX_LENGTH)) + type = fields.Str(validate=[Length(min=STRING_MIN_LENGTH, max=STRING_MAX_LENGTH), OneOf(TYPE_LIST)]) + primary = fields.Str(validate=OneOf(PRIMARY_LIST)) # TODO: Not acting as Boolean for MySQL + + +class ContactsData(Schema): + type = fields.Str(required=True, validate=Equal("Contact")) + id = fields.Str(validate=Length(max=STRING_MAX_LENGTH)) + attributes = fields.Nested(nested=ContactsAttributes, required=True) + + +class ContactsSchema(Schema): + data = fields.Nested(nested=ContactsData, required=True) + + +class ContactsDataForUpdate(Schema): + type = fields.Str(required=True, validate=Equal("Contact")) + id = fields.Str(required=True, 
validate=Length(max=STRING_MAX_LENGTH)) + attributes = fields.Nested(nested=ContactsAttributes, required=True) + + +class ContactsSchemaForUpdate(Schema): + data = fields.Nested(nested=ContactsDataForUpdate, required=True) + + +## +## +# Telephone +class TelephonesAttributes(Schema): + tel = fields.Str(validate=Length(min=STRING_MIN_LENGTH, max=STRING_MAX_LENGTH)) + type = fields.Str(validate=[Length(min=STRING_MIN_LENGTH, max=STRING_MAX_LENGTH), OneOf(TYPE_LIST)]) + primary = fields.Str(validate=OneOf(PRIMARY_LIST)) # TODO: Not acting as Boolean for MySQL + + +class TelephonesData(Schema): + type = fields.Str(required=True, validate=Equal("Telephone")) + id = fields.Str(validate=Length(max=STRING_MAX_LENGTH)) + attributes = fields.Nested(nested=TelephonesAttributes, required=True) + + +class TelephonesSchema(Schema): + data = fields.Nested(nested=TelephonesData, required=True) + + +class TelephonesDataForUpdate(Schema): + type = fields.Str(required=True, validate=Equal("Telephone")) + id = fields.Str(required=True, validate=Length(max=STRING_MAX_LENGTH)) + attributes = fields.Nested(nested=TelephonesAttributes, required=True) + + +class TelephonesSchemaForUpdate(Schema): + data = fields.Nested(nested=TelephonesDataForUpdate, required=True) + + +## +## +# Email +class EmailsAttributes(Schema): + email = fields.Email(validate=Length(min=STRING_MIN_LENGTH, max=STRING_MAX_LENGTH)) + type = fields.Str(validate=[Length(min=STRING_MIN_LENGTH, max=STRING_MAX_LENGTH), OneOf(TYPE_LIST)]) + primary = fields.Str(validate=OneOf(PRIMARY_LIST)) # TODO: Not acting as Boolean for MySQL + + +class EmailsData(Schema): + type = fields.Str(required=True, validate=Equal("Email")) + id = fields.Str(validate=Length(max=STRING_MAX_LENGTH)) + attributes = fields.Nested(nested=EmailsAttributes, required=True) + + +class EmailsSchema(Schema): + data = fields.Nested(nested=EmailsData, required=True) + + +class EmailsDataForUpdate(Schema): + type = fields.Str(required=True, 
validate=Equal("Email")) + id = fields.Str(required=True, validate=Length(max=STRING_MAX_LENGTH)) + attributes = fields.Nested(nested=EmailsAttributes, required=True) + + +class EmailsSchemaForUpdate(Schema): + data = fields.Nested(nested=EmailsDataForUpdate, required=True) + + +## +## +# Settings +class SettingsAttributes(Schema): + key = fields.Str(validate=Length(min=STRING_MIN_LENGTH, max=STRING_MAX_LENGTH)) + value = fields.Str(validate=Length(min=2, max=STRING_MAX_LENGTH)) + + +class SettingsData(Schema): + type = fields.Str(required=True, validate=Equal("Setting")) + id = fields.Str(validate=Length(max=STRING_MAX_LENGTH)) + attributes = fields.Nested(nested=SettingsAttributes, required=True) + + +class SettingsSchema(Schema): + data = fields.Nested(nested=SettingsData, required=True) + + +class SettingsDataForUpdate(Schema): + type = fields.Str(required=True, validate=Equal("Setting")) + id = fields.Str(required=True, validate=Length(max=STRING_MAX_LENGTH)) + attributes = fields.Nested(nested=SettingsAttributes, required=True) + + +class SettingsSchemaForUpdate(Schema): + data = fields.Nested(nested=SettingsDataForUpdate, required=True) diff --git a/Account/app/mod_account/services.py b/Account/app/mod_account/services.py index e513fd6..f0cea4d 100644 --- a/Account/app/mod_account/services.py +++ b/Account/app/mod_account/services.py @@ -16,178 +16,178 @@ logger = get_custom_logger('mod_account_services') - -def get_service_link_record_count_by_account(cursor=None, account_id=None): - if app.config["SUPER_DEBUG"]: - logger.debug('account_id: ' + repr(account_id)) - - ### - logger.debug('get_consent_record_count(account_id)') - if app.config["SUPER_DEBUG"]: - logger.debug('account_id: ' + repr(account_id)) - - sql_query = "SELECT count(MyDataAccount.ServiceLinkRecords.id) " \ - "FROM MyDataAccount.ServiceLinkRecords " \ - "WHERE MyDataAccount.ServiceLinkRecords.Accounts_id = '%s'" % (account_id) - - try: - cursor, count = execute_sql_select(cursor=cursor, 
sql_query=sql_query) - count = count[0][0] - except Exception as exp: - logger.error('Failed') - logger.debug('sql_query: ' + repr(exp)) - raise - else: - if app.config["SUPER_DEBUG"]: - logger.debug('contacts: ' + repr(count)) - - return cursor, count - - -def get_consent_record_count_by_account(cursor=None, account_id=None): - if app.config["SUPER_DEBUG"]: - logger.debug('account_id: ' + repr(account_id)) - - ### - logger.debug('get_consent_record_count(account_id)') - if app.config["SUPER_DEBUG"]: - logger.debug('account_id: ' + repr(account_id)) - - sql_query = "SELECT count(MyDataAccount.ConsentRecords.id) " \ - "FROM MyDataAccount.ConsentRecords " \ - "WHERE MyDataAccount.ConsentRecords.Accounts_id = '%s'" % (account_id) - - try: - cursor, count = execute_sql_select(cursor=cursor, sql_query=sql_query) - count = count[0][0] - except Exception as exp: - logger.error('Failed') - logger.debug('sql_query: ' + repr(exp)) - raise - else: - if app.config["SUPER_DEBUG"]: - logger.debug('contacts: ' + repr(count)) - - return cursor, count - - -def get_contacts_by_account(cursor=None, account_id=None): - - sql_query = "SELECT " \ - "MyDataAccount.Contacts.id, " \ - "MyDataAccount.Contacts.address1, " \ - "MyDataAccount.Contacts.address2, " \ - "MyDataAccount.Contacts.postalCode, " \ - "MyDataAccount.Contacts.city, " \ - "MyDataAccount.Contacts.state, " \ - "MyDataAccount.Contacts.country, " \ - "MyDataAccount.Contacts.typeEnum, " \ - "MyDataAccount.Contacts.prime " \ - "FROM MyDataAccount.Contacts " \ - "WHERE Accounts_id = ('%s')" % (account_id) - - try: - cursor, data = execute_sql_select(cursor=cursor, sql_query=sql_query) - - contacts = [] - - for entry in data: - contact_obj = Contacts( - id=entry[0], - address1=entry[1], - address2=entry[2], - postal_code=entry[3], - city=entry[4], - state=entry[5], - country=entry[6], - type=entry[7], - prime=entry[8] - ) - - contacts.append(contact_obj.to_dict) - - - except Exception as exp: - logger.error('Failed') - 
logger.debug('sql_query: ' + repr(exp)) - raise - else: - if app.config["SUPER_DEBUG"]: - logger.debug('contacts: ' + repr(contacts)) - - return cursor, contacts - - -def get_emails_by_account(cursor=None, account_id=None): - - sql_query = "SELECT " \ - "MyDataAccount.Emails.id, " \ - "MyDataAccount.Emails.email, " \ - "MyDataAccount.Emails.typeEnum, " \ - "MyDataAccount.Emails.prime " \ - "FROM MyDataAccount.Emails " \ - "WHERE Accounts_id = ('%s')" % (account_id) - - try: - cursor, data = execute_sql_select(cursor=cursor, sql_query=sql_query) - - emails = [] - - for entry in data: - email_obj = Email( - id=entry[0], - email=entry[1], - type=entry[2], - prime=entry[3] - ) - - emails.append(email_obj.to_dict) - - - except Exception as exp: - logger.error('Failed') - logger.debug('sql_query: ' + repr(exp)) - raise - else: - if app.config["SUPER_DEBUG"]: - logger.debug('contacts: ' + repr(emails)) - - return cursor, emails - - -def get_telephones_by_account(cursor=None, account_id=None): - - sql_query = "SELECT " \ - "MyDataAccount.Telephones.id, " \ - "MyDataAccount.Telephones.tel, " \ - "MyDataAccount.Telephones.typeEnum, " \ - "MyDataAccount.Telephones.prime " \ - "FROM MyDataAccount.Telephones " \ - "WHERE Accounts_id = ('%s')" % (account_id) - - try: - cursor, data = execute_sql_select(cursor=cursor, sql_query=sql_query) - - telephones = [] - - for entry in data: - telephone_obj = Telephone( - id=entry[0], - tel=entry[1], - type=entry[2], - prime=entry[3] - ) - - telephones.append(telephone_obj.to_dict) - - - except Exception as exp: - logger.error('Failed') - logger.debug('sql_query: ' + repr(exp)) - raise - else: - if app.config["SUPER_DEBUG"]: - logger.debug('contacts: ' + repr(telephones)) - - return cursor, telephones - +# +# def get_service_link_record_count_by_account(cursor=None, account_id=None): +# if app.config["SUPER_DEBUG"]: +# logger.debug('account_id: ' + repr(account_id)) +# +# ### +# logger.debug('get_consent_record_count(account_id)') +# if 
app.config["SUPER_DEBUG"]: +# logger.debug('account_id: ' + repr(account_id)) +# +# sql_query = "SELECT count(MyDataAccount.ServiceLinkRecords.id) " \ +# "FROM MyDataAccount.ServiceLinkRecords " \ +# "WHERE MyDataAccount.ServiceLinkRecords.Accounts_id = '%s'" % (account_id) +# +# try: +# cursor, count = execute_sql_select(cursor=cursor, sql_query=sql_query) +# count = count[0][0] +# except Exception as exp: +# logger.error('Failed') +# logger.debug('sql_query: ' + repr(exp)) +# raise +# else: +# if app.config["SUPER_DEBUG"]: +# logger.debug('contacts: ' + repr(count)) +# +# return cursor, count +# +# +# def get_consent_record_count_by_account(cursor=None, account_id=None): +# if app.config["SUPER_DEBUG"]: +# logger.debug('account_id: ' + repr(account_id)) +# +# ### +# logger.debug('get_consent_record_count(account_id)') +# if app.config["SUPER_DEBUG"]: +# logger.debug('account_id: ' + repr(account_id)) +# +# sql_query = "SELECT count(MyDataAccount.ConsentRecords.id) " \ +# "FROM MyDataAccount.ConsentRecords " \ +# "WHERE MyDataAccount.ConsentRecords.Accounts_id = '%s'" % (account_id) +# +# try: +# cursor, count = execute_sql_select(cursor=cursor, sql_query=sql_query) +# count = count[0][0] +# except Exception as exp: +# logger.error('Failed') +# logger.debug('sql_query: ' + repr(exp)) +# raise +# else: +# if app.config["SUPER_DEBUG"]: +# logger.debug('contacts: ' + repr(count)) +# +# return cursor, count +# +# +# def get_contacts_by_account(cursor=None, account_id=None): +# +# sql_query = "SELECT " \ +# "MyDataAccount.Contacts.id, " \ +# "MyDataAccount.Contacts.address1, " \ +# "MyDataAccount.Contacts.address2, " \ +# "MyDataAccount.Contacts.postalCode, " \ +# "MyDataAccount.Contacts.city, " \ +# "MyDataAccount.Contacts.state, " \ +# "MyDataAccount.Contacts.country, " \ +# "MyDataAccount.Contacts.typeEnum, " \ +# "MyDataAccount.Contacts.prime " \ +# "FROM MyDataAccount.Contacts " \ +# "WHERE Accounts_id = ('%s')" % (account_id) +# +# try: +# cursor, data = 
execute_sql_select(cursor=cursor, sql_query=sql_query) +# +# contacts = [] +# +# for entry in data: +# contact_obj = Contacts( +# id=entry[0], +# address1=entry[1], +# address2=entry[2], +# postal_code=entry[3], +# city=entry[4], +# state=entry[5], +# country=entry[6], +# type=entry[7], +# prime=entry[8] +# ) +# +# contacts.append(contact_obj.to_dict) +# +# +# except Exception as exp: +# logger.error('Failed') +# logger.debug('sql_query: ' + repr(exp)) +# raise +# else: +# if app.config["SUPER_DEBUG"]: +# logger.debug('contacts: ' + repr(contacts)) +# +# return cursor, contacts +# +# +# def get_emails_by_account(cursor=None, account_id=None): +# +# sql_query = "SELECT " \ +# "MyDataAccount.Emails.id, " \ +# "MyDataAccount.Emails.email, " \ +# "MyDataAccount.Emails.typeEnum, " \ +# "MyDataAccount.Emails.prime " \ +# "FROM MyDataAccount.Emails " \ +# "WHERE Accounts_id = ('%s')" % (account_id) +# +# try: +# cursor, data = execute_sql_select(cursor=cursor, sql_query=sql_query) +# +# emails = [] +# +# for entry in data: +# email_obj = Email( +# id=entry[0], +# email=entry[1], +# type=entry[2], +# prime=entry[3] +# ) +# +# emails.append(email_obj.to_dict) +# +# +# except Exception as exp: +# logger.error('Failed') +# logger.debug('sql_query: ' + repr(exp)) +# raise +# else: +# if app.config["SUPER_DEBUG"]: +# logger.debug('contacts: ' + repr(emails)) +# +# return cursor, emails +# +# +# def get_telephones_by_account(cursor=None, account_id=None): +# +# sql_query = "SELECT " \ +# "MyDataAccount.Telephones.id, " \ +# "MyDataAccount.Telephones.tel, " \ +# "MyDataAccount.Telephones.typeEnum, " \ +# "MyDataAccount.Telephones.prime " \ +# "FROM MyDataAccount.Telephones " \ +# "WHERE Accounts_id = ('%s')" % (account_id) +# +# try: +# cursor, data = execute_sql_select(cursor=cursor, sql_query=sql_query) +# +# telephones = [] +# +# for entry in data: +# telephone_obj = Telephone( +# id=entry[0], +# tel=entry[1], +# type=entry[2], +# prime=entry[3] +# ) +# +# 
telephones.append(telephone_obj.to_dict) +# +# +# except Exception as exp: +# logger.error('Failed') +# logger.debug('sql_query: ' + repr(exp)) +# raise +# else: +# if app.config["SUPER_DEBUG"]: +# logger.debug('contacts: ' + repr(telephones)) +# +# return cursor, telephones +# diff --git a/Account/app/mod_account/view_api.py b/Account/app/mod_account/view_api.py index 8fdbc8e..91b9f9c 100644 --- a/Account/app/mod_account/view_api.py +++ b/Account/app/mod_account/view_api.py @@ -19,14 +19,21 @@ # Import services from app.helpers import get_custom_logger, make_json_response, ApiError -from app.mod_account.controllers import get_service_link_record_count, get_consent_record_count, get_telephones, \ - get_emails, get_contacts, get_passive_consents_count, get_potential_services_count, get_potential_consents_count -from app.mod_account.models import AccountSchema2 +from app.mod_account.controllers import get_particulars, get_particular, verify_account_id_match, \ + update_particular, get_contacts, add_contact, get_contact, update_contact, get_emails, add_email, get_email, \ + update_email, get_telephone, update_telephone, get_telephones, add_telephone, get_settings, add_setting, get_setting, \ + update_setting, get_event_log, get_event_logs, get_slrs, get_slr, get_slsrs, get_slsr, get_cr, get_crs, get_csrs, \ + get_csr, export_account +from app.mod_account.models import AccountSchema2, ParticularsSchema, ContactsSchema, ContactsSchemaForUpdate, \ + EmailsSchema, EmailsSchemaForUpdate, TelephonesSchema, TelephonesSchemaForUpdate, SettingsSchema, \ + SettingsSchemaForUpdate from app.mod_api_auth.controllers import gen_account_api_key, requires_api_auth_user, provideApiKey from app.mod_blackbox.controllers import gen_account_key from app.mod_database.helpers import get_db_cursor from app.mod_database.models import Account, LocalIdentityPWD, LocalIdentity, Salt, Particulars, Email +from app.mod_api_auth.controllers import get_account_id_by_api_key + mod_account_api = 
Blueprint('account_api', __name__, template_folder='templates') # create logger with 'spam_application' @@ -39,14 +46,19 @@ def post(self): """ Example JSON { - "username": "jukkakukkulansukka", - "password": "kukka", - "firstName": "string", - "lastName": "string", - "email": "jukka@kukkula.sukka", - "dateOfBirth": "18-08-2016", - "acceptTermsOfService": "True" - } + "data": { + "type": "Account", + "attributes": { + 'firstName': 'Erkki', + 'lastName': 'Esimerkki', + 'dateOfBirth': '2016-05-31', + 'email': 'erkki.esimerkki@examlpe.org', + 'username': 'testUser', + 'password': 'Hello', + 'acceptTermsOfService': 'True' + } + } + } :return: """ @@ -75,13 +87,13 @@ def post(self): logger.debug("JSON validation -> OK") try: - username = json_data['username'] - password = json_data['password'] - firstName = json_data['firstName'] - lastName = json_data['lastName'] - email_address = json_data['email'] - dateOfBirth = json_data['dateOfBirth'] - acceptTermsOfService = json_data['acceptTermsOfService'] + username = json_data['data']['attributes']['username'] + password = json_data['data']['attributes']['password'] + firstName = json_data['data']['attributes']['firstName'] + lastName = json_data['data']['attributes']['lastName'] + email_address = json_data['data']['attributes']['email'] + dateOfBirth = json_data['data']['attributes']['dateOfBirth'] + acceptTermsOfService = json_data['data']['attributes']['acceptTermsOfService'] global_identifier = str(uuid.uuid4()) salt_str = str(bcrypt.gensalt()) @@ -194,13 +206,7 @@ def post(self): response_data['data'] = {} response_data['data']['type'] = "Account" response_data['data']['id'] = str(account.id) - response_data['data']['attributes'] = {} - response_data['data']['attributes']['username'] = username - response_data['data']['attributes']['firstName'] = firstName - response_data['data']['attributes']['lastName'] = lastName - response_data['data']['attributes']['email'] = email_address - 
response_data['data']['attributes']['dateOfBirth'] = dateOfBirth - response_data['data']['attributes']['acceptTermsOfService'] = acceptTermsOfService + response_data['data']['attributes'] = json_data['data']['attributes'] except Exception as exp: logger.error('Could not prepare response data: ' + repr(exp)) raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) @@ -213,35 +219,180 @@ def post(self): return make_json_response(data=response_data_dict, status_code=201) -class ExportAccount(Resource): +class AccountExport(Resource): @requires_api_auth_user def get(self, account_id): + logger.info("AccountExport") + try: + endpoint = str(api.url_for(self, account_id=account_id)) + except Exception as exp: + endpoint = str(__name__) + + try: + logger.info("Fetching Api-Key from Headers") + api_key = request.headers.get('Api-Key') + except Exception as exp: + logger.error("No ApiKey in headers: " + repr(repr(exp))) + return provideApiKey(endpoint=endpoint) + else: + logger.info("Api-Key: " + api_key) + + try: + account_id = str(account_id) + except Exception as exp: + error_title = "Unsupported account_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("account_id: " + account_id) + + # Check if Account IDs from path and ApiKey are matching + if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint): + logger.info("Account IDs are matching") + + # Get Account Export + try: + logger.info("Exporting Account") + db_entries = export_account(account_id=account_id) + except Exception as exp: + error_title = "Account Export failed" + logger.error(error_title + repr(exp)) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("Account Export Succeed") + + # Response data container + try: + db_entry_list = db_entries + response_data = {} + response_data['data'] = db_entry_list + 
except Exception as exp: + logger.error('Could not prepare response data: ' + repr(exp)) + raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) + else: + logger.info('Response data ready') + logger.debug('response_data: ' + repr(response_data)) + + response_data_dict = dict(response_data) + logger.debug('response_data_dict: ' + repr(response_data_dict)) + return make_json_response(data=response_data_dict, status_code=200) + +class AccountParticulars(Resource): + @requires_api_auth_user + def get(self, account_id): + logger.info("AccountParticulars") try: endpoint = str(api.url_for(self, account_id=account_id)) except Exception as exp: endpoint = str(__name__) try: + logger.info("Fetching Api-Key from Headers") api_key = request.headers.get('Api-Key') except Exception as exp: - logger.error("No ApiKey in headers") - logger.debug("No ApiKey in headers: " + repr(repr(exp))) + logger.error("No ApiKey in headers: " + repr(repr(exp))) return provideApiKey(endpoint=endpoint) + else: + logger.info("Api-Key: " + api_key) try: account_id = str(account_id) except Exception as exp: - raise ApiError(code=400, title="Unsupported account_id", detail=repr(exp), source=endpoint) + error_title = "Unsupported account_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("account_id: " + account_id) + + # Check if Account IDs from path and ApiKey are matching + if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint): + logger.info("Account IDs are matching") + + # Get Particulars + try: + logger.info("Fetching Particulars") + db_entries = get_particulars(account_id=account_id) + except Exception as exp: + error_title = "No Particulars found" + logger.error(error_title + repr(exp)) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("Particulars Fetched") + 
logger.info("Particulars: ") # Response data container try: + db_entry_list = db_entries response_data = {} - response_data['meta'] = {} - response_data['meta']['activationInstructions'] = "Account Export" + response_data['data'] = db_entry_list + except Exception as exp: + logger.error('Could not prepare response data: ' + repr(exp)) + raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) + else: + logger.info('Response data ready') + logger.debug('response_data: ' + repr(response_data)) - response_data['data'] = {} - response_data['data']['type'] = "Account" + response_data_dict = dict(response_data) + logger.debug('response_data_dict: ' + repr(response_data_dict)) + return make_json_response(data=response_data_dict, status_code=200) + + +class AccountParticular(Resource): + @requires_api_auth_user + def get(self, account_id, particulars_id): + logger.info("AccountParticulars") + try: + endpoint = str(api.url_for(self, account_id=account_id, particulars_id=particulars_id)) + except Exception as exp: + endpoint = str(__name__) + + try: + logger.info("Fetching Api-Key from Headers") + api_key = request.headers.get('Api-Key') + except Exception as exp: + logger.error("No ApiKey in headers: " + repr(repr(exp))) + return provideApiKey(endpoint=endpoint) + else: + logger.info("Api-Key: " + api_key) + + try: + account_id = str(account_id) + except Exception as exp: + error_title = "Unsupported account_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("account_id: " + account_id) + + try: + particulars_id = str(particulars_id) + except Exception as exp: + error_title = "Unsupported particulars_id" + logger.error(error_title + repr(exp)) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("particulars_id: " + particulars_id) + + # Check if Account IDs from path and ApiKey are matching + if 
verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint): + logger.info("Account IDs are matching") + + # Get Particulars + try: + logger.info("Fetching Particulars") + db_entries = get_particular(account_id=account_id, id=particulars_id) + except Exception as exp: + error_title = "No Particulars found" + logger.error(error_title) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("Particulars Fetched") + + # Response data container + try: + response_data = {} + response_data['data'] = db_entries except Exception as exp: logger.error('Could not prepare response data: ' + repr(exp)) raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) @@ -251,9 +402,2192 @@ def get(self, account_id): response_data_dict = dict(response_data) logger.debug('response_data_dict: ' + repr(response_data_dict)) - return make_json_response(data=response_data_dict, status_code=201) + return make_json_response(data=response_data_dict, status_code=200) + + @requires_api_auth_user + def patch(self, account_id, particulars_id): + logger.info("AccountParticular") + try: + endpoint = str(api.url_for(self, account_id=account_id, particulars_id=particulars_id)) + except Exception as exp: + endpoint = str(__name__) + + try: + logger.info("Fetching Api-Key from Headers") + api_key = request.headers.get('Api-Key') + except Exception as exp: + logger.error("No ApiKey in headers: " + repr(repr(exp))) + return provideApiKey(endpoint=endpoint) + else: + logger.info("Api-Key: " + api_key) + + try: + account_id = str(account_id) + except Exception as exp: + error_title = "Unsupported account_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("account_id: " + account_id) + + try: + particulars_id = str(particulars_id) + except Exception as exp: + error_title = "Unsupported particulars_id" + 
logger.error(error_title + repr(exp)) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("particulars_id: " + particulars_id) + + # Check if Account IDs from path and ApiKey are matching + if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint): + logger.info("Account IDs from path and ApiKey are matching") + + # load JSON from payload + json_data = request.get_json() + if not json_data: + error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'} + raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint) + else: + logger.debug("json_data: " + json.dumps(json_data)) + + # Validate payload content + schema = ParticularsSchema() + schema_validation_result = schema.load(json_data) + + # Check validation errors + if schema_validation_result.errors: + logger.error("Invalid payload") + raise ApiError(code=400, title="Invalid payload", detail=dict(schema_validation_result.errors), source=endpoint) + else: + logger.debug("JSON validation -> OK") + + try: + particulars_id_from_payload = json_data['data'].get("id", "") + except Exception as exp: + error_title = "Could not get id from payload" + logger.error(error_title) + raise ApiError( + code=404, + title=error_title, + detail=repr(exp), + source=endpoint + ) + + # Check if particulars_id from path and payload are matching + if particulars_id != particulars_id_from_payload: + error_title = "Particulars IDs from path and payload are not matching" + compared_ids = {'IdFromPath': particulars_id, 'IdFromPayload': particulars_id_from_payload} + logger.error(error_title + ", " + json.dumps(compared_ids)) + raise ApiError( + code=403, + title=error_title, + detail=compared_ids, + source=endpoint + ) + else: + logger.info("Particulars IDs from path and payload are matching") + + # Collect data + try: + attributes = json_data['data']['attributes'] + except Exception as exp: + 
error_title = "Could not collect data" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + + # Update Particulars + try: + logger.info("Updating Particulars") + db_entries = update_particular(account_id=account_id, id=particulars_id, attributes=attributes) + except Exception as exp: + error_title = "No Particulars found" + logger.error(error_title) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("Particulars Updated") + + # Response data container + try: + response_data = {} + response_data['data'] = db_entries + except Exception as exp: + logger.error('Could not prepare response data: ' + repr(exp)) + raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) + else: + logger.info('Response data ready') + logger.debug('response_data: ' + repr(response_data)) + + response_data_dict = dict(response_data) + logger.debug('response_data_dict: ' + repr(response_data_dict)) + return make_json_response(data=response_data_dict, status_code=200) + + +class AccountContacts(Resource): + @requires_api_auth_user + def get(self, account_id): + logger.info("AccountContacts") + try: + endpoint = str(api.url_for(self, account_id=account_id)) + except Exception as exp: + endpoint = str(__name__) + + try: + logger.info("Fetching Api-Key from Headers") + api_key = request.headers.get('Api-Key') + except Exception as exp: + logger.error("No ApiKey in headers: " + repr(repr(exp))) + return provideApiKey(endpoint=endpoint) + else: + logger.info("Api-Key: " + api_key) + + try: + account_id = str(account_id) + except Exception as exp: + error_title = "Unsupported account_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("account_id: " + account_id) + + # Check if Account IDs from path and ApiKey are matching + if 
verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint): + logger.info("Account IDs are matching") + + # Get Contacts + try: + logger.info("Fetching Contacts") + db_entries = get_contacts(account_id=account_id) + except Exception as exp: + error_title = "No Contacts found" + logger.error(error_title + repr(exp)) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("Contacts Fetched") + + # Response data container + try: + db_entry_list = db_entries + response_data = {} + response_data['data'] = db_entry_list + except Exception as exp: + logger.error('Could not prepare response data: ' + repr(exp)) + raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) + else: + logger.info('Response data ready') + logger.debug('response_data: ' + repr(response_data)) + + response_data_dict = dict(response_data) + logger.debug('response_data_dict: ' + repr(response_data_dict)) + return make_json_response(data=response_data_dict, status_code=200) + + @requires_api_auth_user + def post(self, account_id): + logger.info("AccountContacts") + try: + endpoint = str(api.url_for(self, account_id=account_id)) + except Exception as exp: + endpoint = str(__name__) + + try: + logger.info("Fetching Api-Key from Headers") + api_key = request.headers.get('Api-Key') + except Exception as exp: + logger.error("No ApiKey in headers: " + repr(repr(exp))) + return provideApiKey(endpoint=endpoint) + else: + logger.info("Api-Key: " + api_key) + + try: + account_id = str(account_id) + except Exception as exp: + error_title = "Unsupported account_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("account_id: " + account_id) + + # Check if Account IDs from path and ApiKey are matching + if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint): + logger.info("Account IDs from 
path and ApiKey are matching") + + # load JSON from payload + json_data = request.get_json() + if not json_data: + error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'} + raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint) + else: + logger.debug("json_data: " + json.dumps(json_data)) + + # Validate payload content + schema = ContactsSchema() + schema_validation_result = schema.load(json_data) + + # Check validation errors + if schema_validation_result.errors: + logger.error("Invalid payload") + raise ApiError(code=400, title="Invalid payload", detail=dict(schema_validation_result.errors), + source=endpoint) + else: + logger.debug("JSON validation -> OK") + + # Collect data + try: + attributes = json_data['data']['attributes'] + except Exception as exp: + error_title = "Could not collect data" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + + # Add Contact + try: + logger.info("Adding Contacts") + db_entries = add_contact(account_id=account_id, attributes=attributes) + except Exception as exp: + error_title = "Could not add Contact entry" + logger.error(error_title) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("Contacts Updated") + + # Response data container + try: + response_data = {} + response_data['data'] = db_entries + except Exception as exp: + logger.error('Could not prepare response data: ' + repr(exp)) + raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) + else: + logger.info('Response data ready') + logger.debug('response_data: ' + repr(response_data)) + + response_data_dict = dict(response_data) + logger.debug('response_data_dict: ' + repr(response_data_dict)) + return make_json_response(data=response_data_dict, status_code=201) + + +class AccountContact(Resource): + @requires_api_auth_user + def get(self, 
account_id, contacts_id): + logger.info("AccountContact") + try: + endpoint = str(api.url_for(self, account_id=account_id, contacts_id=contacts_id)) + except Exception as exp: + endpoint = str(__name__) + + try: + logger.info("Fetching Api-Key from Headers") + api_key = request.headers.get('Api-Key') + except Exception as exp: + logger.error("No ApiKey in headers: " + repr(repr(exp))) + return provideApiKey(endpoint=endpoint) + else: + logger.info("Api-Key: " + api_key) + + try: + account_id = str(account_id) + except Exception as exp: + error_title = "Unsupported account_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("account_id: " + account_id) + + try: + contacts_id = str(contacts_id) + except Exception as exp: + error_title = "Unsupported contacts_id" + logger.error(error_title + repr(exp)) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("contacts_id: " + contacts_id) + + # Check if Account IDs from path and ApiKey are matching + if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint): + logger.info("Account IDs are matching") + + # Get Contacts + try: + logger.info("Fetching Contacts") + db_entries = get_contact(account_id=account_id, id=contacts_id) + except Exception as exp: + error_title = "No Contacts found" + logger.error(error_title) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("Contacts Fetched") + + # Response data container + try: + response_data = {} + response_data['data'] = db_entries + except Exception as exp: + logger.error('Could not prepare response data: ' + repr(exp)) + raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) + else: + logger.info('Response data ready') + logger.debug('response_data: ' + repr(response_data)) + + response_data_dict = dict(response_data) + 
logger.debug('response_data_dict: ' + repr(response_data_dict)) + return make_json_response(data=response_data_dict, status_code=200) + + @requires_api_auth_user + def patch(self, account_id, contacts_id): # TODO: Should be PATCH instead of PUT + logger.info("AccountContact") + try: + endpoint = str(api.url_for(self, account_id=account_id, contacts_id=contacts_id)) + except Exception as exp: + endpoint = str(__name__) + + try: + logger.info("Fetching Api-Key from Headers") + api_key = request.headers.get('Api-Key') + except Exception as exp: + logger.error("No ApiKey in headers: " + repr(repr(exp))) + return provideApiKey(endpoint=endpoint) + else: + logger.info("Api-Key: " + api_key) + + try: + account_id = str(account_id) + except Exception as exp: + error_title = "Unsupported account_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("account_id: " + account_id) + + try: + contacts_id = str(contacts_id) + except Exception as exp: + error_title = "Unsupported contacts_id" + logger.error(error_title + repr(exp)) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("contacts_id: " + contacts_id) + + # Check if Account IDs from path and ApiKey are matching + if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint): + logger.info("Account IDs from path and ApiKey are matching") + + # load JSON from payload + json_data = request.get_json() + if not json_data: + error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'} + raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint) + else: + logger.debug("json_data: " + json.dumps(json_data)) + + # Validate payload content + schema = ContactsSchemaForUpdate() + schema_validation_result = schema.load(json_data) + + # Check validation errors + if schema_validation_result.errors: + 
logger.error("Invalid payload") + raise ApiError(code=400, title="Invalid payload", detail=dict(schema_validation_result.errors), source=endpoint) + else: + logger.debug("JSON validation -> OK") + + try: + contacts_id_from_payload = json_data['data'].get("id", "") + except Exception as exp: + error_title = "Could not get id from payload" + logger.error(error_title) + raise ApiError( + code=404, + title=error_title, + detail=repr(exp), + source=endpoint + ) + + # Check if contacts_id from path and payload are matching + if contacts_id != contacts_id_from_payload: + error_title = "Contact IDs from path and payload are not matching" + compared_ids = {'IdFromPath': contacts_id, 'IdFromPayload': contacts_id_from_payload} + logger.error(error_title + ", " + json.dumps(compared_ids)) + raise ApiError( + code=403, + title=error_title, + detail=compared_ids, + source=endpoint + ) + else: + logger.info("Contact IDs from path and payload are matching") + + # Collect data + try: + attributes = json_data['data']['attributes'] + except Exception as exp: + error_title = "Could not collect data" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + + # Update Contact + try: + logger.info("Updating Contacts") + db_entries = update_contact(account_id=account_id, id=contacts_id, attributes=attributes) + except Exception as exp: + error_title = "No Contacts found" + logger.error(error_title) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("Contacts Updated") + + # Response data container + try: + response_data = {} + response_data['data'] = db_entries + except Exception as exp: + logger.error('Could not prepare response data: ' + repr(exp)) + raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) + else: + logger.info('Response data ready') + logger.debug('response_data: ' + repr(response_data)) + + response_data_dict = 
dict(response_data) + logger.debug('response_data_dict: ' + repr(response_data_dict)) + return make_json_response(data=response_data_dict, status_code=200) + + +class AccountEmails(Resource): + @requires_api_auth_user + def get(self, account_id): + logger.info("AccountEmails") + try: + endpoint = str(api.url_for(self, account_id=account_id)) + except Exception as exp: + endpoint = str(__name__) + + try: + logger.info("Fetching Api-Key from Headers") + api_key = request.headers.get('Api-Key') + except Exception as exp: + logger.error("No ApiKey in headers: " + repr(repr(exp))) + return provideApiKey(endpoint=endpoint) + else: + logger.info("Api-Key: " + api_key) + + try: + account_id = str(account_id) + except Exception as exp: + error_title = "Unsupported account_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("account_id: " + account_id) + + # Check if Account IDs from path and ApiKey are matching + if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint): + logger.info("Account IDs are matching") + + # Get Emails + try: + logger.info("Fetching Emails") + db_entries = get_emails(account_id=account_id) + except Exception as exp: + error_title = "No Emails found" + logger.error(error_title + repr(exp)) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("Emails Fetched") + + # Response data container + try: + db_entry_list = db_entries + response_data = {} + response_data['data'] = db_entry_list + except Exception as exp: + logger.error('Could not prepare response data: ' + repr(exp)) + raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) + else: + logger.info('Response data ready') + logger.debug('response_data: ' + repr(response_data)) + + response_data_dict = dict(response_data) + logger.debug('response_data_dict: ' + repr(response_data_dict)) + return 
make_json_response(data=response_data_dict, status_code=200) + + @requires_api_auth_user + def post(self, account_id): + logger.info("AccountEmails") + try: + endpoint = str(api.url_for(self, account_id=account_id)) + except Exception as exp: + endpoint = str(__name__) + + try: + logger.info("Fetching Api-Key from Headers") + api_key = request.headers.get('Api-Key') + except Exception as exp: + logger.error("No ApiKey in headers: " + repr(repr(exp))) + return provideApiKey(endpoint=endpoint) + else: + logger.info("Api-Key: " + api_key) + + try: + account_id = str(account_id) + except Exception as exp: + error_title = "Unsupported account_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("account_id: " + account_id) + + # Check if Account IDs from path and ApiKey are matching + if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint): + logger.info("Account IDs from path and ApiKey are matching") + + # load JSON from payload + json_data = request.get_json() + if not json_data: + error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'} + raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint) + else: + logger.debug("json_data: " + json.dumps(json_data)) + + # Validate payload content + schema = EmailsSchema() + schema_validation_result = schema.load(json_data) + + # Check validation errors + if schema_validation_result.errors: + logger.error("Invalid payload") + raise ApiError(code=400, title="Invalid payload", detail=dict(schema_validation_result.errors), + source=endpoint) + else: + logger.debug("JSON validation -> OK") + + # Collect data + try: + attributes = json_data['data']['attributes'] + except Exception as exp: + error_title = "Could not collect data" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + + # Add Email + 
try: + logger.info("Adding Email") + db_entries = add_email(account_id=account_id, attributes=attributes) + except Exception as exp: + error_title = "Could not add Email entry" + logger.error(error_title) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("Email added") + + # Response data container + try: + response_data = {} + response_data['data'] = db_entries + except Exception as exp: + logger.error('Could not prepare response data: ' + repr(exp)) + raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) + else: + logger.info('Response data ready') + logger.debug('response_data: ' + repr(response_data)) + + response_data_dict = dict(response_data) + logger.debug('response_data_dict: ' + repr(response_data_dict)) + return make_json_response(data=response_data_dict, status_code=201) + + +class AccountEmail(Resource): + @requires_api_auth_user + def get(self, account_id, emails_id): + logger.info("AccountEmail") + try: + endpoint = str(api.url_for(self, account_id=account_id, emails_id=emails_id)) + except Exception as exp: + endpoint = str(__name__) + + try: + logger.info("Fetching Api-Key from Headers") + api_key = request.headers.get('Api-Key') + except Exception as exp: + logger.error("No ApiKey in headers: " + repr(repr(exp))) + return provideApiKey(endpoint=endpoint) + else: + logger.info("Api-Key: " + api_key) + + try: + account_id = str(account_id) + except Exception as exp: + error_title = "Unsupported account_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("account_id: " + account_id) + + try: + emails_id = str(emails_id) + except Exception as exp: + error_title = "Unsupported emails_id" + logger.error(error_title + repr(exp)) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("emails_id: " + emails_id) + + # Check if 
Account IDs from path and ApiKey are matching + if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint): + logger.info("Account IDs are matching") + + # Get Email + try: + logger.info("Fetching Email") + db_entries = get_email(account_id=account_id, id=emails_id) + except Exception as exp: + error_title = "No Email found" + logger.error(error_title) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("Email Fetched") + + # Response data container + try: + response_data = {} + response_data['data'] = db_entries + except Exception as exp: + logger.error('Could not prepare response data: ' + repr(exp)) + raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) + else: + logger.info('Response data ready') + logger.debug('response_data: ' + repr(response_data)) + + response_data_dict = dict(response_data) + logger.debug('response_data_dict: ' + repr(response_data_dict)) + return make_json_response(data=response_data_dict, status_code=200) + + @requires_api_auth_user + def patch(self, account_id, emails_id): # TODO: Should be PATCH instead of PUT + logger.info("AccountEmail") + try: + endpoint = str(api.url_for(self, account_id=account_id, emails_id=emails_id)) + except Exception as exp: + endpoint = str(__name__) + + try: + logger.info("Fetching Api-Key from Headers") + api_key = request.headers.get('Api-Key') + except Exception as exp: + logger.error("No ApiKey in headers: " + repr(repr(exp))) + return provideApiKey(endpoint=endpoint) + else: + logger.info("Api-Key: " + api_key) + + try: + account_id = str(account_id) + except Exception as exp: + error_title = "Unsupported account_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("account_id: " + account_id) + + try: + emails_id = str(emails_id) + except Exception as exp: + error_title = "Unsupported emails_id" + 
logger.error(error_title + repr(exp)) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("emails_id: " + emails_id) + + # Check if Account IDs from path and ApiKey are matching + if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint): + logger.info("Account IDs from path and ApiKey are matching") + + # load JSON from payload + json_data = request.get_json() + if not json_data: + error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'} + raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint) + else: + logger.debug("json_data: " + json.dumps(json_data)) + + # Validate payload content + schema = EmailsSchemaForUpdate() + schema_validation_result = schema.load(json_data) + + # Check validation errors + if schema_validation_result.errors: + logger.error("Invalid payload") + raise ApiError(code=400, title="Invalid payload", detail=dict(schema_validation_result.errors), source=endpoint) + else: + logger.debug("JSON validation -> OK") + + try: + emails_id_from_payload = json_data['data'].get("id", "") + except Exception as exp: + error_title = "Could not get id from payload" + logger.error(error_title) + raise ApiError( + code=404, + title=error_title, + detail=repr(exp), + source=endpoint + ) + + # Check if emails_id from path and payload are matching + if emails_id != emails_id_from_payload: + error_title = "Email IDs from path and payload are not matching" + compared_ids = {'IdFromPath': emails_id, 'IdFromPayload': emails_id_from_payload} + logger.error(error_title + ", " + json.dumps(compared_ids)) + raise ApiError( + code=403, + title=error_title, + detail=compared_ids, + source=endpoint + ) + else: + logger.info("Email IDs from path and payload are matching") + + # Collect data + try: + attributes = json_data['data']['attributes'] + except Exception as exp: + error_title = "Could not collect data" + 
logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + + # Update Email + try: + logger.info("Updating Emails") + db_entries = update_email(account_id=account_id, id=emails_id, attributes=attributes) + except Exception as exp: + # TODO: Error handling on more detailed level + error_title = "No Email found" + logger.error(error_title) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("Email Updated") + + # Response data container + try: + response_data = {} + response_data['data'] = db_entries + except Exception as exp: + logger.error('Could not prepare response data: ' + repr(exp)) + raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) + else: + logger.info('Response data ready') + logger.debug('response_data: ' + repr(response_data)) + + response_data_dict = dict(response_data) + logger.debug('response_data_dict: ' + repr(response_data_dict)) + return make_json_response(data=response_data_dict, status_code=200) + + +class AccountTelephones(Resource): + @requires_api_auth_user + def get(self, account_id): + logger.info("AccountTelephones") + try: + endpoint = str(api.url_for(self, account_id=account_id)) + except Exception as exp: + endpoint = str(__name__) + + try: + logger.info("Fetching Api-Key from Headers") + api_key = request.headers.get('Api-Key') + except Exception as exp: + logger.error("No ApiKey in headers: " + repr(repr(exp))) + return provideApiKey(endpoint=endpoint) + else: + logger.info("Api-Key: " + api_key) + + try: + account_id = str(account_id) + except Exception as exp: + error_title = "Unsupported account_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("account_id: " + account_id) + + # Check if Account IDs from path and ApiKey are matching + if verify_account_id_match(account_id=account_id, 
class AccountTelephones(Resource):
    """REST resource for the collection of Telephone entries of an Account."""

    @requires_api_auth_user
    def get(self, account_id):
        """
        Fetch all Telephone entries of an Account.

        :param account_id: id of the Account (path parameter)
        :return: JSON response (200) with entries under 'data'
        :raises ApiError: 400 on bad account_id, 404 when fetch fails, 500 on response build failure
        """
        logger.info("AccountTelephones")
        # Resolve the endpoint string for error reporting; fall back to module name.
        try:
            endpoint = str(api.url_for(self, account_id=account_id))
        except Exception as exp:
            endpoint = str(__name__)

        try:
            logger.info("Fetching Api-Key from Headers")
            api_key = request.headers.get('Api-Key')
        except Exception as exp:
            # Fix: log repr(exp) once (was repr(repr(exp))).
            logger.error("No ApiKey in headers: " + repr(exp))
            return provideApiKey(endpoint=endpoint)
        else:
            logger.info("Api-Key: " + api_key)

        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title)
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("account_id: " + account_id)

        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
            logger.info("Account IDs are matching")

        # Get Telephones
        try:
            logger.info("Fetching Telephones")
            db_entries = get_telephones(account_id=account_id)
        except Exception as exp:
            error_title = "No Telephones found"
            logger.error(error_title + repr(exp))
            raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("Telephones Fetched")

        # Response data container
        try:
            db_entry_list = db_entries
            response_data = {}
            response_data['data'] = db_entry_list
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))

        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)

    @requires_api_auth_user
    def post(self, account_id):
        """
        Add a new Telephone entry to an Account.

        :param account_id: id of the Account (path parameter)
        :return: JSON response (201) with the created entry under 'data'
        :raises ApiError: 400 on bad id / invalid payload, 404 when the add fails,
                          500 on response build failure
        """
        logger.info("AccountTelephones")
        try:
            endpoint = str(api.url_for(self, account_id=account_id))
        except Exception as exp:
            endpoint = str(__name__)

        try:
            logger.info("Fetching Api-Key from Headers")
            api_key = request.headers.get('Api-Key')
        except Exception as exp:
            logger.error("No ApiKey in headers: " + repr(exp))
            return provideApiKey(endpoint=endpoint)
        else:
            logger.info("Api-Key: " + api_key)

        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title)
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("account_id: " + account_id)

        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
            logger.info("Account IDs from path and ApiKey are matching")

        # load JSON from payload
        json_data = request.get_json()
        if not json_data:
            error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'}
            raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint)
        else:
            logger.debug("json_data: " + json.dumps(json_data))

        # Validate payload content
        schema = TelephonesSchema()
        schema_validation_result = schema.load(json_data)

        # Check validation errors
        if schema_validation_result.errors:
            logger.error("Invalid payload")
            raise ApiError(code=400, title="Invalid payload", detail=dict(schema_validation_result.errors),
                           source=endpoint)
        else:
            logger.debug("JSON validation -> OK")

        # Collect data
        try:
            attributes = json_data['data']['attributes']
        except Exception as exp:
            error_title = "Could not collect data"
            logger.error(error_title)
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)

        # Add Telephone
        try:
            logger.info("Adding Telephone")
            db_entries = add_telephone(account_id=account_id, attributes=attributes)
        except Exception as exp:
            error_title = "Could not add Telephone entry"
            logger.error(error_title)
            # NOTE(review): 404 for a failed insert is questionable (422/500 may
            # fit better) — kept for backward compatibility with API consumers.
            raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("Telephone added")

        # Response data container
        try:
            response_data = {}
            response_data['data'] = db_entries
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))

        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=201)
class AccountTelephone(Resource):
    """REST resource for a single Telephone entry of an Account."""

    @requires_api_auth_user
    def get(self, account_id, telephones_id):
        """
        Fetch one Telephone entry of an Account.

        :param account_id: id of the Account (path parameter)
        :param telephones_id: id of the Telephone entry (path parameter)
        :return: JSON response (200) with the entry under 'data'
        :raises ApiError: 400 on bad ids, 404 when not found, 500 on response build failure
        """
        logger.info("AccountTelephone")
        # Resolve the endpoint string for error reporting; fall back to module name.
        try:
            endpoint = str(api.url_for(self, account_id=account_id, telephones_id=telephones_id))
        except Exception as exp:
            endpoint = str(__name__)

        try:
            logger.info("Fetching Api-Key from Headers")
            api_key = request.headers.get('Api-Key')
        except Exception as exp:
            # Fix: log repr(exp) once (was repr(repr(exp))).
            logger.error("No ApiKey in headers: " + repr(exp))
            return provideApiKey(endpoint=endpoint)
        else:
            logger.info("Api-Key: " + api_key)

        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title)
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("account_id: " + account_id)

        try:
            telephones_id = str(telephones_id)
        except Exception as exp:
            error_title = "Unsupported telephones_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("telephones_id: " + telephones_id)

        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
            logger.info("Account IDs are matching")

        # Get Telephone
        try:
            logger.info("Fetching Telephone")
            db_entries = get_telephone(account_id=account_id, id=telephones_id)
        except Exception as exp:
            error_title = "No Telephone found"
            logger.error(error_title)
            raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("Telephone Fetched")

        # Response data container
        try:
            response_data = {}
            response_data['data'] = db_entries
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))

        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)

    @requires_api_auth_user
    def patch(self, account_id, telephones_id):
        """
        Update one Telephone entry of an Account.

        :param account_id: id of the Account (path parameter)
        :param telephones_id: id of the Telephone entry to update (path parameter)
        :return: JSON response (200) with the updated entry under 'data'
        :raises ApiError: 400 on bad ids / invalid payload, 403 on id mismatch,
                          404 when the entry is missing, 500 on response build failure
        """
        logger.info("AccountTelephone")
        try:
            endpoint = str(api.url_for(self, account_id=account_id, telephones_id=telephones_id))
        except Exception as exp:
            endpoint = str(__name__)

        try:
            logger.info("Fetching Api-Key from Headers")
            api_key = request.headers.get('Api-Key')
        except Exception as exp:
            logger.error("No ApiKey in headers: " + repr(exp))
            return provideApiKey(endpoint=endpoint)
        else:
            logger.info("Api-Key: " + api_key)

        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title)
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("account_id: " + account_id)

        try:
            telephones_id = str(telephones_id)
        except Exception as exp:
            error_title = "Unsupported telephones_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("telephones_id: " + telephones_id)

        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
            logger.info("Account IDs from path and ApiKey are matching")

        # load JSON from payload
        json_data = request.get_json()
        if not json_data:
            error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'}
            raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint)
        else:
            logger.debug("json_data: " + json.dumps(json_data))

        # Validate payload content
        schema = TelephonesSchemaForUpdate()
        schema_validation_result = schema.load(json_data)

        # Check validation errors
        if schema_validation_result.errors:
            logger.error("Invalid payload")
            raise ApiError(code=400, title="Invalid payload", detail=dict(schema_validation_result.errors), source=endpoint)
        else:
            logger.debug("JSON validation -> OK")

        try:
            telephones_id_from_payload = json_data['data'].get("id", "")
        except Exception as exp:
            error_title = "Could not get id from payload"
            logger.error(error_title)
            raise ApiError(
                code=404,
                title=error_title,
                detail=repr(exp),
                source=endpoint
            )

        # Check if telephones_id from path and payload are matching
        if telephones_id != telephones_id_from_payload:
            # Fix: copy-paste bug — the title said "Email IDs" although telephone
            # ids are compared here (the else-branch already logs "Telephone IDs").
            error_title = "Telephone IDs from path and payload are not matching"
            compared_ids = {'IdFromPath': telephones_id, 'IdFromPayload': telephones_id_from_payload}
            logger.error(error_title + ", " + json.dumps(compared_ids))
            raise ApiError(
                code=403,
                title=error_title,
                detail=compared_ids,
                source=endpoint
            )
        else:
            logger.info("Telephone IDs from path and payload are matching")

        # Collect data
        try:
            attributes = json_data['data']['attributes']
        except Exception as exp:
            error_title = "Could not collect data"
            logger.error(error_title)
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)

        # Update Telephone
        try:
            logger.info("Updating Telephone")
            db_entries = update_telephone(account_id=account_id, id=telephones_id, attributes=attributes)
        except Exception as exp:
            # TODO: Error handling on more detailed level
            error_title = "No Telephone found"
            logger.error(error_title)
            raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("Telephone Updated")

        # Response data container
        try:
            response_data = {}
            response_data['data'] = db_entries
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))

        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
class AccountSettings(Resource):
    """REST resource for the collection of Setting entries of an Account."""

    @requires_api_auth_user
    def get(self, account_id):
        """
        Fetch all Setting entries of an Account.

        :param account_id: id of the Account (path parameter)
        :return: JSON response (200) with entries under 'data'
        :raises ApiError: 400 on bad account_id, 404 when fetch fails, 500 on response build failure
        """
        logger.info("AccountSettings")
        # Resolve the endpoint string for error reporting; fall back to module name.
        try:
            endpoint = str(api.url_for(self, account_id=account_id))
        except Exception as exp:
            endpoint = str(__name__)

        try:
            logger.info("Fetching Api-Key from Headers")
            api_key = request.headers.get('Api-Key')
        except Exception as exp:
            # Fix: log repr(exp) once (was repr(repr(exp))).
            logger.error("No ApiKey in headers: " + repr(exp))
            return provideApiKey(endpoint=endpoint)
        else:
            logger.info("Api-Key: " + api_key)

        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title)
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("account_id: " + account_id)

        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
            logger.info("Account IDs are matching")

        # Get Settings
        try:
            logger.info("Fetching Settings")
            db_entries = get_settings(account_id=account_id)
        except Exception as exp:
            error_title = "No Settings found"
            logger.error(error_title + repr(exp))
            raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("Settings Fetched")

        # Response data container
        try:
            db_entry_list = db_entries
            response_data = {}
            response_data['data'] = db_entry_list
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))

        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)

    @requires_api_auth_user
    def post(self, account_id):
        """
        Add a new Setting entry to an Account.

        :param account_id: id of the Account (path parameter)
        :return: JSON response (201) with the created entry under 'data'
        :raises ApiError: 400 on bad id / invalid payload, 404 when the add fails,
                          500 on response build failure
        """
        logger.info("AccountSettings")
        try:
            endpoint = str(api.url_for(self, account_id=account_id))
        except Exception as exp:
            endpoint = str(__name__)

        try:
            logger.info("Fetching Api-Key from Headers")
            api_key = request.headers.get('Api-Key')
        except Exception as exp:
            logger.error("No ApiKey in headers: " + repr(exp))
            return provideApiKey(endpoint=endpoint)
        else:
            logger.info("Api-Key: " + api_key)

        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title)
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("account_id: " + account_id)

        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
            logger.info("Account IDs from path and ApiKey are matching")

        # load JSON from payload
        json_data = request.get_json()
        if not json_data:
            error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'}
            raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint)
        else:
            logger.debug("json_data: " + json.dumps(json_data))

        # Validate payload content
        schema = SettingsSchema()
        schema_validation_result = schema.load(json_data)

        # Check validation errors
        if schema_validation_result.errors:
            logger.error("Invalid payload")
            raise ApiError(code=400, title="Invalid payload", detail=dict(schema_validation_result.errors),
                           source=endpoint)
        else:
            logger.debug("JSON validation -> OK")

        # Collect data
        try:
            attributes = json_data['data']['attributes']
        except Exception as exp:
            error_title = "Could not collect data"
            logger.error(error_title)
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)

        # Add Setting
        try:
            logger.info("Adding Setting")
            db_entries = add_setting(account_id=account_id, attributes=attributes)
        except Exception as exp:
            error_title = "Could not add Setting entry"
            logger.error(error_title)
            # NOTE(review): 404 for a failed insert is questionable (422/500 may
            # fit better) — kept for backward compatibility with API consumers.
            raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("Setting added")

        # Response data container
        try:
            response_data = {}
            response_data['data'] = db_entries
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))

        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=201)
class AccountSetting(Resource):
    """REST resource for a single Setting entry of an Account."""

    @requires_api_auth_user
    def get(self, account_id, settings_id):
        """
        Fetch one Setting entry of an Account.

        :param account_id: id of the Account (path parameter)
        :param settings_id: id of the Setting entry (path parameter)
        :return: JSON response (200) with the entry under 'data'
        :raises ApiError: 400 on bad ids, 404 when not found, 500 on response build failure
        """
        logger.info("AccountSetting")
        # Resolve the endpoint string for error reporting; fall back to module name.
        try:
            endpoint = str(api.url_for(self, account_id=account_id, settings_id=settings_id))
        except Exception as exp:
            endpoint = str(__name__)

        try:
            logger.info("Fetching Api-Key from Headers")
            api_key = request.headers.get('Api-Key')
        except Exception as exp:
            # Fix: log repr(exp) once (was repr(repr(exp))).
            logger.error("No ApiKey in headers: " + repr(exp))
            return provideApiKey(endpoint=endpoint)
        else:
            logger.info("Api-Key: " + api_key)

        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title)
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("account_id: " + account_id)

        try:
            settings_id = str(settings_id)
        except Exception as exp:
            error_title = "Unsupported settings_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("settings_id: " + settings_id)

        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
            logger.info("Account IDs are matching")

        # Get Setting
        try:
            logger.info("Fetching Setting")
            db_entries = get_setting(account_id=account_id, id=settings_id)
        except Exception as exp:
            error_title = "No Setting found"
            logger.error(error_title)
            raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("Setting Fetched")

        # Response data container
        try:
            response_data = {}
            response_data['data'] = db_entries
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))

        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)

    @requires_api_auth_user
    def patch(self, account_id, settings_id):
        """
        Update one Setting entry of an Account.

        :param account_id: id of the Account (path parameter)
        :param settings_id: id of the Setting entry to update (path parameter)
        :return: JSON response (200) with the updated entry under 'data'
        :raises ApiError: 400 on bad ids / invalid payload, 403 on id mismatch,
                          404 when the entry is missing, 500 on response build failure
        """
        logger.info("AccountSetting")
        try:
            endpoint = str(api.url_for(self, account_id=account_id, settings_id=settings_id))
        except Exception as exp:
            endpoint = str(__name__)

        try:
            logger.info("Fetching Api-Key from Headers")
            api_key = request.headers.get('Api-Key')
        except Exception as exp:
            logger.error("No ApiKey in headers: " + repr(exp))
            return provideApiKey(endpoint=endpoint)
        else:
            logger.info("Api-Key: " + api_key)

        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title)
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("account_id: " + account_id)

        try:
            settings_id = str(settings_id)
        except Exception as exp:
            error_title = "Unsupported settings_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("settings_id: " + settings_id)

        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
            logger.info("Account IDs from path and ApiKey are matching")

        # load JSON from payload
        json_data = request.get_json()
        if not json_data:
            error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'}
            raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint)
        else:
            logger.debug("json_data: " + json.dumps(json_data))

        # Validate payload content
        schema = SettingsSchemaForUpdate()
        schema_validation_result = schema.load(json_data)

        # Check validation errors
        if schema_validation_result.errors:
            logger.error("Invalid payload")
            raise ApiError(code=400, title="Invalid payload", detail=dict(schema_validation_result.errors), source=endpoint)
        else:
            logger.debug("JSON validation -> OK")

        try:
            settings_id_from_payload = json_data['data'].get("id", "")
        except Exception as exp:
            error_title = "Could not get id from payload"
            logger.error(error_title)
            raise ApiError(
                code=404,
                title=error_title,
                detail=repr(exp),
                source=endpoint
            )

        # Check if settings_id from path and payload are matching
        if settings_id != settings_id_from_payload:
            # Fix: copy-paste bug — the title said "Email IDs" although setting
            # ids are compared here (the else-branch already logs "Setting IDs").
            error_title = "Setting IDs from path and payload are not matching"
            compared_ids = {'IdFromPath': settings_id, 'IdFromPayload': settings_id_from_payload}
            logger.error(error_title + ", " + json.dumps(compared_ids))
            raise ApiError(
                code=403,
                title=error_title,
                detail=compared_ids,
                source=endpoint
            )
        else:
            logger.info("Setting IDs from path and payload are matching")

        # Collect data
        try:
            attributes = json_data['data']['attributes']
        except Exception as exp:
            error_title = "Could not collect data"
            logger.error(error_title)
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)

        # Update Setting
        try:
            logger.info("Updating Setting")
            db_entries = update_setting(account_id=account_id, id=settings_id, attributes=attributes)
        except Exception as exp:
            # TODO: Error handling on more detailed level
            error_title = "No Setting found"
            logger.error(error_title)
            raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("Setting Updated")

        # Response data container
        try:
            response_data = {}
            response_data['data'] = db_entries
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))

        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
class AccountEventLogs(Resource):
    """REST resource for the collection of EventLog entries of an Account."""

    @requires_api_auth_user
    def get(self, account_id):
        """
        Fetch all EventLog entries of an Account.

        :param account_id: id of the Account (path parameter)
        :return: JSON response (200) with entries under 'data'
        :raises ApiError: 400 on bad account_id, 404 when fetch fails, 500 on response build failure
        """
        logger.info("AccountEventLogs")
        # Resolve the endpoint string for error reporting; fall back to module name.
        try:
            endpoint = str(api.url_for(self, account_id=account_id))
        except Exception as exp:
            endpoint = str(__name__)

        try:
            logger.info("Fetching Api-Key from Headers")
            api_key = request.headers.get('Api-Key')
        except Exception as exp:
            # Fix: log repr(exp) once (was repr(repr(exp))).
            logger.error("No ApiKey in headers: " + repr(exp))
            return provideApiKey(endpoint=endpoint)
        else:
            logger.info("Api-Key: " + api_key)

        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title)
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("account_id: " + account_id)

        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
            logger.info("Account IDs are matching")

        # Get EventLog
        try:
            logger.info("Fetching EventLog")
            db_entries = get_event_logs(account_id=account_id)
        except Exception as exp:
            error_title = "No EventLog found"
            logger.error(error_title + repr(exp))
            raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("EventLog Fetched")

        # Response data container
        try:
            db_entry_list = db_entries
            response_data = {}
            response_data['data'] = db_entry_list
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))

        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
class AccountEventLog(Resource):
    """REST resource for a single EventLog entry of an Account."""

    @requires_api_auth_user
    def get(self, account_id, event_log_id):
        """
        Fetch one EventLog entry of an Account.

        :param account_id: id of the Account (path parameter)
        :param event_log_id: id of the EventLog entry (path parameter)
        :return: JSON response (200) with the entry under 'data'
        :raises ApiError: 400 on bad ids, 404 when not found, 500 on response build failure
        """
        logger.info("AccountEventLog")
        # Resolve the endpoint string for error reporting; fall back to module name.
        try:
            endpoint = str(api.url_for(self, account_id=account_id, event_log_id=event_log_id))
        except Exception as exp:
            endpoint = str(__name__)

        try:
            logger.info("Fetching Api-Key from Headers")
            api_key = request.headers.get('Api-Key')
        except Exception as exp:
            # Fix: log repr(exp) once (was repr(repr(exp))).
            logger.error("No ApiKey in headers: " + repr(exp))
            return provideApiKey(endpoint=endpoint)
        else:
            logger.info("Api-Key: " + api_key)

        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title)
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("account_id: " + account_id)

        try:
            event_log_id = str(event_log_id)
        except Exception as exp:
            error_title = "Unsupported event_log_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("event_log_id: " + event_log_id)

        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
            logger.info("Account IDs are matching")

        # Get EventLog
        try:
            logger.info("Fetching EventLog")
            db_entries = get_event_log(account_id=account_id, id=event_log_id)
        except Exception as exp:
            error_title = "No EventLog found"
            logger.error(error_title)
            raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("EventLog Fetched")

        # Response data container
        try:
            response_data = {}
            response_data['data'] = db_entries
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))

        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
class AccountServiceLinkRecords(Resource):
    """REST resource for the collection of ServiceLinkRecords of an Account."""

    @requires_api_auth_user
    def get(self, account_id):
        """
        Fetch all ServiceLinkRecords of an Account.

        :param account_id: id of the Account (path parameter)
        :return: JSON response (200) with entries under 'data'
        :raises ApiError: 400 on bad account_id, 404 when fetch fails, 500 on response build failure
        """
        logger.info("AccountServiceLinkRecords")
        # Resolve the endpoint string for error reporting; fall back to module name.
        try:
            endpoint = str(api.url_for(self, account_id=account_id))
        except Exception as exp:
            endpoint = str(__name__)

        try:
            logger.info("Fetching Api-Key from Headers")
            api_key = request.headers.get('Api-Key')
        except Exception as exp:
            # Fix: log repr(exp) once (was repr(repr(exp))).
            logger.error("No ApiKey in headers: " + repr(exp))
            return provideApiKey(endpoint=endpoint)
        else:
            logger.info("Api-Key: " + api_key)

        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title)
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("account_id: " + account_id)

        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
            logger.info("Account IDs are matching")

        # Get ServiceLinkRecords
        try:
            logger.info("Fetching ServiceLinkRecords")
            db_entries = get_slrs(account_id=account_id)
        except Exception as exp:
            error_title = "No ServiceLinkRecords found"
            logger.error(error_title + repr(exp))
            raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("ServiceLinkRecords Fetched")

        # Response data container
        try:
            db_entry_list = db_entries
            response_data = {}
            response_data['data'] = db_entry_list
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))

        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
class AccountServiceLinkRecord(Resource):
    """REST resource for a single ServiceLinkRecord of an Account."""

    @requires_api_auth_user
    def get(self, account_id, slr_id):
        """
        Fetch one ServiceLinkRecord of an Account.

        :param account_id: id of the Account (path parameter)
        :param slr_id: id of the ServiceLinkRecord (path parameter)
        :return: JSON response (200) with the entry under 'data'
        :raises ApiError: 400 on bad ids, 404 when not found, 500 on response build failure
        """
        logger.info("AccountServiceLinkRecord")
        # Resolve the endpoint string for error reporting; fall back to module name.
        try:
            endpoint = str(api.url_for(self, account_id=account_id, slr_id=slr_id))
        except Exception as exp:
            endpoint = str(__name__)

        try:
            logger.info("Fetching Api-Key from Headers")
            api_key = request.headers.get('Api-Key')
        except Exception as exp:
            # Fix: log repr(exp) once (was repr(repr(exp))).
            logger.error("No ApiKey in headers: " + repr(exp))
            return provideApiKey(endpoint=endpoint)
        else:
            logger.info("Api-Key: " + api_key)

        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title)
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("account_id: " + account_id)

        try:
            slr_id = str(slr_id)
        except Exception as exp:
            error_title = "Unsupported slr_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("slr_id: " + slr_id)

        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
            logger.info("Account IDs are matching")

        # Get ServiceLinkRecord
        try:
            logger.info("Fetching ServiceLinkRecord")
            db_entries = get_slr(account_id=account_id, slr_id=slr_id)
        except Exception as exp:
            error_title = "No ServiceLinkRecord found"
            logger.error(error_title)
            raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("ServiceLinkRecord Fetched")

        # Response data container
        try:
            response_data = {}
            response_data['data'] = db_entries
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))

        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
class AccountServiceLinkStatusRecords(Resource):
    """REST resource for the ServiceLinkStatusRecords of one ServiceLinkRecord."""

    @requires_api_auth_user
    def get(self, account_id, slr_id):
        """
        Fetch all ServiceLinkStatusRecords of a ServiceLinkRecord.

        :param account_id: id of the Account (path parameter)
        :param slr_id: id of the ServiceLinkRecord (path parameter)
        :return: JSON response (200) with entries under 'data'
        :raises ApiError: 400 on bad ids, 403 when records are not accessible,
                          404 when none found, 500 on response build failure
        """
        logger.info("AccountServiceLinkStatusRecords")
        # Resolve the endpoint string for error reporting; fall back to module name.
        try:
            endpoint = str(api.url_for(self, account_id=account_id, slr_id=slr_id))
        except Exception as exp:
            endpoint = str(__name__)

        try:
            logger.info("Fetching Api-Key from Headers")
            api_key = request.headers.get('Api-Key')
        except Exception as exp:
            # Fix: log repr(exp) once (was repr(repr(exp))).
            logger.error("No ApiKey in headers: " + repr(exp))
            return provideApiKey(endpoint=endpoint)
        else:
            logger.info("Api-Key: " + api_key)

        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title)
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("account_id: " + account_id)

        try:
            slr_id = str(slr_id)
        except Exception as exp:
            error_title = "Unsupported slr_id"
            logger.error(error_title)
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("slr_id: " + slr_id)

        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint):
            logger.info("Account IDs are matching")

        # Get ServiceLinkStatusRecords
        try:
            logger.info("Fetching ServiceLinkStatusRecords")
            db_entries = get_slsrs(account_id=account_id, slr_id=slr_id)
        except StandardError as exp:
            # StandardError is Python 2 only (this codebase runs on python:2.7);
            # presumably raised by get_slsrs for access violations — TODO confirm.
            error_title = "ServiceLinkStatusRecords not accessible"
            logger.error(error_title + ": " + repr(exp))
            raise ApiError(code=403, title=error_title, detail=repr(exp), source=endpoint)
        except Exception as exp:
            error_title = "No ServiceLinkStatusRecords found"
            logger.error(error_title + repr(exp))
            raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("ServiceLinkStatusRecords Fetched")

        # Response data container
        try:
            db_entry_list = db_entries
            response_data = {}
            response_data['data'] = db_entry_list
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))

        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
response_data_dict = dict(response_data) + logger.debug('response_data_dict: ' + repr(response_data_dict)) + return make_json_response(data=response_data_dict, status_code=200) + + +class AccountServiceLinkStatusRecord(Resource): + @requires_api_auth_user + def get(self, account_id, slr_id, slsr_id): + logger.info("AccountServiceLinkStatusRecord") + try: + endpoint = str(api.url_for(self, account_id=account_id, slr_id=slr_id, slsr_id=slsr_id)) + except Exception as exp: + endpoint = str(__name__) + + try: + logger.info("Fetching Api-Key from Headers") + api_key = request.headers.get('Api-Key') + except Exception as exp: + logger.error("No ApiKey in headers: " + repr(repr(exp))) + return provideApiKey(endpoint=endpoint) + else: + logger.info("Api-Key: " + api_key) + + try: + account_id = str(account_id) + except Exception as exp: + error_title = "Unsupported account_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("account_id: " + account_id) + + try: + slr_id = str(slr_id) + except Exception as exp: + error_title = "Unsupported slr_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("slr_id: " + slr_id) + + try: + slsr_id = str(slsr_id) + except Exception as exp: + error_title = "Unsupported slsr_id" + logger.error(error_title + repr(exp)) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("slsr_id: " + slsr_id) + + # Check if Account IDs from path and ApiKey are matching + if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint): + logger.info("Account IDs are matching") + + # Get ServiceLinkStatusRecord + try: + logger.info("Fetching ServiceLinkStatusRecord") + db_entries = get_slsr(account_id=account_id, slr_id=slr_id, slsr_id=slsr_id) + except StandardError as exp: + error_title = "ServiceLinkStatusRecords not 
accessible" + logger.error(error_title + repr(exp)) + raise ApiError(code=403, title=error_title, detail=repr(exp), source=endpoint) + except Exception as exp: + error_title = "No tServiceLinkStatusRecord found" + logger.error(error_title) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("ServiceLinkStatusRecord Fetched") + + # Response data container + try: + response_data = {} + response_data['data'] = db_entries + except Exception as exp: + logger.error('Could not prepare response data: ' + repr(exp)) + raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) + else: + logger.info('Response data ready') + logger.debug('response_data: ' + repr(response_data)) + + response_data_dict = dict(response_data) + logger.debug('response_data_dict: ' + repr(response_data_dict)) + return make_json_response(data=response_data_dict, status_code=200) + + +class AccountConsentRecords(Resource): + @requires_api_auth_user + def get(self, account_id, slr_id): + logger.info("AccountConsentRecords") + try: + endpoint = str(api.url_for(self, account_id=account_id, slr_id=slr_id)) + except Exception as exp: + endpoint = str(__name__) + + try: + logger.info("Fetching Api-Key from Headers") + api_key = request.headers.get('Api-Key') + except Exception as exp: + logger.error("No ApiKey in headers: " + repr(repr(exp))) + return provideApiKey(endpoint=endpoint) + else: + logger.info("Api-Key: " + api_key) + + try: + account_id = str(account_id) + except Exception as exp: + error_title = "Unsupported account_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("account_id: " + account_id) + + try: + slr_id = str(slr_id) + except Exception as exp: + error_title = "Unsupported slr_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + 
logger.info("slr_id: " + slr_id) + + # Check if Account IDs from path and ApiKey are matching + if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint): + logger.info("Account IDs are matching") + + # Get ConsentRecords + try: + logger.info("Fetching ConsentRecords") + db_entries = get_crs(account_id=account_id, slr_id=slr_id) + except StandardError as exp: + error_title = "ConsentRecords not accessible" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=403, title=error_title, detail=repr(exp), source=endpoint) + except Exception as exp: + error_title = "No ConsentRecords found" + logger.error(error_title + repr(exp)) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("ConsentRecords Fetched") + + # Response data container + try: + db_entry_list = db_entries + response_data = {} + response_data['data'] = db_entry_list + except Exception as exp: + logger.error('Could not prepare response data: ' + repr(exp)) + raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) + else: + logger.info('Response data ready') + logger.debug('response_data: ' + repr(response_data)) + + response_data_dict = dict(response_data) + logger.debug('response_data_dict: ' + repr(response_data_dict)) + return make_json_response(data=response_data_dict, status_code=200) + + +class AccountConsentRecord(Resource): + @requires_api_auth_user + def get(self, account_id, slr_id, cr_id): + logger.info("AccountConsentRecord") + try: + endpoint = str(api.url_for(self, account_id=account_id, slr_id=slr_id, cr_id=cr_id)) + except Exception as exp: + endpoint = str(__name__) + + try: + logger.info("Fetching Api-Key from Headers") + api_key = request.headers.get('Api-Key') + except Exception as exp: + logger.error("No ApiKey in headers: " + repr(repr(exp))) + return provideApiKey(endpoint=endpoint) + else: + logger.info("Api-Key: " + api_key) + + try: + account_id 
= str(account_id) + except Exception as exp: + error_title = "Unsupported account_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("account_id: " + account_id) + + try: + slr_id = str(slr_id) + except Exception as exp: + error_title = "Unsupported slr_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("slr_id: " + slr_id) + + try: + cr_id = str(cr_id) + except Exception as exp: + error_title = "Unsupported cr_id" + logger.error(error_title + repr(exp)) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("cr_id: " + cr_id) + + # Check if Account IDs from path and ApiKey are matching + if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint): + logger.info("Account IDs are matching") + + # Get ServiceLinkStatusRecord + try: + logger.info("Fetching ConsentRecord") + db_entries = get_cr(account_id=account_id, slr_id=slr_id, cr_id=cr_id) + except StandardError as exp: + error_title = "ConsentRecord not accessible" + logger.error(error_title + repr(exp)) + raise ApiError(code=403, title=error_title, detail=repr(exp), source=endpoint) + except Exception as exp: + error_title = "No ConsentRecord found" + logger.error(error_title) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("ConsentRecord Fetched") + + # Response data container + try: + response_data = {} + response_data['data'] = db_entries + except Exception as exp: + logger.error('Could not prepare response data: ' + repr(exp)) + raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) + else: + logger.info('Response data ready') + logger.debug('response_data: ' + repr(response_data)) + + response_data_dict = dict(response_data) + logger.debug('response_data_dict: ' + 
repr(response_data_dict)) + return make_json_response(data=response_data_dict, status_code=200) + + +class AccountConsentStatusRecords(Resource): + @requires_api_auth_user + def get(self, account_id, slr_id, cr_id): + logger.info("AccountConsentStatusRecords") + try: + endpoint = str(api.url_for(self, account_id=account_id, slr_id=slr_id, cr_id=cr_id)) + except Exception as exp: + endpoint = str(__name__) + + try: + logger.info("Fetching Api-Key from Headers") + api_key = request.headers.get('Api-Key') + except Exception as exp: + logger.error("No ApiKey in headers: " + repr(repr(exp))) + return provideApiKey(endpoint=endpoint) + else: + logger.info("Api-Key: " + api_key) + + try: + account_id = str(account_id) + except Exception as exp: + error_title = "Unsupported account_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("account_id: " + account_id) + + try: + slr_id = str(slr_id) + except Exception as exp: + error_title = "Unsupported slr_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("slr_id: " + slr_id) + + try: + cr_id = str(cr_id) + except Exception as exp: + error_title = "Unsupported cr_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("cr_id: " + cr_id) + + # Check if Account IDs from path and ApiKey are matching + if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint): + logger.info("Account IDs are matching") + + # Get ConsentStatusRecords + try: + logger.info("Fetching ConsentStatusRecords") + db_entries = get_csrs(account_id=account_id, slr_id=slr_id, cr_id=cr_id) + except StandardError as exp: + error_title = "ConsentStatusRecords not accessible" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=403, title=error_title, detail=repr(exp), 
source=endpoint) + except Exception as exp: + error_title = "No ConsentStatusRecords found" + logger.error(error_title + repr(exp)) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("ConsentStatusRecords Fetched") + + # Response data container + try: + db_entry_list = db_entries + response_data = {} + response_data['data'] = db_entry_list + except Exception as exp: + logger.error('Could not prepare response data: ' + repr(exp)) + raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) + else: + logger.info('Response data ready') + logger.debug('response_data: ' + repr(response_data)) + + response_data_dict = dict(response_data) + logger.debug('response_data_dict: ' + repr(response_data_dict)) + return make_json_response(data=response_data_dict, status_code=200) + + +class AccountConsentStatusRecord(Resource): + @requires_api_auth_user + def get(self, account_id, slr_id, cr_id, csr_id): + logger.info("AccountConsentStatusRecord") + try: + endpoint = str(api.url_for(self, account_id=account_id, slr_id=slr_id, cr_id=cr_id, csr_id=csr_id)) + except Exception as exp: + endpoint = str(__name__) + + try: + logger.info("Fetching Api-Key from Headers") + api_key = request.headers.get('Api-Key') + except Exception as exp: + logger.error("No ApiKey in headers: " + repr(repr(exp))) + return provideApiKey(endpoint=endpoint) + else: + logger.info("Api-Key: " + api_key) + + try: + account_id = str(account_id) + except Exception as exp: + error_title = "Unsupported account_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("account_id: " + account_id) + + try: + slr_id = str(slr_id) + except Exception as exp: + error_title = "Unsupported slr_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("slr_id: " + slr_id) + + 
try: + cr_id = str(cr_id) + except Exception as exp: + error_title = "Unsupported cr_id" + logger.error(error_title + repr(exp)) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("cr_id: " + cr_id) + + try: + csr_id = str(csr_id) + except Exception as exp: + error_title = "Unsupported csr_id" + logger.error(error_title + repr(exp)) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("csr_id: " + csr_id) + + # Check if Account IDs from path and ApiKey are matching + if verify_account_id_match(account_id=account_id, api_key=api_key, endpoint=endpoint): + logger.info("Account IDs are matching") + + # Get ConsentStatusRecord + try: + logger.info("Fetching ConsentStatusRecord") + db_entries = get_csr(account_id=account_id, slr_id=slr_id, cr_id=cr_id, csr_id=csr_id) + except StandardError as exp: + error_title = "ConsentStatusRecord not accessible" + logger.error(error_title + repr(exp)) + raise ApiError(code=403, title=error_title, detail=repr(exp), source=endpoint) + except Exception as exp: + error_title = "No ConsentStatusRecord found" + logger.error(error_title) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("ConsentStatusRecord Fetched") + + # Response data container + try: + response_data = {} + response_data['data'] = db_entries + except Exception as exp: + logger.error('Could not prepare response data: ' + repr(exp)) + raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) + else: + logger.info('Response data ready') + logger.debug('response_data: ' + repr(response_data)) + + response_data_dict = dict(response_data) + logger.debug('response_data_dict: ' + repr(response_data_dict)) + return make_json_response(data=response_data_dict, status_code=200) + + + +# Register resources +api.add_resource(Accounts, '/api/accounts/', '/', endpoint='/api/accounts/') 
+api.add_resource(AccountExport, '/api/accounts//export/', endpoint='account-export') +api.add_resource(AccountParticulars, '/api/accounts//particulars/', endpoint='account-particulars') +api.add_resource(AccountParticular, '/api/accounts//particulars//', endpoint='account-particular') +api.add_resource(AccountContacts, '/api/accounts//contacts/', endpoint='account-contacts') +api.add_resource(AccountContact, '/api/accounts//contacts//', endpoint='account-contact') +api.add_resource(AccountEmails, '/api/accounts//emails/', endpoint='account-emails') +api.add_resource(AccountEmail, '/api/accounts//emails//', endpoint='account-email') +api.add_resource(AccountTelephones, '/api/accounts//telephones/', endpoint='account-telephones') +api.add_resource(AccountTelephone, '/api/accounts//telephones//', endpoint='account-telephone') +api.add_resource(AccountSettings, '/api/accounts//settings/', endpoint='account-settings') +api.add_resource(AccountSetting, '/api/accounts//settings//', endpoint='account-setting') +api.add_resource(AccountEventLogs, '/api/accounts//logs/events/', endpoint='account-events') +api.add_resource(AccountEventLog, '/api/accounts//logs/events//', endpoint='account-event') +api.add_resource(AccountServiceLinkRecords, '/api/accounts//servicelinks/', endpoint='account-slrs') +api.add_resource(AccountServiceLinkRecord, '/api/accounts//servicelinks//', endpoint='account-slr') +api.add_resource(AccountServiceLinkStatusRecords, '/api/accounts//servicelinks//statuses/', endpoint='account-slsrs') +api.add_resource(AccountServiceLinkStatusRecord, '/api/accounts//servicelinks//statuses//', endpoint='account-slsr') +api.add_resource(AccountConsentRecords, '/api/accounts//servicelinks//consents/', endpoint='account-crs') +api.add_resource(AccountConsentRecord, '/api/accounts//servicelinks//consents//', endpoint='account-cr') +api.add_resource(AccountConsentStatusRecords, '/api/accounts//servicelinks//consents//statuses/', endpoint='account-csrs') 
+api.add_resource(AccountConsentStatusRecord, '/api/accounts//servicelinks//consents//statuses//', endpoint='account-csr') + -# Register resources -api.add_resource(Accounts, '/api/accounts/', '/', endpoint='/api/accounts/') -api.add_resource(ExportAccount, '/api/account//export/', endpoint='account-export') diff --git a/Account/app/mod_account/view_html.py b/Account/app/mod_account/view_html.py index 64ea0d9..50ab1d4 100644 --- a/Account/app/mod_account/view_html.py +++ b/Account/app/mod_account/view_html.py @@ -18,8 +18,6 @@ # Import services from app.helpers import get_custom_logger -from app.mod_account.controllers import get_service_link_record_count, get_consent_record_count, get_telephones, \ - get_emails, get_contacts, get_passive_consents_count, get_potential_services_count, get_potential_consents_count from app.mod_api_auth.controllers import get_account_api_key from app.mod_database.helpers import get_db_cursor @@ -47,66 +45,5 @@ def get(self): return make_response(render_template('profile/index.html', content_data=content_data), 200, headers) -class Details(Resource): - @login_required - def get(self): - - account_id = session['user_id'] - logger.debug('Account id: ' + account_id) - - cursor = get_db_cursor() - - cursor, service_link_record_count = get_service_link_record_count(cursor=cursor, account_id=account_id) - cursor, consent_count = get_consent_record_count(cursor=cursor, account_id=account_id) - - cursor, contacts = get_contacts(cursor=cursor, account_id=account_id) - cursor, emails = get_emails(cursor=cursor, account_id=account_id) - cursor, telephones = get_telephones(cursor=cursor, account_id=account_id) - - cursor, potential_services = get_potential_services_count(cursor=cursor, account_id=account_id) - cursor, potential_consents = get_potential_consents_count(cursor=cursor, account_id=account_id) - cursor, passive_services = get_potential_services_count(cursor=cursor, account_id=account_id) - cursor, passive_consents = 
get_passive_consents_count(cursor=cursor, account_id=account_id) - - content_data = { - 'service_link_record_count': service_link_record_count, - 'consent_count': consent_count, - 'contacts': contacts, - 'emails': emails, - 'telephones': telephones, - 'potential_services': potential_services, - 'potential_consents': potential_consents, - 'passive_services': passive_services, - 'passive_consents': passive_consents - } - - headers = {'Content-Type': 'text/html'} - return make_response(render_template('profile/details.html', content_data=content_data), 200, headers) - - -class Settings(Resource): - @login_required - def get(self): - account_id = session['user_id'] - logger.debug('Account id: ' + account_id) - - content_data = { - 'service_link_record_count': None, - 'consent_count': None, - 'contacts': None, - 'emails': None, - 'telephones': None, - 'potential_services': None, - 'potential_consents': None, - 'passive_services': None, - 'passive_consents': None - } - - headers = {'Content-Type': 'text/html'} - return make_response(render_template('profile/settings.html', content_data=content_data), 200, headers) - - # Register resources api.add_resource(Home, '/html/account/home/', '/', endpoint='home') -api.add_resource(Details, '/html/account/details/', endpoint='details') -api.add_resource(Settings, '/html/account/settings/', endpoint='settings') diff --git a/Account/app/mod_api_auth/controllers.py b/Account/app/mod_api_auth/controllers.py index 2988fc5..6dd84dc 100644 --- a/Account/app/mod_api_auth/controllers.py +++ b/Account/app/mod_api_auth/controllers.py @@ -99,27 +99,35 @@ def get_account_api_key(account_id=None): :param account_id: :return: API Key """ + logger.info("Get Account APIKey by Account ID") + if account_id is None: raise AttributeError("Provide account_id as parameter") try: + logger.info("Getting DB connection") connection = get_sqlite_connection() except Exception as exp: exp = append_description_to_exception(exp=exp, description='Could not get 
connection SQL database.') logger.error('Could not get connection SQL database: ' + repr(exp)) raise + else: + logger.info("Got DB connection") try: + logger.info("Getting DB cursor") cursor, connection = get_sqlite_cursor(connection=connection) except Exception as exp: exp = append_description_to_exception(exp=exp, description='Could not get cursor for database connection') logger.error('Could not get cursor for database connection: ' + repr(exp)) raise + else: + logger.info("Got DB cursor") try: cursor, api_key = get_api_key(account_id=account_id, cursor=cursor) except Exception as exp: - exp = append_description_to_exception(exp=exp, description='Could not API key from database') + exp = append_description_to_exception(exp=exp, description='Could not find API key from database') logger.error('Could not get API key from database: ' + repr(exp)) connection.rollback() connection.close() @@ -155,6 +163,7 @@ def get_account_id_by_api_key(api_key=None): raise try: + logger.info("Fetching Account ID") cursor, account_id = get_account_id(api_key=api_key, cursor=cursor) except Exception as exp: exp = append_description_to_exception(exp=exp, description='Could not Account ID from database') @@ -164,11 +173,13 @@ def get_account_id_by_api_key(api_key=None): raise else: connection.close() - logger.debug('Account ID fetched') + logger.info('Account ID fetched') + logger.info('account_id: ' + str(account_id)) return account_id def check_api_auth_user(api_key): + logger.info("Checking Api-Key") try: logger.debug("Fetching Account ID") account_id = get_account_id_by_api_key(api_key=api_key) @@ -220,17 +231,21 @@ def requires_api_auth_user(f): @wraps(f) def decorated(*args, **kwargs): api_key = None + logger.info("Verifying Api-Key") try: api_key = request.headers.get('Api-Key') if api_key is None: - raise AttributeError('No API Key in Request Headers') + raise AttributeError('No Api-Key in Request Headers') except Exception as exp: - logger.debug("No ApiKey in headers: " + 
repr(exp)) + logger.debug("No Api-Key in headers: " + repr(exp)) return provideApiKey() else: + logger.info("Provided Api-Key: " + str(api_key)) if not check_api_auth_user(api_key=api_key): - logger.debug("Wrong API Key") + logger.debug("Wrong Api-Key") return wrongApiKey() + logger.info("Correct Api-Key") + logger.info("User Authenticated") return f(*args, **kwargs) return decorated diff --git a/Account/app/mod_api_auth/view_api.py b/Account/app/mod_api_auth/view_api.py index fe13e77..0a7928f 100644 --- a/Account/app/mod_api_auth/view_api.py +++ b/Account/app/mod_api_auth/view_api.py @@ -18,8 +18,9 @@ from flask_restful import Resource, Api, reqparse from app import api -from app.helpers import get_custom_logger, make_json_response +from app.helpers import get_custom_logger, make_json_response, ApiError from app.mod_api_auth.controllers import get_account_api_key, get_api_key_sdk +from app.mod_api_auth.helpers import ApiKeyNotFoundError from app.mod_auth.helpers import get_account_id_by_username_and_password logger = get_custom_logger('mod_api_auth_view_api') @@ -37,14 +38,16 @@ def check_basic_auth(self, username, password): """ This function is called to check if a username password combination is valid. """ + logger.info("Checking username and password") user = get_account_id_by_username_and_password(username=username, password=password) logger.debug("User with following info: " + str(user)) if user is not None: self.account_id = user['account_id'] self.username = user['username'] - logger.debug("User info set") + logger.info("User authenticated") return True else: + logger.info("User not authenticated") return False @@ -53,21 +56,43 @@ def authenticate(): """Sends a 401 response that enables basic auth""" headers = {'WWW-Authenticate': 'Basic realm="Login Required"'} body = 'Could not verify your access level for that URL. 
\n You have to login with proper credentials' + logger.info("Authentication required") return make_response(body, 401, headers) def get(self): - # account_id = session['user_id'] - # logger.debug('Account id: ' + account_id) + try: + endpoint = str(api.url_for(self)) + except Exception as exp: + endpoint = str(__name__) + + logger.info("Authenticating user") auth = request.authorization if not auth or not self.check_basic_auth(auth.username, auth.password): return self.authenticate() - - api_key = get_account_api_key(account_id=self.account_id) + else: + logger.info("Authenticated") + + try: + api_key = get_account_api_key(account_id=self.account_id) + except ApiKeyNotFoundError as exp: + error_title = "ApiKey not found for authenticated user" + logger.error(error_title) + logger.error(repr(exp)) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + except Exception as exp: + error_title = "Could not get ApiKey for authenticated user" + logger.error(error_title) + logger.error(repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.debug("account_id: " + str(self.account_id)) + logger.debug("api_key: " + str(api_key)) response_data = { - 'api_key': api_key + 'Api-Key': api_key, + 'account_id': str(self.account_id) } return make_json_response(data=response_data, status_code=200) diff --git a/Account/app/mod_auth/helpers.py b/Account/app/mod_auth/helpers.py index 241e7b7..4cc047e 100644 --- a/Account/app/mod_auth/helpers.py +++ b/Account/app/mod_auth/helpers.py @@ -191,7 +191,7 @@ def get_account_id_by_username_and_password(username=None, password=None): try: ### # User info by username - logger.debug('credentials') + logger.debug('User info by username from DB') sql_query = "SELECT " \ "MyDataAccount.LocalIdentities.Accounts_id, " \ "MyDataAccount.LocalIdentities.id, " \ @@ -230,17 +230,16 @@ def get_account_id_by_username_and_password(username=None, password=None): else: 
logger.debug('User found with given username: ' + username) - if app.config["SUPER_DEBUG"]: - logger.debug('account_id_from_db: ' + account_id_from_db) - logger.debug('identity_id_from_db: ' + identity_id_from_db) - logger.debug('username_from_db: ' + username_from_db) - logger.debug('password_from_db: ' + password_from_db) - logger.debug('salt_from_db: ' + salt_from_db) + logger.debug('account_id_from_db: ' + account_id_from_db) + logger.debug('identity_id_from_db: ' + identity_id_from_db) + logger.debug('username_from_db: ' + username_from_db) + logger.debug('password_from_db: ' + password_from_db) + logger.debug('salt_from_db: ' + salt_from_db) + logger.info("Checking password") if bcrypt.hashpw(password_to_check, salt_from_db) == password_from_db: - if app.config["SUPER_DEBUG"]: - logger.debug('Password hash from client: ' + bcrypt.hashpw(password_to_check, salt_from_db)) - logger.debug('Password hash from db : ' + password_from_db) + logger.debug('Password hash from client: ' + bcrypt.hashpw(password_to_check, salt_from_db)) + logger.debug('Password hash from db : ' + password_from_db) logger.debug('Authenticated') #cursor, user = get_account_by_id(cursor=cursor, account_id=int(account_id_from_db)) @@ -249,9 +248,8 @@ def get_account_id_by_username_and_password(username=None, password=None): return user else: - if app.config["SUPER_DEBUG"]: - logger.debug('Password hash from client: ' + bcrypt.hashpw(password_to_check, salt_from_db)) - logger.debug('Password hash from db : ' + password_from_db) + logger.debug('Password hash from client: ' + bcrypt.hashpw(password_to_check, salt_from_db)) + logger.debug('Password hash from db : ' + password_from_db) logger.debug('Not Authenticated') return None diff --git a/Account/app/mod_authorization/controllers.py b/Account/app/mod_authorization/controllers.py index 900e998..425dceb 100644 --- a/Account/app/mod_authorization/controllers.py +++ b/Account/app/mod_authorization/controllers.py @@ -21,15 +21,119 @@ # Import 
services from app.helpers import get_custom_logger, ApiError, get_utc_time from app.mod_blackbox.controllers import get_account_public_key, generate_and_sign_jws -from app.mod_database.helpers import get_db_cursor - +from app.mod_database.helpers import get_db_cursor, get_last_csr_id, get_csr_ids, get_account_id_by_csr_id # create logger with 'spam_application' -from app.mod_database.models import SurrogateId, ConsentRecord, ServiceLinkRecord +from app.mod_database.models import SurrogateId, ConsentRecord, ServiceLinkRecord, ConsentStatusRecord, Account logger = get_custom_logger(__name__) +def get_account_id_by_cr(cr_id=None, endpoint="get_account_id_by_cr(cr_id, endpoint)"): + if cr_id is None: + raise AttributeError("Provide cr_id as parameter") + + logger.info("Executing for: " + str(endpoint)) + + ## + # Account + try: + logger.info("Create Account object") + account_entry = Account() + except Exception as exp: + error_title = "Failed to create Account object" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("account_entry: " + account_entry.log_entry) + + # Get database table name for Consent Status Record + try: + logger.info("Get Account table name") + account_table_name = account_entry.table_name + except Exception as exp: + error_title = "Failed to get Account table name" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("Got Account table name: " + str(account_table_name)) + + ## + # ServiceLinkRecord + try: + logger.info("Create ServiceLinkRecord object") + slr_entry = ServiceLinkRecord() + except Exception as exp: + error_title = "Failed to create ServiceLinkRecord object" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("slr_entry: " + 
slr_entry.log_entry) + + # Get database table name for Consent Status Record + try: + logger.info("Get ServiceLinkRecord table name") + slr_table_name = slr_entry.table_name + except Exception as exp: + error_title = "Failed to get ServiceLinkRecord table name" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("Got ServiceLinkRecord table name: " + str(slr_table_name)) + + ## + # ConsentRecord + try: + logger.info("Create ConsentRecord object") + cr_entry = ConsentRecord() + except Exception as exp: + error_title = "Failed to create Consent Record object" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("cr_entry: " + cr_entry.log_entry) + + # Get database table name for Consent Status Record + try: + logger.info("Get Consent Record table name") + cr_table_name = cr_entry.table_name + except Exception as exp: + error_title = "Failed to get Consent Record table name" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("Got Consent Record table name: " + str(cr_table_name)) + + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise ApiError(code=500, title="Failed to get database cursor", detail=repr(exp), source=endpoint) + + # Get Account ID + try: + logger.info("Get Account ID") + cursor, account_id = get_account_id_by_csr_id( + cursor=cursor, + cr_id=cr_id, + acc_table_name=account_table_name, + slr_table_name=slr_table_name, + cr_table_name=cr_table_name + ) + except IndexError as exp: + error_title = "Account ID Not Found" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + except Exception as exp: + 
error_title = "Failed to get Account ID" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("Got Account ID: " + str(cr_table_name)) + return account_id + + def sign_cr(account_id=None, payload=None, endpoint="sign_slr(account_id, payload, endpoint)"): if account_id is None: raise AttributeError("Provide account_id as parameter") @@ -48,33 +152,26 @@ def sign_cr(account_id=None, payload=None, endpoint="sign_slr(account_id, payloa else: logger.info("Account owner's public key and kid fetched") - # Fill timestamp to created in slr - try: - timestamp_to_fill = get_utc_time() - except Exception as exp: - logger.error("Could not get UTC time: " + repr(exp)) - raise ApiError(code=500, title="Could not get UTC time", detail=repr(exp), source=endpoint) - else: - logger.info("timestamp_to_fill: " + timestamp_to_fill) - - try: - payload['common_part']['issued'] = timestamp_to_fill - except Exception as exp: - logger.error("Could not fill timestamp to created in cr: " + repr(exp)) - raise ApiError(code=500, title="Failed to fill timestamp to created in cr", detail=repr(exp), source=endpoint) - else: - logger.info("Timestamp filled to issued in cr") - # Sign cr try: - cr_signed = generate_and_sign_jws(account_id=account_id, jws_payload=json.dumps(payload)) + cr_signed_json = generate_and_sign_jws(account_id=account_id, jws_payload=json.dumps(payload)) except Exception as exp: logger.error('Could not create Consent Record: ' + repr(exp)) raise ApiError(code=500, title="Failed to create Consent Record", detail=repr(exp), source=endpoint) else: - logger.info('Service Link Record created and signed') - logger.debug('cr_signed: ' + cr_signed) - return cr_signed, timestamp_to_fill + logger.info('Consent Record created and signed') + logger.debug('cr_signed_json: ' + cr_signed_json) + try: + logger.info("Converting signed CR from json to dict") + cr_signed_dict = 
json.loads(cr_signed_json) + except Exception as exp: + logger.error('Could not convert signed CSR from json to dict: ' + repr(exp)) + raise ApiError(code=500, title="Failed to convert signed CSR from json to dict", detail=repr(exp), source=endpoint) + else: + logger.info('Converted signed CR from json to dict') + logger.debug('cr_signed_dict: ' + json.dumps(cr_signed_dict)) + + return cr_signed_dict def sign_csr(account_id=None, payload=None, endpoint="sign_csr(account_id, payload, endpoint)"): @@ -85,33 +182,26 @@ def sign_csr(account_id=None, payload=None, endpoint="sign_csr(account_id, paylo logger.info("Signing Service Link Status Record") - # Fill timestamp to created in slr - try: - timestamp_to_fill = get_utc_time() - except Exception as exp: - logger.error("Could not get UTC time: " + repr(exp)) - raise ApiError(code=500, title="Could not get UTC time", detail=repr(exp), source=endpoint) - else: - logger.info("timestamp_to_fill: " + timestamp_to_fill) - - try: - payload['iat'] = timestamp_to_fill - except Exception as exp: - logger.error("Could not fill timestamp to iat in csr_payload: " + repr(exp)) - raise ApiError(code=500, title="Failed to fill timestamp to iat in csr_payload", detail=repr(exp), source=endpoint) - else: - logger.info("Timestamp filled to created in csr_payload") - # Sign csr try: - csr_signed = generate_and_sign_jws(account_id=account_id, jws_payload=json.dumps(payload)) + csr_signed_json = generate_and_sign_jws(account_id=account_id, jws_payload=json.dumps(payload)) except Exception as exp: logger.error('Could not create Consent Status Record: ' + repr(exp)) raise ApiError(code=500, title="Failed to create Consent Status Record", detail=repr(exp), source=endpoint) else: - logger.info('SConsent Status Record created and signed') - logger.debug('csr_signed: ' + csr_signed) - return csr_signed, timestamp_to_fill + logger.info('Consent Status Record created and signed') + logger.debug('csr_signed_json: ' + csr_signed_json) + try: + 
logger.info("Converting signed CSR from json to dict") + csr_signed_dict = json.loads(csr_signed_json) + except Exception as exp: + logger.error('Could not convert signed CSR from json to dict: ' + repr(exp)) + raise ApiError(code=500, title="Failed to convert signed CSR from json to dict", detail=repr(exp), source=endpoint) + else: + logger.info('Converted signed CSR from json to dict') + logger.debug('csr_signed_dict: ' + json.dumps(csr_signed_dict)) + + return csr_signed_dict def store_cr_and_csr(source_slr_entry=None, sink_slr_entry=None, source_cr_entry=None, source_csr_entry=None, sink_cr_entry=None, sink_csr_entry=None, endpoint="store_cr_and_csr()"): @@ -138,102 +228,112 @@ def store_cr_and_csr(source_slr_entry=None, sink_slr_entry=None, source_cr_entry try: # Get Source's SLR from DB try: + logger.info("Get Source SLR from database") cursor = source_slr_entry.from_db(cursor=cursor) except Exception as exp: error_title = "Failed to fetch Source's SLR from DB" logger.error(error_title + ": " + repr(exp)) raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) - finally: + else: logger.debug("source_slr_entry: " + source_slr_entry.log_entry) # Get Sink's SLR from DB try: + logger.info("Get Sink SLR from database") cursor = sink_slr_entry.from_db(cursor=cursor) except Exception as exp: error_title = "Failed to fetch Sink's SLR from DB" logger.error(error_title + ": " + repr(exp)) raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) - finally: + else: logger.debug("sink_slr_entry: " + sink_slr_entry.log_entry) # Get Source's SLR ID try: + logger.info("Source SLR ID to Source CR") source_cr_entry.service_link_records_id = source_slr_entry.id except Exception as exp: error_title = "Failed to fetch Source's Service Link Record ID" logger.error(error_title + ": " + repr(exp)) raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) - finally: + else: logger.debug("source_cr_entry: " + 
source_cr_entry.log_entry) # Get Sink's SLR ID try: + logger.info("Sink SLR ID to Sink CR") sink_cr_entry.service_link_records_id = sink_slr_entry.id except Exception as exp: error_title = "Failed to fetch Sink's Service Link Record ID" logger.error(error_title + ": " + repr(exp)) raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) - finally: + else: logger.debug("sink_cr_entry: " + sink_cr_entry.log_entry) # Store Source CR try: + logger.info("Store Source CR") cursor = source_cr_entry.to_db(cursor=cursor) except Exception as exp: error_title = "Failed to store Source's Consent Record" logger.error(error_title + ": " + repr(exp)) raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) - finally: + else: logger.debug("source_cr_entry: " + source_cr_entry.log_entry) # Link Source's CSR with it's CR try: + logger.info("Source CR ID to Source CSR") source_csr_entry.consent_records_id = source_cr_entry.id except Exception as exp: error_title = "Failed to link Source's CSR with it's CR" logger.error(error_title + ": " + repr(exp)) raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) - finally: + else: logger.debug(source_csr_entry.log_entry) # Store Source CSR try: + logger.info("Store Source CSR") cursor = source_csr_entry.to_db(cursor=cursor) except Exception as exp: error_title = "Failed to store Source's Consent Status Record" logger.error(error_title + ": " + repr(exp)) raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) - finally: + else: logger.debug("source_csr_entry: " + source_csr_entry.log_entry) # Store Sink CR try: + logger.info("Store Sink CR") cursor = sink_cr_entry.to_db(cursor=cursor) except Exception as exp: error_title = "Failed to store Sink's Consent Record" logger.error(error_title + ": " + repr(exp)) raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) - finally: + else: logger.debug("sink_cr_entry: " + 
sink_cr_entry.log_entry) # Link Sink's CSR with it's CR try: + logger.info("Sink CR ID to Sink CSR") sink_csr_entry.consent_records_id = sink_cr_entry.id except Exception as exp: error_title = "Failed to link Sink's CSR with it's CR" logger.error(error_title + ": " + repr(exp)) raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) - finally: + else: logger.debug("sink_csr_entry: " + sink_csr_entry.log_entry) # Store Sink CSR try: + logger.info("Store Sink CSR") cursor = sink_csr_entry.to_db(cursor=cursor) except Exception as exp: error_title = "Failed to store Sink's Consent Status Record" logger.error(error_title + ": " + repr(exp)) raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) - finally: + else: logger.debug("sink_csr_entry: " + sink_csr_entry.log_entry) # Commit @@ -246,23 +346,7 @@ def store_cr_and_csr(source_slr_entry=None, sink_slr_entry=None, source_cr_entry raise else: logger.info("CR's and CSR's commited") - - try: - data = { - 'source': { - 'CR': source_cr_entry.to_dict, - 'CSR': source_csr_entry.to_dict - }, - 'sink': { - 'CR': sink_cr_entry.to_dict, - 'CSR': sink_csr_entry.to_dict - } - } - except Exception as exp: - logger.error("Could not construct data object: "+ repr(exp)) - data = {} - else: - return data + return source_cr_entry, source_csr_entry, sink_cr_entry, sink_csr_entry def get_auth_token_data(sink_cr_object=None, endpoint="get_auth_token_data()"): @@ -278,69 +362,477 @@ def get_auth_token_data(sink_cr_object=None, endpoint="get_auth_token_data()"): # Get Sink's CR from DB try: + logger.info("Get Sink's CR from DB") cursor = sink_cr_object.from_db(cursor=cursor) except Exception as exp: error_title = "Failed to fetch Sink's CR from DB" logger.error(error_title + ": " + repr(exp)) raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) - finally: + else: logger.debug("sink_cr_object: " + sink_cr_object.log_entry) # Get required id's from Sink's CR try: + 
logger.info("Get required id's from Sink's CR") sink_rs_id = str(sink_cr_object.resource_set_id) sink_slr_primary_key = str(sink_cr_object.service_link_records_id) except Exception as exp: error_title = "Failed to get id's from Sink's CR" logger.error(error_title + ": " + repr(exp)) raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) - finally: + else: logger.debug("sink_rs_id: " + sink_rs_id) # Init Source's Consent Record Object try: + logger.info("Init Source's Consent Record Object") source_cr_entry = ConsentRecord(resource_set_id=sink_rs_id, role="Source") except Exception as exp: error_title = "Failed to create Source's Consent Record object" logger.error(error_title + ": " + repr(exp)) raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) - finally: + else: logger.debug("source_cr_entry: " + source_cr_entry.log_entry) # Get Source's Consent Record from DB - source_cr = {} try: + logger.info("Get Source's Consent Record from DB") cursor = source_cr_entry.from_db(cursor=cursor) - source_cr = source_cr_entry.consent_record except Exception as exp: error_title = "Failed to fetch Source's CR from DB" logger.error(error_title + ": " + repr(exp)) raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) - finally: + else: logger.debug("source_cr_entry: " + source_cr_entry.log_entry) - logger.debug("source_cr: " + json.dumps(source_cr)) # Init Sink's Service Link Record Object try: + logger.info("Init Sink's Service Link Record Object") sink_slr_entry = ServiceLinkRecord(id=sink_slr_primary_key) except Exception as exp: error_title = "Failed to create Source's Service Link Record object" logger.error(error_title + ": " + repr(exp)) raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) - finally: - logger.debug("source_cr_entry: " + source_cr_entry.log_entry) + else: + logger.debug("sink_slr_entry: " + sink_slr_entry.log_entry) # Get Source's Consent Record from DB - 
sink_slr = {} try: + logger.info("Get Source's Consent Record from DB") cursor = sink_slr_entry.from_db(cursor=cursor) - sink_slr = sink_slr_entry.service_link_record except Exception as exp: error_title = "Failed to fetch Sink's SLR from DB" logger.error(error_title + ": " + repr(exp)) raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) - finally: + else: logger.debug("sink_slr_entry: " + sink_slr_entry.log_entry) - logger.debug("sink_slr: " + json.dumps(sink_slr)) - return source_cr, json.loads(sink_slr) + return source_cr_entry, sink_slr_entry + + +def get_last_cr_status(cr_id=None, endpoint="get_last_cr_status()"): + if cr_id is None: + raise AttributeError("Provide cr_id as parameter") + + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise ApiError(code=500, title="Failed to get database cursor", detail=repr(exp), source=endpoint) + + # Init Consent Record Object + try: + logger.info("Create ConsentRecord object") + cr_entry = ConsentRecord(consent_id=cr_id) + logger.info(cr_entry.log_entry) + except Exception as exp: + error_title = "Failed to create Consent Record object" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.debug("sink_cr_entry: " + cr_entry.log_entry) + + # Get Consent Record from DB + try: + cursor = cr_entry.from_db(cursor=cursor) + except IndexError as exp: + error_title = "Consent Record not found from DB with given ID" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + except Exception as exp: + error_title = "Failed to fetch Consent Record from DB" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.debug("cr_entry: " + cr_entry.log_entry) + + # Get 
Consent Record ID of cr_entry + try: + cr_entry_id = cr_entry.consent_id + except Exception as exp: + error_title = "Failed to get Consent Record ID from object" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.debug("cr_entry_id: " + str(cr_entry_id)) + + # Create Consent Status Record object + try: + csr_entry = ConsentStatusRecord() + except Exception as exp: + error_title = "Failed to create Consent Status Record object" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.debug("csr_entry: " + csr_entry.log_entry) + + # Get database table name for Consent Status Record + try: + logger.info("Get Consent Status Record table name") + csr_table_name = csr_entry.table_name + except Exception as exp: + error_title = "Failed to get Consent Status Record table name" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("Got Consent Status Record table name: " + str(csr_table_name)) + + # Get Consent Status Record ID + try: + cursor, csr_id = get_last_csr_id(cursor=cursor, cr_id=cr_id, table_name=csr_table_name) + except IndexError as exp: + error_title = "Consent Status Record not found from DB with given Consent Record ID" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + except Exception as exp: + error_title = "Failed to get last Consent Status Record ID from database" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.debug("csr_id: " + str(csr_id)) + + # Append ID to Consent Status Record Object + try: + logger.info("Append ID to Consent Status Record object: " + csr_entry.log_entry) + 
csr_entry.consent_status_record_id = csr_id + except Exception as exp: + error_title = "Failed to append ID to Consent Status Record object" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("Appended ID to Consent Status Record object: " + csr_entry.log_entry) + + # Get Consent Status Record from DB + try: + cursor = csr_entry.from_db(cursor=cursor) + except IndexError as exp: + error_title = "Consent Record not found from DB with given ID" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + except Exception as exp: + error_title = "Failed to fetch Consent Record from DB" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.debug("csr_entry: " + csr_entry.log_entry) + + return csr_entry + + +def add_csr(cr_id=None, csr_payload=None, endpoint="add_csr()"): + if cr_id is None: + raise AttributeError("Provide cr_id as parameter") + if csr_payload is None: + raise AttributeError("Provide csr_payload as parameter") + + ###### + # Base information + #### + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise ApiError(code=500, title="Failed to get database cursor", detail=repr(exp), source=endpoint) + + # IDs from CSR payload + try: + logger.info("Fetching IDs from CSR payload") + csr_surrogate_id = csr_payload['surrogate_id'] + csr_cr_id = csr_payload['cr_id'] + csr_prev_record_id = csr_payload['prev_record_id'] + csr_record_id = csr_payload['record_id'] + csr_consent_status = csr_payload['consent_status'] + csr_issued = csr_payload['iat'] + except Exception as exp: + error_title = "Could not fetch IDs from CSR payload" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), 
source=endpoint) + else: + logger.info("Fetched IDs from CSR payload") + + # Verify that cr_id and csr_cr_id are the same + if cr_id != csr_cr_id: + error_title = "cr_id from URI and cr_id from payload are not identical" + logger.error(error_title + " | cr_id from URI: " + str(cr_id) + ", cr_id from payload: " + str(csr_cr_id)) + raise ApiError(code=400, title=error_title, source=endpoint) + else: + logger.info("Identical IDs: cr_id from URI: " + str(cr_id) + ", cr_id from payload: " + str(csr_cr_id)) + + + ###### + # Account ID + #### + try: + logger.info("Get Account ID by CSR_ID") + account_id = get_account_id_by_cr(cr_id=cr_id, endpoint=endpoint) + except Exception as exp: + logger.error("Could not Account ID by CSR_ID: " + repr(exp)) + raise + else: + logger.info("account_id: " + str(account_id)) + + ###### + # Sign + #### + # Sign CSR + try: + logger.info("Sign CSR") + csr_signed = sign_csr(account_id=account_id, payload=csr_payload, endpoint=endpoint) + except Exception as exp: + logger.error("Could not sign Source's CSR: " + repr(exp)) + raise + else: + logger.info("Source CR signed") + + ########### + # Entries # + ########### + # Existing Consent Record + ### + # Init Consent Record Object + try: + logger.info("Create ConsentRecord object") + cr_entry = ConsentRecord(consent_id=cr_id) + except Exception as exp: + error_title = "Failed to create Consent Record object" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.debug("sink_cr_entry: " + cr_entry.log_entry) + + # Get Consent Record from DB + try: + logger.info("Get Consent Record from DB") + cursor = cr_entry.from_db(cursor=cursor) + except IndexError as exp: + error_title = "Consent Record not found from DB with given ID" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + except Exception as exp: + error_title = "Failed to fetch 
Consent Record from DB" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.debug("cr_entry: " + cr_entry.log_entry) + + # Get primary key of Consent Record database entry + try: + logger.info("Get primary key of Consent Record database entry") + cr_entry_primary_key = cr_entry.id + except Exception as exp: + error_title = "Failed to get primary key of Consent Record database entry" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.debug("cr_entry_primary_key: " + str(cr_entry_primary_key)) + + # CSR + try: + logger.info("Create ConsentStatusRecord object") + csr_entry = ConsentStatusRecord( + consent_status_record_id=csr_record_id, + status=csr_consent_status, + consent_status_record=csr_signed, + consent_record_id=cr_id, + issued_at=int(csr_issued), + prev_record_id=csr_prev_record_id, + consent_records_id=int(cr_entry_primary_key) + ) + except Exception as exp: + error_title = "Failed to create Source's Consent Status Record object" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("csr_entry: " + csr_entry.log_entry) + + ########### + # Store # + ########### + # CSR + + # Get database table name for Consent Status Record + try: + logger.info("Get Consent Status Record table name") + csr_table_name = csr_entry.table_name + except Exception as exp: + error_title = "Failed to get Consent Status Record table name" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("Got Consent Status Record table name: " + str(csr_table_name)) + + # Store CSR + try: + logger.info("Store ConsentStatusRecord") + try: + cursor = csr_entry.to_db(cursor=cursor) + except Exception as exp: + error_title = 
"Failed to store Consent Status Record" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.debug("csr_entry: " + csr_entry.log_entry) + + # Commit + db.connection.commit() + except Exception as exp: + logger.error('Consent Status Record Commit failed: ' + repr(exp)) + db.connection.rollback() + logger.error('--> rollback') + raise + else: + logger.info("Consent Status Record commited") + + return csr_entry + + +def get_csr(cr_id=None, csr_id=None, cursor=None): + """ + Get one csr entry from database by Account ID and ID + :param slr_id: + :param cr_id: + :param csr_id: + :return: dict + """ + if cr_id is None: + raise AttributeError("Provide cr_id as parameter") + if csr_id is None: + raise AttributeError("Provide csr_id as parameter") + if cursor is None: + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + logger.error('Could not get database cursor: ' + repr(exp)) + raise + + try: + db_entry_object = ConsentStatusRecord(consent_record_id=cr_id, consent_status_record_id=csr_id) + except Exception as exp: + error_title = "Failed to create csr object" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.debug("csr object created: " + db_entry_object.log_entry) + + # Get csr from DB + try: + cursor = db_entry_object.from_db(cursor=cursor) + except Exception as exp: + error_title = "Failed to fetch csr from DB" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.info("csr fetched") + logger.info("csr fetched from db: " + db_entry_object.log_entry) + + return db_entry_object.to_record_dict + + +def get_csrs(cr_id=None, last_csr_id=None): + """ + Get all csr -entries related to service link record + :param cr_id: + :return: List of dicts + """ + if cr_id is None: + raise AttributeError("Provide cr_id as parameter") + + # Get DB cursor + try: + cursor = get_db_cursor() + except Exception as exp: + 
logger.error('Could not get database cursor: ' + repr(exp)) + raise + + # Get CSR limit if necessary + if last_csr_id is None: + logger.info("No limiting CSR ID provided") + csr_primary_key = None + else: + csr_limit_id = last_csr_id + logger.info("csr_limit_id: " + str(csr_limit_id)) + + # Get primary key of limiting CSR + try: + logger.info("Create CSR object") + csr_entry = ConsentStatusRecord(consent_record_id=cr_id, consent_status_record_id=last_csr_id) + except Exception as exp: + error_title = "Failed to create csr object" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.debug("csr object created: " + csr_entry.log_entry) + + # Get csr from DB + try: + cursor = csr_entry.from_db(cursor=cursor) + except Exception as exp: + error_title = "Failed to fetch csr from DB" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.info("csr fetched") + logger.info("csr fetched from db: " + csr_entry.log_entry) + + # Get primary key of Consent Record database entry + try: + logger.info("Get primary key of Consent Record database entry") + csr_primary_key = csr_entry.id + except Exception as exp: + error_title = "Failed to get primary key of Consent Record database entry" + logger.error(error_title + ": " + repr(exp)) + raise + + logger.debug("csr_primary_key: " + str(csr_primary_key)) + + # Get primary keys for csrs + try: + # Get table name + logger.info("Create csr") + db_entry_object = ConsentStatusRecord() + logger.info(db_entry_object.log_entry) + logger.info("Get table name") + table_name = db_entry_object.table_name + logger.info("Got table name: " + str(table_name)) + + cursor, id_list = get_csr_ids(cursor=cursor, cr_id=cr_id, csr_primary_key=csr_primary_key, table_name=table_name) + except Exception as exp: + logger.error('Could not get primary key list: ' + repr(exp)) + raise + + # Get csrs from database + logger.info("Get csrs from database") + db_entry_list = [] + for id in id_list: + # TODO: try-except needed? 
+ logger.info("Getting csr with cr_id: " + str(cr_id) + " csr_id: " + str(id)) + db_entry_dict = get_csr(cr_id=cr_id, csr_id=id) + db_entry_list.append(db_entry_dict) + logger.info("csr object added to list: " + json.dumps(db_entry_dict)) + + return db_entry_list + + diff --git a/Account/app/mod_authorization/models.py b/Account/app/mod_authorization/models.py index 03be7f4..f98eeb3 100644 --- a/Account/app/mod_authorization/models.py +++ b/Account/app/mod_authorization/models.py @@ -15,14 +15,16 @@ from marshmallow import Schema, fields from marshmallow.validate import Equal, OneOf +STATUS_LIST = ["Active", "Disabled", "Withdrawn"] # List that contains status entries +# Consent Status Records class ConsentStatusAttributes(Schema): record_id = fields.Str(required=True) - account_id = fields.Str(required=True) + surrogate_id = fields.Str(required=True) cr_id = fields.Str(required=True) - consent_status = fields.Str(required=True) - iat = fields.Str(required=True) + consent_status = fields.Str(required=True, validate=OneOf(STATUS_LIST)) + iat = fields.Int(required=True) prev_record_id = fields.Str(required=True) @@ -31,19 +33,7 @@ class ConsentStatusPayload(Schema): attributes = fields.Nested(nested=ConsentStatusAttributes, required=True) -class CommonConsentAttributes(Schema): - version_number = fields.Str(required=True) - cr_id = fields.Str(required=True) - surrogate_id = fields.Str(required=True) - rs_id = fields.Str(required=True) - slr_id = fields.Str(required=True) - issued = fields.Str(required=True) - not_before = fields.Str(required=True) - not_after = fields.Str(required=True) - issued_at = fields.Str(required=True) - subject_id = fields.Str(required=True) - - +# Consent Records class DataSet(Schema): dataset_id = fields.Str(required=True) distribution_id = fields.Str(required=True) @@ -58,34 +48,69 @@ class ResourceSetDescription(Schema): resource_set = fields.Nested(nested=ResourceSet, required=True) +class SourceCommonConsentAttributes(Schema): + version 
= fields.Str(required=True) + cr_id = fields.Str(required=True) + surrogate_id = fields.Str(required=True) + rs_description = fields.Nested(nested=ResourceSetDescription, required=True) + slr_id = fields.Str(required=True) + iat = fields.Int(required=True) + nbf = fields.Int(required=True) + exp = fields.Int(required=True) + operator = fields.Str(required=True) + subject_id = fields.Str(required=True) + role = fields.Str(required=True, validate=Equal("Source")) + + +class SinkCommonConsentAttributes(Schema): + version = fields.Str(required=True) + cr_id = fields.Str(required=True) + surrogate_id = fields.Str(required=True) + rs_description = fields.Nested(nested=ResourceSetDescription, required=True) + slr_id = fields.Str(required=True) + iat = fields.Int(required=True) + nbf = fields.Int(required=True) + exp = fields.Int(required=True) + operator = fields.Str(required=True) + subject_id = fields.Str(required=True) + role = fields.Str(required=True, validate=Equal("Sink")) + + class SourceRoleSpecificAttributes(Schema): - role = fields.Str(required=True, validate=OneOf(["Source", "InternalProcessing"])) - auth_token_issuer_key = fields.Dict(required=True) - resource_set_description = fields.Nested(nested=ResourceSetDescription, required=True) + pop_key = fields.Dict(required=True) + token_issuer_key = fields.Dict(required=True) -class UsageRules(Schema): - rule = fields.Str(required=True) +# class UsageRules(Schema): +# rule = fields.Str(required=True) class SinkRoleSpecificAttributes(Schema): - role = fields.Str(required=True, validate=OneOf(["Sink", "InternalProcessing"])) #usage_rules = fields.Nested(nested=UsageRules, only=UsageRules.rule, many=True, required=True) usage_rules = fields.Field(required=True) + source_cr_id = fields.Str(required=True) -class SinkConsentAttributes(Schema): - common_part = fields.Nested(nested=CommonConsentAttributes, required=True) - role_specific_part = fields.Nested(nested=SinkRoleSpecificAttributes, required=True) +class 
ConsentReceiptPart(Schema): ki_cr = fields.Dict(required=True) + + +class ExtensionPart(Schema): extensions = fields.Dict(required=True) +class SinkConsentAttributes(Schema): + common_part = fields.Nested(nested=SinkCommonConsentAttributes, required=True) + role_specific_part = fields.Nested(nested=SinkRoleSpecificAttributes, required=True) + consent_receipt_part = fields.Nested(nested=ConsentReceiptPart, required=True) + extension_part = fields.Nested(nested=ExtensionPart, required=True) + + class SourceConsentAttributes(Schema): - common_part = fields.Nested(nested=CommonConsentAttributes, required=True) + common_part = fields.Nested(nested=SourceCommonConsentAttributes, required=True) role_specific_part = fields.Nested(nested=SourceRoleSpecificAttributes, required=True) - ki_cr = fields.Dict(required=True) - extensions = fields.Dict(required=True) + consent_receipt_part = fields.Nested(nested=ConsentReceiptPart, required=True) + extension_part = fields.Nested(nested=ExtensionPart, required=True) class SourceConsentPayload(Schema): @@ -116,3 +141,7 @@ class NewConsentData(Schema): class NewConsent(Schema): data = fields.Nested(nested=NewConsentData, required=True) + +class NewConsentStatus(Schema): + data = fields.Nested(nested=ConsentStatusPayload, required=True) + diff --git a/Account/app/mod_authorization/view_api.py b/Account/app/mod_authorization/view_api.py index e998f43..2255d12 100644 --- a/Account/app/mod_authorization/view_api.py +++ b/Account/app/mod_authorization/view_api.py @@ -37,8 +37,9 @@ verify_jws_signature_with_jwk from app.mod_database.helpers import get_db_cursor from app.mod_database.models import ServiceLinkRecord, ServiceLinkStatusRecord, ConsentRecord, ConsentStatusRecord -from app.mod_authorization.controllers import sign_cr, sign_csr, store_cr_and_csr, get_auth_token_data -from app.mod_authorization.models import NewConsent +from app.mod_authorization.controllers import sign_cr, sign_csr, store_cr_and_csr, get_auth_token_data, \ + 
get_last_cr_status, add_csr, get_csrs +from app.mod_authorization.models import NewConsent, NewConsentStatus mod_authorization_api = Blueprint('authorization_api', __name__, template_folder='templates') @@ -143,11 +144,11 @@ def post(self, account_id, source_slr_id, sink_slr_id): # Source CR try: source_cr_cr_id = source_cr_payload['common_part']['cr_id'] - source_cr_rs_id = source_cr_payload['common_part']['rs_id'] + source_cr_rs_id = source_cr_payload['common_part']['rs_description']['resource_set']['rs_id'] source_cr_slr_id = source_cr_payload['common_part']['slr_id'] source_cr_subject_id = source_cr_payload['common_part']['subject_id'] source_cr_surrogate_id = source_cr_payload['common_part']['surrogate_id'] - source_cr_role = source_cr_payload['role_specific_part']['role'] + source_cr_role = source_cr_payload['common_part']['role'] except Exception as exp: error_title = "Could not fetch IDs from Source CR payload" raise @@ -155,11 +156,12 @@ def post(self, account_id, source_slr_id, sink_slr_id): # Source CSR try: - source_csr_surrogate_id = source_csr_payload['account_id'] + source_csr_surrogate_id = source_csr_payload['surrogate_id'] source_csr_cr_id = source_csr_payload['cr_id'] source_csr_prev_record_id = source_csr_payload['prev_record_id'] source_csr_record_id = source_csr_payload['record_id'] source_csr_consent_status = source_csr_payload['consent_status'] + source_csr_issued = source_csr_payload['iat'] except Exception as exp: error_title = "Could not fetch IDs from Source CSR payload" raise @@ -167,22 +169,23 @@ def post(self, account_id, source_slr_id, sink_slr_id): # Sink CR try: sink_cr_cr_id = sink_cr_payload['common_part']['cr_id'] - sink_cr_rs_id = sink_cr_payload['common_part']['rs_id'] + sink_cr_rs_id = sink_cr_payload['common_part']['rs_description']['resource_set']['rs_id'] sink_cr_slr_id = sink_cr_payload['common_part']['slr_id'] sink_cr_subject_id = sink_cr_payload['common_part']['subject_id'] sink_cr_surrogate_id = 
sink_cr_payload['common_part']['surrogate_id'] - sink_cr_role = sink_cr_payload['role_specific_part']['role'] + sink_cr_role = sink_cr_payload['common_part']['role'] except Exception as exp: error_title = "Could not fetch IDs from Sink CR payload" raise # Sink CSR try: - sink_csr_surrogate_id = sink_csr_payload['account_id'] + sink_csr_surrogate_id = sink_csr_payload['surrogate_id'] sink_csr_cr_id = sink_csr_payload['cr_id'] sink_csr_prev_record_id = sink_csr_payload['prev_record_id'] sink_csr_record_id = sink_csr_payload['record_id'] sink_csr_consent_status = sink_csr_payload['consent_status'] + sink_csr_issued = sink_csr_payload['iat'] except Exception as exp: error_title = "Could not fetch IDs from Sink CSR payload" raise @@ -200,46 +203,52 @@ def post(self, account_id, source_slr_id, sink_slr_id): # Sign Source CR try: - source_cr_signed, source_cr_issued = sign_cr(account_id=account_id, payload=source_cr_payload, endpoint=endpoint) + source_cr_signed = sign_cr(account_id=account_id, payload=source_cr_payload, endpoint=endpoint) except Exception as exp: logger.error("Could not sign Source's CR: " + repr(exp)) raise else: logger.info("Source CR signed") + logger.debug("source_cr_signed: " + json.dumps(source_cr_signed)) # Sign Source CSR try: - source_csr_signed, source_csr_issued = sign_csr(account_id=account_id, payload=source_csr_payload, endpoint=endpoint) + source_csr_signed = sign_csr(account_id=account_id, payload=source_csr_payload, endpoint=endpoint) except Exception as exp: logger.error("Could not sign Source's CSR: " + repr(exp)) raise else: - logger.info("Source CR signed") + logger.info("Source CSR signed") + logger.debug("source_csr_signed: " + json.dumps(source_csr_signed)) # Sign Sink CR try: - sink_cr_signed, sink_cr_issued = sign_cr(account_id=account_id, payload=sink_cr_payload, endpoint=endpoint) + sink_cr_signed = sign_cr(account_id=account_id, payload=sink_cr_payload, endpoint=endpoint) except Exception as exp: logger.error("Could not sign 
Source's CR: " + repr(exp)) raise else: logger.info("Sink's CR signed") + logger.debug("sink_cr_signed: " + json.dumps(sink_cr_signed)) # Sign Sink CSR try: - sink_csr_signed, sink_csr_issued = sign_csr(account_id=account_id, payload=sink_csr_payload, endpoint=endpoint) + sink_csr_signed = sign_csr(account_id=account_id, payload=sink_csr_payload, endpoint=endpoint) except Exception as exp: logger.error("Could not sign Sink's CSR: " + repr(exp)) raise else: logger.info("Sink's CSR signed") + logger.debug("sink_csr_signed: " + json.dumps(sink_csr_signed)) ######### # Store # ######### + logger.info("Creating objects to store") # Source SLR try: + logger.info("Creating Source SLR") source_slr_entry = ServiceLinkRecord( surrogate_id=source_cr_surrogate_id, account_id=account_id, @@ -249,9 +258,13 @@ def post(self, account_id, source_slr_id, sink_slr_id): error_title = "Failed to create Source's Service Link Record object" logger.error(error_title + ": " + repr(exp)) raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("source_slr_entry created") + logger.info("source_slr_entry: " + source_slr_entry.log_entry) # Sink SLR try: + logger.info("Creating Sink SLR") sink_slr_entry = ServiceLinkRecord( surrogate_id=sink_cr_surrogate_id, account_id=account_id, @@ -261,9 +274,13 @@ def post(self, account_id, source_slr_id, sink_slr_id): error_title = "Failed to create Sink's Service Link Record object" logger.error(error_title + ": " + repr(exp)) raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("sink_slr_entry created") + logger.info("sink_slr_entry: " + sink_slr_entry.log_entry) # Source CR try: + logger.info("Creating Source CR") source_cr_entry = ConsentRecord( consent_record=source_cr_signed, consent_id=source_cr_cr_id, @@ -277,9 +294,13 @@ def post(self, account_id, source_slr_id, sink_slr_id): error_title = "Failed to create Source's Consent Record object" 
logger.error(error_title + ": " + repr(exp)) raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("source_cr_entry created") + logger.info("source_cr_entry: " + source_cr_entry.log_entry) # Sink CR try: + logger.info("Creating Sink CR") sink_cr_entry = ConsentRecord( consent_record=sink_cr_signed, consent_id=sink_cr_cr_id, @@ -293,10 +314,15 @@ def post(self, account_id, source_slr_id, sink_slr_id): error_title = "Failed to create Sink's Consent Record object" logger.error(error_title + ": " + repr(exp)) raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("sink_cr_entry created") + logger.info("sink_cr_entry: " + sink_cr_entry.log_entry) # Source CSR try: + logger.info("Creating Source CSR") source_csr_entry = ConsentStatusRecord( + consent_status_record_id=source_csr_record_id, status=source_csr_consent_status, consent_status_record=source_csr_signed, consent_record_id=source_csr_cr_id, @@ -307,10 +333,15 @@ def post(self, account_id, source_slr_id, sink_slr_id): error_title = "Failed to create Source's Consent Status Record object" logger.error(error_title + ": " + repr(exp)) raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("source_csr_entry created") + logger.info("source_csr_entry: " + source_csr_entry.log_entry) # Sink CSR try: + logger.info("Creating Sink CSR") sink_csr_entry = ConsentStatusRecord( + consent_status_record_id=sink_csr_record_id, status=sink_csr_consent_status, consent_status_record=sink_csr_signed, consent_record_id=sink_csr_cr_id, @@ -321,10 +352,14 @@ def post(self, account_id, source_slr_id, sink_slr_id): error_title = "Failed to create Sink's Consent Status Record object" logger.error(error_title + ": " + repr(exp)) raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("sink_csr_entry created") + logger.info("sink_csr_entry: " + 
sink_csr_entry.log_entry) # Store CRs and CSRs try: - db_meta = store_cr_and_csr( + logger.info("About to store Consent Records and Consent Status Records") + stored_source_cr_entry, stored_source_csr_entry, stored_sink_cr_entry, stored_sink_csr_entry = store_cr_and_csr( source_slr_entry=source_slr_entry, sink_slr_entry=sink_slr_entry, source_cr_entry=source_cr_entry, @@ -339,7 +374,10 @@ def post(self, account_id, source_slr_id, sink_slr_id): raise else: logger.info("Stored Consent Record and Consent Status Record") - logger.debug("DB Meta: " + json.dumps(db_meta)) + logger.info("Source CR: " + stored_source_cr_entry.log_entry) + logger.info("Source CSR: " + stored_source_csr_entry.log_entry) + logger.info("Sink CR: " + stored_sink_cr_entry.log_entry) + logger.info("Sink CSR: " + stored_sink_csr_entry.log_entry) # Response data container try: @@ -350,33 +388,33 @@ def post(self, account_id, source_slr_id, sink_slr_id): response_data['data']['source']['consentRecord'] = {} response_data['data']['source']['consentRecord']['type'] = "ConsentRecord" response_data['data']['source']['consentRecord']['attributes'] = {} - response_data['data']['source']['consentRecord']['attributes']['cr'] = json.loads(source_cr_signed) + response_data['data']['source']['consentRecord']['attributes']['cr'] = stored_source_cr_entry.to_record_dict response_data['data']['source']['consentStatusRecord'] = {} response_data['data']['source']['consentStatusRecord']['type'] = "ConsentStatusRecord" response_data['data']['source']['consentStatusRecord']['attributes'] = {} - response_data['data']['source']['consentStatusRecord']['attributes']['csr'] = json.loads(source_csr_signed) + response_data['data']['source']['consentStatusRecord']['attributes']['csr'] = stored_source_csr_entry.to_record_dict response_data['data']['sink'] = {} response_data['data']['sink']['consentRecord'] = {} response_data['data']['sink']['consentRecord']['type'] = "ConsentRecord" 
response_data['data']['sink']['consentRecord']['attributes'] = {} - response_data['data']['sink']['consentRecord']['attributes']['cr'] = json.loads(sink_cr_signed) + response_data['data']['sink']['consentRecord']['attributes']['cr'] = stored_sink_cr_entry.to_record_dict response_data['data']['sink']['consentStatusRecord'] = {} response_data['data']['sink']['consentStatusRecord']['type'] = "ConsentStatusRecord" response_data['data']['sink']['consentStatusRecord']['attributes'] = {} - response_data['data']['sink']['consentStatusRecord']['attributes']['csr'] = json.loads(sink_csr_signed) + response_data['data']['sink']['consentStatusRecord']['attributes']['csr'] = stored_sink_csr_entry.to_record_dict except Exception as exp: logger.error('Could not prepare response data: ' + repr(exp)) raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) else: logger.info('Response data ready') - logger.debug('response_data: ' + repr(response_data)) + logger.debug('response_data: ' + json.dumps(response_data)) response_data_dict = dict(response_data) - logger.debug('response_data_dict: ' + repr(response_data_dict)) + logger.debug('response_data_dict: ' + json.dumps(response_data_dict)) return make_json_response(data=response_data_dict, status_code=201) @@ -400,7 +438,7 @@ def get(self, sink_cr_id): sink_cr_id = str(sink_cr_id) except Exception as exp: raise ApiError(code=400, title="Unsupported sink_cr_id", detail=repr(exp), source=endpoint) - finally: + else: logger.debug("sink_cr_id: " + repr(sink_cr_id)) # Init Sink's Consent Record Object @@ -410,21 +448,18 @@ def get(self, sink_cr_id): error_title = "Failed to create Sink's Consent Record object" logger.error(error_title + ": " + repr(exp)) raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) - finally: + else: logger.debug("sink_cr_entry: " + sink_cr_entry.log_entry) - source_cr = {} - sink_slr = {} try: source_cr, sink_slr = 
get_auth_token_data(sink_cr_object=sink_cr_entry) except Exception as exp: error_title = "Failed to get Authorization token data" logger.error(error_title + ": " + repr(exp)) - #raise raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) - finally: - logger.debug("source_cr: " + json.dumps(source_cr)) - logger.debug("sink_slr: " + json.dumps(sink_slr)) + else: + logger.debug("source_cr: " + source_cr.log_entry) + logger.debug("sink_slr: " + sink_slr.log_entry) # Response data container @@ -436,27 +471,226 @@ def get(self, sink_cr_id): response_data['data']['source']['consentRecord'] = {} response_data['data']['source']['consentRecord']['type'] = "ConsentRecord" response_data['data']['source']['consentRecord']['attributes'] = {} - response_data['data']['source']['consentRecord']['attributes']['cr'] = source_cr + response_data['data']['source']['consentRecord']['attributes']['cr'] = source_cr.to_record_dict response_data['data']['sink'] = {} response_data['data']['sink']['serviceLinkRecord'] = {} response_data['data']['sink']['serviceLinkRecord']['type'] = "ServiceLinkRecord" response_data['data']['sink']['serviceLinkRecord']['attributes'] = {} - response_data['data']['sink']['serviceLinkRecord']['attributes']['slr'] = sink_slr + response_data['data']['sink']['serviceLinkRecord']['attributes']['slr'] = sink_slr.to_record_dict except Exception as exp: logger.error('Could not prepare response data: ' + repr(exp)) raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) else: logger.info('Response data ready') - logger.debug('response_data: ' + repr(response_data)) + logger.debug('response_data: ' + json.dumps(response_data)) response_data_dict = dict(response_data) - logger.debug('response_data_dict: ' + repr(response_data_dict)) + logger.debug('response_data_dict: ' + json.dumps(response_data_dict)) + return make_json_response(data=response_data_dict, status_code=200) + + +class 
LastCrStatus(Resource): + @requires_api_auth_sdk + def get(self, cr_id): + + try: + endpoint = str(api.url_for(self, cr_id=cr_id)) + except Exception as exp: + endpoint = str(__name__) + + try: + api_key = request.headers.get('Api-Key') + except Exception as exp: + logger.error("No ApiKey in headers") + logger.debug("No ApiKey in headers: " + repr(repr(exp))) + return provideApiKey(endpoint=endpoint) + + try: + cr_id = str(cr_id) + except Exception as exp: + raise ApiError(code=400, title="Unsupported cr_id", detail=repr(exp), source=endpoint) + else: + logger.debug("cr_id: " + repr(cr_id)) + + # Get last Consent Status Record + try: + last_csr_object = get_last_cr_status(cr_id=cr_id) + except Exception as exp: + error_title = "Failed to get last Consent Status Record of Consent" + logger.error(error_title + ": " + repr(exp)) + raise + else: + logger.debug("last_cr_status_object: " + last_csr_object.log_entry) + + # Response data container + try: + response_data = {} + response_data['data'] = last_csr_object.to_record_dict + except Exception as exp: + logger.error('Could not prepare response data: ' + repr(exp)) + raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) + else: + logger.info('Response data ready') + logger.debug('response_data: ' + json.dumps(response_data)) + + response_data_dict = dict(response_data) + logger.debug('response_data_dict: ' + json.dumps(response_data_dict)) + return make_json_response(data=response_data_dict, status_code=200) + + +class CrStatus(Resource): + @requires_api_auth_sdk + def post(self, cr_id): + logger.info("CrStatus") + try: + endpoint = str(api.url_for(self, cr_id=cr_id)) + except Exception as exp: + endpoint = str(__name__) + + try: + api_key = request.headers.get('Api-Key') + except Exception as exp: + logger.error("No ApiKey in headers") + logger.debug("No ApiKey in headers: " + repr(repr(exp))) + return provideApiKey(endpoint=endpoint) + + try: + cr_id = str(cr_id) + 
except Exception as exp: + raise ApiError(code=400, title="Unsupported cr_id", detail=repr(exp), source=endpoint) + else: + logger.debug("cr_id: " + repr(cr_id)) + + # load JSON + json_data = request.get_json() + if not json_data: + error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'} + raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint) + else: + logger.debug("json_data: " + json.dumps(json_data)) + + # Validate payload content + schema = NewConsentStatus() + schema_validation_result = schema.load(json_data) + + # Check validation errors + if schema_validation_result.errors: + logger.error("Invalid payload") + raise ApiError(code=400, title="Invalid payload", detail=dict(schema_validation_result.errors), source=endpoint) + else: + logger.debug("JSON validation -> OK") + + # Payload + # Consent Status Record + try: + csr_payload = json_data['data']['attributes'] + except Exception as exp: + raise ApiError(code=400, title="Could not fetch source_csr_payload from json", detail=repr(exp), source=endpoint) + else: + logger.debug("Got csr_payload: " + json.dumps(csr_payload)) + + # + # Create new Consent Status Record + try: + new_csr_object = add_csr(cr_id=cr_id, csr_payload=csr_payload, endpoint=endpoint) + except ApiError as exp: + error_title = "Failed to add new Consent Status Record for Consent" + logger.error(error_title + ": " + repr(exp)) + raise + except Exception as exp: + error_title = "Unexpected error. 
Failed to add new Consent Status Record for Consent" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.debug("new_csr_object: " + new_csr_object.log_entry) + + # Response data container + try: + response_data = {} + response_data['data'] = new_csr_object.to_record_dict + except Exception as exp: + logger.error('Could not prepare response data: ' + repr(exp)) + raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) + else: + logger.info('Response data ready') + logger.debug('response_data: ' + json.dumps(response_data)) + + response_data_dict = dict(response_data) + logger.debug('response_data_dict: ' + json.dumps(response_data_dict)) return make_json_response(data=response_data_dict, status_code=201) + @requires_api_auth_sdk + def get(self, cr_id): + logger.info("CrStatus") + try: + endpoint = str(api.url_for(self, cr_id=cr_id)) + except Exception as exp: + endpoint = str(__name__) + + try: + logger.info("Fetching Api-Key from Headers") + api_key = request.headers.get('Api-Key') + except Exception as exp: + logger.error("No ApiKey in headers: " + repr(repr(exp))) + return provideApiKey(endpoint=endpoint) + else: + logger.info("Api-Key: " + api_key) + + try: + cr_id = str(cr_id) + except Exception as exp: + error_title = "Unsupported cr_id" + logger.error(error_title) + raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("cr_id: " + cr_id) + + # Get last CSR ID from query parameters + try: + logger.info("Get last CSR ID from query parameters") + last_csr_id = request.args.get('csr_id', None) + except Exception as exp: + error_title = "Unexpected error when getting last CSR ID from query parameters" + logger.error(error_title + " " + repr(exp)) + raise ApiError(code=403, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("last_csr_id: " + 
repr(last_csr_id)) + + # Get ConsentStatusRecords + try: + logger.info("Fetching ConsentStatusRecords") + db_entries = get_csrs(cr_id=cr_id, last_csr_id=last_csr_id) + except StandardError as exp: + error_title = "ConsentStatusRecords not accessible" + logger.error(error_title + ": " + repr(exp)) + raise ApiError(code=403, title=error_title, detail=repr(exp), source=endpoint) + except Exception as exp: + error_title = "No ConsentStatusRecords found" + logger.error(error_title + repr(exp)) + raise ApiError(code=404, title=error_title, detail=repr(exp), source=endpoint) + else: + logger.info("ConsentStatusRecords Fetched") + + # Response data container + try: + db_entry_list = db_entries + response_data = {} + response_data['data'] = db_entry_list + except Exception as exp: + logger.error('Could not prepare response data: ' + repr(exp)) + raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint) + else: + logger.info('Response data ready') + logger.debug('response_data: ' + repr(response_data)) + response_data_dict = dict(response_data) + logger.debug('response_data_dict: ' + repr(response_data_dict)) + return make_json_response(data=response_data_dict, status_code=200) # Register resources api.add_resource(ConsentSignAndStore, '/api/account//servicelink///consent/', endpoint='mydata-authorization') api.add_resource(AuthorizationTokenData, '/api/consent//authorizationtoken/', endpoint='mydata-authorizationtoken') +api.add_resource(LastCrStatus, '/api/consent//status/last/', endpoint='mydata-last-cr') +api.add_resource(CrStatus, '/api/consent//status/', endpoint='mydata-csr') diff --git a/Account/app/mod_blackbox/controllers.py b/Account/app/mod_blackbox/controllers.py index 2942852..56d0498 100644 --- a/Account/app/mod_blackbox/controllers.py +++ b/Account/app/mod_blackbox/controllers.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """ -Minimum viable Key management +Minimum viable Key management. NOT FOR PRODUCTION USE. 
__author__ = "Jani Yli-Kantola" __copyright__ = "" @@ -283,11 +283,7 @@ def sign_jws_with_jwk(account_id=None, jws_json_to_sign=None): if account_id is None: raise AttributeError("Provide account_id or as parameter") if jws_json_to_sign is None: - # raise AttributeError("Provide jws_to_sign or as parameter") - # Fake request payload to json - # TODO: Following two lines, NOT FOR PRODUCTION - jws_json_to_sign = json.dumps(SLR_PAYLOAD['slr']) - logger.info("No jws_json_to_sign provided as parameter. Using SLR_PAYLOAD -template instead.") + raise AttributeError("Provide jws_json_to_sign or as parameter") # jws_json_to_sign to dict try: @@ -419,10 +415,7 @@ def verify_jws_signature_with_jwk(account_id=None, jws_json_to_verify=None): if account_id is None: raise AttributeError("Provide account_id or as parameter") if jws_json_to_verify is None: - # raise AttributeError("Provide jws_to_sign or as parameter") - # TODO: Following two lines, NOT FOR PRODUCTION - jws_json_to_verify = sign_jws_with_jwk(account_id=account_id) - logger.info("No jws_json_to_sign provided as parameter. Using SLR_PAYLOAD -template instead.") + raise AttributeError("Provide jws_json_to_verify or as parameter") # Prepare JWS for signing try: @@ -480,10 +473,7 @@ def generate_and_sign_jws(account_id=None, jws_payload=None): if account_id is None: raise AttributeError("Provide account_id or as parameter") if jws_payload is None: - # raise AttributeError("Provide jws_to_sign or as parameter") - # TODO: Following two lines, NOT FOR PRODUCTION - jws_payload = CR_CSR_PAYLOAD['sink']['cr'] - logger.info("No jws_payload provided as parameter. 
Using CR_CSR_PAYLOAD -template instead.") + raise AttributeError("Provide jws_payload or as parameter") # Prepare database connection try: diff --git a/Account/app/mod_blackbox/helpers.py b/Account/app/mod_blackbox/helpers.py index 5ff1c4b..c716f75 100644 --- a/Account/app/mod_blackbox/helpers.py +++ b/Account/app/mod_blackbox/helpers.py @@ -1,6 +1,9 @@ # -*- coding: utf-8 -*- """ +Minimum viable Key management. NOT FOR PRODUCTION USE. + + __author__ = "Jani Yli-Kantola" __copyright__ = "" __credits__ = ["Harri Hirvonsalo", "Aleksi Palomäki"] diff --git a/Account/app/mod_blackbox/services.py b/Account/app/mod_blackbox/services.py index ab3ef75..85f5071 100644 --- a/Account/app/mod_blackbox/services.py +++ b/Account/app/mod_blackbox/services.py @@ -1,6 +1,9 @@ # -*- coding: utf-8 -*- """ +Minimum viable Key management. NOT FOR PRODUCTION USE. + + __author__ = "Jani Yli-Kantola" __copyright__ = "" __credits__ = ["Harri Hirvonsalo", "Aleksi Palomäki"] @@ -68,7 +71,7 @@ def init_sqlite_db(connection=None): id INTEGER PRIMARY KEY AUTOINCREMENT, kid TEXT UNIQUE NOT NULL, account_id INTEGER UNIQUE NOT NULL, - jws_key BLOB NOT NULL + jwk BLOB NOT NULL );''' try: @@ -229,7 +232,7 @@ def store_jwk_to_db(account_id=None, account_kid=None, account_key=None, cursor= if cursor is None: raise AttributeError("Provide cursor as parameter") - sql_query = "INSERT INTO account_keys (kid, account_id, jws_key) VALUES ('%s', '%s', '%s')" % \ + sql_query = "INSERT INTO account_keys (kid, account_id, jwk) VALUES ('%s', '%s', '%s')" % \ (account_kid, account_id, account_key) try: @@ -258,8 +261,7 @@ def get_key(account_id=None, cursor=None): jwk_dict = {} - # TODO: Fix field name in SQL jws_key-> jwk - sql_query = "SELECT id, kid, account_id, jws_key FROM account_keys WHERE account_id='%s' ORDER BY id DESC LIMIT 1" % (account_id) + sql_query = "SELECT id, kid, account_id, jwk FROM account_keys WHERE account_id='%s' ORDER BY id DESC LIMIT 1" % (account_id) try: cursor, data = 
execute_sql_select(sql_query=sql_query, cursor=cursor) @@ -472,11 +474,10 @@ def jws_generate(payload=None): if payload is None: raise AttributeError("Provide payload as parameter") - payload_json = json.dumps(payload) - logger.debug('payload_json: ' + payload_json) + logger.debug('payload: ' + payload) try: - jws_object = jws.JWS(payload=payload_json) + jws_object = jws.JWS(payload=payload) except Exception as exp: exp = append_description_to_exception(exp=exp, description='Could not generate JWS object with payload') logger.error('Could not generate JWS object with payload: ' + repr(exp)) @@ -576,7 +577,7 @@ def jws_sign(account_id=None, account_kid=None, jws_object=None, jwk_object=None raise AttributeError("Provide alg as parameter") try: - unprotected_header = {'kid': account_kid, 'jwk': json.loads(jwk_public_json)} + unprotected_header = {'kid': account_kid} protected_header = {'alg': alg} unprotected_header_json = json.dumps(unprotected_header) protected_header_json = json.dumps(protected_header) diff --git a/Account/app/mod_database/helpers.py b/Account/app/mod_database/helpers.py index 73c6fd6..9929732 100644 --- a/Account/app/mod_database/helpers.py +++ b/Account/app/mod_database/helpers.py @@ -9,10 +9,26 @@ # create logger with 'spam_application' from app.helpers import get_custom_logger -logger = get_custom_logger('mod_database_helpers') +logger = get_custom_logger(__name__) + + +def log_query(sql_query=None, arguments=None): + logger.info("Executing") + if sql_query is None: + raise AttributeError("Provide sql_query as parameter") + if arguments is None: + raise AttributeError("Provide arguments as parameter") + + logger.debug('sql_query: ' + repr(sql_query)) + + for index in range(len(arguments)): + logger.debug("arguments[" + str(index) + "]: " + str(arguments[index])) + + logger.debug('SQL query to execute: ' + repr(sql_query % arguments)) def get_db_cursor(): + logger.info("Executing") try: cursor = db.connection.cursor() except Exception as exp: 
@@ -32,6 +48,7 @@ def execute_sql_insert(cursor, sql_query): INSERT to MySQL """ + logger.info("Executing") last_id = "" @@ -66,18 +83,17 @@ def execute_sql_insert_2(cursor, sql_query, arguments): INSERT to MySQL """ + logger.info("Executing") last_id = "" - logger.debug('sql_query: ' + str(sql_query)) - - for index in range(len(arguments)): - logger.debug("arguments[" + str(index) + "]: " + str(arguments[index])) + log_query(sql_query=sql_query, arguments=arguments) try: # Should be done like here: http://stackoverflow.com/questions/3617052/escape-string-python-for-mysql/27575399#27575399 cursor.execute(sql_query, (arguments)) - + logger.debug("Executed SQL query: " + str(cursor._last_executed)) + logger.debug("Affected rows: " + str(cursor.rowcount)) except Exception as exp: logger.debug('Error in SQL query execution: ' + repr(exp)) raise @@ -93,6 +109,35 @@ def execute_sql_insert_2(cursor, sql_query, arguments): return cursor, last_id +def execute_sql_update(cursor, sql_query, arguments): + """ + :param arguments: + :param cursor: + :param sql_query: + :return: cursor: + + INSERT to MySQL + """ + logger.info("Executing") + + logger.debug('sql_query: ' + str(sql_query)) + + for index in range(len(arguments)): + logger.debug("arguments[" + str(index) + "]: " + str(arguments[index])) + + try: + # Should be done like here: http://stackoverflow.com/questions/3617052/escape-string-python-for-mysql/27575399#27575399 + cursor.execute(sql_query, (arguments)) + logger.debug("Executed SQL query: " + str(cursor._last_executed)) + logger.debug("Affected rows SQL query: " + str(cursor.rowcount)) + except Exception as exp: + logger.debug('Error in SQL query execution: ' + repr(exp)) + raise + else: + logger.debug('db entry updated') + return cursor + + def execute_sql_select(cursor=None, sql_query=None): """ :param cursor: @@ -102,6 +147,7 @@ def execute_sql_select(cursor=None, sql_query=None): SELECT from MySQL """ + logger.info("Executing") if app.config["SUPER_DEBUG"]: 
logger.debug('sql_query: ' + repr(sql_query)) @@ -134,13 +180,15 @@ def execute_sql_select_2(cursor=None, sql_query=None, arguments=None): SELECT from MySQL """ + logger.info("Executing") - if app.config["SUPER_DEBUG"]: - logger.debug('sql_query: ' + repr(sql_query)) + log_query(sql_query=sql_query, arguments=arguments) try: - cursor.execute(sql_query, (arguments)) + cursor.execute(sql_query, (arguments)) + logger.debug("Executed SQL query: " + str(cursor._last_executed)) + logger.debug("Affected rows: " + str(cursor.rowcount)) except Exception as exp: logger.debug('Error in SQL query execution: ' + repr(exp)) raise @@ -151,8 +199,7 @@ def execute_sql_select_2(cursor=None, sql_query=None, arguments=None): logger.debug('cursor.fetchall() failed: ' + repr(exp)) data = 'No content' - if app.config["SUPER_DEBUG"]: - logger.debug('data ' + repr(data)) + logger.debug('data ' + repr(data)) return cursor, data @@ -166,6 +213,7 @@ def execute_sql_count(cursor=None, sql_query=None): SELECT from MySQL """ + logger.info("Executing") consent_count = 0 @@ -201,6 +249,7 @@ def drop_table_content(): Drop table content """ + logger.info("Executing") try: cursor = get_db_cursor() @@ -211,8 +260,9 @@ def drop_table_content(): sql_query = "SELECT Concat('TRUNCATE TABLE ',table_schema,'.',TABLE_NAME, ';') " \ "FROM INFORMATION_SCHEMA.TABLES where table_schema in ('MyDataAccount');" - sql_query1 = "SELECT Concat('DELETE FROM ',table_schema,'.',TABLE_NAME, '; ALTER TABLE ',table_schema,'.',TABLE_NAME, ' AUTO_INCREMENT = 1;') " \ - "FROM INFORMATION_SCHEMA.TABLES where table_schema in ('MyDataAccount');" + # sql_query1 = "SELECT Concat('DELETE FROM ',table_schema,'.',TABLE_NAME, '; ALTER TABLE ',table_schema,'.',TABLE_NAME, ' AUTO_INCREMENT = 1;') " \ + # "FROM INFORMATION_SCHEMA.TABLES where table_schema in ('MyDataAccount');" + # TODO: Remove two upper rows try: cursor.execute(sql_query) @@ -248,3 +298,314 @@ def drop_table_content(): cursor.execute("SET FOREIGN_KEY_CHECKS = 1;") return 
True + + +def get_primary_keys_by_account_id(cursor=None, account_id=None, table_name=None): + logger.info("Executing") + if cursor is None: + raise AttributeError("Provide cursor as parameter") + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if table_name is None: + raise AttributeError("Provide table_name as parameter") + + sql_query = "SELECT id " \ + "FROM " + table_name + " " \ + "WHERE Accounts_id LIKE %s;" + + arguments = ( + '%' + str(account_id) + '%', + ) + + try: + cursor, data = execute_sql_select_2(cursor=cursor, sql_query=sql_query, arguments=arguments) + except Exception as exp: + logger.debug('sql_query: ' + repr(exp)) + raise + else: + logger.debug("Got data: " + repr(data)) + + if len(data) == 0: + logger.error("IndexError('DB query returned no results')") + raise IndexError("DB query returned no results") + + logger.debug("Got data[0]: " + repr(data[0])) + data_list = list(data[0]) + logger.info("Got data_list: " + repr(data_list)) + + for i in range(len(data_list)): + data_list[i] = str(data_list[i]) + + id_list = data_list + logger.info("Got id_list: " + repr(id_list)) + + return cursor, id_list + + +def get_slr_ids(cursor=None, account_id=None, table_name=None): + logger.info("Executing") + if cursor is None: + raise AttributeError("Provide cursor as parameter") + if account_id is None: + raise AttributeError("Provide account_id as parameter") + if table_name is None: + raise AttributeError("Provide table_name as parameter") + + sql_query = "SELECT serviceLinkRecordId " \ + "FROM " + table_name + " " \ + "WHERE Accounts_id LIKE %s;" + + arguments = ( + '%' + str(account_id) + '%', + ) + + try: + cursor, data = execute_sql_select_2(cursor=cursor, sql_query=sql_query, arguments=arguments) + except Exception as exp: + logger.debug('sql_query: ' + repr(exp)) + raise + else: + logger.debug("Got data: " + repr(data)) + #logger.debug("Got data[0]: " + repr(data[0])) + data_list = list(data) + logger.info("Got 
data_list: " + repr(data_list)) + + if len(data) == 0: + logger.error("IndexError('DB query returned no results')") + raise IndexError("DB query returned no results") + + for i in range(len(data_list)): + data_list[i] = str(data_list[i][0]) + logger.info("Formatted data_list: " + repr(data_list)) + + id_list = data_list + logger.info("Got id_list: " + repr(id_list)) + + return cursor, id_list + + +def get_slsr_ids(cursor=None, slr_id=None, table_name=None): + logger.info("Executing") + if cursor is None: + raise AttributeError("Provide cursor as parameter") + if slr_id is None: + raise AttributeError("Provide slr_id as parameter") + if table_name is None: + raise AttributeError("Provide table_name as parameter") + + sql_query = "SELECT serviceLinkStatusRecordId " \ + "FROM " + table_name + " " \ + "WHERE serviceLinkRecordId LIKE %s;" + + arguments = ( + '%' + str(slr_id) + '%', + ) + + try: + cursor, data = execute_sql_select_2(cursor=cursor, sql_query=sql_query, arguments=arguments) + except Exception as exp: + logger.debug('sql_query: ' + repr(exp)) + raise + else: + logger.debug("Got data: " + repr(data)) + + if len(data) == 0: + logger.error("IndexError('DB query returned no results')") + raise IndexError("DB query returned no results") + + logger.debug("Got data[0]: " + repr(data[0])) + data_list = list(data[0]) + logger.info("Got data_list: " + repr(data_list)) + + for i in range(len(data_list)): + data_list[i] = str(data_list[i]) + + id_list = data_list + logger.info("Got id_list: " + repr(id_list)) + + return cursor, id_list + + +def get_cr_ids(cursor=None, slr_id=None, table_name=None): + logger.info("Executing") + if cursor is None: + raise AttributeError("Provide cursor as parameter") + if slr_id is None: + raise AttributeError("Provide slr_id as parameter") + if table_name is None: + raise AttributeError("Provide table_name as parameter") + + sql_query = "SELECT consentRecordId " \ + "FROM " + table_name + " " \ + "WHERE serviceLinkRecordId LIKE %s;" + 
+ arguments = ( + '%' + str(slr_id) + '%', + ) + + try: + cursor, data = execute_sql_select_2(cursor=cursor, sql_query=sql_query, arguments=arguments) + except Exception as exp: + logger.debug('sql_query: ' + repr(exp)) + raise + else: + logger.debug("Got data: " + repr(data)) + + if len(data) == 0: + logger.error("IndexError('DB query returned no results')") + raise IndexError("DB query returned no results") + + logger.debug("Got data[0]: " + repr(data[0])) + data_list = list(data[0]) + logger.info("Got data_list: " + repr(data_list)) + + for i in range(len(data_list)): + data_list[i] = str(data_list[i]) + + id_list = data_list + logger.info("Got id_list: " + repr(id_list)) + + return cursor, id_list + + +def get_csr_ids(cursor=None, cr_id=None, csr_primary_key=None, table_name=None): + logger.info("Executing") + if cursor is None: + raise AttributeError("Provide cursor as parameter") + if cr_id is None: + raise AttributeError("Provide cr_id as parameter") + if table_name is None: + raise AttributeError("Provide table_name as parameter") + if csr_primary_key is None: + sql_query = "SELECT consentStatusRecordId " \ + "FROM " + table_name + " " \ + "WHERE consentRecordId LIKE %s;" + + arguments = ( + '%' + str(cr_id) + '%', + ) + else: + sql_query = "SELECT consentStatusRecordId " \ + "FROM " + table_name + " " \ + "WHERE consentRecordId LIKE %s AND id > %s;" + + arguments = ( + '%' + str(cr_id) + '%', + int(csr_primary_key), + ) + + try: + cursor, data = execute_sql_select_2(cursor=cursor, sql_query=sql_query, arguments=arguments) + except Exception as exp: + logger.debug('sql_query: ' + repr(exp)) + raise + else: + logger.debug("Got data: " + repr(data)) + + if len(data) == 0: + logger.error("IndexError('DB query returned no results')") + raise IndexError("DB query returned no results") + + logger.debug("Got data[0]: " + repr(data[0])) + data_list = list(data) + logger.info("Got data_list: " + repr(data_list)) + + for i in range(len(data_list)): + data_list[i] = 
str(data_list[i][-1]) + + id_list = data_list + logger.info("Got id_list: " + repr(id_list)) + return cursor, id_list + + +def get_last_csr_id(cursor=None, cr_id=None, table_name=None): + logger.info("Executing") + if cursor is None: + raise AttributeError("Provide cursor as parameter") + if cr_id is None: + raise AttributeError("Provide cr_id as parameter") + if table_name is None: + raise AttributeError("Provide table_name as parameter") + + sql_query = "SELECT consentStatusRecordId " \ + "FROM " + table_name + " " \ + "WHERE consentRecordId LIKE %s " \ + "ORDER BY id DESC " \ + "LIMIT 1;" + + arguments = ( + '%' + str(cr_id) + '%', + ) + + try: + cursor, data = execute_sql_select_2(cursor=cursor, sql_query=sql_query, arguments=arguments) + except Exception as exp: + logger.debug('sql_query: ' + repr(exp)) + raise + else: + logger.debug("Got data: " + repr(data)) + + if len(data) == 0: + logger.error("IndexError('DB query returned no results')") + raise IndexError("DB query returned no results") + + logger.debug("Got data[0]: " + repr(data[0])) + data_list = list(data[0]) + logger.info("Got data_list: " + repr(data_list)) + + entry_id = str(data_list[0]) + logger.info("Got entry_id: " + repr(entry_id)) + + return cursor, entry_id + + +def get_account_id_by_csr_id(cursor=None, cr_id=None, acc_table_name=None, slr_table_name=None, cr_table_name=None): + logger.info("Executing") + if cursor is None: + raise AttributeError("Provide cursor as parameter") + if cr_id is None: + raise AttributeError("Provide cr_id as parameter") + if acc_table_name is None: + raise AttributeError("Provide acc_table_name as parameter") + if slr_table_name is None: + raise AttributeError("Provide slr_table_name as parameter") + if cr_table_name is None: + raise AttributeError("Provide cr_table_name as parameter") + + + sql_query = "SELECT `Accounts`.`id` " \ + "FROM " + acc_table_name + " " \ + "INNER JOIN " + slr_table_name + " on " + acc_table_name + ".`id` = " + slr_table_name + 
".`Accounts_id` " \ + "INNER JOIN " + cr_table_name + " on " + slr_table_name + ".`id` = " + cr_table_name + ".`ServiceLinkRecords_id` " \ + "WHERE " + cr_table_name + ".`consentRecordId` LIKE %s " \ + "LIMIT 1;" + + arguments = ( + '%' + str(cr_id) + '%', + ) + + try: + cursor, data = execute_sql_select_2(cursor=cursor, sql_query=sql_query, arguments=arguments) + except Exception as exp: + logger.debug('sql_query: ' + repr(exp)) + raise + else: + logger.debug("Got data: " + repr(data)) + + if len(data) == 0: + logger.error("IndexError('DB query returned no results')") + raise IndexError("DB query returned no results") + + logger.debug("Got data[0]: " + repr(data[0])) + data_list = list(data[0]) + logger.info("Got data_list: " + repr(data_list)) + + entry_id = str(data_list[0]) + logger.info("Got entry_id: " + repr(entry_id)) + + return cursor, entry_id + + + + + diff --git a/Account/app/mod_database/models.py b/Account/app/mod_database/models.py index f33e96a..b4cce83 100644 --- a/Account/app/mod_database/models.py +++ b/Account/app/mod_database/models.py @@ -6,13 +6,14 @@ import bcrypt # https://github.com/pyca/bcrypt/, https://pypi.python.org/pypi/bcrypt/2.0.0 # Import the database object from the main app module +import datetime from flask import json from app import db, api, login_manager, app # create logger with 'spam_application' from app.helpers import get_custom_logger -from app.mod_database.helpers import execute_sql_insert, execute_sql_insert_2, execute_sql_select_2 +from app.mod_database.helpers import execute_sql_insert, execute_sql_insert_2, execute_sql_select_2, execute_sql_update logger = get_custom_logger(__name__) @@ -24,14 +25,24 @@ class Account(): id = None global_identifier = None activated = None + table_name = "" + deleted = "" - def __init__(self, id="", global_identifyer="", activated=""): + def __init__(self, id="", global_identifyer="", activated="", deleted=0, table_name="MyDataAccount.Accounts"): if id is not None: self.id = id if 
global_identifyer is not None: self.global_identifier = global_identifyer if activated is not None: self.activated = activated + if table_name is not None: + self.table_name = table_name + if deleted is not None: + self.deleted = deleted + + @property + def table_name(self): + return self.table_name @property def id(self): @@ -63,8 +74,10 @@ def to_dict(self): @property def to_dict_external(self): - dictionary = self.__dict__ + dictionary = self.to_dict del dictionary['id'] + del dictionary['deleted'] + del dictionary['table_name'] return dictionary @property @@ -77,10 +90,14 @@ def log_entry(self): def to_db(self, cursor=""): - sql_query = "INSERT INTO Accounts (globalIdenttifyer) VALUES ('%s')" % (self.global_identifier) + sql_query = "INSERT INTO " + self.table_name + " (globalIdentifier) VALUES (%s)" + + arguments = ( + str(self.global_identifier), + ) try: - cursor, last_id = execute_sql_insert(cursor=cursor, sql_query=sql_query) + cursor, last_id = execute_sql_insert_2(cursor=cursor, sql_query=sql_query, arguments=arguments) except Exception as exp: logger.debug('sql_query: ' + repr(exp)) raise @@ -92,11 +109,9 @@ def from_db(self, cursor=None): if cursor is None: raise AttributeError("Provide cursor as parameter") - # TODO: Don't allow if role is only criteria - - sql_query = "SELECT id, globalIdenttifyer, activated " \ - "FROM MyDataAccount.Accounts " \ - "WHERE id LIKE %s AND globalIdenttifyer LIKE %s AND activated LIKE %s;" + sql_query = "SELECT id, globalIdentifier, activated " \ + "FROM " + self.table_name + " " \ + "WHERE id LIKE %s AND globalIdentifier LIKE %s AND activated LIKE %s;" arguments = ( '%' + str(self.id) + '%', @@ -133,8 +148,10 @@ class LocalIdentity(): username = None pwd_id = None accounts_id = None + table_name = "" + deleted = "" - def __init__(self, id="", username="", pwd_id="", accounts_id=""): + def __init__(self, id="", username="", pwd_id="", accounts_id="", deleted=0, table_name="MyDataAccount.LocalIdentities"): if id is not 
None: self.id = id if username is not None: @@ -143,6 +160,14 @@ def __init__(self, id="", username="", pwd_id="", accounts_id=""): self.pwd_id = pwd_id if accounts_id is not None: self.accounts_id = accounts_id + if table_name is not None: + self.table_name = table_name + if deleted is not None: + self.deleted = deleted + + @property + def table_name(self): + return self.table_name @property def id(self): @@ -182,9 +207,11 @@ def to_dict(self): @property def to_dict_external(self): - dictionary = self.__dict__ + dictionary = self.to_dict del dictionary['id'] del dictionary['accounts_id'] + del dictionary['table_name'] + del dictionary['deleted'] return dictionary @property @@ -197,12 +224,17 @@ def log_entry(self): def to_db(self, cursor=""): - sql_query = "INSERT INTO LocalIdentities (username, Accounts_id, LocalIdentityPWDs_id) " \ - "VALUES ('%s', '%s', '%s')" % \ - (self.username, self.accounts_id, self.pwd_id) + sql_query = "INSERT INTO " + self.table_name + " (username, Accounts_id, LocalIdentityPWDs_id) " \ + "VALUES (%s, %s, %s)" + + arguments = ( + str(self.username), + int(self.accounts_id), + str(self.pwd_id), + ) try: - cursor, last_id = execute_sql_insert(cursor=cursor, sql_query=sql_query) + cursor, last_id = execute_sql_insert_2(cursor=cursor, sql_query=sql_query, arguments=arguments) except Exception as exp: logger.debug('sql_query: ' + repr(exp)) raise @@ -217,7 +249,7 @@ def from_db(self, cursor=None): # TODO: Don't allow if role is only criteria sql_query = "SELECT id, username, LocalIdentityPWDs_id, Accounts_id " \ - "FROM MyDataAccount.LocalIdentities " \ + "FROM " + self.table_name + " " \ "WHERE id LIKE %s AND username LIKE %s AND LocalIdentityPWDs_id LIKE %s AND Accounts_id LIKE %s;" arguments = ( @@ -254,12 +286,22 @@ def from_db(self, cursor=None): class LocalIdentityPWD(): id = None password = None + table_name = "" + deleted = "" - def __init__(self, id="", password=""): + def __init__(self, id="", password="", deleted=0, 
table_name="MyDataAccount.LocalIdentityPWDs"): if id is not None: self.id = id if password is not None: self.password = password + if table_name is not None: + self.table_name = table_name + if deleted is not None: + self.deleted = deleted + + @property + def table_name(self): + return self.table_name @property def id(self): @@ -283,8 +325,10 @@ def to_dict(self): @property def to_dict_external(self): - dictionary = self.__dict__ + dictionary = self.to_dict del dictionary['id'] + del dictionary['table_name'] + del dictionary['deleted'] return dictionary @property @@ -297,10 +341,14 @@ def log_entry(self): def to_db(self, cursor=""): - sql_query = "INSERT INTO LocalIdentityPWDs (password) VALUES ('%s')" % (self.password) + sql_query = "INSERT INTO " + self.table_name + " (password) VALUES (%s)" + + arguments = ( + str(self.password), + ) try: - cursor, last_id = execute_sql_insert(cursor=cursor, sql_query=sql_query) + cursor, last_id = execute_sql_insert_2(cursor=cursor, sql_query=sql_query, arguments=arguments) except Exception as exp: logger.debug('sql_query: ' + repr(exp)) raise @@ -315,7 +363,7 @@ def from_db(self, cursor=None): # TODO: Don't allow if role is only criteria sql_query = "SELECT id, password " \ - "FROM MyDataAccount.LocalIdentityPWDs " \ + "FROM " + self.table_name + " " \ "WHERE id LIKE %s AND password LIKE %s;" arguments = ( @@ -350,8 +398,10 @@ class OneTimeCookie(): created = None updated = None identity_id = None + table_name = "" + deleted = "" - def __init__(self, id="", cookie="", used="", created="", updated="", identity_id=""): + def __init__(self, id="", cookie="", used="", created="", updated="", identity_id="", deleted=0, table_name="MyDataAccount.OneTimeCookies"): if id is not None: self.id = id if cookie is not None: @@ -364,6 +414,14 @@ def __init__(self, id="", cookie="", used="", created="", updated="", identity_i self.updated = updated if identity_id is not None: self.identity_id = identity_id + if table_name is not None: + 
self.table_name = table_name + if deleted is not None: + self.deleted = deleted + + @property + def table_name(self): + return self.table_name @property def id(self): @@ -419,8 +477,10 @@ def to_dict(self): @property def to_dict_external(self): - dictionary = self.__dict__ + dictionary = self.to_dict del dictionary['id'] + del dictionary['table_name'] + del dictionary['deleted'] return dictionary @property @@ -433,12 +493,16 @@ def log_entry(self): def to_db(self, cursor=""): - sql_query = "INSERT INTO OneTimeCookie (oneTimeCookie, LocalIdentities_id) " \ - "VALUES ('%s', '%s')" % \ - (self.cookie, self.identity_id) + sql_query = "INSERT INTO " + self.table_name + " (oneTimeCookie, LocalIdentities_id) " \ + "VALUES (%s, %s)" + + arguments = ( + str(self.cookie), + int(self.identity_id), + ) try: - cursor, last_id = execute_sql_insert(cursor=cursor, sql_query=sql_query) + cursor, last_id = execute_sql_insert_2(cursor=cursor, sql_query=sql_query, arguments=arguments) except Exception as exp: logger.debug('sql_query: ' + repr(exp)) raise @@ -450,10 +514,8 @@ def from_db(self, cursor=None): if cursor is None: raise AttributeError("Provide cursor as parameter") - # TODO: Don't allow if role is only criteria - sql_query = "SELECT id, oneTimeCookie, used, created, updated, LocalIdentities_id " \ - "FROM MyDataAccount.OneTimeCookies " \ + "FROM " + self.table_name + " " \ "WHERE id LIKE %s AND oneTimeCookie LIKE %s AND used LIKE %s AND created LIKE %s " \ "AND updated LIKE %s AND LocalIdentities_id LIKE %s;" @@ -498,14 +560,24 @@ class Salt(): id = None salt = None identity_id = None + table_name = "" + deleted = "" - def __init__(self, id="", salt="", identity_id=""): + def __init__(self, id="", salt="", identity_id="", deleted=0, table_name="MyDataAccount.Salts"): if id is not None: self.id = id if salt is not None: self.salt = salt if identity_id is not None: self.identity_id = identity_id + if table_name is not None: + self.table_name = table_name + if deleted is not 
None: + self.deleted = deleted + + @property + def table_name(self): + return self.table_name @property def id(self): @@ -537,8 +609,10 @@ def to_dict(self): @property def to_dict_external(self): - dictionary = self.__dict__ + dictionary = self.to_dict del dictionary['id'] + del dictionary['table_name'] + del dictionary['deleted'] return dictionary @property @@ -551,11 +625,15 @@ def log_entry(self): def to_db(self, cursor=""): - sql_query = "INSERT INTO Salts (salt, LocalIdentities_id) VALUES ('%s', '%s')" % \ - (self.salt, self.identity_id) + sql_query = "INSERT INTO " + self.table_name + " (salt, LocalIdentities_id) VALUES (%s, %s)" + + arguments = ( + str(self.salt), + int(self.identity_id), + ) try: - cursor, last_id = execute_sql_insert(cursor=cursor, sql_query=sql_query) + cursor, last_id = execute_sql_insert_2(cursor=cursor, sql_query=sql_query, arguments=arguments) except Exception as exp: logger.debug('sql_query: ' + repr(exp)) raise @@ -570,7 +648,7 @@ def from_db(self, cursor=None): # TODO: Don't allow if role is only criteria sql_query = "SELECT id, salt, LocalIdentities_id " \ - "FROM MyDataAccount.Salts " \ + "FROM " + self.table_name + " " \ "WHERE id LIKE %s AND salt LIKE %s AND LocalIdentities_id LIKE %s;" arguments = ( @@ -610,8 +688,10 @@ class Particulars(): date_of_birth = None img_url = None account_id = None + table_name = "" + deleted = "" - def __init__(self, id="", firstname="", lastname="", date_of_birth="", img_url=app.config['AVATAR_URL'], account_id=""): + def __init__(self, id="", firstname="", lastname="", date_of_birth="", img_url=app.config['AVATAR_URL'], account_id="", deleted=0, table_name="MyDataAccount.Particulars"): if id is not None: self.id = id if firstname is not None: @@ -624,6 +704,14 @@ def __init__(self, id="", firstname="", lastname="", date_of_birth="", img_url=a self.img_url = str(img_url) if account_id is not None: self.account_id = account_id + if table_name is not None: + self.table_name = table_name + if 
deleted is not None: + self.deleted = deleted + + @property + def table_name(self): + return self.table_name @property def id(self): @@ -679,13 +767,25 @@ def full_name(self): @property def to_dict(self): + if isinstance(self.date_of_birth, datetime.date): + self.date_of_birth = self.date_of_birth.strftime("%Y-%m-%d") return self.__dict__ @property def to_dict_external(self): - dictionary = self.__dict__ + dictionary = self.to_dict del dictionary['id'] del dictionary['account_id'] + del dictionary['table_name'] + del dictionary['deleted'] + return dictionary + + @property + def to_api_dict(self): + dictionary = {} + dictionary['type'] = "Particular" + dictionary['id'] = str(self.id) + dictionary['attributes'] = self.to_dict_external return dictionary @property @@ -702,11 +802,19 @@ def __repr__(self): def to_db(self, cursor=""): - sql_query = "INSERT INTO Particulars (firstname, lastname, dateOfBirth, img_url, Accounts_id) " \ - "VALUES ('%s', '%s', STR_TO_DATE('%s', '%%d-%%m-%%Y'), '%s', '%s')" % \ - (self.firstname, self.lastname, self.date_of_birth, self.img_url, self.account_id) + sql_query = "INSERT INTO " + self.table_name + " (firstname, lastname, dateOfBirth, img_url, Accounts_id) " \ + "VALUES (%s, %s, STR_TO_DATE(%s, '%%Y-%%m-%%d'), %s, %s)" + + arguments = ( + str(self.firstname), + str(self.lastname), + str(self.date_of_birth), + str(self.img_url), + int(self.account_id), + ) + try: - cursor, last_id = execute_sql_insert(cursor=cursor, sql_query=sql_query) + cursor, last_id = execute_sql_insert_2(cursor=cursor, sql_query=sql_query, arguments=arguments) except Exception as exp: logger.debug('sql_query: ' + repr(exp)) raise @@ -714,23 +822,55 @@ def to_db(self, cursor=""): self.id = last_id return cursor + def update_db(self, cursor=""): + + sql_query = "UPDATE " + self.table_name + " SET firstname=%s, lastname=%s, dateOfBirth=STR_TO_DATE(%s, '%%Y-%%m-%%d'), img_url=%s " \ + "WHERE id=%s AND Accounts_id=%s" + + arguments = ( + str(self.firstname), + 
str(self.lastname), + str(self.date_of_birth), + str(self.img_url), + str(self.id), + str(self.account_id), + ) + + try: + cursor = execute_sql_update(cursor=cursor, sql_query=sql_query, arguments=arguments) + except Exception as exp: + logger.debug('sql_query: ' + repr(exp)) + raise + else: + logger.info("SQL query executed") + return cursor + def from_db(self, cursor=None): if cursor is None: raise AttributeError("Provide cursor as parameter") - # TODO: Don't allow if role is only criteria + # Querying with all data disabled due formatting problems + # TODO: Enable Querying with Date + # sql_query = "SELECT id, firstname, lastname, dateOfBirth, img_url, Accounts_id " \ + # "FROM " + self.table_name + " " \ + # "WHERE id LIKE %s AND firstname LIKE %s AND lastname LIKE %s AND dateOfBirth LIKE %s " \ + # "AND img_url LIKE %s AND Accounts_id LIKE %s;" + # + # arguments = ( + # '%' + str(self.id) + '%', + # '%' + str(self.firstname) + '%', + # '%' + str(self.lastname) + '%', + # '%' + str(self.date_of_birth) + '%', + # '%' + str(self.img_url) + '%', + # '%' + str(self.account_id) + '%', + # ) sql_query = "SELECT id, firstname, lastname, dateOfBirth, img_url, Accounts_id " \ - "FROM MyDataAccount.Particulars " \ - "WHERE id LIKE %s AND firstname LIKE %s AND lastname LIKE %s AND dateOfBirth LIKE %s " \ - "AND img_url LIKE %s AND Accounts_id LIKE %s;" + "FROM " + self.table_name + " " \ + "WHERE id LIKE %s AND Accounts_id LIKE %s;" arguments = ( '%' + str(self.id) + '%', - '%' + str(self.firstname) + '%', - '%' + str(self.lastname) + '%', - '%' + str(self.date_of_birth) + '%', - '%' + str(self.img_url) + '%', '%' + str(self.account_id) + '%', ) @@ -768,8 +908,10 @@ class Email(): type = None prime = None account_id = None + table_name = "" + deleted = "" - def __init__(self, id="", email="", type="Personal", prime="", account_id=""): + def __init__(self, id="", email="", type="Personal", prime="", account_id="", deleted=0, table_name="MyDataAccount.Emails"): if id is not 
None: self.id = id if email is not None: @@ -777,9 +919,20 @@ def __init__(self, id="", email="", type="Personal", prime="", account_id=""): if type is not None: self.type = type if prime is not None: - self.prime = prime + if prime == "True": + self.prime = 1 + else: + self.prime = 0 if account_id is not None: self.account_id = account_id + if table_name is not None: + self.table_name = table_name + if deleted is not None: + self.deleted = deleted + + @property + def table_name(self): + return self.table_name @property def id(self): @@ -827,9 +980,23 @@ def to_dict(self): @property def to_dict_external(self): - dictionary = self.__dict__ + dictionary = self.to_dict del dictionary['id'] del dictionary['account_id'] + del dictionary['table_name'] + del dictionary['deleted'] + if dictionary['prime'] == 1: + dictionary['prime'] = "True" + elif dictionary['prime'] == 0: + dictionary['prime'] = "False" + return dictionary + + @property + def to_api_dict(self): + dictionary = {} + dictionary['type'] = "Email" + dictionary['id'] = str(self.id) + dictionary['attributes'] = self.to_dict_external return dictionary @property @@ -842,12 +1009,18 @@ def log_entry(self): def to_db(self, cursor=""): - sql_query = "INSERT INTO Emails (email, typeEnum, prime, Accounts_id) " \ - "VALUES ('%s', '%s', '%s', '%s')" % \ - (self.email, self.type, self.prime, self.account_id) + sql_query = "INSERT INTO " + self.table_name + " (email, entryType, prime, Accounts_id) " \ + "VALUES (%s, %s, %s, %s)" + + arguments = ( + str(self.email), + str(self.type), + str(self.prime), + int(self.account_id), + ) try: - cursor, last_id = execute_sql_insert(cursor=cursor, sql_query=sql_query) + cursor, last_id = execute_sql_insert_2(cursor=cursor, sql_query=sql_query, arguments=arguments) except Exception as exp: logger.debug('sql_query: ' + repr(exp)) raise @@ -859,17 +1032,12 @@ def from_db(self, cursor=None): if cursor is None: raise AttributeError("Provide cursor as parameter") - # TODO: Don't allow if 
role is only criteria - - sql_query = "SELECT id, email, typeEnum, prime, Accounts_id " \ - "FROM MyDataAccount.Particulars " \ - "WHERE id LIKE %s AND email LIKE %s AND typeEnum LIKE %s AND prime LIKE %s AND Accounts_id LIKE %s;" + sql_query = "SELECT id, email, entryType, prime, Accounts_id " \ + "FROM " + self.table_name + " " \ + "WHERE id LIKE %s AND Accounts_id LIKE %s;" arguments = ( '%' + str(self.id) + '%', - '%' + str(self.email) + '%', - '%' + str(self.type) + '%', - '%' + str(self.prime) + '%', '%' + str(self.account_id) + '%', ) @@ -897,6 +1065,28 @@ def from_db(self, cursor=None): return cursor + def update_db(self, cursor=""): + + sql_query = "UPDATE " + self.table_name + " SET email=%s, entryType=%s, prime=%s " \ + "WHERE id=%s AND Accounts_id=%s" + + arguments = ( + str(self.email), + str(self.type), + str(self.prime), + str(self.id), + str(self.account_id), + ) + + try: + cursor = execute_sql_update(cursor=cursor, sql_query=sql_query, arguments=arguments) + except Exception as exp: + logger.debug('sql_query: ' + repr(exp)) + raise + else: + logger.info("SQL query executed") + return cursor + ##################### class Telephone(): @@ -905,8 +1095,10 @@ class Telephone(): type = None prime = None account_id = None + table_name = "" + deleted = "" - def __init__(self, id="", tel="", type="Personal", prime="", account_id=""): + def __init__(self, id="", tel="", type="Personal", prime="", account_id="", deleted=0, table_name="MyDataAccount.Telephones"): if id is not None: self.id = id if tel is not None: @@ -914,9 +1106,20 @@ def __init__(self, id="", tel="", type="Personal", prime="", account_id=""): if type is not None: self.type = type if prime is not None: - self.prime = prime + if prime == "True": + self.prime = 1 + else: + self.prime = 0 if account_id is not None: self.account_id = account_id + if table_name is not None: + self.table_name = table_name + if deleted is not None: + self.deleted = deleted + + @property + def table_name(self): + 
return self.table_name @property def id(self): @@ -964,9 +1167,23 @@ def to_dict(self): @property def to_dict_external(self): - dictionary = self.__dict__ + dictionary = self.to_dict del dictionary['id'] del dictionary['account_id'] + del dictionary['table_name'] + del dictionary['deleted'] + if dictionary['prime'] == 1: + dictionary['prime'] = "True" + elif dictionary['prime'] == 0: + dictionary['prime'] = "False" + return dictionary + + @property + def to_api_dict(self): + dictionary = {} + dictionary['type'] = "Telephone" + dictionary['id'] = str(self.id) + dictionary['attributes'] = self.to_dict_external return dictionary @property @@ -979,12 +1196,18 @@ def log_entry(self): def to_db(self, cursor=""): - sql_query = "INSERT INTO Telephones (tel, typeEnum, prime, Accounts_id) " \ - "VALUES ('%s', '%s', '%s', '%s')" % \ - (self.tel, self.type, self.prime, self.account_id) + sql_query = "INSERT INTO " + self.table_name + " (tel, entryType, prime, Accounts_id) " \ + "VALUES (%s, %s, %s, %s)" + + arguments = ( + str(self.tel), + str(self.type), + str(self.prime), + int(self.account_id), + ) try: - cursor, last_id = execute_sql_insert(cursor=cursor, sql_query=sql_query) + cursor, last_id = execute_sql_insert_2(cursor=cursor, sql_query=sql_query, arguments=arguments) except Exception as exp: logger.debug('sql_query: ' + repr(exp)) raise @@ -998,15 +1221,12 @@ def from_db(self, cursor=None): # TODO: Don't allow if role is only criteria - sql_query = "SELECT id, tel, typeEnum, prime, Accounts_id " \ - "FROM MyDataAccount.Particulars " \ - "WHERE id LIKE %s AND tel LIKE %s AND typeEnum LIKE %s AND prime LIKE %s AND Accounts_id LIKE %s;" + sql_query = "SELECT id, tel, entryType, prime, Accounts_id " \ + "FROM " + self.table_name + " " \ + "WHERE id LIKE %s AND Accounts_id LIKE %s;" arguments = ( '%' + str(self.id) + '%', - '%' + str(self.tel) + '%', - '%' + str(self.type) + '%', - '%' + str(self.prime) + '%', '%' + str(self.account_id) + '%', ) @@ -1034,6 +1254,28 @@ def 
from_db(self, cursor=None): return cursor + def update_db(self, cursor=""): + + sql_query = "UPDATE " + self.table_name + " SET tel=%s, entryType=%s, prime=%s " \ + "WHERE id=%s AND Accounts_id=%s" + + arguments = ( + str(self.tel), + str(self.type), + str(self.prime), + str(self.id), + str(self.account_id), + ) + + try: + cursor = execute_sql_update(cursor=cursor, sql_query=sql_query, arguments=arguments) + except Exception as exp: + logger.debug('sql_query: ' + repr(exp)) + raise + else: + logger.info("SQL query executed") + return cursor + ##################### class Settings(): @@ -1041,8 +1283,10 @@ class Settings(): key = None value = None account_id = None + table_name = "" + deleted = "" - def __init__(self, id="", key="", value="", account_id=""): + def __init__(self, id="", key="", value="", account_id="", deleted=0, table_name="MyDataAccount.Settings"): if id is not None: self.id = id if key is not None: @@ -1053,6 +1297,14 @@ def __init__(self, id="", key="", value="", account_id=""): self.value = value if account_id is not None: self.account_id = account_id + if table_name is not None: + self.table_name = table_name + if deleted is not None: + self.deleted = deleted + + @property + def table_name(self): + return self.table_name @property def id(self): @@ -1092,9 +1344,19 @@ def to_dict(self): @property def to_dict_external(self): - dictionary = self.__dict__ + dictionary = self.to_dict del dictionary['id'] del dictionary['account_id'] + del dictionary['table_name'] + del dictionary['deleted'] + return dictionary + + @property + def to_api_dict(self): + dictionary = {} + dictionary['type'] = "Setting" + dictionary['id'] = str(self.id) + dictionary['attributes'] = self.to_dict_external return dictionary @property @@ -1107,12 +1369,17 @@ def log_entry(self): def to_db(self, cursor=""): - sql_query = "INSERT INTO Settings (prefLang, timezone, Accounts_id) " \ - "VALUES ('%s', '%s', '%s')" % \ - (self.pref_lang, self.timezone, self.account_id) + sql_query = 
"INSERT INTO " + self.table_name + " (setting_key, setting_value, Accounts_id) " \ + "VALUES (%s, %s, %s)" + + arguments = ( + str(self.key), + str(self.value), + int(self.account_id), + ) try: - cursor, last_id = execute_sql_insert(cursor=cursor, sql_query=sql_query) + cursor, last_id = execute_sql_insert_2(cursor=cursor, sql_query=sql_query, arguments=arguments) except Exception as exp: logger.debug('sql_query: ' + repr(exp)) raise @@ -1124,14 +1391,12 @@ def from_db(self, cursor=None): if cursor is None: raise AttributeError("Provide cursor as parameter") - sql_query = "SELECT id, key, value, Accounts_id " \ - "FROM MyDataAccount.Settings " \ - "WHERE id LIKE %s AND key LIKE %s AND value LIKE %s AND Accounts_id LIKE %s;" + sql_query = "SELECT id, setting_key, setting_value, Accounts_id " \ + "FROM " + self.table_name + " " \ + "WHERE id LIKE %s AND Accounts_id LIKE %s;" arguments = ( '%' + str(self.id) + '%', - '%' + str(self.key) + '%', - '%' + str(self.value) + '%', '%' + str(self.account_id) + '%', ) @@ -1157,6 +1422,27 @@ def from_db(self, cursor=None): return cursor + def update_db(self, cursor=""): + + sql_query = "UPDATE " + self.table_name + " SET setting_key=%s, setting_value=%s " \ + "WHERE id=%s AND Accounts_id=%s" + + arguments = ( + str(self.key), + str(self.value), + str(self.id), + str(self.account_id), + ) + + try: + cursor = execute_sql_update(cursor=cursor, sql_query=sql_query, arguments=arguments) + except Exception as exp: + logger.debug('sql_query: ' + repr(exp)) + raise + else: + logger.info("SQL query executed") + return cursor + ##################### class EventLog(): @@ -1165,8 +1451,10 @@ class EventLog(): event = None created = None account_id = None + table_name = "" + deleted = "" - def __init__(self, id="", actor="", event="", created="", account_id=""): + def __init__(self, id="", actor="", event="", created="", account_id="", deleted=0, table_name="MyDataAccount.EventLogs"): if id is not None: self.id = id if actor is not None: @@ 
-1177,6 +1465,14 @@ def __init__(self, id="", actor="", event="", created="", account_id=""): self.created = created if account_id is not None: self.account_id = account_id + if table_name is not None: + self.table_name = table_name + if deleted is not None: + self.deleted = deleted + + @property + def table_name(self): + return self.table_name @property def id(self): @@ -1210,6 +1506,14 @@ def created(self): def created(self, value): self.created = value + @property + def table_name(self): + return self.table_name + + @table_name.setter + def table_name(self, value): + self._table_name = value + @property def account_id(self): return self.account_id @@ -1224,9 +1528,19 @@ def to_dict(self): @property def to_dict_external(self): - dictionary = self.__dict__ + dictionary = self.to_dict del dictionary['id'] del dictionary['account_id'] + del dictionary['table_name'] + del dictionary['deleted'] + return dictionary + + @property + def to_api_dict(self): + dictionary = {} + dictionary['type'] = "Event" + dictionary['id'] = str(self.id) + dictionary['attributes'] = self.to_dict_external return dictionary @property @@ -1239,12 +1553,18 @@ def log_entry(self): def to_db(self, cursor=""): - sql_query = "INSERT INTO EventLogs (actor, event, created, Accounts_id) " \ - "VALUES ('%s', '%s', '%s', '%s')" % \ - (self.actor, self.event, self.created, self.account_id) + sql_query = "INSERT INTO " + self.table_name + " (actor, event, created, Accounts_id) " \ + "VALUES (%s, %s, %s, %s)" + + arguments = ( + str(self.actor), + str(self.event), + int(self.created), + int(self.account_id), + ) try: - cursor, last_id = execute_sql_insert(cursor=cursor, sql_query=sql_query) + cursor, last_id = execute_sql_insert_2(cursor=cursor, sql_query=sql_query, arguments=arguments) except Exception as exp: logger.debug('sql_query: ' + repr(exp)) raise @@ -1256,17 +1576,12 @@ def from_db(self, cursor=None): if cursor is None: raise AttributeError("Provide cursor as parameter") - # TODO: Don't allow 
if role is only criteria - sql_query = "SELECT id, actor, event, created, Accounts_id " \ - "FROM MyDataAccount.EventLogs " \ - "WHERE id LIKE %s AND actor LIKE %s AND event LIKE %s AND created LIKE %s AND Accounts_id LIKE %s;" + "FROM " + self.table_name + " " \ + "WHERE id LIKE %s AND Accounts_id LIKE %s;" arguments = ( '%' + str(self.id) + '%', - '%' + str(self.actor) + '%', - '%' + str(self.event) + '%', - '%' + str(self.created) + '%', '%' + str(self.account_id) + '%', ) @@ -1292,6 +1607,14 @@ def from_db(self, cursor=None): self.created = data[0][3] self.account_id = data[4] + try: + event_copy = self.event + logger.info("event to dict") + self.event = json.loads(self.event) + except Exception as exp: + logger.info("Could not event consent_status_record to dict. Using original") + self.event = event_copy + return cursor @@ -1304,11 +1627,13 @@ class Contacts(): city = None state = None country = None - typeEnum = None + entryType = None prime = None account_id = None + table_name = "" + deleted = "" - def __init__(self, id="", address1="", address2="", postal_code="", city="", state="", country="", type="Personal", prime="", account_id=""): + def __init__(self, id="", address1="", address2="", postal_code="", city="", state="", country="", type="Personal", prime="", account_id="", deleted=0, table_name="MyDataAccount.Contacts"): if id is not None: self.id = id if address1 is not None: @@ -1326,9 +1651,20 @@ def __init__(self, id="", address1="", address2="", postal_code="", city="", sta if type is not None: self.type = type if prime is not None: - self.prime = prime + if prime == "True": + self.prime = 1 + else: + self.prime = 0 if account_id is not None: self.account_id = account_id + if table_name is not None: + self.table_name = table_name + if deleted is not None: + self.deleted = deleted + + @property + def table_name(self): + return self.table_name @property def id(self): @@ -1416,9 +1752,23 @@ def to_dict(self): @property def to_dict_external(self): - 
dictionary = self.__dict__ + dictionary = self.to_dict del dictionary['id'] del dictionary['account_id'] + del dictionary['table_name'] + del dictionary['deleted'] + if dictionary['prime'] == 1: + dictionary['prime'] = "True" + elif dictionary['prime'] == 0: + dictionary['prime'] = "False" + return dictionary + + @property + def to_api_dict(self): + dictionary = {} + dictionary['type'] = "Contact" + dictionary['id'] = str(self.id) + dictionary['attributes'] = self.to_dict_external return dictionary @property @@ -1431,12 +1781,23 @@ def log_entry(self): def to_db(self, cursor=""): - sql_query = "INSERT INTO Contacts (address1, address2, postalCode, city, state, country, typeEnum, prime, Accounts_id) " \ - "VALUES ('%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')" % \ - (self.address1, self.address2, self.postal_code, self.city, self.state, self.country, self.type, self.prime, self.account_id) + sql_query = "INSERT INTO " + self.table_name + " (address1, address2, postalCode, city, state, country, entryType, prime, Accounts_id) " \ + "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)" + + arguments = ( + str(self.address1), + str(self.address2), + str(self.postal_code), + str(self.city), + str(self.state), + str(self.country), + str(self.type), + str(self.prime), + int(self.account_id), + ) try: - cursor, last_id = execute_sql_insert(cursor=cursor, sql_query=sql_query) + cursor, last_id = execute_sql_insert_2(cursor=cursor, sql_query=sql_query, arguments=arguments) except Exception as exp: logger.debug('sql_query: ' + repr(exp)) raise @@ -1444,28 +1805,44 @@ def to_db(self, cursor=""): self.id = last_id return cursor + def update_db(self, cursor=""): + + sql_query = "UPDATE " + self.table_name + " SET address1=%s, address2=%s, postalCode=%s, city=%s, state=%s, " \ + "country=%s, entryType=%s, prime=%s " \ + "WHERE id=%s AND Accounts_id=%s" + + arguments = ( + str(self.address1), + str(self.address2), + str(self.postal_code), + str(self.city), + str(self.state), + 
str(self.country), + str(self.type), + str(self.prime), + str(self.id), + str(self.account_id), + ) + + try: + cursor = execute_sql_update(cursor=cursor, sql_query=sql_query, arguments=arguments) + except Exception as exp: + logger.debug('sql_query: ' + repr(exp)) + raise + else: + logger.info("SQL query executed") + return cursor + def from_db(self, cursor=None): if cursor is None: raise AttributeError("Provide cursor as parameter") - # TODO: Don't allow if role is only criteria - - sql_query = "SELECT id, address1, address2, postal_code, city, state, country, typeEnum, prime, Accounts_id " \ - "FROM MyDataAccount.Contacts " \ - "WHERE id LIKE %s AND address1 LIKE %s AND address2 LIKE %s AND postal_code LIKE %s " \ - "AND city LIKE %s AND state LIKE %s AND country LIKE %s AND typeEnum LIKE %s " \ - "AND prime LIKE %s AND Accounts_id LIKE %s;" + sql_query = "SELECT id, address1, address2, postalCode, city, state, country, entryType, prime, Accounts_id " \ + "FROM " + self.table_name + " " \ + "WHERE id LIKE %s AND Accounts_id LIKE %s;" arguments = ( '%' + str(self.id) + '%', - '%' + str(self.address1) + '%', - '%' + str(self.address2) + '%', - '%' + str(self.postal_code) + '%', - '%' + str(self.city) + '%', - '%' + str(self.state) + '%', - '%' + str(self.country) + '%', - '%' + str(self.typeEnum) + '%', - '%' + str(self.prime) + '%', '%' + str(self.account_id) + '%', ) @@ -1513,8 +1890,10 @@ class ServiceLinkRecord(): surrogate_id = None operator_id = None account_id = None + table_name = "" + deleted = "" - def __init__(self, id="", service_link_record="", service_link_record_id="", service_id="", surrogate_id="", operator_id="", account_id=""): + def __init__(self, id="", service_link_record="", service_link_record_id="", service_id="", surrogate_id="", operator_id="", account_id="", deleted=0, table_name="MyDataAccount.ServiceLinkRecords"): if id is not None: self.id = id if service_link_record is not None: @@ -1529,6 +1908,14 @@ def __init__(self, id="", 
service_link_record="", service_link_record_id="", ser self.surrogate_id = surrogate_id if account_id is not None: self.account_id = account_id + if table_name is not None: + self.table_name = table_name + if deleted is not None: + self.deleted = deleted + + @property + def table_name(self): + return self.table_name @property def id(self): @@ -1592,9 +1979,33 @@ def to_dict(self): @property def to_dict_external(self): - dictionary = self.__dict__ + dictionary = self.to_dict del dictionary['id'] del dictionary['account_id'] + del dictionary['table_name'] + del dictionary['deleted'] + return dictionary + + @property + def to_api_dict(self): + dictionary = {} + dictionary['type'] = "ServiceLinkRecord" + dictionary['id'] = str(self.service_link_record_id) + dictionary['attributes'] = self.to_dict_external + return dictionary + + @property + def to_record_dict_external(self): + dictionary = {} + dictionary["slr"] = self.service_link_record + return dictionary + + @property + def to_record_dict(self): + dictionary = {} + dictionary['type'] = "ServiceLinkRecord" + dictionary['id'] = str(self.service_link_record_id) + dictionary['attributes'] = self.to_record_dict_external return dictionary @property @@ -1609,10 +2020,10 @@ def to_db(self, cursor=""): # http://stackoverflow.com/questions/3617052/escape-string-python-for-mysql/27575399#27575399 # sql_query = "INSERT INTO ServiceLinkRecords (serviceLinkRecord, serviceLinkRecordId, serviceId, surrogateId, operatorId, Accounts_id) " \ - # "VALUES ('%s', '%s', '%s', '%s', '%s', '%s')" % \ + # "VALUES (%s, %s, %s, %s, %s, %s)" % \ # (self.service_link_record, self.service_link_record_id, self.service_id, self.surrogate_id, self.operator_id, self.account_id) - sql_query = "INSERT INTO ServiceLinkRecords (" \ + sql_query = "INSERT INTO " + self.table_name + " (" \ "serviceLinkRecord, " \ "serviceLinkRecordId, " \ "serviceId, " \ @@ -1622,12 +2033,12 @@ def to_db(self, cursor=""): ") VALUES (%s, %s, %s, %s, %s, %s)" arguments = ( - 
str(self.service_link_record), + json.dumps(self.service_link_record), str(self.service_link_record_id), str(self.service_id), str(self.surrogate_id), str(self.operator_id), - str(self.account_id), + int(self.account_id), ) try: @@ -1645,7 +2056,7 @@ def from_db(self, cursor=""): # TODO: Don't allow if role is only criteria sql_query = "SELECT id, serviceLinkRecord, Accounts_id, serviceLinkRecordId, serviceId, surrogateId, operatorId " \ - "FROM MyDataAccount.ServiceLinkRecords " \ + "FROM " + self.table_name + " " \ "WHERE id LIKE %s AND serviceLinkRecord LIKE %s AND serviceLinkRecordId LIKE %s AND " \ "serviceId LIKE %s AND surrogateId LIKE %s AND operatorId LIKE %s AND Accounts_id LIKE %s;" @@ -1684,6 +2095,16 @@ def from_db(self, cursor=""): self.service_id = data[4] self.surrogate_id = data[5] self.operator_id = data[6] + + try: + slr_copy = self.service_link_record + logger.info("service_link_record to dict") + self.service_link_record = json.loads(self.service_link_record) + except Exception as exp: + attribute_type = type(self.service_link_record) + logger.info("Could not convert service_link_record to dict. 
Type of attribute: " + repr(attribute_type) + " Using original" + repr(attribute_type) + " Using original: " + repr(exp)) + self.service_link_record = slr_copy + return cursor @@ -1696,8 +2117,10 @@ class ServiceLinkStatusRecord(): issued_at = None prev_record_id = None service_link_records_id = None + table_name = "" + deleted = "" - def __init__(self, id="", service_link_status_record_id="", status="", service_link_status_record="", service_link_record_id="", issued_at="", prev_record_id="", service_link_records_id=""): + def __init__(self, id="", service_link_status_record_id="", status="", service_link_status_record="", service_link_record_id="", issued_at="", prev_record_id="", service_link_records_id="", deleted=0, table_name="MyDataAccount.ServiceLinkStatusRecords"): if id is not None: self.id = id if service_link_status_record_id is not None: @@ -1714,6 +2137,14 @@ def __init__(self, id="", service_link_status_record_id="", status="", service_l self.prev_record_id = prev_record_id if service_link_records_id is not None: self.service_link_records_id = service_link_records_id + if table_name is not None: + self.table_name = table_name + if deleted is not None: + self.deleted = deleted + + @property + def table_name(self): + return self.table_name @property def id(self): @@ -1785,9 +2216,33 @@ def to_dict(self): @property def to_dict_external(self): - dictionary = self.__dict__ + dictionary = self.to_dict del dictionary['id'] del dictionary['service_link_records_id'] + del dictionary['table_name'] + del dictionary['deleted'] + return dictionary + + @property + def to_api_dict(self): + dictionary = {} + dictionary['type'] = "ServiceLinkStatusRecord" + dictionary['id'] = str(self.service_link_status_record_id) + dictionary['attributes'] = self.to_dict_external + return dictionary + + @property + def to_record_dict_external(self): + dictionary = {} + dictionary["slsr"] = self.service_link_status_record + return dictionary + + @property + def to_record_dict(self): 
+ dictionary = {} + dictionary['type'] = "ServiceLinkStatusRecord" + dictionary['id'] = str(self.service_link_status_record_id) + dictionary['attributes'] = self.to_record_dict_external return dictionary @property @@ -1801,10 +2256,10 @@ def log_entry(self): def to_db(self, cursor=""): # sql_query = "INSERT INTO ServiceLinkRecords (serviceLinkStatusRecordId, status, serviceLinkStatusRecord, ServiceLinkRecords_id, serviceLinkRecordId, issued_at, prevRecordId) " \ - # "VALUES ('%s','%s', '%s', '%s', '%s', '%s', '%s')" % \ + # "VALUES (%s,%s, %s, %s, %s, %s, %s)" % \ # (self.service_link_status_record_id, self.status, self.service_link_status_record, self.service_link_records_id, self.service_link_record_id, self.issued_at, self.prev_record_id) - sql_query = "INSERT INTO ServiceLinkStatusRecords (" \ + sql_query = "INSERT INTO " + self.table_name + " (" \ "serviceLinkStatusRecordId, " \ "serviceLinkStatus, " \ "serviceLinkStatusRecord, " \ @@ -1817,10 +2272,10 @@ def to_db(self, cursor=""): arguments = ( str(self.service_link_status_record_id), str(self.status), - str(self.service_link_status_record), + json.dumps(self.service_link_status_record), int(self.service_link_records_id), str(self.service_link_record_id), - str(self.issued_at), + int(self.issued_at), str(self.prev_record_id), ) @@ -1842,7 +2297,7 @@ def from_db(self, cursor=None): sql_query = "SELECT id, serviceLinkStatus, serviceLinkStatusRecord, ServiceLinkRecords_id, serviceLinkRecordId, " \ "issued_at, prevRecordId, serviceLinkStatusRecordId " \ - "FROM MyDataAccount.ServiceLinkStatusRecords " \ + "FROM " + self.table_name + " " \ "WHERE id LIKE %s AND serviceLinkStatus LIKE %s AND serviceLinkStatusRecord LIKE %s AND " \ "ServiceLinkRecords_id LIKE %s AND serviceLinkRecordId LIKE %s AND issued_at LIKE %s AND " \ "prevRecordId LIKE %s AND serviceLinkStatusRecordId LIKE %s;" @@ -1886,21 +2341,37 @@ def from_db(self, cursor=None): self.prev_record_id = data[6] self.service_link_status_record_id = data[7] + 
try: + slsr_copy = self.service_link_status_record + logger.info("service_link_status_record to dict") + self.service_link_status_record = json.loads(self.service_link_status_record) + except Exception as exp: + attribute_type = type(self.service_link_status_record) + logger.info("Could not convert service_link_status_record to dict. Type of attribute: " + repr(attribute_type) + " Using original" + repr(attribute_type) + " Using original: " + repr(exp)) + self.service_link_status_record = slsr_copy + return cursor class SurrogateId(): # TODO: Rename to SlrIDs or similar + # TODO: How to react if slr is deleted? surrogate_id = None servicelinkrecord_id = None service_id = None account_id = None + table_name = "" + deleted = "" - def __init__(self, service_id=None, account_id=None): + def __init__(self, service_id=None, account_id=None, deleted=0, table_name="MyDataAccount.ServiceLinkRecords"): if service_id is not None: self.service_id = service_id if account_id is not None: self.account_id = account_id + if table_name is not None: + self.table_name = table_name + if deleted is not None: + self.deleted = deleted @property def surrogate_id(self): @@ -1932,10 +2403,18 @@ def to_dict(self): @property def to_dict_external(self): - dictionary = self.__dict__ + dictionary = self.to_dict del dictionary['id'] return dictionary + @property + def to_api_dict(self): + dictionary = {} + dictionary['type'] = "SurrogateId" + dictionary['id'] = str(self.id) + dictionary['attributes'] = self.to_dict_external + return dictionary + @property def to_json(self): return json.dumps(self.to_dict) @@ -1947,7 +2426,7 @@ def log_entry(self): def from_db(self, cursor=""): sql_query = "SELECT surrogateId, serviceLinkRecordId " \ - "FROM MyDataAccount.ServiceLinkRecords " \ + "FROM " + self.table_name + " " \ "WHERE serviceId LIKE %s AND Accounts_id LIKE %s ORDER BY id DESC LIMIT 1;" arguments = ( @@ -1984,8 +2463,10 @@ class ConsentRecord(): subject_id = None service_link_records_id = None 
role = None + table_name = "" + deleted = "" - def __init__(self, id="", consent_record="", consent_id="", surrogate_id="", resource_set_id="", service_link_record_id="", subject_id="", service_link_records_id="", role=""): + def __init__(self, id="", consent_record="", consent_id="", surrogate_id="", resource_set_id="", service_link_record_id="", subject_id="", service_link_records_id="", role="", deleted=0, table_name="MyDataAccount.ConsentRecords"): self.id = id self.consent_record = consent_record self.surrogate_id = surrogate_id @@ -1995,6 +2476,14 @@ def __init__(self, id="", consent_record="", consent_id="", surrogate_id="", res self.subject_id = subject_id self.service_link_records_id = service_link_records_id self.role = role + if table_name is not None: + self.table_name = table_name + if deleted is not None: + self.deleted = deleted + + @property + def table_name(self): + return self.table_name @property def id(self): @@ -2074,9 +2563,33 @@ def to_dict(self): @property def to_dict_external(self): - dictionary = self.__dict__ + dictionary = self.to_dict del dictionary['id'] del dictionary['service_link_records_id'] + del dictionary['table_name'] + del dictionary['deleted'] + return dictionary + + @property + def to_api_dict(self): + dictionary = {} + dictionary['type'] = "ConsentRecord" + dictionary['id'] = str(self.consent_id) + dictionary['attributes'] = self.to_dict_external + return dictionary + + @property + def to_record_dict_external(self): + dictionary = {} + dictionary["cr"] = self.consent_record + return dictionary + + @property + def to_record_dict(self): + dictionary = {} + dictionary['type'] = "ConsentRecord" + dictionary['id'] = str(self.consent_id) + dictionary['attributes'] = self.to_record_dict_external return dictionary @property @@ -2089,7 +2602,7 @@ def log_entry(self): def to_db(self, cursor=""): - sql_query = "INSERT INTO ConsentRecords (" \ + sql_query = "INSERT INTO " + self.table_name + " (" \ "consentRecord, " \ "surrogateId, " \ 
"consentRecordId, " \ @@ -2101,13 +2614,13 @@ def to_db(self, cursor=""): ") VALUES (%s, %s, %s, %s, %s, %s, %s, %s)" arguments = ( - str(self.consent_record), + json.dumps(self.consent_record), str(self.surrogate_id), str(self.consent_id), str(self.resource_set_id), str(self.service_link_record_id), str(self.subject_id), - str(self.service_link_records_id), + int(self.service_link_records_id), str(self.role), ) @@ -2126,7 +2639,7 @@ def from_db(self, cursor=""): # TODO: Don't allow if role is only criteria sql_query = "SELECT id, consentRecord, ServiceLinkRecords_id, surrogateId, consentRecordId, ResourceSetId, serviceLinkRecordId, subjectId, role " \ - "FROM MyDataAccount.ConsentRecords " \ + "FROM " + self.table_name + " " \ "WHERE id LIKE %s AND ServiceLinkRecords_id LIKE %s AND surrogateId LIKE %s AND " \ "consentRecordId LIKE %s AND ResourceSetId LIKE %s AND serviceLinkRecordId LIKE %s AND " \ "subjectId LIKE %s AND role LIKE %s;" @@ -2150,7 +2663,7 @@ def from_db(self, cursor=""): else: logger.debug("Got data: " + repr(data)) if len(data) == 0: - raise IndexError("Surrogate Id and serviceLinkRecordId could not be found with provided information") + raise IndexError("Consent Record could not be found with provided information") if len(data[0]): self.id = data[0][0] self.consent_record = data[0][1] @@ -2173,10 +2686,13 @@ def from_db(self, cursor=""): self.role = data[8] try: + cr_copy = self.consent_record + logger.info("consent_record to dict") self.consent_record = json.loads(self.consent_record) except Exception as exp: - logger.debug('Could not load json from consent_record: ' + repr(exp)) - raise + attribute_type = type(self.consent_record) + logger.error("Could not convert consent_record to dict. 
Type of attribute: " + repr(attribute_type) + " Using original: " + repr(exp)) + self.consent_record = cr_copy return cursor @@ -2184,15 +2700,20 @@ def from_db(self, cursor=""): class ConsentStatusRecord(): id = None status = None + consent_status_record_id = None consent_status_record = None consent_records_id = None consent_record_id = None issued_at = None prev_record_id = None + table_name = "" + deleted = "" - def __init__(self, id="", status="", consent_status_record="", consent_records_id="", consent_record_id="", issued_at="", prev_record_id=""): + def __init__(self, id="", consent_status_record_id="", status="", consent_status_record="", consent_records_id="", consent_record_id="", issued_at="", prev_record_id="", deleted=0, table_name="MyDataAccount.ConsentStatusRecords"): if id is not None: self.id = id + if consent_status_record_id is not None: + self.consent_status_record_id = consent_status_record_id if status is not None: self.status = status if consent_status_record is not None: @@ -2205,6 +2726,14 @@ def __init__(self, id="", status="", consent_status_record="", consent_records_i self.issued_at = issued_at if prev_record_id is not None: self.prev_record_id = prev_record_id + if table_name is not None: + self.table_name = table_name + if deleted is not None: + self.deleted = deleted + + @property + def table_name(self): + return self.table_name @property def id(self): @@ -2214,6 +2743,14 @@ def id(self): def id(self, value): self.id = value + @property + def consent_status_record_id(self): + return self.consent_status_record_id + + @consent_status_record_id.setter + def consent_status_record_id(self, value): + self.consent_status_record_id = value + @property def status(self): return self.status @@ -2268,9 +2805,33 @@ def to_dict(self): @property def to_dict_external(self): - dictionary = self.__dict__ + dictionary = self.to_dict del dictionary['id'] del dictionary['consent_records_id'] + del dictionary['table_name'] + del dictionary['deleted'] + 
return dictionary + + @property + def to_api_dict(self): + dictionary = {} + dictionary['type'] = "ConsentStatusRecord" + dictionary['id'] = str(self.consent_status_record_id) + dictionary['attributes'] = self.to_dict_external + return dictionary + + @property + def to_record_dict_external(self): + dictionary = {} + dictionary["csr"] = self.consent_status_record + return dictionary + + @property + def to_record_dict(self): + dictionary = {} + dictionary['type'] = "ConsentStatusRecord" + dictionary['id'] = str(self.consent_status_record_id) + dictionary['attributes'] = self.to_record_dict_external return dictionary @property @@ -2283,21 +2844,23 @@ def log_entry(self): def to_db(self, cursor=""): - sql_query = "INSERT INTO ConsentStatusRecords (" \ + sql_query = "INSERT INTO " + self.table_name + " (" \ + "consentStatusRecordId, " \ "consentStatus, " \ "consentStatusRecord, " \ "ConsentRecords_id, " \ "consentRecordId, " \ "issued_at, " \ "prevRecordId" \ - ") VALUES (%s, %s, %s, %s, %s, %s)" + ") VALUES (%s, %s, %s, %s, %s, %s, %s)" arguments = ( + str(self.consent_status_record_id), str(self.status), - str(self.consent_status_record), - str(self.consent_records_id), + json.dumps(self.consent_status_record), + int(self.consent_records_id), str(self.consent_record_id), - str(self.issued_at), + int(self.issued_at), str(self.prev_record_id), ) @@ -2317,15 +2880,16 @@ def from_db(self, cursor=None): # TODO: Don't allow if role is only criteria - sql_query = "SELECT id, consentStatus, consentStatusRecord, ConsentRecords_id, consentRecordId, " \ - "issued_at, prevRecordId " \ - "FROM MyDataAccount.ConsentStatusRecords " \ - "WHERE id LIKE %s AND consentStatus LIKE %s AND consentStatusRecord LIKE %s AND " \ - "ConsentRecords_id LIKE %s AND consentRecordId LIKE %s AND issued_at LIKE %s AND " \ - "prevRecordId LIKE %s;" + sql_query = "SELECT id, consentStatusRecordId, consentStatus, consentStatusRecord, ConsentRecords_id, " \ + "consentRecordId, issued_at, prevRecordId " \ 
+ "FROM " + self.table_name + " " \ + "WHERE id LIKE %s AND consentStatusRecordId LIKE %s AND consentStatus LIKE %s " \ + "AND consentStatusRecord LIKE %s AND ConsentRecords_id LIKE %s AND " \ + "consentRecordId LIKE %s AND issued_at LIKE %s AND prevRecordId LIKE %s;" arguments = ( '%' + str(self.id) + '%', + '%' + str(self.consent_status_record_id) + '%', '%' + str(self.status) + '%', '%' + str(self.consent_status_record) + '%', '%' + str(self.consent_records_id) + '%', @@ -2345,20 +2909,31 @@ def from_db(self, cursor=None): raise IndexError("DB query returned no results") if len(data[0]): self.id = data[0][0] - self.status = data[0][1] - self.consent_status_record = data[0][2] - self.consent_records_id = data[0][3] - self.consent_record_id = data[0][4] - self.issued_at = data[0][5] - self.prev_record_id = data[0][6] + self.consent_status_record_id = data[0][1] + self.status = data[0][2] + self.consent_status_record = data[0][3] + self.consent_records_id = data[0][4] + self.consent_record_id = data[0][5] + self.issued_at = data[0][6] + self.prev_record_id = data[0][7] else: self.id = data[0] - self.status = data[1] - self.consent_status_record = data[2] - self.consent_records_id = data[3] - self.consent_record_id = data[4] - self.issued_at = data[5] - self.prev_record_id = data[6] + self.consent_status_record_id = data[1] + self.status = data[2] + self.consent_status_record = data[3] + self.consent_records_id = data[4] + self.consent_record_id = data[5] + self.issued_at = data[6] + self.prev_record_id = data[7] + + try: + csr_copy = self.consent_status_record + logger.info("consent_status_record to dict") + self.consent_status_record = json.loads(self.consent_status_record) + except Exception as exp: + attribute_type = type(self.consent_status_record) + logger.error("Could not convert consent_status_record to dict. 
Type of attribute: " + repr(attribute_type) + " Using original: " + repr(exp)) + self.consent_status_record = csr_copy return cursor diff --git a/Account/app/mod_service/controllers.py b/Account/app/mod_service/controllers.py index 13c8c7e..d949849 100644 --- a/Account/app/mod_service/controllers.py +++ b/Account/app/mod_service/controllers.py @@ -48,7 +48,6 @@ def sign_slr(account_id=None, slr_payload=None, endpoint="sign_slr(account_id, s raise ApiError(code=500, title="Failed to get account owner's public key", detail=repr(exp), source=endpoint) else: logger.info("Account owner's public key and kid fetched") - finally: logger.debug("account_public_key: " + account_public_key_log_entry) # Fill Account key to cr_keys @@ -62,37 +61,28 @@ def sign_slr(account_id=None, slr_payload=None, endpoint="sign_slr(account_id, s else: logger.info("Account owner's public key added to cr_keys") - # Fill timestamp to created in slr - try: - timestamp_to_fill = get_utc_time() - except Exception as exp: - logger.error("Could not get UTC time: " + repr(exp)) - raise ApiError(code=500, title="Could not get UTC time", detail=repr(exp), source=endpoint) - else: - logger.info("timestamp_to_fill: " + timestamp_to_fill) - - timestamp_to_fill = int(time()) - try: - slr_payload['created'] = timestamp_to_fill - except Exception as exp: - logger.error("Could not fill timestamp to created in slr: " + repr(exp)) - raise ApiError(code=500, title="Failed to fill timestamp to created in slr", detail=repr(exp), source=endpoint) - else: - logger.info("Timestamp filled to created in slr") - # Sign slr slr_signed = {} try: - slr_signed = generate_and_sign_jws(account_id=account_id, jws_payload=json.dumps(slr_payload)) + slr_signed_json = generate_and_sign_jws(account_id=account_id, jws_payload=json.dumps(slr_payload)) except Exception as exp: logger.error('Could not create Service Link Record: ' + repr(exp)) raise ApiError(code=500, title="Failed to create Service Link Record", detail=repr(exp), 
source=endpoint) else: logger.info('Service Link Record created and signed') - return slr_signed - finally: logger.debug("slr_payload: " + json.dumps(slr_payload)) - logger.debug("slr_signed: " + slr_signed) + logger.debug("slr_signed_json: " + slr_signed_json) + try: + logger.info("Converting signed CSR from json to dict") + slr_signed_dict = json.loads(slr_signed_json) + except Exception as exp: + logger.error('Could not convert signed SLR from json to dict: ' + repr(exp)) + raise ApiError(code=500, title="Failed to convert signed SLR from json to dict", detail=repr(exp), source=endpoint) + else: + logger.info('Converted signed SLR from json to dict') + logger.debug('slr_signed_dict: ' + json.dumps(slr_signed_dict)) + + return slr_signed_dict def sign_ssr(account_id=None, ssr_payload=None, endpoint="sign_ssr(account_id, slr_payload, endpoint)"): @@ -103,36 +93,28 @@ def sign_ssr(account_id=None, ssr_payload=None, endpoint="sign_ssr(account_id, s logger.info("Signing Service Link Status Record") - # Fill timestamp to created in slr - try: - timestamp_to_fill = get_utc_time() - except Exception as exp: - logger.error("Could not get UTC time: " + repr(exp)) - raise ApiError(code=500, title="Could not get UTC time", detail=repr(exp), source=endpoint) - else: - logger.info("timestamp_to_fill: " + timestamp_to_fill) - - try: - ssr_payload['iat'] = timestamp_to_fill - except Exception as exp: - logger.error("Could not fill timestamp to iat in ssr_payload: " + repr(exp)) - raise ApiError(code=500, title="Failed to fill timestamp to iat in ssr_payload", detail=repr(exp), source=endpoint) - else: - logger.info("Timestamp filled to created in ssr_payload") - # Sign ssr ssr_signed = {} try: - ssr_signed = generate_and_sign_jws(account_id=account_id, jws_payload=json.dumps(ssr_payload)) + ssr_signed_json = generate_and_sign_jws(account_id=account_id, jws_payload=json.dumps(ssr_payload)) except Exception as exp: logger.error('Could not create Service Link Status Record: ' + 
repr(exp)) raise ApiError(code=500, title="Failed to create Service Link Record", detail=repr(exp), source=endpoint) else: logger.info('Service Link Status Record created and signed') - return ssr_signed, timestamp_to_fill - finally: logger.debug("ssr_payload: " + json.dumps(ssr_payload)) - logger.debug("ssr_signed: " + ssr_signed) + logger.debug("ssr_signed_json: " + ssr_signed_json) + try: + logger.info("Converting signed CSR from json to dict") + ssr_signed_dict = json.loads(ssr_signed_json) + except Exception as exp: + logger.error('Could not convert signed SLR from json to dict: ' + repr(exp)) + raise ApiError(code=500, title="Failed to convert signed SLR from json to dict", detail=repr(exp), source=endpoint) + else: + logger.info('Converted signed SLR from json to dict') + logger.debug('ssr_signed_dict: ' + json.dumps(ssr_signed_dict)) + + return ssr_signed_dict def store_slr_and_ssr(slr_entry=None, ssr_entry=None, endpoint="sign_ssr(account_id, slr_payload, endpoint)"): @@ -157,20 +139,19 @@ def store_slr_and_ssr(slr_entry=None, ssr_entry=None, endpoint="sign_ssr(account cursor = ssr_entry.to_db(cursor=cursor) - data = {'slr_id': slr_id, 'ssr_id': ssr_entry.id} + #data = {'slr_id': slr_id, 'ssr_id': ssr_entry.id} db.connection.commit() except Exception as exp: - logger.debug('commit failed: ' + repr(exp)) + logger.debug('Slr and Ssr commit failed: ' + repr(exp)) db.connection.rollback() logger.debug('--> rollback') raise ApiError(code=500, title="Failed to store slr and ssr", detail=repr(exp), source=endpoint) else: logger.debug('Slr and Ssr commited') - return data - finally: logger.debug("slr_entry: " + slr_entry.log_entry) logger.debug("ssr_entry: " + ssr_entry.log_entry) + return slr_entry, ssr_entry def get_surrogate_id_by_account_and_service(account_id=None, service_id=None, endpoint="(get_surrogate_id_by_account_and_Service)"): @@ -187,7 +168,6 @@ def get_surrogate_id_by_account_and_service(account_id=None, service_id=None, en raise else: 
logger.info("SurrogateId object created") - finally: logger.debug("sur_id_obj: " + sur_id_obj.log_entry) # Get DB cursor @@ -204,8 +184,6 @@ def get_surrogate_id_by_account_and_service(account_id=None, service_id=None, en raise else: logger.debug("Got sur_id_obj:" + json.dumps(sur_id_obj.to_dict)) - return sur_id_obj.to_dict - finally: logger.debug("sur_id_obj: " + sur_id_obj.log_entry) - + return sur_id_obj.to_dict diff --git a/Account/app/mod_service/models.py b/Account/app/mod_service/models.py index e740d9c..f154122 100644 --- a/Account/app/mod_service/models.py +++ b/Account/app/mod_service/models.py @@ -16,16 +16,18 @@ from marshmallow.validate import Equal, OneOf +STATUS_LIST = ["Active", "Removed"] # List that contains status entries + + class SlrAttributes(Schema): version = fields.Str(required=True) link_id = fields.Str(required=True) operator_id = fields.Str(required=True) service_id = fields.Str(required=True) surrogate_id = fields.Str(required=True) - token_key = fields.Dict(required=True) operator_key = fields.Dict(required=True) cr_keys = fields.Str(required=True) - created = fields.Str(required=True) + iat = fields.Int(required=True) class SurrogateAttributes(Schema): @@ -33,6 +35,7 @@ class SurrogateAttributes(Schema): service_id = fields.Str(required=True) account_id = fields.Str(required=True) + class SlrContent(Schema): type = fields.Str(required=True, validate=Equal("ServiceLinkRecord")) attributes = fields.Nested(nested=SlrAttributes, required=True) @@ -56,10 +59,10 @@ class NewServiceLink(Schema): ############ class SsrAttributes(Schema): record_id = fields.Str(required=True) - account_id = fields.Str(required=True) + surrogate_id = fields.Str(required=True) slr_id = fields.Str(required=True) - sl_status = fields.Str(required=True, validate=OneOf(["Active", "Removed"])) - iat = fields.Str(required=True) + sl_status = fields.Str(required=True, validate=OneOf(STATUS_LIST)) + iat = fields.Int(required=True) prev_record_id = 
fields.Str(required=True) diff --git a/Account/app/mod_service/view_api.py b/Account/app/mod_service/view_api.py index ef14b86..7230169 100644 --- a/Account/app/mod_service/view_api.py +++ b/Account/app/mod_service/view_api.py @@ -136,7 +136,7 @@ def post(self, account_id): # Sign SLR try: - slr_signed = sign_slr(account_id=account_id, slr_payload=slr_payload, endpoint=str(endpoint)) + slr_signed_dict = sign_slr(account_id=account_id, slr_payload=slr_payload, endpoint=str(endpoint)) except Exception as exp: logger.error("Could not sign SLR") logger.debug("Could not sign SLR: " + repr(exp)) @@ -150,7 +150,7 @@ def post(self, account_id): response_data['data']['slr'] = {} response_data['data']['slr']['type'] = "ServiceLinkRecord" response_data['data']['slr']['attributes'] = {} - response_data['data']['slr']['attributes']['slr'] = json.loads(slr_signed) + response_data['data']['slr']['attributes']['slr'] = slr_signed_dict response_data['data']['surrogate_id'] = surrogate_id except Exception as exp: logger.error('Could not prepare response data: ' + repr(exp)) @@ -225,7 +225,7 @@ def post(self, account_id): # Decode slr payload try: - print (json.dumps(json_data)) + #print (json.dumps(json_data)) slr_payload_encoded = slr['payload'] slr_payload_encoded += '=' * (-len(slr_payload_encoded) % 4) # Fix incorrect padding, base64 slr_payload_decoded = b64decode(slr_payload_encoded).replace('\\', '').replace('"{', '{').replace('}"', '}') @@ -301,6 +301,14 @@ def post(self, account_id): else: logger.debug("Got prev_ssr_id: " + str(prev_ssr_id)) + # Get iat + try: + ssr_iat = int(ssr_payload['iat']) + except Exception as exp: + raise ApiError(code=400, title="Could not fetch iat from ssr_payload", detail=repr(exp), source=endpoint) + else: + logger.debug("Got iat: " + str(prev_ssr_id)) + # # Get code try: @@ -331,7 +339,7 @@ def post(self, account_id): # Sign Ssr try: - ssr_signed, ssr_iat = sign_ssr(account_id=account_id, ssr_payload=ssr_payload, endpoint=str(endpoint)) + 
ssr_signed = sign_ssr(account_id=account_id, ssr_payload=ssr_payload, endpoint=str(endpoint)) except Exception as exp: logger.error("Could not sign Ssr") logger.debug("Could not sign Ssr: " + repr(exp)) @@ -342,7 +350,7 @@ def post(self, account_id): logger.info("Storing Service Link Record and Service Link Status Record") try: slr_entry = ServiceLinkRecord( - service_link_record=json.dumps(slr), + service_link_record=slr, service_link_record_id=slr_id, service_id=service_id, surrogate_id=surrogate_id, @@ -367,14 +375,15 @@ def post(self, account_id): raise ApiError(code=500, title="Failed to create Service Link Status Record object", detail=repr(exp), source=endpoint) try: - db_meta = store_slr_and_ssr(slr_entry=slr_entry, ssr_entry=ssr_entry, endpoint=str(endpoint)) + stored_slr_entry, stored_ssr_entry = store_slr_and_ssr(slr_entry=slr_entry, ssr_entry=ssr_entry, endpoint=str(endpoint)) except Exception as exp: logger.error("Could not store Service Link Record and Service Link Status Record") logger.debug("Could not store SLR and Ssr: " + repr(exp)) raise else: logger.info("Stored Service Link Record and Service Link Status Record") - logger.debug("DB Meta: " + json.dumps(db_meta)) + logger.debug("stored_slr_entry: " + stored_slr_entry.log_entry) + logger.debug("stored_ssr_entry: " + stored_ssr_entry.log_entry) # Response data container try: @@ -383,15 +392,9 @@ def post(self, account_id): response_data['data'] = {} - response_data['data']['slr'] = {} - response_data['data']['slr']['type'] = "ServiceLinkRecord" - response_data['data']['slr']['attributes'] = {} - response_data['data']['slr']['attributes']['slr'] = slr + response_data['data']['slr'] = stored_slr_entry.to_record_dict - response_data['data']['ssr'] = {} - response_data['data']['ssr']['type'] = "ServiceLinkStatusRecord" - response_data['data']['ssr']['attributes'] = {} - response_data['data']['ssr']['attributes']['ssr'] = json.loads(ssr_signed) + response_data['data']['ssr'] = 
stored_ssr_entry.to_record_dict response_data['data']['surrogate_id'] = surrogate_id except Exception as exp: diff --git a/Account/app/mod_system/controllers.py b/Account/app/mod_system/controllers.py index 07764c5..08d6051 100644 --- a/Account/app/mod_system/controllers.py +++ b/Account/app/mod_system/controllers.py @@ -98,38 +98,54 @@ def get(self, secret=None): logger.info("Initing MySQL") json_data = [ { - 'firstName': 'Erkki', - 'lastName': 'Esimerkki', - 'dateOfBirth': '31-05-2016', - 'email': 'erkki.esimerkki@examlpe.org', - 'username': 'testUser', - 'password': 'Hello', - 'acceptTermsOfService': 'True' + "data": { + "type": "Account", + "attributes": { + 'firstName': 'Erkki', + 'lastName': 'Esimerkki', + 'dateOfBirth': '2016-04-29', + 'email': 'erkki.esimerkki@examlpe.org', + 'username': 'testUser', + 'password': 'Hello', + 'acceptTermsOfService': 'True' + } + } }, { - 'firstName': 'Iso', - 'lastName': 'Pasi', - 'dateOfBirth': '31-05-2016', - 'email': 'iso.pasi@examlpe.org', - 'username': 'pasi', - 'password': '0nk0va', - 'acceptTermsOfService': 'True' + "data": { + "type": "Account", + "attributes": { + 'firstName': 'Iso', + 'lastName': 'Pasi', + 'dateOfBirth': '2016-08-12', + 'email': 'iso.pasi@examlpe.org', + 'username': 'pasi', + 'password': '0nk0va', + 'acceptTermsOfService': 'True' + } + } }, { - 'firstName': 'Dude', - 'lastName': 'Dudeson', - 'dateOfBirth': '31-05-2016', - 'email': 'dude.dudeson@examlpe.org', - 'username': 'mydata', - 'password': 'Hello', - 'acceptTermsOfService': 'True' + "data": { + "type": "Account", + "attributes": { + 'firstName': 'Dude', + 'lastName': 'Dudeson', + 'dateOfBirth': '2016-05-31', + 'email': 'dude.dudeson@examlpe.org', + 'username': 'mydata', + 'password': 'Hello', + 'acceptTermsOfService': 'True' + } + } } ] + form_data = [ { 'firstname': 'Erkki', 'lastname': 'Esimerkki', - 'dateofbirth': '31-05-2016', + 'dateofbirth': '2016-05-31', 'email': 'erkki.esimerkki@examlpe.org', 'username': 'testUser', 'password': 'Hello' 
@@ -137,7 +153,7 @@ def get(self, secret=None): { 'firstname': 'Iso', 'lastname': 'Pasi', - 'dateofbirth': '31-05-2016', + 'dateofbirth': '2016-05-31', 'email': 'iso.pasi@examlpe.org', 'username': 'pasi', 'password': '0nk0va' @@ -145,7 +161,7 @@ def get(self, secret=None): { 'firstname': 'Dude', 'lastname': 'Dudeson', - 'dateofbirth': '31-05-2016', + 'dateofbirth': '2016-05-31', 'email': 'dude.dudeson@examlpe.org', 'username': 'mydata', 'password': 'Hello' @@ -160,7 +176,7 @@ def get(self, secret=None): logger.debug("Posting: " + str(url)) logger.debug("##########") - logger.debug("Creating: " + repr(form_data[0])) + logger.debug("Creating: " + repr(json_data[0])) #r = requests.post(url, data=form_data[0]) r = requests.post(url, json=json_data[0], headers=headers) logger.debug("Response status: " + str(r.status_code)) diff --git a/Account/config.py b/Account/config.py index 86b3678..48bc693 100644 --- a/Account/config.py +++ b/Account/config.py @@ -34,7 +34,7 @@ #MYSQL_READ_DEFAULT_FILE = '' # MySQL configuration file to read, see the MySQL documentation for mysql_options(). #MYSQL_USE_UNICODE = '' # If True, CHAR and VARCHAR and TEXT columns are returned as Unicode strings, using the configured character set. MYSQL_CHARSET = 'utf8' # If present, the connection character set will be changed to this character set, if they are not equal. Default: utf-8 -#MYSQL_SQL_MODE = '' # If present, the session SQL mode will be set to the given string. +MYSQL_SQL_MODE = 'TRADITIONAL' # If present, the session SQL mode will be set to the given string. #MYSQL_CURSORCLASS = '' # If present, the cursor class will be set to the given string. 
diff --git a/Account/doc/api/account_api_external.yaml b/Account/doc/api/account_api_external.yaml index 7715084..777a9fd 100644 --- a/Account/doc/api/account_api_external.yaml +++ b/Account/doc/api/account_api_external.yaml @@ -4,7 +4,7 @@ info: title: 'Digital Health Revolution - MyData Account' description: ' #### MyData-SDK - MyData Account API - External ' - version: '1.2' + version: '1.2.1' contact: url: 'https://github.com/HIIT/mydata-stack' license: @@ -1779,28 +1779,23 @@ paths: definitions: errors: - type: array - items: - type: object - properties: - status: - type: string - description: HTTP status code as string value. - code: - type: integer - description: HTTP status code - title: - type: string - description: Title of error message. - detail: - type: string - description: Detailed error message. - source: - type: object - properties: - pointer: - type: string - description: Source URI + type: object + properties: + status: + type: string + description: HTTP status code as string value. + code: + type: integer + description: HTTP status code + title: + type: string + description: Title of error message. + detail: + type: string + description: Detailed error message. 
+ source: + type: string + description: Source URI apiKeyResponse: type: object @@ -1808,6 +1803,9 @@ definitions: Api-Key: type: string description: ApiKey + account_id: + type: string + description: Account ID newAccount: type: object diff --git a/Account/doc/api/account_api_internal.yaml b/Account/doc/api/account_api_internal.yaml index 3fa407c..cb82c5e 100644 --- a/Account/doc/api/account_api_internal.yaml +++ b/Account/doc/api/account_api_internal.yaml @@ -4,7 +4,7 @@ info: title: 'Digital Health Revolution - MyData Account' description: ' #### MyData-SDK - MyData Account API - Internal ' - version: '1.2' + version: '1.2.1' contact: url: 'https://github.com/HIIT/mydata-stack' license: @@ -303,6 +303,134 @@ paths: $ref: '#/definitions/errors' + /consent/{cr_id}/status/last/: + get: + security: + - internalApiKeyAuth: [] + description: "Get last Status Record of Consent" + + parameters: + - name: cr_id + in: path + type: string + description: "Consent Record ID" + required: true + + tags: + - Consent + - Authorizationtoken + + responses: + '200': + description: 'Last Status Record of Consent' + schema: + $ref: '#/definitions/ConsentStatusRecordResponse' + '400': + description: Bad Request + schema: + $ref: '#/definitions/errors' + '401': + description: Unauthorized + schema: + $ref: '#/definitions/errors' + '403': + description: Forbidden + schema: + $ref: '#/definitions/errors' + '500': + description: Internal Server error + schema: + $ref: '#/definitions/errors' + + + /consent/{cr_id}/status/: + get: + security: + - internalApiKeyAuth: [] + description: "Get missing Consent Status Records" + + parameters: + - name: cr_id + in: path + type: string + description: "Consent Record ID" + required: true + - name: csr_id + in: query + type: string + description: "Last valid Consent Status Record ID" + required: true + + tags: + - Consent + - Status + + responses: + '200': + description: 'Array of Consent Status Record objects' + schema: + $ref: 
'#/definitions/MissingConsentStatusRecordResponse' + '400': + description: Bad Request + schema: + $ref: '#/definitions/errors' + '401': + description: Unauthorized + schema: + $ref: '#/definitions/errors' + '403': + description: Forbidden + schema: + $ref: '#/definitions/errors' + '500': + description: Internal Server error + schema: + $ref: '#/definitions/errors' + + post: + security: + - internalApiKeyAuth: [] + description: "Issue new status" + + parameters: + - name: cr_id + in: path + type: string + description: "Consent Record ID" + required: true + - name: csr_payload + in: body + description: "Consent Record payload" + required: true + schema: + $ref: '#/definitions/newConsentStatus' + + tags: + - Consent + - Status + + responses: + '200': + description: 'New Consent Status Record' + schema: + $ref: '#/definitions/ConsentStatusRecordResponse' + '400': + description: Bad Request + schema: + $ref: '#/definitions/errors' + '401': + description: Unauthorized + schema: + $ref: '#/definitions/errors' + '403': + description: Forbidden + schema: + $ref: '#/definitions/errors' + '500': + description: Internal Server error + schema: + $ref: '#/definitions/errors' + definitions: @@ -324,25 +452,23 @@ definitions: $ref: '#/definitions/Slr' errors: - type: array - items: - type: object - properties: - status: - type: string - description: HTTP status code as string value. - code: - type: integer - description: HTTP status code - title: - type: string - description: Title of error message. - detail: - type: string - description: Detailed error message. - source: - type: string - description: Source URI + type: object + properties: + status: + type: string + description: HTTP status code as string value. + code: + type: integer + description: HTTP status code + title: + type: string + description: Title of error message. + detail: + type: string + description: Detailed error message. 
+ source: + type: string + description: Source URI apiKeyResponse: type: object @@ -377,6 +503,9 @@ definitions: type: type: string description: "Resource type: 'ServiceLinkRecord'" + id: + type: string + description: "ID of resource" attributes: type: object properties: @@ -390,6 +519,9 @@ definitions: type: type: string description: "Resource type: 'ServiceLinkStatusRecord'" + id: + type: string + description: "ID of resource" attributes: type: object properties: @@ -516,6 +648,9 @@ definitions: type: type: string description: "Resource type: 'ConsentRecord'" + id: + type: string + description: "ID of resource" attributes: type: object properties: @@ -529,6 +664,9 @@ definitions: type: type: string description: "Resource type: 'ConsentStatusRecord'" + id: + type: string + description: "ID of resource" attributes: type: object properties: @@ -577,3 +715,19 @@ definitions: $ref: '#/definitions/ConsentRecord' consentStatusRecord: $ref: '#/definitions/ConsentStatusRecord' + + + ConsentStatusRecordResponse: + type: object + properties: + data: + $ref: '#/definitions/ConsentStatusRecord' + + + MissingConsentStatusRecordResponse: + type: object + properties: + data: + type: array + items: + $ref: '#/definitions/ConsentStatusRecord' diff --git a/Account/doc/database/MyDataAccount-DBinit.sql b/Account/doc/database/MyDataAccount-DBinit.sql old mode 100644 new mode 100755 index 998946e..db3dfbb --- a/Account/doc/database/MyDataAccount-DBinit.sql +++ b/Account/doc/database/MyDataAccount-DBinit.sql @@ -1,11 +1,11 @@ -- MySQL Script generated by MySQL Workbench --- 09/14/16 10:31:08 +-- 11/17/16 16:42:54 -- Model: New Model Version: 1.0 -- MySQL Workbench Forward Engineering SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0; SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0; -SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE=''; +SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL'; -- ----------------------------------------------------- -- Schema 
MyDataAccount @@ -24,10 +24,11 @@ DROP TABLE IF EXISTS `MyDataAccount`.`Accounts` ; CREATE TABLE IF NOT EXISTS `MyDataAccount`.`Accounts` ( `id` INT NOT NULL AUTO_INCREMENT, - `globalIdenttifyer` VARCHAR(255) NOT NULL, + `globalIdentifier` VARCHAR(255) NOT NULL, `activated` TINYINT(1) NOT NULL DEFAULT 0, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, PRIMARY KEY (`id`), - UNIQUE INDEX `globalIdenttifyer_UNIQUE` (`globalIdenttifyer` ASC)) + UNIQUE INDEX `globalIdenttifyer_UNIQUE` (`globalIdentifier` ASC)) ENGINE = InnoDB; @@ -42,6 +43,7 @@ CREATE TABLE IF NOT EXISTS `MyDataAccount`.`Particulars` ( `lastname` VARCHAR(255) NOT NULL, `dateOfBirth` DATE NULL DEFAULT NULL, `img_url` VARCHAR(255) NULL DEFAULT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, `Accounts_id` INT NOT NULL, PRIMARY KEY (`id`), INDEX `fk_Particulars_Accounts1_idx` (`Accounts_id` ASC), @@ -66,6 +68,7 @@ CREATE TABLE IF NOT EXISTS `MyDataAccount`.`ServiceLinkRecords` ( `serviceId` VARCHAR(255) NOT NULL, `surrogateId` VARCHAR(255) NOT NULL, `operatorId` VARCHAR(255) NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, PRIMARY KEY (`id`), INDEX `fk_ServiceLinkRecords_Accounts1_idx` (`Accounts_id` ASC), UNIQUE INDEX `serviceLinkRecordId_UNIQUE` (`serviceLinkRecordId` ASC), @@ -92,10 +95,10 @@ CREATE TABLE IF NOT EXISTS `MyDataAccount`.`ConsentRecords` ( `serviceLinkRecordId` VARCHAR(255) NOT NULL, `subjectId` VARCHAR(255) NOT NULL, `role` VARCHAR(255) NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, PRIMARY KEY (`id`), INDEX `fk_ConsentRecords_ServiceLinkRecords1_idx` (`ServiceLinkRecords_id` ASC), - UNIQUE INDEX `ConsentRecordId_UNIQUE` (`consentRecordId` ASC), - UNIQUE INDEX `serviceLinkRecordId_UNIQUE` (`serviceLinkRecordId` ASC), + UNIQUE INDEX `consentRecordId_UNIQUE` (`consentRecordId` ASC), CONSTRAINT `fk_ConsentRecords_ServiceLinkRecords1` FOREIGN KEY (`ServiceLinkRecords_id`) REFERENCES `MyDataAccount`.`ServiceLinkRecords` (`id`) @@ -112,6 +115,7 @@ DROP TABLE IF EXISTS 
`MyDataAccount`.`LocalIdentityPWDs` ; CREATE TABLE IF NOT EXISTS `MyDataAccount`.`LocalIdentityPWDs` ( `id` INT NOT NULL AUTO_INCREMENT, `password` VARCHAR(255) NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, PRIMARY KEY (`id`)) ENGINE = InnoDB; @@ -126,6 +130,7 @@ CREATE TABLE IF NOT EXISTS `MyDataAccount`.`LocalIdentities` ( `username` VARCHAR(255) NOT NULL, `LocalIdentityPWDs_id` INT NOT NULL, `Accounts_id` INT NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, PRIMARY KEY (`id`), UNIQUE INDEX `username_UNIQUE` (`username` ASC), INDEX `fk_LocalIdentities_LocalIdentityPWDs1_idx` (`LocalIdentityPWDs_id` ASC), @@ -151,6 +156,7 @@ DROP TABLE IF EXISTS `MyDataAccount`.`RemoteIdentityProviders` ; CREATE TABLE IF NOT EXISTS `MyDataAccount`.`RemoteIdentityProviders` ( `id` INT NOT NULL AUTO_INCREMENT, `name` VARCHAR(255) NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, PRIMARY KEY (`id`)) ENGINE = InnoDB; @@ -165,6 +171,7 @@ CREATE TABLE IF NOT EXISTS `MyDataAccount`.`RemoteIdentities` ( `remoteUniqueId` VARCHAR(255) NOT NULL, `Accounts_id` INT NOT NULL, `RemoteIdentityProviders_id` INT NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, PRIMARY KEY (`id`), UNIQUE INDEX `opdenIdIdentifyer_UNIQUE` (`remoteUniqueId` ASC), INDEX `fk_RemoteIdentities_Accounts1_idx` (`Accounts_id` ASC), @@ -191,6 +198,7 @@ CREATE TABLE IF NOT EXISTS `MyDataAccount`.`Salts` ( `id` INT NOT NULL AUTO_INCREMENT, `salt` VARCHAR(255) NOT NULL, `LocalIdentities_id` INT NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, PRIMARY KEY (`id`), UNIQUE INDEX `hash_UNIQUE` (`salt` ASC), INDEX `fk_Salts_LocalIdentities1_idx` (`LocalIdentities_id` ASC), @@ -209,9 +217,10 @@ DROP TABLE IF EXISTS `MyDataAccount`.`Settings` ; CREATE TABLE IF NOT EXISTS `MyDataAccount`.`Settings` ( `id` INT NOT NULL AUTO_INCREMENT, - `key` VARCHAR(255) NOT NULL, - `value` VARCHAR(255) NOT NULL, + `setting_key` VARCHAR(255) NOT NULL, + `setting_value` VARCHAR(255) NOT NULL, `Accounts_id` INT NOT NULL, + `deleted` 
TINYINT(1) NOT NULL DEFAULT 0, PRIMARY KEY (`id`), INDEX `fk_Settings_Accounts1_idx` (`Accounts_id` ASC), CONSTRAINT `fk_Settings_Accounts1` @@ -230,14 +239,15 @@ DROP TABLE IF EXISTS `MyDataAccount`.`Contacts` ; CREATE TABLE IF NOT EXISTS `MyDataAccount`.`Contacts` ( `id` INT NOT NULL AUTO_INCREMENT, `address1` VARCHAR(255) NULL DEFAULT NULL, - `address2` VARCHAR(255) NULL DEFAULT NULL, + `address2` VARCHAR(255) NULL, `postalCode` VARCHAR(255) NULL DEFAULT NULL, `city` VARCHAR(255) NULL DEFAULT NULL, `state` VARCHAR(255) NULL DEFAULT NULL, `country` VARCHAR(255) NULL DEFAULT NULL, - `typeEnum` ENUM('Personal', 'Work', 'School', 'Other') NOT NULL, + `entryType` VARCHAR(255) NOT NULL, `prime` TINYINT(1) NOT NULL DEFAULT 0, `Accounts_id` INT NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, PRIMARY KEY (`id`), INDEX `fk_Contacts_Accounts1_idx` (`Accounts_id` ASC), CONSTRAINT `fk_Contacts_Accounts1` @@ -255,14 +265,17 @@ DROP TABLE IF EXISTS `MyDataAccount`.`ConsentStatusRecords` ; CREATE TABLE IF NOT EXISTS `MyDataAccount`.`ConsentStatusRecords` ( `id` INT NOT NULL AUTO_INCREMENT, - `consentStatus` ENUM('Active', 'Paused', 'Withdrawn', 'NoSLR') NOT NULL, + `consentStatusRecordId` VARCHAR(255) NOT NULL, + `consentStatus` VARCHAR(255) NOT NULL, `consentStatusRecord` BLOB NOT NULL, `ConsentRecords_id` INT NOT NULL, `consentRecordId` VARCHAR(255) NOT NULL, - `issued_at` VARCHAR(255) NOT NULL, + `issued_at` BIGINT NOT NULL, `prevRecordId` VARCHAR(255) NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, PRIMARY KEY (`id`), INDEX `fk_ConsentStatusRecords_ConsentRecords1_idx` (`ConsentRecords_id` ASC), + UNIQUE INDEX `consentStatusRecordId_UNIQUE` (`consentStatusRecordId` ASC), CONSTRAINT `fk_ConsentStatusRecords_ConsentRecords1` FOREIGN KEY (`ConsentRecords_id`) REFERENCES `MyDataAccount`.`ConsentRecords` (`id`) @@ -282,9 +295,10 @@ CREATE TABLE IF NOT EXISTS `MyDataAccount`.`ServiceLinkStatusRecords` ( `serviceLinkStatusRecord` BLOB NOT NULL, `ServiceLinkRecords_id` 
INT NOT NULL, `serviceLinkRecordId` VARCHAR(255) NOT NULL, - `issued_at` VARCHAR(255) NOT NULL, + `issued_at` BIGINT NOT NULL, `prevRecordId` VARCHAR(255) NOT NULL, `serviceLinkStatusRecordId` VARCHAR(255) NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, PRIMARY KEY (`id`), INDEX `fk_ServiceLinkStatusRecords_ServiceLinkRecords1_idx` (`ServiceLinkRecords_id` ASC), UNIQUE INDEX `serviceLinkStatusRecordId_UNIQUE` (`serviceLinkStatusRecordId` ASC), @@ -303,10 +317,11 @@ DROP TABLE IF EXISTS `MyDataAccount`.`EventLogs` ; CREATE TABLE IF NOT EXISTS `MyDataAccount`.`EventLogs` ( `id` INT NOT NULL AUTO_INCREMENT, - `actor` ENUM('User', 'Operator', 'Service') NOT NULL, + `actor` VARCHAR(255) NOT NULL, `event` BLOB NOT NULL, - `created` TIMESTAMP NOT NULL, + `created` BIGINT NOT NULL, `Accounts_id` INT NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, PRIMARY KEY (`id`), INDEX `fk_EventLogs_Accounts1_idx` (`Accounts_id` ASC), CONSTRAINT `fk_EventLogs_Accounts1` @@ -325,9 +340,10 @@ DROP TABLE IF EXISTS `MyDataAccount`.`Emails` ; CREATE TABLE IF NOT EXISTS `MyDataAccount`.`Emails` ( `id` INT NOT NULL AUTO_INCREMENT, `email` VARCHAR(255) NULL DEFAULT NULL, - `typeEnum` ENUM('Personal', 'Work', 'School', 'Other') NOT NULL, + `entryType` VARCHAR(255) NOT NULL, `prime` TINYINT(1) NOT NULL DEFAULT 0, `Accounts_id` INT NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, PRIMARY KEY (`id`), INDEX `fk_Emails_Accounts1_idx` (`Accounts_id` ASC), CONSTRAINT `fk_Emails_Accounts1` @@ -346,9 +362,10 @@ DROP TABLE IF EXISTS `MyDataAccount`.`Telephones` ; CREATE TABLE IF NOT EXISTS `MyDataAccount`.`Telephones` ( `id` INT NOT NULL AUTO_INCREMENT, `tel` VARCHAR(255) NULL DEFAULT NULL, - `typeEnum` ENUM('Personal', 'Work', 'School', 'Other') NOT NULL, + `entryType` VARCHAR(255) NOT NULL, `prime` TINYINT(1) NOT NULL DEFAULT 0, `Accounts_id` INT NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, PRIMARY KEY (`id`), INDEX `fk_Telephones_Accounts1_idx` (`Accounts_id` ASC), CONSTRAINT 
`fk_Telephones_Accounts1` @@ -371,6 +388,7 @@ CREATE TABLE IF NOT EXISTS `MyDataAccount`.`OneTimeCookies` ( `created` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, `updated` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `LocalIdentities_id` INT NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, UNIQUE INDEX `oneTimeCookie_UNIQUE` (`oneTimeCookie` ASC), PRIMARY KEY (`id`), INDEX `fk_OneTimeCookies_LocalIdentities1_idx` (`LocalIdentities_id` ASC), @@ -385,6 +403,3 @@ ENGINE = InnoDB; SET SQL_MODE=@OLD_SQL_MODE; SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS; SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS; - -GRANT CREATE TEMPORARY TABLES, DELETE, DROP, INSERT, LOCK TABLES, SELECT, UPDATE ON MyDataAccount.* TO 'mydataaccount'@'%'; -FLUSH PRIVILEGES; diff --git a/Account/doc/database/MyDataAccount-UserInit.sql b/Account/doc/database/MyDataAccount-UserInit.sql new file mode 100644 index 0000000..042a923 --- /dev/null +++ b/Account/doc/database/MyDataAccount-UserInit.sql @@ -0,0 +1,11 @@ +-- MySQL Script +-- 09/14/16 10:31:08 + +REVOKE ALL PRIVILEGES, GRANT OPTION FROM 'mydataaccount'@'%'; +DROP USER 'mydataaccount'@'%'; +DELETE FROM mysql.user WHERE user='mydataaccount'; +FLUSH PRIVILEGES; + +CREATE USER 'mydataaccount'@'%' IDENTIFIED BY 'wr8gabrA'; +GRANT CREATE TEMPORARY TABLES, DELETE, DROP, INSERT, LOCK TABLES, SELECT, UPDATE ON MyDataAccount.* TO 'mydataaccount'@'%'; +FLUSH PRIVILEGES; \ No newline at end of file diff --git a/Account/doc/database/MyDataAccount.mwb b/Account/doc/database/MyDataAccount.mwb old mode 100644 new mode 100755 index 906cfaa..35a3086 Binary files a/Account/doc/database/MyDataAccount.mwb and b/Account/doc/database/MyDataAccount.mwb differ diff --git a/Account/doc/database/MyDataAccount.png b/Account/doc/database/MyDataAccount.png old mode 100644 new mode 100755 index 2a930f3..133e909 Binary files a/Account/doc/database/MyDataAccount.png and b/Account/doc/database/MyDataAccount.png differ diff --git 
a/Account/doc/deployment.md b/Account/doc/deployment.md index a945b90..c71fb69 100644 --- a/Account/doc/deployment.md +++ b/Account/doc/deployment.md @@ -58,10 +58,8 @@ These instructions are using 'Y3xUcheg' as root password. ##### In MySQL shell - CREATE USER 'mydataaccount'@'localhost' IDENTIFIED BY 'wr8gabrA'; - DROP DATABASE MyDataAccount; source doc/database/MyDataAccount-DBinit.sql - FLUSH PRIVILEGES; + source doc/database/MyDataAccount-UserInit.sql ##### Quit from MySQL shell diff --git a/Account/doc/developer_oneliners.md b/Account/doc/developer_oneliners.md index c1c9de8..fa714f5 100644 --- a/Account/doc/developer_oneliners.md +++ b/Account/doc/developer_oneliners.md @@ -9,15 +9,14 @@ Log files are usually found from /var/log/nginx/ - ### uWSGI uWSGI logging path is defined in uwsgi.ini. If deployed as instructed in log file can be found from - /var/www/myDataSDK/mydata-sdk/Account/uwsgi.log + /var/www/myDataAccount/mydata-sdk/Account/uwsgi.log ### If logging to file is enabled in Flask Application's config file log can be found from - /var/www/myDataSDK/mydata-sdk/Account/logs + /var/www/myDataAccount/mydata-sdk/Account/logs diff --git a/Account/docker-entrypoint-account.sh b/Account/docker-entrypoint-account.sh new file mode 100755 index 0000000..9e57d2d --- /dev/null +++ b/Account/docker-entrypoint-account.sh @@ -0,0 +1,19 @@ +#!/bin/bash +#docker-entrypoint.sh + +# Note: This script uses the exec Bash command so that the final running +# application becomes the container’s PID 1. This allows the application to +# receive any Unix signals sent to the container. +# See the ENTRYPOINT help for more details. +# https://docs.docker.com/engine/userguide/eng-image/dockerfile_best-practices/#/entrypoint + +# -e - Exit immediately if a command exits with a non-zero status. 
+set -e + +# Preprocess configuration files based on environment variables given to +# "docker run" -command or Docker Compose +j2 $APP_INSTALL_PATH/account_config_template.py.j2 > \ + $APP_INSTALL_PATH/config.py + +# Try to start whatever was given as a parameter to "docker run" -command +exec "$@" diff --git a/Account/operator_emulator/operator_emulator.py b/Account/operator_emulator/operator_emulator.py index 401980e..d403189 100644 --- a/Account/operator_emulator/operator_emulator.py +++ b/Account/operator_emulator/operator_emulator.py @@ -17,6 +17,8 @@ import requests import time + +import sys from requests.auth import HTTPBasicAuth import json @@ -46,9 +48,26 @@ source_csr_id = "SOURCE-CSR-" + str(uuid4()) sink_csr_id = "SINK-CSR-" + str(uuid4()) +source_csr_id_new = "SOURCE-CSR-NEW-" + str(uuid4()) +source_csr_id_new_2 = "SOURCE-CSR-NEW2-" + str(uuid4()) + rs_id = "RS-ID-" + str(uuid4()) -not_before = str(time.time()) -not_after = str(time.time() + (60*60*24*7)) + +epoch = int(time.time()) + +source_slr_iat = epoch +sink_slr_iat = epoch +source_slsr_iat = epoch +sink_slsr_iat = epoch +cr_not_before = epoch +cr_not_after = epoch + (60*60*24*7) +csr_not_before = epoch +csr_not_after = epoch + (60*60*24*7) +source_cr_iat = epoch +sink_cr_iat = epoch +source_csr_iat = epoch +sink_csr_iat = epoch + distribution_id = "DISTRIBUTION-ID-" + str(uuid4()) dataset_id = "DATASET-ID-" + str(uuid4()) @@ -63,15 +82,6 @@ "operator_id": operator_id, "service_id": source_service_id, "surrogate_id": source_surrogate_id, - "token_key": { - "key": { - "y": "FFuMENxef5suGtcBz4PWXt_KvRUHdURU5kH7EI5GZj8", - "x": "5IxIntzP7SPShzbGVW6dVYQlMsJ9kg9rjrE5Z3B6fmg", - "kid": "SRVMGNT-IDK3Y", - "crv": "P-256", - "kty": "EC" - } - }, "operator_key": { "key": { "y": "FFuMENxef5suGtcBz4PWXt_KvRUHdURU5kH7EI5GZj8", @@ -82,7 +92,7 @@ } }, "cr_keys": "", - "created": "" + "iat": source_slr_iat } }, "surrogate_id": { @@ -107,15 +117,6 @@ "operator_id": operator_id, "service_id": sink_service_id, 
"surrogate_id": sink_surrogate_id, - "token_key": { - "key": { - "y": "FFuMENxef5suGtcBz4PWXt_KvRUHdURU5kH7EI5GZj8", - "x": "5IxIntzP7SPShzbGVW6dVYQlMsJ9kg9rjrE5Z3B6fmg", - "kid": "SRVMGNT-IDK3Y", - "crv": "P-256", - "kty": "EC" - } - }, "operator_key": { "key": { "y": "FFuMENxef5suGtcBz4PWXt_KvRUHdURU5kH7EI5GZj8", @@ -126,7 +127,7 @@ } }, "cr_keys": "", - "created": "" + "iat": sink_slr_iat } }, "surrogate_id": { @@ -152,10 +153,10 @@ "ssr": { "attributes": { "record_id": source_ssr_id, - "account_id": source_surrogate_id, + "surrogate_id": source_surrogate_id, "slr_id": source_slr_id, "sl_status": "Active", - "iat": "", + "iat": source_slsr_iat, "prev_record_id": "NULL" }, "type": "ServiceLinkStatusRecord" @@ -183,10 +184,10 @@ "ssr": { "attributes": { "record_id": sink_ssr_id, - "account_id": sink_surrogate_id, + "surrogate_id": sink_surrogate_id, "slr_id": sink_slr_id, "sl_status": "Active", - "iat": "", + "iat": sink_slsr_iat, "prev_record_id": "NULL" }, "type": "ServiceLinkStatusRecord" @@ -209,21 +210,10 @@ "type": "ConsentRecord", "attributes": { "common_part": { - "version_number": "1.2", + "version": "1.2", "cr_id": source_cr_id, "surrogate_id": source_surrogate_id, - "rs_id": rs_id, - "slr_id": source_slr_id, - "issued": "timestamp", - "not_before": not_before, - "not_after": not_after, - "issued_at": operator_id, - "subject_id": source_service_id - }, - "role_specific_part": { - "role": "Source", - "auth_token_issuer_key": {}, - "resource_set_description": { + "rs_description": { "resource_set": { "rs_id": rs_id, "dataset": [ @@ -237,20 +227,51 @@ } ] } + }, + "slr_id": source_slr_id, + "iat": source_cr_iat, + "nbf": cr_not_before, + "exp": cr_not_after, + "operator": operator_id, + "subject_id": source_service_id, + "role": "Source" + }, + "role_specific_part": { + "pop_key": { + "key": { + "y": "FFuMENxef5suGtcBz4PWXt_KvRUHdURU5kH7EI5GZj8", + "x": "5IxIntzP7SPShzbGVW6dVYQlMsJ9kg9rjrE5Z3B6fmg", + "kid": "SRVMGNT-IDK3Y", + "crv": "P-256", + "kty": "EC" 
+ } + }, + "token_issuer_key": { + "key": { + "y": "FFuMENxef5suGtcBz4PWXt_KvRUHdURU5kH7EI5GZj8", + "x": "5IxIntzP7SPShzbGVW6dVYQlMsJ9kg9rjrE5Z3B6fmg", + "kid": "SRVMGNT-IDK3Y", + "crv": "P-256", + "kty": "EC" + } } }, - "ki_cr": {}, - "extensions": {} + "consent_receipt_part": { + "ki_cr": {} + }, + "extension_part": { + "extensions": {} + } } }, "consentStatusRecordPayload": { "type": "ConsentStatusRecord", "attributes": { "record_id": source_csr_id, - "account_id": source_surrogate_id, + "surrogate_id": source_surrogate_id, "cr_id": source_cr_id, "consent_status": "Active", - "iat": "timestamp", + "iat": source_csr_iat, "prev_record_id": "Null" } } @@ -260,37 +281,56 @@ "type": "ConsentRecord", "attributes": { "common_part": { - "version_number": "1.2", + "version": "1.2", "cr_id": sink_cr_id, "surrogate_id": sink_surrogate_id, - "rs_id": rs_id, + "rs_description": { + "resource_set": { + "rs_id": rs_id, + "dataset": [ + { + "dataset_id": dataset_id + "_1", + "distribution_id": distribution_id + "_1" + }, + { + "dataset_id": dataset_id + "_2", + "distribution_id": distribution_id + "_2" + } + ] + } + }, "slr_id": sink_slr_id, - "issued": "timestamp", - "not_before": not_before, - "not_after": not_after, - "issued_at": operator_id, - "subject_id": sink_service_id + "iat": sink_cr_iat, + "nbf": cr_not_before, + "exp": cr_not_after, + "operator": operator_id, + "subject_id": sink_service_id, + "role": "Sink" }, "role_specific_part": { - "role": "Sink", + "source_cr_id": source_cr_id, "usage_rules": [ "Rule 1", "Rule 2", "Rule 3" ] }, - "ki_cr": {}, - "extensions": {} + "consent_receipt_part": { + "ki_cr": {} + }, + "extension_part": { + "extensions": {} + } } }, "consentStatusRecordPayload": { "type": "ConsentStatusRecord", "attributes": { "record_id": sink_csr_id, - "account_id": sink_surrogate_id, + "surrogate_id": sink_surrogate_id, "cr_id": sink_cr_id, "consent_status": "Active", - "iat": "timestamp", + "iat": sink_csr_iat, "prev_record_id": "Null" } } @@ 
-298,6 +338,34 @@ } } +source_change_cr_status_payload = { + "data": { + "type": "ConsentStatusRecord", + "attributes": { + "record_id": source_csr_id_new, + "surrogate_id": source_surrogate_id, + "cr_id": source_cr_id, + "consent_status": "Disabled", + "iat": source_csr_iat, + "prev_record_id": source_csr_id + } + } + } + +source_change_cr_status_payload_2 = { + "data": { + "type": "ConsentStatusRecord", + "attributes": { + "record_id": source_csr_id_new_2, + "surrogate_id": source_surrogate_id, + "cr_id": source_cr_id, + "consent_status": "Active", + "iat": source_csr_iat, + "prev_record_id": source_csr_id_new + } + } + } + def slr_sign(host=None, account_id=None, headers=None, data=None): if host is None: @@ -314,7 +382,7 @@ def slr_sign(host=None, account_id=None, headers=None, data=None): print("Request") print("Endpoint: " + endpoint) - print("Payload: " + json.dumps(data, indent=3)) + print("Payload: " + json.dumps(data)) req = requests.post(url, headers=headers, json=data) status_code = str(req.status_code) @@ -348,7 +416,7 @@ def slr_verify(host=None, account_id=None, headers=None, slr_to_verify=None, dat print("Request") print("Endpoint: " + endpoint) - print("Payload: " + json.dumps(data, indent=3)) + print("Payload: " + json.dumps(data)) req = requests.post(url, headers=headers, json=data) status_code = str(req.status_code) @@ -399,7 +467,103 @@ def give_consent(host=None, account_id=None, source_slr_id=None, sink_slr_id=Non print("Request") print("Endpoint: " + endpoint) - print("Payload: " + json.dumps(data, indent=3)) + print("Payload: " + json.dumps(data)) + + req = requests.post(url, headers=headers, json=data) + status_code = str(req.status_code) + response_data = json.loads(req.text) + + return status_code, response_data + + +# Get Authorization token data +def get_auth_token_data(host=None, headers=None, cr_id=None): + if host is None: + raise AttributeError("Provide host as parameter") + if headers is None: + raise AttributeError("Provide 
headers as parameter") + if cr_id is None: + raise AttributeError("Provide cr_id as parameter") + + endpoint = "/api/consent/" + str(cr_id) + "/authorizationtoken/" + url = host + endpoint + + print("Request") + print("Endpoint: " + endpoint) + + req = requests.get(url, headers=headers) + status_code = str(req.status_code) + print("status_code:" + status_code) + response_data = json.loads(req.text) + + return status_code, response_data + + +# Get last CR status +def get_last_cr_status(host=None, headers=None, cr_id=None): + if host is None: + raise AttributeError("Provide host as parameter") + if headers is None: + raise AttributeError("Provide headers as parameter") + if cr_id is None: + raise AttributeError("Provide cr_id as parameter") + + endpoint = "/api/consent/" + str(cr_id) + "/status/last/" + url = host + endpoint + + print("Request") + print("Endpoint: " + endpoint) + + req = requests.get(url, headers=headers) + status_code = str(req.status_code) + print("status_code:" + status_code) + response_data = json.loads(req.text) + + return status_code, response_data + + +# Get Missing CR statuses +def get_cr_statuses(host=None, headers=None, cr_id=None, last_csr_id=None): + if host is None: + raise AttributeError("Provide host as parameter") + if headers is None: + raise AttributeError("Provide headers as parameter") + if cr_id is None: + raise AttributeError("Provide cr_id as parameter") + if last_csr_id is None: + endpoint = "/api/consent/" + str(cr_id) + "/status/" + else: + endpoint = "/api/consent/" + str(cr_id) + "/status/?csr_id=" + last_csr_id + + url = host + endpoint + + print("Request") + print("Endpoint: " + endpoint) + + req = requests.get(url, headers=headers) + status_code = str(req.status_code) + print("status_code:" + status_code) + response_data = json.loads(req.text) + + return status_code, response_data + + +def change_consent_status(host=None, cr_id=None, headers=None, data=None): + if host is None: + raise AttributeError("Provide host as 
parameter") + if cr_id is None: + raise AttributeError("Provide cr_id as parameter") + if headers is None: + raise AttributeError("Provide headers as parameter") + if data is None: + raise AttributeError("Provide consent_data as parameter") + + endpoint = "/api/consent/" + str(cr_id) + "/status/" + url = host + endpoint + + print("Request") + print("Endpoint: " + endpoint) + print("Payload: " + json.dumps(data)) req = requests.post(url, headers=headers, json=data) status_code = str(req.status_code) @@ -415,37 +579,41 @@ def give_consent(host=None, account_id=None, source_slr_id=None, sink_slr_id=Non except Exception as exp: error_title = "Source SLR filed" print(error_title + ": " + repr(exp)) + raise else: - request_statuses.append("Source SLR: " + source_slr[0]) + request_statuses.append("Source SLR: " + source_slr[0] + " | " + json.dumps(source_slr[1])) print ("Response: " + source_slr[0]) - print (json.dumps(source_slr[1], indent=3)) + print (json.dumps(source_slr[1])) -# Sink SLR sign + +# Source SLR verify print ("------------------------------------") -print("Sink SLR") +print("Source SLR verify") try: - sink_slr = slr_sign(host=account_host, account_id=account_id, headers=headers, data=sink_slr_payload) + source_slr_verified = slr_verify(host=account_host, account_id=account_id, headers=headers, slr_to_verify=source_slr[1]['data']['slr']['attributes']['slr'], data_template=source_ssr_payload) except Exception as exp: - error_title = "Sink SLR filed" + error_title = "Source SLR verification filed" print(error_title + ": " + repr(exp)) + raise else: - request_statuses.append("Sink SLR: " + sink_slr[0]) - print ("Response: " + sink_slr[0]) - print (json.dumps(sink_slr[1], indent=3)) + request_statuses.append("Source SLR verify: " + source_slr_verified[0] + " | " + json.dumps(source_slr_verified[1])) + print ("Response: " + source_slr_verified[0]) + print (json.dumps(source_slr_verified[1])) -# Source SLR verify +# Sink SLR sign print 
("------------------------------------") -print("Source SLR verify") +print("Sink SLR") try: - source_slr_verified = slr_verify(host=account_host, account_id=account_id, headers=headers, slr_to_verify=source_slr[1]['data']['slr']['attributes']['slr'], data_template=source_ssr_payload) + sink_slr = slr_sign(host=account_host, account_id=account_id, headers=headers, data=sink_slr_payload) except Exception as exp: - error_title = "Source SLR verification filed" + error_title = "Sink SLR filed" print(error_title + ": " + repr(exp)) + raise else: - request_statuses.append("Source SLR verify: " + source_slr_verified[0]) - print ("Response: " + source_slr_verified[0]) - print (json.dumps(source_slr_verified[1], indent=3)) + request_statuses.append("Sink SLR: " + sink_slr[0] + " | " + json.dumps(sink_slr[1])) + print ("Response: " + sink_slr[0]) + print (json.dumps(sink_slr[1])) # Sink SLR verify print ("------------------------------------") @@ -455,10 +623,11 @@ def give_consent(host=None, account_id=None, source_slr_id=None, sink_slr_id=Non except Exception as exp: error_title = "Sink SLR verification filed" print(error_title + ": " + repr(exp)) + raise else: - request_statuses.append("Sink SLR verify: " + sink_slr_verified[0]) + request_statuses.append("Sink SLR verify: " + sink_slr_verified[0] + " | " + json.dumps(sink_slr_verified[1])) print ("Response: " + sink_slr_verified[0]) - print (json.dumps(sink_slr_verified[1], indent=3)) + print (json.dumps(sink_slr_verified[1])) # Surrogate Source @@ -469,10 +638,11 @@ def give_consent(host=None, account_id=None, source_slr_id=None, sink_slr_id=Non except Exception as exp: error_title = "Consenting failed" print(error_title + ": " + repr(exp)) + raise else: - request_statuses.append("Get Surrogate Source: " + sur[0]) + request_statuses.append("Get Surrogate Source: " + sur[0] + " | " + json.dumps(sur[1])) print ("Response: " + sur[0]) - print (json.dumps(sur[1], indent=3)) + print (json.dumps(sur[1])) # Surrogate Sink @@ 
-483,10 +653,11 @@ def give_consent(host=None, account_id=None, source_slr_id=None, sink_slr_id=Non except Exception as exp: error_title = "Consenting failed" print(error_title + ": " + repr(exp)) + raise else: - request_statuses.append("Get Surrogate Sink: " + sur[0]) + request_statuses.append("Get Surrogate Sink: " + sur[0] + " | " + json.dumps(sur[1])) print ("Response: " + sur[0]) - print (json.dumps(sur[1], indent=3)) + print (json.dumps(sur[1])) # Give consent @@ -497,10 +668,107 @@ def give_consent(host=None, account_id=None, source_slr_id=None, sink_slr_id=Non except Exception as exp: error_title = "Consenting failed" print(error_title + ": " + repr(exp)) + raise else: - request_statuses.append("Give Consent: " + consenting[0]) + request_statuses.append("Give Consent: " + consenting[0] + " | " + json.dumps(consenting[1])) print ("Response: " + consenting[0]) - print (json.dumps(consenting[1], indent=3)) + print (json.dumps(consenting[1])) + + +# Get Authorization token +print ("------------------------------------") +print("Get Authorization token") + +try: + token = get_auth_token_data(host=account_host, cr_id=sink_cr_id, headers=headers) +except Exception as exp: + error_title = "Could not get Authorization token" + print(error_title + ": " + repr(exp)) + raise +else: + request_statuses.append("Authorization token: " + token[0] + " | " + json.dumps(token[1])) + print ("Response: " + token[0]) + print (json.dumps(token[1])) + + +# Get last CR Status +print ("------------------------------------") +print("Get last CR Status") + +try: + last_cr = get_last_cr_status(host=account_host, cr_id=source_cr_id, headers=headers) +except Exception as exp: + error_title = "Could not get last CR Status" + print(error_title + ": " + repr(exp)) + raise +else: + request_statuses.append("Last CR Status: " + last_cr[0] + " | " + json.dumps(last_cr[1])) + print ("Response: " + last_cr[0]) + print (json.dumps(last_cr[1])) + + +# Change CR Status +print 
("------------------------------------") +print("Change CR Status") + +try: + new_cr = change_consent_status(host=account_host, cr_id=source_cr_id, headers=headers, data=source_change_cr_status_payload) +except Exception as exp: + error_title = "Could not change CR Status" + print(error_title + ": " + repr(exp)) + raise +else: + request_statuses.append("Change CR Status: " + new_cr[0] + " | " + json.dumps(new_cr[1])) + print ("Response: " + new_cr[0]) + print (json.dumps(new_cr[1])) + +# +# Change CR Status again +print ("------------------------------------") +print("Change CR Status") + +try: + new_cr = change_consent_status(host=account_host, cr_id=source_cr_id, headers=headers, data=source_change_cr_status_payload_2) +except Exception as exp: + error_title = "Could not change CR Status" + print(error_title + ": " + repr(exp)) + raise +else: + request_statuses.append("Change CR Status: " + new_cr[0] + " | " + json.dumps(new_cr[1])) + print ("Response: " + new_cr[0]) + print (json.dumps(new_cr[1])) + + +# Get new last CR Status +print ("------------------------------------") +print("Get new last CR Status") + +try: + last_cr = get_last_cr_status(host=account_host, cr_id=source_cr_id, headers=headers) +except Exception as exp: + error_title = "Could not get new last CR Status" + print(error_title + ": " + repr(exp)) + raise +else: + request_statuses.append("New last CR Status: " + last_cr[0] + " | " + json.dumps(last_cr[1])) + print ("Response: " + last_cr[0]) + print (json.dumps(last_cr[1])) + + +# Get missing CR Statuses +print ("------------------------------------") +print("Get missing CR Statuses") + +try: + last_cr = get_cr_statuses(host=account_host, cr_id=source_cr_id, last_csr_id=source_csr_id_new, headers=headers) +except Exception as exp: + error_title = "Could not get missing CR Statuses" + print(error_title + ": " + repr(exp)) + raise +else: + request_statuses.append("Missing CR Statuses: " + last_cr[0] + " | " + json.dumps(last_cr[1])) + print 
("Response: " + last_cr[0]) + print (json.dumps(last_cr[1])) print ("------------------------------------") diff --git a/Account/operator_emulator/ui_emulator.py b/Account/operator_emulator/ui_emulator.py new file mode 100644 index 0000000..62384ec --- /dev/null +++ b/Account/operator_emulator/ui_emulator.py @@ -0,0 +1,911 @@ +# -*- coding: utf-8 -*- + +""" +Minimum viable account - MyData Operator UI Emulator + +__author__ = "Jani Yli-Kantola" +__copyright__ = "Digital Health Revolution (c) 2016" +__credits__ = ["Harri Hirvonsalo", "Aleksi Palomäki"] +__license__ = "MIT" +__version__ = "0.0.1" +__maintainer__ = "Jani Yli-Kantola" +__contact__ = "https://github.com/HIIT/mydata-stack" +__status__ = "Development" +__date__ = 12.8.2016 +""" +from uuid import uuid4 + +import requests +import time +from requests.auth import HTTPBasicAuth +import json + +request_statuses = [] + +account_ip = "http://127.0.0.1" +account_port = "8080" +account_host = account_ip+":"+account_port +headers = {'Content-Type': 'application/json'} + +account_id = "" +particular_id = "" +contacts_id = "" + + +predefined_account_username = "testUser" +predefined_account_password = "Hello" + +username = "example_username-" + str(uuid4()) +password = "example_password" + +account_template = { + "data": { + "type": "Account", + "attributes": { + 'firstName': 'ExampleFirstName', + 'lastName': 'ExampleLastName', + 'dateOfBirth': '2010-05-14', + 'email': username + '@examlpe.org', + 'username': username, + 'password': password, + 'acceptTermsOfService': 'True' + } + } +} + +particular_template_for_patch = { + "data": { + "type": "Particular", + "attributes": { + 'lastname': 'NewExampleLastName' + } + } +} + +contact_template = { + "data": { + "type": "Contact", + "attributes": { + 'address1': 'Example address 1', + 'address2': 'Example address 2', + 'postalCode': '97584', + 'city': 'Example city', + 'state': 'Example state', + 'country': 'Example country', + 'type': 'Personal', + 'primary': 'True' + } + 
} +} + +contact_template_for_patch = { + "data": { + "type": "Contact", + "attributes": { + 'address1': 'Example address 1', + 'address2': 'Example address 2', + 'postalCode': '65784', + 'city': 'Example city', + 'state': 'Example state', + 'country': 'Example country', + 'type': 'Personal', + 'primary': 'False' + } + } +} + +email_template = { + "data": { + "type": "Email", + "attributes": { + 'email': 'erkki@example.com', + 'type': 'Personal', + 'primary': 'True' + } + } +} + +email_template_for_patch = { + "data": { + "type": "Email", + "attributes": { + 'email': 'pasi@example.org', + 'type': 'School', + 'primary': 'False' + } + } +} + +telephone_template = { + "data": { + "type": "Telephone", + "attributes": { + 'tel': '0501234567', + 'type': 'Personal', + 'primary': 'True' + } + } +} + +telephone_template_for_patch = { + "data": { + "type": "Telephone", + "attributes": { + 'tel': '+358 50 123 4567', + 'type': 'School', + 'primary': 'False' + } + } +} + +setting_template = { + "data": { + "type": "Setting", + "attributes": { + 'key': 'lang', + 'value': 'fi' + } + } +} + +setting_template_for_patch = { + "data": { + "type": "Setting", + "attributes": { + 'key': 'lang', + 'value': 'se' + } + } +} + + + +def post(host=None, endpoint=None, headers=None, data=None): + if host is None: + raise AttributeError("Provide host as parameter") + if endpoint is None: + raise AttributeError("Provide endpoint as parameter") + if headers is None: + raise AttributeError("Provide headers as parameter") + if data is None: + raise AttributeError("Provide data as parameter") + + url = host + endpoint + print("Endpoint: " + endpoint) + print("Headers: " + json.dumps(headers)) + print("Payload: " + json.dumps(data)) + + req = requests.post(url, headers=headers, json=data) + status_code = str(req.status_code) + print ("Response status: " + str(req.status_code)) + try: + response_data = json.loads(req.text) + except Exception as exp: + print(repr(exp)) + print("req.text: " + 
repr(req.text)) + response_data = repr(req.text) + + return status_code, response_data + + +def patch(host=None, endpoint=None, headers=None, data=None): + if host is None: + raise AttributeError("Provide host as parameter") + if endpoint is None: + raise AttributeError("Provide endpoint as parameter") + if headers is None: + raise AttributeError("Provide headers as parameter") + if data is None: + raise AttributeError("Provide data as parameter") + + url = host + endpoint + print("Endpoint: " + endpoint) + print("Headers: " + json.dumps(headers)) + print("Payload: " + json.dumps(data)) + + req = requests.patch(url, headers=headers, json=data) + status_code = str(req.status_code) + print ("Response status: " + str(req.status_code)) + try: + response_data = json.loads(req.text) + except Exception as exp: + print(repr(exp)) + print("req.text: " + repr(req.text)) + response_data = repr(req.text) + + return status_code, response_data + + +def get(host=None, endpoint=None, headers=None, username=None, password=None): + if host is None: + raise AttributeError("Provide host as parameter") + if endpoint is None: + raise AttributeError("Provide endpoint as parameter") + if headers is None: + raise AttributeError("Provide headers as parameter") + + url = host + endpoint + print("Endpoint: " + endpoint) + print("Headers: " + json.dumps(headers)) + + if username is not None and password is not None: + req = requests.get(url, headers=headers, auth=HTTPBasicAuth(username=username, password=password)) + else: + req = requests.get(url, headers=headers) + status_code = str(req.status_code) + print ("Response status: " + str(req.status_code)) + try: + response_data = json.loads(req.text) + except Exception as exp: + print(repr(exp)) + print("req.text: " + repr(req.text)) + response_data = repr(req.text) + + return status_code, response_data + + +######### Actions + +################################## +# Create Account and Authenticate +################################## +label = "# 
\n# Create Account and Authenticate \n#################################" +print(label) +request_statuses.append(label) + +if not predefined_account_username and not predefined_account_password: + # + # Create Account + title = "Create Account" + print(title) + try: + account = post(host=account_host, endpoint="/api/accounts/", headers=headers, data=account_template) + except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise + else: + request_response = title + ": " + account[0] + ": " + json.dumps(account[1]) + print('request_response: ' + request_response) + request_statuses.append(request_response) + account_id = str(account[1]['data'].get("id", "None")) + print ("Response " + account[0] + ": " + json.dumps(account[1])) + print ("Account ID: " + account_id) + +else: + print("Using predefined account") + username = predefined_account_username + password = predefined_account_password + +# +# Authenticate +print ("------------------------------------") +title = "Authenticate" +print(title) +try: + api_auth = get(host=account_host, endpoint="/api/auth/user/", headers=headers, username=username, password=password) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_response = title + ": " + api_auth[0] + ": " + json.dumps(api_auth[1]) + print('request_response: ' + request_response) + request_statuses.append(request_response) + apikey = str(api_auth[1].get("Api-Key", "None")) + account_id = str(api_auth[1].get("account_id", "None")) + headers['Api-Key'] = apikey + print ("Response " + api_auth[0] + ": " + json.dumps(api_auth[1])) + print ("apikey: " + apikey) + +# +# ################################## +# # PARTICULARS +# ################################## +label = "# \n# PARTICULARS \n#################################" +print(label) 
+request_statuses.append(label) + +title = "List Particulars" +print(title) +try: + entries = get(host=account_host, endpoint="/api/accounts/" + account_id + "/particulars/", headers=headers) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_response = title + ": " + entries[0] + ": " + json.dumps(entries[1]) + print('request_response: ' + request_response) + request_statuses.append(request_response) + particular_id = str(entries[1]['data'][0].get("id", "None")) + print ("Response " + entries[0] + ": " + json.dumps(entries[1])) + print ("particular_id: " + particular_id) + + +print ("------------------------------------") +title = "One Particular" +print(title) +try: + entry = get(host=account_host, endpoint="/api/accounts/" + account_id + "/particulars/" + particular_id + "/", headers=headers) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_response = title + ": " + entry[0] + ": " + json.dumps(entry[1]) + print('request_response: ' + request_response) + request_statuses.append(request_response) + print ("Response " + entry[0] + ": " + json.dumps(entry[1])) + print ("particular_id: " + str(entry[1]['data'].get("id", "None"))) + + +print ("------------------------------------") +title = "Patch Particular" +print(title) +try: + particular_template_for_patch['data']['id'] = str(particular_id) + updated_entry = patch(host=account_host, endpoint="/api/accounts/" + account_id + "/particulars/" + particular_id + "/", headers=headers, data=particular_template_for_patch) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_response = title + ": " + updated_entry[0] + ": " + json.dumps(updated_entry[1]) + 
print('request_response: ' + request_response) + request_statuses.append(request_response) + print ("Response " + updated_entry[0] + ": " + json.dumps(updated_entry[1])) + + +# ################################## +# # CONTACTS +# ################################## +label = "# \n# CONTACTS \n#################################" +print(label) +request_statuses.append(label) + +title = "Add Contact" +print(title) +try: + new_entry = post(host=account_host, endpoint="/api/accounts/" + account_id + "/contacts/", headers=headers, data=contact_template) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_response = title + ": " + new_entry[0] + ": " + json.dumps(new_entry[1]) + print('request_response: ' + request_response) + request_statuses.append(request_response) + print ("Response " + new_entry[0] + ": " + json.dumps(new_entry[1])) + +print ("------------------------------------") +title = "List Contacts" +print(title) +try: + entries = get(host=account_host, endpoint="/api/accounts/" + account_id + "/contacts/", headers=headers) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_response = title + ": " + entries[0] + ": " + json.dumps(entries[1]) + print('request_response: ' + request_response) + request_statuses.append(request_response) + contacts_id = str(entries[1]['data'][0].get("id", "None")) + print ("Response " + entries[0] + ": " + json.dumps(entries[1])) + print ("contacts_id: " + contacts_id) + + +print ("------------------------------------") +title = "One Contact" +print(title) +try: + entry = get(host=account_host, endpoint="/api/accounts/" + account_id + "/contacts/" + contacts_id + "/", headers=headers) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + 
request_statuses.append(request_response) + raise +else: + request_statuses.append(title + ": " + entry[0] + ": " + json.dumps(entry[1])) + print ("Response " + entry[0] + ": " + json.dumps(entry[1])) + print ("contacts_id: " + str(entry[1]['data'].get("id", "None"))) + + +print ("------------------------------------") +title = "Patch Contact" +print(title) +try: + contact_template_for_patch['data']['id'] = str(contacts_id) + updated_entry = patch(host=account_host, endpoint="/api/accounts/" + account_id + "/contacts/" + contacts_id + "/", headers=headers, data=contact_template_for_patch) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_response = title + ": " + updated_entry[0] + ": " + json.dumps(updated_entry[1]) + print('request_response: ' + request_response) + request_statuses.append(request_response) + print ("Response " + updated_entry[0] + ": " + json.dumps(updated_entry[1])) + + +# ################################## +# # EMAIL +# ################################## +label = "# \n# EMAIL \n#################################" +print(label) +request_statuses.append(label) + +title = "Add Email" +print(title) +try: + new_entry = post(host=account_host, endpoint="/api/accounts/" + account_id + "/emails/", headers=headers, data=email_template) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_response = title + ": " + new_entry[0] + ": " + json.dumps(new_entry[1]) + print('request_response: ' + request_response) + request_statuses.append(request_response) + print ("Response " + new_entry[0] + ": " + json.dumps(new_entry[1])) + +print ("------------------------------------") +title = "List Emails" +print(title) +try: + entries = get(host=account_host, endpoint="/api/accounts/" + account_id + "/emails/", 
headers=headers) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_response = title + ": " + entries[0] + ": " + json.dumps(entries[1]) + print('request_response: ' + request_response) + request_statuses.append(request_response) + email_id = str(entries[1]['data'][0].get("id", "None")) + print ("Response " + entries[0] + ": " + json.dumps(entries[1])) + print ("email_id: " + email_id) + + +print ("------------------------------------") +title = "One Email" +print(title) +try: + entry = get(host=account_host, endpoint="/api/accounts/" + account_id + "/emails/" + email_id + "/", headers=headers) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_statuses.append(title + ": " + entry[0] + ": " + json.dumps(entry[1])) + print ("Response " + entry[0] + ": " + json.dumps(entry[1])) + print ("email_id: " + str(entry[1]['data'].get("id", "None"))) + + +print ("------------------------------------") +title = "Patch Email" +print(title) +try: + email_template_for_patch['data']['id'] = str(email_id) + updated_entry = patch(host=account_host, endpoint="/api/accounts/" + account_id + "/emails/" + email_id + "/", headers=headers, data=email_template_for_patch) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_response = title + ": " + updated_entry[0] + ": " + json.dumps(updated_entry[1]) + print('request_response: ' + request_response) + request_statuses.append(request_response) + print ("Response " + updated_entry[0] + ": " + json.dumps(updated_entry[1])) + + +# ################################## +# # TELEPHONE +# ################################## +label = "# \n# TELEPHONE \n#################################" 
+print(label) +request_statuses.append(label) + +title = "Add Telephone" +print(title) +try: + new_entry = post(host=account_host, endpoint="/api/accounts/" + account_id + "/telephones/", headers=headers, data=telephone_template) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_response = title + ": " + new_entry[0] + ": " + json.dumps(new_entry[1]) + print('request_response: ' + request_response) + request_statuses.append(request_response) + print ("Response " + new_entry[0] + ": " + json.dumps(new_entry[1])) + +print ("------------------------------------") +title = "List Telephones" +print(title) +try: + entries = get(host=account_host, endpoint="/api/accounts/" + account_id + "/telephones/", headers=headers) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_response = title + ": " + entries[0] + ": " + json.dumps(entries[1]) + print('request_response: ' + request_response) + request_statuses.append(request_response) + telephones_id = str(entries[1]['data'][0].get("id", "None")) + print ("Response " + entries[0] + ": " + json.dumps(entries[1])) + print ("telephones_id: " + telephones_id) + + +print ("------------------------------------") +title = "One Telephone" +print(title) +try: + entry = get(host=account_host, endpoint="/api/accounts/" + account_id + "/telephones/" + telephones_id + "/", headers=headers) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_statuses.append(title + ": " + entry[0] + ": " + json.dumps(entry[1])) + print ("Response " + entry[0] + ": " + json.dumps(entry[1])) + print ("telephones_id: " + str(entry[1]['data'].get("id", "None"))) + + +print 
("------------------------------------") +title = "Patch Telephone" +print(title) +try: + telephone_template_for_patch['data']['id'] = str(telephones_id) + updated_entry = patch(host=account_host, endpoint="/api/accounts/" + account_id + "/telephones/" + telephones_id + "/", headers=headers, data=telephone_template_for_patch) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_response = title + ": " + updated_entry[0] + ": " + json.dumps(updated_entry[1]) + print('request_response: ' + request_response) + request_statuses.append(request_response) + print ("Response " + updated_entry[0] + ": " + json.dumps(updated_entry[1])) + + +# ################################## +# # SETTINGS +# ################################## +label = "# \n# SETTINGS \n#################################" +print(label) +request_statuses.append(label) + +title = "Add Setting" +print(title) +try: + new_entry = post(host=account_host, endpoint="/api/accounts/" + account_id + "/settings/", headers=headers, data=setting_template) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_response = title + ": " + new_entry[0] + ": " + json.dumps(new_entry[1]) + print('request_response: ' + request_response) + request_statuses.append(request_response) + print ("Response " + new_entry[0] + ": " + json.dumps(new_entry[1])) + +print ("------------------------------------") +title = "List Settings" +print(title) +try: + entries = get(host=account_host, endpoint="/api/accounts/" + account_id + "/settings/", headers=headers) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_response = title + ": " + entries[0] + ": " + json.dumps(entries[1]) + 
print('request_response: ' + request_response) + request_statuses.append(request_response) + settings_id = str(entries[1]['data'][0].get("id", "None")) + print ("Response " + entries[0] + ": " + json.dumps(entries[1])) + print ("settings_id: " + settings_id) + + +print ("------------------------------------") +title = "One Setting" +print(title) +try: + entry = get(host=account_host, endpoint="/api/accounts/" + account_id + "/settings/" + settings_id + "/", headers=headers) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_statuses.append(title + ": " + entry[0] + ": " + json.dumps(entry[1])) + print ("Response " + entry[0] + ": " + json.dumps(entry[1])) + print ("settings_id: " + str(entry[1]['data'].get("id", "None"))) + + +print ("------------------------------------") +title = "Patch Setting" +print(title) +try: + setting_template_for_patch['data']['id'] = str(settings_id) + updated_entry = patch(host=account_host, endpoint="/api/accounts/" + account_id + "/settings/" + settings_id + "/", headers=headers, data=setting_template_for_patch) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_response = title + ": " + updated_entry[0] + ": " + json.dumps(updated_entry[1]) + print('request_response: ' + request_response) + request_statuses.append(request_response) + print ("Response " + updated_entry[0] + ": " + json.dumps(updated_entry[1])) + + +# ################################## +# # EVENT LOGS +# ################################## +# # label = "# \n# EVENT LOGS \n#################################" +# # print(label) +# # request_statuses.append(label) +# # +# # print ("------------------------------------") +# # title = "List Events" +# # print(title) +# # try: +# # entries = get(host=account_host, 
endpoint="/api/accounts/" + account_id + "/logs/events/", headers=headers) +# # except Exception as exp: +# # print(title + ": " + repr(exp)) +# # request_response = title + ": " + repr(exp) +# # request_statuses.append(request_response) +# # raise +# # else: +# # request_response = title + ": " + entries[0] + ": " + json.dumps(entries[1]) +# # print('request_response: ' + request_response) +# # request_statuses.append(request_response) +# # event_log_id = str(entries[1]['data'][0].get("id", "None")) +# # print ("Response " + new_entry[0] + ": " + json.dumps(new_entry[1])) +# # print ("event_log_id: " + event_log_id) +# # +# # +# # print ("------------------------------------") +# # title = "One Event" +# # print(title) +# # try: +# # entry = get(host=account_host, endpoint="/api/accounts/" + account_id + "/logs/events/" + event_log_id + "/", headers=headers) +# # except Exception as exp: +# # print(title + ": " + repr(exp)) +# # request_response = title + ": " + repr(exp) +# # request_statuses.append(request_response) +# # raise +# # else: +# # request_statuses.append(title + ": " + entry[0] + ": " + json.dumps(entry[1])) +# # print ("Response " + entry[0] + ": " + json.dumps(entry[1])) +# # print ("event_log_id: " + str(entry[1]['data'].get("id", "None"))) +# +# +# ################################## +# # Service Link Records +# ################################## +label = "# \n# Service Link Records \n#################################" +print(label) +request_statuses.append(label) + +print ("------------------------------------") +title = "Service Link Records" +print(title) +try: + entries = get(host=account_host, endpoint="/api/accounts/" + account_id + "/servicelinks/", headers=headers) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_response = title + ": " + entries[0] + ": " + json.dumps(entries[1]) + print('request_response: ' + 
request_response) + request_statuses.append(request_response) + slr_id = str(entries[1]['data'][0].get("id", "None")) + print ("Response " + entries[0] + ": " + json.dumps(entries[1])) + print ("slr_id: " + slr_id) + + +print ("------------------------------------") +title = "One Service Link Record" +print(title) +try: + entry = get(host=account_host, endpoint="/api/accounts/" + account_id + "/servicelinks/" + slr_id + "/", headers=headers) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_statuses.append(title + ": " + entry[0] + ": " + json.dumps(entry[1])) + print ("Response " + entry[0] + ": " + json.dumps(entry[1])) + print ("slr_id: " + str(entry[1]['data'].get("id", "None"))) + + +################################## +# Service Link Status Records +################################## +label = "# \n# Service Link Status Records \n#################################" +print(label) +request_statuses.append(label) + +print ("------------------------------------") +title = "Service Link Status Records" +print(title) +try: + entries = get(host=account_host, endpoint="/api/accounts/" + account_id + "/servicelinks/" + slr_id + "/statuses/", headers=headers) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_response = title + ": " + entries[0] + ": " + json.dumps(entries[1]) + print('request_response: ' + request_response) + request_statuses.append(request_response) + slsr_id = str(entries[1]['data'][0].get("id", "None")) + print ("Response " + entries[0] + ": " + json.dumps(entries[1])) + print ("slsr_id: " + slsr_id) + + +print ("------------------------------------") +title = "One Service Link Status Record" +print(title) +try: + entry = get(host=account_host, endpoint="/api/accounts/" + account_id + "/servicelinks/" + 
slr_id + "/statuses/" + slsr_id + "/", headers=headers) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_statuses.append(title + ": " + entry[0] + ": " + json.dumps(entry[1])) + print ("Response " + entry[0] + ": " + json.dumps(entry[1])) + print ("slsr_id: " + str(entry[1]['data'].get("id", "None"))) + + +################################## +# Consent Records +################################## +label = "# \n# Consent Records \n#################################" +print(label) +request_statuses.append(label) + +print ("------------------------------------") +title = "Consent Records" +print(title) +try: + entries = get(host=account_host, endpoint="/api/accounts/" + account_id + "/servicelinks/" + slr_id + "/consents/", headers=headers) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_response = title + ": " + entries[0] + ": " + json.dumps(entries[1]) + print('request_response: ' + request_response) + request_statuses.append(request_response) + cr_id = str(entries[1]['data'][0].get("id", "None")) + print ("Response " + entries[0] + ": " + json.dumps(entries[1])) + print ("cr_id: " + cr_id) + + +print ("------------------------------------") +title = "One Consent Record" +print(title) +try: + entry = get(host=account_host, endpoint="/api/accounts/" + account_id + "/servicelinks/" + slr_id + "/consents/" + cr_id + "/", headers=headers) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_statuses.append(title + ": " + entry[0] + ": " + json.dumps(entry[1])) + print ("Response " + entry[0] + ": " + json.dumps(entry[1])) + print ("cr_id: " + str(entry[1]['data'].get("id", "None"))) + + 
+################################## +# Consent Status Records +################################## +label = "# \n# Consent Status Records \n#################################" +print(label) +request_statuses.append(label) + +print ("------------------------------------") +title = "Consent Status Records" +print(title) +try: + entries = get(host=account_host, endpoint="/api/accounts/" + account_id + "/servicelinks/" + slr_id + "/consents/" + cr_id + "/statuses/", headers=headers) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_response = title + ": " + entries[0] + ": " + json.dumps(entries[1]) + print('request_response: ' + request_response) + request_statuses.append(request_response) + csr_id = str(entries[1]['data'][0].get("id", "None")) + print ("Response " + entries[0] + ": " + json.dumps(entries[1])) + print ("csr_id: " + csr_id) + + +print ("------------------------------------") +title = "One Consent Status Record" +print(title) +try: + entry = get(host=account_host, endpoint="/api/accounts/" + account_id + "/servicelinks/" + slr_id + "/consents/" + cr_id + "/statuses/" + csr_id + "/", headers=headers) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_statuses.append(title + ": " + entry[0] + ": " + json.dumps(entry[1])) + request_statuses.append("csr_id: " + str(entry[1]['data'].get("id", "None"))) + print ("Response " + entry[0] + ": " + json.dumps(entry[1])) + print ("csr_id: " + str(entry[1]['data'].get("id", "None"))) + + +################################## +# Export Account +################################## +label = "# \n# Account Export \n#################################" +print(label) +request_statuses.append(label) + +print ("------------------------------------") +title = "Account Export" 
+print(title) +try: + entries = get(host=account_host, endpoint="/api/accounts/" + account_id + "/export/", headers=headers) +except Exception as exp: + print(title + ": " + repr(exp)) + request_response = title + ": " + repr(exp) + request_statuses.append(request_response) + raise +else: + request_response = title + ": " + entries[0] + ": " + json.dumps(entries[1]) + print('request_response: ' + request_response) + request_statuses.append(request_response) + print ("Response " + entries[0] + ": " + json.dumps(entries[1])) + + + +################################# +################################# +################################# +################################# +# REPORT # +################################# +print ("=====================================") +print("Request report") +for request in request_statuses: + print(request) + diff --git a/Dockerfile-overholt b/Dockerfile-overholt new file mode 100644 index 0000000..718fa54 --- /dev/null +++ b/Dockerfile-overholt @@ -0,0 +1,105 @@ +FROM python:2.7 +MAINTAINER hjhsalo + +# NOTE: Baseimage python:2.7 already contains latest pip + +# TODO: Compile cryptography (and everything else pip related) elsewhere and +# get rid of "build-essential libssl-dev libffi-dev python-dev" +# Maybe according to these instructions: +# https://glyph.twistedmatrix.com/2015/03/docker-deploy-double-dutch.html + +# TODO: Double check and think about the order of commands. Should application +# specific stuff be moved to the end of the file? +# What are actually application specific? etc. + +# TODO: Have brainstorming session on how to properly setup EXPOSE ports, hosts, etc. +# Now it is difficult to come up with sensible defaults. +# Remember to check out what Docker Compose offers. + +# TODO: Make a new user and usergroup. +# Now everything including the ENTRYPOINT is being run as root which is bad +# practise and for example uWSGI complains about this. 
+ +### +# Install +# Specific structure where a single RUN is used to execute everything. +# Based on Docker Best practices -document. To force cache busting. +# https://docs.docker.com/engine/userguide/eng-image/dockerfile_best-practices/#/apt-get +# NOTE: python-mysql.connector is MyData Account specific dependency. +RUN apt-get update && apt-get install -y \ + build-essential \ + libffi-dev \ + libssl-dev \ + python-dev \ + celeryd \ + && rm -rf /var/lib/apt/lists/* + + +### +# Create an installation directory into the container +ARG APP_INSTALL_PATH=/mydata-sdk-components +ENV APP_INSTALL_PATH ${APP_INSTALL_PATH:-/mydata-sdk-components} + +RUN mkdir -p $APP_INSTALL_PATH + +# Change current directory inside the container / image to this path. +WORKDIR $APP_INSTALL_PATH + +ARG OVERHOLT_APPLICATION_PATH=/ +ENV OVERHOLT_APPLICATION_PATH ${OVERHOLT_APPLICATION_PATH:-/} + + +### +# Install application specific Python-dependencies. + +# NOTE: If you have multiple Dockerfile steps that use different files from +# your context, COPY them individually, rather than all at once. This will +# ensure that each step’s build cache is only invalidated (forcing the step +# to be re-run) if the specifically required files change. +# https://docs.docker.com/engine/userguide/eng-image/dockerfile_best-practices/#/add-or-copy +COPY $OVERHOLT_APPLICATION_PATH/requirements.txt /tmp/ +RUN pip install --requirement /tmp/requirements.txt + +# NOTE: j2cli is needed to preprocess config files based on values of +# environment variables +# https://github.com/kolypto/j2cli +# https://tryolabs.com/blog/2015/03/26/configurable-docker-containers-for-multiple-environments/ +RUN pip install j2cli + +# Copy everything (including previously copied files and folders) from directory +# where Overholt -application is located to current WORKDIR inside container. 
+# Remember that it must be inside the context of the build: +# http://serverfault.com/a/666154 +COPY .$OVERHOLT_APPLICATION_PATH .$OVERHOLT_APPLICATION_PATH + + +#### These will probably be removed when we start using uwsgi for all python applications +# Install an init-system in order to prevent python processes from returning 137 (in response to SIGKILL) +#RUN apt-get update && apt-get install -y \ +# curl \ +# && rm -rf /var/lib/apt/lists/* + +ENV DUMB_INIT_VERSION v1.1.3 +ENV DUMB_INIT_WOVERSION 1.1.3 +RUN curl -SL https://github.com/Yelp/dumb-init/releases/download/${DUMB_INIT_VERSION}/dumb-init_${DUMB_INIT_WOVERSION}_amd64.deb \ -o dumb-init_${DUMB_INIT_WOVERSION}_amd64.deb \ && dpkg -i dumb-init_*.deb + +### +# Configure and run the application using entrypoint.sh. +# NOTE: Content of CMD are the default parameters passed to entrypoint.sh. +# These can be overwritten on "docker run " +# https://docs.docker.com/engine/userguide/eng-image/dockerfile_best-practices/#/entrypoint +COPY ./docker-entrypoint-overholt.sh / + +#ENTRYPOINT ["/docker-entrypoint-overholt.sh"] + +#### These will probably be removed when we start using uwsgi for all python applications +ENTRYPOINT ["/usr/bin/dumb-init", "--"] + +WORKDIR $APP_INSTALL_PATH$OVERHOLT_APPLICATION_PATH + +# NOTE: Maybe this should be replaced with something that doesn't run anything +# and the command below should go to compose.yml ?? 
+#CMD ["sh", "-c", "python $APP_INSTALL_PATH${OVERHOLT_APPLICATION_PATH}/wsgi.py"] +CMD ["/docker-entrypoint-overholt.sh", "sh", "-c", "python $APP_INSTALL_PATH${OVERHOLT_APPLICATION_PATH}/wsgi.py"] diff --git a/Operator_Components/Operator_CR/auth_token.py b/Operator_Components/Operator_CR/auth_token.py index 29d2f53..927c059 100644 --- a/Operator_Components/Operator_CR/auth_token.py +++ b/Operator_Components/Operator_CR/auth_token.py @@ -13,7 +13,7 @@ api = Api() api.init_app(api_CR_blueprint) debug_log = logging.getLogger("debug") - +logger = logging.getLogger("sequence") class AuthToken(Resource): def __init__(self): super(AuthToken, self).__init__() @@ -48,7 +48,7 @@ def get(self, cr_id): trace=traceback.format_exc(limit=100).splitlines()) debug_log.debug(dumps(result, indent=2)) token = self.gen_auth_token(result) - + debug_log.info(dumps(result, indent=2)) return {"auth_token" : token} diff --git a/Operator_Components/Operator_CR/consent_form.py b/Operator_Components/Operator_CR/consent_form.py index d48e1b1..188e95b 100644 --- a/Operator_Components/Operator_CR/consent_form.py +++ b/Operator_Components/Operator_CR/consent_form.py @@ -1,16 +1,19 @@ # -*- coding: utf-8 -*- +from datetime import datetime +import time + __author__ = 'alpaloma' import logging import traceback -from json import dumps +from json import dumps, loads from DetailedHTTPException import DetailedHTTPException, error_handler from Templates import ServiceRegistryHandler, Consent_form_Out, Sequences from flask import request, Blueprint, current_app from flask_restful import Resource, Api from helpers import AccountManagerHandler, Helpers -from tasks import CR_installer - +from op_tasks import CR_installer +from requests import post logger = logging.getLogger("sequence") debug_log = logging.getLogger("debug") @@ -18,9 +21,6 @@ api = Api() api.init_app(api_CR_blueprint) -SH = ServiceRegistryHandler() -getService = SH.getService - sq = Sequences("Operator_Components Mgmnt", {}) 
Operator_public_key = {} class ConsentFormHandler(Resource): @@ -30,14 +30,15 @@ def __init__(self): self.am_user = current_app.config["ACCOUNT_MANAGEMENT_USER"] self.am_password = current_app.config["ACCOUNT_MANAGEMENT_PASSWORD"] self.timeout = current_app.config["TIMEOUT"] + self.debug_mode = current_app.config["DEBUG_MODE"] try: self.AM = AccountManagerHandler(self.am_url, self.am_user, self.am_password, self.timeout) except Exception as e: debug_log.warn("Initialization of AccountManager failed. We will crash later but note it here.\n{}".format(repr(e))) - + self.SH = ServiceRegistryHandler(current_app.config["SERVICE_REGISTRY_SEARCH_DOMAIN"], current_app.config["SERVICE_REGISTRY_SEARCH_ENDPOINT"]) + self.getService = self.SH.getService self.Helpers = Helpers(current_app.config) - - + self.operator_url = current_app.config["OPERATOR_URL"] @error_handler def get(self, account_id): @@ -48,15 +49,51 @@ def get(self, account_id): service_ids = request.args sq.task("Fetch services") - sink = getService(service_ids["sink"]) - _consent_form["sink"]["service_id"] = sink["name"] - source = getService(service_ids["source"]) - _consent_form["source"]["service_id"] = source["name"] + sink = self.getService(service_ids["sink"]) + _consent_form["sink"]["service_id"] = sink["serviceId"] + purposes = _consent_form["sink"]["dataset"][0]["purposes"] # TODO replace this once Service registry stops being stupid. + _consent_form["sink"]["dataset"] = [] # Clear out template. 
+ for dataset in sink["serviceDescription"]["serviceDataDescription"][0]["dataset"]: + item = { + "dataset_id": dataset["datasetId"], + "title": dataset["title"], + "description": dataset["description"], + "keyword": dataset["keyword"], + "publisher": dataset["publisher"], + "purposes": dataset["purpose"] + } + + _consent_form["sink"]["dataset"].append(item) + + + source = self.getService(service_ids["source"]) + _consent_form["source"]["service_id"] = source["serviceId"] + _consent_form["source"]["dataset"] = [] # Clear out template. + for dataset in source["serviceDescription"]["serviceDataDescription"][0]["dataset"]: + item = { + "dataset_id": dataset["datasetId"], + "title": dataset["title"], + "description": dataset["description"], + "keyword": dataset["keyword"], + "publisher": dataset["publisher"], + "distribution": { + "distribution_id": dataset["distribution"][0]["distributionId"], + "access_url": "{}{}{}".format(source["serviceInstance"][0]["domain"], + source["serviceInstance"][0]["serviceAccessEndPoint"][ + "serviceAccessURI"] + , dataset["distribution"][0]["accessURL"]), + + } + } + _consent_form["source"]["dataset"].append(item) + sq.task("Generate RS_ID") - sq.task("Store RS_ID") - rs_id = self.Helpers.gen_rs_id(source["name"]) + source_domain = source["serviceInstance"][0]["domain"] + source_access_uri = source["serviceInstance"][0]["serviceAccessEndPoint"]["serviceAccessURI"] + rs_id = self.Helpers.gen_rs_id(source["serviceInstance"][0]["domain"]) + sq.task("Store RS_ID") _consent_form["source"]["rs_id"] = rs_id sq.reply_to("UI", msg="Consent Form+RS_ID") @@ -82,7 +119,7 @@ def post(self, account_id): detail="RS_ID could not be validated.", status=403) - sq.send_to("Account Mgmt", "GET surrogate_id & slr_id") + sq.send_to("Account Manager", "GET surrogate_id & slr_id") try: sink_sur = self.AM.getSUR_ID(sink_srv_id, account_id) source_sur = self.AM.getSUR_ID(source_srv_id, account_id) @@ -100,20 +137,57 @@ def post(self, account_id): slr_id_source, 
surrogate_id_source = source_sur["data"]["surrogate_id"]["attributes"]["servicelinkrecord_id"],\ source_sur["data"]["surrogate_id"]["attributes"]["surrogate_id"] # One for Sink, one for Source + sink_keys = self.Helpers.get_service_keys(surrogate_id_sink) + try: + sink_key = loads(sink_keys[0]) + except IndexError as e: + raise DetailedHTTPException(status=500, + title="Fetching service keys for sink has failed.", + detail="Couldn't find keys for surrogate id ({}).".format(surrogate_id_sink), + trace=traceback.format_exc(limit=100).splitlines()) + debug_log.info("Sink keys:\n{}".format(dumps(sink_key, indent=2))) + sink_pop_key = sink_key["pop_key"] # Generate common_cr for both sink and source. sq.task("Generate common CR") - common_cr_source = self.Helpers.gen_cr_common(surrogate_id_source, _consent_form["source"]["rs_id"], slr_id_source) - common_cr_sink = self.Helpers.gen_cr_common(surrogate_id_sink, _consent_form["source"]["rs_id"], slr_id_sink) + + issued = int(time.time()) #datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ") + not_before = int(time.time()) #datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ") # TODO: This and not after are Optional, who says when to put them? 
+ not_after = int(time.time()+current_app.config["NOT_AFTER_INTERVAL"]) #datetime.fromtimestamp(time.time()+current_app.config["NOT_AFTER_INTERVAL"]).strftime("%Y-%m-%dT%H:%M:%SZ") + operator_id = current_app.config["UID"] + + + common_cr_source = self.Helpers.gen_cr_common(surrogate_id_source, + _consent_form["source"]["rs_id"], + slr_id_source, + issued, + not_before, + not_after, + source_srv_id, + operator_id, + "Source") + + common_cr_sink = self.Helpers.gen_cr_common(surrogate_id_sink, + _consent_form["source"]["rs_id"], + slr_id_sink, + issued, + not_before, + not_after, + sink_srv_id, + operator_id, + "Sink") sq.task("Generate ki_cr") ki_cr = self.Helpers.Gen_ki_cr(self) sq.task("Generate CR for sink") - sink_cr = self.Helpers.gen_cr_sink(common_cr_sink, _consent_form) + sink_cr = self.Helpers.gen_cr_sink(common_cr_sink, _consent_form, common_cr_source["cr_id"]) sq.task("Generate CR for source") source_cr = self.Helpers.gen_cr_source(common_cr_source, _consent_form, - Operator_public_key) + sink_pop_key) + + sink_cr["cr"]["common_part"]["rs_description"] = source_cr["cr"]["common_part"]["rs_description"] + debug_log.info(sink_cr) debug_log.info(source_cr) sq.task("Generate CSR's") @@ -122,8 +196,30 @@ def post(self, account_id): source_csr = self.Helpers.gen_csr(surrogate_id_source, source_cr["cr"]["common_part"]["cr_id"], "Active", "null") - sq.send_to("Account Mgmt", "Send CR/CSR to sign and store") + sq.send_to("Account Manager", "Send CR/CSR to sign and store") result = self.AM.signAndstore(sink_cr, sink_csr, source_cr, source_csr, account_id) + + # TODO: These are debugging and testing calls, remove them once operation is verified. 
+ if self.debug_mode: + own_addr = self.operator_url #request.url_root.rstrip(request.script_root) + debug_log.info("Our own address is: {}".format(own_addr)) + req = post(own_addr+"/api/1.2/cr/account_id/{}/service/{}/consent/{}/status/Disabled" + .format(surrogate_id_source, source_srv_id, common_cr_source["cr_id"])) + + debug_log.info("Changed csr status, request status ({}) reason ({}) and the following content:\n{}".format( + req.status_code, + req.reason, + dumps(loads(req.content), indent=2) + )) + req = post(own_addr+"/api/1.2/cr/account_id/{}/service/{}/consent/{}/status/Active" + .format(surrogate_id_source, source_srv_id, common_cr_source["cr_id"])) + debug_log.info("Changed csr status, request status ({}) reason ({}) and the following content:\n{}".format( + req.status_code, + req.reason, + dumps(loads(req.content), indent=2) + )) + + debug_log.info(dumps(result, indent=3)) sink_cr = result["data"]["sink"]["consentRecord"]["attributes"]["cr"] sink_csr = result["data"]["sink"]["consentStatusRecord"]["attributes"]["csr"] @@ -135,11 +231,11 @@ def post(self, account_id): crs_csrs_payload = {"sink": {"cr": sink_cr, "csr": sink_csr}, "source": {"cr": source_cr, "csr": source_csr}} #logger.info("Going to Celery task") - sq.send_to("Sink", "Post CR-Sink, CSR-Sink") - sq.send_to("Source", "Post CR-Source, CSR-Source") + sq.send_to("Service_Components Mgmnt (Sink)", "Post CR-Sink, CSR-Sink") + sq.send_to("Service_Components Mgmnt (Source)", "Post CR-Source, CSR-Source") debug_log.info(dumps(crs_csrs_payload, indent=2)) - CR_installer.delay(crs_csrs_payload, SH.getService_url(sink_srv_id), SH.getService_url(source_srv_id)) + CR_installer.delay(crs_csrs_payload, self.SH.getService_url(sink_srv_id), self.SH.getService_url(source_srv_id)) return {"status": 201, "msg": "CREATED"}, 201 diff --git a/Operator_Components/Operator_CR/introspection.py b/Operator_Components/Operator_CR/introspection.py new file mode 100644 index 0000000..bcd8f51 --- /dev/null +++ 
b/Operator_Components/Operator_CR/introspection.py @@ -0,0 +1,78 @@ +# -*- coding: utf-8 -*- +import logging +import traceback +from json import dumps + +from DetailedHTTPException import error_handler, DetailedHTTPException +from flask import Blueprint, current_app +from flask_restful import Api, Resource +from helpers import AccountManagerHandler +from helpers import Helpers + +api_CR_blueprint = Blueprint("api_Introspection_blueprint", __name__) +api = Api() +api.init_app(api_CR_blueprint) +debug_log = logging.getLogger("debug") +logger = logging.getLogger("sequence") +class Introspection(Resource): + def __init__(self): + super(Introspection, self).__init__() + self.am_url = current_app.config["ACCOUNT_MANAGEMENT_URL"] + self.am_user = current_app.config["ACCOUNT_MANAGEMENT_USER"] + self.am_password = current_app.config["ACCOUNT_MANAGEMENT_PASSWORD"] + self.timeout = current_app.config["TIMEOUT"] + try: + self.AM = AccountManagerHandler(self.am_url, self.am_user, self.am_password, self.timeout) + except Exception as e: + debug_log.warn("Initialization of AccountManager failed. 
We will crash later but note it here.\n{}".format(repr(e))) + helper_object = Helpers(current_app.config) + + @error_handler + def get(self, cr_id): + '''post + + :return: Returns latest csr for source + ''' + try: + debug_log.info("We received introspection request for cr_id ({})".format(cr_id)) + result = self.AM.get_last_csr(cr_id) + except AttributeError as e: + raise DetailedHTTPException(status=502, + title="It would seem initiating Account Manager Handler has failed.", + detail="Account Manager might be down or unresponsive.", + trace=traceback.format_exc(limit=100).splitlines()) + debug_log.info(dumps(result)) + return result + +class Introspection_Missing(Resource): + def __init__(self): + super(Introspection_Missing, self).__init__() + self.am_url = current_app.config["ACCOUNT_MANAGEMENT_URL"] + self.am_user = current_app.config["ACCOUNT_MANAGEMENT_USER"] + self.am_password = current_app.config["ACCOUNT_MANAGEMENT_PASSWORD"] + self.timeout = current_app.config["TIMEOUT"] + try: + self.AM = AccountManagerHandler(self.am_url, self.am_user, self.am_password, self.timeout) + except Exception as e: + debug_log.warn("Initialization of AccountManager failed. 
We will crash later but note it here.\n{}".format(repr(e))) + helper_object = Helpers(current_app.config) + + @error_handler + def get(self, cr_id, csr_id): + '''get + + :return: Returns latest csr for source + ''' + try: + debug_log.info("We received introspection request for cr_id ({})".format(cr_id)) + result = self.AM.get_missing_csr(cr_id, csr_id) + except AttributeError as e: + raise DetailedHTTPException(status=502, + title="It would seem initiating Account Manager Handler has failed.", + detail="Account Manager might be down or unresponsive.", + trace=traceback.format_exc(limit=100).splitlines()) + debug_log.info(dumps(result)) + return result + +api.add_resource(Introspection, '/introspection/') +api.add_resource(Introspection_Missing, '/consent//missing_since/') diff --git a/Operator_Components/Operator_CR/status_change.py b/Operator_Components/Operator_CR/status_change.py new file mode 100644 index 0000000..30de2b7 --- /dev/null +++ b/Operator_Components/Operator_CR/status_change.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +import logging +import traceback +from json import dumps + +from DetailedHTTPException import error_handler, DetailedHTTPException +from flask import Blueprint, current_app +from flask_restful import Api, Resource +from helpers import AccountManagerHandler +from helpers import Helpers + +api_CR_blueprint = Blueprint("api_Status_Change_blueprint", __name__) +api = Api() +api.init_app(api_CR_blueprint) +debug_log = logging.getLogger("debug") +logger = logging.getLogger("sequence") +class Status_Change(Resource): + def __init__(self): + super(Status_Change, self).__init__() + self.am_url = current_app.config["ACCOUNT_MANAGEMENT_URL"] + self.am_user = current_app.config["ACCOUNT_MANAGEMENT_USER"] + self.am_password = current_app.config["ACCOUNT_MANAGEMENT_PASSWORD"] + self.timeout = current_app.config["TIMEOUT"] + try: + self.AM = AccountManagerHandler(self.am_url, self.am_user, self.am_password, self.timeout) + except Exception as e: + 
debug_log.warn("Initialization of AccountManager failed. We will crash later but note it here.\n{}".format(repr(e))) + self.helper_object = Helpers(current_app.config) + + @error_handler + def post(self, acc_id, srv_id, cr_id, new_status): + '''post + + :return: Returns latest csr for source + ''' + try: + debug_log.info("We received status change request for cr_id ({}) for srv_id ({}) on account ({})" + .format(cr_id, srv_id, acc_id)) + # TODO: Do we need srv_id for anything? + # TODO: How do we authorize this request? Who is allowed to make it? + # Get previous_csr_id + previous_csr_id = self.AM.get_last_csr(cr_id)["csr_id"] + csr_payload = self.helper_object.gen_csr(acc_id, cr_id, new_status, previous_csr_id) + debug_log.info("Created CSR payload:\n {}".format(csr_payload)) + result = self.AM.create_new_csr(cr_id, csr_payload) + + # result = self.AM.get_last_csr(cr_id) + except AttributeError as e: + raise DetailedHTTPException(status=502, + title="It would seem initiating Account Manager Handler has failed.", + detail="Account Manager might be down or unresponsive.", + trace=traceback.format_exc(limit=100).splitlines()) + # debug_log.info(dumps(result)) + return {"status": "OK"} # result + + +api.add_resource(Status_Change, '/account_id//service//consent//status/') diff --git a/Operator_Components/Operator_SLR/registerSur.py b/Operator_Components/Operator_SLR/registerSur.py index 4cb9ab5..048adf6 100644 --- a/Operator_Components/Operator_SLR/registerSur.py +++ b/Operator_Components/Operator_SLR/registerSur.py @@ -3,9 +3,8 @@ import logging import traceback from json import loads, dumps, load, dump -from requests import post from uuid import uuid4 as guid - +import time from DetailedHTTPException import DetailedHTTPException, error_handler from Templates import ServiceRegistryHandler, Sequences from flask import request, Blueprint, current_app @@ -13,6 +12,7 @@ from flask_restful import Resource, Api from helpers import AccountManagerHandler, Helpers from 
jwcrypto import jwk +from requests import post api_SLR_RegisterSur = Blueprint("api_SLR_RegisterSur", __name__) @@ -43,53 +43,25 @@ class RegisterSur(Resource): def __init__(self): super(RegisterSur, self).__init__() self.app = current_app - #print(current_app.config) - keysize = current_app.config["KEYSIZE"] - cert_key_path = current_app.config["CERT_KEY_PATH"] - self.request_timeout = current_app.config["TIMEOUT"] - - SUPER_DEBUG = True + self.Helpers = Helpers(self.app.config) account_id = "ACC-ID-RANDOM" - user_account_id = account_id + "_" + str(guid()) - - # Keys need to come from somewhere else instead of being generated each time. - gen = {"generate": "EC", "cvr": "P-256", "kid": user_account_id} - gen3 = {"generate": "RSA", "size": keysize, "kid": account_id} - operator_key = jwk.JWK(**gen3) - try: - with open(cert_key_path, "r") as cert_file: - operator_key2 = jwk.JWK(**loads(load(cert_file))) - operator_key = operator_key2 - except Exception as e: - print(e) - with open(cert_key_path, "w+") as cert_file: - dump(operator_key.export(), cert_file, indent=2) - - # Template to send the key to key server - template = {account_id: {"cr_keys": loads(operator_key.export_public()), - "token_keys": loads(operator_key.export_public()) - } - } - # post("http://localhost:6666/key", json=template) + self.operator_key = self.Helpers.get_key() + self.request_timeout = self.app.config["TIMEOUT"] self.payload = \ { "version": "1.2", "link_id": "", "operator_id": account_id, - "service_id": "SRV-SH14W4S3", # How do we know this? 
+ "service_id": "", "surrogate_id": "", - "token_key": "", - "operator_key": loads(operator_key.export_public()), + "operator_key": self.operator_key["pub"], "cr_keys": "", - "created": "" # time.time(), + "iat": int(time.time()), # TODO: set to iat when Account version used supports it } debug_log.info(dumps(self.payload, indent=3)) - - protti = {"alg": "RS256"} - headeri = {"kid": user_account_id, "jwk": loads(operator_key.export_public())} - self.service_registry_handler = ServiceRegistryHandler() + self.service_registry_handler = ServiceRegistryHandler(current_app.config["SERVICE_REGISTRY_SEARCH_DOMAIN"], current_app.config["SERVICE_REGISTRY_SEARCH_ENDPOINT"]) self.am_url = current_app.config["ACCOUNT_MANAGEMENT_URL"] self.am_user = current_app.config["ACCOUNT_MANAGEMENT_USER"] self.am_password = current_app.config["ACCOUNT_MANAGEMENT_PASSWORD"] @@ -99,7 +71,7 @@ def __init__(self): except Exception as e: debug_log.warn("Initialization of AccountManager failed. We will crash later but note it here.\n{}".format(repr(e))) - self.Helpers = Helpers(current_app.config) + self.query_db = self.Helpers.query_db @@ -111,27 +83,41 @@ def post(self): js = request.json sq.task("Load account_id and service_id from database") - for code_json in self.query_db("select * from session_store where code = ?;", [js["code"]]): - debug_log.debug("{} {}".format(type(code_json), code_json)) - account_id = loads(code_json["json"])["account_id"] - self.payload["service_id"] = loads(code_json["json"])["service_id"] + query = self.query_db("select * from session_store where code=%s;", (js["code"],)) + debug_log.info(type(query)) + debug_log.info(query) + dict_query = loads(query) + debug_log.debug("{} {}".format(type(query), query)) + account_id = dict_query["account_id"] + self.payload["service_id"] = dict_query["service_id"] # Check Surrogate_ID exists. 
# Fill token_key try: sq.task("Verify surrogate_id and token_key exist") + token_key = js["token_key"] self.payload["surrogate_id"] = js["surrogate_id"] - self.payload["token_key"] = {"key": js["token_key"]} + #self.payload["token_key"] = {"key": token_key} + + sq.task("Store surrogate_id and keys for CR steps later on.") + key_template = {"token_key": token_key, + "pop_key": token_key} # TODO: Get pop_key here? + + self.Helpers.store_service_key_json(kid=token_key["kid"], surrogate_id=js["surrogate_id"], key_json=key_template) except Exception as e: + debug_log.exception(e) raise DetailedHTTPException(exception=e, detail={"msg": "Received Invalid JSON that may not contain surrogate_id", "json": js}) + #sq.task("Fetch and fill token_issuer_keys") + # TODO: Token keys separetely when the time is right. + #self.payload["token_issuer_keys"][0] = self.Helpers.get_key()["pub"] # Create template self.payload["link_id"] = str(guid()) # TODO: Currently you can generate endlessly new slr even if one exists already - sq.task("Fill template for Account Mgmnt") + sq.task("Fill template for Account Manager") template = {"code": js["code"], - "data":{ + "data": { "slr": { "type": "ServiceLinkRecord", "attributes": self.payload, diff --git a/Operator_Components/Operator_SLR/start.py b/Operator_Components/Operator_SLR/start.py index 637ba7e..0ad2d92 100644 --- a/Operator_Components/Operator_SLR/start.py +++ b/Operator_Components/Operator_SLR/start.py @@ -48,7 +48,7 @@ class Start(Resource): def __init__(self): super(Start, self).__init__() self.app = current_app - self.service_registry_handler = ServiceRegistryHandler() + self.service_registry_handler = ServiceRegistryHandler(current_app.config["SERVICE_REGISTRY_SEARCH_DOMAIN"], current_app.config["SERVICE_REGISTRY_SEARCH_ENDPOINT"]) self.request_timeout = current_app.config["TIMEOUT"] self.helper = Helpers(current_app.config) self.store_session = self.helper.store_session @@ -60,14 +60,16 @@ def get(self, account_id, service_id): 
to_store = {} # We want to store some information for later parts of flow. # This address needs to be fetched somewhere to support multiple services - service_mgmnt_address = self.service_registry_handler.getService_url(service_id) - + service_json = self.service_registry_handler.getService(service_id) + service_domain = service_json["serviceInstance"][0]["domain"] + service_access_uri = service_json["serviceInstance"][0]["serviceAccessEndPoint"]["serviceAccessURI"] + service_login_uri = service_json["serviceInstance"][0]["loginUri"] # Endpoint address should be fetched somewhere as well so we can re-use the service address later easily. - endpoint = "/api/1.2/slr/code" - + endpoint = "/slr/code" # TODO: Comment above + endpoint = "{}{}{}".format(service_domain, service_access_uri, endpoint) sq.send_to("Service_Components Mgmnt", "Fetch code from service_mgmnt") - result = get("{}{}".format(service_mgmnt_address, endpoint), timeout=self.request_timeout) + result = get(endpoint, timeout=self.request_timeout) code_status = result.status_code sq.task("Check code request is valid") @@ -101,8 +103,9 @@ def get(self, account_id, service_id): try: endpoint = "/api/1.2/slr/login" + endpoint = "{}{}{}".format(service_domain, service_access_uri, service_login_uri) sq.send_to("Service_Components Mgmnt", "Redirect user to Service_Components Mgmnt login") - result = post("{}{}".format(service_mgmnt_address, endpoint), json=code, timeout=self.request_timeout) + result = post(endpoint, json=code, timeout=self.request_timeout) debug_log.info("####Response to this end point: {}\n{}".format(result.status_code, result.text)) if not result.ok: raise DetailedHTTPException(status=result.status_code, diff --git a/Operator_Components/Operator_SLR/verify.py b/Operator_Components/Operator_SLR/verify.py index 954e53d..ef77709 100644 --- a/Operator_Components/Operator_SLR/verify.py +++ b/Operator_Components/Operator_SLR/verify.py @@ -136,22 +136,25 @@ def post(self): content = 
decode(payload.encode()) sq.task("Load decoded payload as python dict") - payload = loads(loads(content.decode("utf-8"))) # TODO: Figure out why we get str out of loads the first time? + payload = loads(content.decode("utf-8")) # TODO: Figure out why we get str out of loads the first time? debug_log.info(payload) debug_log.info(type(payload)) sq.task("Fetch link_id from decoded payload") slr_id = payload["link_id"] - + code = request.json["data"]["code"].decode() + debug_log.info(code) + debug_log.info(request.json["data"]["code"]) try: ## # Verify SLR with key from Service_Components Management ## sq.task("Load account_id from database") - for code_json in self.query_db("select * from session_store where code = ?;", - [request.json["data"]["code"]]): - debug_log.debug("{} {}".format(type(code_json), code_json)) - account_id = loads(code_json["json"])["account_id"] + query = self.query_db("select * from session_store where code=%s;", (request.json["data"]["code"],)) + debug_log.info(query) + dict_query = loads(query) + debug_log.info("{} {}".format(type(dict_query), dict_query)) + account_id = dict_query["account_id"] debug_log.info("################Verify########################") debug_log.info(dumps(request.json)) diff --git a/Operator_Components/Templates.py b/Operator_Components/Templates.py index b4455f4..2b1d228 100644 --- a/Operator_Components/Templates.py +++ b/Operator_Components/Templates.py @@ -24,7 +24,7 @@ "rs_id": "String", "dataset": [ { - "datase_id": "String", + "dataset_id": "String", "title": "String", "description": "String", "keyword": [], @@ -72,23 +72,41 @@ from instance.settings import SERVICE_URL +from requests import get class ServiceRegistryHandler: - def __init__(self): + def __init__(self, domain, endpoint): # Here could be some code to setup where ServiceRegistry is located etc + # TODO: Get this from config or such. 
+ # self.registry_url = "http://178.62.229.148:8081"+"/api/v1/services/" + self.registry_url = domain + endpoint #"/api/v1/services/" pass def getService(self, service_id): - return Services[str(service_id)] + try: + debug_log.info("Making request GET {}{}".format(self.registry_url, service_id)) + req = get(self.registry_url+service_id) + service = req.json() + debug_log.info(service) + service = service[0] + except Exception as e: + debug_log.exception(e) + raise e + return service def getService_url(self, service_id): debug_log.info("getService_url got {} of type {} as parameter.".format(service_id, type(service_id))) if isinstance(service_id, unicode): service_id = service_id.encode() - services = { - "1": SERVICE_URL, - "2": SERVICE_URL# Our Service_Mgmnt - } - return services[service_id] + try: + service = get(self.registry_url+service_id).json() + debug_log.info(service_id) + service = service[0] + except Exception as e: + debug_log.exception(e) + raise e + url = service["serviceInstance"][0]["domain"] + + return url diff --git a/Operator_Components/db_handler.py b/Operator_Components/db_handler.py index 4aa43bb..14eb29d 100644 --- a/Operator_Components/db_handler.py +++ b/Operator_Components/db_handler.py @@ -1,35 +1,18 @@ # -*- coding: utf-8 -*- -import sqlite3 +import logging +import MySQLdb -def get_db(db_path): +debug_log = logging.getLogger("debug") +def get_db(host, user, password, database, port): db = None if db is None: - db = sqlite3.connect(db_path) - db.row_factory = sqlite3.Row - - try: - init_db(db) - except Exception as e: - pass + db = MySQLdb.connect(host=host, user=user, passwd=password, db=database, port=port, sql_mode="TRADITIONAL") return db - def make_dicts(cursor, row): return dict((cursor.description[idx][0], value) for idx, value in enumerate(row)) -def init_db(conn): - # create db for codes - conn.execute('''CREATE TABLE cr_tbl - (rs_id TEXT PRIMARY KEY NOT NULL, - json TEXT NOT NULL);''') - conn.execute('''CREATE TABLE rs_id_tbl - 
(rs_id TEXT PRIMARY KEY NOT NULL, - used BOOL NOT NULL);''') - conn.execute('''CREATE TABLE session_store - (code TEXT PRIMARY KEY NOT NULL, - json TEXT NOT NULL);''') - conn.commit() diff --git a/Operator_Components/doc/api/swagger_Operator_CR.yml b/Operator_Components/doc/api/swagger_Operator_CR.yml index c5e792c..a2ce342 100644 --- a/Operator_Components/doc/api/swagger_Operator_CR.yml +++ b/Operator_Components/doc/api/swagger_Operator_CR.yml @@ -39,9 +39,13 @@ paths: # Expected responses for this operation: responses: 200: - description: "Returns 200 OK or Error message" + description: "Returns 200 OK" schema: $ref: "#/definitions/Consent_FormReply" + 500: + description: "Internal server error. The actual status code and content of the error message may vary depending on error occurred." + schema: + $ref: "#/definitions/errors" post: # Describe this verb here. Note: you can use markdown @@ -77,10 +81,10 @@ paths: $ref: "#/definitions/Consent_FormReply" # Expected responses for this operation: responses: - 200: - description: "Returns 200 OK or Error message" + 201: + description: "Returns 201 Created" 500: - description: "Internal server error" + description: "Internal server error. The actual status code and content of the error message may vary depending on error occurred." schema: $ref: "#/definitions/errors" 502: @@ -100,17 +104,114 @@ paths: parameters: - name: "cr_id" in: "path" - description: "Unique ID consent record" + description: "Unique ID of consent record" required: true type: "string" format: "uuid4" responses: 200: - description: "returns 200 OK and Auth Token, or Error message" + description: "returns 200 OK and Auth Token" schema: $ref: "#/definitions/Auth_TokenReply" 500: - description: "Internal server error" + description: "Internal server error. The actual status code and content of the error message may vary depending on error occurred." 
+ schema: + $ref: "#/definitions/errors" + 502: + description: "Bad Gateway" + schema: + $ref: "#/definitions/errors" + + /introspection/{cr_id}: + get: + tags: + - "Operator" + - "Source" + description: "Gets last csr for given cr_id" + parameters: + - name: "cr_id" + in: "path" + description: "Unique ID of consent record" + required: true + type: "string" + + responses: + 200: + description: "returns 200 OK and latest csr_id" + schema: + $ref: "#/definitions/IntrospectionReply" + 500: + description: "Internal server error. The actual status code and content of the error message may vary depending on error occurred." + schema: + $ref: "#/definitions/errors" + 502: + description: "Bad Gateway" + schema: + $ref: "#/definitions/errors" + + /consent/{cr_id}/missing_since/{csr_id}: + get: + tags: + - "Operator" + - "Source" + description: "Gets new csr's for given cr since given csr_id" + parameters: + - name: "cr_id" + in: "path" + description: "Unique ID of consent record" + required: true + type: "string" + - name: "csr_id" + in: "path" + description: "Unique ID of consent status record" + required: true + type: "string" + responses: + 200: + description: "returns 200 OK and new csr's since given csr_id" + schema: + $ref: "#/definitions/IntrospectionMissingReply" + 500: + description: "Internal server error. The actual status code and content of the error message may vary depending on error occurred." 
+ schema: + $ref: "#/definitions/errors" + 502: + description: "Bad Gateway" + schema: + $ref: "#/definitions/errors" + + /account_id/{acc_id}/service/{srv_id}/consent/{cr_id}/status/{new_status}: + post: + tags: + - "Operator" + - "Source" + description: "Change status of consent" + parameters: + - name: "acc_id" + in: "path" + description: "Unique Surrogate id for service" + required: true + type: "string" + - name: "srv_id" + in: "path" + description: "Unique ID of service id" + required: true + type: "string" + - name: "cr_id" + in: "path" + description: "Unique ID of consent record" + required: true + type: "string" + - name: "new_status" + in: "path" + description: "new status as Active/Disabled/Withdrawn" + required: true + type: "string" + responses: + 200: + description: "returns 200 OK" + 500: + description: "Internal server error. The actual status code and content of the error message may vary depending on error occurred." schema: $ref: "#/definitions/errors" 502: @@ -146,6 +247,27 @@ definitions: type: string description: string containing auth_token + IntrospectionReply: + type: object + properties: + csr_id: + type: string + description: string containing csr_id + + IntrospectionMissingReply: + type: object + properties: + missing_csr: + type: object + properties: + data: + type: array + items: + type: object + properties: + attributes: + type: object + description: Missing csr DataSet_Sink: type: object @@ -169,10 +291,10 @@ definitions: type: object properties: required: - type: string + type: boolean description: boolean containing required selected: - type: string + type: boolean description: boolean containing selected title: type: string @@ -213,7 +335,7 @@ definitions: type: string description: string containing access_url selected: - type: string + type: boolean description: boolean containing selected datase_id: type: string diff --git a/Operator_Components/doc/api/swagger_Operator_SLR.yml 
b/Operator_Components/doc/api/swagger_Operator_SLR.yml index 4b75ae2..4b3a47e 100644 --- a/Operator_Components/doc/api/swagger_Operator_SLR.yml +++ b/Operator_Components/doc/api/swagger_Operator_SLR.yml @@ -25,7 +25,7 @@ paths: $ref: "#/definitions/LinkParams" responses: 200: - description: "Returns 200 OK or Error message" + description: "Returns 200 OK" 500: description: "Internal server error" schema: @@ -39,8 +39,8 @@ paths: get: tags: - "Operator" - description: "Entry point for creating new SLR with service\nWill takes a UUID\ - \ of service to link with as paramater" + description: "Entry point for creating new SLR with service.\nWill take a UUID\ + \ of the service to link with as a parameter. This will start a chain of events." parameters: - name: "account_id" in: "path" @@ -58,7 +58,7 @@ paths: 200: description: "Returns 200 OK" 500: - description: "Internal server error" + description: "Internal server error. The actual status code and content of the error message may vary depending on error occurred." schema: $ref: "#/definitions/errors" 504: @@ -96,7 +96,7 @@ paths: schema: $ref: "#/definitions/VerifyResponse" 500: - description: "Internal server error" + description: "Internal server error. The actual status code and content of the error message may vary depending on error occurred." schema: $ref: "#/definitions/errors" 502: diff --git a/Operator_Components/doc/database/Operator_Components-DBinit.sql b/Operator_Components/doc/database/Operator_Components-DBinit.sql new file mode 100644 index 0000000..2025b6f --- /dev/null +++ b/Operator_Components/doc/database/Operator_Components-DBinit.sql @@ -0,0 +1,83 @@ +-- MySQL Script generated by MySQL Workbench +-- to 15.
syyskuuta 2016 15.32.11 +-- Model: New Model Version: 1.0 +-- MySQL Workbench Forward Engineering + +SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0; +SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0; +SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL,ALLOW_INVALID_DATES'; + +-- ----------------------------------------------------- +-- Schema mydb +-- ----------------------------------------------------- +-- ----------------------------------------------------- +-- Schema db_Operator +-- ----------------------------------------------------- + +-- ----------------------------------------------------- +-- Schema db_Operator +-- ----------------------------------------------------- +CREATE SCHEMA IF NOT EXISTS `db_Operator` DEFAULT CHARACTER SET utf8 ; +USE `db_Operator` ; + +-- ----------------------------------------------------- +-- Table `db_Operator`.`cr_tbl` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Operator`.`cr_tbl` ; + +CREATE TABLE IF NOT EXISTS `db_Operator`.`cr_tbl` ( + `rs_id` LONGTEXT NOT NULL, + `json` LONGTEXT NOT NULL, + PRIMARY KEY (`rs_id`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER SET = utf8; + + +-- ----------------------------------------------------- +-- Table `db_Operator`.`rs_id_tbl` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Operator`.`rs_id_tbl` ; + +CREATE TABLE IF NOT EXISTS `db_Operator`.`rs_id_tbl` ( + `rs_id` LONGTEXT NOT NULL, + `used` TINYINT(1) NOT NULL, + PRIMARY KEY (`rs_id`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER SET = utf8; + + +-- ----------------------------------------------------- +-- Table `db_Operator`.`session_store` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Operator`.`session_store` ; + +CREATE TABLE IF NOT EXISTS `db_Operator`.`session_store` ( + `code` LONGTEXT NOT NULL, + `json` LONGTEXT NOT NULL, + PRIMARY KEY (`code`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER 
SET = utf8; + +-- ----------------------------------------------------- +-- Table `db_Operator`.`service_keys_tbl` TODO: Check this, used to have kid as PK but would cause fails since service gives same key for all surrogates atm. +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Operator`.`service_keys_tbl` ; + +CREATE TABLE IF NOT EXISTS `db_Operator`.`service_keys_tbl` ( + `kid` LONGTEXT NOT NULL, + `surrogate_id` LONGTEXT NOT NULL, + `key_json` LONGTEXT NOT NULL, + PRIMARY KEY (`surrogate_id`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER SET = utf8; + + + + +SET SQL_MODE=@OLD_SQL_MODE; +SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS; +SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS; + +CREATE USER 'operator'@'%' IDENTIFIED BY 'MynorcA'; +GRANT CREATE TEMPORARY TABLES, DELETE, DROP, INSERT, LOCK TABLES, SELECT, UPDATE ON db_Operator.* TO 'operator'@'%'; +FLUSH PRIVILEGES; \ No newline at end of file diff --git a/Operator_Components/doc/database/Operator_Components_db_image-v001.png b/Operator_Components/doc/database/Operator_Components_db_image-v001.png new file mode 100644 index 0000000..6fb8ce5 Binary files /dev/null and b/Operator_Components/doc/database/Operator_Components_db_image-v001.png differ diff --git a/Operator_Components/doc/deployment.md b/Operator_Components/doc/deployment.md index 3beecfb..e3c9080 100644 --- a/Operator_Components/doc/deployment.md +++ b/Operator_Components/doc/deployment.md @@ -43,5 +43,5 @@ You can find configurable settings in /instance/settings.py cd Operator_Components/ source virtual_env_2_7/bin/activate - nohup celery -A tasks worker > op_celery.log & + nohup celery -A op_tasks worker > op_celery.log & nohup python wsgi.py > op_flask.log & diff --git a/Operator_Components/factory.py b/Operator_Components/factory.py index da07400..5865d78 100644 --- a/Operator_Components/factory.py +++ b/Operator_Components/factory.py @@ -15,7 +15,7 @@ def create_app(package_name, package_path, settings_override=None, -
register_security_blueprint=False): + register_security_blueprint=False, register_bps=True): """Returns a :class:`Flask` application instance configured with common functionality for the Overholt platform. :param package_name: application package name @@ -28,8 +28,9 @@ def create_app(package_name, package_path, settings_override=None, app = Flask(package_name, instance_relative_config=True) app.config.from_pyfile('settings.py', silent=False) app.config.from_object(settings_override) - - rv, apis =register_blueprints(app, package_name, package_path) + apis = None + if register_bps: + rv, apis =register_blueprints(app, package_name, package_path) return app, apis @@ -38,7 +39,7 @@ def create_celery_app(app=None): if app is not None: app = app else: - app, apis = create_app('operator_component', os.path.dirname(__file__)) + app, apis = create_app('op_queue', os.path.dirname(__file__), register_bps=False) celery = Celery(__name__, broker=app.config['SELERY_BROKER_URL']) celery.conf.update(app.config) TaskBase = celery.Task diff --git a/Operator_Components/helpers.py b/Operator_Components/helpers.py index 37603cc..2f2e5a3 100644 --- a/Operator_Components/helpers.py +++ b/Operator_Components/helpers.py @@ -1,22 +1,21 @@ # -*- coding: utf-8 -*- -import pkgutil import importlib +import logging +import pkgutil +import time +from base64 import urlsafe_b64decode as decode +from sqlite3 import IntegrityError +from Crypto.PublicKey.RSA import importKey as import_rsa_key from flask import Blueprint from flask_restful import Api -from Crypto.PublicKey.RSA import importKey as import_rsa_key -from base64 import urlsafe_b64decode as decode +from Templates import ServiceRegistryHandler import db_handler as db_handler -from sqlite3 import OperationalError, IntegrityError -import time -import logging + debug_log = logging.getLogger("debug") from datetime import datetime - - - def read_key(path, password=None, ): ## # Read RSA key from PEM file and return JWK object of it. 
@@ -60,9 +59,6 @@ def read_key(path, password=None, ): return jwssa - - - def register_blueprints(app, package_name, package_path): """Register all Blueprint instances on the specified Flask application found in all modules for the specified package. @@ -83,32 +79,34 @@ def register_blueprints(app, package_name, package_path): apis.append(item) return rv, apis + from jwcrypto import jwt, jwk -#from Templates import SLR_tool -from json import dumps, loads, dump, load +# from Templates import SLR_tool +from json import dumps, dump, load from uuid import uuid4 as guid - from requests import get, post from json import loads from core import DetailedHTTPException -class AccountManagerHandler: + +class AccountManagerHandler: def __init__(self, account_management_url, account_management_username, account_management_password, timeout): self.username = account_management_username - self.password = account_management_password # possibly we don't need this here, does it matter? + self.password = account_management_password # possibly we don't need this here, does it matter? 
self.url = account_management_url self.timeout = timeout self.endpoint = { - "token": "api/auth/sdk/", - "surrogate": "api/account/{account_id}/service/{service_id}/surrogate/", - "sign_slr": "api/account/{account_id}/servicelink/", - "verify_slr": "api/account/{account_id}/servicelink/verify/", - "sign_consent": "api/account/consent/sign/", - "consent": "api/account/{account_id}/servicelink/{source_slr_id}/{sink_slr_id}/consent/", - "auth_token": "api/consent/{sink_cr_id}/authorizationtoken/"} - req = get(self.url+self.endpoint["token"], auth=(self.username, self.password), timeout=timeout) - + "token": "api/auth/sdk/", + "surrogate": "api/account/{account_id}/service/{service_id}/surrogate/", + "sign_slr": "api/account/{account_id}/servicelink/", + "verify_slr": "api/account/{account_id}/servicelink/verify/", + "sign_consent": "api/account/consent/sign/", + "consent": "api/account/{account_id}/servicelink/{source_slr_id}/{sink_slr_id}/consent/", + "auth_token": "api/consent/{sink_cr_id}/authorizationtoken/", + "last_csr": "api/consent/{cr_id}/status/last/", + "new_csr": "api/consent/{cr_id}/status/"} # Works as path to GET missing csr and POST new ones + req = get(self.url + self.endpoint["token"], auth=(self.username, self.password), timeout=timeout) # check if the request for token succeeded debug_log.debug("{} {} {}".format(req.status_code, req.reason, req.text)) @@ -116,10 +114,12 @@ def __init__(self, account_management_url, account_management_username, account_ self.token = loads(req.text)["api_key"] else: raise DetailedHTTPException(status=req.status_code, - detail={"msg":"Getting account management token failed.","content": req.content}, + detail={"msg": "Getting account management token failed.", + "content": req.content}, title=req.reason) - # Here could be some code to setup where AccountManager is located etc, get these from ServiceRegistry? + # Here could be some code to setup where AccountManager is located etc, get these from ServiceRegistry? 
+ def get_AuthTokenInfo(self, cr_id): req = get(self.url + self.endpoint["auth_token"] .replace("{sink_cr_id}", cr_id), @@ -128,34 +128,103 @@ def get_AuthTokenInfo(self, cr_id): templ = loads(req.text) else: raise DetailedHTTPException(status=req.status_code, - detail={"msg": "Getting AuthToken info from account management failed.","content": req.content}, + detail={"msg": "Getting AuthToken info from account management failed.", + "content": req.content}, title=req.reason) return templ def getSUR_ID(self, service_id, account_id): - debug_log.debug(""+self.url+self.endpoint["surrogate"].replace("{account_id}",account_id).replace("{service_id}", service_id)) + debug_log.debug( + "" + self.url + self.endpoint["surrogate"].replace("{account_id}", account_id).replace("{service_id}", + service_id)) - req = get(self.url+self.endpoint["surrogate"].replace("{account_id}",account_id).replace("{service_id}", service_id), + req = get(self.url + self.endpoint["surrogate"].replace("{account_id}", account_id).replace("{service_id}", + service_id), headers={'Api-Key': self.token}, timeout=self.timeout) if req.ok: templ = loads(req.text) else: raise DetailedHTTPException(status=req.status_code, - detail={"msg": "Getting surrogate_id from account management failed.", "content": req.content}, + detail={"msg": "Getting surrogate_id from account management failed.", + "content": req.content}, title=req.reason) return templ + def get_last_csr(self, cr_id): + endpoint_url = self.url + self.endpoint["last_csr"].replace("{cr_id}", cr_id) + debug_log.debug("" + endpoint_url) + + req = get(endpoint_url, + headers={'Api-Key': self.token}, + timeout=self.timeout) + if req.ok: + templ = loads(req.text) + tool = SLR_tool() + payload = tool.decrypt_payload(templ["data"]["attributes"]["csr"]["payload"]) + debug_log.info("Got CSR payload from account:\n{}".format(dumps(payload, indent=2))) + csr_id = payload["record_id"] + return {"csr_id": csr_id} + else: + raise 
DetailedHTTPException(status=req.status_code, + detail={"msg": "Getting last csr from account management failed.", + "content": req.content}, + title=req.reason) + + def create_new_csr(self, cr_id, payload): + endpoint_url = self.url + self.endpoint["new_csr"].replace("{cr_id}", cr_id) + debug_log.debug("" + endpoint_url) + payload = {"data": {"attributes": payload, "type": "ConsentStatusRecord"}} + req = post(endpoint_url, json=payload, + headers={'Api-Key': self.token}, + timeout=self.timeout) + if req.ok: + templ = loads(req.text) + #tool = SLR_tool() + #payload = tool.decrypt_payload(templ["data"]["attributes"]["csr"]["payload"]) + debug_log.info("Created CSR:\n{}".format(dumps(templ, indent=2))) + #csr_id = payload["record_id"] + + return {"csr": templ} + else: + raise DetailedHTTPException(status=req.status_code, + detail={"msg": "Creating new csr at account management failed.", + "content": req.content}, + title=req.reason) + + def get_missing_csr(self, cr_id, csr_id): + endpoint_url = self.url + self.endpoint["new_csr"].replace("{cr_id}", cr_id) + debug_log.debug("" + endpoint_url) + payload = {"csr_id": csr_id} + req = get(endpoint_url, params=payload, + headers={'Api-Key': self.token}, + timeout=self.timeout) + if req.ok: + templ = loads(req.text) + #tool = SLR_tool() + #payload = tool.decrypt_payload(templ["data"]["attributes"]["csr"]["payload"]) + debug_log.info("Fetched missing CSR:\n{}".format(dumps(templ, indent=2))) + #csr_id = payload["record_id"] + + return {"missing_csr": templ} + else: + raise DetailedHTTPException(status=req.status_code, + detail={"msg": "Creating new csr at account management failed.", + "content": req.content}, + title=req.reason) + def sign_slr(self, template, account_id): - templu =template - req = post(self.url+self.endpoint["sign_slr"].replace("{account_id}", account_id), json=templu, headers={'Api-Key': self.token}, timeout=self.timeout) + templu = template + req = post(self.url + 
self.endpoint["sign_slr"].replace("{account_id}", account_id), json=templu, + headers={'Api-Key': self.token}, timeout=self.timeout) debug_log.debug("API token: {}".format(self.token)) debug_log.debug("{} {} {} {}".format(req.status_code, req.reason, req.text, req.content)) if req.ok: templ = loads(req.text) else: raise DetailedHTTPException(status=req.status_code, - detail={"msg": "Getting surrogate_id from account management failed.","content": loads(req.text)}, + detail={"msg": "Getting surrogate_id from account management failed.", + "content": loads(req.text)}, title=req.reason) debug_log.debug(templ) @@ -173,16 +242,18 @@ def verify_slr(self, payload, code, slr, account_id): }, "ssr": { "attributes": { + "version": "1.2", + "surrogate_id": payload["surrogate_id"], "record_id": str(guid()), "account_id": account_id, "slr_id": payload["link_id"], "sl_status": "Active", - "iat": "", + "iat": int(time.time()), "prev_record_id": "NULL" }, "type": "ServiceLinkStatusRecord" }, - "surrogate_id":{ + "surrogate_id": { "attributes": { "account_id": "2", "service_id": payload["service_id"], @@ -192,7 +263,10 @@ def verify_slr(self, payload, code, slr, account_id): } } } - req = post(self.url + self.endpoint["verify_slr"].replace("{account_id}", account_id), json=templa, headers={'Api-Key': self.token}, timeout=self.timeout) + debug_log.info("Template sent to Account Manager:") + debug_log.info(dumps(templa, indent=2)) + req = post(self.url + self.endpoint["verify_slr"].replace("{account_id}", account_id), json=templa, + headers={'Api-Key': self.token}, timeout=self.timeout) return req def signAndstore(self, sink_cr, sink_csr, source_cr, source_csr, account_id): @@ -207,34 +281,33 @@ def signAndstore(self, sink_cr, sink_csr, source_cr, source_csr, account_id): } template = { - "data": { + "data": { "source": { - "consentRecordPayload": { - "type": "ConsentRecord", - "attributes": source_cr["cr"] - }, - "consentStatusRecordPayload": { - "type": "ConsentStatusRecord", - 
"attributes": source_csr, - } + "consentRecordPayload": { + "type": "ConsentRecord", + "attributes": source_cr["cr"] + }, + "consentStatusRecordPayload": { + "type": "ConsentStatusRecord", + "attributes": source_csr, + } }, "sink": { - "consentRecordPayload": { - "type": "ConsentRecord", - "attributes": sink_cr["cr"], - }, - "consentStatusRecordPayload": { - "type": "ConsentStatusRecord", - "attributes": sink_csr, - }, + "consentRecordPayload": { + "type": "ConsentRecord", + "attributes": sink_cr["cr"], + }, + "consentStatusRecordPayload": { + "type": "ConsentStatusRecord", + "attributes": sink_csr, + }, }, - }, - } - + }, + } slr_id_sink = template["data"]["sink"]["consentRecordPayload"]["attributes"]["common_part"]["slr_id"] slr_id_source = template["data"]["source"]["consentRecordPayload"]["attributes"]["common_part"]["slr_id"] - #print(type(slr_id_source), type(slr_id_sink), account_id) + # print(type(slr_id_source), type(slr_id_sink), account_id) debug_log.debug(dumps(template, indent=2)) req = post(self.url + self.endpoint["consent"].replace("{account_id}", account_id) .replace("{source_slr_id}", slr_id_source). 
@@ -247,7 +320,8 @@ def signAndstore(self, sink_cr, sink_csr, source_cr, source_csr, account_id): debug_log.debug(dumps(loads(req.text), indent=2)) else: raise DetailedHTTPException(status=req.status_code, - detail={"msg": "Getting surrogate_id from account management failed.", "content": loads(req.text)}, + detail={"msg": "Getting surrogate_id from account management failed.", + "content": loads(req.text)}, title=req.reason) return loads(req.text) @@ -255,9 +329,39 @@ def signAndstore(self, sink_cr, sink_csr, source_cr, source_csr, account_id): class Helpers: def __init__(self, app_config): - self.db_path = app_config["DATABASE_PATH"] + self.host = app_config["MYSQL_HOST"] self.cert_key_path = app_config["CERT_KEY_PATH"] self.keysize = app_config["KEYSIZE"] + self.user = app_config["MYSQL_USER"] + self.passwd = app_config["MYSQL_PASSWORD"] + self.db = app_config["MYSQL_DB"] + self.port = app_config["MYSQL_PORT"] + self.operator_id = app_config["UID"] + self.not_after_interval = app_config["NOT_AFTER_INTERVAL"] + self.service_registry_search_domain = app_config["SERVICE_REGISTRY_SEARCH_DOMAIN"] + self.service_registry_search_endpoint = app_config["SERVICE_REGISTRY_SEARCH_ENDPOINT"] + + def get_key(self): + keysize = self.keysize + cert_key_path = self.cert_key_path + gen3 = {"generate": "RSA", "size": keysize, "kid": self.operator_id} + operator_key = jwk.JWK(**gen3) + try: + with open(cert_key_path, "r") as cert_file: + operator_key2 = jwk.JWK(**loads(load(cert_file))) + operator_key = operator_key2 + except Exception as e: + debug_log.error(e) + with open(cert_key_path, "w+") as cert_file: + dump(operator_key.export(), cert_file, indent=2) + public_key = loads(operator_key.export_public()) + full_key = loads(operator_key.export()) + protti = {"alg": "RS256"} + headeri = {"kid": self.operator_id, "jwk": public_key} + return {"pub": public_key, + "key": full_key, + "prot": protti, + "header": headeri} def validate_rs_id(self, rs_id): ## @@ -265,72 +369,150 @@ def 
validate_rs_id(self, rs_id): ## return self.change_rs_id_status(rs_id, True) + # TODO: This should return list, now returns single object. + def get_service_keys(self, surrogate_id): + """ + + """ + storage_rows = self.query_db_multiple("select * from service_keys_tbl where surrogate_id = %s;", + (surrogate_id,)) + list_of_keys = [] + for item in storage_rows: + list_of_keys.append(item[2]) + + debug_log.info("Found keys:\n {}".format(list_of_keys)) + return list_of_keys + + def get_service_key(self, surrogate_id, kid): + """ + + """ + storage_row = self.query_db_multiple("select * from service_keys_tbl where surrogate_id = %s and kid = %s;", + (surrogate_id, kid,), one=True) + # Third item in this tuple should be the key JSON {token_key: {}, pop_key:{}} + key_json_from_db = loads(storage_row[2]) + + return key_json_from_db + + def store_service_key_json(self, kid, surrogate_id, key_json): + db = db_handler.get_db(host=self.host, password=self.passwd, user=self.user, port=self.port, database=self.db) + cursor = db.cursor() + cursor.execute("INSERT INTO service_keys_tbl (kid, surrogate_id, key_json) \ + VALUES (%s, %s, %s);", (kid, surrogate_id, dumps(key_json))) + db.commit() +# cursor.execute("UPDATE service_keys_tbl SET key_json=%s WHERE kid=%s ;", (dumps(key_json), kid)) +# db.commit() + debug_log.info("Stored key_json({}) for surrogate_id({}) into DB".format(key_json, surrogate_id)) + cursor.close() + def storeRS_ID(self, rs_id): - db = db_handler.get_db(self.db_path) - try: - db_handler.init_db(db) - except OperationalError: - pass + db = db_handler.get_db(host=self.host, password=self.passwd, user=self.user, port=self.port, database=self.db) + cursor = db.cursor() rs_id_status = False - db.execute("INSERT INTO rs_id_tbl (rs_id, used) \ - VALUES (?, ?)", [rs_id, rs_id_status]) + cursor.execute("INSERT INTO rs_id_tbl (rs_id, used) \ + VALUES (%s, %s)", (rs_id, rs_id_status)) db.commit() - db.close() + debug_log.info("Stored RS_ID({}) into DB".format(rs_id)) + 
cursor.close() def change_rs_id_status(self, rs_id, status): - db = db_handler.get_db(self.db_path) - try: - db_handler.init_db(db) - except OperationalError: - pass - for rs_id_object in self.query_db("select * from rs_id_tbl where rs_id = ?;", [rs_id]): - rs_id_from_db = rs_id_object["rs_id"] - status_from_db = bool(rs_id_object["used"]) - status_is_unused = status_from_db == False - if (status_is_unused): - db.execute("UPDATE rs_id_tbl SET used=? WHERE rs_id=? ;", [status, rs_id]) - db.commit() - db.close() - return True - else: - db.close() - return False - + db = db_handler.get_db(host=self.host, password=self.passwd, user=self.user, port=self.port, database=self.db) + cursor = db.cursor() + query = cursor.execute("select * from rs_id_tbl where rs_id=%s;", (rs_id,)) + result = cursor.fetchone() + rs_id = result[0] + used = result[1] + debug_log.info(result) + status_from_db = bool(used) + status_is_unused = status_from_db is False + if status_is_unused: + cursor.execute("UPDATE rs_id_tbl SET used=%s WHERE rs_id=%s ;", (status, rs_id)) + db.commit() + cursor.close() + return True + else: + cursor.close() + return False def store_session(self, DictionaryToStore): - db = db_handler.get_db(self.db_path) - try: - db_handler.init_db(db) - except OperationalError: - pass + db = db_handler.get_db(host=self.host, password=self.passwd, user=self.user, port=self.port, database=self.db) + cursor = db.cursor() debug_log.info(DictionaryToStore) for key in DictionaryToStore: debug_log.info(key) try: - db.execute("INSERT INTO session_store (code,json) \ - VALUES (?, ?)", [key, dumps(DictionaryToStore[key])]) + cursor.execute("INSERT INTO session_store (code,json) \ + VALUES (%s, %s)", (key, dumps(DictionaryToStore[key]))) db.commit() - db.close() + # db.close() except IntegrityError as e: - db.execute("UPDATE session_store SET json=? WHERE code=? 
;", [dumps(DictionaryToStore[key]), key]) + cursor.execute("UPDATE session_store SET json=%s WHERE code=%s ;", (dumps(DictionaryToStore[key]), key)) db.commit() - db.close() + # db.close() + db.close() - def query_db(self, query, args=(), one=False): - db = db_handler.get_db(self.db_path) - cur = db.execute(query, args) - rv = cur.fetchall() - cur.close() - return (rv[0] if rv else None) if one else rv + def query_db(self, query, args=()): + ''' + Simple queries to DB + :param query: SQL query + :param args: Arguments to inject into the query + :return: Single hit for the given query + ''' + + result = self.query_db_multiple(query, args=args, one=True) + if result is not None: + return result[1] + else: + return None + + def query_db_multiple(self, query, args=(), one=False): + ''' + Simple queries to DB + :param query: SQL query + :param args: Arguments to inject into the query + :return: all hits for the given query + ''' + db = db_handler.get_db(host=self.host, password=self.passwd, user=self.user, port=self.port, database=self.db) + cursor = db.cursor() + cur = cursor.execute(query, args) + if one: + try: + rv = cursor.fetchone() # Returns tuple + debug_log.info(rv) + if rv is not None: + db.close() + return rv # The second value in the tuple. + else: + return None + except Exception as e: + debug_log.exception(e) + debug_log.info(cur) + db.close() + return None + else: + try: + rv = cursor.fetchall() # Returns tuple + debug_log.info(rv) + if rv is not None: + db.close() + return rv # This should be list of tuples [(1,2,3), (3,4,5)...] + else: + return None + except Exception as e: + debug_log.exception(e) + debug_log.info(cur) + db.close() + return None - def gen_rs_id(self, source_name): + def gen_rs_id(self, source_URI): ## # Something to check state here? # Also store RS_ID in DB around here. 
## - rs_id = "{}_{}".format(source_name, str(guid())) + + rs_id = "{}{}".format(source_URI.replace("http://", "").replace("https://", ""), str(guid())) self.storeRS_ID(rs_id) return rs_id @@ -340,75 +522,96 @@ def store_consent_form(self, consent_form): ## return True - def gen_cr_common(self, sur_id, rs_ID, slr_id): + def gen_cr_common(self, sur_id, rs_ID, slr_id, issued, not_before, not_after, subject_id, operator_id, role): ## # Return common part of CR + # Some of these fields are filled in consent_form.py ## common_cr = { - "version_number": "String", + "version": "1.2", "cr_id": str(guid()), "surrogate_id": sur_id, "rs_id": rs_ID, "slr_id": slr_id, - "issued": "String", - "not_before": datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S %Z "), - "not_after": datetime.fromtimestamp(time.time()+2592000).strftime("%Y-%m-%dT%H:%M:%S %Z "), - "issued_at": datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S %Z "), - "subject_id": "String" # TODO: Should this really be in common_cr? + "iat": issued, + "nbf": not_before, + "exp": not_after, + "operator": operator_id, + "subject_id": subject_id, # TODO: Should this really be in common_cr? + "role": role } return common_cr - def gen_cr_sink(self, common_CR, consent_form): + def gen_cr_sink(self, common_CR, consent_form, source_cr_id): _rules = [] common_CR["subject_id"] = consent_form["sink"]["service_id"] # This iters trough all datasets, iters though all purposes in those data sets, and add title to # _rules. It seems to be enough efficient for this purpose. 
- [[_rules.append(purpose["title"]) for purpose in dataset["purposes"] # 2 - if purpose["selected"] == True or purpose["required"] == True] # 3 - for dataset in consent_form["sink"]["dataset"]] # 1 + # [[_rules.append(purpose["title"]) for purpose in dataset["purposes"] # 2 + # if purpose["selected"] == True or purpose["required"] == True] # 3 + # for dataset in consent_form["sink"]["dataset"]] # 1 + for dataset in consent_form["sink"]["dataset"]: + for purpose in dataset["purposes"]: + _rules.append(purpose) - _rules = list(set(_rules)) # Remove duplicates + _rules = list(set(_rules)) # Remove duplicates _tmpl = {"cr": { "common_part": common_CR, "role_specific_part": { - "role": "Sink", + "source_cr_id": source_cr_id, "usage_rules": _rules }, - "ki_cr": {}, # TODO: Rename ki_cr - "extensions": {} + "consent_receipt_part": {"ki_cr": {}}, + "extension_part": {"extensions": {}} } } return _tmpl - def gen_cr_source(self, common_CR, consent_form, Operator_public_key): + def gen_cr_source(self, common_CR, consent_form, + sink_pop_key): # TODO: Operator_public key is now fetched with function. 
common_CR["subject_id"] = consent_form["source"]["service_id"] - _tmpl = {"cr": { - "common_part": common_CR, - "role_specific_part": { - "role": "Source", - "auth_token_issuer_key": Operator_public_key, - "resource_set_description": { + rs_description = \ + { + "rs_description": { "resource_set": { - "rs_id": "String", + "rs_id": consent_form["source"]["rs_id"], "dataset": [ { "dataset_id": "String", - "distribution_id": "String" - } - ] + "distribution_id": "String", + "distribution_url": "" + } + ] } - } + } + } + common_CR.update(rs_description) + _tmpl = {"cr": { + "common_part": common_CR, + "role_specific_part": { + "pop_key": sink_pop_key, + "token_issuer_key": self.get_key()["pub"], }, - "ki_cr": {}, - "extensions": {} + "consent_receipt_part": {"ki_cr": {}}, + "extension_part": {"extensions": {}} } } + _tmpl["cr"]["common_part"]["rs_description"]["resource_set"]["dataset"] = [] + + for dataset in consent_form["source"]["dataset"]: + dt_tmp = { + "dataset_id": dataset["dataset_id"], + "distribution_id": dataset["distribution"]["distribution_id"], + "distribution_url": dataset["distribution"]["access_url"] + } + _tmpl["cr"]["common_part"]["rs_description"]["resource_set"]["dataset"].append(dt_tmp) + return _tmpl def Gen_ki_cr(self, everything): @@ -417,10 +620,10 @@ def Gen_ki_cr(self, everything): def gen_csr(self, account_id, consent_record_id, consent_status, previous_record_id): _tmpl = { "record_id": str(guid()), - "account_id": account_id, + "surrogate_id": account_id, "cr_id": consent_record_id, "consent_status": consent_status, # "Active/Disabled/Withdrawn", - "iat": "", + "iat": int(time.time()), "prev_record_id": previous_record_id, } return _tmpl @@ -444,15 +647,19 @@ def gen_auth_token(self, auth_token_info): header = {"typ": "JWT", "alg": "HS256"} # Claims - payload = {"iss": slrt.get_operator_key(), # Operator_Key - "sub": slrt.get_sink_key(), # Service_Components(Sink) Key - "aud": slrt.get_dataset(), # Hard to build real - "exp": 
datetime.fromtimestamp(time.time()+2592000).strftime("%Y-%m-%dT%H:%M:%S %Z "), # 30 days in seconds + srv_handler = ServiceRegistryHandler(self.service_registry_search_domain, self.service_registry_search_endpoint) + payload = {"iss": self.operator_id, # Operator ID, + "cnf": {"kid": slrt.get_source_cr_id()}, + "aud": srv_handler.getService_url(slrt.get_source_service_id()), + "exp": int(time.time() + self.not_after_interval), + # datetime.fromtimestamp(time.time()+2592000).strftime("%Y-%m-%dT%H:%M:%S %Z"), # 30 days in seconds # Experiation time of token on or after which token MUST NOT be accepted - "nbf": datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S %Z "), # The time before which token MUST NOT be accepted - "iat": datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S %Z "), # The time which the JWT was issued + "nbf": int(time.time()), + # datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S %Z"), # The time before which token MUST NOT be accepted + "iat": int(time.time()), + # datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S %Z"), # The time which the JWT was issued "jti": str(guid()), # JWT id claim provides a unique identifier for the JWT - "rs_id": slrt.get_rs_id(), # Resource set id that was assigned in the linked Consent Record + "pi_id": slrt.get_source_cr_id(), # Resource set id that was assigned in the linked Consent Record } debug_log.debug(dumps(payload, indent=2)) key = operator_key @@ -464,87 +671,91 @@ def gen_auth_token(self, auth_token_info): token.make_signed_token(key) return token.serialize() + class SLR_tool: def __init__(self): self.slr = { - "data": { + "data": { "source": { - "consentRecord": { - "attributes": { - "cr": { - "payload": 
"IntcImNvbW1vbl9wYXJ0XCI6IHtcInNscl9pZFwiOiBcIjcwZjQwNTM1LTY2NzgtNDY1My1hZTdlLWJmMmU1MTc3NGFlNVwiLCBcInZlcnNpb25fbnVtYmVyXCI6IFwiU3RyaW5nXCIsIFwicnNfaWRcIjogXCIyXzM2MWNhOTY5LWMyNTktNDVkOS1iZWUwLTlmMzg4NmY2MjA1NlwiLCBcImNyX2lkXCI6IFwiMDQ3MmEyZTMtZGI2Yy00MTA5LWE1N2EtYzI1YWY5Y2IxNDUxXCIsIFwibm90X2FmdGVyXCI6IFwiU3RyaW5nXCIsIFwic3Vycm9nYXRlX2lkXCI6IFwiZTAyNTE3ZjgtNzkzZi00ZDNkLTg0MGEtNzJhNzFiN2E0OTViXzJcIiwgXCJub3RfYmVmb3JlXCI6IFwiU3RyaW5nXCIsIFwiaXNzdWVkXCI6IDE0NzE2MDQ0MDcsIFwiaXNzdWVkX2F0XCI6IFwiU3RyaW5nXCIsIFwic3ViamVjdF9pZFwiOiBcIjJcIn0sIFwicm9sZV9zcGVjaWZpY19wYXJ0XCI6IHtcImF1dGhfdG9rZW5faXNzdWVyX2tleVwiOiB7fSwgXCJyb2xlXCI6IFwiU291cmNlXCIsIFwicmVzb3VyY2Vfc2V0X2Rlc2NyaXB0aW9uXCI6IHtcInJlc291cmNlX3NldFwiOiB7XCJyc19pZFwiOiBcIlN0cmluZ1wiLCBcImRhdGFzZXRcIjogW3tcImRpc3RyaWJ1dGlvbl9pZFwiOiBcIlN0cmluZ1wiLCBcImRhdGFzZXRfaWRcIjogXCJTdHJpbmdcIn1dfX19LCBcImV4dGVuc2lvbnNcIjoge30sIFwibXZjclwiOiB7fX0i", - "signature": "JuZ_7tNcxO7_P9SGbBptllfVHNuZ2pQQZ4FLJeQISKBgA8pCra3i9Z81VbcachhLwnSBvv1qVVEuFEm5lnHR_g", - "protected": "eyJhbGciOiAiRVMyNTYifQ", - "header": { - "jwk": { - "x": "GfJCOXimGb3ZW4IJJIlKUZeoj8GCW7YYJRZgHuYUsds", - "crv": "P-256", - "kid": "acc-kid-3802fd17-49f4-48fc-8ac1-09624a52a3ae", - "kty": "EC", - "y": "XIpGIZ7bz7uaoj_9L05CQSOw6VykuD6bK4r_OMVQSao" - }, - "kid": "acc-kid-3802fd17-49f4-48fc-8ac1-09624a52a3ae" - } - } - }, - "type": "ConsentRecord" - } + "consentRecord": { + "attributes": { + "cr": { + "payload": 
"IntcImNvbW1vbl9wYXJ0XCI6IHtcInNscl9pZFwiOiBcIjcwZjQwNTM1LTY2NzgtNDY1My1hZTdlLWJmMmU1MTc3NGFlNVwiLCBcInZlcnNpb25fbnVtYmVyXCI6IFwiU3RyaW5nXCIsIFwicnNfaWRcIjogXCIyXzM2MWNhOTY5LWMyNTktNDVkOS1iZWUwLTlmMzg4NmY2MjA1NlwiLCBcImNyX2lkXCI6IFwiMDQ3MmEyZTMtZGI2Yy00MTA5LWE1N2EtYzI1YWY5Y2IxNDUxXCIsIFwibm90X2FmdGVyXCI6IFwiU3RyaW5nXCIsIFwic3Vycm9nYXRlX2lkXCI6IFwiZTAyNTE3ZjgtNzkzZi00ZDNkLTg0MGEtNzJhNzFiN2E0OTViXzJcIiwgXCJub3RfYmVmb3JlXCI6IFwiU3RyaW5nXCIsIFwiaXNzdWVkXCI6IDE0NzE2MDQ0MDcsIFwiaXNzdWVkX2F0XCI6IFwiU3RyaW5nXCIsIFwic3ViamVjdF9pZFwiOiBcIjJcIn0sIFwicm9sZV9zcGVjaWZpY19wYXJ0XCI6IHtcImF1dGhfdG9rZW5faXNzdWVyX2tleVwiOiB7fSwgXCJyb2xlXCI6IFwiU291cmNlXCIsIFwicmVzb3VyY2Vfc2V0X2Rlc2NyaXB0aW9uXCI6IHtcInJlc291cmNlX3NldFwiOiB7XCJyc19pZFwiOiBcIlN0cmluZ1wiLCBcImRhdGFzZXRcIjogW3tcImRpc3RyaWJ1dGlvbl9pZFwiOiBcIlN0cmluZ1wiLCBcImRhdGFzZXRfaWRcIjogXCJTdHJpbmdcIn1dfX19LCBcImV4dGVuc2lvbnNcIjoge30sIFwibXZjclwiOiB7fX0i", + "signature": "JuZ_7tNcxO7_P9SGbBptllfVHNuZ2pQQZ4FLJeQISKBgA8pCra3i9Z81VbcachhLwnSBvv1qVVEuFEm5lnHR_g", + "protected": "eyJhbGciOiAiRVMyNTYifQ", + "header": { + "jwk": { + "x": "GfJCOXimGb3ZW4IJJIlKUZeoj8GCW7YYJRZgHuYUsds", + "crv": "P-256", + "kid": "acc-kid-3802fd17-49f4-48fc-8ac1-09624a52a3ae", + "kty": "EC", + "y": "XIpGIZ7bz7uaoj_9L05CQSOw6VykuD6bK4r_OMVQSao" + }, + "kid": "acc-kid-3802fd17-49f4-48fc-8ac1-09624a52a3ae" + } + } + }, + "type": "ConsentRecord" + } }, "sink": { - "serviceLinkRecord": { - "attributes": { - "slr": { - "signatures": [ - { - "signature": "aQB65Kv07kL9Q62INPZXMsNJuvfsEa0OuAI9c83DBTFK8cn1qFhDNZ76vVl84B0wImt3RgsPITNJiW3OvIGdag", - "protected": "eyJhbGciOiAiRVMyNTYifQ", - "header": { - "jwk": { - "x": "GfJCOXimGb3ZW4IJJIlKUZeoj8GCW7YYJRZgHuYUsds", - "crv": "P-256", - "kid": "acc-kid-3802fd17-49f4-48fc-8ac1-09624a52a3ae", - "kty": "EC", - "y": "XIpGIZ7bz7uaoj_9L05CQSOw6VykuD6bK4r_OMVQSao" - }, - "kid": "acc-kid-3802fd17-49f4-48fc-8ac1-09624a52a3ae" + "serviceLinkRecord": { + "attributes": { + "slr": { + "signatures": [ + { + "signature": 
"aQB65Kv07kL9Q62INPZXMsNJuvfsEa0OuAI9c83DBTFK8cn1qFhDNZ76vVl84B0wImt3RgsPITNJiW3OvIGdag", + "protected": "eyJhbGciOiAiRVMyNTYifQ", + "header": { + "jwk": { + "x": "GfJCOXimGb3ZW4IJJIlKUZeoj8GCW7YYJRZgHuYUsds", + "crv": "P-256", + "kid": "acc-kid-3802fd17-49f4-48fc-8ac1-09624a52a3ae", + "kty": "EC", + "y": "XIpGIZ7bz7uaoj_9L05CQSOw6VykuD6bK4r_OMVQSao" + }, + "kid": "acc-kid-3802fd17-49f4-48fc-8ac1-09624a52a3ae" + } + }, + { + "signature": "MOBfIeQ6G4Bg6-4Q9v-Ta6_6Otd7sfXBg3YqVimtT0aL-9apMHl-i2lsuOKRySpe-tXnjQKoawjHpP8rTprqcG677TF0AbhS91LLepUsxt-NwdxnkhjDI8TSew0uVBirjY8-ZHYpLinu0ZMtAGoV-0WLuBPC-RBVqgOUQusJQSAfNyb5lpq2bTo7Xkry41XlrjdbE6tXMuGHmc2Hy9eytNf13597Q0xC0cOOlw92A92WT-6J9PLg4oArLgpBe8Tgc2GZp392DyyKvmTVENxEL1WgS5TlsxdKTH8tCSXwq5pWwkmm3Rnxfk3GUgV8hVaz0r3n1xX7EQKboondOpPeosOnpMu4ZrvoDB5aZz0KGTWuVqE7tHmVsG4lLQlww_e2KpTXfmxzLcpsOm_IfsyE-obI4_Dqi60ArjQ-kcMF6Djb0S-i1-PI-vEbSavYbcKdSjWVB1Z5-pw1rfch3inB2t5uzgjXVdipLH_jLvEUx0RrmRtG7Lq_cyJiV4wRW_YVgZbjVFZqwdsygo9-hg7YO9v-GgZr7d3z7nD6M1z4vJbJfmjXjt--2UtoY71DskxFDHUzajaMuwKiM1uBXt_TIUo3gEIM6xTpB5OEDHqN67aRTmhxK-Hqn1iHAxbnilcNjXIULiEfPQuAIpQWelO6j5drRzmyt04yIgrWQqQ5oFA", + "protected": "eyJhbGciOiAiUlMyNTYifQ", + "header": { + "jwk": { + "n": "v6QswzNJbJj2b9mE3IvPYDZx8K6MiJBDI9RJ1SwEWw0NsblAlxew3YdxvpE0iIfA-G5MHm5sG7DOmNCC9baILosVnG8UGI2QMfhZ8R4Vg-WlKQmGs_jNYaUnD2lr_gs6DTrzmfsYj_UH4NHCCm9CTW-f1s4vMpFaYAPWfTCK2OogBX0BH3f_Q8lFXmdllLN0lT5p18QY9xa9hqWkIbAOPH3Tv66kfJHdSbKeT7HqOeKRj4aBH_kokJWZcMmQAHYPuR2Y46nDQdYKRt822tmEONalupSzNdEErlSzKZ5uPileqIAitHTG0QFzL1ZfiqfI861nrKlFi3LOhXGzk_skXZYZGvLLAZ1TtBIUcM97VyBlJVNRpK9fypLyHN3ezxuazwwZ4gi8-T39E2Xpr0TRj5eVfoflau6LF4MgwQTs6PyKzkwKlcipTcrmMMhoT9MYNih_Sb2E7qlF_gXEfgFzcXO8AkArwGoNlpvYdZdNyu4u6mviH7-ZK6YnkudI6qRCrbG7sYltGXO809NdSnGklMqXDSvghlgHvagLyXJ4C8geRH_9aGzYVjweYmwQxgBMFtpvzotd1KIoeFkKFIXf1p9P02AwgQJSVTdVHltNU9Vkom-TLcO3SZ5FvpC5W1hS67bkD_qStQPWAZ-RtWH0QkjJFGdQVLdK07uZNkSVee8", + "kid": "SRVMGNT-RSA-4096", + "e": "AQAB", + "kty": "RSA" + }, + "kid": "SRVMGNT-RSA-4096" + } + } + ], + 
"payload": "IntcIm9wZXJhdG9yX2lkXCI6IFwiQUNDLUlELVJBTkRPTVwiLCBcImNyZWF0ZWRcIjogMTQ3MTYwNDQwNSwgXCJzdXJyb2dhdGVfaWRcIjogXCJkMTJjN2UyOC04NzRiLTQwNDAtYmVjNS02NzkzYTYwMzhjMTlfMlwiLCBcInRva2VuX2tleVwiOiB7XCJrZXlcIjoge1wiblwiOiBcInY2UXN3ek5KYkpqMmI5bUUzSXZQWURaeDhLNk1pSkJESTlSSjFTd0VXdzBOc2JsQWx4ZXczWWR4dnBFMGlJZkEtRzVNSG01c0c3RE9tTkNDOWJhSUxvc1ZuRzhVR0kyUU1maFo4UjRWZy1XbEtRbUdzX2pOWWFVbkQybHJfZ3M2RFRyem1mc1lqX1VINE5IQ0NtOUNUVy1mMXM0dk1wRmFZQVBXZlRDSzJPb2dCWDBCSDNmX1E4bEZYbWRsbExOMGxUNXAxOFFZOXhhOWhxV2tJYkFPUEgzVHY2NmtmSkhkU2JLZVQ3SHFPZUtSajRhQkhfa29rSldaY01tUUFIWVB1UjJZNDZuRFFkWUtSdDgyMnRtRU9OYWx1cFN6TmRFRXJsU3pLWjV1UGlsZXFJQWl0SFRHMFFGekwxWmZpcWZJODYxbnJLbEZpM0xPaFhHemtfc2tYWllaR3ZMTEFaMVR0QklVY005N1Z5QmxKVk5ScEs5ZnlwTHlITjNlenh1YXp3d1o0Z2k4LVQzOUUyWHByMFRSajVlVmZvZmxhdTZMRjRNZ3dRVHM2UHlLemt3S2xjaXBUY3JtTU1ob1Q5TVlOaWhfU2IyRTdxbEZfZ1hFZmdGemNYTzhBa0Fyd0dvTmxwdllkWmROeXU0dTZtdmlINy1aSzZZbmt1ZEk2cVJDcmJHN3NZbHRHWE84MDlOZFNuR2tsTXFYRFN2Z2hsZ0h2YWdMeVhKNEM4Z2VSSF85YUd6WVZqd2VZbXdReGdCTUZ0cHZ6b3RkMUtJb2VGa0tGSVhmMXA5UDAyQXdnUUpTVlRkVkhsdE5VOVZrb20tVExjTzNTWjVGdnBDNVcxaFM2N2JrRF9xU3RRUFdBWi1SdFdIMFFrakpGR2RRVkxkSzA3dVpOa1NWZWU4XCIsIFwiZVwiOiBcIkFRQUJcIiwgXCJrdHlcIjogXCJSU0FcIiwgXCJraWRcIjogXCJTUlZNR05ULVJTQS00MDk2XCJ9fSwgXCJsaW5rX2lkXCI6IFwiYTk4ZDg4Y2ItZDA3ZS00YTMyLTk3ODctY2IzODgxZDBiMDZlXCIsIFwib3BlcmF0b3Jfa2V5XCI6IHtcInVzZVwiOiBcInNpZ1wiLCBcImVcIjogXCJBUUFCXCIsIFwia3R5XCI6IFwiUlNBXCIsIFwiblwiOiBcIndITUFwQ2FVSkZpcHlGU2NUNzgxd2VuTm5mbU5jVkQxZTBmSFhfcmVfcWFTNWZvQkJzN1c0aWE1bnVxNjVFQWJKdWFxaGVPR2FEamVIaVU4V1Q5cWdnYks5cTY4SXZUTDN1bjN6R2o5WmQ3N3MySXdzNE1BSW1EeWN3Rml0aDE2M3lxdW9ETXFMX1YySXl5Mm45Uzloa1M5ZkV6cXJsZ01sYklnczJtVkJpNmdWVTJwYnJTN0gxUGFSV194YlFSX1puN19laV9uOFdlWFA1d2NEX3NJYldNa1NCc3VVZ21jam9XM1ktNW1ERDJWYmRFejJFbWtZaTlHZmstcDlBenlVbk56ZkIyTE1jSk1aekpWUWNYaUdCTzdrcG9uRkEwY3VIMV9CR0NsZXJ6Mnh2TWxXdjlPVnZzN3ZDTmRlQV9mano2eloyMUtadVo0RG1nZzBrOTRsd1wifSwgXCJ2ZXJzaW9uXCI6IFwiMS4yXCIsIFwiY3Jfa2V5c1wiOiBbe1wieVwiOiBcIlhJcEdJWjdiejd1YW9qXzlMMDVDUVNPdzZWeWt1RDZiSzRyX09NVlFTYW9cIiwg
XCJ4XCI6IFwiR2ZKQ09YaW1HYjNaVzRJSkpJbEtVWmVvajhHQ1c3WVlKUlpnSHVZVXNkc1wiLCBcImNydlwiOiBcIlAtMjU2XCIsIFwia3R5XCI6IFwiRUNcIiwgXCJraWRcIjogXCJhY2Mta2lkLTM4MDJmZDE3LTQ5ZjQtNDhmYy04YWMxLTA5NjI0YTUyYTNhZVwifV0sIFwic2VydmljZV9pZFwiOiBcIjFcIn0i" } - }, - { - "signature": "MOBfIeQ6G4Bg6-4Q9v-Ta6_6Otd7sfXBg3YqVimtT0aL-9apMHl-i2lsuOKRySpe-tXnjQKoawjHpP8rTprqcG677TF0AbhS91LLepUsxt-NwdxnkhjDI8TSew0uVBirjY8-ZHYpLinu0ZMtAGoV-0WLuBPC-RBVqgOUQusJQSAfNyb5lpq2bTo7Xkry41XlrjdbE6tXMuGHmc2Hy9eytNf13597Q0xC0cOOlw92A92WT-6J9PLg4oArLgpBe8Tgc2GZp392DyyKvmTVENxEL1WgS5TlsxdKTH8tCSXwq5pWwkmm3Rnxfk3GUgV8hVaz0r3n1xX7EQKboondOpPeosOnpMu4ZrvoDB5aZz0KGTWuVqE7tHmVsG4lLQlww_e2KpTXfmxzLcpsOm_IfsyE-obI4_Dqi60ArjQ-kcMF6Djb0S-i1-PI-vEbSavYbcKdSjWVB1Z5-pw1rfch3inB2t5uzgjXVdipLH_jLvEUx0RrmRtG7Lq_cyJiV4wRW_YVgZbjVFZqwdsygo9-hg7YO9v-GgZr7d3z7nD6M1z4vJbJfmjXjt--2UtoY71DskxFDHUzajaMuwKiM1uBXt_TIUo3gEIM6xTpB5OEDHqN67aRTmhxK-Hqn1iHAxbnilcNjXIULiEfPQuAIpQWelO6j5drRzmyt04yIgrWQqQ5oFA", - "protected": "eyJhbGciOiAiUlMyNTYifQ", - "header": { - "jwk": { - "n": "v6QswzNJbJj2b9mE3IvPYDZx8K6MiJBDI9RJ1SwEWw0NsblAlxew3YdxvpE0iIfA-G5MHm5sG7DOmNCC9baILosVnG8UGI2QMfhZ8R4Vg-WlKQmGs_jNYaUnD2lr_gs6DTrzmfsYj_UH4NHCCm9CTW-f1s4vMpFaYAPWfTCK2OogBX0BH3f_Q8lFXmdllLN0lT5p18QY9xa9hqWkIbAOPH3Tv66kfJHdSbKeT7HqOeKRj4aBH_kokJWZcMmQAHYPuR2Y46nDQdYKRt822tmEONalupSzNdEErlSzKZ5uPileqIAitHTG0QFzL1ZfiqfI861nrKlFi3LOhXGzk_skXZYZGvLLAZ1TtBIUcM97VyBlJVNRpK9fypLyHN3ezxuazwwZ4gi8-T39E2Xpr0TRj5eVfoflau6LF4MgwQTs6PyKzkwKlcipTcrmMMhoT9MYNih_Sb2E7qlF_gXEfgFzcXO8AkArwGoNlpvYdZdNyu4u6mviH7-ZK6YnkudI6qRCrbG7sYltGXO809NdSnGklMqXDSvghlgHvagLyXJ4C8geRH_9aGzYVjweYmwQxgBMFtpvzotd1KIoeFkKFIXf1p9P02AwgQJSVTdVHltNU9Vkom-TLcO3SZ5FvpC5W1hS67bkD_qStQPWAZ-RtWH0QkjJFGdQVLdK07uZNkSVee8", - "kid": "SRVMGNT-RSA-4096", - "e": "AQAB", - "kty": "RSA" - }, - "kid": "SRVMGNT-RSA-4096" - } - } - ], - "payload": 
"IntcIm9wZXJhdG9yX2lkXCI6IFwiQUNDLUlELVJBTkRPTVwiLCBcImNyZWF0ZWRcIjogMTQ3MTYwNDQwNSwgXCJzdXJyb2dhdGVfaWRcIjogXCJkMTJjN2UyOC04NzRiLTQwNDAtYmVjNS02NzkzYTYwMzhjMTlfMlwiLCBcInRva2VuX2tleVwiOiB7XCJrZXlcIjoge1wiblwiOiBcInY2UXN3ek5KYkpqMmI5bUUzSXZQWURaeDhLNk1pSkJESTlSSjFTd0VXdzBOc2JsQWx4ZXczWWR4dnBFMGlJZkEtRzVNSG01c0c3RE9tTkNDOWJhSUxvc1ZuRzhVR0kyUU1maFo4UjRWZy1XbEtRbUdzX2pOWWFVbkQybHJfZ3M2RFRyem1mc1lqX1VINE5IQ0NtOUNUVy1mMXM0dk1wRmFZQVBXZlRDSzJPb2dCWDBCSDNmX1E4bEZYbWRsbExOMGxUNXAxOFFZOXhhOWhxV2tJYkFPUEgzVHY2NmtmSkhkU2JLZVQ3SHFPZUtSajRhQkhfa29rSldaY01tUUFIWVB1UjJZNDZuRFFkWUtSdDgyMnRtRU9OYWx1cFN6TmRFRXJsU3pLWjV1UGlsZXFJQWl0SFRHMFFGekwxWmZpcWZJODYxbnJLbEZpM0xPaFhHemtfc2tYWllaR3ZMTEFaMVR0QklVY005N1Z5QmxKVk5ScEs5ZnlwTHlITjNlenh1YXp3d1o0Z2k4LVQzOUUyWHByMFRSajVlVmZvZmxhdTZMRjRNZ3dRVHM2UHlLemt3S2xjaXBUY3JtTU1ob1Q5TVlOaWhfU2IyRTdxbEZfZ1hFZmdGemNYTzhBa0Fyd0dvTmxwdllkWmROeXU0dTZtdmlINy1aSzZZbmt1ZEk2cVJDcmJHN3NZbHRHWE84MDlOZFNuR2tsTXFYRFN2Z2hsZ0h2YWdMeVhKNEM4Z2VSSF85YUd6WVZqd2VZbXdReGdCTUZ0cHZ6b3RkMUtJb2VGa0tGSVhmMXA5UDAyQXdnUUpTVlRkVkhsdE5VOVZrb20tVExjTzNTWjVGdnBDNVcxaFM2N2JrRF9xU3RRUFdBWi1SdFdIMFFrakpGR2RRVkxkSzA3dVpOa1NWZWU4XCIsIFwiZVwiOiBcIkFRQUJcIiwgXCJrdHlcIjogXCJSU0FcIiwgXCJraWRcIjogXCJTUlZNR05ULVJTQS00MDk2XCJ9fSwgXCJsaW5rX2lkXCI6IFwiYTk4ZDg4Y2ItZDA3ZS00YTMyLTk3ODctY2IzODgxZDBiMDZlXCIsIFwib3BlcmF0b3Jfa2V5XCI6IHtcInVzZVwiOiBcInNpZ1wiLCBcImVcIjogXCJBUUFCXCIsIFwia3R5XCI6IFwiUlNBXCIsIFwiblwiOiBcIndITUFwQ2FVSkZpcHlGU2NUNzgxd2VuTm5mbU5jVkQxZTBmSFhfcmVfcWFTNWZvQkJzN1c0aWE1bnVxNjVFQWJKdWFxaGVPR2FEamVIaVU4V1Q5cWdnYks5cTY4SXZUTDN1bjN6R2o5WmQ3N3MySXdzNE1BSW1EeWN3Rml0aDE2M3lxdW9ETXFMX1YySXl5Mm45Uzloa1M5ZkV6cXJsZ01sYklnczJtVkJpNmdWVTJwYnJTN0gxUGFSV194YlFSX1puN19laV9uOFdlWFA1d2NEX3NJYldNa1NCc3VVZ21jam9XM1ktNW1ERDJWYmRFejJFbWtZaTlHZmstcDlBenlVbk56ZkIyTE1jSk1aekpWUWNYaUdCTzdrcG9uRkEwY3VIMV9CR0NsZXJ6Mnh2TWxXdjlPVnZzN3ZDTmRlQV9mano2eloyMUtadVo0RG1nZzBrOTRsd1wifSwgXCJ2ZXJzaW9uXCI6IFwiMS4yXCIsIFwiY3Jfa2V5c1wiOiBbe1wieVwiOiBcIlhJcEdJWjdiejd1YW9qXzlMMDVDUVNPdzZWeWt1RDZiSzRyX09NVlFTYW9cIiwgXCJ4XCI6IFw
iR2ZKQ09YaW1HYjNaVzRJSkpJbEtVWmVvajhHQ1c3WVlKUlpnSHVZVXNkc1wiLCBcImNydlwiOiBcIlAtMjU2XCIsIFwia3R5XCI6IFwiRUNcIiwgXCJraWRcIjogXCJhY2Mta2lkLTM4MDJmZDE3LTQ5ZjQtNDhmYy04YWMxLTA5NjI0YTUyYTNhZVwifV0sIFwic2VydmljZV9pZFwiOiBcIjFcIn0i" - } - }, - "type": "ServiceLinkRecord" - } + }, + "type": "ServiceLinkRecord" + } } - } } + } def decrypt_payload(self, payload): payload += '=' * (-len(payload) % 4) # Fix incorrect padding of base64 string. content = decode(payload.encode()) - payload = loads(loads(content.decode("utf-8"))) + payload = loads(content.decode("utf-8")) return payload def get_SLR_payload(self): - base64_payload = self.slr["data"]["sink"]["serviceLinkRecord"]["attributes"]["slr"]["payload"] + debug_log.info(dumps(self.slr, indent=2)) + base64_payload = self.slr["data"]["sink"]["serviceLinkRecord"]["attributes"]["slr"]["attributes"]["slr"][ + "payload"] # TODO: This is a workaround for structure repetition. payload = self.decrypt_payload(base64_payload) return payload def get_CR_payload(self): - base64_payload = self.slr["data"]["source"]["consentRecord"]["attributes"]["cr"]["payload"] + base64_payload = self.slr["data"]["source"]["consentRecord"]["attributes"]["cr"]["attributes"]["cr"][ + "payload"] # TODO: This is a workaround for structure repetition. 
payload = self.decrypt_payload(base64_payload) return payload @@ -560,6 +771,9 @@ def get_cr_keys(self): def get_rs_id(self): return self.get_CR_payload()["common_part"]["rs_id"] + def get_source_cr_id(self): + return self.get_CR_payload()["common_part"]["cr_id"] + def get_surrogate_id(self): return self.get_CR_payload()["common_part"]["surrogate_id"] @@ -567,4 +781,10 @@ def get_sink_key(self): return self.get_SLR_payload()["token_key"]["key"] def get_dataset(self): - return self.get_CR_payload()["role_specific_part"]["resource_set_description"]["resource_set"]["dataset"] + return self.get_CR_payload()["common_part"]["rs_description"]["resource_set"]["dataset"] + + def get_source_service_id(self): + return self.get_CR_payload()["common_part"]["subject_id"] + + def get_sink_service_id(self): + return self.slr["data"]["sink"]["serviceLinkRecord"]["attributes"]["slr"]["attributes"]["service_id"] diff --git a/Operator_Components/instance/__init__.py b/Operator_Components/instance/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/Operator_Components/instance/settings.py b/Operator_Components/instance/settings.py new file mode 100644 index 0000000..365f108 --- /dev/null +++ b/Operator_Components/instance/settings.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +from kombu import Exchange, Queue + +TIMEOUT = 8 +KEYSIZE = 512 + +# TODO give these as parameter to init AccountManagerHandler + +ACCOUNT_MANAGEMENT_URL = 'http://account:8080/' + +ACCOUNT_MANAGEMENT_USER = "test_sdk" + +ACCOUNT_MANAGEMENT_PASSWORD = "test_sdk_pw" + +# Setting to /tmp or other ramdisk makes it faster. 
+ +DATABASE_PATH = "./db_Operator.sqlite" + +SELERY_BROKER_URL = 'redis://redis:6379/0' + +SELERY_RESULT_BACKEND = 'redis://redis:6379/0' + +CERT_PATH = "./service_key.jwk" + +CERT_KEY_PATH = "./service_key.jwk" + +CERT_PASSWORD_PATH = "./cert_pw" + +OPERATOR_UID = "41e19fcd-1951-455f-9169-a303f990f52d" + +OPERATOR_ROOT_PATH = "/api/1.2" + +OPERATOR_CR_PATH = "/cr" + +OPERATOR_SLR_PATH = "/slr" + +SERVICE_URL = "http://service_components:7000" + +DEBUG_MODE = False + +CELERY_QUEUES = ( + Queue('op_queue', Exchange('op_queue'), routing_key='op_queue'), +) + +CELERY_DEFAULT_QUEUE = 'op_queue' + +CELERY_ROUTES = { + 'CR_Installer': {'queue': 'op_queue','routing_key': "op_queue"}, +} \ No newline at end of file diff --git a/Operator_Components/instance/settings_template.py.j2 b/Operator_Components/instance/settings_template.py.j2 new file mode 100644 index 0000000..6f4f1fe --- /dev/null +++ b/Operator_Components/instance/settings_template.py.j2 @@ -0,0 +1,187 @@ +# -*- coding: utf-8 -*- +from kombu import Exchange, Queue + +TIMEOUT = 8 +KEYSIZE = 512 + +{% if UID is defined %} +UID = {{ UID }} +{% else %} +UID = 'Operator112' +{% endif %} + + +{% if SERVICE_REGISTRY_SEARCH_DOMAIN is defined %} +SERVICE_REGISTRY_SEARCH_DOMAIN = {{ SERVICE_REGISTRY_SEARCH_DOMAIN }} +{% else %} +SERVICE_REGISTRY_SEARCH_DOMAIN = "http://178.62.229.148:8081" +{% endif %} + +{% if SERVICE_REGISTRY_SEARCH_ENDPOINT is defined %} +SERVICE_REGISTRY_SEARCH_ENDPOINT = {{ SERVICE_REGISTRY_SEARCH_ENDPOINT }} +{% else %} +SERVICE_REGISTRY_SEARCH_ENDPOINT = "/api/v1/services/" +{% endif %} + + +{% if NOT_AFTER_INTERVAL is defined %} +NOT_AFTER_INTERVAL = {{ NOT_AFTER_INTERVAL }} +{% else %} +NOT_AFTER_INTERVAL = 2592000 # 30 days in seconds. +{% endif %} + +# Name of host to connect to. Default: use the local host via a UNIX socket (where applicable) +{% if MYSQL_HOST is defined %} +MYSQL_HOST = {{ MYSQL_HOST }} +{% else %} +MYSQL_HOST = 'localhost' +{% endif %} + + # User to authenticate as. 
Default: current effective user. +{% if MYSQL_USER is defined %} +MYSQL_USER = {{ MYSQL_USER }} +{% else %} +MYSQL_USER = 'operator' +{% endif %} + +# Password to authenticate with. Default: no password. +{% if MYSQL_PASSWORD is defined %} +MYSQL_PASSWORD = {{ MYSQL_PASSWORD }} +{% else %} +MYSQL_PASSWORD = 'MynorcA' +{% endif %} + +# Database to use. Default: no default database. +{% if MYSQL_DB is defined %} +MYSQL_DB = {{ MYSQL_DB }} +{% else %} +MYSQL_DB = 'MyDataOperator' +{% endif %} + +# TCP port of MySQL server. Default: 3306. +{% if MYSQL_PORT is defined %} +MYSQL_PORT = {{ MYSQL_PORT }} +{% else %} +MYSQL_PORT = 3306 +{% endif %} + + + +# TODO give these as parameter to init AccountManagerHandler +{% if ACCOUNT_MANAGEMENT_URL is defined %} +ACCOUNT_MANAGEMENT_URL = {{ ACCOUNT_MANAGEMENT_URL }} +{% else %} + +ACCOUNT_MANAGEMENT_URL = "http://myaccount.dy.fi/" + +{% endif %} + +{% if ACCOUNT_MANAGEMENT_USER is defined %} +ACCOUNT_MANAGEMENT_USER = {{ ACCOUNT_MANAGEMENT_USER }} +{% else %} +ACCOUNT_MANAGEMENT_USER = "test_sdk" +{% endif %} + +{% if ACCOUNT_MANAGEMENT_PASSWORD is defined %} +ACCOUNT_MANAGEMENT_PASSWORD = {{ ACCOUNT_MANAGEMENT_PASSWORD }} +{% else %} +ACCOUNT_MANAGEMENT_PASSWORD = "test_sdk_pw" +{% endif %} + + +# Setting to /tmp or other ramdisk makes it faster. 
+{% if DATABASE_PATH is defined %} +DATABASE_PATH = {{ DATABASE_PATH }} +{% else %} + +DATABASE_PATH = "./db_Operator.sqlite" +{% endif %} + + +{% if SELERY_BROKER_URL is defined %} +SELERY_BROKER_URL = {{ SELERY_BROKER_URL }} +{% else %} +SELERY_BROKER_URL = 'redis://localhost:6379/0' +{% endif %} + +{% if SELERY_RESULT_BACKEND is defined %} +SELERY_RESULT_BACKEND = {{ SELERY_RESULT_BACKEND }} +{% else %} +SELERY_RESULT_BACKEND = 'redis://localhost:6379/0' + +{% endif %} + + +{% if CERT_PATH is defined %} +CERT_PATH = {{ CERT_PATH }} +{% else %} +CERT_PATH = "./service_key.jwk" +{% endif %} + +{% if CERT_KEY_PATH is defined %} +CERT_KEY_PATH = {{ CERT_KEY_PATH }} +{% else %} +CERT_KEY_PATH = "./service_key.jwk" +{% endif %} + +{% if CERT_PASSWORD_PATH is defined %} +CERT_PASSWORD_PATH = {{ CERT_PASSWORD_PATH }} +{% else %} +CERT_PASSWORD_PATH = "./cert_pw" +{% endif %} + + +{% if OPERATOR_URL is defined %} +OPERATOR_URL = {{ OPERATOR_URL }} +{% else %} +OPERATOR_URL = "http://localhost:5000" +{% endif %} + +{% if OPERATOR_UID is defined %} +OPERATOR_UID = {{ OPERATOR_UID }} +{% else %} +OPERATOR_UID = "41e19fcd-1951-455f-9169-a303f990f52d" +{% endif %} + + +{% if OPERATOR_ROOT_PATH is defined %} +OPERATOR_ROOT_PATH = {{ OPERATOR_ROOT_PATH }} +{% else %} +OPERATOR_ROOT_PATH = "/api/1.2" +{% endif %} + +{% if OPERATOR_CR_PATH is defined %} +OPERATOR_CR_PATH = {{ OPERATOR_CR_PATH }} +{% else %} +OPERATOR_CR_PATH = "/cr" +{% endif %} + +{% if OPERATOR_SLR_PATH is defined %} +OPERATOR_SLR_PATH = {{ OPERATOR_SLR_PATH }} +{% else %} +OPERATOR_SLR_PATH = "/slr" +{% endif %} + +{% if SERVICE_URL is defined %} +SERVICE_URL = {{ SERVICE_URL }} +{% else %} +SERVICE_URL = "http://localhost:7000" +{% endif %} + +{% if DEBUG_MODE is defined %} +DEBUG_MODE = {{ DEBUG_MODE }} +{% else %} +DEBUG_MODE = True +{% endif %} + + + +CELERY_QUEUES = ( + Queue('op_queue', Exchange('op_queue'), routing_key='op_queue'), +) + +CELERY_DEFAULT_QUEUE = 'op_queue' + +CELERY_ROUTES = { + 
'CR_Installer': {'queue': 'op_queue','routing_key': "op_queue"}, +} \ No newline at end of file diff --git a/Operator_Components/op_tasks.py b/Operator_Components/op_tasks.py new file mode 100644 index 0000000..c75e16f --- /dev/null +++ b/Operator_Components/op_tasks.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +from requests import post +from factory import create_celery_app + +celery = create_celery_app() + +@celery.task +def CR_installer(crs_csrs_payload, sink_url, source_url): + # Get these as parameter or inside crs_csrs_payload + endpoint = "/api/1.2/cr/add_cr" + print(crs_csrs_payload) + source = post(source_url+endpoint, json=crs_csrs_payload["source"]) + print(source.url, source.reason, source.status_code, source.text) + + sink = post(sink_url+endpoint, json=crs_csrs_payload["sink"]) + print(sink.url, sink.reason, sink.status_code, sink.text) + +# # TODO Possibly remove this on release +# from sqlite3 import OperationalError, IntegrityError +# import db_handler +# from json import dumps, loads +# from requests import get +# @celery.task +# def get_AuthToken(cr_id, operator_url, db_path): +# print(operator_url, db_path, cr_id) +# def storeToken(DictionaryToStore): +# db = db_handler.get_db(db_path) +# try: +# db_handler.init_db(db) +# except OperationalError: +# pass +# for key in DictionaryToStore: +# try: +# db.execute("INSERT INTO token_storage (cr_id,token) \ +# VALUES (?, ?)", [key, dumps(DictionaryToStore[key])]) +# db.commit() +# except IntegrityError as e: # Rewrite incase we get new token. +# db.execute("UPDATE token_storage SET token=? WHERE cr_id=? ;", [dumps(DictionaryToStore[key]), key]) +# db.commit() +# +# print(cr_id) +# token = get("{}/api/1.2/cr/auth_token/{}".format(operator_url, cr_id)) # TODO Get api path from some config? 
+# print(token.url, token.reason, token.status_code, token.text) +# store_dict = {cr_id: dumps(loads(token.text.encode()))} +# storeToken(store_dict) diff --git a/Operator_Components/requirements.txt b/Operator_Components/requirements.txt index 6a0fe52..d1978bd 100644 --- a/Operator_Components/requirements.txt +++ b/Operator_Components/requirements.txt @@ -20,6 +20,7 @@ jsonschema==2.5.1 jwcrypto==0.3.1 kombu==3.0.35 MarkupSafe==0.23 +mysqlclient==1.3.7 pyasn1==0.1.9 pycparser==2.14 pycryptodome==3.4 @@ -29,5 +30,7 @@ pytz==2016.6.1 redis==2.10.5 requests==2.11.1 six==1.10.0 +uWSGI==2.0.13.1 Werkzeug==0.11.10 wheel==0.24.0 +restapi-logging-handler==0.2.2 \ No newline at end of file diff --git a/Operator_Components/wsgi.py b/Operator_Components/wsgi.py index 12942cd..95978e4 100644 --- a/Operator_Components/wsgi.py +++ b/Operator_Components/wsgi.py @@ -17,7 +17,7 @@ try: from restapi_logging_handler import RestApiHandler - restapihandler = RestApiHandler("http://localhost:9004/") + restapihandler = RestApiHandler("http://172.17.0.1:9004/") logger.addHandler(restapihandler) except Exception as e: diff --git a/Service_Components/Authorization_Management/authorization_management.py b/Service_Components/Authorization_Management/authorization_management.py index 2dabc78..690a52b 100644 --- a/Service_Components/Authorization_Management/authorization_management.py +++ b/Service_Components/Authorization_Management/authorization_management.py @@ -12,7 +12,7 @@ from flask_restful import Resource, Api from helpers import validate_json, SLR_tool, CR_tool, Helpers from jwcrypto import jwk -from tasks import get_AuthToken +from srv_tasks import get_AuthToken api_Service_Mgmnt = Blueprint("api_Service_Mgmnt", __name__) # TODO Rename this @@ -41,16 +41,6 @@ ''' -Service_ID = "SRVMGNT-IDK3Y" -gen = {"generate": "EC", "cvr": "P-256", "kid": Service_ID} -gen2 = {"generate": "EC", "cvr": "P-256", "kid": Service_ID} -service_key = jwk.JWK(**gen) -token_key = jwk.JWK(**gen) - -templ = 
{Service_ID: {"cr_keys": loads(token_key.export_public())}} -protti = {"alg": "ES256"} -headeri = {"kid": Service_ID, "jwk": loads(service_key.export_public())} - logger = logging.getLogger("sequence") debug_log = logging.getLogger("debug") @@ -74,7 +64,7 @@ def __init__(self): super(Install_CR, self).__init__() self.helpers = Helpers(current_app.config) self.operator_url = current_app.config["OPERATOR_URL"] - + self.db_path = current_app.config["DATABASE_PATH"] @error_handler def post(self): debug_log.info("arrived at Install_CR") @@ -95,6 +85,8 @@ def post(self): sq.task("Verify CR format and mandatory fields") if role == "Source": debug_log.info("Source CR") + debug_log.info(dumps(crt.get_CR_payload(), indent=2)) + debug_log.info(type(crt.get_CR_payload())) errors = validate_json(source_cr_schema, crt.get_CR_payload()) for e in errors: raise DetailedHTTPException(detail={"msg": "Validating Source CR format and fields failed", @@ -131,7 +123,7 @@ def post(self): else: raise DetailedHTTPException(detail={"msg": "Verifying CR failed",}, title="Failure in CR verifying", - status=451) + status=403) sq.task("Verify CSR integrity") # SLR includes CR keys which means we need to get key from stored SLR and use it to verify this @@ -142,7 +134,7 @@ def post(self): else: raise DetailedHTTPException(detail={"msg": "Verifying CSR failed",}, title="Failure in CSR verifying", - status=451) + status=403) sq.task("Verify Status Record") @@ -160,15 +152,16 @@ def post(self): else: raise DetailedHTTPException(detail={"msg": "Verifying CSR cr_id == CR cr_id failed",}, title="Failure in CSR verifying", - status=451) + status=403) # 2) CSR has link to previous CSR prev_csr_id_refers_to_null_as_it_should = crt.get_prev_record_id() == "null" if prev_csr_id_refers_to_null_as_it_should: debug_log.info("prev_csr_id_referred to null as it should.") else: + # TODO: Check here that the csr chain is intact. and then continue. 
raise DetailedHTTPException(detail={"msg": "Verifying CSR previous_id == 'null' failed",}, title="Failure in CSR verifying", - status=451) + status=403) verify_is_success = crt.verify_cr(slrt.get_cr_keys()) if verify_is_success: @@ -178,24 +171,28 @@ def post(self): raise DetailedHTTPException(detail={"msg": "Verifying CSR failed",}, title="Failure in CSR verifying") # 5) Previous CSR has not been withdrawn - # TODO Implement + # If previous_id is null this step can be ignored. + # Else fetch previous_id from db and check the status. sq.task("Store CR and CSR") store_dict = { "rs_id": crt.get_rs_id(), + "csr_id": crt.get_csr_id(), + "consent_status": crt.get_consent_status(), + "previous_record_id": crt.get_prev_record_id(), "cr_id": crt.get_cr_id_from_cr(), "surrogate_id": surr_id, "slr_id": crt.get_slr_id(), - "json": crt.get_CR_payload() # possibly store the base64 representation + "json": crt.cr["cr"] # possibly store the base64 representation } self.helpers.storeCR_JSON(store_dict) - store_dict["json"] = crt.get_CSR_payload() + store_dict["json"] = crt.cr["csr"] self.helpers.storeCSR_JSON(store_dict) if role == "Sink": debug_log.info("Requesting auth_token") - get_AuthToken.delay(crt.get_cr_id_from_cr(), self.operator_url) + get_AuthToken.delay(crt.get_cr_id_from_cr(), self.operator_url, current_app.config) return {"status": 200, "msg": "OK"}, 200 diff --git a/Service_Components/Service_Mgmnt/service_mgmnt.py b/Service_Components/Service_Mgmnt/service_mgmnt.py index 4033c76..7095141 100644 --- a/Service_Components/Service_Mgmnt/service_mgmnt.py +++ b/Service_Components/Service_Mgmnt/service_mgmnt.py @@ -52,6 +52,7 @@ def __init__(self): @error_handler def get(self): + code_storage = None try: sq.task("Generate code") code = str(guid()) @@ -61,6 +62,8 @@ def get(self): sq.reply_to("Operator_Components Mgmnt", "Returning code") return {'code': code} except Exception as e: + if code_storage is None: + code_storage = "code json structure is broken." 
raise DetailedHTTPException(exception=e, detail={"msg": "Most likely storing code failed.", "code_json": code_storage}, title="Failure in GenCode endpoint", @@ -72,35 +75,18 @@ def __init__(self): super(UserAuthenticated, self).__init__() keysize = current_app.config["KEYSIZE"] cert_key_path = current_app.config["CERT_KEY_PATH"] - Service_ID = "SRVMGNT-RSA-{}".format(keysize) - gen = {"generate": "EC", "cvr": "P-256", "kid": Service_ID} - gen2 = {"generate": "EC", "cvr": "P-256", "kid": Service_ID} - - gen3 = {"generate": "RSA", "size": keysize, "kid": Service_ID} - self.service_key = jwk.JWK(**gen3) - try: - with open(cert_key_path, "r") as cert_file: - service_key2 = jwk.JWK(**loads(load(cert_file))) - self.service_key = service_key2 - except Exception as e: - debug_log.error(e) - with open(cert_key_path, "w+") as cert_file: - dump(self.service_key.export(), cert_file, indent=2) - service_cert = self.service_key.export_public() - self.token_key = self.service_key - - templ = {Service_ID: {"cr_keys": loads(self.token_key.export_public())}} - protti = {"alg": "RS256"} - headeri = {"kid": Service_ID, "jwk": loads(self.service_key.export_public())} + self.helpers = Helpers(current_app.config) + self.service_key = self.helpers.get_key() self.service_url = current_app.config["SERVICE_URL"] self.operator_url = current_app.config["OPERATOR_URL"] - self.helpers = Helpers(current_app.config) + @timeme @error_handler def post(self): try: + debug_log.info("UserAuthenticated class, method post got json:") debug_log.info(request.json) user_id = request.json["user_id"] code = request.json["code"] @@ -111,9 +97,9 @@ def post(self): sq.task("Link code to generated surrogate_id") self.helpers.add_surrogate_id_to_code(request.json["code"], surrogate_id) data = {"surrogate_id": surrogate_id, "code": request.json["code"], - "token_key": loads(self.service_key.export_public())} + "token_key": self.service_key["pub"]} - sq.send_to("Service_Components", "Send surrogate_id to 
Service_Components") + sq.send_to("Service_Components", "Send surrogate_id to Service_Mockup") endpoint = "/api/1.2/slr/link" content_json = {"code": code, "surrogate_id": surrogate_id} result_service = post("{}{}".format(self.service_url, endpoint), json=content_json) @@ -152,7 +138,7 @@ def __init__(self): @error_handler def post(self): - + debug_log.info("SignInRedisrector class, method post got json:") debug_log.info(request.json) code = request.json @@ -186,6 +172,7 @@ def verify(jws, header): jws.verify(sign_key) return True except Exception as e: + debug_log.info("JWS verification failed with:") debug_log.info(repr(e)) try: @@ -208,6 +195,7 @@ def verify(jws, header): return True return False except Exception as e: + debug_log.info("JWS verification failed with:") debug_log.info("M:", repr(e)) return False @@ -228,37 +216,25 @@ def header_fix(malformed_dictionary): # We do not check if its malformed, we ex class StoreSLR(Resource): def __init__(self): super(StoreSLR, self).__init__() - keysize = current_app.config["KEYSIZE"] - cert_key_path = current_app.config["CERT_KEY_PATH"] - Service_ID = "SRVMGNT-RSA-{}".format(keysize) - gen = {"generate": "EC", "cvr": "P-256", "kid": Service_ID} - gen2 = {"generate": "EC", "cvr": "P-256", "kid": Service_ID} + config = current_app.config + keysize = config["KEYSIZE"] + cert_key_path = config["CERT_KEY_PATH"] + self.helpers = Helpers(config) + self.service_key = self.helpers.get_key() - gen3 = {"generate": "RSA", "size": keysize, "kid": Service_ID} - self.service_key = jwk.JWK(**gen3) - try: - with open(cert_key_path, "r") as cert_file: - service_key2 = jwk.JWK(**loads(load(cert_file))) - self.service_key = service_key2 - except Exception as e: - debug_log.error(e) - with open(cert_key_path, "w+") as cert_file: - dump(self.service_key.export(), cert_file, indent=2) - service_cert = self.service_key.export_public() - self.token_key = self.service_key # - templ = {Service_ID: {"cr_keys": 
loads(self.token_key.export_public())}} - self.protti = {"alg": "RS256"} - self.headeri = {"kid": Service_ID, "jwk": loads(self.service_key.export_public())} + self.protti = self.service_key["prot"] + self.headeri = self.service_key["header"] + + self.service_url = config["SERVICE_URL"] + self.operator_url = config["OPERATOR_URL"] - self.service_url = current_app.config["SERVICE_URL"] - self.operator_url = current_app.config["OPERATOR_URL"] - self.helpers = Helpers(current_app.config) @timeme @error_handler def post(self): try: + debug_log.info("StoreSLR class method post got json:") debug_log.info(dumps(request.json, indent=2)) sq.task("Load SLR to object") @@ -268,31 +244,32 @@ def post(self): sq.task("Load slr payload as object") payload = slr["payload"] payload = slr["payload"] - debug_log.info("Before Fix:{}".format(payload)) + debug_log.info("Before padding fix:{}".format(payload)) sq.task("Fix possible incorrect padding in payload") payload += '=' * (-len(payload) % 4) # Fix incorrect padding of base64 string. - debug_log.info("After Fix :{}".format(payload)) + debug_log.info("After padding fix :{}".format(payload)) - sq.task("Decode payload and store it into object") + sq.task("Decode SLR payload and store it into object") debug_log.info(payload.encode()) content = decode(payload.encode()) sq.task("Load decoded payload as python dict") - payload = loads( - loads(content.decode("utf-8"))) # TODO: Figure out why we get str out of loads the first time? 
- debug_log.info(payload) + payload = loads(content.decode("utf-8")) + debug_log.info("Decoded SLR payload:") debug_log.info(type(payload)) + debug_log.info(dumps(payload, indent=2)) + - sq.task("Fetch surrogate_id from decoded payload") + sq.task("Fetch surrogate_id from decoded SLR payload") surrogate_id = payload["surrogate_id"].encode() - debug_log.info(content) sq.task("Load code from json payload") code = request.json["data"]["code"].encode() + debug_log.info("SLR payload contained code: {}".format(code)) sq.task("Verify surrogate_id and code") - debug_log.info("Surrogate was found: {}".format(self.helpers.verifySurrogate(code, surrogate_id))) + debug_log.info("Surrogate {} has been verified for code {}.".format(self.helpers.verifySurrogate(code, surrogate_id), code)) except Exception as e: raise DetailedHTTPException(title="Verifying Surrogate ID failed", @@ -302,14 +279,13 @@ def post(self): try: sq.task("Create empty JSW object") jwssa = jws.JWS() - debug_log.info("SLR R:\n", loads(dumps(slr))) - debug_log.info(slr["header"]["jwk"]) + debug_log.info("SLR Received:\n"+(dumps(slr, indent=2))) sq.task("Deserialize slr to JWS object created before") jwssa.deserialize(dumps(slr)) - sq.task("Load JWK used to sign JWS from the slr headers into an object") - sign_key = jwk.JWK(**slr["header"]["jwk"]) + sq.task("Load JWK used to sign JWS from the slr payload's cr_keys field into an object") + sign_key = jwk.JWK(**payload["cr_keys"][0]) sq.task("Verify SLR was signed using the key shipped with it") debug_log.info(verifyJWS(slr)) @@ -321,12 +297,12 @@ def post(self): try: sq.task("Fix possible serialization errors in JWS") - faulty_JSON = loads(jwssa.serialize( - compact=False)) # For some reason serialization messes up "header" from "header": {} to "header": "{}" + faulty_JSON = loads(jwssa.serialize(compact=False)) # For some reason serialization messes up "header" from "header": {} to "header": "{}" faulty_JSON["header"] = faulty_JSON["header"] sq.task("Add our 
signature in the JWS") - jwssa.add_signature(self.service_key, alg="RS256", header=dumps(self.headeri), protected=dumps(self.protti)) + key = jwk.JWK(**self.service_key["key"]) + jwssa.add_signature(key, header=dumps(self.headeri), protected=dumps(self.protti)) sq.task("Fix possible header errors") fixed = header_fix(loads(jwssa.serialize(compact=False))) @@ -344,17 +320,18 @@ def post(self): sq.send_to("Operator_Components Mgmnt", "Verify SLR(JWS)") endpoint = "/api/1.2/slr/verify" result = post("{}{}".format(self.operator_url, endpoint), json=req) - debug_log.info(result.status_code) + debug_log.info("Sent SLR to Operator for verification, results:") + debug_log.info("status code:{}\nreason: {}\ncontent: {}".format(result.status_code, result.reason, result.content)) if result.ok: - sq.task("Store SLR into db") + sq.task("Store following SLR into db") store = loads(loads(result.text)) debug_log.debug(dumps(store, indent=2)) self.helpers.storeJSON({store["data"]["surrogate_id"]: store}) endpoint = "/api/1.2/slr/store_slr" + debug_log.info("Posting SLR for storage in Service Mockup") result = post("{}{}".format(self.service_url, endpoint), json=store) # Send copy to Service_Components else: - debug_log.debug(result.reason) raise DetailedHTTPException(status=result.status_code, detail={"msg": "Something went wrong while verifying SLR with Operator_SLR.", "Error from Operator_SLR": loads(result.text)}, diff --git a/Service_Components/Sink/Sink_DataFlow.py b/Service_Components/Sink/Sink_DataFlow.py new file mode 100644 index 0000000..2914b18 --- /dev/null +++ b/Service_Components/Sink/Sink_DataFlow.py @@ -0,0 +1,148 @@ +# -*- coding: utf-8 -*- +from signed_requests.signed_request_auth import SignedRequest + +__author__ = 'alpaloma' +from flask import Blueprint, current_app, request +from helpers import Helpers +import requests +from json import dumps, loads +from DetailedHTTPException import error_handler +from flask_restful import Resource, Api +import logging +from 
jwcrypto import jwk +from Templates import Sequences +debug_log = logging.getLogger("debug") +logger = logging.getLogger("sequence") +api_Sink_blueprint = Blueprint("api_Sink_blueprint", __name__) +api = Api() +api.init_app(api_Sink_blueprint) + +sq = Sequences("Service_Components Mgmnt (Sink)", {}) +# import xmltodict +# @api.representation('application/xml') +# def output_xml(data, code, headers=None): +# if isinstance(data, dict): +# xm = {"response": data} +# resp = make_response(xmltodict.unparse(xm, pretty=True), code) +# resp.headers.extend(headers) +# return resp + +class Status(Resource): + @error_handler + def get(self): + status = {"status": "running", "service_mode": "Sink"} + return status + +class DataFlow(Resource): + def __init__(self): + super(DataFlow, self).__init__() + self.service_url = current_app.config["SERVICE_URL"] + self.operator_url = current_app.config["OPERATOR_URL"] + self.helpers = Helpers(current_app.config) + + @error_handler + def post(self): # TODO Make this a GET + def renew_token(operator_url, record_id): + sq.task("Renewing Auth Token.") + token = requests.get( + "{}/api/1.2/cr/auth_token/{}".format(operator_url, record_id)) # TODO Get api path from some config? 
+ debug_log.info("{}, {}, {}, {}".format(token.url, token.reason, token.status_code, token.text)) + store_dict = {cr_id: dumps(loads(token.text.encode()))} + self.helpers.storeToken(store_dict) + + def step_1(): + params = request.json + debug_log.info(params) + debug_log.info(request.json) + user_id = params["user_id"] + cr_id = params["cr_id"] + rs_id = params["rs_id"] + sq.task("Get data_set_id from POST json") + data_set_id = request.args.get("dataset_id", None) + debug_log.info("data_set_id is ({}), cr_id is ({}), user_id ({}) and rs_id ({})" + .format(data_set_id, cr_id, user_id, rs_id)) + sq.task("Create request") + req = {"we want": "data"} + + sq.task("Validate CR") + cr = self.helpers.validate_cr(cr_id, surrogate_id=user_id) + + sq.task("Validate Request from UI") + distribution_urls = self.helpers.validate_request_from_ui(cr, data_set_id, rs_id) + + # Fetch data request urls + # Data request urls fetched. + debug_log.info("Data request urls fetched.") + return cr_id, cr, distribution_urls + cr_id, cr, distribution_urls = step_1() + + sq.task("Validate Authorisation Token") + surrogate_id = cr["cr"]["common_part"]["surrogate_id"] + our_key = self.helpers.get_key() + our_key_pub = our_key["pub"] + tries = 3 # TODO: Get this from config + while True: + try: + aud = self.helpers.validate_authorization_token(cr_id, surrogate_id, our_key_pub) + break + except ValueError as e: + debug_log.exception(e) + renew_token(self.operator_url, cr_id) + if tries == 0: + raise EnvironmentError("Auth token validation failed and retry counter exceeded.") + tries -= 1 + except TypeError as e: + debug_log.exception(e) + raise EnvironmentError("Token used too soon, halting.") + + # Most verifying and checking below is done in the validate_authorization_token function by jwcrypto + # Fetch Authorisation Token related to CR from data storage by rs_id (cr_id?) 
+ # Check Integrity ( Signed by operator, Operator's public key can be found from SLR) + # Check "Issued" timestamp + # Check "Not Before" timestamp + # Check "Not After" timestamp + + # Check that "sub" contains correct public key(Our key.) + + # OPT: Token expired + # Get new Authorization token, start again from validation. # TODO: Make these steps work as functions that call the next step. + + # Check URL patterns in "aud" field + # Check that fetched distribution urls can be found from "aud" field + + + # Token validated + debug_log.info("Auth Token Validated.") + # With these two steps Sink has verified that it's allowed to make request. + + # Construct request + sq.task("Construct request") + # Select request URL from "aud" field + # Add Authorisation Token to request + # Request constructed. + # Sign request + # Fetch private key pair of public key specified in Authorisation Token's "sub" field. + # Sign with fetched private key + sq.task("Fetch key used to sign request") + our_key_full = jwk.JWK() + our_key_full.import_key(**our_key["key"]) + # Add signature to request + # Request signed. + # Request created. 
+ sq.send_to("Service_Components Mgmnt (Source)", "Data Request (PoP stuff)") + # Make Data Request + for url in distribution_urls: + req = requests.get(url, + auth=SignedRequest(token=aud, sign_method=True, sign_path=True, key=our_key_full, protected=dumps(our_key["prot"]))) + debug_log.info("Made data request and received following data from Source: \n{}" + .format(dumps(loads(req.content), indent=2))) + status = {"status": "ok", "service_mode": "Sink"} + return status + + + +api.add_resource(Status, '/init') +api.add_resource(DataFlow, '/dc') + +#api.add_resource(DataFlow, '/user//consentRecord//resourceSet/') +#"http://service_components:7000/api/1.2/sink_flow/user/95479a08-80cc-4359-ba28-b8ca23ff5572_53af88dc-33de-44be-bc30-e0826db9bd6c/consentRecord/cd431509-777a-4285-8211-95c5ac577537/resourceSet/http%3A%2F%2Fservice_components%3A7000%7C%7C9aebb487-0c83-4139-b12c-d7fcea93a3ad" \ No newline at end of file diff --git a/Service_Components/Sink/__init__.py b/Service_Components/Sink/__init__.py new file mode 100644 index 0000000..51aa6d3 --- /dev/null +++ b/Service_Components/Sink/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +from functools import wraps +from flask_restful import Api +import logging + +import factory + +def create_app(settings_override=None, register_security_blueprint=False): + """Returns the Overholt API application instance""" + + app, apis = factory.create_app(__name__, __path__, settings_override, + register_security_blueprint=register_security_blueprint) + debug_log = logging.getLogger("debug") + debug_log.info("Started up Service Components, Service_Sink module successfully.") + return app \ No newline at end of file diff --git a/Service_Components/Source/Source_DataFlow.py b/Service_Components/Source/Source_DataFlow.py new file mode 100644 index 0000000..44fc285 --- /dev/null +++ b/Service_Components/Source/Source_DataFlow.py @@ -0,0 +1,125 @@ +# -*- coding: utf-8 -*- +__author__ = 'alpaloma' + +from DetailedHTTPException import 
error_handler +from flask import Blueprint, request, current_app +from flask_restful import Resource, Api +from helpers import Helpers, Token_tool +import logging +from jwcrypto import jwk, jwt, jws +from json import loads, dumps +from Templates import Sequences +from signed_requests.json_builder import pop_handler +debug_log = logging.getLogger("debug") +logger = logging.getLogger("sequence") +api_Source_blueprint = Blueprint("api_Source_blueprint", __name__) +api = Api() +api.init_app(api_Source_blueprint) + +sq = Sequences("Service_Components Mgmnt (Source)", {}) +# import xmltodict +# @api.representation('application/xml') +# def output_xml(data, code, headers=None): +# if isinstance(data, dict): +# xm = {"response": data} +# resp = make_response(xmltodict.unparse(xm, pretty=True), code) +# resp.headers.extend(headers) +# return resp + +class Status(Resource): + @error_handler + def get(self): + status = {"status": "running", "service_mode": "Source"} + return status + +class DataRequest(Resource): + def __init__(self): + super(DataRequest, self).__init__() + self.service_url = current_app.config["SERVICE_URL"] + self.operator_url = current_app.config["OPERATOR_URL"] # TODO: Where do we really get this? 
+ self.helpers = Helpers(current_app.config) + + @error_handler + def get(self): + sq.task("Fetch PoP from authorization header") + authorization = request.headers["Authorization"] + debug_log.info(authorization) + pop_h = pop_handler(token=authorization.split(" ")[1]) # TODO: Logic to pick up PoP + sq.task("Fetch at field from PoP") + decrypted_pop_token = loads(pop_h.get_at()) + debug_log.info("Token verified state should be False here, it is: {}".format(pop_h.verified)) + + debug_log.info(type(decrypted_pop_token)) + debug_log.info(dumps(decrypted_pop_token, indent=2)) + + + sq.task("Decrypt auth_token from PoP and get cr_id.") + token = decrypted_pop_token["at"]["auth_token"] + jws_holder = jwt.JWS() + jws_holder.deserialize(raw_jws=token) + auth_token_payload = loads(jws_holder.__dict__["objects"]["payload"]) + debug_log.info("We got auth_token_payload: {}".format(auth_token_payload)) + + cr_id = auth_token_payload["pi_id"] + debug_log.info("We got cr_id {} from auth_token_payload.".format(cr_id)) + + sq.task("Fetch surrogate_id with cr_id") + surrogate_id = self.helpers.get_surrogate_from_cr_id(cr_id) + + sq.task("Verify CR") + cr = self.helpers.validate_cr(cr_id, surrogate_id) + pop_key = cr["cr"]["role_specific_part"]["pop_key"] + pop_key = jwk.JWK(**pop_key) + + + token_issuer_key = cr["cr"]["role_specific_part"]["token_issuer_key"] + token_issuer_key = jwk.JWK(**token_issuer_key) + + sq.task("Validate auth token") + auth_token = jwt.JWT(jwt=token, key=token_issuer_key) + + debug_log.info("Following auth_token claims successfully verified with token_issuer_key: {}".format(auth_token.claims)) + + sq.task("Validate Request(PoP token)") + pop_h = pop_handler(token=authorization.split(" ")[1], key=pop_key) + decrypted_pop_token = loads(pop_h.get_at()) # This step affects verified state of object. 
+ debug_log.info("Token verified state should be True here, it is: {}".format(pop_h.verified)) + # Validate Request + if pop_h.verified is False: + raise ValueError("Request verification failed.") + + + # Check that related Consent Record exists with the same rs_id # TODO: Bunch of these comments may be outdated, check them all. + # Check that auth_token_issuer_key field of CR matches iss-field in Authorization token + # Check Token's integrity against the signature + # Check Token's validity period includes time of data request + # Check Token's "aud" field includes the URI to which the data request was made + # Token validated. + + # Validate request # TODO: Check that we fill this properly, we should though. + # Check that request was signed with the key in the Token + # Request validated. + + # Validate related CR # TODO: Recheck what this should hold and compare what we do. + # Validate the related Consent Record as defined in MyData Authorisation Specification + # CR Validated. + + # OPT: Introspection # TODO: Implement + # introspect = is_introspection_necessary() + try: + sq.task("Intropection") + self.helpers.introspection(cr_id, self.operator_url) + sq.task("Return requested data.") + return {"Some test data": "like so", "and it continues": "like so!"} + except LookupError as e: + debug_log.exception(e) + return {"error message is": "appropriate."} + # Process request + # Return. 
+ + status = {"status": "running", "service_mode": "Source"} + return status + +api.add_resource(DataRequest, '/datarequest') +api.add_resource(Status, '/init') + diff --git a/Service_Components/Source/__init__.py b/Service_Components/Source/__init__.py new file mode 100644 index 0000000..e3ddf79 --- /dev/null +++ b/Service_Components/Source/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +from functools import wraps +from flask_restful import Api +import logging + +import factory + +def create_app(settings_override=None, register_security_blueprint=False): + """Returns the Overholt API application instance""" + + app, apis = factory.create_app(__name__, __path__, settings_override, + register_security_blueprint=register_security_blueprint) + debug_log = logging.getLogger("debug") + debug_log.info("Started up Service Components, Service_Source module successfully.") + return app \ No newline at end of file diff --git a/Service_Components/Templates.py b/Service_Components/Templates.py index c9461f0..b4a8123 100644 --- a/Service_Components/Templates.py +++ b/Service_Components/Templates.py @@ -1,73 +1,95 @@ # -*- coding: utf-8 -*- from time import time -import logging -from json import dumps, loads - -from base64 import urlsafe_b64decode as decode - - -from base64 import urlsafe_b64decode as decode -from json import loads -from jwcrypto import jws, jwk #### Schemas source_cr_schema = { "$schema": "http://json-schema.org/draft-04/schema#", "type": "object", "properties": { - "extensions": { - "type": "object", - "properties": {} - }, - "ki_cr": { - "type": "object", - "properties": {} - }, "common_part": { "type": "object", "properties": { - "issued_at": { - "type": "string" - }, - "surrogate_id": { - "type": "string" - }, - "subject_id": { + "version": { "type": "string" }, "cr_id": { "type": "string" }, - "version_number": { + "surrogate_id": { "type": "string" }, - "not_before": { - "type": "string" + "rs_description": { + "type": "object", + "properties": { 
+ "resource_set": { + "type": "object", + "properties": { + "rs_id": { + "type": "string" + }, + "dataset": { + "type": "array", + "items": { + "type": "object", + "properties": { + "dataset_id": { + "type": "string" + }, + "distribution_id": { + "type": "string" + } + }, + "required": [ + "dataset_id", + "distribution_id" + ] + } + } + }, + "required": [ + "rs_id", + "dataset" + ] + } + }, + "required": [ + "resource_set" + ] }, "slr_id": { "type": "string" }, - "issued": { + "iat": { + "type": "integer" + }, + "nbf": { + "type": "integer" + }, + "exp": { + "type": "integer" + }, + "operator": { "type": "string" }, - "not_after": { + "subject_id": { "type": "string" }, - "rs_id": { + "role": { "type": "string" } }, "required": [ - "issued_at", - "surrogate_id", - "subject_id", + "version", "cr_id", - "version_number", - "not_before", + "surrogate_id", + "rs_description", "slr_id", - "issued", - "not_after", - "rs_id" + "iat", + "nbf", + "exp", + "operator", + "subject_id", + "role" ] }, "role_specific_part": { @@ -76,11 +98,62 @@ "auth_token_issuer_key": { "type": "object", "properties": {} + } + }, + "required": [ + "token_issuer_key" + ] + }, + "consent_receipt_part": { + "type": "object", + "properties": { + "ki_cr": { + "type": "object", + "properties": {} + } + }, + "required": [ + "ki_cr" + ] + }, + "extension_part": { + "type": "object", + "properties": { + "extensions": { + "type": "object", + "properties": {} + } + }, + "required": [ + "extensions" + ] + } + }, + "required": [ + "common_part", + "role_specific_part", + "consent_receipt_part", + "extension_part" + ] +} + +sink_cr_schema = { + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "common_part": { + "type": "object", + "properties": { + "version": { + "type": "string" }, - "role": { + "cr_id": { + "type": "string" + }, + "surrogate_id": { "type": "string" }, - "resource_set_description": { + "rs_description": { "type": "object", "properties": { 
"resource_set": { @@ -94,16 +167,16 @@ "items": { "type": "object", "properties": { - "distribution_id": { + "dataset_id": { "type": "string" }, - "dataset_id": { + "distribution_id": { "type": "string" } }, "required": [ - "distribution_id", - "dataset_id" + "dataset_id", + "distribution_id" ] } } @@ -117,106 +190,91 @@ "required": [ "resource_set" ] - } - }, - "required": [ - "auth_token_issuer_key", - "role", - "resource_set_description" - ] - } - }, - "required": [ - "extensions", - "ki_cr", - "common_part", - "role_specific_part" - ] -} - -sink_cr_schema = { - "$schema": "http://json-schema.org/draft-04/schema#", - "type": "object", - "properties": { - "extensions": { - "type": "object", - "properties": {} - }, - "ki_cr": { - "type": "object", - "properties": {} - }, - "common_part": { - "type": "object", - "properties": { - "issued_at": { - "type": "string" }, - "surrogate_id": { - "type": "string" - }, - "subject_id": { - "type": "string" - }, - "cr_id": { + "slr_id": { "type": "string" }, - "version_number": { - "type": "string" + "iat": { + "type": "integer" }, - "not_before": { - "type": "string" + "nbf": { + "type": "integer" }, - "slr_id": { - "type": "string" + "exp": { + "type": "integer" }, - "issued": { + "operator": { "type": "string" }, - "not_after": { + "subject_id": { "type": "string" }, - "rs_id": { + "role": { "type": "string" } }, "required": [ - "issued_at", - "surrogate_id", - "subject_id", + "version", "cr_id", - "version_number", - "not_before", + "surrogate_id", + "rs_description", "slr_id", - "issued", - "not_after", - "rs_id" + "iat", + "nbf", + "exp", + "operator", + "subject_id", + "role" ] }, "role_specific_part": { "type": "object", "properties": { - "role": { - "type": "string" - }, "usage_rules": { "type": "array", "items": { "type": "string" } + }, + "source_cr_id": { + "type": "string" } }, "required": [ - "role", - "usage_rules" + "usage_rules", + "source_cr_id" + ] + }, + "consent_receipt_part": { + "type": "object", + 
"properties": { + "ki_cr": { + "type": "object", + "properties": {} + } + }, + "required": [ + "ki_cr" + ] + }, + "extension_part": { + "type": "object", + "properties": { + "extensions": { + "type": "object", + "properties": {} + } + }, + "required": [ + "extensions" ] } }, "required": [ - "extensions", - "ki_cr", "common_part", - "role_specific_part" + "role_specific_part", + "consent_receipt_part", + "extension_part" ] } @@ -233,11 +291,14 @@ #### +import logging +from json import dumps, loads class Sequences: def __init__(self, name, seq=False): ''' :param name: + :param seq: seq should always be dictionary with "seq" field. ''' self.logger = logging.getLogger("sequence") self.name = name @@ -273,4 +334,4 @@ def dump_sequence(self): def seq_form(self, line, seq): self.logger.info(dumps({"seq": line, "time": time()})) - return {"seq": {}} + return {"seq": {}} \ No newline at end of file diff --git a/Service_Components/db_handler.py b/Service_Components/db_handler.py index ba934b7..439d937 100644 --- a/Service_Components/db_handler.py +++ b/Service_Components/db_handler.py @@ -1,44 +1,18 @@ # -*- coding: utf-8 -*- -import sqlite3 +import logging +import MySQLdb -def get_db(db_path): +debug_log = logging.getLogger("debug") +def get_db(host, user, password, database, port): db = None if db is None: - db = sqlite3.connect(db_path) - db.row_factory = sqlite3.Row - try: - init_db(db) - except Exception as e: - pass + db = MySQLdb.connect(host=host, user=user, passwd=password, db=database, port=port) return db + def make_dicts(cursor, row): return dict((cursor.description[idx][0], value) for idx, value in enumerate(row)) -def sqlite_create_table(conn, table_name, table_columns): - conn.cursor.execute("CREATE TABLE {} ({});".format(table_name, ",".join(table_columns))) - conn.commit() -def init_db(conn): - # create db for codes - conn.execute('''CREATE TABLE csr_storage - (cr_id TEXT PRIMARY KEY NOT NULL, - json TEXT NOT NULL, - slr_id TEXT NOT NULL, - surrogate_id TEXT 
NOT NULL, - rs_id TEXT NOT NULL);''') - conn.execute('''CREATE TABLE cr_storage - (cr_id TEXT PRIMARY KEY NOT NULL, - json TEXT NOT NULL, - slr_id TEXT NOT NULL, - surrogate_id TEXT NOT NULL, - rs_id TEXT NOT NULL);''') - conn.execute('''CREATE TABLE codes - (ID TEXT PRIMARY KEY NOT NULL, - code TEXT NOT NULL);''') - conn.execute('''CREATE TABLE storage - (surrogate_id TEXT PRIMARY KEY NOT NULL, - json TEXT NOT NULL);''') - conn.commit() \ No newline at end of file diff --git a/Service_Components/doc/api/swagger_Authorization_Management.yml b/Service_Components/doc/api/swagger_Authorization_Management.yml index ed495a7..ab85594 100644 --- a/Service_Components/doc/api/swagger_Authorization_Management.yml +++ b/Service_Components/doc/api/swagger_Authorization_Management.yml @@ -27,15 +27,15 @@ paths: 200: description: "Returns 200 OK" 500: - description: "Internal server error" + description: "Internal server error. The actual status code and content of the error message may vary depending on error occurred." schema: $ref: "#/definitions/errors" 400: description: "Bad Request" schema: $ref: "#/definitions/errors" - 451: - description: "Unavailable For Legal Reasons" + 403: + description: "Forbidden" schema: $ref: "#/definitions/errors" definitions: diff --git a/Service_Components/doc/api/swagger_Service_Mgmnt.yml b/Service_Components/doc/api/swagger_Service_Mgmnt.yml index 1a39679..7291952 100644 --- a/Service_Components/doc/api/swagger_Service_Mgmnt.yml +++ b/Service_Components/doc/api/swagger_Service_Mgmnt.yml @@ -6,13 +6,30 @@ info: host: "example.service_mgmnt.example" basePath: "/api/1.2/slr" paths: + /code: + get: + tags: + - "Operator" + - "Service Management" + description: "Generates, store and returns unique code for the SLR flow" + parameters: [] + responses: + 200: + description: "Returns the code used in next steps of SLR flow. This is required\ + \ on most endpoints that follow." 
+ schema: + $ref: "#/definitions/CodeResponse" + 500: + description: "Internal server error. The actual status code and content of the error message may vary depending on error occurred." + schema: + $ref: "#/definitions/errors" /auth: post: tags: - "Service Management" - "Service" description: "Service calls this after user has authenticated. Causes Service\ - \ Management to generate surrogate_id for the operator." + \ Management to generate surrogate_id for the operator. This endpoint starts a long chain of events." consumes: - "application/json" produces: @@ -29,26 +46,10 @@ paths: description: "Returns 200 OK so Service Mngmt knows SLR was verified\ \ successfully" 500: - description: "Internal server error" - schema: - $ref: "#/definitions/errors" - /code: - get: - tags: - - "Operator" - - "Service Management" - description: "Generates, store and returns unique code for the SLR flow" - parameters: [] - responses: - 200: - description: "Returns the code used in next steps of SLR flow. This is required\ - \ on most endpoints that follow." - schema: - $ref: "#/definitions/CodeResponse" - 500: - description: "Internal server error" + description: "Internal server error. The actual status code and content of the error message may vary depending on error occurred." schema: $ref: "#/definitions/errors" + /login: post: tags: @@ -69,9 +70,9 @@ paths: $ref: "#/definitions/LoginParams" responses: 200: - description: "Returns 200 OK or Error message" + description: "Returns 200 OK" 500: - description: "Internal server error" + description: "Internal server error. The actual status code and content of the error message may vary depending on error occurred." schema: $ref: "#/definitions/errors" /slr: @@ -79,7 +80,7 @@ paths: tags: - "Service Management" - "Operator" - description: "Takes SLR signed by Operator, signs it with own key and sends\ + description: "Takes SLR signed by account owner at Operator, signs it with own key and sends\ \ to Operator for verification." 
consumes: - "application/json" @@ -94,9 +95,9 @@ paths: $ref: "#/definitions/SlrParams" responses: 200: - description: "Returns 200 OK or Error message" + description: "Returns 200 OK" 500: - description: "Internal server error" + description: "Internal server error. The actual status code and content of the error message may vary depending on error occurred." schema: $ref: "#/definitions/errors" definitions: diff --git a/Service_Components/doc/api/swagger_Sink_DC.yaml b/Service_Components/doc/api/swagger_Sink_DC.yaml new file mode 100644 index 0000000..42c2e2c --- /dev/null +++ b/Service_Components/doc/api/swagger_Sink_DC.yaml @@ -0,0 +1,71 @@ +--- +swagger: "2.0" +info: + version: "1.2" + title: "Sink DC API" +host: "example.sink.example" +basePath: "/api/1.2/sink_flow" +paths: + /dc: + get: + tags: + - "Sink" + description: "End point that initializes data connection flow " + consumes: + - "application/json" + produces: + - "application/json" + parameters: + - in: "body" + name: "body" + description: "payload detailing data" + required: true + schema: + $ref: "#/definitions/DcPayload" + - in: "query" + name: dataset_id + description: "Dataset we want to request data for" + type: string + responses: + 200: + description: "Returns 200 OK" + + 500: + description: "Internal server error. The actual status code and content of the error message may vary depending on error occurred." + schema: + $ref: "#/definitions/errors" + + + +definitions: + errors: + type: object + properties: + status: + type: string + description: HTTP status code as string value. + code: + type: integer + description: HTTP status code + title: + type: string + description: Title of error message. + detail: + type: string + description: Detailed error message. 
+ source: + type: string + description: Source URI + + DcPayload: + type: object + properties: + surrogate_id: + type: string + description: string containing surrogate_id + cr_id: + type: string + description: string containing cr_id + rs_id: + type: string + description: string containing rs_id diff --git a/Service_Components/doc/api/swagger_Source_DC.yaml b/Service_Components/doc/api/swagger_Source_DC.yaml new file mode 100644 index 0000000..1e811ad --- /dev/null +++ b/Service_Components/doc/api/swagger_Source_DC.yaml @@ -0,0 +1,57 @@ +--- +swagger: "2.0" +info: + version: "1.2" + title: "Source DC API" +host: "example.source.example" +basePath: "/api/1.2/source_flow" +paths: + /datarequest: + get: + tags: + - "Source" + description: "End point that receives data request from sink. Contains PoP in Authorization field in Header" + consumes: + - "application/json" + produces: + - "application/json" + parameters: + - in: "header" + name: "PoP-token" + description: "Authorization Token" + required: true + type: string + + responses: + 200: + description: "Returns 200" + schema: + $ref: "#/definitions/data" + 500: + description: "Internal server error. The actual status code and content of the error message may vary depending on error occurred." + schema: + $ref: "#/definitions/errors" + +definitions: + errors: + type: object + properties: + status: + type: string + description: HTTP status code as string value. + code: + type: integer + description: HTTP status code + title: + type: string + description: Title of error message. + detail: + type: string + description: Detailed error message. 
+ source: + type: string + description: Source URI + + data: + type: object + diff --git a/Service_Components/doc/database/Service_Components-DBinit.sql b/Service_Components/doc/database/Service_Components-DBinit.sql new file mode 100644 index 0000000..ba9719e --- /dev/null +++ b/Service_Components/doc/database/Service_Components-DBinit.sql @@ -0,0 +1,103 @@ +-- MySQL Script generated by MySQL Workbench +-- to 15. syyskuuta 2016 14.58.51 +-- Model: New Model Version: 1.0 +-- MySQL Workbench Forward Engineering + +SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0; +SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0; +SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE=''; + +-- ----------------------------------------------------- +-- Schema mydb +-- ----------------------------------------------------- +-- ----------------------------------------------------- +-- Schema db_Srv +-- ----------------------------------------------------- + +-- ----------------------------------------------------- +-- Schema db_Srv +-- ----------------------------------------------------- +CREATE SCHEMA IF NOT EXISTS `db_Srv` DEFAULT CHARACTER SET utf8 ; +USE `db_Srv` ; + +-- ----------------------------------------------------- +-- Table `db_Srv`.`codes` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Srv`.`codes` ; + +CREATE TABLE IF NOT EXISTS `db_Srv`.`codes` ( + `ID` LONGTEXT NOT NULL, + `code` LONGTEXT NOT NULL, + PRIMARY KEY (`ID`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER SET = utf8; + + +-- ----------------------------------------------------- +-- Table `db_Srv`.`cr_storage` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Srv`.`cr_storage` ; + +CREATE TABLE IF NOT EXISTS `db_Srv`.`cr_storage` ( + `cr_id` LONGTEXT NOT NULL, + `json` LONGTEXT NOT NULL, + `slr_id` LONGTEXT NOT NULL, + `surrogate_id` LONGTEXT NOT NULL, + `rs_id` LONGTEXT NOT NULL, + PRIMARY KEY (`cr_id`(255))) +ENGINE = InnoDB 
+DEFAULT CHARACTER SET = utf8; + + +-- ----------------------------------------------------- +-- Table `db_Srv`.`csr_storage` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Srv`.`csr_storage` ; + +CREATE TABLE IF NOT EXISTS `db_Srv`.`csr_storage` ( + `cr_id` VARCHAR(255) NOT NULL, + `csr_id` VARCHAR(255) NOT NULL, + `previous_record_id` VARCHAR(255) NOT NULL, + `consent_status` VARCHAR(10) NOT NULL, + `json` LONGTEXT NOT NULL, + `slr_id` LONGTEXT NOT NULL, + `surrogate_id` LONGTEXT NOT NULL, + `rs_id` LONGTEXT NOT NULL, + PRIMARY KEY (`csr_id`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER SET = utf8; + + +-- ----------------------------------------------------- +-- Table `db_Srv`.`storage` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Srv`.`storage` ; + +CREATE TABLE IF NOT EXISTS `db_Srv`.`storage` ( + `surrogate_id` LONGTEXT NOT NULL, + `json` LONGTEXT NOT NULL, + PRIMARY KEY (`surrogate_id`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER SET = utf8; + + +-- ----------------------------------------------------- +-- Table `db_Srv`.`token_storage` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Srv`.`token_storage` ; + +CREATE TABLE IF NOT EXISTS `db_Srv`.`token_storage` ( + `cr_id` LONGTEXT NOT NULL, + `token` LONGTEXT NOT NULL, + PRIMARY KEY (`cr_id`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER SET = utf8; + + + +SET SQL_MODE=@OLD_SQL_MODE; +SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS; +SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS; +CREATE USER 'service'@'%' IDENTIFIED BY 'MynorcA'; +GRANT CREATE TEMPORARY TABLES, DELETE, DROP, INSERT, LOCK TABLES, SELECT, UPDATE ON db_Srv.* TO 'service'@'%'; +FLUSH PRIVILEGES; \ No newline at end of file diff --git a/Service_Components/doc/database/Service_Components_db_image-v001.png b/Service_Components/doc/database/Service_Components_db_image-v001.png new file mode 100644 index 0000000..19802ea Binary files /dev/null and 
b/Service_Components/doc/database/Service_Components_db_image-v001.png differ diff --git a/Service_Components/doc/deployment.md b/Service_Components/doc/deployment.md index 4ed0b28..e52582e 100644 --- a/Service_Components/doc/deployment.md +++ b/Service_Components/doc/deployment.md @@ -43,5 +43,5 @@ You can find configurable settings in /instance/settings.py cd Service_Components/ source virtual_env_2_7/bin/activate; - nohup celery -A tasks worker > op_celery.log & - nohup python wsgi.py > srv_flask.log & \ No newline at end of file + nohup celery -A srv_tasks worker > srv_celery.log & + nohup python wsgi.py > srv_flask.log & diff --git a/Service_Components/factory.py b/Service_Components/factory.py index 9791bee..8ab9a34 100644 --- a/Service_Components/factory.py +++ b/Service_Components/factory.py @@ -38,7 +38,7 @@ def create_celery_app(app=None): if app is not None: app = app else: - app, apis = create_app('service_component', os.path.dirname(__file__)) + app, apis = create_app('srv_queue', os.path.dirname(__file__)) celery = Celery(__name__, broker=app.config['SELERY_BROKER_URL']) celery.conf.update(app.config) TaskBase = celery.Task diff --git a/Service_Components/helpers.py b/Service_Components/helpers.py index 33b5cfb..7f33415 100644 --- a/Service_Components/helpers.py +++ b/Service_Components/helpers.py @@ -1,19 +1,23 @@ # -*- coding: utf-8 -*- -import pkgutil import importlib - +import logging +import pkgutil +import urllib +from json import dumps, load, dump +import time +from datetime import datetime from flask import Blueprint from flask_restful import Api -import logging - -from json import dumps, loads -debug_log = logging.getLogger("debug") import jsonschema import db_handler -from sqlite3 import OperationalError, IntegrityError -from DetailedHTTPException import DetailedHTTPException +from requests import get, post +from sqlite3 import IntegrityError +from DetailedHTTPException import DetailedHTTPException + +debug_log = logging.getLogger("debug") 
-def validate_json(schema, json): # "json" here needs to be python dict. + +def validate_json(schema, json): # "json" here needs to be python dict. errors = [] validator = jsonschema.Draft4Validator(schema) validator.check_schema(schema) @@ -23,136 +27,478 @@ def validate_json(schema, json): # "json" here needs to be python dict. return errors - class Helpers: def __init__(self, app_config): - self.db_path = app_config["DATABASE_PATH"] - - def query_db(self, query, args=(), one=False): - db = db_handler.get_db(self.db_path) - cur = db.execute(query, args) - rv = cur.fetchall() - cur.close() - return (rv[0] if rv else None) if one else rv + self.host = app_config["MYSQL_HOST"] + self.cert_key_path = app_config["CERT_KEY_PATH"] + self.keysize = app_config["KEYSIZE"] + self.user = app_config["MYSQL_USER"] + self.passwd = app_config["MYSQL_PASSWORD"] + self.db = app_config["MYSQL_DB"] + self.port = app_config["MYSQL_PORT"] + self.service_id = app_config["SERVICE_ID"] + + def get_key(self): + keysize = self.keysize + cert_key_path = self.cert_key_path + gen3 = {"generate": "RSA", "size": keysize, "kid": self.service_id} + service_key = jwk.JWK(**gen3) + try: + with open(cert_key_path, "r") as cert_file: + service_key2 = jwk.JWK(**loads(load(cert_file))) + service_key = service_key2 + except Exception as e: + debug_log.error(e) + with open(cert_key_path, "w+") as cert_file: + dump(service_key.export(), cert_file, indent=2) + public_key = loads(service_key.export_public()) + full_key = loads(service_key.export()) + protti = {"alg": "RS256"} + headeri = {"kid": self.service_id, "jwk": public_key} + return {"pub": public_key, + "key": full_key, + "prot": protti, + "header": headeri} + + def query_db(self, query, args=()): + ''' + Simple queries to DB + :param query: SQL query + :param args: Arguments to inject into the query + :return: Single hit for the given query + ''' + db = db_handler.get_db(host=self.host, password=self.passwd, user=self.user, port=self.port, 
database=self.db) + cursor = db.cursor() + cur = cursor.execute(query, args) + try: + rv = cursor.fetchone() # Returns tuple + debug_log.info(rv) + if rv is not None: + db.close() + return rv[1] # The second value in the tuple. + else: + return None + except Exception as e: + debug_log.info("query_db failed with error:") + debug_log.exception(e) + debug_log.info(cur) + db.close() + return None def storeJSON(self, DictionaryToStore): - db = db_handler.get_db(self.db_path) - try: - db_handler.init_db(db) - except OperationalError: - pass + """ + Store SLR into database + :param DictionaryToStore: Dictionary in form {"key" : "dict_to_store"} + :return: + """ + db = db_handler.get_db(host=self.host, password=self.passwd, user=self.user, port=self.port, database=self.db) + cursor = db.cursor() + debug_log.info("Storing dictionary:") debug_log.info(DictionaryToStore) for key in DictionaryToStore: + debug_log.info("Storing key:") debug_log.info(key) try: - db.execute("INSERT INTO storage (surrogate_id,json) \ - VALUES (?, ?)", [key, dumps(DictionaryToStore[key])]) + cursor.execute("INSERT INTO storage (surrogate_id,json) \ + VALUES (%s, %s)", (key, dumps(DictionaryToStore[key]))) db.commit() except IntegrityError as e: - db.execute("UPDATE storage SET json=? WHERE surrogate_id=? 
;", [dumps(DictionaryToStore[key]), key]) + cursor.execute("UPDATE storage SET json=%s WHERE surrogate_id=%s ;", + (dumps(DictionaryToStore[key]), key)) + db.commit() + db.close() + + def storeToken(self, DictionaryToStore): + """ + Store token into database + :param DictionaryToStore: Dictionary in form {"key" : "dict_to_store"} + :return: + """ + db = db_handler.get_db(host=self.host, password=self.passwd, user=self.user, port=self.port, database=self.db) + cursor = db.cursor() + for key in DictionaryToStore: + try: + cursor.execute("INSERT INTO token_storage (cr_id,token) \ + VALUES (%s, %s)", (key, dumps(DictionaryToStore[key]))) db.commit() + except IntegrityError as e: # Rewrite incase we get new token. + cursor.execute("UPDATE token_storage SET token=? WHERE cr_id=%s ;", + (dumps(DictionaryToStore[key]), key)) + db.commit() + db.close() def storeCode(self, code): - db = db_handler.get_db(self.db_path) - try: - db_handler.init_db(db) - except OperationalError: - pass + """ + Store generated code into database + :param code: + :return: None + """ + db = db_handler.get_db(host=self.host, password=self.passwd, user=self.user, port=self.port, database=self.db) + cursor = db.cursor() code_key = list(code.keys())[0] code_value = code[code_key] - db.execute("INSERT INTO codes (ID,code) \ - VALUES (?, ?)", [code_key, code_value]) + cursor.execute("INSERT INTO codes (ID,code) \ + VALUES (%s, %s)", (code_key, code_value)) db.commit() - - debug_log.info("{} {}".format(code_key, code_value)) - for code in self.query_db("select * from codes where ID = ?;", [code_key]): - debug_log.info(code["code"]) + debug_log.info("Storing code(key,value): {}, {}".format(code_key, code_value)) db.close() def add_surrogate_id_to_code(self, code, surrogate_id): - db = db_handler.get_db(self.db_path) - try: - db_handler.init_db(db) - except OperationalError: - pass - for code in self.query_db("select * from codes where code = ?;", [code]): - code_from_db = code["code"] - 
code_is_valid_and_unused = "!" in code_from_db - if (code_is_valid_and_unused): - db.execute("UPDATE codes SET code=? WHERE ID=? ;", [surrogate_id, code]) - db.commit() - else: - raise Exception("Invalid code") + """ + Link code with a surrogate_id + :param code: + :param surrogate_id: + :return: None + """ + db = db_handler.get_db(host=self.host, password=self.passwd, user=self.user, port=self.port, database=self.db) + cursor = db.cursor() + debug_log.info("Code we look up is {}".format(code)) + code = self.query_db("select * from codes where ID = %s;", (code,)) + debug_log.info("Result for query: {}".format(code)) + code_from_db = code + code_is_valid_and_unused = "!" in code_from_db + if (code_is_valid_and_unused): + cursor.execute("UPDATE codes SET code=%s WHERE ID=%s ;", (surrogate_id, code)) + db.commit() + db.close() + else: + raise Exception("Invalid code") + + def get_cr_json(self, cr_id): + # TODO: query_db is not really optimal when making two separate queries in row. + cr = self.query_db("select * from cr_storage where cr_id = %s;", (cr_id,)) + csr_id = self.get_latest_csr_id(cr_id) + csr = self.query_db("select cr_id, json from csr_storage where csr_id = %s and cr_id = %s;", (csr_id, cr_id,)) + if cr is None or csr is None: + raise IndexError("CR and CSR couldn't be found with given id ({})".format(cr_id)) + debug_log.info("Found CR ({}) and CSR ({})".format(cr, csr)) + cr_from_db = loads(cr) + csr_from_db = loads(csr) + combined = {"cr": cr_from_db, "csr": csr_from_db} + + return combined + + def validate_cr(self, cr_id, surrogate_id): + """ + Lookup and validate ConsentRecord based on given CR_ID + :param cr_id: + :return: CR if found and validated. + """ + combined = self.get_cr_json(cr_id) + debug_log.info("Constructing cr/csr structure for CR_Tool:") + debug_log.info(dumps(combined, indent=2)) + # Using CR tool we get nice helper functions. + tool = CR_tool() + tool.cr = combined + # To fetch key from SLR we need surrogate_id. 
+ # We get this as parameter so as further check we verify its same as in cr. + surrogate_id_from_cr = tool.get_surrogate_id() + debug_log.info("Surrogate_id as parameter was ({}) and from CR ({})".format(surrogate_id, surrogate_id_from_cr)) + if surrogate_id_from_cr != surrogate_id: + raise NameError("User surrogate_id doesn't match surrogate_id in consent record.") + # Now we fetch the SLR and put it to SLR_Tool + slr_tool = SLR_tool() + slr = self.get_slr(surrogate_id) + slr_tool.slr = slr + # Fetch key from SLR. + keys = slr_tool.get_cr_keys() + + # Verify the CR with the keys from SLR + # Check integrity (signature) + + cr_verified = tool.verify_cr(keys) + csr_verified = tool.verify_csr(keys) + if not (cr_verified and csr_verified): + raise ValueError("CR and CSR verification failed.") + debug_log.info("Verified cr/csr ({}) for surrogate_id ({}) ".format(cr_id, surrogate_id)) + + combined_decrypted = dumps({"cr": tool.get_CR_payload(), "csr": tool.get_CSR_payload()}, indent=2) + debug_log.info("Decrypted cr/csr structure is:") + debug_log.info(combined_decrypted) + # Check that state is "Active" + state = tool.get_state() + if state != "Active": + raise ValueError("CR state is not 'Active' but ({})".format(state)) + + # Check "Issued" timestamp + time_now = int(time.time()) + issued = tool.get_issued() + # issued = datetime.strptime(issued_in_cr, "%Y-%m-%dT%H:%M:%SZ") + if time_now < issued: + raise EnvironmentError("This CR is issued in the future!") + debug_log.info("Issued timestamp is valid.") + + # Check "Not Before" timestamp + not_before = tool.get_not_before() + # not_before = datetime.strptime(not_before_in_cr, "%Y-%m-%dT%H:%M:%SZ") + if time_now < not_before: + raise EnvironmentError("This CR will be available in the future, not yet.") + debug_log.info("Not Before timestamp is valid.") + + # Check "Not After" timestamp + not_after = tool.get_not_after() + # not_after = datetime.strptime(not_after_in_cr, "%Y-%m-%dT%H:%M:%SZ") + if time_now > 
not_after: + raise EnvironmentError("This CR is expired.") + debug_log.info("Not After timestamp is valid.") + # CR validated. + + debug_log.info("CR has been validated.") + return loads(combined_decrypted) def verifyCode(self, code): - db = db_handler.get_db(self.db_path) - for code_row in self.query_db("select * from codes where ID = ?;", [code]): - code_from_db = code_row["code"] + """ + Verify that code is found in database + :param code: + :return: Boolean True if code is found in db. + """ + code = self.query_db("select * from codes where ID = %s;", (code,)) + if code is not None: return True return False def verifySurrogate(self, code, surrogate): - db = db_handler.get_db(self.db_path) - for code_row in self.query_db("select * from codes where ID = ? AND code = ?;", [code, surrogate]): - code_from_db = code_row["code"] + """ + Verify that surrogate id matches code in database + :param code: + :param surrogate: surrogate_id + :return: Boolean True if surrogate_id matches code + """ + code = self.query_db("select * from codes where ID = %s AND code = %s;", (code, surrogate)) + if code is not None: # TODO: Could we remove code and surrogate_id after this check to ensure they wont be abused later. 
return True return False def get_slr(self, surrogate_id): - db = db_handler.get_db(self.db_path) - for storage_row in self.query_db("select * from storage where surrogate_id = ?;", [surrogate_id]): - slr_from_db = storage_row["json"] - return loads(slr_from_db) + """ + Fetch SLR for given surrogate_id from the database + :param surrogate_id: surrogate_id + :return: Return SLR made for given surrogate_id or None + """ + storage_row = self.query_db("select * from storage where surrogate_id = %s;", (surrogate_id,)) + slr_from_db = loads(storage_row) + return slr_from_db + + def get_surrogate_from_cr_id(self, cr_id): + storage_row = self.query_db("select cr_id,surrogate_id from cr_storage where cr_id = %s;", (cr_id,)) + debug_log.info("Found surrogate_id {}".format(storage_row)) + surrogate_from_db = storage_row + return surrogate_from_db + + def get_token(self, cr_id): + """ + Fetch token for given cr_id from the database + :param cr_id: cr_id + :return: Return Token made for given cr_id or None + """ + storage_row = self.query_db("select * from token_storage where cr_id = %s;", (cr_id,)) + token_from_db = loads(loads(storage_row)) + return token_from_db def storeCR_JSON(self, DictionaryToStore): + """ + Store CR into database + :param DictionaryToStore: Dictionary in form {"key" : "dict_to_store"} + :return: None + """ cr_id = DictionaryToStore["cr_id"] rs_id = DictionaryToStore["rs_id"] surrogate_id = DictionaryToStore["surrogate_id"] slr_id = DictionaryToStore["slr_id"] json = DictionaryToStore["json"] - db = db_handler.get_db(self.db_path) - try: - db_handler.init_db(db) - except OperationalError: - pass + db = db_handler.get_db(host=self.host, password=self.passwd, user=self.user, port=self.port, database=self.db) + cursor = db.cursor() + debug_log.info("Storing following CR structure:") debug_log.info(DictionaryToStore) # debug_log.info(key) try: - db.execute("INSERT INTO cr_storage (cr_id, surrogate_id, slr_id, rs_id, json) \ - VALUES (?, ?, ?, ?, ?)", [cr_id, 
surrogate_id, slr_id, rs_id, dumps(json)]) + cursor.execute("INSERT INTO cr_storage (cr_id, surrogate_id, slr_id, rs_id, json) \ + VALUES (%s, %s, %s, %s, %s)", (cr_id, surrogate_id, slr_id, rs_id, dumps(json))) db.commit() except IntegrityError as e: # db.execute("UPDATE cr_storage SET json=? WHERE cr_id=? ;", [dumps(DictionaryToStore[key]), key]) # db.commit() db.rollback() - raise DetailedHTTPException(detail={"msg": "Adding CR to the database has failed.",}, + raise DetailedHTTPException(detail={"msg": "Adding CR to the database has failed.", }, title="Failure in CR storage", exception=e) def storeCSR_JSON(self, DictionaryToStore): + """ + Store CSR into database + :param DictionaryToStore: Dictionary in form {"key" : "dict_to_store"} + :return: None + """ cr_id = DictionaryToStore["cr_id"] + csr_id = DictionaryToStore["csr_id"] + consent_status = DictionaryToStore["consent_status"] rs_id = DictionaryToStore["rs_id"] surrogate_id = DictionaryToStore["surrogate_id"] + previous_record_id = DictionaryToStore["previous_record_id"] slr_id = DictionaryToStore["slr_id"] json = DictionaryToStore["json"] - db = db_handler.get_db(self.db_path) - try: - db_handler.init_db(db) - except OperationalError: - pass + db = db_handler.get_db(host=self.host, password=self.passwd, user=self.user, port=self.port, database=self.db) + cursor = db.cursor() + debug_log.info("Storing following csr structure:") debug_log.info(DictionaryToStore) # debug_log.info(key) try: - db.execute("INSERT INTO csr_storage (cr_id, surrogate_id, slr_id, rs_id, json) \ - VALUES (?, ?, ?, ?, ?)", [cr_id, surrogate_id, slr_id, rs_id, dumps(json)]) + cursor.execute("INSERT INTO csr_storage (cr_id, csr_id, previous_record_id, consent_status, surrogate_id, slr_id, rs_id, json) \ + VALUES (%s, %s, %s, %s, %s, %s, %s, %s)", + [cr_id, csr_id, previous_record_id, consent_status, surrogate_id, slr_id, rs_id, + dumps(json)]) db.commit() except IntegrityError as e: # db.execute("UPDATE csr_storage SET json=? 
WHERE cr_id=? ;", [dumps(DictionaryToStore[key]), key]) # db.commit() db.rollback() - raise DetailedHTTPException(detail={"msg": "Adding CSR to the database has failed.",}, + raise DetailedHTTPException(detail={"msg": "Adding CSR to the database has failed.", }, title="Failure in CSR storage", exception=e) + def get_active_csr(self, cr_id): + csr = self.query_db("select cr_id, json from csr_storage where cr_id = %s and consent_status = 'Active';", + (cr_id,)) + debug_log.info("Active csr is: {}".format(csr)) + return loads(csr) + + def get_latest_csr_id(self, cr_id): + # Picking first csr_id since its previous record is "null" + csr_id = self.query_db( + "select cr_id, csr_id from csr_storage where cr_id = %s and previous_record_id = 'null';", + (cr_id,)) + debug_log.info("Picked first CSR_ID in search for latest ({})".format(csr_id)) + # If first csr_id is in others csr's previous_record_id field then its not the latest. + newer_csr_id = self.query_db("select cr_id, csr_id from csr_storage where previous_record_id = %s;", + (csr_id,)) + debug_log.info("Later CSR_ID is ({})".format(newer_csr_id)) + # If we don't find newer record but get None, we know we only have one csr in our chain and latest in it is also the first. + if newer_csr_id is None: + return csr_id + # Else we repeat the previous steps in while loop to go trough all records + while True: # TODO: We probably should see to it that this can't get stuck. + try: + newer_csr_id = self.query_db("select cr_id, csr_id from csr_storage where previous_record_id = %s;", + (csr_id,)) + if newer_csr_id is None: + debug_log.info("Latest CSR in our chain seems to be ({})".format(newer_csr_id)) + return csr_id + else: + csr_id = newer_csr_id + except Exception as e: + debug_log.exception(e) + raise e + + def introspection(self, cr_id, operator_url): + # Get our latest csr_id + + # We send cr_id to Operator for inspection. 
+ req = get(operator_url + "/api/1.2/cr" + "/introspection/{}".format(cr_id)) + debug_log.info(req.status_code) + debug_log.info(req.content) + if req.ok: + csr_id = loads(req.content)["csr_id"] + # This is the latest csr we have verifiable chain for. + latest_csr_id = self.get_latest_csr_id(cr_id) + debug_log.info("Comparing our latest csr_id ({}) to ({})".format(latest_csr_id, csr_id)) + if csr_id == latest_csr_id: + debug_log.info("Verified we have latest csr.") + return + else: + debug_log.info("Our csr({}) is outdated!".format(latest_csr_id)) + req = get( + operator_url + "/api/1.2/cr" + "/consent/{}/missing_since/{}".format(cr_id, latest_csr_id)) + if req.ok: + tool = SLR_tool() + content = loads(req.content) + debug_log.info("We got: \n{}".format(content)) + slr_id = self.query_db("select cr_id, slr_id from cr_storage where cr_id = %s;" + , (cr_id,)) + rs_id = self.query_db("select cr_id, rs_id from cr_storage where cr_id = %s;" + , (cr_id,)) + for csr in content["missing_csr"]["data"]: + if not isinstance(csr, dict): + csr = loads(csr) + decoded_payload = tool.decrypt_payload(csr["attributes"]["csr"]["payload"]) + store_dict = { + "rs_id": rs_id, + "csr_id": decoded_payload["record_id"], + "consent_status": decoded_payload["consent_status"], + "previous_record_id": decoded_payload["prev_record_id"], + "cr_id": decoded_payload["cr_id"], + "surrogate_id": decoded_payload["surrogate_id"], + "slr_id": slr_id, + "json": csr # possibly store the base64 representation + } + debug_log.info("Storing CSR: \n{}".format(dumps(store_dict, indent=2))) + self.storeCSR_JSON(store_dict) + debug_log.info("Stored missing csr's to DB") + latest_csr_id = self.get_latest_csr_id(cr_id) + status = self.query_db("select cr_id, consent_status from csr_storage where csr_id = %s;" + , (latest_csr_id,)) + debug_log.info("Our latest csr id now ({}) with status ({})".format(latest_csr_id, status)) + if status == "Active": + debug_log.info("Introspection done successfully.") + else: + 
debug_log.info("Introspection failed.") + raise LookupError("Introspection failed.") + + + else: + raise ValueError("Request to get missing csr's failed with ({}) and reason ({}), content:\n{} " + .format(req.status_code, req.reason, dumps(loads(req.content), indent=2))) + + else: + raise LookupError("Unable to perform introspect.") + + def validate_request_from_ui(self, cr, data_set_id, rs_id): + debug_log.info("CR passed to validate_request_from_ui:") + debug_log.info(type(cr)) + debug_log.info(cr) + + # The rs_id is urlencoded, do the same to one fetched from cr + rs_id_in_cr = urllib.quote_plus(cr["cr"]["common_part"]["rs_id"]) + + # Check that rs_description field contains rs_id + debug_log.info("rs_id in cr({}) and from ui({})".format(rs_id_in_cr, rs_id)) + if (rs_id != rs_id_in_cr): + raise ValueError("Given rs_id doesn't match CR") + debug_log.info("RS_ID checked successfully") + # Check that rs_description field contains data_set_id (Optional?) + distribution_urls = [] + if data_set_id is not None: + datasets = cr["common_part"]["rs_description"]["resource_set"]["dataset"] + for dataset in datasets: + if dataset["dataset_id"] == data_set_id: + distribution_urls.append(dataset["distribution_url"]) + else: + datasets = cr["cr"]["common_part"]["rs_description"]["resource_set"]["dataset"] + for dataset in datasets: + distribution_urls.append(dataset["distribution_url"]) + debug_log.info("Got following distribution urls") + debug_log.info(distribution_urls) + # Request from UI validated. 
+ debug_log.info("Request from UI validated.") + return distribution_urls + + def validate_authorization_token(self, cr_id, surrogate_id, our_key): + # slr = self.get_slr(surrogate_id) + # slr_tool = SLR_tool() + # slr_tool.slr = slr + # key = slr_tool.get_operator_key() + token = self.get_token(cr_id) + # debug_log.info("Fetched key({}) and token({}).".format(key, token)) + jws_holder = jwt.JWS() + jws_holder.deserialize(raw_jws=token["auth_token"]) + auth_token_payload = loads(jws_holder.__dict__["objects"]["payload"]) + debug_log.info("Decoded Auth Token\n{}".format(dumps(auth_token_payload, indent=2))) + now = time.time() + if auth_token_payload["exp"] < now: + raise ValueError("Token is expired.") + if auth_token_payload["nbf"] > now: + raise TypeError("Token used too soon.") + # debug_log.info(aud) + return token + def register_blueprints(app, package_name, package_path): """Register all Blueprint instances on the specified Flask application found @@ -174,90 +520,98 @@ def register_blueprints(app, package_name, package_path): apis.append(item) return rv, apis + from base64 import urlsafe_b64decode as decode from json import loads + + class SLR_tool: def __init__(self): self.slr = { - "code": "7e4f7cf6-f169-4430-9b23-a4820446fe71", - "data": { - "slr": { - "type": "ServiceLinkRecord", - "attributes": { + "code": "7e4f7cf6-f169-4430-9b23-a4820446fe71", + "data": { + "slr": { + "type": "ServiceLinkRecord", + "attributes": { "slr": { - "payload": 
"IntcIm9wZXJhdG9yX2lkXCI6IFwiQUNDLUlELVJBTkRPTVwiLCBcImNyZWF0ZWRcIjogMTQ3MTM0NDYyNiwgXCJzdXJyb2dhdGVfaWRcIjogXCI5YjQxNmE5Zi1jYjRmLTRkNWMtYjJiZS01OWQxYjc3ZjJlZmFfMVwiLCBcInRva2VuX2tleVwiOiB7XCJrZXlcIjoge1wieVwiOiBcIkN0NGNHMnpPQzdrano5VWF1WHFqcTRtZ0d0bEdXcDJjcWZneVVlaUU4U2dcIiwgXCJ4XCI6IFwiUnJueHZoZjVsZXppQTZyZms4ZDlRbV96bXd2SDc5X2U5eUhBS2ZJR2dFRVwiLCBcImNydlwiOiBcIlAtMjU2XCIsIFwia3R5XCI6IFwiRUNcIiwgXCJraWRcIjogXCJTUlZNR05ULUlESzNZXCJ9fSwgXCJsaW5rX2lkXCI6IFwiNDJhMzVhN2QtMjkxZS00N2UzLWIyMmYtOTk2NjJmNjgzNDEzXCIsIFwib3BlcmF0b3Jfa2V5XCI6IHtcInVzZVwiOiBcInNpZ1wiLCBcImVcIjogXCJBUUFCXCIsIFwia3R5XCI6IFwiUlNBXCIsIFwiblwiOiBcIndITUFwQ2FVSkZpcHlGU2NUNzgxd2VuTm5mbU5jVkQxZTBmSFhfcmVfcWFTNWZvQkJzN1c0aWE1bnVxNjVFQWJKdWFxaGVPR2FEamVIaVU4V1Q5cWdnYks5cTY4SXZUTDN1bjN6R2o5WmQ3N3MySXdzNE1BSW1EeWN3Rml0aDE2M3lxdW9ETXFMX1YySXl5Mm45Uzloa1M5ZkV6cXJsZ01sYklnczJtVkJpNmdWVTJwYnJTN0gxUGFSV194YlFSX1puN19laV9uOFdlWFA1d2NEX3NJYldNa1NCc3VVZ21jam9XM1ktNW1ERDJWYmRFejJFbWtZaTlHZmstcDlBenlVbk56ZkIyTE1jSk1aekpWUWNYaUdCTzdrcG9uRkEwY3VIMV9CR0NsZXJ6Mnh2TWxXdjlPVnZzN3ZDTmRlQV9mano2eloyMUtadVo0RG1nZzBrOTRsd1wifSwgXCJ2ZXJzaW9uXCI6IFwiMS4yXCIsIFwiY3Jfa2V5c1wiOiBbe1wieVwiOiBcIlhaeWlveV9BME5qQ3Q1ZGt6OW5MOGI3YXdQRl9Cck5iYzVObjFOTTdXS0FcIiwgXCJ4XCI6IFwiR3ZaVEdpMllSb0VCblc2QzB4clpRQ0tNeWwza2lNcjgtRVoySU1ocnpXb1wiLCBcImNydlwiOiBcIlAtMjU2XCIsIFwia3R5XCI6IFwiRUNcIiwgXCJraWRcIjogXCJhY2Mta2lkLTg1MTVhYjQ2LTlkODItNDUzNC1hZDFmLTYzZDFlNDdiZDY2YlwifV0sIFwic2VydmljZV9pZFwiOiBcIjFcIn0i", - "signatures": [ - { - "header": { - "jwk": { - "x": "GvZTGi2YRoEBnW6C0xrZQCKMyl3kiMr8-EZ2IMhrzWo", - "kty": "EC", - "crv": "P-256", - "y": "XZyioy_A0NjCt5dkz9nL8b7awPF_BrNbc5Nn1NM7WKA", - "kid": "acc-kid-8515ab46-9d82-4534-ad1f-63d1e47bd66b" + "payload": 
"IntcIm9wZXJhdG9yX2lkXCI6IFwiQUNDLUlELVJBTkRPTVwiLCBcImNyZWF0ZWRcIjogMTQ3MTM0NDYyNiwgXCJzdXJyb2dhdGVfaWRcIjogXCI5YjQxNmE5Zi1jYjRmLTRkNWMtYjJiZS01OWQxYjc3ZjJlZmFfMVwiLCBcInRva2VuX2tleVwiOiB7XCJrZXlcIjoge1wieVwiOiBcIkN0NGNHMnpPQzdrano5VWF1WHFqcTRtZ0d0bEdXcDJjcWZneVVlaUU4U2dcIiwgXCJ4XCI6IFwiUnJueHZoZjVsZXppQTZyZms4ZDlRbV96bXd2SDc5X2U5eUhBS2ZJR2dFRVwiLCBcImNydlwiOiBcIlAtMjU2XCIsIFwia3R5XCI6IFwiRUNcIiwgXCJraWRcIjogXCJTUlZNR05ULUlESzNZXCJ9fSwgXCJsaW5rX2lkXCI6IFwiNDJhMzVhN2QtMjkxZS00N2UzLWIyMmYtOTk2NjJmNjgzNDEzXCIsIFwib3BlcmF0b3Jfa2V5XCI6IHtcInVzZVwiOiBcInNpZ1wiLCBcImVcIjogXCJBUUFCXCIsIFwia3R5XCI6IFwiUlNBXCIsIFwiblwiOiBcIndITUFwQ2FVSkZpcHlGU2NUNzgxd2VuTm5mbU5jVkQxZTBmSFhfcmVfcWFTNWZvQkJzN1c0aWE1bnVxNjVFQWJKdWFxaGVPR2FEamVIaVU4V1Q5cWdnYks5cTY4SXZUTDN1bjN6R2o5WmQ3N3MySXdzNE1BSW1EeWN3Rml0aDE2M3lxdW9ETXFMX1YySXl5Mm45Uzloa1M5ZkV6cXJsZ01sYklnczJtVkJpNmdWVTJwYnJTN0gxUGFSV194YlFSX1puN19laV9uOFdlWFA1d2NEX3NJYldNa1NCc3VVZ21jam9XM1ktNW1ERDJWYmRFejJFbWtZaTlHZmstcDlBenlVbk56ZkIyTE1jSk1aekpWUWNYaUdCTzdrcG9uRkEwY3VIMV9CR0NsZXJ6Mnh2TWxXdjlPVnZzN3ZDTmRlQV9mano2eloyMUtadVo0RG1nZzBrOTRsd1wifSwgXCJ2ZXJzaW9uXCI6IFwiMS4yXCIsIFwiY3Jfa2V5c1wiOiBbe1wieVwiOiBcIlhaeWlveV9BME5qQ3Q1ZGt6OW5MOGI3YXdQRl9Cck5iYzVObjFOTTdXS0FcIiwgXCJ4XCI6IFwiR3ZaVEdpMllSb0VCblc2QzB4clpRQ0tNeWwza2lNcjgtRVoySU1ocnpXb1wiLCBcImNydlwiOiBcIlAtMjU2XCIsIFwia3R5XCI6IFwiRUNcIiwgXCJraWRcIjogXCJhY2Mta2lkLTg1MTVhYjQ2LTlkODItNDUzNC1hZDFmLTYzZDFlNDdiZDY2YlwifV0sIFwic2VydmljZV9pZFwiOiBcIjFcIn0i", + "signatures": [ + { + "header": { + "jwk": { + "x": "GvZTGi2YRoEBnW6C0xrZQCKMyl3kiMr8-EZ2IMhrzWo", + "kty": "EC", + "crv": "P-256", + "y": "XZyioy_A0NjCt5dkz9nL8b7awPF_BrNbc5Nn1NM7WKA", + "kid": "acc-kid-8515ab46-9d82-4534-ad1f-63d1e47bd66b" + }, + "kid": "acc-kid-8515ab46-9d82-4534-ad1f-63d1e47bd66b" + }, + "protected": "eyJhbGciOiAiRVMyNTYifQ", + "signature": "fsSuhqLp6suUuT8waseMlpYcFx4vqIviIteBLUNWPUOubHPDY64sbpfx_flpPFymxG_t8r3Ptb96kv-ZDyjb7g" }, - "kid": "acc-kid-8515ab46-9d82-4534-ad1f-63d1e47bd66b" - }, - "protected": 
"eyJhbGciOiAiRVMyNTYifQ", - "signature": "fsSuhqLp6suUuT8waseMlpYcFx4vqIviIteBLUNWPUOubHPDY64sbpfx_flpPFymxG_t8r3Ptb96kv-ZDyjb7g" - }, - { - "header": { - "jwk": { - "x": "Rrnxvhf5leziA6rfk8d9Qm_zmwvH79_e9yHAKfIGgEE", - "kty": "EC", - "crv": "P-256", - "y": "Ct4cG2zOC7kjz9UauXqjq4mgGtlGWp2cqfgyUeiE8Sg", - "kid": "SRVMGNT-IDK3Y" - }, - "kid": "SRVMGNT-IDK3Y" - }, - "protected": "eyJhbGciOiAiRVMyNTYifQ", - "signature": "3rZCfJxvpD7covQjH_lhkJwId8ynVIMLZ6t1obiCrlwJOJe_Yc7dmImi10w8tc9_7c7u35_ysiD72wIlbJ4oFQ" - } - ] + { + "header": { + "jwk": { + "x": "Rrnxvhf5leziA6rfk8d9Qm_zmwvH79_e9yHAKfIGgEE", + "kty": "EC", + "crv": "P-256", + "y": "Ct4cG2zOC7kjz9UauXqjq4mgGtlGWp2cqfgyUeiE8Sg", + "kid": "SRVMGNT-IDK3Y" + }, + "kid": "SRVMGNT-IDK3Y" + }, + "protected": "eyJhbGciOiAiRVMyNTYifQ", + "signature": "3rZCfJxvpD7covQjH_lhkJwId8ynVIMLZ6t1obiCrlwJOJe_Yc7dmImi10w8tc9_7c7u35_ysiD72wIlbJ4oFQ" + } + ] } - } - }, - "meta": { - "slsr_id": "374707b7-a60b-4596-9f3a-6a5affa414c3", - "slr_id": "42a35a7d-291e-47e3-b22f-99662f683413" - }, - "slsr": { - "type": "ServiceLinkStatusRecord", - "attributes": { + } + }, + "meta": { + "slsr_id": "374707b7-a60b-4596-9f3a-6a5affa414c3", + "slr_id": "42a35a7d-291e-47e3-b22f-99662f683413" + }, + "slsr": { + "type": "ServiceLinkStatusRecord", + "attributes": { "slsr": { - "header": { - "jwk": { - "x": "GvZTGi2YRoEBnW6C0xrZQCKMyl3kiMr8-EZ2IMhrzWo", - "kty": "EC", - "crv": "P-256", - "y": "XZyioy_A0NjCt5dkz9nL8b7awPF_BrNbc5Nn1NM7WKA", - "kid": "acc-kid-8515ab46-9d82-4534-ad1f-63d1e47bd66b" + "header": { + "jwk": { + "x": "GvZTGi2YRoEBnW6C0xrZQCKMyl3kiMr8-EZ2IMhrzWo", + "kty": "EC", + "crv": "P-256", + "y": "XZyioy_A0NjCt5dkz9nL8b7awPF_BrNbc5Nn1NM7WKA", + "kid": "acc-kid-8515ab46-9d82-4534-ad1f-63d1e47bd66b" + }, + "kid": "acc-kid-8515ab46-9d82-4534-ad1f-63d1e47bd66b" }, - "kid": "acc-kid-8515ab46-9d82-4534-ad1f-63d1e47bd66b" - }, - "protected": "eyJhbGciOiAiRVMyNTYifQ", - "payload": 
"IntcInNscl9pZFwiOiBcIjQyYTM1YTdkLTI5MWUtNDdlMy1iMjJmLTk5NjYyZjY4MzQxM1wiLCBcImFjY291bnRfaWRcIjogXCIxXCIsIFwic2xfc3RhdHVzXCI6IFwiQWN0aXZlXCIsIFwicmVjb3JkX2lkXCI6IFwiMzc0NzA3YjctYTYwYi00NTk2LTlmM2EtNmE1YWZmYTQxNGMzXCIsIFwiaWF0XCI6IDE0NzEzNDQ2MjYsIFwicHJldl9yZWNvcmRfaWRcIjogXCJOVUxMXCJ9Ig", - "signature": "cfj3Zm5ICVtTdUJigKGTxJX4V8vzs1e9qVj83hPmiD-XJonrBRW60zQN-3lRTuJithFbrGgBJShGj1InuNGMsw" + "protected": "eyJhbGciOiAiRVMyNTYifQ", + "payload": "IntcInNscl9pZFwiOiBcIjQyYTM1YTdkLTI5MWUtNDdlMy1iMjJmLTk5NjYyZjY4MzQxM1wiLCBcImFjY291bnRfaWRcIjogXCIxXCIsIFwic2xfc3RhdHVzXCI6IFwiQWN0aXZlXCIsIFwicmVjb3JkX2lkXCI6IFwiMzc0NzA3YjctYTYwYi00NTk2LTlmM2EtNmE1YWZmYTQxNGMzXCIsIFwiaWF0XCI6IDE0NzEzNDQ2MjYsIFwicHJldl9yZWNvcmRfaWRcIjogXCJOVUxMXCJ9Ig", + "signature": "cfj3Zm5ICVtTdUJigKGTxJX4V8vzs1e9qVj83hPmiD-XJonrBRW60zQN-3lRTuJithFbrGgBJShGj1InuNGMsw" } - } - }, - "surrogate_id": "9b416a9f-cb4f-4d5c-b2be-59d1b77f2efa_1" - }} + } + }, + "surrogate_id": "9b416a9f-cb4f-4d5c-b2be-59d1b77f2efa_1" + }} + def decrypt_payload(self, payload): payload += '=' * (-len(payload) % 4) # Fix incorrect padding of base64 string. 
content = decode(payload.encode()) - payload = loads(loads(content.decode("utf-8"))) + payload = loads(content.decode("utf-8")) + debug_log.info("Decrypted payload is:") + debug_log.info(payload) return payload def get_SLR_payload(self): base64_payload = self.slr["data"]["slr"]["attributes"]["slr"]["payload"] + debug_log.info("Decrypting SLR payload:") payload = self.decrypt_payload(base64_payload) return payload def get_SLSR_payload(self): - base64_payload = self.slr["data"]["slsr"]["attributes"]["slsr"]["payload"] + base64_payload = self.slr["data"]["ssr"]["attributes"]["ssr"]["payload"] + debug_log.info("Decrypting SSR payload:") payload = self.decrypt_payload(base64_payload) return payload @@ -283,62 +637,73 @@ def get_cr_keys(self): # print(sl.get_source_surrogate_id()) from jwcrypto import jwk, jws + + class CR_tool: def __init__(self): self.cr = { - "csr": { - "signature": "e4tiFSvnqUb8k1U6BXC5WhbkQWVJZqMsDqc3efPRkBcL1cM21mSJXYOS4dSiCx4ak8S8S1IKN4wcyuAxXfrGeQ", - "payload": "IntcImNvbW1vbl9wYXJ0XCI6IHtcInNscl9pZFwiOiBcImJhYmY5Mjc3LWEyZmItNGI4MS1iMTYyLTE4ZTI5MzUyNzYxN1wiLCBcInZlcnNpb25fbnVtYmVyXCI6IFwiU3RyaW5nXCIsIFwicnNfaWRcIjogXCIyXzYyNmE3YmZiLTk0MmEtNDI2ZC1hNDc2LWE0Mzk5NmYyMDAwNVwiLCBcImNyX2lkXCI6IFwiMjlmZmRkZmMtNjBhMS00YmYwLTkzMWMtNGQ1ZWYwMmQ2N2YyXCIsIFwiaXNzdWVkXCI6IDE0NzE1OTMwMjYsIFwic3ViamVjdF9pZFwiOiBcIjFcIiwgXCJub3RfYmVmb3JlXCI6IFwiU3RyaW5nXCIsIFwibm90X2FmdGVyXCI6IFwiU3RyaW5nXCIsIFwiaXNzdWVkX2F0XCI6IFwiU3RyaW5nXCIsIFwic3Vycm9nYXRlX2lkXCI6IFwiZTZlMjdlNzUtNjUxZi00Y2I0LTg5ZTItYTUxZWI5NDllYjYwXzJcIn0sIFwicm9sZV9zcGVjaWZpY19wYXJ0XCI6IHtcInJvbGVcIjogXCJTaW5rXCIsIFwidXNhZ2VfcnVsZXNcIjogW1wiQWxsIHlvdXIgY2F0cyBhcmUgYmVsb25nIHRvIHVzXCIsIFwiU29tZXRoaW5nIHJhbmRvbVwiXX0sIFwiZXh0ZW5zaW9uc1wiOiB7fSwgXCJtdmNyXCI6IHt9fSI", - "protected": "eyJhbGciOiAiRVMyNTYifQ", - "header": { - "jwk": { - "kty": "EC", - "crv": "P-256", - "y": "XIpGIZ7bz7uaoj_9L05CQSOw6VykuD6bK4r_OMVQSao", - "x": "GfJCOXimGb3ZW4IJJIlKUZeoj8GCW7YYJRZgHuYUsds", - "kid": 
"acc-kid-3802fd17-49f4-48fc-8ac1-09624a52a3ae" - }, - "kid": "acc-kid-3802fd17-49f4-48fc-8ac1-09624a52a3ae" - } - }, - "cr": { - "signature": "fiiVhAPxzYGgkV3D43FvgKSdIvDrsyMm_Vz4WWhBoLaXbTcZKNEvKL5Tx1O6YRwShOc9plK7YRxgWyY9OYd7zA", - "payload": "IntcImFjY291bnRfaWRcIjogXCJlNmUyN2U3NS02NTFmLTRjYjQtODllMi1hNTFlYjk0OWViNjBfMlwiLCBcImNyX2lkXCI6IFwiMjlmZmRkZmMtNjBhMS00YmYwLTkzMWMtNGQ1ZWYwMmQ2N2YyXCIsIFwicHJldl9yZWNvcmRfaWRcIjogXCJudWxsXCIsIFwicmVjb3JkX2lkXCI6IFwiZTBiZDk1MTUtNjA5Zi00YzMxLThiMmQtZDliMTY5NjdiZmQzXCIsIFwiaWF0XCI6IDE0NzE1OTMwMjYsIFwiY29uc2VudF9zdGF0dXNcIjogXCJBY3RpdmVcIn0i", - "protected": "eyJhbGciOiAiRVMyNTYifQ", - "header": { - "jwk": { - "kty": "EC", - "crv": "P-256", - "y": "XIpGIZ7bz7uaoj_9L05CQSOw6VykuD6bK4r_OMVQSao", - "x": "GfJCOXimGb3ZW4IJJIlKUZeoj8GCW7YYJRZgHuYUsds", - "kid": "acc-kid-3802fd17-49f4-48fc-8ac1-09624a52a3ae" - }, - "kid": "acc-kid-3802fd17-49f4-48fc-8ac1-09624a52a3ae" - } - } -} + "csr": { + "signature": "e4tiFSvnqUb8k1U6BXC5WhbkQWVJZqMsDqc3efPRkBcL1cM21mSJXYOS4dSiCx4ak8S8S1IKN4wcyuAxXfrGeQ", + "payload": "IntcImNvbW1vbl9wYXJ0XCI6IHtcInNscl9pZFwiOiBcImJhYmY5Mjc3LWEyZmItNGI4MS1iMTYyLTE4ZTI5MzUyNzYxN1wiLCBcInZlcnNpb25fbnVtYmVyXCI6IFwiU3RyaW5nXCIsIFwicnNfaWRcIjogXCIyXzYyNmE3YmZiLTk0MmEtNDI2ZC1hNDc2LWE0Mzk5NmYyMDAwNVwiLCBcImNyX2lkXCI6IFwiMjlmZmRkZmMtNjBhMS00YmYwLTkzMWMtNGQ1ZWYwMmQ2N2YyXCIsIFwiaXNzdWVkXCI6IDE0NzE1OTMwMjYsIFwic3ViamVjdF9pZFwiOiBcIjFcIiwgXCJub3RfYmVmb3JlXCI6IFwiU3RyaW5nXCIsIFwibm90X2FmdGVyXCI6IFwiU3RyaW5nXCIsIFwiaXNzdWVkX2F0XCI6IFwiU3RyaW5nXCIsIFwic3Vycm9nYXRlX2lkXCI6IFwiZTZlMjdlNzUtNjUxZi00Y2I0LTg5ZTItYTUxZWI5NDllYjYwXzJcIn0sIFwicm9sZV9zcGVjaWZpY19wYXJ0XCI6IHtcInJvbGVcIjogXCJTaW5rXCIsIFwidXNhZ2VfcnVsZXNcIjogW1wiQWxsIHlvdXIgY2F0cyBhcmUgYmVsb25nIHRvIHVzXCIsIFwiU29tZXRoaW5nIHJhbmRvbVwiXX0sIFwiZXh0ZW5zaW9uc1wiOiB7fSwgXCJtdmNyXCI6IHt9fSI", + "protected": "eyJhbGciOiAiRVMyNTYifQ", + "header": { + "jwk": { + "kty": "EC", + "crv": "P-256", + "y": "XIpGIZ7bz7uaoj_9L05CQSOw6VykuD6bK4r_OMVQSao", + "x": 
"GfJCOXimGb3ZW4IJJIlKUZeoj8GCW7YYJRZgHuYUsds", + "kid": "acc-kid-3802fd17-49f4-48fc-8ac1-09624a52a3ae" + }, + "kid": "acc-kid-3802fd17-49f4-48fc-8ac1-09624a52a3ae" + } + }, + "cr": { + "signature": "fiiVhAPxzYGgkV3D43FvgKSdIvDrsyMm_Vz4WWhBoLaXbTcZKNEvKL5Tx1O6YRwShOc9plK7YRxgWyY9OYd7zA", + "payload": "IntcImFjY291bnRfaWRcIjogXCJlNmUyN2U3NS02NTFmLTRjYjQtODllMi1hNTFlYjk0OWViNjBfMlwiLCBcImNyX2lkXCI6IFwiMjlmZmRkZmMtNjBhMS00YmYwLTkzMWMtNGQ1ZWYwMmQ2N2YyXCIsIFwicHJldl9yZWNvcmRfaWRcIjogXCJudWxsXCIsIFwicmVjb3JkX2lkXCI6IFwiZTBiZDk1MTUtNjA5Zi00YzMxLThiMmQtZDliMTY5NjdiZmQzXCIsIFwiaWF0XCI6IDE0NzE1OTMwMjYsIFwiY29uc2VudF9zdGF0dXNcIjogXCJBY3RpdmVcIn0i", + "protected": "eyJhbGciOiAiRVMyNTYifQ", + "header": { + "jwk": { + "kty": "EC", + "crv": "P-256", + "y": "XIpGIZ7bz7uaoj_9L05CQSOw6VykuD6bK4r_OMVQSao", + "x": "GfJCOXimGb3ZW4IJJIlKUZeoj8GCW7YYJRZgHuYUsds", + "kid": "acc-kid-3802fd17-49f4-48fc-8ac1-09624a52a3ae" + }, + "kid": "acc-kid-3802fd17-49f4-48fc-8ac1-09624a52a3ae" + } + } + } + def decrypt_payload(self, payload): - #print("payload :\n", slr) - #print("Before Fix:", payload) + # print("payload :\n", slr) + # print("Before Fix:", payload) payload += '=' * (-len(payload) % 4) # Fix incorrect padding of base64 string. 
- #print("After Fix :", payload) + # print("After Fix :", payload) content = decode(payload.encode()) - payload = loads(loads(content.decode("utf-8"))) + payload = loads(content.decode("utf-8")) + debug_log.info("Decrypted payload is:") + debug_log.info(payload) return payload def get_CR_payload(self): - base64_payload = self.cr["cr"]["payload"] + base64_payload = self.cr["cr"]["attributes"]["cr"]["payload"] payload = self.decrypt_payload(base64_payload) return payload def get_CSR_payload(self): - base64_payload = self.cr["csr"]["payload"] + base64_payload = self.cr["csr"]["attributes"]["csr"]["payload"] payload = self.decrypt_payload(base64_payload) return payload def get_cr_id_from_csr(self): return self.get_CSR_payload()["cr_id"] + def get_csr_id(self): + return self.get_CSR_payload()["record_id"] # Perhaps this could just be csr_id + + def get_consent_status(self): + return self.get_CSR_payload()["consent_status"] + def get_prev_record_id(self): return self.get_CSR_payload()["prev_record_id"] @@ -351,12 +716,27 @@ def cr_id_matches_in_csr_and_cr(self): def get_usage_rules(self): return self.get_CR_payload()["role_specific_part"]["usage_rules"] + def get_pop_key(self): + return self.get_CR_payload()["role_specific_part"]["pop_key"] + def get_slr_id(self): return self.get_CR_payload()["common_part"]["slr_id"] + def get_issued(self): + return self.get_CR_payload()["common_part"]["iat"] + + def get_not_before(self): + return self.get_CR_payload()["common_part"]["nbf"] + + def get_not_after(self): + return self.get_CR_payload()["common_part"]["exp"] + def get_rs_id(self): return self.get_CR_payload()["common_part"]["rs_id"] + def get_state(self): + return self.get_CSR_payload()["consent_status"] + def get_subject_id(self): return self.get_CR_payload()["common_part"]["subject_id"] @@ -364,42 +744,110 @@ def get_surrogate_id(self): return self.get_CR_payload()["common_part"]["surrogate_id"] def get_role(self): - return 
self.get_CR_payload()["role_specific_part"]["role"] + return self.get_CR_payload()["common_part"]["role"] def verify_cr(self, keys): + debug_log.info("CR in object:\n{}".format(dumps(self.cr, indent=2))) for key in keys: cr_jwk = jwk.JWK(**key) cr_jws = jws.JWS() - cr_jws.deserialize(dumps(self.cr["cr"])) + cr = self.cr["cr"]["attributes"]["cr"] + cr_jws.deserialize(dumps(cr)) try: cr_jws.verify(cr_jwk) return True except Exception as e: - pass - #print(repr(e)) - #return False + debug_log.info( + "FAILED key verification for CR: \n({})\n WITH KEY: \n({})".format(cr, cr_jwk.export_public())) + debug_log.exception(e) + # print(repr(e)) + # return False return False - def verify_csr(self, keys): for key in keys: cr_jwk = jwk.JWK(**key) csr_jws = jws.JWS() - csr_jws.deserialize(dumps(self.cr["csr"])) + csr = self.cr["csr"]["attributes"]["csr"] + csr_jws.deserialize(dumps(csr)) try: csr_jws.verify(cr_jwk) return True except Exception as e: + debug_log.info("FAILED key verification for CSR: \n({})\n WITH KEY: \n({})".format(csr, cr_jwk.export_public())) + debug_log.exception(e) pass - #print(repr(e)) - #return False + # print(repr(e)) + # return False return False -#crt = CR_tool() -#print (dumps(crt.get_CR_payload(), indent=2)) -#print (dumps(crt.get_CSR_payload(), indent=2)) -#print(crt.get_role()) + +# crt = CR_tool() +# print (dumps(crt.get_CR_payload(), indent=2)) +# print (dumps(crt.get_CSR_payload(), indent=2)) +# print(crt.get_role()) # print(crt.get_cr_id()) # print(crt.get_usage_rules()) -# print(crt.get_surrogate_id()) \ No newline at end of file +# print(crt.get_surrogate_id()) +from jwcrypto import jwt +from jwcrypto.jwt import JWTExpired + + +class Token_tool: + def __init__(self): + # Replace token. 
+ self.token = { + "auth_token": "eyJhbGciOiJSUzI1NiJ9.eyJhdWQiOlt7ImRhdGFzZXRfaWQiOiJTdHJpbmciLCJkaXN0cmlidXRpb25faWQiOiJTdHJpbmcifV0sImV4cCI6IjIwMTYtMTEtMDhUMTM6MzA6MjUgIiwiaWF0IjoiMjAxNi0xMC0wOVQxMzozMDoyNSAiLCJpc3MiOnsiZSI6IkFRQUIiLCJraWQiOiJBQ0MtSUQtUkFORE9NIiwia3R5IjoiUlNBIiwibiI6InRtaGxhUFV3SmdvNHlTVE1yVEdGRnliVnhLMjh1REd0SlNGRGRHazNiYXhUV21nZkswQzZETXF3NWxxcC1FWFRNVFJmSXFNYmRNY0RtVU5ueUpwUTF3In0sImp0aSI6Ijc5ZmI3NDg0LTE2YjYtNDEzYy04ZGI0LWZlMjcwYjg4Y2UxNiIsIm5iZiI6IjIwMTYtMTAtMDlUMTM6MzA6MjUgIiwicnNfaWQiOiJodHRwOi8vc2VydmljZV9jb21wb25lbnRzOjcwMDB8fDljMWYxNTdkLWM4MWEtNGY1Ni1hZmYxLTc2MWZjNTVhNDBkOSIsInN1YiI6eyJlIjoiQVFBQiIsImtpZCI6IlNSVk1HTlQtUlNBLTUxMiIsImt0eSI6IlJTQSIsIm4iOiJ5R2dzUDljV01pUFBtZ09RMEp0WVN3Nnp3dURvdThBR0F5RHV0djVwTHc1aXZ6NnhvTGhaTS1pUVdGN0VzckVHdFNyUU55WUxzMlZzLUpxbW50UGpIUSJ9fQ.s1KOu1Q_ifNEnmBQ6QcmNxd0Oy1Fxp-z_4hsCI5fNfOa5vtWai68_OKN_NoUjtqUCy-CJcLHnGGoxTh_vHcjtg"} + # Replace key. + self.key = None + + def decrypt_payload(self, payload): + key = jwk.JWK() + key.import_key(**self.key) + token = jwt.JWT() + # This step actually verifies the signature, the format and timestamps. + try: + token.deserialize(self.token["auth_token"], key) + except JWTExpired as e: + debug_log.exception(e) + # TODO: get new auth token and start again. + raise e + claims = loads(token.claims) + # payload += '=' * (-len(payload) % 4) # Fix incorrect padding of base64 string. + # content = decode(payload.encode('utf-8')) + debug_log.info("Decrypted following claims from token:") + debug_log.info(dumps(claims, indent=2)) + # payload = loads(loads(content.decode('utf-8'))) + return claims + + def get_token(self): + debug_log.info("Fetching token..") + decrypted_token = self.decrypt_payload(self.token["auth_token"]) + debug_log.info("Got following token:") + debug_log.info(dumps(decrypted_token, indent=2)) + return decrypted_token + + def verify_token(self, + our_key): # TODO: Get some clarification what we want to verify now that sub field doesn't contain key. 
+ debug_log.info("Verifying token..\nOur key is:") + debug_log.info(our_key) + debug_log.info(type(our_key)) + + if self.key is None: + raise UnboundLocalError("Set Token_tool objects key variable to Operator key before use.") + token = self.get_token() + kid = token["cnf"]["kid"] + source_cr_id = token["pi_id"] + debug_log.info("Source CR id is:") + debug_log.info(type(source_cr_id)) + debug_log.info(source_cr_id) + # debug_log.info(our_key) + if cmp(source_cr_id, kid) != 0: + raise ValueError("JWK's didn't match.") + + # TODO: Figure out beter way to return aud + return token + +# tt = Token_tool() +# print(tt.decrypt_payload(tt.token["auth_token"])) diff --git a/Service_Components/instance/__init__.py b/Service_Components/instance/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/Service_Components/instance/settings.py b/Service_Components/instance/settings.py new file mode 100644 index 0000000..1d34c50 --- /dev/null +++ b/Service_Components/instance/settings.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +from kombu import Exchange, Queue + +TIMEOUT = 8 + +KEYSIZE = 512 + +# Setting to /tmp or other ramdisk makes it faster. 
+ +DATABASE_PATH = "./db_Srv.sqlite" + +SELERY_BROKER_URL = 'redis://redis:6379/0' + +SELERY_RESULT_BACKEND = 'redis://redis:6379/0' + +CERT_PATH = "./service_key.jwk" +CERT_KEY_PATH = "./service_key.jwk" +CERT_PASSWORD_PATH = "./cert_pw" + +SERVICE_URL = "http://service_mockup:2000" + + + +OPERATOR_URL = "http://operator_components:5000" + + + + +SERVICE_ROOT_PATH = "/api/1.2" + + + +SERVICE_CR_PATH = "/cr" + + + +SERVICE_SLR_PATH = "/slr" + + + +DEBUG_MODE = False + + +CELERY_QUEUES = ( + Queue('srv_queue', Exchange('srv_queue'), routing_key='srv_queue'), +) + +CELERY_DEFAULT_QUEUE = 'srv_queue' + +CELERY_ROUTES = { + 'get_AuthToken': {'queue': 'srv_queue', 'routing_key': "srv_queue"} +} \ No newline at end of file diff --git a/Service_Components/instance/settings_template.py.j2 b/Service_Components/instance/settings_template.py.j2 new file mode 100644 index 0000000..a08e581 --- /dev/null +++ b/Service_Components/instance/settings_template.py.j2 @@ -0,0 +1,152 @@ +# -*- coding: utf-8 -*- +from kombu import Exchange, Queue + +TIMEOUT = 8 + +KEYSIZE = 512 + +{% if IS_SINK is defined %} +IS_SINK = {{ IS_SINK }} +{% else %} +IS_SINK = True +{% endif %} + +{% if IS_SOURCE is defined %} +IS_SOURCE = {{ IS_SOURCE }} +{% else %} +IS_SOURCE = True +{% endif %} + +# Name of host to connect to. Default: use the local host via a UNIX socket (where applicable) +{% if MYSQL_HOST is defined %} +MYSQL_HOST = {{ MYSQL_HOST }} +{% else %} +MYSQL_HOST = 'localhost' +{% endif %} + + # User to authenticate as. Default: current effective user. +{% if MYSQL_USER is defined %} +MYSQL_USER = {{ MYSQL_USER }} +{% else %} +MYSQL_USER = 'service' +{% endif %} + +# Password to authenticate with. Default: no password. +{% if MYSQL_PASSWORD is defined %} +MYSQL_PASSWORD = {{ MYSQL_PASSWORD }} +{% else %} +MYSQL_PASSWORD = 'MynorcA' +{% endif %} + +# Database to use. Default: no default database. 
+{% if MYSQL_DB is defined %} +MYSQL_DB = {{ MYSQL_DB }} +{% else %} +MYSQL_DB = 'db_Srv' +{% endif %} + +# TCP port of MySQL server. Default: 3306. +{% if MYSQL_PORT is defined %} +MYSQL_PORT = {{ MYSQL_PORT }} +{% else %} +MYSQL_PORT = 3306 +{% endif %} + + + + + +# Setting to /tmp or other ramdisk makes it faster. +{% if DATABASE_PATH is defined %} +DATABASE_PATH = {{ DATABASE_PATH }} +{% else %} + +DATABASE_PATH = "./db_Operator.sqlite" + +{% endif %} + + +{% if SELERY_BROKER_URL is defined %} +SELERY_BROKER_URL = {{ SELERY_BROKER_URL }} +{% else %} +SELERY_BROKER_URL = 'redis://localhost:6379/1' +{% endif %} + +{% if SELERY_RESULT_BACKEND is defined %} +SELERY_RESULT_BACKEND = {{ SELERY_RESULT_BACKEND }} +{% else %} +SELERY_RESULT_BACKEND = 'redis://localhost:6379/1' +{% endif %} + + +{% if CERT_PATH is defined %} +CERT_PATH = {{ CERT_PATH }} +{% else %} +CERT_PATH = "./service_key.jwk" +{% endif %} + +{% if CERT_KEY_PATH is defined %} +CERT_KEY_PATH = {{ CERT_KEY_PATH }} +{% else %} +CERT_KEY_PATH = "./service_key.jwk" +{% endif %} + +{% if CERT_PASSWORD_PATH is defined %} +CERT_PASSWORD_PATH = {{ CERT_PASSWORD_PATH }} +{% else %} +CERT_PASSWORD_PATH = "./cert_pw" +{% endif %} + + + +{% if SERVICE_URL is defined %} +SERVICE_URL = {{ SERVICE_URL }} +{% else %} +SERVICE_URL = "http://localhost:2000" +{% endif %} + +{% if OPERATOR_URL is defined %} +OPERATOR_URL = {{ OPERATOR_URL }} +{% else %} +OPERATOR_URL = "http://localhost:5000" +{% endif %} + +{% if SERVICE_ID is defined %} +SERVICE_ID = {{ SERVICE_ID }} +{% else %} +SERVICE_ID = "SRVMGMNT-CHANGE_ME" +{% endif %} + +{% if SERVICE_ROOT_PATH is defined %} +SERVICE_ROOT_PATH = {{ SERVICE_ROOT_PATH }} +{% else %} +SERVICE_ROOT_PATH = "/api/1.2" +{% endif %} + +{% if SERVICE_CR_PATH is defined %} +SERVICE_CR_PATH = {{ SERVICE_CR_PATH }} +{% else %} +SERVICE_CR_PATH = "/cr" +{% endif %} + +{% if SERVICE_SLR_PATH is defined %} +SERVICE_SLR_PATH = {{ SERVICE_SLR_PATH }} +{% else %} +SERVICE_SLR_PATH = "/slr" +{% 
endif %} + +{% if DEBUG_MODE is defined %} +DEBUG_MODE = {{ DEBUG_MODE }} +{% else %} +DEBUG_MODE = True +{% endif %} + +CELERY_QUEUES = ( + Queue('srv_queue', Exchange('srv_queue'), routing_key='srv_queue'), +) + +CELERY_DEFAULT_QUEUE = 'srv_queue' + +CELERY_ROUTES = { + 'get_AuthToken': {'queue': 'srv_queue', 'routing_key': "srv_queue"} +} \ No newline at end of file diff --git a/Service_Components/requirements.txt b/Service_Components/requirements.txt index 6a0fe52..d1978bd 100644 --- a/Service_Components/requirements.txt +++ b/Service_Components/requirements.txt @@ -20,6 +20,7 @@ jsonschema==2.5.1 jwcrypto==0.3.1 kombu==3.0.35 MarkupSafe==0.23 +mysqlclient==1.3.7 pyasn1==0.1.9 pycparser==2.14 pycryptodome==3.4 @@ -29,5 +30,7 @@ pytz==2016.6.1 redis==2.10.5 requests==2.11.1 six==1.10.0 +uWSGI==2.0.13.1 Werkzeug==0.11.10 wheel==0.24.0 +restapi-logging-handler==0.2.2 \ No newline at end of file diff --git a/Service_Components/signed_requests/__init__.py b/Service_Components/signed_requests/__init__.py new file mode 100644 index 0000000..0e1d8a9 --- /dev/null +++ b/Service_Components/signed_requests/__init__.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# __init__.py +# +# MIT License +# +# Copyright (c) 2016 Aleksi Palomäki +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# +__author__ = "Aleksi Palomäki" diff --git a/Service_Components/signed_requests/doc_tests b/Service_Components/signed_requests/doc_tests new file mode 100644 index 0000000..93acc97 --- /dev/null +++ b/Service_Components/signed_requests/doc_tests @@ -0,0 +1,93 @@ +# MIT License +# +# Copyright (c) 2016 Aleksi Palomäki +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+ +The ``json_builder` module +====================== + +Using ``hash`` +------------------- + +First import +``hash_params`` from the ``json_builder`` module: + + >>> from json_builder import hash_params + +Create hash_params object: + >>> jsb = hash_params() + +Now use it with string: + + >>> jsb.hash("b=bar&a=foo&c=duck") + 'u4LgkGUWhP9MsKrEjA4dizIllDXluDku6ZqCeyuR-JY' + +Now use it with list: + + >>> jsb.hash([["b","bar"],["a","foo"],["c","duck"]]) + 'u4LgkGUWhP9MsKrEjA4dizIllDXluDku6ZqCeyuR-JY' + +Now use it with list and dict: + + >>> jsb.hash(["b", "a", "c"], {"a":"foo","c":"duck", "b":"bar"}) + 'u4LgkGUWhP9MsKrEjA4dizIllDXluDku6ZqCeyuR-JY' + +The ``signed_request_auth`` module +====================== + +Using ``SignedRequest`` +------------------- +Imports and make a key + >>> import requests + >>> from signed_request_auth import SignedRequest + >>> from jwcrypto import jwk + >>> key = jwk.JWK.generate(kty='oct', size=256) + +Form most request that gets PoP Authorization header and check that it can be generated + >>> req = requests.Request("GET", "http://localhost/", auth=SignedRequest(token="blaa", key=key, sign_query=True), params={"a": "foo", "b": "bar", "c": "duck"}) + >>> req.prepare() # doctest: +ELLIPSIS + + >>> header = req.__dict__["auth"].generate_authorization_header() + >>> header # doctest: +ELLIPSIS + 'PoP eyJ...' + +Import pop_handler and test generated pop with it. 
+ >>> from json_builder import pop_handler + >>> token = header.split(" ")[1] + >>> poppi = pop_handler(token=token, key=key) + >>> poppi.get_at() # doctest: +ELLIPSIS + '{"q": [["a", "c", "b"], "A7zM9tEc3J__xtM6rPf7veMqpehXtSoD3tJMS2OUDTs"], "at": "blaa", "ts": ...}' + + + + +This part is just to ease up seeing actual test results via copy&paste + +import requests +from signed_request_auth import SignedRequest +from jwcrypto import jwk +key = jwk.JWK.generate(kty='oct', size=256) +req = requests.Request("GET", "http://requestb.in/19w0mpv1", auth=SignedRequest(token="blaa", key=key, sign_query=True, sign_method=True, sign_path=True), params={"a": "foo", "b": "bar", "c": "duck"}) +req.prepare() # doctest: +ELLIPSIS +header = req.__dict__["auth"].generate_authorization_header() +from json_builder import pop_handler +token = header.split(" ")[1] +poppi = pop_handler(token=token, key=key) +poppi.get_at() diff --git a/Service_Components/signed_requests/json_builder.py b/Service_Components/signed_requests/json_builder.py new file mode 100644 index 0000000..7d76429 --- /dev/null +++ b/Service_Components/signed_requests/json_builder.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# json_builder.py +# +# MIT License +# +# Copyright (c) 2016 Aleksi Palomäki +# +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# +__author__ = "Aleksi Palomäki" +import base64 +import hashlib +from urllib import quote_plus as precent_encode +from jwcrypto import jws + +class base_hash: + def base64_encode(self, sha_digest): + """ + + :param sha_digest: byte array(octet stream) representation of hashed string, we base 64 encode it here + :return: base64url_safe encode of the hash with padding removed. + """ + return base64.urlsafe_b64encode(sha_digest).replace("=", "") # Note how we remove padding here, apparently everyone does. + + def _hash_string(self, string): + """ + + :param string: string in query string format to be hashed, for example "a=1&b=2&c=3" + :return: byte array(octet stream) representation of hashed string + """ + sha = hashlib.sha256() + sha.update(string) + return sha.digest() + +class hash_params(base_hash): + + def _hash_list(self, param_list): + """ + + :param list: list as [["key", "value"], ["key2", "value2"]] or [("key", "value"), ("key2", "value2")] + :return: byte array(octet stream) representation of hashed string + """ + string = "" + for pair in param_list: + string += "{}={}&".format(precent_encode(pair[0]), precent_encode(pair[1])) + string = string.rstrip("&") + return self._hash_string(string) + + def _hash_list_and_dict(self, list, dict): + """ + + :param list: list of keys as ["key1", "key2"] + :param dict: dict as {"key", "value} + :return: byte array(octet stream) representation of hashed string + """ + string = "" + for key in list: + string += 
"{}={}&".format(precent_encode(key), precent_encode(dict[key])) + string = string.rstrip("&") + return self._hash_string(string) + + def hash(self, hashable, dictionary=None): + """ + + :return: base64 representation of hash + :param hashable: + """ + if isinstance(hashable, list) and dictionary is None: + hash_value = self.base64_encode(self._hash_list(hashable)) + + elif isinstance(hashable, str): + hash_value = self.base64_encode(self._hash_string(hashable)) + elif isinstance(dictionary, dict): + hash_value = self.base64_encode(self._hash_list_and_dict(hashable, dictionary)) + else: + raise TypeError("Invalid type, hash(hashable) supports only string('a=1&b=2'), " + "list of [key, value] or (key, value) pairs, " + "dict and list ([key1,key2], {key2: value, key1: value}") + return hash_value + +class hash_headers(base_hash): + def __init__(self): + pass + + + def _hash_list_and_dict(self, list, dict): + """ + + :param list: list of keys as ["key1", "key2"] + :param dict: dict as {"key", "value} + :return: byte array(octet stream) representation of hashed string + """ + string = "" + for key in list: + string += "{}={}&".format(precent_encode(key), precent_encode(dict[key])) + string = string.rstrip("&") + return self._hash_string(string) + + +class pop_handler: + def __init__(self, token, key=None, alg=None): + #if alg is None: + # alg = "HS256" + self.verified = False + self.key = key + self.jws_token = jws.JWS() + self.jws_token.deserialize(token, key=key, alg=alg) + try: + self.decrypted = self.jws_token.payload + self.verified = True + except Exception as e: + self.decrypted = self.jws_token.__dict__["objects"]["payload"] + self.verified = False + + def get_at(self): + return self.decrypted + diff --git a/Service_Components/signed_requests/run_tests.py b/Service_Components/signed_requests/run_tests.py new file mode 100644 index 0000000..0ecf30a --- /dev/null +++ b/Service_Components/signed_requests/run_tests.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python +# -*- 
coding: utf-8 -*- +# +# run_tests.py +# +# MIT License +# +# Copyright (c) 2016 Aleksi Palomäki +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# +__author__ = "Aleksi Palomäki" +import doctest + +doctest.testfile("doc_tests", verbose=True) diff --git a/Service_Components/signed_requests/signed_request_auth.py b/Service_Components/signed_requests/signed_request_auth.py new file mode 100644 index 0000000..5c98da0 --- /dev/null +++ b/Service_Components/signed_requests/signed_request_auth.py @@ -0,0 +1,131 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# signed_request_auth.py +# +# MIT License +# +# Copyright (c) 2016 Aleksi Palomäki +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# +__author__ = "Aleksi Palomäki" +import time +import urlparse +from json import dumps +from requests.auth import AuthBase + +from jwcrypto import jws + +from json_builder import hash_params + + +class SignedRequest(AuthBase): + def generate_authorization_header(self): + # print(dumps(self.json_structure, indent=2)) + """ + Generates the actual PoP token and the string for Authorization header + :return: + """ + token = jws.JWS(dumps(self.json_structure).encode("utf-8")) + token.add_signature(key=self.sign_key, alg=self.alg, header=self.header, protected=self.protected) + authorization_header = "PoP {}".format(token.serialize(compact=True)) + return authorization_header + + def __init__(self, + token=None, # Required + sign_method=False, + sign_url=False, + sign_path=False, + sign_query=False, + sign_header=False, + sign_body=False, + key=None, # Required + alg=None, + protected=None, + header=None): + + """ + + :param token: Token for the "at" field (Required) + :param sign_method: Do we add method to the signed part? (Optional) + :param sign_url: Do we add url to the signed part? (Optional) + :param sign_path: Do we add path to the signed part? (Optional) + :param sign_query: Do we add query parameters to the signed part? (Optional) + :param sign_header: Do we add headers to the signed part? (Optional) + :param sign_body: Do we add content of body to the signed part? 
(Optional) + :param key: JWK used to sign the signed part (Required) + :param alg: Algorithm used in key (Defaults to HS256) (Optional) + :param protected: Protected field for the signing (Optional) + :param header: Header part for the signing (Optional) + """ + if alg is None: + if protected is None and header is None: + protected = dumps({"typ": "JWS", + "alg": "HS256"}) + + self.sign_method = sign_method + self.sign_url = sign_url + self.sign_path = sign_path + self.sign_query = sign_query + self.sign_header = sign_header + self.sign_body = sign_body + + self.sign_key = key + self.alg = alg + self.header = header + self.protected = protected + + if self.sign_key is None: + raise TypeError("Key can't be type None.") + + self.json_structure = { + "at": token, # Required + "ts": time.time() # Optional but Recommended. + } + + + def __call__(self, r): + """ + + :param r: PreparedRequest object + :return: PreparedRequest object + """ + hasher = hash_params() + # print(r.__dict__) + + if self.sign_query: + params = urlparse.parse_qsl(urlparse.urlparse(r.url).query) + # print(params) + keys = [] + for item in params: + keys.append(item[0]) + hash = hasher.hash(params) + self.json_structure["q"] = [keys, hash] # 'q' for query + if self.sign_method: + self.json_structure["m"] = r.method + if self.sign_path: + self.json_structure["p"] = urlparse.urlparse(r.url).path + auth_header_has_content = r.headers.get("Authorization", False) + if auth_header_has_content: # TODO: Naive attempt to consider existing stuff in Authorization, I need to read more about requests to know if this could work. 
+ r.headers['Authorization'] = "{},{}".format(self.generate_authorization_header(), + r.headers['Authorization']).rstrip(",") + else: + r.headers['Authorization'] = self.generate_authorization_header() + return r diff --git a/Service_Components/srv_tasks.py b/Service_Components/srv_tasks.py new file mode 100644 index 0000000..0e741fd --- /dev/null +++ b/Service_Components/srv_tasks.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +from requests import post +from factory import create_celery_app +import urllib +celery = create_celery_app() + +# TODO Possibly remove this on release +# @celery.task +# def CR_installer(crs_csrs_payload, sink_url, source_url): +# # Get these as parameter or inside crs_csrs_payload +# endpoint = "/api/1.2/cr/add_cr" +# print(crs_csrs_payload) +# source = post(source_url+endpoint, json=crs_csrs_payload["source"]) +# print(source.url, source.reason, source.status_code, source.text) +# +# sink = post(sink_url+endpoint, json=crs_csrs_payload["sink"]) +# print(sink.url, sink.reason, sink.status_code, sink.text) + + +from sqlite3 import OperationalError, IntegrityError +import db_handler +from json import dumps, loads +from requests import get +from instance.settings import MYSQL_HOST, MYSQL_PASSWORD, MYSQL_USER, MYSQL_PORT, MYSQL_DB +from helpers import Helpers, CR_tool +@celery.task +def get_AuthToken(cr_id, operator_url, app_config): + print(operator_url, cr_id) + helpers = Helpers(app_config) + print(cr_id) + token = get("{}/api/1.2/cr/auth_token/{}".format(operator_url, cr_id)) # TODO Get api path from some config? 
+ print(token.url, token.reason, token.status_code, token.text) + store_dict = {cr_id: dumps(loads(token.text.encode()))} + helpers.storeToken(store_dict) + + cr_csr = helpers.get_cr_json(cr_id) + cr_tool = CR_tool() + cr_tool.cr = cr_csr + + user_id = cr_tool.get_surrogate_id() + rs_id = cr_tool.get_rs_id() + + #req = get("http://service_components:7000/api/1.2/sink_flow/init") + #print(req.url, req.status_code, req.content) + + data = {"cr_id": cr_id, + "user_id": user_id, + "rs_id": urllib.quote_plus(rs_id)} + print(dumps(data, indent=2)) + + req = post("http://service_components:7000/api/1.2/sink_flow/dc", json=data) + # req = get("http://service_components:7000/api/1.2/sink_flow/" + # "user/"+"95479a08-80cc-4359-ba28-b8ca23ff5572_53af88dc-33de-44be-bc30-e0826db9bd6c"+"/" + # "consentRecord/"+"cd431509-777a-4285-8211-95c5ac577537"+"/" + # "resourceSet/"+urllib.quote_plus("http://service_components:7000||9aebb487-0c83-4139-b12c-d7fcea93a3ad")) + print(req.url, req.status_code, req.content) diff --git a/Service_Components/wsgi.py b/Service_Components/wsgi.py index 9622e58..fa5d457 100644 --- a/Service_Components/wsgi.py +++ b/Service_Components/wsgi.py @@ -11,6 +11,8 @@ import Service_Mgmnt import Service_Root import Authorization_Management +import Sink +import Source @@ -18,7 +20,7 @@ logger = logging.getLogger("sequence") try: from restapi_logging_handler import RestApiHandler - restapihandler = RestApiHandler("http://localhost:9004/") + restapihandler = RestApiHandler("http://172.18.0.1:9004/") logger.addHandler(restapihandler) except Exception as e: @@ -29,14 +31,24 @@ logging.basicConfig() debug_log.setLevel(logging.INFO) -from instance.settings import SERVICE_ROOT_PATH, SERVICE_CR_PATH, SERVICE_SLR_PATH +from instance.settings import SERVICE_ROOT_PATH, SERVICE_CR_PATH, SERVICE_SLR_PATH, IS_SINK, IS_SOURCE + +# Common parts. 
+paths = { + SERVICE_ROOT_PATH+SERVICE_SLR_PATH: Service_Mgmnt.create_app(), + SERVICE_ROOT_PATH+SERVICE_CR_PATH: Authorization_Management.create_app() + } + +if IS_SINK: + debug_log.info(SERVICE_ROOT_PATH+"/sink_flow") + paths[SERVICE_ROOT_PATH+"/sink_flow"] = Sink.create_app() +if IS_SOURCE: + paths[SERVICE_ROOT_PATH+"/source_flow"] = Source.create_app() + +application = DispatcherMiddleware(Service_Root.create_app(), paths) + + -application = DispatcherMiddleware(Service_Root.create_app(), - { - SERVICE_ROOT_PATH+SERVICE_SLR_PATH: Service_Mgmnt.create_app(), - SERVICE_ROOT_PATH+SERVICE_CR_PATH: Authorization_Management.create_app() - } - ) if __name__ == "__main__": run_simple('0.0.0.0', 7000, application, use_reloader=False, use_debugger=False, threaded=True) \ No newline at end of file diff --git a/Service_Mockup/Service/service.py b/Service_Mockup/Service/service.py index ed9e7df..fb6244f 100644 --- a/Service_Mockup/Service/service.py +++ b/Service_Mockup/Service/service.py @@ -3,15 +3,14 @@ import logging import time from json import loads -from requests import post -from sqlite3 import OperationalError, IntegrityError -import db_handler as db_handler from DetailedHTTPException import DetailedHTTPException, error_handler from flask import request, Blueprint, current_app from flask_cors import CORS from flask_restful import Resource, Api +from helpers import Helpers from jwcrypto import jwk +from requests import post debug_log = logging.getLogger("debug") @@ -63,80 +62,18 @@ def wrapper(*args, **kw): return wrapper -def storeJSON(DictionaryToStore): - db = db_handler.get_db() - try: - db_handler.init_db(db) - except OperationalError: - pass - - debug_log.info(DictionaryToStore) - - for key in DictionaryToStore: - debug_log.info(key) - # codes = {"jsons": {}} - # codes = {"jsons": {}} - try: - db.execute("INSERT INTO storage (ID,json) \ - VALUES (?, ?)", [key, dumps(DictionaryToStore[key])]) - db.commit() - except IntegrityError as e: - db.execute("UPDATE 
storage SET json=? WHERE ID=? ;", [dumps(DictionaryToStore[key]), key]) - db.commit() - - -def storeCodeUser(DictionaryToStore): - # {"code": "user_id"} - db = db_handler.get_db() - try: - db_handler.init_db(db) - except OperationalError: - pass - - debug_log.info(DictionaryToStore) - - for key in DictionaryToStore: - debug_log.info(key) - db.execute("INSERT INTO code_and_user_mapping (code, user_id) \ - VALUES (?, ?)", [key, dumps(DictionaryToStore[key])]) - db.commit() - - -def get_user_id_with_code(code): - db = db_handler.get_db() - for code_row in db_handler.query_db("select * from code_and_user_mapping where code = ?;", [code]): - user_from_db = code_row["user_id"] - return user_from_db - raise DetailedHTTPException(status=500, - detail={"msg": "Unable to link code to user_id in database", "detail": {"code": code}}, - title="Failed to link code to user_id") - # Letting world burn if user was not in db. Fail fast, fail hard. - - -def storeSurrogateJSON(DictionaryToStore): - db = db_handler.get_db() - try: - db_handler.init_db(db) - except OperationalError: - pass - - debug_log.info(DictionaryToStore) - - for key in DictionaryToStore: - debug_log.info(key) - db.execute("INSERT INTO surrogate_and_user_mapping (user_id, surrogate_id) \ - VALUES (?, ?)", [key, dumps(DictionaryToStore[key])]) - db.commit() - - class UserLogin(Resource): + def __init__(self): + super(UserLogin, self).__init__() + self.helpers = Helpers(current_app.config) + @timeme @error_handler def post(self): debug_log.info(dumps(request.json, indent=2)) user_id = request.json["user_id"] code = request.json["code"] - storeCodeUser({code: user_id}) + self.helpers.storeCodeUser({code: user_id}) debug_log.info("User logged in with id ({})".format(format(user_id))) endpoint = "/api/1.2/slr/auth" @@ -156,25 +93,34 @@ def post(self): class RegisterSur(Resource): + def __init__(self): + super(RegisterSur, self).__init__() + self.db_path = current_app.config["DATABASE_PATH"] + self.helpers = 
Helpers(current_app.config) @timeme @error_handler def post(self): try: # Remove this check once debugging is done. TODO - user_id = get_user_id_with_code(request.json["code"]) + user_id = self.helpers.get_user_id_with_code(request.json["code"]) debug_log.info("We got surrogate_id {} for user_id {}".format(request.json["surrogate_id"], user_id)) debug_log.info(dumps(request.json, indent=2)) - storeSurrogateJSON({user_id: request.json}) + self.helpers.storeSurrogateJSON({user_id: request.json}) except Exception as e: pass class StoreSlr(Resource): + def __init__(self): + super(StoreSlr, self).__init__() + self.db_path = current_app.config["DATABASE_PATH"] + self.helpers = Helpers(current_app.config) + @timeme @error_handler def post(self): debug_log.info(dumps(request.json, indent=2)) store = request.json - storeJSON({store["data"]["surrogate_id"]: store}) + self.helpers.storeJSON({store["data"]["surrogate_id"]: store}) api.add_resource(UserLogin, '/login') diff --git a/Service_Mockup/Service_Root/root.py b/Service_Mockup/Service_Root/root.py index 90af604..9544d06 100644 --- a/Service_Mockup/Service_Root/root.py +++ b/Service_Mockup/Service_Root/root.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- __author__ = 'alpaloma' -from flask import Blueprint, make_response +from flask import Blueprint, make_response, current_app from flask_restful import Resource, Api from DetailedHTTPException import DetailedHTTPException, error_handler @@ -31,9 +31,10 @@ def output_json(data, code, headers=None): class Root(Resource): #@error_handler def get(self): - - status = '{"status": "running"}' - return json.loads(status) + app = current_app + config = app.config + status = {"id": config[""]} + return status api.add_resource(Root, '/') diff --git a/Service_Mockup/db_handler.py b/Service_Mockup/db_handler.py index 69e4f47..439d937 100644 --- a/Service_Mockup/db_handler.py +++ b/Service_Mockup/db_handler.py @@ -1,49 +1,18 @@ # -*- coding: utf-8 -*- -import sqlite3 +import logging +import 
MySQLdb -DATABASE = '/tmp/db_Service.sqlite' - -def get_db(): - db = None#getattr(g, '_database', None) +debug_log = logging.getLogger("debug") +def get_db(host, user, password, database, port): + db = None if db is None: - db = sqlite3.connect(DATABASE)#g._database = sqlite3.connect(DATABASE) - db.row_factory = sqlite3.Row - try: - init_db(db) - except Exception as e: - pass + db = MySQLdb.connect(host=host, user=user, passwd=password, db=database, port=port) return db - def make_dicts(cursor, row): return dict((cursor.description[idx][0], value) for idx, value in enumerate(row)) -def query_db(query, args=(), one=False): - cur = get_db().execute(query, args) - rv = cur.fetchall() - cur.close() - return (rv[0] if rv else None) if one else rv - -def sqlite_create_table(conn, table_name, table_columns): - conn.cursor.execute("CREATE TABLE {} ({});".format(table_name, ",".join(table_columns))) - conn.commit() - -def init_db(conn): - # create db for codes - # conn.execute('''CREATE TABLE codes (ID TEXT PRIMARY KEY NOT NULL, code TEXT NOT NULL);''') - conn.execute('''CREATE TABLE code_and_user_mapping - (code TEXT PRIMARY KEY NOT NULL, - user_id TEXT NOT NULL);''') - conn.execute('''CREATE TABLE surrogate_and_user_mapping - (user_id TEXT PRIMARY KEY NOT NULL, - surrogate_id TEXT NOT NULL);''') - conn.execute('''CREATE TABLE storage - (ID TEXT PRIMARY KEY NOT NULL, - json TEXT NOT NULL);''') - #sqlite_create_table(conn, "codes", ["id", "text", "code": "text"}) # Create table for codes - #sqlite_create_table(conn, "") - conn.commit() \ No newline at end of file diff --git a/Service_Mockup/doc/api/swagger_Service.yml b/Service_Mockup/doc/api/swagger_Service.yml index 8a83547..049ebd6 100644 --- a/Service_Mockup/doc/api/swagger_Service.yml +++ b/Service_Mockup/doc/api/swagger_Service.yml @@ -27,7 +27,7 @@ paths: 200: description: "Returns 200 OK or Error Message" 500: - description: "Internal server error" + description: "Internal server error. 
The actual status code and content of the error message may vary depending on error occurred." schema: $ref: "#/definitions/errors" /login: @@ -52,7 +52,7 @@ paths: 200: description: "Returns 200 OK or Error message" 500: - description: "Internal server error" + description: "Internal server error. The actual status code and content of the error message may vary depending on error occurred." schema: $ref: "#/definitions/errors" /store_slr: @@ -76,7 +76,7 @@ paths: 200: description: "Returns 200 OK or Error message" 500: - description: "Internal server error" + description: "Internal server error. The actual status code and content of the error message may vary depending on error occurred." schema: $ref: "#/definitions/errors" definitions: diff --git a/Service_Mockup/doc/database/Service_Mockup-DBinit.sql b/Service_Mockup/doc/database/Service_Mockup-DBinit.sql new file mode 100644 index 0000000..5a61a56 --- /dev/null +++ b/Service_Mockup/doc/database/Service_Mockup-DBinit.sql @@ -0,0 +1,68 @@ +-- MySQL Script generated by MySQL Workbench +-- to 15. 
syyskuuta 2016 15.37.01 +-- Model: New Model Version: 1.0 +-- MySQL Workbench Forward Engineering + +SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0; +SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0; +SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL,ALLOW_INVALID_DATES'; + +-- ----------------------------------------------------- +-- Schema mydb +-- ----------------------------------------------------- +-- ----------------------------------------------------- +-- Schema db_Service_Mockup +-- ----------------------------------------------------- + +-- ----------------------------------------------------- +-- Schema db_Service_Mockup +-- ----------------------------------------------------- +CREATE SCHEMA IF NOT EXISTS `db_Service_Mockup` DEFAULT CHARACTER SET latin1 ; +USE `db_Service_Mockup` ; + +-- ----------------------------------------------------- +-- Table `db_Service_Mockup`.`code_and_user_mapping` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Service_Mockup`.`code_and_user_mapping` ; + +CREATE TABLE IF NOT EXISTS `db_Service_Mockup`.`code_and_user_mapping` ( + `code` LONGTEXT NOT NULL, + `user_id` LONGTEXT NOT NULL, + PRIMARY KEY (`code`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER SET = utf8; + + +-- ----------------------------------------------------- +-- Table `db_Service_Mockup`.`storage` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Service_Mockup`.`storage` ; + +CREATE TABLE IF NOT EXISTS `db_Service_Mockup`.`storage` ( + `ID` LONGTEXT NOT NULL, + `json` LONGTEXT NOT NULL, + PRIMARY KEY (`ID`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER SET = utf8; + + +-- ----------------------------------------------------- +-- Table `db_Service_Mockup`.`surrogate_and_user_mapping` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Service_Mockup`.`surrogate_and_user_mapping` ; + +CREATE TABLE IF NOT EXISTS 
`db_Service_Mockup`.`surrogate_and_user_mapping` ( + `user_id` LONGTEXT NOT NULL, + `surrogate_id` LONGTEXT NOT NULL, + PRIMARY KEY (`user_id`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER SET = utf8; + + +SET SQL_MODE=@OLD_SQL_MODE; +SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS; +SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS; + +CREATE USER 'serviceMockup'@'%' IDENTIFIED BY 'MynorcA'; +GRANT CREATE TEMPORARY TABLES, DELETE, DROP, INSERT, LOCK TABLES, SELECT, UPDATE ON db_Service_Mockup.* TO 'serviceMockup'@'%'; +FLUSH PRIVILEGES; diff --git a/Service_Mockup/doc/database/Service_Mockup_db_image-v001.png b/Service_Mockup/doc/database/Service_Mockup_db_image-v001.png new file mode 100644 index 0000000..bb9337c Binary files /dev/null and b/Service_Mockup/doc/database/Service_Mockup_db_image-v001.png differ diff --git a/Service_Mockup/helpers.py b/Service_Mockup/helpers.py index 1f39f6b..1dd516a 100644 --- a/Service_Mockup/helpers.py +++ b/Service_Mockup/helpers.py @@ -1,12 +1,113 @@ # -*- coding: utf-8 -*- import importlib +import logging import pkgutil +from json import dumps, loads +from sqlite3 import IntegrityError from Crypto.PublicKey.RSA import importKey as import_rsa_key from flask import Blueprint from flask_restful import Api +import db_handler +from DetailedHTTPException import DetailedHTTPException +debug_log = logging.getLogger("debug") + +class Helpers: + def __init__(self, app_config): + self.host = app_config["MYSQL_HOST"] + self.cert_key_path = app_config["CERT_KEY_PATH"] + self.keysize = app_config["KEYSIZE"] + self.user = app_config["MYSQL_USER"] + self.passwd = app_config["MYSQL_PASSWORD"] + self.db = app_config["MYSQL_DB"] + self.port = app_config["MYSQL_PORT"] + + def query_db(self, query, args=()): + ''' + Simple queries to DB + :param query: SQL query + :param args: Arguments to inject into the query + :return: Single hit for the given query + ''' + db = db_handler.get_db(host=self.host, password=self.passwd, user=self.user, port=self.port, 
database=self.db) + cursor = db.cursor() + cur = cursor.execute(query, args) + try: + rv = cursor.fetchone() # Returns tuple + debug_log.info(rv) + if rv is not None: + db.close() + return rv[1] # The second value in the tuple. + else: + return None + except Exception as e: + debug_log.exception(e) + debug_log.info(cur) + db.close() + return None + + def storeJSON(self, DictionaryToStore): + db = db_handler.get_db(host=self.host, password=self.passwd, user=self.user, port=self.port, database=self.db) + cursor = db.cursor() + debug_log.info(DictionaryToStore) + + for key in DictionaryToStore: + debug_log.info(key) + # codes = {"jsons": {}} + # codes = {"jsons": {}} + try: + cursor.execute("INSERT INTO storage (ID,json) \ + VALUES (%s, %s)", (key, dumps(DictionaryToStore[key]))) + db.commit() + db.close() + except IntegrityError as e: + cursor.execute("UPDATE storage SET json=%s WHERE ID=%s ;", (dumps(DictionaryToStore[key]), key)) + db.commit() + db.close() + + def storeCodeUser(self, DictionaryToStore): + # {"code": "user_id"} + db = db_handler.get_db(host=self.host, password=self.passwd, user=self.user, port=self.port, database=self.db) + cursor = db.cursor() + debug_log.info(DictionaryToStore) + + for key in DictionaryToStore: + debug_log.info(key) + cursor.execute("INSERT INTO code_and_user_mapping (code, user_id) \ + VALUES (%s, %s)", (key, dumps(DictionaryToStore[key]))) + db.commit() + db.close() + + def get_user_id_with_code(self, code): + try: + db = db_handler.get_db(host=self.host, password=self.passwd, user=self.user, port=self.port, database=self.db) + query = self.query_db("select * from code_and_user_mapping where code=%s;", (code,)) + debug_log.info(query) + user_from_db = loads(query) + return user_from_db + except Exception as e: + debug_log.exception(e) + raise DetailedHTTPException(status=500, + detail={"msg": "Unable to link code to user_id in database", + "detail": {"code": code}}, + title="Failed to link code to user_id") + + # Letting world 
burn if user was not in db. Fail fast, fail hard. + + def storeSurrogateJSON(self, DictionaryToStore): + db = db_handler.get_db(host=self.host, password=self.passwd, user=self.user, port=self.port, database=self.db) + cursor = db.cursor() + debug_log.info(DictionaryToStore) + + for key in DictionaryToStore: + debug_log.info(key) + cursor.execute("INSERT INTO surrogate_and_user_mapping (user_id, surrogate_id) \ + VALUES (%s, %s)", [key, dumps(DictionaryToStore[key])]) + db.commit() + db.close() + def read_key(path, password=None): ## # Read RSA key from PEM file and return JWK object of it. diff --git a/Service_Mockup/instance/__init__.py b/Service_Mockup/instance/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/Service_Mockup/instance/settings.py b/Service_Mockup/instance/settings.py new file mode 100644 index 0000000..4ff9e55 --- /dev/null +++ b/Service_Mockup/instance/settings.py @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +timeout = 8 + +SERVICE_MGMNT_URL = "http://localhost:7000" + +DEBUG_MODE = True \ No newline at end of file diff --git a/Service_Mockup/instance/settings_template.py.j2 b/Service_Mockup/instance/settings_template.py.j2 new file mode 100644 index 0000000..fb300e9 --- /dev/null +++ b/Service_Mockup/instance/settings_template.py.j2 @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +timeout = 8 +KEYSIZE = 512 +{% if SERVICE_MGMNT_URL is defined %} +SERVICE_MGMNT_URL = {{ SERVICE_MGMNT_URL }} +{% else %} +SERVICE_MGMNT_URL = "http://localhost:7000" +{% endif %} + + +{% if DATABASE_PATH is defined %} +DATABASE_PATH = {{ DATABASE_PATH }} +{% else %} + +DATABASE_PATH = "./db_Mockup.sqlite" +{% endif %} + + +# Name of host to connect to. Default: use the local host via a UNIX socket (where applicable) +{% if MYSQL_HOST is defined %} +MYSQL_HOST = {{ MYSQL_HOST }} +{% else %} +MYSQL_HOST = 'localhost' +{% endif %} + + # User to authenticate as. Default: current effective user. 
+{% if MYSQL_USER is defined %} +MYSQL_USER = {{ MYSQL_USER }} +{% else %} +MYSQL_USER = 'serviceMockup' +{% endif %} + +# Password to authenticate with. Default: no password. +{% if MYSQL_PASSWORD is defined %} +MYSQL_PASSWORD = {{ MYSQL_PASSWORD }} +{% else %} +MYSQL_PASSWORD = 'MynorcA' +{% endif %} + +# Database to use. Default: no default database. +{% if MYSQL_DB is defined %} +MYSQL_DB = {{ MYSQL_DB }} +{% else %} +MYSQL_DB = 'db_Service_Mockup' +{% endif %} + +# TCP port of MySQL server. Default: 3306. +{% if MYSQL_PORT is defined %} +MYSQL_PORT = {{ MYSQL_PORT }} +{% else %} +MYSQL_PORT = 3306 +{% endif %} + + +{% if CERT_PATH is defined %} +CERT_PATH = {{ CERT_PATH }} +{% else %} +CERT_PATH = "./service_key.jwk" +{% endif %} + +{% if CERT_KEY_PATH is defined %} +CERT_KEY_PATH = {{ CERT_KEY_PATH }} +{% else %} +CERT_KEY_PATH = "./service_key.jwk" +{% endif %} + +{% if CERT_PASSWORD_PATH is defined %} +CERT_PASSWORD_PATH = {{ CERT_PASSWORD_PATH }} +{% else %} +CERT_PASSWORD_PATH = "./cert_pw" +{% endif %} + + + +{% if DEBUG_MODE is defined %} +DEBUG_MODE = {{ DEBUG_MODE }} +{% else %} +DEBUG_MODE = True +{% endif %} \ No newline at end of file diff --git a/Service_Mockup/requirements.txt b/Service_Mockup/requirements.txt index 6a0fe52..d1978bd 100644 --- a/Service_Mockup/requirements.txt +++ b/Service_Mockup/requirements.txt @@ -20,6 +20,7 @@ jsonschema==2.5.1 jwcrypto==0.3.1 kombu==3.0.35 MarkupSafe==0.23 +mysqlclient==1.3.7 pyasn1==0.1.9 pycparser==2.14 pycryptodome==3.4 @@ -29,5 +30,7 @@ pytz==2016.6.1 redis==2.10.5 requests==2.11.1 six==1.10.0 +uWSGI==2.0.13.1 Werkzeug==0.11.10 wheel==0.24.0 +restapi-logging-handler==0.2.2 \ No newline at end of file diff --git a/Service_Mockup/wsgi.py b/Service_Mockup/wsgi.py index c806688..9c3b6a5 100644 --- a/Service_Mockup/wsgi.py +++ b/Service_Mockup/wsgi.py @@ -18,7 +18,7 @@ try: from restapi_logging_handler import RestApiHandler - restapihandler = RestApiHandler("http://localhost:9004/") + restapihandler = 
RestApiHandler("http://172.18.0.1:9004/") logger.addHandler(restapihandler) except Exception as e: diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..e29cc65 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,239 @@ +version: '2' + +services: + + mysql-db: + image: mysql:5.6 + container_name: mysql-db + volumes: + - mysql-data:/var/lib/mysql + - "./init-db:/docker-entrypoint-initdb.d:ro" + environment: + - MYSQL_ROOT_PASSWORD=Y3xUcheg + - MYSQL_USER=mydataaccount + - MYSQL_PASSWORD=wr8gabrA + networks: + - mydata_sdk_demo_network + + account: + build: + context: ./Account/ + dockerfile: Dockerfile-account + args: + - APP_INSTALL_PATH=/mydata-sdk-account + container_name: account + depends_on: + - mysql-db + command: uwsgi --socket 0.0.0.0:8080 --protocol=http -w wsgi --callable app --processes 2 + environment: + - PYTHONUNBUFFERED='true' + - MYSQL_HOST='mysql-db' + - MYSQL_USER='mydataaccount' + - MYSQL_PASSWORD='wr8gabrA' + - MYSQL_DB='MyDataAccount' + - MYSQL_PORT=3306 + - URL_PREFIX='' +# - SERVER_NAME='' +# - VIRTUAL_HOST=localhost + ports: + - '8080:8080' + networks: + - mydata_sdk_demo_network + + operator_components: + build: + context: . +# context: context: ./Operator_Components/ + dockerfile: Dockerfile-overholt + args: + - APP_INSTALL_PATH=/mydata-sdk-components + - OVERHOLT_APPLICATION_PATH=/Operator_Components + container_name: operator_components + depends_on: + - redis + environment: + - PYTHONUNBUFFERED='true' + - ACCOUNT_MANAGEMENT_URL='http://account:8080/' + - ACCOUNT_MANAGEMENT_USER="test_sdk" + - ACCOUNT_MANAGEMENT_PASSWORD="test_sdk_pw" + - DATABASE_PATH="./db_Operator.sqlite" + - MYSQL_HOST='mysql-db' + - MYSQL_USER='operator' + - MYSQL_PASSWORD='MynorcA' + - MYSQL_DB='db_Operator' + - MYSQL_PORT=3306 + # Naming this CELERY_BROKER_URL messes things up pretty bad. 
+ # Issue is discussed here + # https://github.com/celery/celery/issues/2765#issuecomment-228224236 + - SELERY_BROKER_URL='redis://redis:6379/0' + - SELERY_RESULT_BACKEND='redis://redis:6379/0' + - CERT_PATH="./service_key.jwk" + - CERT_KEY_PATH="./service_key.jwk" + - CERT_PASSWORD_PATH="./cert_pw" + - OPERATOR_UID="41e19fcd-1951-455f-9169-a303f990f52d" + - OPERATOR_ROOT_PATH="/api/1.2" + - OPERATOR_CR_PATH="/cr" + - OPERATOR_SLR_PATH="/slr" + - SERVICE_URL="http://service_components:7000" + - VIRTUAL_HOST=localhost.operator + ports: + - '5000:5000' + networks: + - mydata_sdk_demo_network + + service_components: + build: + context: . +# context: ./Service_Components/ + dockerfile: Dockerfile-overholt + args: + - APP_INSTALL_PATH=/mydata-sdk-components + - OVERHOLT_APPLICATION_PATH=/Service_Components + container_name: service_components + depends_on: + - redis + environment: + - PYTHONUNBUFFERED='true' + - DATABASE_PATH="./db_Srv.sqlite" + - MYSQL_HOST='mysql-db' + - MYSQL_USER='service' + - MYSQL_PASSWORD='MynorcA' + - MYSQL_DB='db_Srv' + - MYSQL_PORT=3306 + # Naming this CELERY_BROKER_URL messes things up pretty bad. + # Issue is discussed here + # https://github.com/celery/celery/issues/2765#issuecomment-228224236 + - SELERY_BROKER_URL='redis://redis:6379/0' + - SELERY_RESULT_BACKEND='redis://redis:6379/0' + - CERT_PATH="./service_key.jwk" + - CERT_KEY_PATH="./service_key.jwk" + - CERT_PASSWORD_PATH="./cert_pw" + - SERVICE_URL="http://service_mockup:2000" + - OPERATOR_URL="http://operator_components:5000" + - SERVICE_ROOT_PATH="/api/1.2" + - SERVICE_CR_PATH="/cr" + - SERVICE_SLR_PATH="/slr" + - VIRTUAL_HOST=localhost.service + networks: + - mydata_sdk_demo_network + + service_mockup: + build: + context: . 
+# context: ./Service_Mockup/ + dockerfile: Dockerfile-overholt + args: + - APP_INSTALL_PATH=/mydata-sdk-components + - OVERHOLT_APPLICATION_PATH=/Service_Mockup + container_name: service_mockup + depends_on: + - redis + environment: + - PYTHONUNBUFFERED='true' + - SERVICE_MGMNT_URL="http://service_components:7000" + - VIRTUAL_HOST=localhost.srvmockup + - MYSQL_HOST='mysql-db' + - MYSQL_USER='serviceMockup' + - MYSQL_PASSWORD='MynorcA' + - MYSQL_DB='db_Service_Mockup' + - MYSQL_PORT=3306 + networks: + - mydata_sdk_demo_network + + celery_worker_operator: + build: + context: . +# context: context: ./Operator_Components/ + dockerfile: Dockerfile-overholt + args: + - APP_INSTALL_PATH=/mydata-sdk-components + - OVERHOLT_APPLICATION_PATH=/Operator_Components + container_name: celery_worker_operator + depends_on: + - redis + # We can move docker-entrypoint.sh back to Dockerfile once we move to uwsgi (get rid of dumb-init) +# command: ["sh", "-c", "cd /mydata-sdk-components/Operator_Components && celery -A tasks worker"] + command: ["/docker-entrypoint-overholt.sh", "sh", "-c", "cd /mydata-sdk-components/Operator_Components && celery -A op_tasks worker -Q op_queue"] + environment: + - PYTHONUNBUFFERED='true' + - ACCOUNT_MANAGEMENT_URL='http://account:8080/' + - ACCOUNT_MANAGEMENT_USER="test_sdk" + - ACCOUNT_MANAGEMENT_PASSWORD="test_sdk_pw" + - DATABASE_PATH="./db_Operator.sqlite" + # Naming this CELERY_BROKER_URL messes things up pretty bad. + # Issue is discussed here + # https://github.com/celery/celery/issues/2765#issuecomment-228224236 + - SELERY_BROKER_URL='redis://redis:6379/0' + - SELERY_RESULT_BACKEND='redis://redis:6379/0' + # Needed in order to run Celery as root. 
+ - C_FORCE_ROOT="true" + - CERT_PATH="./service_key.jwk" + - CERT_KEY_PATH="./service_key.jwk" + - CERT_PASSWORD_PATH="./cert_pw" + - OPERATOR_UID="41e19fcd-1951-455f-9169-a303f990f52d" + - OPERATOR_ROOT_PATH="/api/1.2" + - OPERATOR_CR_PATH="/cr" + - OPERATOR_SLR_PATH="/slr" + - SERVICE_URL="http://service_components:7000" + - VIRTUAL_HOST=localhost.celeryworker.operator + networks: + - mydata_sdk_demo_network + + celery_worker_service: + build: + context: . +# context: context: ./Service_Components/ + dockerfile: Dockerfile-overholt + args: + - APP_INSTALL_PATH=/mydata-sdk-components + - OVERHOLT_APPLICATION_PATH=/Service_Components + container_name: celery_worker_service + depends_on: + - redis + # We can move docker-entrypoint.sh back to Dockerfile once we move to uwsgi (get rid of dumb-init) +# command: ["sh", "-c", "cd /mydata-sdk-components/Operator_Components && celery -A tasks worker"] + command: ["/docker-entrypoint-overholt.sh", "sh", "-c", "cd /mydata-sdk-components/Service_Components && celery -A srv_tasks worker -Q srv_queue"] + environment: + - PYTHONUNBUFFERED='true' + - ACCOUNT_MANAGEMENT_URL='http://account:8080/' + - ACCOUNT_MANAGEMENT_USER="test_sdk" + - ACCOUNT_MANAGEMENT_PASSWORD="test_sdk_pw" + - DATABASE_PATH="./db_Srv.sqlite" + # Naming this CELERY_BROKER_URL messes things up pretty bad. + # Issue is discussed here + # https://github.com/celery/celery/issues/2765#issuecomment-228224236 + - SELERY_BROKER_URL='redis://redis:6379/0' + - SELERY_RESULT_BACKEND='redis://redis:6379/0' + # Needed in order to run Celery as root. 
+ - C_FORCE_ROOT="true" + - MYSQL_HOST='mysql-db' + - MYSQL_USER='service' + - MYSQL_PASSWORD='MynorcA' + - MYSQL_DB='db_Srv' + - MYSQL_PORT=3306 + - CERT_PATH="./service_key.jwk" + - CERT_KEY_PATH="./service_key.jwk" + - CERT_PASSWORD_PATH="./cert_pw" + - OPERATOR_UID="41e19fcd-1951-455f-9169-a303f990f52d" + - OPERATOR_ROOT_PATH="/api/1.2" + - OPERATOR_CR_PATH="/cr" + - OPERATOR_SLR_PATH="/slr" + - SERVICE_URL="http://service_components:7000" + - VIRTUAL_HOST=localhost.celeryworker.service + networks: + - mydata_sdk_demo_network + + redis: + image: redis:3.2-alpine + container_name: redis + volumes: + - redis-data:/data + networks: + - mydata_sdk_demo_network + +volumes: + mysql-data: + redis-data: + +networks: + mydata_sdk_demo_network: diff --git a/docker-entrypoint-overholt.sh b/docker-entrypoint-overholt.sh new file mode 100755 index 0000000..af128d2 --- /dev/null +++ b/docker-entrypoint-overholt.sh @@ -0,0 +1,22 @@ +#!/bin/bash +#docker-entrypoint.sh + +# Note: This script uses the exec Bash command so that the final running +# application becomes the container’s PID 1. This allows the application to +# receive any Unix signals sent to the container. +# See the ENTRYPOINT help for more details. +# https://docs.docker.com/engine/userguide/eng-image/dockerfile_best-practices/#/entrypoint + +# -e - Exit immediately if a command exits with a non-zero status. +set -e + +# Preprocess configuration files based on environment variables given to +# "docker run" -command or Docker Compose +# TODO: Make this more robust and dynamic, e.g. check if instance-folder +# exists etc. 
+j2 $APP_INSTALL_PATH$OVERHOLT_APPLICATION_PATH/instance/settings_template.py.j2 > \ + $APP_INSTALL_PATH$OVERHOLT_APPLICATION_PATH/instance/settings.py + +# Try to start whatever was given as a parameter to "docker run" -command +exec "$@" + diff --git a/init-db/MyDataAccount-DBinit.sql b/init-db/MyDataAccount-DBinit.sql new file mode 100755 index 0000000..db3dfbb --- /dev/null +++ b/init-db/MyDataAccount-DBinit.sql @@ -0,0 +1,405 @@ +-- MySQL Script generated by MySQL Workbench +-- 11/17/16 16:42:54 +-- Model: New Model Version: 1.0 +-- MySQL Workbench Forward Engineering + +SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0; +SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0; +SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL'; + +-- ----------------------------------------------------- +-- Schema MyDataAccount +-- ----------------------------------------------------- + +-- ----------------------------------------------------- +-- Schema MyDataAccount +-- ----------------------------------------------------- +CREATE SCHEMA IF NOT EXISTS `MyDataAccount` DEFAULT CHARACTER SET utf8 ; +USE `MyDataAccount` ; + +-- ----------------------------------------------------- +-- Table `MyDataAccount`.`Accounts` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `MyDataAccount`.`Accounts` ; + +CREATE TABLE IF NOT EXISTS `MyDataAccount`.`Accounts` ( + `id` INT NOT NULL AUTO_INCREMENT, + `globalIdentifier` VARCHAR(255) NOT NULL, + `activated` TINYINT(1) NOT NULL DEFAULT 0, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, + PRIMARY KEY (`id`), + UNIQUE INDEX `globalIdenttifyer_UNIQUE` (`globalIdentifier` ASC)) +ENGINE = InnoDB; + + +-- ----------------------------------------------------- +-- Table `MyDataAccount`.`Particulars` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `MyDataAccount`.`Particulars` ; + +CREATE TABLE IF NOT EXISTS `MyDataAccount`.`Particulars` ( + `id` INT NOT NULL 
AUTO_INCREMENT, + `firstname` VARCHAR(255) NOT NULL, + `lastname` VARCHAR(255) NOT NULL, + `dateOfBirth` DATE NULL DEFAULT NULL, + `img_url` VARCHAR(255) NULL DEFAULT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, + `Accounts_id` INT NOT NULL, + PRIMARY KEY (`id`), + INDEX `fk_Particulars_Accounts1_idx` (`Accounts_id` ASC), + CONSTRAINT `fk_Particulars_Accounts1` + FOREIGN KEY (`Accounts_id`) + REFERENCES `MyDataAccount`.`Accounts` (`id`) + ON DELETE NO ACTION + ON UPDATE NO ACTION) +ENGINE = InnoDB; + + +-- ----------------------------------------------------- +-- Table `MyDataAccount`.`ServiceLinkRecords` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `MyDataAccount`.`ServiceLinkRecords` ; + +CREATE TABLE IF NOT EXISTS `MyDataAccount`.`ServiceLinkRecords` ( + `id` INT NOT NULL AUTO_INCREMENT, + `serviceLinkRecord` BLOB NOT NULL, + `Accounts_id` INT NOT NULL, + `serviceLinkRecordId` VARCHAR(255) NOT NULL, + `serviceId` VARCHAR(255) NOT NULL, + `surrogateId` VARCHAR(255) NOT NULL, + `operatorId` VARCHAR(255) NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, + PRIMARY KEY (`id`), + INDEX `fk_ServiceLinkRecords_Accounts1_idx` (`Accounts_id` ASC), + UNIQUE INDEX `serviceLinkRecordId_UNIQUE` (`serviceLinkRecordId` ASC), + CONSTRAINT `fk_ServiceLinkRecords_Accounts1` + FOREIGN KEY (`Accounts_id`) + REFERENCES `MyDataAccount`.`Accounts` (`id`) + ON DELETE NO ACTION + ON UPDATE NO ACTION) +ENGINE = InnoDB; + + +-- ----------------------------------------------------- +-- Table `MyDataAccount`.`ConsentRecords` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `MyDataAccount`.`ConsentRecords` ; + +CREATE TABLE IF NOT EXISTS `MyDataAccount`.`ConsentRecords` ( + `id` INT NOT NULL AUTO_INCREMENT, + `consentRecord` BLOB NOT NULL, + `ServiceLinkRecords_id` INT NOT NULL, + `surrogateId` VARCHAR(255) NOT NULL, + `consentRecordId` VARCHAR(255) NOT NULL, + `ResourceSetId` VARCHAR(255) NOT NULL, + 
`serviceLinkRecordId` VARCHAR(255) NOT NULL, + `subjectId` VARCHAR(255) NOT NULL, + `role` VARCHAR(255) NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, + PRIMARY KEY (`id`), + INDEX `fk_ConsentRecords_ServiceLinkRecords1_idx` (`ServiceLinkRecords_id` ASC), + UNIQUE INDEX `consentRecordId_UNIQUE` (`consentRecordId` ASC), + CONSTRAINT `fk_ConsentRecords_ServiceLinkRecords1` + FOREIGN KEY (`ServiceLinkRecords_id`) + REFERENCES `MyDataAccount`.`ServiceLinkRecords` (`id`) + ON DELETE NO ACTION + ON UPDATE NO ACTION) +ENGINE = InnoDB; + + +-- ----------------------------------------------------- +-- Table `MyDataAccount`.`LocalIdentityPWDs` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `MyDataAccount`.`LocalIdentityPWDs` ; + +CREATE TABLE IF NOT EXISTS `MyDataAccount`.`LocalIdentityPWDs` ( + `id` INT NOT NULL AUTO_INCREMENT, + `password` VARCHAR(255) NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, + PRIMARY KEY (`id`)) +ENGINE = InnoDB; + + +-- ----------------------------------------------------- +-- Table `MyDataAccount`.`LocalIdentities` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `MyDataAccount`.`LocalIdentities` ; + +CREATE TABLE IF NOT EXISTS `MyDataAccount`.`LocalIdentities` ( + `id` INT NOT NULL AUTO_INCREMENT, + `username` VARCHAR(255) NOT NULL, + `LocalIdentityPWDs_id` INT NOT NULL, + `Accounts_id` INT NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, + PRIMARY KEY (`id`), + UNIQUE INDEX `username_UNIQUE` (`username` ASC), + INDEX `fk_LocalIdentities_LocalIdentityPWDs1_idx` (`LocalIdentityPWDs_id` ASC), + INDEX `fk_LocalIdentities_Accounts1_idx` (`Accounts_id` ASC), + CONSTRAINT `fk_LocalIdentities_LocalIdentityPWDs1` + FOREIGN KEY (`LocalIdentityPWDs_id`) + REFERENCES `MyDataAccount`.`LocalIdentityPWDs` (`id`) + ON DELETE NO ACTION + ON UPDATE NO ACTION, + CONSTRAINT `fk_LocalIdentities_Accounts1` + FOREIGN KEY (`Accounts_id`) + REFERENCES `MyDataAccount`.`Accounts` (`id`) + ON 
DELETE NO ACTION + ON UPDATE NO ACTION) +ENGINE = InnoDB; + + +-- ----------------------------------------------------- +-- Table `MyDataAccount`.`RemoteIdentityProviders` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `MyDataAccount`.`RemoteIdentityProviders` ; + +CREATE TABLE IF NOT EXISTS `MyDataAccount`.`RemoteIdentityProviders` ( + `id` INT NOT NULL AUTO_INCREMENT, + `name` VARCHAR(255) NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, + PRIMARY KEY (`id`)) +ENGINE = InnoDB; + + +-- ----------------------------------------------------- +-- Table `MyDataAccount`.`RemoteIdentities` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `MyDataAccount`.`RemoteIdentities` ; + +CREATE TABLE IF NOT EXISTS `MyDataAccount`.`RemoteIdentities` ( + `id` INT NOT NULL AUTO_INCREMENT, + `remoteUniqueId` VARCHAR(255) NOT NULL, + `Accounts_id` INT NOT NULL, + `RemoteIdentityProviders_id` INT NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, + PRIMARY KEY (`id`), + UNIQUE INDEX `opdenIdIdentifyer_UNIQUE` (`remoteUniqueId` ASC), + INDEX `fk_RemoteIdentities_Accounts1_idx` (`Accounts_id` ASC), + INDEX `fk_RemoteIdentities_RemoteIdentityProviders1_idx` (`RemoteIdentityProviders_id` ASC), + CONSTRAINT `fk_RemoteIdentities_Accounts1` + FOREIGN KEY (`Accounts_id`) + REFERENCES `MyDataAccount`.`Accounts` (`id`) + ON DELETE NO ACTION + ON UPDATE NO ACTION, + CONSTRAINT `fk_RemoteIdentities_RemoteIdentityProviders1` + FOREIGN KEY (`RemoteIdentityProviders_id`) + REFERENCES `MyDataAccount`.`RemoteIdentityProviders` (`id`) + ON DELETE NO ACTION + ON UPDATE NO ACTION) +ENGINE = InnoDB; + + +-- ----------------------------------------------------- +-- Table `MyDataAccount`.`Salts` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `MyDataAccount`.`Salts` ; + +CREATE TABLE IF NOT EXISTS `MyDataAccount`.`Salts` ( + `id` INT NOT NULL AUTO_INCREMENT, + `salt` VARCHAR(255) NOT NULL, + 
`LocalIdentities_id` INT NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, + PRIMARY KEY (`id`), + UNIQUE INDEX `hash_UNIQUE` (`salt` ASC), + INDEX `fk_Salts_LocalIdentities1_idx` (`LocalIdentities_id` ASC), + CONSTRAINT `fk_Salts_LocalIdentities1` + FOREIGN KEY (`LocalIdentities_id`) + REFERENCES `MyDataAccount`.`LocalIdentities` (`id`) + ON DELETE NO ACTION + ON UPDATE NO ACTION) +ENGINE = InnoDB; + + +-- ----------------------------------------------------- +-- Table `MyDataAccount`.`Settings` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `MyDataAccount`.`Settings` ; + +CREATE TABLE IF NOT EXISTS `MyDataAccount`.`Settings` ( + `id` INT NOT NULL AUTO_INCREMENT, + `setting_key` VARCHAR(255) NOT NULL, + `setting_value` VARCHAR(255) NOT NULL, + `Accounts_id` INT NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, + PRIMARY KEY (`id`), + INDEX `fk_Settings_Accounts1_idx` (`Accounts_id` ASC), + CONSTRAINT `fk_Settings_Accounts1` + FOREIGN KEY (`Accounts_id`) + REFERENCES `MyDataAccount`.`Accounts` (`id`) + ON DELETE NO ACTION + ON UPDATE NO ACTION) +ENGINE = InnoDB; + + +-- ----------------------------------------------------- +-- Table `MyDataAccount`.`Contacts` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `MyDataAccount`.`Contacts` ; + +CREATE TABLE IF NOT EXISTS `MyDataAccount`.`Contacts` ( + `id` INT NOT NULL AUTO_INCREMENT, + `address1` VARCHAR(255) NULL DEFAULT NULL, + `address2` VARCHAR(255) NULL, + `postalCode` VARCHAR(255) NULL DEFAULT NULL, + `city` VARCHAR(255) NULL DEFAULT NULL, + `state` VARCHAR(255) NULL DEFAULT NULL, + `country` VARCHAR(255) NULL DEFAULT NULL, + `entryType` VARCHAR(255) NOT NULL, + `prime` TINYINT(1) NOT NULL DEFAULT 0, + `Accounts_id` INT NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, + PRIMARY KEY (`id`), + INDEX `fk_Contacts_Accounts1_idx` (`Accounts_id` ASC), + CONSTRAINT `fk_Contacts_Accounts1` + FOREIGN KEY (`Accounts_id`) + REFERENCES 
`MyDataAccount`.`Accounts` (`id`) + ON DELETE NO ACTION + ON UPDATE NO ACTION) +ENGINE = InnoDB; + + +-- ----------------------------------------------------- +-- Table `MyDataAccount`.`ConsentStatusRecords` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `MyDataAccount`.`ConsentStatusRecords` ; + +CREATE TABLE IF NOT EXISTS `MyDataAccount`.`ConsentStatusRecords` ( + `id` INT NOT NULL AUTO_INCREMENT, + `consentStatusRecordId` VARCHAR(255) NOT NULL, + `consentStatus` VARCHAR(255) NOT NULL, + `consentStatusRecord` BLOB NOT NULL, + `ConsentRecords_id` INT NOT NULL, + `consentRecordId` VARCHAR(255) NOT NULL, + `issued_at` BIGINT NOT NULL, + `prevRecordId` VARCHAR(255) NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, + PRIMARY KEY (`id`), + INDEX `fk_ConsentStatusRecords_ConsentRecords1_idx` (`ConsentRecords_id` ASC), + UNIQUE INDEX `consentStatusRecordId_UNIQUE` (`consentStatusRecordId` ASC), + CONSTRAINT `fk_ConsentStatusRecords_ConsentRecords1` + FOREIGN KEY (`ConsentRecords_id`) + REFERENCES `MyDataAccount`.`ConsentRecords` (`id`) + ON DELETE NO ACTION + ON UPDATE NO ACTION) +ENGINE = InnoDB; + + +-- ----------------------------------------------------- +-- Table `MyDataAccount`.`ServiceLinkStatusRecords` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `MyDataAccount`.`ServiceLinkStatusRecords` ; + +CREATE TABLE IF NOT EXISTS `MyDataAccount`.`ServiceLinkStatusRecords` ( + `id` INT NOT NULL AUTO_INCREMENT, + `serviceLinkStatus` VARCHAR(255) NOT NULL, + `serviceLinkStatusRecord` BLOB NOT NULL, + `ServiceLinkRecords_id` INT NOT NULL, + `serviceLinkRecordId` VARCHAR(255) NOT NULL, + `issued_at` BIGINT NOT NULL, + `prevRecordId` VARCHAR(255) NOT NULL, + `serviceLinkStatusRecordId` VARCHAR(255) NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, + PRIMARY KEY (`id`), + INDEX `fk_ServiceLinkStatusRecords_ServiceLinkRecords1_idx` (`ServiceLinkRecords_id` ASC), + UNIQUE INDEX 
`serviceLinkStatusRecordId_UNIQUE` (`serviceLinkStatusRecordId` ASC), + CONSTRAINT `fk_ServiceLinkStatusRecords_ServiceLinkRecords1` + FOREIGN KEY (`ServiceLinkRecords_id`) + REFERENCES `MyDataAccount`.`ServiceLinkRecords` (`id`) + ON DELETE NO ACTION + ON UPDATE NO ACTION) +ENGINE = InnoDB; + + +-- ----------------------------------------------------- +-- Table `MyDataAccount`.`EventLogs` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `MyDataAccount`.`EventLogs` ; + +CREATE TABLE IF NOT EXISTS `MyDataAccount`.`EventLogs` ( + `id` INT NOT NULL AUTO_INCREMENT, + `actor` VARCHAR(255) NOT NULL, + `event` BLOB NOT NULL, + `created` BIGINT NOT NULL, + `Accounts_id` INT NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, + PRIMARY KEY (`id`), + INDEX `fk_EventLogs_Accounts1_idx` (`Accounts_id` ASC), + CONSTRAINT `fk_EventLogs_Accounts1` + FOREIGN KEY (`Accounts_id`) + REFERENCES `MyDataAccount`.`Accounts` (`id`) + ON DELETE NO ACTION + ON UPDATE NO ACTION) +ENGINE = InnoDB; + + +-- ----------------------------------------------------- +-- Table `MyDataAccount`.`Emails` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `MyDataAccount`.`Emails` ; + +CREATE TABLE IF NOT EXISTS `MyDataAccount`.`Emails` ( + `id` INT NOT NULL AUTO_INCREMENT, + `email` VARCHAR(255) NULL DEFAULT NULL, + `entryType` VARCHAR(255) NOT NULL, + `prime` TINYINT(1) NOT NULL DEFAULT 0, + `Accounts_id` INT NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, + PRIMARY KEY (`id`), + INDEX `fk_Emails_Accounts1_idx` (`Accounts_id` ASC), + CONSTRAINT `fk_Emails_Accounts1` + FOREIGN KEY (`Accounts_id`) + REFERENCES `MyDataAccount`.`Accounts` (`id`) + ON DELETE NO ACTION + ON UPDATE NO ACTION) +ENGINE = InnoDB; + + +-- ----------------------------------------------------- +-- Table `MyDataAccount`.`Telephones` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `MyDataAccount`.`Telephones` ; + +CREATE TABLE IF NOT 
EXISTS `MyDataAccount`.`Telephones` ( + `id` INT NOT NULL AUTO_INCREMENT, + `tel` VARCHAR(255) NULL DEFAULT NULL, + `entryType` VARCHAR(255) NOT NULL, + `prime` TINYINT(1) NOT NULL DEFAULT 0, + `Accounts_id` INT NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, + PRIMARY KEY (`id`), + INDEX `fk_Telephones_Accounts1_idx` (`Accounts_id` ASC), + CONSTRAINT `fk_Telephones_Accounts1` + FOREIGN KEY (`Accounts_id`) + REFERENCES `MyDataAccount`.`Accounts` (`id`) + ON DELETE NO ACTION + ON UPDATE NO ACTION) +ENGINE = InnoDB; + + +-- ----------------------------------------------------- +-- Table `MyDataAccount`.`OneTimeCookies` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `MyDataAccount`.`OneTimeCookies` ; + +CREATE TABLE IF NOT EXISTS `MyDataAccount`.`OneTimeCookies` ( + `id` INT NOT NULL AUTO_INCREMENT, + `oneTimeCookie` VARCHAR(255) NOT NULL, + `used` TINYINT(1) NOT NULL DEFAULT 0, + `created` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `LocalIdentities_id` INT NOT NULL, + `deleted` TINYINT(1) NOT NULL DEFAULT 0, + UNIQUE INDEX `oneTimeCookie_UNIQUE` (`oneTimeCookie` ASC), + PRIMARY KEY (`id`), + INDEX `fk_OneTimeCookies_LocalIdentities1_idx` (`LocalIdentities_id` ASC), + CONSTRAINT `fk_OneTimeCookies_LocalIdentities1` + FOREIGN KEY (`LocalIdentities_id`) + REFERENCES `MyDataAccount`.`LocalIdentities` (`id`) + ON DELETE NO ACTION + ON UPDATE NO ACTION) +ENGINE = InnoDB; + + +SET SQL_MODE=@OLD_SQL_MODE; +SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS; +SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS; diff --git a/init-db/MyDataAccount-UserInit.sql b/init-db/MyDataAccount-UserInit.sql new file mode 100644 index 0000000..042a923 --- /dev/null +++ b/init-db/MyDataAccount-UserInit.sql @@ -0,0 +1,11 @@ +-- MySQL Script +-- 09/14/16 10:31:08 + +REVOKE ALL PRIVILEGES, GRANT OPTION FROM 'mydataaccount'@'%'; +DROP USER 'mydataaccount'@'%'; +DELETE FROM mysql.user 
WHERE user='mydataaccount'; +FLUSH PRIVILEGES; + +CREATE USER 'mydataaccount'@'%' IDENTIFIED BY 'wr8gabrA'; +GRANT CREATE TEMPORARY TABLES, DELETE, DROP, INSERT, LOCK TABLES, SELECT, UPDATE ON MyDataAccount.* TO 'mydataaccount'@'%'; +FLUSH PRIVILEGES; \ No newline at end of file diff --git a/init-db/Operator_Components-DBinit.sql b/init-db/Operator_Components-DBinit.sql new file mode 100644 index 0000000..2025b6f --- /dev/null +++ b/init-db/Operator_Components-DBinit.sql @@ -0,0 +1,83 @@ +-- MySQL Script generated by MySQL Workbench +-- to 15. syyskuuta 2016 15.32.11 +-- Model: New Model Version: 1.0 +-- MySQL Workbench Forward Engineering + +SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0; +SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0; +SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL,ALLOW_INVALID_DATES'; + +-- ----------------------------------------------------- +-- Schema mydb +-- ----------------------------------------------------- +-- ----------------------------------------------------- +-- Schema db_Operator +-- ----------------------------------------------------- + +-- ----------------------------------------------------- +-- Schema db_Operator +-- ----------------------------------------------------- +CREATE SCHEMA IF NOT EXISTS `db_Operator` DEFAULT CHARACTER SET utf8 ; +USE `db_Operator` ; + +-- ----------------------------------------------------- +-- Table `db_Operator`.`cr_tbl` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Operator`.`cr_tbl` ; + +CREATE TABLE IF NOT EXISTS `db_Operator`.`cr_tbl` ( + `rs_id` LONGTEXT NOT NULL, + `json` LONGTEXT NOT NULL, + PRIMARY KEY (`rs_id`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER SET = utf8; + + +-- ----------------------------------------------------- +-- Table `db_Operator`.`rs_id_tbl` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Operator`.`rs_id_tbl` ; + +CREATE TABLE IF NOT EXISTS 
`db_Operator`.`rs_id_tbl` ( + `rs_id` LONGTEXT NOT NULL, + `used` TINYINT(1) NOT NULL, + PRIMARY KEY (`rs_id`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER SET = utf8; + + +-- ----------------------------------------------------- +-- Table `db_Operator`.`session_store` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Operator`.`session_store` ; + +CREATE TABLE IF NOT EXISTS `db_Operator`.`session_store` ( + `code` LONGTEXT NOT NULL, + `json` LONGTEXT NOT NULL, + PRIMARY KEY (`code`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER SET = utf8; + +-- ----------------------------------------------------- +-- Table `db_Operator`.`keys_tbl` TODO: Check this, used to have kid as PK but would cause fails since service gives same key for all surrogates atm. +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Operator`.`service_keys_tbl` ; + +CREATE TABLE IF NOT EXISTS `db_Operator`.`service_keys_tbl` ( + `kid` LONGTEXT NOT NULL, + `surrogate_id` LONGTEXT NOT NULL, + `key_json` LONGTEXT NOT NULL, + PRIMARY KEY (`surrogate_id`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER SET = utf8; + + + + +SET SQL_MODE=@OLD_SQL_MODE; +SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS; +SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS; + +CREATE USER 'operator'@'%' IDENTIFIED BY 'MynorcA'; +GRANT CREATE TEMPORARY TABLES, DELETE, DROP, INSERT, LOCK TABLES, SELECT, UPDATE ON db_Operator.* TO 'operator'@'%'; +FLUSH PRIVILEGES; \ No newline at end of file diff --git a/init-db/Service_Components-DBinit.sql b/init-db/Service_Components-DBinit.sql new file mode 100644 index 0000000..ba9719e --- /dev/null +++ b/init-db/Service_Components-DBinit.sql @@ -0,0 +1,103 @@ +-- MySQL Script generated by MySQL Workbench +-- to 15. 
syyskuuta 2016 14.58.51 +-- Model: New Model Version: 1.0 +-- MySQL Workbench Forward Engineering + +SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0; +SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0; +SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE=''; + +-- ----------------------------------------------------- +-- Schema mydb +-- ----------------------------------------------------- +-- ----------------------------------------------------- +-- Schema db_Srv +-- ----------------------------------------------------- + +-- ----------------------------------------------------- +-- Schema db_Srv +-- ----------------------------------------------------- +CREATE SCHEMA IF NOT EXISTS `db_Srv` DEFAULT CHARACTER SET utf8 ; +USE `db_Srv` ; + +-- ----------------------------------------------------- +-- Table `db_Srv`.`codes` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Srv`.`codes` ; + +CREATE TABLE IF NOT EXISTS `db_Srv`.`codes` ( + `ID` LONGTEXT NOT NULL, + `code` LONGTEXT NOT NULL, + PRIMARY KEY (`ID`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER SET = utf8; + + +-- ----------------------------------------------------- +-- Table `db_Srv`.`cr_storage` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Srv`.`cr_storage` ; + +CREATE TABLE IF NOT EXISTS `db_Srv`.`cr_storage` ( + `cr_id` LONGTEXT NOT NULL, + `json` LONGTEXT NOT NULL, + `slr_id` LONGTEXT NOT NULL, + `surrogate_id` LONGTEXT NOT NULL, + `rs_id` LONGTEXT NOT NULL, + PRIMARY KEY (`cr_id`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER SET = utf8; + + +-- ----------------------------------------------------- +-- Table `db_Srv`.`csr_storage` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Srv`.`csr_storage` ; + +CREATE TABLE IF NOT EXISTS `db_Srv`.`csr_storage` ( + `cr_id` VARCHAR(255) NOT NULL, + `csr_id` VARCHAR(255) NOT NULL, + `previous_record_id` VARCHAr(255) NOT NULL, + `consent_status` 
VARCHAR(10) NOT NULL, + `json` LONGTEXT NOT NULL, + `slr_id` LONGTEXT NOT NULL, + `surrogate_id` LONGTEXT NOT NULL, + `rs_id` LONGTEXT NOT NULL, + PRIMARY KEY (`csr_id`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER SET = utf8; + + +-- ----------------------------------------------------- +-- Table `db_Srv`.`storage` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Srv`.`storage` ; + +CREATE TABLE IF NOT EXISTS `db_Srv`.`storage` ( + `surrogate_id` LONGTEXT NOT NULL, + `json` LONGTEXT NOT NULL, + PRIMARY KEY (`surrogate_id`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER SET = utf8; + + +-- ----------------------------------------------------- +-- Table `db_Srv`.`token_storage` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Srv`.`token_storage` ; + +CREATE TABLE IF NOT EXISTS `db_Srv`.`token_storage` ( + `cr_id` LONGTEXT NOT NULL, + `token` LONGTEXT NOT NULL, + PRIMARY KEY (`cr_id`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER SET = utf8; + + + +SET SQL_MODE=@OLD_SQL_MODE; +SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS; +SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS; +CREATE USER 'service'@'%' IDENTIFIED BY 'MynorcA'; +GRANT CREATE TEMPORARY TABLES, DELETE, DROP, INSERT, LOCK TABLES, SELECT, UPDATE ON db_Srv.* TO 'service'@'%'; +FLUSH PRIVILEGES; \ No newline at end of file diff --git a/init-db/Service_Mockup-DBinit.sql b/init-db/Service_Mockup-DBinit.sql new file mode 100644 index 0000000..5a61a56 --- /dev/null +++ b/init-db/Service_Mockup-DBinit.sql @@ -0,0 +1,68 @@ +-- MySQL Script generated by MySQL Workbench +-- to 15. 
syyskuuta 2016 15.37.01 +-- Model: New Model Version: 1.0 +-- MySQL Workbench Forward Engineering + +SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0; +SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0; +SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL,ALLOW_INVALID_DATES'; + +-- ----------------------------------------------------- +-- Schema mydb +-- ----------------------------------------------------- +-- ----------------------------------------------------- +-- Schema db_Service_Mockup +-- ----------------------------------------------------- + +-- ----------------------------------------------------- +-- Schema db_Service_Mockup +-- ----------------------------------------------------- +CREATE SCHEMA IF NOT EXISTS `db_Service_Mockup` DEFAULT CHARACTER SET latin1 ; +USE `db_Service_Mockup` ; + +-- ----------------------------------------------------- +-- Table `db_Service_Mockup`.`code_and_user_mapping` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Service_Mockup`.`code_and_user_mapping` ; + +CREATE TABLE IF NOT EXISTS `db_Service_Mockup`.`code_and_user_mapping` ( + `code` LONGTEXT NOT NULL, + `user_id` LONGTEXT NOT NULL, + PRIMARY KEY (`code`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER SET = utf8; + + +-- ----------------------------------------------------- +-- Table `db_Service_Mockup`.`storage` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Service_Mockup`.`storage` ; + +CREATE TABLE IF NOT EXISTS `db_Service_Mockup`.`storage` ( + `ID` LONGTEXT NOT NULL, + `json` LONGTEXT NOT NULL, + PRIMARY KEY (`ID`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER SET = utf8; + + +-- ----------------------------------------------------- +-- Table `db_Service_Mockup`.`surrogate_and_user_mapping` +-- ----------------------------------------------------- +DROP TABLE IF EXISTS `db_Service_Mockup`.`surrogate_and_user_mapping` ; + +CREATE TABLE IF NOT EXISTS 
`db_Service_Mockup`.`surrogate_and_user_mapping` ( + `user_id` LONGTEXT NOT NULL, + `surrogate_id` LONGTEXT NOT NULL, + PRIMARY KEY (`user_id`(255))) +ENGINE = InnoDB +DEFAULT CHARACTER SET = utf8; + + +SET SQL_MODE=@OLD_SQL_MODE; +SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS; +SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS; + +CREATE USER 'serviceMockup'@'%' IDENTIFIED BY 'MynorcA'; +GRANT CREATE TEMPORARY TABLES, DELETE, DROP, INSERT, LOCK TABLES, SELECT, UPDATE ON db_Service_Mockup.* TO 'serviceMockup'@'%'; +FLUSH PRIVILEGES; diff --git a/start.sh b/start.sh new file mode 100755 index 0000000..5200283 --- /dev/null +++ b/start.sh @@ -0,0 +1,13 @@ +mkdir -p ./init-db +cp ./Account/doc/database/MyDataAccount-DBinit.sql ./init-db/ +cp ./Account/doc/database/MyDataAccount-UserInit.sql ./init-db/ +cp ./Operator_Components/doc/database/Operator_Components-DBinit.sql ./init-db/ +cp ./Service_Components/doc/database/Service_Components-DBinit.sql ./init-db/ +cp ./Service_Mockup/doc/database/Service_Mockup-DBinit.sql ./init-db/ + +docker-compose rm --force mysql-db # Clean db +docker volume rm mydatasdkbleedingedge_mysql-data # Clean db +reset # Reset terminal +docker-compose down -v --remove-orphans # Clean out trash. +docker-compose up --build # Put the thing up and running + diff --git a/ui_flow.py b/ui_flow.py index fdf133c..812b276 100755 --- a/ui_flow.py +++ b/ui_flow.py @@ -1,58 +1,123 @@ # -*- coding: utf-8 -*- + import json import argparse from requests import get, post +from uuid import uuid4 # TODO: Maybe these should be given as parameters -Service_ID_A = 10 -Service_ID_B = 100 - +#Service_ID_Source = "57f3a57b0cf2fcf22eea33a2" # MyLocation +#Service_ID_Sink = "57f3a57b0cf2fcf22eea33a3" # PHR +#Service_ID_Source = "582b7df00cf2727145535753" # MyLocation +#Service_ID_Sink = "582b7df00cf2727145535754" # PHR +Service_ID_Source = "582f2bf50cf2f4663ec4f01f" # MyLocation +Service_ID_Sink = "582f2bf50cf2f4663ec4f020" # PHR # TODO: Add more printing. 
Now user barely knows if initialization happened and did it succeed or not. # Sends JSON-payloads to Account that create three new accounts. # Needed in order to start_ui_flow() -function to work. -def initialize(operator_url): +def initialize(account_url): + username = "example_username-" + str(uuid4()) + password = "example_password" + print ("\n##### CREATE USER ACCOUNTS #####") - print("NOTE: Throws an error if run for second time as you cannot " \ - "create more accounts with same unique usernames. " \ + print("NOTE: Throws an error if run for second time as you cannot " + "create more accounts with same unique usernames. " "(Will be fixed in later releases.)\n\n" ) - resp = post(operator_url + 'api/accounts/', - json={"firstName": "Erkki", "lastName": "Esimerkki", "dateOfBirth": "31-05-2016", - "email": "erkki.esimerkki@examlpe.org", "username": "testUffser", "password": "Hello", - "acceptTermsOfService": "True"}) + user_data = {"data": { + "type": "Account", + "attributes": { + 'firstName': 'ExampleFirstName', + 'lastName': 'ExampleLastName', + 'dateOfBirth': '2010-05-14', + 'email': username + '@examlpe.org', + 'username': username, + 'password': password, + 'acceptTermsOfService': 'True' + } + } + } + resp = post(account_url + 'api/accounts/', + json=user_data) print(resp.status_code, resp.reason, resp.text, resp.url) print(json.dumps(json.loads(resp.text), indent=2)) - post(operator_url + 'api/accounts/', - json={"firstName": "Iso", "lastName": "Pasi", "dateOfBirth": "31-05-2016", "email": "iso.pasi@examlpe.org", - "username": "pasi", "password": "0nk0va", "acceptTermsOfService": "True"}) - post(operator_url + 'api/accounts/', json={"firstName": "Dude", "lastName": "Dudeson", "dateOfBirth": "31-05-2016", - "email": "dude.dudeson@examlpe.org", "username": "mydata", - "password": "Hello", "acceptTermsOfService": "True"}) - return + user_data["data"]["attributes"]["firstName"] = "Iso" + user_data["data"]["attributes"]["lastName"] = "Pasi" + 
user_data["data"]["attributes"]["email"] = "iso.pasi@example.org" + user_data["data"]["attributes"]["username"] = "pasi" + user_data["data"]["attributes"]["password"] = "0nk0va" + resp = post(account_url + 'api/accounts/', + json=user_data) + print(resp.status_code, resp.reason, resp.text, resp.url) + print(json.dumps(json.loads(resp.text), indent=2)) + + user_data["data"]["attributes"]["firstName"] = "Dude" + user_data["data"]["attributes"]["lastName"] = "Dudeson" + user_data["data"]["attributes"]["email"] = "dude.dudeson@example.org" + user_data["data"]["attributes"]["username"] = "mydata" + user_data["data"]["attributes"]["password"] = "Hello" + resp = post(account_url + 'api/accounts/', + json=user_data) + print(resp.status_code, resp.reason, resp.text, resp.url) + print(json.dumps(json.loads(resp.text), indent=2)) + # post(account_url + 'api/accounts/', + # json={"firstName": "Iso", "lastName": "Pasi", "dateOfBirth": "31-05-2016", "email": "iso.pasi@examlpe.org", + # "username": "pasi", "password": "0nk0va", "acceptTermsOfService": "True"}) + # post(operator_url + 'api/accounts/', json={"firstName": "Dude", "lastName": "Dudeson", "dateOfBirth": "31-05-2016", + # "email": "dude.dudeson@examlpe.org", "username": "mydata", + # "password": "Hello", "acceptTermsOfService": "True"}) + return # TODO: Refactor and return something. -# First creates two Service Links by making a GET-request to Operator backend. -# Then gives a Consent for these Services by sending a Consent form as JSON-payload to Operator backend. -# Should print "201 Created" if the flow was excuted succesfully. 
-def start_ui_flow(operator_url): - print("\n##### MAKE TWO SERVICE LINKS #####") - slr_flow1 = get(operator_url + "api/1.2/slr/account/2/service/1") - print(slr_flow1.url, slr_flow1.reason, slr_flow1.status_code, slr_flow1.text) - slr_flow2 = get(operator_url + "api/1.2/slr/account/2/service/2") - print(slr_flow2.url, slr_flow2.reason, slr_flow2.status_code, slr_flow2.text) +# Creates two Service Links by making a GET-request to Operator backend. +def create_service_link(operator_url, service_id): + print("\n##### CREATE A SERVICE LINK #####") + slr_flow = get(operator_url + "api/1.2/slr/account/2/service/"+service_id) + if not slr_flow.ok: + print("Creation of first SLR failed with status ({}) reason ({}) and the following content:\n{}".format( + slr_flow.status_code, + slr_flow.reason, + json.dumps(json.loads(slr_flow.content), indent=2) + )) + raise Exception("SLR flow failed.") + print(slr_flow.url, slr_flow.reason, slr_flow.status_code, slr_flow.text) - # This format needs to be specified, even if done with url params instead. - ids = {"sink": Service_ID_B, "source": Service_ID_A} + return + + +# TODO: Refactor and return something. +# Gives a Consent for these Services by sending a Consent form as JSON-payload to Operator backend. +# Should print "201 Created" if the Consent was executed succesfully. +def give_consent(operator_url, sink_id, source_id): print("\n##### GIVE CONSENT #####") - req = get(operator_url + "api/1.2/cr/consent_form/account/2?sink={}&source={}".format(Service_ID_B, Service_ID_A)) + # This format needs to be specified, even if done with url params instead. 
+ ids = {"sink": sink_id, "source": source_id} + + print("\n###### 1.FETCH CONSENT FORM ######") + req = get(operator_url + "api/1.2/cr/consent_form/account/2?sink={}&source={}".format(sink_id, source_id)) + if not req.ok: + print("Fetching consent form consent failed with status ({}) reason ({}) and the following content:\n{}".format( + req.status_code, + req.reason, + json.dumps(json.loads(req.content), indent=2) + )) + raise Exception("Consent flow failed.") + + print("\n###### 2.SEND CONSENT FORM ######") print(req.url, req.reason, req.status_code, req.text) js = json.loads(req.text) - req = post(operator_url + "api/1.2/cr/consent_form/account/2", json=js) + if not req.ok: + print("Granting consent failed with status ({}) reason ({}) and the following content:\n{}".format( + req.status_code, + req.reason, + json.dumps(json.loads(req.content), indent=2) + )) + raise Exception("Consent flow failed.") print(req.url, req.reason, req.status_code) print("\n") @@ -67,7 +132,7 @@ def start_ui_flow(operator_url): # Parse command line arguments parser = argparse.ArgumentParser() - # TODO: Use boolean value instead of int. + # Urls help_string_account_url = \ "URL to Account. Defaults to 'http://localhost:8080'. \ NOTE: Throws an error if run for second time as you cannot\ @@ -87,8 +152,66 @@ def start_ui_flow(operator_url): default="http://localhost:5000/", required=False) + # Skips + help_string_skip_init = \ + "Should account init be skipped. Init is done by default. Specify this flag to skip init." + parser.add_argument("--skip_init", + help=help_string_skip_init, + action="store_true", + required=False) + + help_string_skip_slr = \ + "Should account init be skipped. Init is done by default. Specify this flag to skip init." + parser.add_argument("--skip_slr", + help=help_string_skip_slr, + action="store_true", + required=False) + + # IDs + help_string_sink_id = \ + "ID of the Sink. \ + Check that this matches to what is specified in Service Registry. 
\ + Defaults to '{}'.".format(Service_ID_Sink) + parser.add_argument("--sink_id", + help=help_string_sink_id, + type=str, + default=Service_ID_Sink, + required=False) + + help_string_source_id = \ + "ID of the Source. \ + Check that this matches to what is specified in Service Registry. \ + Defaults to '{}'.".format(Service_ID_Source) + parser.add_argument("--source_id", + help=help_string_source_id, + type=str, + default=Service_ID_Source, + required=False) + +# exclusive_grp = parser.add_mutually_exclusive_group() +# exclusive_grp.add_argument('--skip_init', action='store_true', dest='foo', help='skip init') +# exclusive_grp.add_argument('--no-foo', action='store_false', dest='foo', help='do not do foo') + args = parser.parse_args() - initialize(args.account_url) +# print 'Starting program', 'with' if args.foo else 'without', 'foo' +# print 'Starting program', 'with' if args.no_foo else 'without', 'no_foo' + + # Just for user to see the given input + print(args.account_url) + print(args.operator_url) + print(args.skip_init) + print(args.sink_id) + print(args.source_id) + + if not args.skip_init: + # Do not skip init + initialize(args.account_url) + + # SLR + if not args.skip_slr: + create_service_link(args.operator_url, args.sink_id) + create_service_link(args.operator_url, args.source_id) - start_ui_flow(args.operator_url) + # Consent + give_consent(args.operator_url, args.sink_id, args.source_id)