From 6a7ea41cf7749a6337d5077d588621d09dde412c Mon Sep 17 00:00:00 2001
From: Valentin Kuznetsov
Date: Thu, 15 Apr 2021 15:09:49 -0400
Subject: [PATCH] Adjust codebase to work with plain or gzipped request body
 payloads

---
 Server/Python/src/dbs/web/DBSWriterModel.py | 53 +++++++++++++++++----
 1 file changed, 43 insertions(+), 10 deletions(-)

diff --git a/Server/Python/src/dbs/web/DBSWriterModel.py b/Server/Python/src/dbs/web/DBSWriterModel.py
index 77a6d919..9a8d9085 100644
--- a/Server/Python/src/dbs/web/DBSWriterModel.py
+++ b/Server/Python/src/dbs/web/DBSWriterModel.py
@@ -18,10 +18,43 @@
 import traceback
+# import for gzip decompression utility
+import sys
+import gzip
+try:
+    import cStringIO as StringIO
+except ImportError: # python3
+    import io
+except:
+    import StringIO
 
 # CMSMonitoring modules
 from CMSMonitoring.NATS import NATSManager
 
+def decompress(body):
+    "Decompress the request body"
+    if sys.version.startswith('3.'):
+        zbuf = io.BytesIO()
+    else:
+        zbuf = StringIO.StringIO()
+    zbuf.write(body)
+    zbuf.seek(0)
+    zfile = gzip.GzipFile(mode='rb', fileobj=zbuf)
+    data = zfile.read()
+    zfile.close()
+    return data
+
+def read_body():
+    """
+    Provides uniform way to read either plain payload body
+    or gzipped one
+    """
+    raw = request.body.read(int(cherrypy.request.headers['Content-Length']))
+    # Content-Encoding may be absent; default to a plain (uncompressed) payload
+    if request.headers.get('Content-Encoding', '') == 'gzip':
+        raw = decompress(raw)
+    return raw
+
 class DBSWriterModel(DBSReaderModel):
     """
     DBS3 Server API Documentation
     """
 
@@ -98,7 +131,7 @@ def insertPrimaryDataset(self):
 
         """
         try :
-            body = request.body.read()
+            body = read_body()
             indata = cjson.decode(body)
             indata = validateJSONInputNoCopy("primds", indata)
             indata.update({"creation_date": dbsUtils().getTime(), "create_by": dbsUtils().getCreateBy() })
@@ -130,7 +163,7 @@ def insertOutputConfig(self):
 
         """
         try:
-            body = request.body.read()
+            body = read_body()
             indata = cjson.decode(body)
             indata = validateJSONInputNoCopy("dataset_conf_list", indata)
             indata.update({"creation_date": dbsUtils().getTime(),
@@ -181,7 +214,7 @@ def insertAcquisitionEra(self):
 
         """
         try:
-            body = request.body.read()
+            body = read_body()
             indata = cjson.decode(body)
             indata = validateJSONInputNoCopy("acquisition_era", indata)
             indata.update({"start_date": indata.get("start_date", dbsUtils().getTime()),\
@@ -210,7 +243,7 @@ def insertProcessingEra(self):
 
         """
         try:
-            body = request.body.read()
+            body = read_body()
             indata = cjson.decode(body)
             indata = validateJSONInputNoCopy('processing_era', indata)
             indata.update({"creation_date": indata.get("creation_date", dbsUtils().getTime()), \
@@ -247,7 +280,7 @@ def insertDataset(self):
 
         """
         try:
-            body = request.body.read()
+            body = read_body()
             indata = cjson.decode(body)
             indata = validateJSONInputNoCopy('dataset', indata)
             indata.update({"creation_date": dbsUtils().getTime(),
@@ -287,7 +320,7 @@ def insertBulkBlock(self):
 
         """
         try:
-            body = request.body.read()
+            body = read_body()
             indata = cjson.decode(body)
             if (indata.get("file_parent_list", []) and indata.get("dataset_parent_list", [])):
                 dbsExceptionHandler("dbsException-invalid-input2", "insertBulkBlock: dataset and file parentages cannot be in the input at the same time",
@@ -332,7 +365,7 @@ def insertFileParents(self):
 
         """
         try:
-            body = request.body.read()
+            body = read_body()
             indata = cjson.decode(body)
             indata = validateJSONInputNoCopy("file_parent", indata)
             self.dbsFile.insertFileParents(indata)
@@ -365,7 +398,7 @@ def insertBlock(self):
 
         """
         try:
-            body = request.body.read()
+            body = read_body()
             indata = cjson.decode(body)
             indata = validateJSONInputNoCopy("block", indata)
             self.dbsBlock.insertBlock(indata)
@@ -405,7 +438,7 @@ def insertFile(self, qInserts=False):
         """
         if qInserts in (False, 'False'): qInserts=False
         try:
-            body = request.body.read()
+            body = read_body()
             indata = cjson.decode(body)["files"]
             if not isinstance(indata, (list, dict)):
                 dbsExceptionHandler("dbsException-invalid-input", "Invalid Input DataType", self.logger.exception, \
@@ -563,7 +596,7 @@ def insertDataTier(self):
 
             conn = self.dbi.connection()
             tran = conn.begin()
-            body = request.body.read()
+            body = read_body()
             indata = cjson.decode(body)
             indata = validateJSONInputNoCopy("dataTier", indata)